id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
/DjangoComponents-0.0.0.33.tar.gz/DjangoComponents-0.0.0.33/django_components/static/js/build/.module-cache/57bc5a1a0e215d87b097f80724cb3db3a3befe00.js | var SearchOption = React.createClass({displayName: "SearchOption",
// One suggestion row in the dropdown. Props:
//   click    - callback invoked with this option's text when it is chosen
//   children - the suggestion text to display
update: function(e) {
// Report the chosen text back to the parent.
this.props.click(this.props.children);
},
render: function() {
// onMouseDown (not onClick) so the selection fires before the search
// input's onBlur hides the suggestion list (see CustomSearch.blur).
return React.createElement("div", {className: "search-suggestion", onMouseDown: this.update}, this.props.children);
}
});
var DropdownSearch = React.createClass({displayName: "DropdownSearch",
// Filtered suggestion list. Props:
//   data   - array of candidate strings
//   search - current query text; split on spaces into terms
//   click  - callback forwarded to each SearchOption
//   style  - inline style of the container (used to show/hide the list)
getInitialState: function() {
// NOTE(review): state.update is never read anywhere in this component —
// looks like dead state; confirm before removing.
return {update: this.props.click};
},
render: function() {
var items = [];
var click = this.props.click;
var search = this.props.search.split(' ');
var data = this.props.data;
// Keep only entries containing every search term (empty terms match all).
for (var i = 0; i < this.props.data.length; i++) {
var pushable = false;
var amt = 0;
for (var j = 0; j < search.length; j++) {
if (data[i].includes(search[j]) || search[j] == '') {
amt++;
}
}
if (amt == search.length) pushable = true;
if (pushable) {
items.push(this.props.data[i]);
}
}
// NOTE(review): list children have no "key" prop — React will warn;
// consider keying each SearchOption by its item text.
items = items.map(function(item) {return (React.createElement(SearchOption, {click: click}, item ))})
return (
React.createElement("div", {className: "dynamic-search-content", style: this.props.style},
items
)
);
}
});
var CustomSearch = React.createClass({displayName: "CustomSearch",
getInitialState: function() {
return {
search:'',
showSuggestions:false,
suggestionStyles:{display:'none'},
searchId:0,
data: JSON.parse(this.props.data),
idData: (JSON.parse(this.props.data)).map(function(item){return item[0]}),
nameData: (JSON.parse(this.props.data)).map(function(item){return item[1]})
};
},
change: function(e) {
this.setState({search: e.target.value});
},
focus: function() {
this.setState({suggestionStyles:{display:'block'}});
},
blur: function(e) {
this.setState({suggestionStyles:{display:'none'}});
},
clickSuggestion: function(item) {
this.setState({
search: item,
searchId: this.state.idData[this.state.nameData.indexOf(item)]
});
this.blur();
},
render: function() {
if (!(this.props.data)) {
throw "Please pass CustomSearch some data!";
}
console.log(this.state.nameData)
return (
React.createElement("div", {className: "lk-dynamic-search"},
React.createElement("input", {type: "text", onChange: this.change, onFocus: this.focus, onBlur: this.blur, value: this.state.search}),
React.createElement("input", {type: "text", name: this.props.name, value: this.state.searchId, style: {display:'none'}}),
React.createElement(DropdownSearch, {data: this.state.nameData, search: this.state.search, click: this.clickSuggestion, style: this.state.suggestionStyles})
)
);
}
}); | PypiClean |
/FinDt-3.0.2.tar.gz/FinDt-3.0.2/findt/FinDt.py |
r"""Classe base de suporte para operacoes com datas.
1)Objetivo:
-----------
Esta classe fornece um conjunto de funcoes cujo principal objetivo e
facilitar o trabalho com datas, voltadas, principalmente, para as financas:
dias uteis, dias corridos, numero de dias uteis entre duas datas, numero de
dias corridos entre duas datas.
"""
import csv
import re
import locale
locale.setlocale(locale.LC_ALL, '')
from datetime import date, timedelta
from collections import OrderedDict
__author__ = """\n""".join(['Marcelo G Facioli (mgfacioli@yahoo.com.br)'])
__version__ = "3.0.2"
class FormataData(object):
    """
    Helper used by the FinDt classes to convert dates between Python's
    ``date`` type and day/month/year strings.  String input may use any
    non-digit separator (``/``, ``:``, ``-`` or spaces); it is normalised to
    the canonical ``dd/mm/yyyy`` form before conversion.
    """

    # Two digits, a non-digit run, two digits, a non-digit run, four digits.
    _DATE_RE = re.compile(r'^(\d{2})\D*(\d{2})\D*(\d{4})$')

    def __init__(self, data=None):
        # May hold None, a dd/mm/yyyy-like string, or a datetime.date.
        self._data = data

    def normaliza_data(self):
        """Return the stored string rewritten with '/' separators (or None)."""
        if self._data is None:
            return None
        try:
            dia, mes, ano = self._DATE_RE.search(self._data).groups()
        except AttributeError as AttErr:
            # search() returned None: the string did not match the pattern.
            print("Separador Indefinido: {}".format(str(AttErr)))
            return
        except TypeError as TyErr:
            # The stored value is not a string at all.
            print("O parametro deve ser uma string: {}".format(str(TyErr)))
            return
        return "{}/{}/{}".format(dia, mes, ano)

    def str_para_data(self):
        """Convert the stored value to a ``datetime.date`` (or return None)."""
        if self._data is None:
            return None
        if type(self._data) is date:
            # Already a date object: nothing to convert.
            return self._data
        if type(self._data) is str:
            dia, mes, ano = self.normaliza_data().split("/")
            return date(int(ano), int(mes), int(dia))
        # Any other type is unsupported.
        return None

    def data_para_str(self):
        """Convert the stored value to a ``dd/mm/yyyy`` string (or return None)."""
        if self._data is None:
            return None
        if type(self._data) is str:
            # Normalise the separators and cache the canonical form.
            self._data = self.normaliza_data()
            return self._data
        if type(self._data) is date:
            return self._data.strftime("%d/%m/%Y")
        return None
class DatasFinanceiras(FormataData):
    """
    Date utilities for finance: calendar days, business days (excluding
    weekends and, optionally, bank holidays) between a start and an end date.

    Parameters
    ----------
    data_inicio : str (REQUIRED)
        Period start date, "dd/mm/yyyy" (inclusive).
    data_fim : str (optional)
        Period end date, "dd/mm/yyyy" (inclusive); alternative to num_dias.
    num_dias : int (optional)
        Number of days in the period, used when data_fim is not given.
        A negative value counts backwards from data_inicio.
    path_arquivo : str (optional; REQUIRED for holiday-aware methods)
        Path to a ';'-delimited csv file of holidays whose columns are date,
        weekday and holiday description (e.g. the Anbima bank-holiday file
        converted from xls to csv, header and footer rows removed).

    Examples
    --------
    >>> periodo = DatasFinanceiras("01/01/2013", "28/02/2013",
    ...                            path_arquivo="C:\\foo\\feriados.csv")
    >>> periodo.dias()                 # calendar days as datetime.date
    >>> periodo.dias(2)                # without Saturdays/Sundays
    >>> periodo.dias(3, 'str')         # business days as "dd/mm/yyyy" strings
    >>> periodo.lista_feriados()       # OrderedDict date -> holiday name
    >>> periodo.dia_semana('03/04/2013')
    >>> periodo.lista_dia_especifico_semana(3)   # every Wednesday
    >>> periodo.subperiodo('15/01/2013', '15/02/2013')
    >>> periodo.dias_uteis_por_mes()   # OrderedDict "mm/yyyy" -> business days
    """

    def __init__(self, data_inicio=None, data_fim=None, num_dias=None, path_arquivo=''):
        super().__init__()
        if data_inicio is None:
            raise ValueError('A Data Inicial e imprescindivel!!!')
        self._cData_Inicio = FormataData(data_inicio).str_para_data()
        # Always defined so subperiodo() can re-create the period even when
        # the instance was built from an end date (the original only set this
        # attribute in the num_dias branch, causing an AttributeError later).
        self._cNum_Dias = num_dias
        if data_fim is None and num_dias is None:
            raise ValueError("Uma data final ou número de dias tem que ser fornecido!")
        if data_fim is not None:
            self._cData_Fim = FormataData(data_fim).str_para_data()
            self._ListaDatas = [self._cData_Inicio + timedelta(x)
                                for x in range(0, abs(int((self._cData_Fim - self._cData_Inicio).days) + 1))]
        else:
            if self._cNum_Dias >= 1:
                # Forward period starting at data_inicio.
                self._ListaDatas = [self._cData_Inicio + timedelta(x) for x in range(0, abs(self._cNum_Dias))]
            else:
                # Backward period ending at data_inicio.
                self._ListaDatas = [
                    (self._cData_Inicio - timedelta(days=abs(self._cNum_Dias) - 1) + timedelta(x))
                    for x in range(0, abs(self._cNum_Dias))]
            self._cData_Fim = self._ListaDatas[-1]
        self._cPath_Arquivo = path_arquivo

    def dias(self, opt=1, dt_type='date'):
        """
        List the days of the period.

        Parameters
        ----------
        opt : int
            1 - every calendar day; 2 - excluding Saturdays and Sundays;
            3 - additionally excluding the holidays read from path_arquivo.
        dt_type : str
            'date' for datetime.date objects, 'str' for "dd/mm/yyyy" strings.
        """
        if dt_type == 'date':
            if opt == 1:
                return [dia for dia in self._ListaDatas]
            elif opt == 2:
                # isoweekday(): Monday == 1 ... Saturday == 6, Sunday == 7.
                return [dia for dia in self._ListaDatas if
                        (dia.isoweekday() != 6 and dia.isoweekday() != 7)]
            elif opt == 3:
                # The default path is '' (not None): reject any falsy path so
                # the caller gets a clear error instead of a failed open().
                if not self._cPath_Arquivo:
                    raise ValueError('E necessario um path/arquivo!')
                return [dia for dia in self.dias(opt=2) if
                        dia not in self.lista_feriados()]
        elif dt_type == 'str':
            return [FormataData(dia).data_para_str() for dia in self.dias(opt, dt_type='date')]

    def lista_feriados(self, dt_type='date'):
        """
        OrderedDict of the period's holidays (key: date, value: description)
        read from the csv file given in ``path_arquivo``.

        dt_type: 'date' keys are datetime.date, 'str' keys are "dd/mm/yyyy".
        """
        try:
            # newline='' is the csv-module recommended way to open the file;
            # the old 'rU' mode used here was removed in Python 3.11.
            with open(self._cPath_Arquivo, 'r', newline='') as csvfile:
                feriados = csv.reader(csvfile, dialect='excel', delimiter=';')
                dic_selic = {FormataData(row[0]).str_para_data(): row[2] for row in feriados}
            holidays = OrderedDict(sorted({dt: dic_selic[dt] for dt in self._ListaDatas if
                                           dt in dic_selic}.items(), key=lambda t: t[0]))
            if dt_type == 'date':
                return holidays
            elif dt_type == 'str':
                # Convert keys after sorting so the result stays in
                # chronological (not lexicographic) order.
                return OrderedDict((FormataData(dt).data_para_str(), nome)
                                   for dt, nome in holidays.items())
        except IOError as IOerr:
            print("Erro de leitura do arquivo:" + str(IOerr))
        except KeyError as Kerr:
            print("Erro na chave do Dicionario" + str(Kerr))

    def lista_dia_especifico_semana(self, dia_da_semana=1, dt_type='date'):
        """
        List every occurrence of one weekday inside the period.

        dia_da_semana: ISO weekday number (Monday == 1 ... Sunday == 7).
        dt_type: 'date' for datetime.date objects, 'str' for "dd/mm/yyyy".
        """
        if dt_type == 'date':
            return [dia for dia in self._ListaDatas if dia.isoweekday() == dia_da_semana]
        elif dt_type == 'str':
            return [FormataData(dia).data_para_str() for dia in self._ListaDatas if dia.isoweekday() == dia_da_semana]

    @staticmethod
    def dia_semana(data):
        """Locale-dependent weekday name ('%A') of a 'dd/mm/yyyy' string."""
        return FormataData(data).str_para_data().strftime("%A")

    @staticmethod
    def primeiro_dia_mes(data, dt_type='date'):
        """First day of ``data``'s month, as date ('date') or string ('str')."""
        if dt_type == 'date':
            return FormataData(FormataData(data).str_para_data().strftime("01/%m/%Y")).str_para_data()
        elif dt_type == 'str':
            return FormataData(data).str_para_data().strftime("01/%m/%Y")

    @staticmethod
    def ultimo_dia_mes(data, dt_type='date'):
        """Last day of ``data``'s month, as date ('date') or string ('str')."""
        # Walk forward day by day until the month changes, then step back one.
        data_seguinte = FormataData(data).str_para_data()
        while data_seguinte.month == FormataData(data).str_para_data().month:
            data_seguinte = date.fromordinal(data_seguinte.toordinal() + 1)
        if dt_type == 'date':
            return date.fromordinal(data_seguinte.toordinal() - 1)
        elif dt_type == 'str':
            return FormataData(date.fromordinal(data_seguinte.toordinal() - 1)).data_para_str()

    def dias_uteis_por_mes(self):
        """
        OrderedDict mapping "mm/yyyy" to the number of business days (opt=3)
        the period contains in that month.  Requires ``path_arquivo``.

        NOTE(review): keys are sorted lexicographically, which is not
        chronological across different years — kept for compatibility.
        """
        lista_mes_dias_uteis = []
        for dia in self._ListaDatas:
            if dia == self.ultimo_dia_mes(dia):
                # Month fully closed inside the period: count from the later
                # of the period start and the first day of the month.
                if self.primeiro_dia_mes(dia) < self._cData_Inicio:
                    dias_uteis_do_mes = DatasFinanceiras(self._cData_Inicio, self.ultimo_dia_mes(dia, 'str'),
                                                         path_arquivo=self._cPath_Arquivo)
                else:
                    dias_uteis_do_mes = DatasFinanceiras(self.primeiro_dia_mes(dia, 'str'),
                                                         self.ultimo_dia_mes(dia, 'str'),
                                                         path_arquivo=self._cPath_Arquivo)
                lista_mes_dias_uteis.append(
                    ("{}".format(dia.strftime("%m/%Y")), len(dias_uteis_do_mes.dias(opt=3))))
            elif dia == self._cData_Fim:
                # Partial final month of the period.
                dias_uteis_do_mes = DatasFinanceiras(self.primeiro_dia_mes(dia, 'str'), self._cData_Fim,
                                                     path_arquivo=self._cPath_Arquivo)
                lista_mes_dias_uteis.append(
                    ("{}".format(dia.strftime("%m/%Y")), len(dias_uteis_do_mes.dias(opt=3))))
        return OrderedDict(sorted({per[0]: per[1] for per in lista_mes_dias_uteis}.items(), key=lambda t: t[0]))

    def subperiodo(self, data_inicio=None, data_fim=None, num_dias=1, dt_type='date'):
        """
        List the days of a sub-period of the main period.

        Both bounds must lie inside the main period, otherwise a ValueError
        is raised.  Without bounds, the whole main period is returned.
        """
        if data_inicio is None or data_fim is None:
            subper = DatasFinanceiras(self._cData_Inicio, self._cData_Fim, self._cNum_Dias,
                                      path_arquivo=self._cPath_Arquivo)
            return subper.dias(1, dt_type)
        # Guard clauses replace the original nested ifs (and a stray
        # ``print("")`` left over from debugging).
        if FormataData(data_inicio).str_para_data() not in self._ListaDatas:
            raise ValueError("Data Inicial do subperiodo fora do conjunto de dias do periodo principal!")
        if FormataData(data_fim).str_para_data() not in self._ListaDatas:
            raise ValueError("Data Final do subperiodo fora do conjunto de dias do periodo principal!")
        subper = DatasFinanceiras(data_inicio, data_fim, path_arquivo=self._cPath_Arquivo)
        return subper.dias(1, dt_type)
def main():
    # Intentional no-op: FinDt is a library module with no CLI behaviour.
    pass


if __name__ == '__main__':
    main() | PypiClean
/NeuroDynamics-0.1.1.tar.gz/NeuroDynamics-0.1.1/brainpy/core_system/types.py |
from collections import OrderedDict
from .. import numpy as np
from .. import profile
try:
import numba as nb
except ImportError as e:
if profile.is_numba_bk():
raise e
nb = None
# Public API of the types module: the type checkers and state containers that
# other brainpy components validate against.
# NOTE(review): class String is defined below but missing from __all__ —
# confirm whether that omission is intentional.
__all__ = [
    'TypeChecker',
    'TypeMismatchError',
    'ObjState',
    'NeuState',
    'SynState',
    'ListConn',
    'MatConn',
    'Array',
    'Int',
    'Float',
    'List',
    'Dict',
]
class TypeChecker(object):
    """Base class for all type checkers.

    Subclasses override ``check`` to validate a value against the declared
    type and ``make_copy`` to build a fresh instance of the checked data.
    """

    def __init__(self, help):
        # Human-readable description of what this checker expects.
        self.help = help

    def check(self, cls):
        """Validate ``cls``; implementations raise TypeMismatchError on failure."""
        raise NotImplementedError

    @classmethod
    def make_copy(cls, *args, **kwargs):
        """Create a new instance of the checked structure; subclass hook."""
        raise NotImplementedError
class TypeMismatchError(Exception):
    """Raised by TypeChecker.check() implementations when a value fails validation."""
    pass
class ObjState(dict, TypeChecker):
    """Base container for named state variables.

    Behaves like a dict and doubles as a TypeChecker: ``check`` verifies
    that another instance of the same state type carries (at least) the
    same variable names.
    """

    def __init__(self, fields, help=''):
        """
        fields : tuple/list of variable names (each initialised to 0.), or a
                 dict mapping name -> initial value.
        """
        TypeChecker.__init__(self, help=help)
        variables = OrderedDict()
        if isinstance(fields, (tuple, list)):
            # Names only: every variable starts at zero.
            variables.update({v: 0. for v in fields})
        elif isinstance(fields, dict):
            variables.update(fields)
        else:
            # The original used "assert ValueError(...)", which never fires
            # (an exception instance is truthy) and reported type(variables)
            # — an empty OrderedDict — instead of the offending argument.
            raise ValueError(f'"fields" only supports tuple/list/dict, not {type(fields)}.')
        self._keys = list(variables.keys())
        self._values = list(variables.values())
        self._vars = variables

    def extract_by_index(self, idx):
        """Return {name: value[idx]} for every declared variable."""
        return {k: self.__getitem__(k)[idx] for k in self._keys}

    def update_by_index(self, idx, val):
        """Write val[name] into column ``idx`` of the backing data matrix."""
        data = self.__getitem__('_data')
        for k, v in val.items():
            _var2idx = self.__getitem__('_var2idx')
            data[_var2idx[k], idx] = v

    def check(self, cls):
        """Require ``cls`` to be the same state type and to contain all keys."""
        if not isinstance(cls, type(self)):
            raise TypeMismatchError(f'Must be an instance of "{type(self)}", but got "{type(cls)}".')
        for k in self._keys:
            if k not in cls:
                raise TypeMismatchError(f'Key "{k}" is not found in "cls".')

    def __str__(self):
        return f'{self.__class__.__name__} ({str(self._keys)})'

    def __repr__(self):
        return self.__str__()
class NeuState(ObjState):
    """Neuron State Management. """

    def __call__(self, size):
        # Allocate the backing matrix and expose each variable as a row view.
        if isinstance(size, int):
            size = (size,)
        elif isinstance(size, (tuple, list)):
            size = tuple(size)
        else:
            raise ValueError(f'Unknown size type: {type(size)}.')
        # One row per declared variable; each row holds that variable for
        # every neuron in ``size``.
        data = np.zeros((len(self._vars),) + size, dtype=np.float_)
        var2idx = dict()
        idx2var = dict()
        state = dict()
        for i, (k, v) in enumerate(self._vars.items()):
            # state[k] is a numpy *view* of row i, so filling data[i] just
            # below is also visible through state[k].
            state[k] = data[i]
            data[i] = v
            var2idx[k] = i
            idx2var[i] = k
        state['_data'] = data
        state['_var2idx'] = var2idx
        state['_idx2var'] = idx2var
        dict.__init__(self, state)
        return self

    def __setitem__(self, key, val):
        # Route writes through the backing matrix so the row views stay
        # consistent; the bookkeeping keys are read-only.
        if key in self._vars:
            data = self.__getitem__('_data')
            _var2idx = self.__getitem__('_var2idx')
            data[_var2idx[key]] = val
        elif key in ['_data', '_var2idx', '_idx2var']:
            raise KeyError(f'"{key}" cannot be modified.')
        else:
            raise KeyError(f'"{key}" is not defined in "{str(self._keys)}".')

    def make_copy(self, size):
        # Fresh state with the same variable declaration, newly allocated.
        obj = NeuState(self._vars)
        return obj(size=size)
class SynState(ObjState):
    """Synapse State Management. """

    def __init__(self, fields, help=''):
        super(SynState, self).__init__(fields=fields, help=help)
        # Ring-buffer bookkeeping for delayed variables; configured in __call__.
        self._delay_len = 1
        self._delay_in = 0
        self._delay_out = 0

    def __call__(self, size, delay=None, delay_vars=('cond',)):
        # check size
        if isinstance(size, int):
            size = (size,)
        elif isinstance(size, (tuple, list)):
            size = tuple(size)
        else:
            raise ValueError(f'Unknown size type: {type(size)}.')
        # check delay
        delay = 0 if (delay is None) or (delay < 1) else delay
        assert isinstance(delay, int), '"delay" must be a int to specify the delay length.'
        self._delay_len = delay
        self._delay_in = delay - 1
        # check delay_vars
        if isinstance(delay_vars, str):
            delay_vars = (delay_vars,)
        elif isinstance(delay_vars, (tuple, list)):
            delay_vars = tuple(delay_vars)
        else:
            raise ValueError(f'Unknown delay_vars type: {type(delay_vars)}.')
        # initialize data
        # Layout: one row per declared variable, followed by ``delay`` rows
        # per delayed variable forming that variable's ring buffer.
        length = len(self._vars) + delay * len(delay_vars)
        data = np.zeros((length,) + size, dtype=np.float_)
        var2idx = dict()
        idx2var = dict()
        state = dict()
        for i, (k, v) in enumerate(self._vars.items()):
            data[i] = v
            state[k] = data[i]
            var2idx[k] = i
            idx2var[i] = k
        index_offset = len(self._vars)
        for i, v in enumerate(delay_vars):
            # '_<var>_offset' marks where this variable's delay rows start;
            # '_<var>_delay' is a view of the whole ring buffer.
            var2idx[f'_{v}_offset'] = i * delay + index_offset
            state[f'_{v}_delay'] = data[i * delay + index_offset: (i + 1) * delay + index_offset]
        state['_data'] = data
        state['_var2idx'] = var2idx
        state['_idx2var'] = idx2var
        dict.__init__(self, state)
        return self

    def __setitem__(self, key, val):
        # Route writes through the backing matrix; bookkeeping keys read-only.
        if key in self._vars:
            data = self.__getitem__('_data')
            _var2idx = self.__getitem__('_var2idx')
            data[_var2idx[key]] = val
        elif key in ['_data', '_var2idx', '_idx2var', '_cond_delay']:
            raise KeyError(f'"{key}" cannot be modified.')
        else:
            raise KeyError(f'"{key}" is not defined in "{str(self._keys)}".')

    def extract_by_index(self, idx, delay_pull=False):
        # With delay_pull=True, variables that have a ring buffer are read
        # from the delayed output slot instead of their current row.
        if delay_pull:
            res = {}
            for k in self._keys:
                if f'_{k}_delay' in self:
                    res[k] = self.delay_pull(k)[idx]
                else:
                    res[k] = self.__getitem__(k)[idx]
            return res
        else:
            return {k: self.__getitem__(k)[idx] for k in self._keys}

    def make_copy(self, size, delay=None, delay_vars=('cond',)):
        # Fresh state with the same variable declaration, newly allocated.
        obj = SynState(self._vars)
        return obj(size=size, delay=delay, delay_vars=delay_vars)

    def delay_push(self, g, var='cond'):
        # Write ``g`` into the ring buffer's input slot for ``var``.
        if self._delay_len > 0:
            data = self.__getitem__('_data')
            offset = self.__getitem__('_var2idx')[f'_{var}_offset']
            data[self._delay_in + offset] = g

    def delay_pull(self, var='cond'):
        # Read the delayed value; with no delay configured, fall back to the
        # variable's current row.
        if self._delay_len > 0:
            data = self.__getitem__('_data')
            offset = self.__getitem__('_var2idx')[f'_{var}_offset']
            return data[self._delay_out + offset]
        else:
            data = self.__getitem__('_data')
            var2idx = self.__getitem__('_var2idx')
            return data[var2idx[var]]

    def _update_delay_indices(self):
        # Advance both ring-buffer cursors one step (modulo buffer length).
        if self._delay_len > 0:
            self._delay_in = (self._delay_in + 1) % self._delay_len
            self._delay_out = (self._delay_out + 1) % self._delay_len
class ListConn(TypeChecker):
    """Synaptic connection with list type."""

    def __init__(self, help=''):
        super(ListConn, self).__init__(help=help)

    def check(self, cls):
        # Under the numba backend the connection must be a numba typed list
        # of typed lists / arrays; otherwise a plain nested list is accepted.
        if profile.is_numba_bk():
            if not isinstance(cls, nb.typed.List):
                raise TypeMismatchError(f'In numba mode, "cls" must be an instance of {type(nb.typed.List)}, '
                                        f'but got {type(cls)}. Hint: you can use "ListConn.create()" method.')
            if not isinstance(cls[0], (nb.typed.List, np.ndarray)):
                raise TypeMismatchError(f'In numba mode, elements in "cls" must be an instance of '
                                        f'{type(nb.typed.List)} or ndarray, but got {type(cls[0])}. '
                                        f'Hint: you can use "ListConn.create()" method.')
        else:
            if not isinstance(cls, list):
                raise TypeMismatchError(f'ListConn requires a list, but got {type(cls)}.')
            if not isinstance(cls[0], (list, np.ndarray)):
                raise TypeMismatchError(f'ListConn requires the elements of the list must be list or '
                                        f'ndarray, but got {type(cls)}.')

    @classmethod
    def make_copy(cls, conn):
        """Build a backend-appropriate copy of a nested connection list."""
        assert isinstance(conn, (list, tuple)), '"conn" must be a tuple/list.'
        assert isinstance(conn[0], (list, tuple)), 'Elements of "conn" must be tuple/list.'
        if profile.is_numba_bk():
            a_list = nb.typed.List()
            for l in conn:
                # NOTE(review): np.uint64(l) turns each inner sequence into a
                # uint64 array — confirm this is the intended element type.
                a_list.append(np.uint64(l))
        else:
            a_list = conn
        return a_list

    def __str__(self):
        return 'ListConn'
class MatConn(TypeChecker):
    """Synaptic connection represented as a dense 2-D connectivity matrix."""

    def __init__(self, help=''):
        super(MatConn, self).__init__(help=help)

    def check(self, cls):
        """Accept only a two-dimensional numpy ndarray."""
        is_matrix = isinstance(cls, np.ndarray) and np.ndim(cls) == 2
        if not is_matrix:
            raise TypeMismatchError(f'MatConn requires a two-dimensional ndarray.')

    def __str__(self):
        return 'MatConn'
class Array(TypeChecker):
    """Checker/factory for NumPy ndarrays of a fixed dimensionality."""

    def __init__(self, dim, help=''):
        # Number of dimensions every checked array must have.
        self.dim = dim
        super(Array, self).__init__(help=help)

    def __call__(self, size):
        """Allocate a zero-filled float array of the declared rank."""
        if isinstance(size, int):
            assert self.dim == 1
        else:
            assert len(size) == self.dim
        return np.zeros(size, dtype=np.float_)

    def check(self, cls):
        matches = isinstance(cls, np.ndarray) and np.ndim(cls) == self.dim
        if not matches:
            # NOTE(review): the message says "MatConn" — likely a copy/paste
            # slip; kept byte-for-byte for behaviour parity.
            raise TypeMismatchError(f'MatConn requires a {self.dim}-D ndarray.')

    def __str__(self):
        return type(self).__name__ + f' (dim={self.dim})'
class String(TypeChecker):
    """Checker accepting plain Python strings."""

    def __init__(self, help=''):
        super(String, self).__init__(help=help)

    def check(self, cls):
        if isinstance(cls, str):
            return
        raise TypeMismatchError(f'Require a string, got {type(cls)}.')

    def __str__(self):
        return 'StringType'
class Int(TypeChecker):
    """Checker accepting Python ints (note: bool is a subclass of int)."""

    def __init__(self, help=''):
        super(Int, self).__init__(help=help)

    def check(self, cls):
        if isinstance(cls, int):
            return
        raise TypeMismatchError(f'Require an int, got {type(cls)}.')

    def __str__(self):
        return 'IntType'
class Float(TypeChecker):
    """Checker accepting Python floats (ints are rejected)."""

    def __init__(self, help=''):
        super(Float, self).__init__(help=help)

    def check(self, cls):
        if not isinstance(cls, float):
            raise TypeMismatchError(f'Require a float, got {type(cls)}.')

    def __str__(self):
        # Fixed typo: was 'Floatype'; now consistent with 'IntType'/'StringType'.
        return 'FloatType'
class List(TypeChecker):
    """Checker for lists, optionally validating the element type."""

    def __init__(self, item_type=None, help=''):
        if item_type is not None:
            assert isinstance(item_type, TypeChecker), 'Must be a TypeChecker.'
        self.item_type = item_type
        super(List, self).__init__(help=help)

    def check(self, cls):
        """Require a (numba typed) list; optionally spot-check element 0."""
        if profile.is_numba_bk():
            if not isinstance(cls, nb.typed.List):
                raise TypeMismatchError(f'In numba, "List" requires an instance of {type(nb.typed.List)}, '
                                        f'but got {type(cls)}.')
        else:
            if not isinstance(cls, list):
                raise TypeMismatchError(f'"List" requires an instance of list, '
                                        f'but got {type(cls)}.')
        if self.item_type is not None:
            # Only the first element is inspected (cheap spot check).
            self.item_type.check(cls[0])

    def __str__(self):
        return type(self).__name__ + f'(item_type={str(self.item_type)})'
class Dict(TypeChecker):
    """Checker for dicts, optionally validating key and value types.

    Fixes relative to the original:
    * the default ``key_type`` was the ``String`` class itself, which failed
      the ``isinstance(..., TypeChecker)`` assertion on every default
      construction; an instance is used now;
    * ``self.key_type`` / ``self.item_type`` are always assigned, so
      ``check`` no longer raises AttributeError when a type was not given;
    * values are validated via ``cls.values()`` — the original iterated
      ``cls.items()`` and checked (key, value) tuples instead of values.
    """

    def __init__(self, key_type=String(), item_type=None, help=''):
        if key_type is not None:
            assert isinstance(key_type, TypeChecker), 'Must be a TypeChecker.'
        self.key_type = key_type
        if item_type is not None:
            assert isinstance(item_type, TypeChecker), 'Must be a TypeChecker.'
        self.item_type = item_type
        super(Dict, self).__init__(help=help)

    def check(self, cls):
        if profile.is_numba_bk():
            if not isinstance(cls, nb.typed.Dict):
                raise TypeMismatchError(f'In numba, "Dict" requires an instance of {type(nb.typed.Dict)}, '
                                        f'but got {type(cls)}.')
        else:
            if not isinstance(cls, dict):
                raise TypeMismatchError(f'"Dict" requires an instance of dict, '
                                        f'but got {type(cls)}.')
        if self.key_type is not None:
            for key in cls.keys():
                self.key_type.check(key)
        if self.item_type is not None:
            for item in cls.values():
                self.item_type.check(item)

    def __str__(self):
        return type(self).__name__ + f'(key_type={str(self.key_type)}, item_type={str(self.item_type)})'
/Amara-2.0.0a6.tar.bz2/Amara-2.0.0a6/lib/thirdparty/httplib2/socks.py |
Minor modifications made by Christopher Gilbert (http://motomastyle.com/)
for use in PyLoris (http://pyloris.sourceforge.net/)
Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
mainly to merge bug fixes found in Sourceforge
"""
import socket
import struct
import sys
# Fail fast if the real socket implementation is unavailable on this platform.
if getattr(socket, 'socket', None) is None:
    raise ImportError('socket.socket missing, proxy support unusable')

# Supported proxy protocols (values are carried in the proxy config tuple).
PROXY_TYPE_SOCKS4 = 1
PROXY_TYPE_SOCKS5 = 2
PROXY_TYPE_HTTP = 3

# Module-wide default proxy, set via setdefaultproxy(); None means direct.
_defaultproxy = None
# The original socket class, kept so this module can still create real sockets
# after wrapmodule() replaces module.socket.socket elsewhere.
_orgsocket = socket.socket
class ProxyError(Exception): pass            # base class for all proxy failures
class GeneralProxyError(ProxyError): pass    # generic negotiation/transport errors
class Socks5AuthError(ProxyError): pass      # SOCKS5 authentication failures
class Socks5Error(ProxyError): pass          # SOCKS5 protocol errors
class Socks4Error(ProxyError): pass          # SOCKS4 protocol errors
class HTTPError(ProxyError): pass            # HTTP CONNECT proxy errors
# Human-readable messages for the numeric error codes carried in the tuples
# raised as ProxyError arguments throughout this module.
_generalerrors = ("success",
                  "invalid data",
                  "not connected",
                  "not available",
                  "bad proxy type",
                  "bad input")

# SOCKS5 reply codes, in wire order (matches the RFC 1928 REP field).
_socks5errors = ("succeeded",
                 "general SOCKS server failure",
                 "connection not allowed by ruleset",
                 "Network unreachable",
                 "Host unreachable",
                 "Connection refused",
                 "TTL expired",
                 "Command not supported",
                 "Address type not supported",
                 "Unknown error")

# SOCKS5 username/password authentication outcomes.
_socks5autherrors = ("succeeded",
                     "authentication is required",
                     "all offered authentication methods were rejected",
                     "unknown username or invalid password",
                     "unknown error")

# SOCKS4 reply codes.
_socks4errors = ("request granted",
                 "request rejected or failed",
                 "request rejected because SOCKS server cannot connect to identd on the client",
                 "request rejected because the client program and identd report different user-ids",
                 "unknown error")
def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
    """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
    Sets a default proxy which all further socksocket objects will use,
    unless explicitly changed.
    """
    global _defaultproxy
    # Stored as a 6-tuple consumed by socksocket.__init__ and wrapmodule().
    _defaultproxy = (proxytype, addr, port, rdns, username, password)
def wrapmodule(module):
    """wrapmodule(module)
    Attempts to replace a module's socket library with a SOCKS socket. Must set
    a default proxy using setdefaultproxy(...) first.
    This will only work on modules that import socket directly into the namespace;
    most of the Python Standard Library falls into this category.
    """
    # "is not None" replaces the original "!= None": identity comparison is
    # the correct (and lint-clean) way to test against None.
    if _defaultproxy is not None:
        module.socket.socket = socksocket
    else:
        raise GeneralProxyError((4, "no proxy specified"))
class socksocket(socket.socket):
"""socksocket([family[, type[, proto]]]) -> socket object
Open a SOCKS enabled socket. The parameters are the same as
those of the standard socket init. In order for SOCKS to work,
you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
"""
def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
_orgsocket.__init__(self, family, type, proto, _sock)
if _defaultproxy != None:
self.__proxy = _defaultproxy
else:
self.__proxy = (None, None, None, None, None, None)
self.__proxysockname = None
self.__proxypeername = None
    def __recvall(self, count):
        """__recvall(count) -> data
        Receive EXACTLY the number of bytes requested from the socket.
        Blocks until the required number of bytes have been received.
        """
        data = self.recv(count)
        while len(data) < count:
            # Ask only for what is still missing.
            d = self.recv(count-len(data))
            # An empty read means the peer closed the connection early.
            if not d: raise GeneralProxyError((0, "connection closed unexpectedly"))
            data = data + d
        return data
    def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
        """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
        Sets the proxy to be used.
        proxytype - The type of the proxy to be used. Three types
                are supported: PROXY_TYPE_SOCKS4 (including socks4a),
                PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
        addr -      The address of the server (IP or DNS).
        port -      The port of the server. Defaults to 1080 for SOCKS
                servers and 8080 for HTTP proxy servers.
        rdns -      Should DNS queries be preformed on the remote side
                (rather than the local side). The default is True.
                Note: This has no effect with SOCKS4 servers.
        username -  Username to authenticate with to the server.
                The default is no authentication.
        password -  Password to authenticate with to the server.
                Only relevant when username is also provided.
        """
        # Same 6-tuple layout as the module-level default proxy.
        self.__proxy = (proxytype, addr, port, rdns, username, password)
def __negotiatesocks5(self, destaddr, destport):
"""__negotiatesocks5(self,destaddr,destport)
Negotiates a connection through a SOCKS5 server.
"""
# First we'll send the authentication packages we support.
if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
# The username/password details were supplied to the
# setproxy method so we support the USERNAME/PASSWORD
# authentication (in addition to the standard none).
self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02))
else:
# No username/password were entered, therefore we
# only support connections with no authentication.
self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00))
# We'll receive the server's response to determine which
# method was selected
chosenauth = self.__recvall(2)
if chosenauth[0:1] != chr(0x05).encode():
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
# Check the chosen authentication method
if chosenauth[1:2] == chr(0x00).encode():
# No authentication is required
pass
elif chosenauth[1:2] == chr(0x02).encode():
# Okay, we need to perform a basic username/password
# authentication.
self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
authstat = self.__recvall(2)
if authstat[0:1] != chr(0x01).encode():
# Bad response
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
if authstat[1:2] != chr(0x00).encode():
# Authentication failed
self.close()
raise Socks5AuthError((3, _socks5autherrors[3]))
# Authentication succeeded
else:
# Reaching here is always bad
self.close()
if chosenauth[1] == chr(0xFF).encode():
raise Socks5AuthError((2, _socks5autherrors[2]))
else:
raise GeneralProxyError((1, _generalerrors[1]))
# Now we can request the actual connection
req = struct.pack('BBB', 0x05, 0x01, 0x00)
# If the given destination address is an IP address, we'll
# use the IPv4 address request even if remote resolving was specified.
try:
ipaddr = socket.inet_aton(destaddr)
req = req + chr(0x01).encode() + ipaddr
except socket.error:
# Well it's not an IP number, so it's probably a DNS name.
if self.__proxy[3]:
# Resolve remotely
ipaddr = None
req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr
else:
# Resolve locally
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
req = req + chr(0x01).encode() + ipaddr
req = req + struct.pack(">H", destport)
self.sendall(req)
# Get the response
resp = self.__recvall(4)
if resp[0:1] != chr(0x05).encode():
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
elif resp[1:2] != chr(0x00).encode():
# Connection failed
self.close()
if ord(resp[1:2])<=8:
raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
else:
raise Socks5Error((9, _socks5errors[9]))
# Get the bound address/port
elif resp[3:4] == chr(0x01).encode():
boundaddr = self.__recvall(4)
elif resp[3:4] == chr(0x03).encode():
resp = resp + self.recv(1)
boundaddr = self.__recvall(ord(resp[4:5]))
else:
self.close()
raise GeneralProxyError((1,_generalerrors[1]))
boundport = struct.unpack(">H", self.__recvall(2))[0]
self.__proxysockname = (boundaddr, boundport)
if ipaddr != None:
self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
else:
self.__proxypeername = (destaddr, destport)
def getproxysockname(self):
"""getsockname() -> address info
Returns the bound IP address and port number at the proxy.
"""
return self.__proxysockname
def getproxypeername(self):
"""getproxypeername() -> address info
Returns the IP and port number of the proxy.
"""
return _orgsocket.getpeername(self)
def getpeername(self):
"""getpeername() -> address info
Returns the IP address and port number of the destination
machine (note: getproxypeername returns the proxy)
"""
return self.__proxypeername
def __negotiatesocks4(self,destaddr,destport):
"""__negotiatesocks4(self,destaddr,destport)
Negotiates a connection through a SOCKS4 server.
"""
# Check if the destination address provided is an IP address
rmtrslv = False
try:
ipaddr = socket.inet_aton(destaddr)
except socket.error:
# It's a DNS name. Check where it should be resolved.
if self.__proxy[3]:
ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01)
rmtrslv = True
else:
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
# Construct the request packet
req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
# The username parameter is considered userid for SOCKS4
if self.__proxy[4] != None:
req = req + self.__proxy[4]
req = req + chr(0x00).encode()
# DNS name if remote resolving is required
# NOTE: This is actually an extension to the SOCKS4 protocol
# called SOCKS4A and may not be supported in all cases.
if rmtrslv:
req = req + destaddr + chr(0x00).encode()
self.sendall(req)
# Get the response from the server
resp = self.__recvall(8)
if resp[0:1] != chr(0x00).encode():
# Bad data
self.close()
raise GeneralProxyError((1,_generalerrors[1]))
if resp[1:2] != chr(0x5A).encode():
# Server returned an error
self.close()
if ord(resp[1:2]) in (91, 92, 93):
self.close()
raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))
else:
raise Socks4Error((94, _socks4errors[4]))
# Get the bound address/port
self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0])
if rmtrslv != None:
self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
else:
self.__proxypeername = (destaddr, destport)
def __negotiatehttp(self, destaddr, destport):
"""__negotiatehttp(self,destaddr,destport)
Negotiates a connection through an HTTP server.
"""
# If we need to resolve locally, we do this now
if not self.__proxy[3]:
addr = socket.gethostbyname(destaddr)
else:
addr = destaddr
self.sendall(("CONNECT " + addr + ":" + str(destport) + " HTTP/1.1\r\n" + "Host: " + destaddr + "\r\n\r\n").encode())
# We read the response until we get the string "\r\n\r\n"
resp = self.recv(1)
while resp.find("\r\n\r\n".encode()) == -1:
resp = resp + self.recv(1)
# We just need the first line to check if the connection
# was successful
statusline = resp.splitlines()[0].split(" ".encode(), 2)
if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()):
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
try:
statuscode = int(statusline[1])
except ValueError:
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
if statuscode != 200:
self.close()
raise HTTPError((statuscode, statusline[2]))
self.__proxysockname = ("0.0.0.0", 0)
self.__proxypeername = (addr, destport)
def connect(self, destpair):
"""connect(self, despair)
Connects to the specified destination through a proxy.
destpar - A tuple of the IP/DNS address and the port number.
(identical to socket's connect).
To select the proxy server use setproxy().
"""
# Do a minimal input check first
if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (type(destpair[0]) != type('')) or (type(destpair[1]) != int):
raise GeneralProxyError((5, _generalerrors[5]))
if self.__proxy[0] == PROXY_TYPE_SOCKS5:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 1080
_orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatesocks5(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 1080
_orgsocket.connect(self,(self.__proxy[1], portnum))
self.__negotiatesocks4(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_HTTP:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 8080
_orgsocket.connect(self,(self.__proxy[1], portnum))
self.__negotiatehttp(destpair[0], destpair[1])
elif self.__proxy[0] == None:
_orgsocket.connect(self, (destpair[0], destpair[1]))
else:
raise GeneralProxyError((4, _generalerrors[4])) | PypiClean |
/Hecuba-2.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/hecuba/tools.py | import uuid
from . import config
# Cassandra/Hecuba column types accepted in data model definitions.
valid_types = ['counter', 'text', 'boolean', 'decimal', 'double', 'int', 'list', 'set', 'map', 'bigint', 'blob',
               'tuple', 'dict', 'float', 'timestamp', 'time', 'date', 'numpy.ndarray']
# Everything except the last entry ('numpy.ndarray'), i.e. the types mapped
# to plain Cassandra columns rather than to a StorageNumpy object.
basic_types = valid_types[:-1]
def storage_id_from_name(name):
    """Map *name* to a deterministic storage id.

    Uses UUID version 5 over the DNS namespace, so the same name always
    yields the same UUID.
    """
    deterministic_id = uuid.uuid5(uuid.NAMESPACE_DNS, name)
    return deterministic_id
def process_path(module_path):
    """
    Method to obtain module and class_name from a module path
    Args:
        module_path(String): path in the format module.class_name
    Returns:
        tuple containing class_name and module; the module part is the empty
        string when the path contains no dot.
    """
    # Special-cased paths mapped to their Hecuba storage implementations.
    if module_path == 'numpy.ndarray':
        return 'StorageNumpy', 'hecuba.hnumpy'
    if module_path == 'StorageDict':
        return 'StorageDict', 'hecuba.hdict'
    # BUGFIX: the previous manual scan assumed at least one '.' and chopped
    # the first character off dotless names ("Plain" -> "lain"). rsplit
    # splits on the LAST dot, matching the old behavior for dotted paths.
    if '.' in module_path:
        module, class_name = module_path.rsplit('.', 1)
    else:
        module, class_name = '', module_path
    return class_name, module
"""
Cassandra related methods
"""
_size_estimates = config.session.prepare(("SELECT mean_partition_size, partitions_count "
"FROM system.size_estimates WHERE keyspace_name=? and table_name=?"))
_max_token = int(((2 ** 63) - 1)) # type: int
_min_token = int(-2 ** 63) # type: int
_select_istorage_meta = config.session.prepare("SELECT * FROM hecuba.istorage WHERE storage_id = ?")
_select_istorage_meta_by_name = config.session.prepare("SELECT * FROM hecuba.istorage WHERE name = ? allow filtering")
def extract_ks_tab(name):
    """Split a (possibly qualified) table name into keyspace and table.

    Args:
        name: "keyspace.table" or just "table"; falsy input is allowed.
    Returns:
        tuple: lower-cased (keyspace, table); (None, None) for falsy input.
        When no keyspace is given, config.execution_name is used.
    """
    if not name:
        return None, None
    parts = name.split(".")
    if len(parts) == 2:
        ksp, table = parts
    else:
        ksp, table = config.execution_name, name
    return ksp.lower(), table.lower()
def tokens_partitions(ksp, table, tokens_ranges):
    """
    Generator that groups the given token ranges by replica node and yields
    them re-split into work partitions.
    Args:
        ksp: keyspace name, used to map tokens to their replica hosts.
        table: table name, used to look up Cassandra size estimates.
        tokens_ranges: the object's current token ranges
    Yields:
        lists of (from_token, to_token) tuples; each yielded list is one
        partition of at most ~len(node ranges)/splits_per_node entries.
    :type tokens_ranges: list[(long,long)]
    """
    from collections import defaultdict
    from bisect import bisect_right
    from cassandra.metadata import Murmur3Token
    # Tuning knobs from the Hecuba configuration.
    splits_per_node = config.splits_per_node
    token_range_size = config.token_range_size
    target_token_range_size = config.target_token_range_size
    tm = config.cluster.metadata.token_map
    tmap = tm.tokens_to_hosts_by_ks.get(ksp, None)
    tokens_murmur3 = map(lambda a: (Murmur3Token(a[0]), a[1]), tokens_ranges)
    if not tmap:
        # Token->host mapping for this keyspace is not cached yet; build it.
        tm.rebuild_keyspace(ksp, build_if_absent=True)
        tmap = tm.tokens_to_hosts_by_ks[ksp]
    # Group each input range under the first replica of the ring position
    # that owns its start token (wrapping around past the last ring entry).
    tokens_per_node = defaultdict(list)
    for tmumur, t_to in tokens_murmur3:
        point = bisect_right(tm.ring, tmumur)
        if point == len(tm.ring):
            tokens_per_node[tmap[tm.ring[0]][0]].append((tmumur.value, t_to))
        else:
            tokens_per_node[tmap[tm.ring[point]][0]].append((tmumur.value, t_to))
    n_nodes = len(tokens_per_node)
    # Default split granularity; may be overridden by an explicit range size
    # or derived from Cassandra's size estimates below.
    step_size = _max_token // (splits_per_node * n_nodes)
    if token_range_size:
        step_size = token_range_size
    elif target_token_range_size:
        one = config.session.execute(_size_estimates, [ksp, table]).one()
        if one:
            (mean_p_size, p_count) = one
            estimated_size = mean_p_size * p_count
            if estimated_size > 0:
                # Aim for ranges of roughly target_token_range_size bytes,
                # but never fewer than splits_per_node * n_nodes ranges.
                step_size = _max_token // int(
                    max(estimated_size / target_token_range_size,
                        splits_per_node * n_nodes)
                )
    # Re-split each node's ranges into step_size chunks and yield them in
    # groups so that each node contributes ~splits_per_node partitions.
    for tokens_in_node in tokens_per_node.values():
        partition = []
        for fraction, to in tokens_in_node:
            while fraction < to - step_size:
                partition.append((fraction, fraction + step_size))
                fraction += step_size
            partition.append((fraction, to))
        group_size = max(len(partition) // splits_per_node, 1)
        for i in range(0, len(partition), group_size):
            yield partition[i:i + group_size]
def generate_token_ring_ranges():
    """Return token ranges covering the cluster's whole Murmur3 token ring."""
    ring_tokens = [token.value for token in config.cluster.metadata.token_map.ring]
    return discrete_token_ranges(ring_tokens)
def discrete_token_ranges(tokens):
    """
    Makes proper token ranges ensuring that in a tuple (a, b) a <= b.
    Args:
        tokens: a list of tokens, e.g. [-1, 0, 10] (any order).
    Returns:
        a rationalized list padded with the ring extremes, e.g.
        [(min, -1), (-1, 0), (0, 10), (10, max)]; empty input yields [].
    """
    # BUGFIX: sort a copy instead of calling tokens.sort(), which mutated
    # the caller's list as a side effect.
    tokens = sorted(tokens)
    if not tokens:
        return tokens
    token_ranges = []
    # Pad the front with the ring minimum unless the first token already is it.
    if tokens[0] > _min_token:
        token_ranges.append((_min_token, tokens[0]))
    # Consecutive pairs form the interior ranges.
    for lower, upper in zip(tokens, tokens[1:]):
        token_ranges.append((lower, upper))
    # Pad the back with the ring maximum.
    token_ranges.append((tokens[-1], _max_token))
    return token_ranges
def count_name_collision(ksp, table, attribute):
    """Count tables in *ksp* named '<table>_<attribute>' with an optional
    numeric '_<n>' suffix."""
    import re
    pattern = re.compile("^%s_%s(_[0-9]+)?$" % (table, attribute))
    rows = config.session.execute(
        "SELECT table_name FROM system_schema.tables WHERE keyspace_name = %s",
        [ksp])
    collisions = 0
    for row in rows:
        if pattern.match(row[0]):
            collisions += 1
    return collisions
def get_istorage_attrs(storage_id):
    """Fetch the hecuba.istorage metadata row(s) for *storage_id* as a list."""
    result_set = config.session.execute(_select_istorage_meta, [storage_id])
    return list(result_set)
#DEPRECATED method due to split! because it may provide more than one result!
#def get_istorage_attrs_by_name(name):
# return list(config.session.execute(_select_istorage_meta_by_name, [name]))
def build_remotely(args):
    """
    Reconstruct an IStorage object from its serialized description.
    Args:
        args (dict): must contain 'class_name' (or the legacy key 'type')
            holding the dotted path of the class; the remaining entries are
            forwarded to the class constructor, filtered by its args_names.
    Returns:
        An instance of the described IStorage subclass.
    Raises:
        TypeError: if neither 'class_name' nor 'type' is present.
        ValueError: if the target module cannot be imported.
    """
    # Remember the flag before filtering; it defaults to True for objects
    # rebuilt on a remote worker.
    built_remotely = args.get("built_remotely", True)
    obj_type = args.get('class_name', args.get('type', None))
    if obj_type is None:
        raise TypeError("Trying to build an IStorage obj without giving the type")
    # Import the class defined by obj_type.
    cname, module = process_path(obj_type)
    try:
        mod = __import__(module, globals(), locals(), [cname], 0)
    except ValueError:
        # __import__ raises ValueError for an empty module name.
        raise ValueError("Can't import class {} from module {}".format(cname, module))
    imported_class = getattr(mod, cname)
    # Keep only the constructor arguments the class declares, then restore
    # the flag consumed above.
    args = {k: v for k, v in args.items() if k in imported_class.args_names}
    args.pop('class_name', None)
    args["built_remotely"] = built_remotely
    return imported_class(**args)
/MuPhyN-0.1.1.post4-py3-none-any.whl/muphyn/packages/interface/graphical_actions/diagram_remove_graphical_element_action.py |
# General Imports
from typing import Iterable, List, Any
# Project Imports
from muphyn.packages.interface.graphical_actions.abstract_diagram_action import AbstractDiagramAction
from muphyn.packages.interface.models.graphical_models.abstract_box_model import AbstractBoxModel
from muphyn.packages.interface.models.graphical_models.abstract_graphical_element import AbstractGraphicalElement
from muphyn.packages.interface.models.links_model.abstract_link_model import AbstractLinkModel
from muphyn.packages.interface.graphical_actions import parser_decode as parser_decode
from muphyn.packages.interface.graphical_actions import parser_encode as parser_encode
#-----------------------------------
# Class
#-----------------------------------
class DiagramRemoveGraphicalElementAction(AbstractDiagramAction):
    """Undoable action that removes the selected boxes and links from a diagram."""

    # -------------
    # Constructor
    # -------------

    def __init__(self, diagram_model: Any, elements: Iterable[Any]):
        """
        Args:
            diagram_model: diagram the elements belong to.
            elements: graphical elements to delete; only boxes and links are
                considered, anything else is ignored.
        """
        AbstractDiagramAction.__init__(self, diagram_model)

        # Store graphical indices rather than object references so the action
        # stays valid while elements are destroyed and recreated by do()/undo().
        self._elements_index = [
            el.graphical_index
            for el in elements
            if isinstance(el, (AbstractBoxModel, AbstractLinkModel))
        ]

        # Serialized descriptions used by undo() to rebuild what do() removed.
        self._reconstructors = []

    # -------------
    # Methods
    # -------------

    def elements(self) -> Iterable[AbstractGraphicalElement]:
        """Yield the elements to delete, resolved from their graphical indices."""
        for index in self._elements_index:
            el = self.diagram_model.get_element_by_graphical_index(index)
            if el is not None:
                yield el

    def do(self):
        """Remove the selected links and boxes, serializing them for undo()."""
        boxes_to_delete: List[AbstractBoxModel] = [
            el for el in self.elements() if isinstance(el, AbstractBoxModel)
        ]

        # Remove the explicitly selected links first.
        for link in self.elements():
            if isinstance(link, AbstractLinkModel):
                self._reconstructors.append(parser_encode.link(link))
                link.unbind()
                self.diagram_model.remove_element(link)

        # Then remove the links still attached to the boxes being deleted
        # (copy box.signals: it shrinks while links are removed).
        for box in boxes_to_delete:
            for link in list(box.signals):
                self._reconstructors.append(parser_encode.link(link))
                link.unbind()
                self.diagram_model.remove_element(link)

        # Finally remove the boxes themselves.
        for box in boxes_to_delete:
            self._reconstructors.append(parser_encode.encode(box))
            self.diagram_model.remove_element(box)

        self._elements_index.clear()

    def undo(self):
        """Recreate the removed elements: boxes first, then links."""
        for reconstructor in self._reconstructors:
            if reconstructor['type'] != 'link':
                box_model = parser_decode.box(reconstructor)
                self.diagram_model.add_element(box_model)
                self._elements_index.append(box_model.graphical_index)
                box_model.setSelected(True)

        # Links can only be rebuilt once their end boxes exist again.
        for reconstructor in self._reconstructors:
            if reconstructor['type'] == 'link':
                link_model = parser_decode.link(reconstructor, self.diagram_model)
                self._elements_index.append(link_model.graphical_index)
                link_model.setSelected(True)

        self._reconstructors.clear()
/FlaskBB-2.0.2.tar.gz/FlaskBB-2.0.2/flaskbb/cli/utils.py | import sys
import os
import re
import click
from flask import current_app, __version__ as flask_version
from flask_themes2 import get_theme
from flaskbb import __version__
from flaskbb.utils.populate import create_user, update_user
_email_regex = r"[^@]+@[^@]+\.[^@]+"
class FlaskBBCLIError(click.ClickException):
    """A ClickException with color support.

    Signals a usage error and aborts any further handling. Extra keyword
    arguments are forwarded to ``click.secho`` when the error is shown,
    so callers can pass styles such as ``fg="red"``.
    """
    def __init__(self, message, **styles):
        click.ClickException.__init__(self, message)
        self.styles = styles

    def show(self, file=None):
        # Default to click's text stderr stream when no file is given.
        target = file if file is not None else click._compat.get_text_stderr()
        click.secho("error: %s" % self.format_message(), file=target,
                    **self.styles)
class EmailType(click.ParamType):
    """A click parameter type for loosely validated e-mail addresses.

    The value is matched against ``_email_regex`` — a permissive
    ``something@something.something`` shape test, not full RFC 5322
    validation. Invalid values fail with a click usage error.

    (The previous docstring was copy-pasted from click's Choice type and
    described a fixed set of supported values, which is not what this
    type does.)
    """
    name = "email"

    def convert(self, value, param, ctx):
        """Return *value* unchanged if it looks like an e-mail, else fail."""
        if re.match(_email_regex, value):
            return value
        else:
            self.fail(("invalid email: %s" % value), param, ctx)

    def __repr__(self):
        return "email"
def validate_plugin(plugin):
    """Checks if a plugin is installed and returns True if it is.

    TODO: Figure out how to use this in a callback. Doesn't work because
          the appcontext can't be found and using with_appcontext doesn't
          help either.
    """
    # list_name() also includes disabled plugins (they are set as 'blocked'
    # on pluggy), so a disabled plugin still validates.
    installed = current_app.pluggy.list_name()
    if plugin in installed:
        return True
    raise FlaskBBCLIError("Plugin {} not found.".format(plugin), fg="red")
def validate_theme(theme):
    """Checks if a theme is installed.

    :raises FlaskBBCLIError: if no theme with the given identifier exists.
    """
    try:
        get_theme(theme)
    except KeyError:
        raise FlaskBBCLIError("Theme {} not found.".format(theme), fg="red")
    # Mirror validate_plugin's contract, which returns True on success.
    return True
def get_cookiecutter():
    """Return the cookiecutter entry point, failing nicely if it is missing."""
    try:
        from cookiecutter.main import cookiecutter  # noqa
    except ImportError:
        raise FlaskBBCLIError(
            "Can't continue because cookiecutter is not installed. "
            "You can install it with 'pip install cookiecutter'.", fg="red"
        )
    return cookiecutter
def get_version(ctx, param, value):
    """Eager click callback that prints version information and exits."""
    # Do nothing unless the flag was actually passed.
    if not value or ctx.resilient_parsing:
        return
    info = {
        "version": __version__,
        "flask_version": flask_version,
        "python_version": sys.version.split("\n")[0],
    }
    click.echo(
        "FlaskBB %(version)s using Flask %(flask_version)s on "
        "Python %(python_version)s" % info,
        color=ctx.color,
    )
    ctx.exit()
def prompt_save_user(username, email, password, group, only_update=False):
    """Prompt for any missing account fields, then create or update the user.

    Fields that are already provided are not prompted for. When
    ``only_update`` is True the existing user is updated instead of a new
    one being created.
    """
    username = username or click.prompt(
        click.style("Username", fg="magenta"), type=str,
        default=os.environ.get("USER", "")
    )
    email = email or click.prompt(
        click.style("Email address", fg="magenta"), type=EmailType()
    )
    password = password or click.prompt(
        click.style("Password", fg="magenta"), hide_input=True,
        confirmation_prompt=True
    )
    group = group or click.prompt(
        click.style("Group", fg="magenta"),
        type=click.Choice(["admin", "super_mod", "mod", "member"]),
        default="admin"
    )
    save = update_user if only_update else create_user
    return save(username, password, email, group)
def prompt_config_path(config_path):
    """Asks for a config path. If the path exists it will ask the user
    for a new path until a he enters a path that doesn't exist.

    :param config_path: The path to the configuration.
    :returns: The chosen configuration path (either nonexistent, or an
        existing one the user explicitly agreed to overwrite).
    """
    click.secho("The path to save this configuration file.", fg="cyan")
    while True:
        # If the current candidate exists, the user may confirm overwriting
        # it, which ends the loop.
        if os.path.exists(config_path) and click.confirm(click.style(
            "Config {cfg} exists. Do you want to overwrite it?"
            .format(cfg=config_path), fg="magenta")
        ):
            break

        # Otherwise ask for another path (defaulting to the previous one).
        config_path = click.prompt(
            click.style("Save to", fg="magenta"),
            default=config_path)

        # A nonexistent path is accepted immediately.
        if not os.path.exists(config_path):
            break

    return config_path
def write_config(config, config_template, config_path):
    """Writes a new config file based upon the config template.

    :param config: A dict containing all the key/value pairs which should be
                   used for the new configuration file.
    :param config_template: The config (jinja2-)template.
    :param config_path: The place to write the new config file.
    """
    rendered = config_template.render(**config).encode("utf-8")
    with open(config_path, "wb") as cfg_file:
        cfg_file.write(rendered)
/GOSTnets-1.0.1.tar.gz/GOSTnets-1.0.1/README.md | # GOSTNets
**Python for network analysis**
Build, process, and analyze networks. GOSTNets is built on top of geopandas, networkx, osmnx, and peartree.
### Installation
Eventually we will have the tool available on pip and conda, but for now, please use the setup.py in this repository
```
conda create --name test python=3.6
conda activate test
conda install -c conda-forge rtree geopandas rasterio geojson
git clone https://github.com/worldbank/GOSTnets.git
python setup.py build
python setup.py install
```
#### Optional Dependencies
##### load_osm.py
```
conda install -c conda-forge gdal
pip install geopy
pip install boltons
```
##### optimization.py
```
pip install pulp
```
#### Install Jupyter Notebook
Jupyter Notebook is used in many GOSTnets examples. We recommend installing it within your environment
```
conda install -c conda-forge jupyterlab
```
## Documentation
Documentation available at [readthedocs](https://gostnets.readthedocs.io/)
Plenty of examples and tutorials using Jupyter Notebooks live inside of the Implementations folder within the [GOST_PublicGoods Github repo](https://github.com/worldbank/GOST_PublicGoods)
### How to build the docs automatically
in the docs/source dir, run:
```
sphinx-apidoc -f -o . ../../GOSTnets
```
in the docs dir, run:
```
make html
```
## Usage
Every function contains a docstring, which can be displayed while working to check that function's inputs and options. For example:
```python
import GOSTnets as gn
gn.edge_gdf_from_graph?
```
returns:
```
Signature: gn.edge_gdf_from_graph(G, crs={'init': 'epsg:4326'}, attr_list=None, geometry_tag='geometry', xCol='x', yCol='y')
#### Function for generating a GeoDataFrame from a networkx Graph object ###
REQUIRED: a graph object G
OPTIONAL: crs - projection of format {'init' :'epsg:4326'}. Defaults to
WGS84. Note: here we are defining the crs of the input geometry -
we do NOT reproject to this crs. To reproject, consider using
geopandas' to_crs method on the returned gdf.
attr_list: list of the keys which you want to be moved over to
the GeoDataFrame.
geometry_tag - the key in the data dictionary for each edge which
contains the geometry info.
xCol - if no geometry is present in the edge data dictionary, the
function will try to construct a straight line between the start
and end nodes, if geometry information is present in their data
dictionaries. Pass the Longitude info as 'xCol'.
yCol - likewise, determining the Latitude tag for the node's data
dictionary allows us to make a straight line geometry where an
actual geometry is missing.
RETURNS: a GeoDataFrame object of the edges in the graph
#-------------------------------------------------------------------------#
```
These docstrings have been written for every function, and should help new and old users alike with the options and syntax.
| PypiClean |
/CsuPTMD-1.0.12.tar.gz/CsuPTMD-1.0.12/PTMD/maskrcnn_benchmark/data/datasets/evaluation/coco/coco_eval.py | import logging
import os
import tempfile
from collections import OrderedDict
import torch
from tqdm import tqdm
from demo.inference import PlaneClustering
from PTMD.maskrcnn_benchmark.modeling.roi_heads.mask_head.inference import Masker
from PTMD.maskrcnn_benchmark.structures.bounding_box import BoxList
from PTMD.maskrcnn_benchmark.structures.boxlist_ops import boxlist_iou
def do_coco_evaluation(
    dataset,
    predictions,
    box_only,
    output_folder,
    iou_types,
    expected_results,
    expected_results_sigma_tol,
    masker,
):
    """Run COCO-style evaluation over a set of predictions.

    Args:
        dataset: COCO-style dataset providing id mappings and image info.
        predictions: per-image predictions, indexed like the dataset.
        box_only: if True, only evaluate box-proposal recall and return early.
        output_folder: directory for result files; falsy to skip saving.
        iou_types: subset of {"bbox", "segm", "keypoints"} to evaluate.
        expected_results: regression expectations checked after evaluation.
        expected_results_sigma_tol: tolerance (in sigmas) for those checks.
        masker: object that converts mask heatmaps for segm evaluation.
    Returns:
        (COCOResults, dict of per-iou-type result lists), or None in
        box_only mode.
    """
    logger = logging.getLogger("maskrcnn_benchmark.inference")

    # Proposal-only mode: compute average recall at several area ranges and
    # proposal limits, save, and return without running COCOeval.
    if box_only:
        logger.info("Evaluating bbox proposals")
        areas = {"all": "", "small": "s", "medium": "m", "large": "l"}
        res = COCOResults("box_proposal")
        for limit in [100, 1000]:
            for area, suffix in areas.items():
                stats = evaluate_box_proposals(
                    predictions, dataset, area=area, limit=limit
                )
                key = "AR{}@{:d}".format(suffix, limit)
                res.results["box_proposal"][key] = stats["ar"].item()
        logger.info(res)
        check_expected_results(res, expected_results, expected_results_sigma_tol)
        if output_folder:
            torch.save(res, os.path.join(output_folder, "box_proposals.pth"))
        return
    # Convert predictions into COCO result-format dicts per iou type.
    logger.info("Preparing results for COCO format")
    coco_results = {}
    if "bbox" in iou_types:
        logger.info("Preparing bbox results")
        coco_results["bbox"] = prepare_for_coco_detection(predictions, dataset)
    if "segm" in iou_types:
        logger.info("Preparing segm results")
        coco_results["segm"] = prepare_for_coco_segmentation(predictions, dataset, masker)
    if 'keypoints' in iou_types:
        logger.info('Preparing keypoints results')
        coco_results['keypoints'] = prepare_for_coco_keypoint(predictions, dataset)

    # Score each iou type with the official COCOeval and collect the stats.
    results = COCOResults(*iou_types)
    logger.info("Evaluating predictions")
    for iou_type in iou_types:
        with tempfile.NamedTemporaryFile() as f:
            file_path = f.name
            # Persist the intermediate JSON next to the outputs if requested.
            if output_folder:
                file_path = os.path.join(output_folder, iou_type + ".json")
            res = evaluate_predictions_on_coco(
                dataset.coco, coco_results[iou_type], file_path, iou_type
            )
            results.update(res)
    logger.info(results)
    check_expected_results(results, expected_results, expected_results_sigma_tol)
    if output_folder:
        torch.save(results, os.path.join(output_folder, "coco_results.pth"))
    return results, coco_results
def prepare_for_coco_detection(predictions, dataset):
    """Convert per-image box predictions into COCO result-format dicts.

    Boxes are resized to the original image resolution and converted to
    xywh, and contiguous label ids are mapped back to COCO category ids.
    """
    # assert isinstance(dataset, COCODataset)
    coco_results = []
    for image_id, prediction in enumerate(predictions):
        original_id = dataset.id_to_img_map[image_id]
        if len(prediction) == 0:
            continue

        img_info = dataset.get_img_info(image_id)
        prediction = prediction.resize((img_info["width"], img_info["height"]))
        prediction = prediction.convert("xywh")

        boxes = prediction.bbox.tolist()
        scores = prediction.get_field("scores").tolist()
        labels = prediction.get_field("labels").tolist()

        for box, score, label in zip(boxes, scores, labels):
            coco_results.append(
                {
                    "image_id": original_id,
                    "category_id": dataset.contiguous_category_id_to_json_id[label],
                    "bbox": box,
                    "score": score,
                }
            )
    return coco_results
def prepare_for_coco_segmentation(predictions, dataset, masker):
    """Convert per-image mask predictions into COCO RLE result dicts.

    The masker turns the raw 28x28 mask "heatmaps" into either binary masks
    or polygon point lists, which are then encoded as COCO RLE.
    NOTE(review): the initial assert requires a Masker, yet the code later
    branches on PlaneClustering — presumably PlaneClustering subclasses
    Masker; confirm against demo.inference.
    """
    import pycocotools.mask as mask_util
    import numpy as np

    assert isinstance(masker, Masker)
    # assert isinstance(dataset, COCODataset)
    coco_results = []
    for image_id, prediction in tqdm(enumerate(predictions)):
        original_id = dataset.id_to_img_map[image_id]
        if len(prediction) == 0:
            continue

        img_info = dataset.get_img_info(image_id)
        image_width = img_info["width"]
        image_height = img_info["height"]
        prediction = prediction.resize((image_width, image_height))
        masks = prediction.get_field("mask")
        # t = time.time()
        # When evaluating, we convert the masks from 28x28 "heatmap" to "binary mask" or "polygon points list" by masker
        masks = masker.forward_single_image(masks, prediction)
        # logger.info('Time mask: {}'.format(time.time() - t))
        # prediction = prediction.convert('xywh')

        # boxes = prediction.bbox.tolist()
        scores = prediction.get_field("scores").tolist()
        labels = prediction.get_field("labels").tolist()

        # rles = prediction.get_field('mask')

        # Then convert masks in ["polygon points list", "binary mask"] to RLE
        if isinstance(masker, PlaneClustering):
            # PlaneClustering yields 4-point polygons (8 coords per instance).
            points_list = masks.reshape((-1, 8)).tolist()
            rles = mask_util.frPyObjects(points_list, image_height, image_width)
        else:
            # Binary masks: encode each instance in Fortran order as COCO RLE.
            rles = [
                mask_util.encode(np.array(mask[0, :, :, np.newaxis], order="F"))[0]
                for mask in masks
            ]
        # RLE counts come back as bytes; JSON serialization needs str.
        for rle in rles:
            rle["counts"] = rle["counts"].decode("utf-8")

        mapped_labels = [dataset.contiguous_category_id_to_json_id[i] for i in labels]

        coco_results.extend(
            [
                {
                    "image_id": original_id,
                    "category_id": mapped_labels[k],
                    "segmentation": rle,
                    "score": scores[k],
                }
                for k, rle in enumerate(rles)
            ]
        )
    return coco_results
def prepare_for_coco_keypoint(predictions, dataset):
    """Convert per-image keypoint predictions into COCO result-format dicts.

    Keypoints and boxes are resized to the original image resolution and
    contiguous label ids are mapped back to COCO category ids.
    """
    # assert isinstance(dataset, COCODataset)
    coco_results = []
    for image_id, prediction in enumerate(predictions):
        original_id = dataset.id_to_img_map[image_id]
        if len(prediction.bbox) == 0:
            continue

        # Use the same image-size lookup as the bbox/segm converters
        # (resolves the old "replace with get_img_info" TODO).
        img_info = dataset.get_img_info(image_id)
        image_width = img_info["width"]
        image_height = img_info["height"]
        prediction = prediction.resize((image_width, image_height))
        prediction = prediction.convert('xywh')

        scores = prediction.get_field('scores').tolist()
        labels = prediction.get_field('labels').tolist()
        keypoints = prediction.get_field('keypoints')
        keypoints = keypoints.resize((image_width, image_height))
        # Flatten each instance's keypoints to the COCO [x1, y1, v1, ...] list.
        keypoints = keypoints.keypoints.view(keypoints.keypoints.shape[0], -1).tolist()

        mapped_labels = [dataset.contiguous_category_id_to_json_id[i] for i in labels]

        coco_results.extend([{
            'image_id': original_id,
            'category_id': mapped_labels[k],
            'keypoints': keypoint,
            'score': scores[k]} for k, keypoint in enumerate(keypoints)])
    return coco_results
# inspired from Detectron
def evaluate_box_proposals(
    predictions, dataset, thresholds=None, area="all", limit=None
):
    """Evaluate detection proposal recall metrics. This function is a much
    faster alternative to the official COCO API recall evaluation code. However,
    it produces slightly different results.

    Args:
        predictions: per-image proposal boxes with an "objectness" field.
        dataset: COCO-style dataset used for ground-truth annotations.
        thresholds: IoU thresholds to compute recall at; defaults to
            0.5:0.05:0.95.
        area: ground-truth area bucket to restrict evaluation to.
        limit: if given, keep only the top-`limit` proposals per image.
    Returns:
        dict with "ar" (average recall), "recalls", "thresholds",
        "gt_overlaps" and "num_pos".
    """
    # Record max overlap value for each gt box
    # Return vector of overlap values
    areas = {
        "all": 0,
        "small": 1,
        "medium": 2,
        "large": 3,
        "96-128": 4,
        "128-256": 5,
        "256-512": 6,
        "512-inf": 7,
    }
    area_ranges = [
        [0 ** 2, 1e5 ** 2],  # all
        [0 ** 2, 32 ** 2],  # small
        [32 ** 2, 96 ** 2],  # medium
        [96 ** 2, 1e5 ** 2],  # large
        [96 ** 2, 128 ** 2],  # 96-128
        [128 ** 2, 256 ** 2],  # 128-256
        [256 ** 2, 512 ** 2],  # 256-512
        [512 ** 2, 1e5 ** 2],
    ]  # 512-inf
    assert area in areas, "Unknown area range: {}".format(area)
    area_range = area_ranges[areas[area]]
    gt_overlaps = []
    num_pos = 0
    for image_id, prediction in enumerate(predictions):
        original_id = dataset.id_to_img_map[image_id]

        img_info = dataset.get_img_info(image_id)
        image_width = img_info["width"]
        image_height = img_info["height"]
        prediction = prediction.resize((image_width, image_height))

        # sort predictions in descending order
        # TODO maybe remove this and make it explicit in the documentation
        inds = prediction.get_field("objectness").sort(descending=True)[1]
        prediction = prediction[inds]

        # Load the non-crowd ground-truth boxes for this image, restricted
        # to the requested area bucket.
        ann_ids = dataset.coco.getAnnIds(imgIds=original_id)
        anno = dataset.coco.loadAnns(ann_ids)
        gt_boxes = [obj["bbox"] for obj in anno if obj["iscrowd"] == 0]
        gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4)  # guard against no boxes
        gt_boxes = BoxList(gt_boxes, (image_width, image_height), mode="xywh").convert(
            "xyxy"
        )
        gt_areas = torch.as_tensor([obj["area"] for obj in anno if obj["iscrowd"] == 0])

        if len(gt_boxes) == 0:
            continue

        valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1])
        gt_boxes = gt_boxes[valid_gt_inds]

        num_pos += len(gt_boxes)

        if len(gt_boxes) == 0:
            continue

        if len(prediction) == 0:
            continue

        if limit is not None and len(prediction) > limit:
            prediction = prediction[:limit]

        # Greedy one-to-one matching: repeatedly pick the best-covered gt box
        # and its best proposal, record the IoU, then exclude both.
        overlaps = boxlist_iou(prediction, gt_boxes)

        _gt_overlaps = torch.zeros(len(gt_boxes))
        for j in range(min(len(prediction), len(gt_boxes))):
            # find which proposal box maximally covers each gt box
            # and get the iou amount of coverage for each gt box
            max_overlaps, argmax_overlaps = overlaps.max(dim=0)

            # find which gt box is 'best' covered (i.e. 'best' = most iou)
            gt_ovr, gt_ind = max_overlaps.max(dim=0)
            assert gt_ovr >= 0
            # find the proposal box that covers the best covered gt box
            box_ind = argmax_overlaps[gt_ind]
            # record the iou coverage of this gt box
            _gt_overlaps[j] = overlaps[box_ind, gt_ind]
            assert _gt_overlaps[j] == gt_ovr
            # mark the proposal box and the gt box as used
            overlaps[box_ind, :] = -1
            overlaps[:, gt_ind] = -1

        # append recorded iou coverage level
        gt_overlaps.append(_gt_overlaps)
    gt_overlaps = torch.cat(gt_overlaps, dim=0)
    gt_overlaps, _ = torch.sort(gt_overlaps)

    if thresholds is None:
        step = 0.05
        thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32)
    recalls = torch.zeros_like(thresholds)
    # compute recall for each iou threshold
    for i, t in enumerate(thresholds):
        recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos)
    # ar = 2 * np.trapz(recalls, thresholds)
    ar = recalls.mean()
    return {
        "ar": ar,
        "recalls": recalls,
        "thresholds": thresholds,
        "gt_overlaps": gt_overlaps,
        "num_pos": num_pos,
    }
def evaluate_predictions_on_coco(
    coco_gt, coco_results, json_result_file, iou_type="bbox"
):
    """Dump detections to a JSON file and score them with pycocotools.

    :param coco_gt: ground-truth COCO object
    :param coco_results: list of detection dicts in COCO result format
    :param json_result_file: path the results are serialized to
    :param iou_type: "bbox", "segm" or "keypoints"
    :return: the evaluated COCOeval object (stats populated)
    """
    import json

    with open(json_result_file, "w") as f:
        json.dump(coco_results, f)

    from pycocotools.coco import COCO
    from pycocotools.cocoeval import COCOeval

    # loadRes cannot handle an empty result list; fall back to a blank COCO.
    coco_dt = coco_gt.loadRes(str(json_result_file)) if coco_results else COCO()
    evaluator = COCOeval(coco_gt, coco_dt, iou_type)
    evaluator.evaluate()
    evaluator.accumulate()
    evaluator.summarize()
    return evaluator
class COCOResults(object):
    """Accumulator mapping each requested IoU type to an ordered table of
    COCO metric names, initialised to -1 until ``update`` fills them in."""

    # Metric names per IoU type, in the order COCOeval reports them.
    METRICS = {
        "bbox": ["AP", "AP50", "AP75", "APs", "APm", "APl"],
        "segm": ["AP", "AP50", "AP75", "APs", "APm", "APl"],
        "box_proposal": [
            "AR@100",
            "ARs@100",
            "ARm@100",
            "ARl@100",
            "AR@1000",
            "ARs@1000",
            "ARm@1000",
            "ARl@1000",
        ],
        "keypoints": ["AP", "AP50", "AP75", "APm", "APl"],
    }

    def __init__(self, *iou_types):
        allowed_types = ("box_proposal", "bbox", "segm", "keypoints")
        assert all(iou_type in allowed_types for iou_type in iou_types)
        # One ordered sub-table per IoU type; -1 marks "not evaluated yet".
        self.results = OrderedDict(
            (
                iou_type,
                OrderedDict((metric, -1) for metric in COCOResults.METRICS[iou_type]),
            )
            for iou_type in iou_types
        )

    def update(self, coco_eval):
        """Copy the leading entries of ``coco_eval.stats`` into this table."""
        if coco_eval is None:
            return
        from pycocotools.cocoeval import COCOeval

        assert isinstance(coco_eval, COCOeval)
        stats = coco_eval.stats
        iou_type = coco_eval.params.iouType
        scores = self.results[iou_type]
        metric_names = COCOResults.METRICS[iou_type]
        for position, metric_name in enumerate(metric_names):
            scores[metric_name] = stats[position]

    def __repr__(self):
        # TODO make it pretty
        return repr(self.results)
def check_expected_results(results, expected_results, sigma_tol):
    """Sanity-check computed metrics against expected (mean, std) ranges.

    For each (task, metric, (mean, std)) triple, the actual value must lie
    strictly inside mean +/- sigma_tol * std.  Results are only logged
    (PASS at info level, FAIL at error level); nothing is raised.

    :param results: object exposing ``results[task][metric]`` (COCOResults)
    :param expected_results: iterable of (task, metric, (mean, std))
    :param sigma_tol: tolerance multiplier applied to std
    """
    if not expected_results:
        return

    logger = logging.getLogger("maskrcnn_benchmark.inference")
    for task, metric, (mean, std) in expected_results:
        actual_val = results.results[task][metric]
        lo, hi = mean - sigma_tol * std, mean + sigma_tol * std
        ok = lo < actual_val < hi
        msg = (
            "{} > {} sanity check (actual vs. expected): "
            "{:.3f} vs. mean={:.4f}, std={:.4}, range=({:.4f}, {:.4f})"
        ).format(task, metric, actual_val, mean, std, lo, hi)
        if ok:
            logger.info("PASS: " + msg)
        else:
            logger.error("FAIL: " + msg)
/JSTools-1.0.tar.gz/JSTools-1.0/jstools/utils.py | from configparser import NoSectionError
from configparser import ConfigParser
from collections import MutableMapping
import sys
import logging
import os
try:
    from functools import wraps
except ImportError:
    # Fallback for very old Pythons without functools.wraps.
    # Fixes two defects of the previous shim: the wrapper silently dropped
    # the wrapped function's return value, and it did not copy any of the
    # original function's metadata (so decorated names/docstrings vanished).
    def wraps(f):
        def _decorator(func):
            def _wrapper(*args, **kwargs):
                # Propagate the result, matching functools.wraps semantics.
                return func(*args, **kwargs)
            for attr in ("__name__", "__doc__", "__module__"):
                try:
                    setattr(_wrapper, attr, getattr(f, attr))
                except AttributeError:
                    pass
            _wrapper.__dict__.update(getattr(f, "__dict__", {}))
            return _wrapper
        return _decorator
def arg_parser(optparser):
    """Decorator factory that short-circuits option parsing.

    The decorated function may be called with pre-parsed ``(args, options)``;
    when both are None, ``sys.argv`` is parsed with *optparser* instead.
    The function always receives ``(args, options, optparser)``.
    """
    def wrapper(func):
        @wraps(func)
        def caller(args=None, options=None, parser=optparser):
            if args is None and options is None:
                options, args = optparser.parse_args(sys.argv)
            return func(args, options, optparser)
        return caller
    return wrapper
class SectionMap(MutableMapping):
    """Mutable mapping view over one ConfigParser section.

    Reads and writes pass straight through to the underlying parser, so
    changes are visible to any other holder of the same ConfigParser.

    Fix: ``MutableMapping`` declares ``__iter__`` and ``__len__`` abstract;
    without them this class raised TypeError on instantiation.
    """

    def __init__(self, cp, section):
        if not cp.has_section(section):
            raise NoSectionError("%s does not exist in %s" % (section, cp))
        self.cp = cp
        self.section = section

    @property
    def section_items(self):
        """Current (key, value) pairs of the wrapped section."""
        return self.cp.items(self.section)

    def __getitem__(self, key):
        return dict(self.section_items)[key]

    def __delitem__(self, key):
        self.cp.remove_option(self.section, key)

    def __setitem__(self, key, value):
        self.cp.set(self.section, key, value)

    def __iter__(self):
        # Required abstract method: iterate the section's option names.
        return iter(dict(self.section_items))

    def __len__(self):
        # Required abstract method: number of options in the section.
        return len(self.section_items)

    def keys(self):
        return list(dict(self.section_items).keys())
def load_return(section=None):
    """Build a loader closure over a fresh ConfigParser.

    The returned callable reads a config file path and yields either the
    whole parser, or a SectionMap for *section* when one was requested.
    """
    parser = ConfigParser()

    def load_file_or_section(path):
        parser.read(path)
        if section is None:
            return parser
        return SectionMap(parser, section)

    return load_file_or_section
# for compressor
def retrieve_config(section=None, strict=False):
    """Locate a ``.jstools.cfg`` and return its parsed representation.

    Search order: the current directory and each of its ancestors, then
    ``$VIRTUAL_ENV``, then the user's home directory.  Returns a
    ConfigParser (or a SectionMap when *section* is given) for the first
    file found, or None when nothing matches.

    NOTE(review): *strict* is currently unused; kept for API compatibility.

    Fix: the old walk-up loop tested ``while conf is None and directory``
    but never assigned ``conf`` nor checked for the filesystem root, where
    ``path('/').parent == path('/')`` makes the loop spin forever when no
    config file exists.  We now stop once the parent equals the directory.
    """
    from paver.easy import path
    fn = ".jstools.cfg"
    directory = path(os.path.abspath(os.curdir))
    section_or_parser = load_return(section)
    while directory:
        candidate = directory / fn
        if os.path.exists(candidate):
            return section_or_parser(candidate)
        parent = directory.parent
        if parent == directory:
            # Reached the filesystem root; give up the upward walk.
            break
        directory = parent
    venv = os.environ.get("VIRTUAL_ENV")
    if venv and (path(venv) / fn).exists():
        return section_or_parser(path(venv) / fn)
    user = path(os.path.expanduser("~/"))
    if (user / fn).exists():
        return section_or_parser(user / fn)
/Jomini-0.1.4.tar.gz/Jomini-0.1.4/jomini/Lanchester.py | from math import ceil
class Battle:
    def __init__(self, red=20_000, blue=20_000, rho=0.0100, beta=0.0100):
        """
        We assume that battles are two sided
        :param int red: Number of soldiers in the side red
        :param int blue: Number of soldiers in the side blue
        :param float rho: Power coefficient of the red side (default is 0.0100)
        :param float beta: Power coefficient of the blue side (default is 0.0100)
        :raises TypeError: if sizes are not int or coefficients not numeric
        :raises ValueError: if sizes or coefficients are negative
        """
        if not isinstance(red, int) or not isinstance(blue, int):
            raise TypeError("Number of soldiers must be integer")
        if red < 0 or blue < 0:
            raise ValueError("Number of soldiers can not be negative")
        if not isinstance(rho, (int, float)) or not isinstance(beta, (int, float)):
            raise TypeError("The power coefficient must be int or float")
        if rho < 0 or beta < 0:
            raise ValueError("Power coefficient values can not be negative")
        self.red = red
        self.blue = blue
        self.rho = rho
        self.beta = beta

    def __repr__(self):
        # Fix: arguments now appear in constructor order (red, blue, rho,
        # beta); the old repr interleaved them (red, rho, blue, beta), so
        # eval(repr(b)) built a different battle.
        return f"Battle({self.red}, {self.blue}, {self.rho}, {self.beta})"

    def __str__(self):
        return f"Battle\n" \
               f"Red side: {self.red} Soldiers | Rho: {self.rho}\n" \
               f"Blue side: {self.blue} Soldiers | Beta: {self.beta}"
class Lanchester:
    """Shared machinery for the Lanchester combat-law models.

    Subclasses provide ``self.battle`` and ``get_casualty_rates()``; this
    base class derives casualties, survivors and a textual battle report
    from those.  Not instantiable on its own.
    """

    def __init__(self):
        raise RuntimeError("Lanchester class should not be instantiated")

    def _check_time(self, _time):
        """Validate a battle duration; None means 'until annihilation'."""
        if _time is None:
            # Pick the duration after which the faster-depleted side is gone.
            red_rate, blue_rate = self.get_casualty_rates()
            if red_rate > blue_rate:
                _time = int(ceil(self.battle.red / red_rate))
            else:
                _time = int(ceil(self.battle.blue / blue_rate))
        if not isinstance(_time, int):
            raise TypeError("time should be int")
        if _time <= 0:
            raise ValueError("time can not be zero or negative")
        return _time

    def get_casualties(self, time=None):
        """Casualties (red, blue) after *time* units; capped at army size.

        With ``time=None`` the battle runs until one side is annihilated.
        """
        time = self._check_time(time)
        red_rate, blue_rate = self.get_casualty_rates()
        red_losses = min(red_rate * time, self.battle.red)
        blue_losses = min(blue_rate * time, self.battle.blue)
        return int(red_losses), int(blue_losses)

    def get_remaining(self, time=None):
        """Survivors (red, blue) once casualties have been subtracted."""
        time = self._check_time(time)
        red_losses, blue_losses = self.get_casualties(time)
        return int(self.battle.red - red_losses), int(self.battle.blue - blue_losses)

    def _simulate_battle(self, which_model, time=None):
        """Build the human-readable report string for a simulated battle."""
        time = self._check_time(time)
        red_losses, blue_losses = self.get_casualties(time)
        red_left, blue_left = self.get_remaining(time)
        parts = [
            f"----- {which_model} BATTLE RESULTS -----\n",
            str(self.battle),
            "\n",
            f"The battle lasted {time} time units.\n",
            f"Red casualties: {red_losses} | Blue casualties: {blue_losses}\n",
            f"Red remaining: {red_left} | Blue Remaining: {blue_left}\n",
            "-" * 60,
        ]
        return "".join(parts)
class LinearLaw(Lanchester):
    """Lanchester's Linear Law with an explicit engagement width."""

    def __init__(self, battle, engagement_width):
        """
        Implementing Lanchester's Linear Law
        :param battle: Battle object
        :param int engagement_width: Denotes the width of the front-line
        :raises TypeError: if battle or engagement_width have the wrong type
        :raises ValueError: if engagement_width is < 1 or exceeds either army
        """
        if not isinstance(battle, Battle):
            raise TypeError("battle should be an object of the Battle class")
        if not isinstance(engagement_width, int):
            raise TypeError("engagement_width must be an integer")
        if engagement_width < 1:
            raise ValueError("engagement_width can not be lesser than 1")
        if engagement_width > min(battle.red, battle.blue):
            raise ValueError("engagement_width can not be greater than the size of either side")
        self.battle = battle
        self.engagement_width = engagement_width

    def get_density(self):
        # Troop "density" here is width divided by the squared army size, so
        # larger armies on the same frontage yield much smaller densities.
        density_red = (self.engagement_width / self.battle.red ** 2)
        density_blue = (self.engagement_width / self.battle.blue ** 2)
        return density_red, density_blue

    def get_casualty_rates(self):
        """ Returns a tuple representing casualty rates for unit time """
        density_red, density_blue = self.get_density()
        # The * 100 is a magic scale factor turning the tiny density product
        # into whole-soldier casualty rates; presumably chosen empirically —
        # TODO confirm against the package's model documentation.
        rate_red = density_red * self.battle.red * self.battle.blue * self.battle.beta * 100
        rate_blue = density_blue * self.battle.red * self.battle.blue * self.battle.rho * 100
        return int(rate_red), int(rate_blue)

    def simulate_battle(self, time=None):
        # Mirrors Lanchester._simulate_battle but inserts the width/density
        # line specific to the Linear Law report.
        time = self._check_time(time)
        density_red, density_blue = self.get_density()
        casualties_red, casualties_blue = self.get_casualties(time)
        remaining_red, remaining_blue = self.get_remaining(time)
        return_str = f"----- LINEAR LAW BATTLE RESULTS -----\n" + str(self.battle) + "\n" \
                     + f"Engagement width: {self.engagement_width} | " \
                     + f"Red density: {density_red} | Blue density: {density_blue}\n" \
                     + f"The battle lasted {time} time units.\n" \
                     + f"Red casualties: {casualties_red} | " + f"Blue casualties: {casualties_blue}\n" \
                     + f"Red remaining: {remaining_red} | " + f"Blue Remaining: {remaining_blue}\n" + "-" * 60
        return return_str
class SquareLaw(Lanchester):
    """Lanchester's Square Law: each side's loss rate is driven by the
    opposing force's size times that opponent's power coefficient."""

    def __init__(self, battle):
        """
        Implementing Lanchester's Square Law
        :param battle: Battle object
        :raises TypeError: if battle is not a Battle instance
        """
        if not isinstance(battle, Battle):
            raise TypeError("battle should be an object of the Battle class")
        self.battle = battle

    def get_casualty_rates(self):
        """Returns a tuple representing casualty rates for unit time"""
        # Red loses in proportion to blue's strength, and vice versa.
        return (
            int(self.battle.blue * self.battle.beta),
            int(self.battle.red * self.battle.rho),
        )

    def simulate_battle(self, time=None):
        """Render the simulated battle as a report string."""
        return self._simulate_battle(which_model="SQUARE LAW", time=time)
class LogarithmicLaw(Lanchester):
    """The Logarithmic Law: each side's loss rate scales with its *own*
    size, weighted by the opponent's power coefficient."""

    def __init__(self, battle):
        """
        Implementing the Logarithmic Law
        :param battle: Battle object
        :raises TypeError: if battle is not a Battle instance
        """
        if not isinstance(battle, Battle):
            raise TypeError("battle should be an object of the Battle class")
        self.battle = battle

    def get_casualty_rates(self):
        """Returns a tuple representing casualty rates for unit time"""
        # Unlike the Square Law, losses depend on the losing side's own size.
        return (
            int(self.battle.red * self.battle.beta),
            int(self.battle.blue * self.battle.rho),
        )

    def simulate_battle(self, time=None):
        """Render the simulated battle as a report string."""
        return self._simulate_battle(which_model="LOGARITHMIC LAW", time=time)
class GeneralLaw(Lanchester):
    """Bracken's General Law, parameterised by exponents p and q."""

    def __init__(self, battle, engagement_width, p, q):
        """
        Implementing the General Law by Bracken (Generalization of Linear, Square and Logarithmic laws)
        p = q = 1 leads to the Linear Law (The actual Linear Law, not our version)
        p = 0 AND q = 1 leads to the Logarithmic Law
        p = 1 AND q = 0 leads to the Square Law
        :param battle: Battle object
        :param int engagement_width: Denotes the width of the front-line
        :param float p: Exponent value which determines proximity to the said laws
        :param float q: Another exponent value
        :raises TypeError: if any argument has the wrong type
        :raises ValueError: if engagement_width, p or q are out of range
        """
        if not isinstance(battle, Battle):
            raise TypeError("battle should be an object of the Battle class")
        if not isinstance(engagement_width, int):
            raise TypeError("engagement_width must be an integer")
        if engagement_width < 1:
            raise ValueError("engagement_width can not be lesser than 1")
        if engagement_width > min(battle.red, battle.blue):
            raise ValueError("engagement_width can not be greater than the size of either side")
        if not all(isinstance(el, (int, float)) for el in [p, q]):
            raise TypeError("p and q must be int or float")
        if not (0 <= p <= 1 and 0 <= q <= 1):
            raise ValueError("p and q must be between 0 and 1 included.")
        self.battle = battle
        self.engagement_width = engagement_width
        self.p = p
        self.q = q

    def get_density(self):
        # Same density definition as LinearLaw: width over squared army size.
        density_red = (self.engagement_width / self.battle.red ** 2)
        density_blue = (self.engagement_width / self.battle.blue ** 2)
        return density_red, density_blue

    def get_casualty_rates(self):
        """ Returns a tuple representing casualty rates for unit time """
        # Magic scale factor compensating the tiny density * exponentiated
        # sizes product; presumably tuned empirically — TODO confirm.
        SCALE_CONSTANT = 1_000_000
        density_red, density_blue = self.get_density()
        # Own size enters with exponent q, the opponent's with exponent p,
        # which is what lets p/q interpolate between the three basic laws.
        rate_red = density_red * (self.battle.red ** self.q) * (self.battle.blue ** self.p) \
                   * self.battle.beta * SCALE_CONSTANT
        rate_blue = density_blue * (self.battle.red ** self.p) * (self.battle.blue ** self.q) \
                    * self.battle.rho * SCALE_CONSTANT
        return int(rate_red), int(rate_blue)

    def simulate_battle(self, time=None):
        # Mirrors Lanchester._simulate_battle plus width/density/p/q lines.
        time = self._check_time(time)
        density_red, density_blue = self.get_density()
        casualties_red, casualties_blue = self.get_casualties(time)
        remaining_red, remaining_blue = self.get_remaining(time)
        return_str = f"----- GENERAL LAW BATTLE RESULTS -----\n" + str(self.battle) + "\n" \
                     + f"Engagement width: {self.engagement_width} | " \
                     + f"Red density: {density_red} | Blue density: {density_blue}\n" \
                     + f"p value: {self.p} | q value: {self.q}\n" \
                     + f"The battle lasted {time} time units.\n" \
                     + f"Red casualties: {casualties_red} | " + f"Blue casualties: {casualties_blue}\n" \
                     + f"Red remaining: {remaining_red} | " + f"Blue Remaining: {remaining_blue}\n" + "-" * 60
        return return_str
/Hanita-0.1.3-py3-none-any.whl/hanita/forms/just_ui.py | import random
import sys
import time
from PyQt5 import Qt, QtCore, QtWidgets
import form_contacts
import form_new_chat
import form_main
class ContactsDialog(QtWidgets.QDialog):
    """Dialog listing the user's contacts.

    Supports adding/removing contacts and starting a direct chat via a
    double-click.  Outgoing actions are emitted as dicts on ``handle_msg``;
    the parent pushes refreshed contact data back through ``redraw``.
    """

    # Emits control messages such as {"action": "add_contact", ...}.
    handle_msg = QtCore.pyqtSignal(dict)

    def __init__(self, parent=None, user_list=None):
        QtWidgets.QDialog.__init__(self, parent)
        self.ui = form_contacts.Ui_dialog_contacts()
        self.ui.setupUi(self)
        self.data = {}
        user_list = user_list or []
        for user in user_list:
            self.createItem(user)
        self.ui.lw_contacts.itemDoubleClicked.connect(self.chat_with)
        self.ui.pb_add_contact.clicked.connect(self.add_contact)
        # NOTE(review): assumes parent() exposes a redraw_contacts signal
        # (i.e. is a MainWindow) — confirm before reusing elsewhere.
        self.parent().redraw_contacts.connect(self.redraw)

    @QtCore.pyqtSlot(list)
    def redraw(self, data):
        """Rebuild the contact list from a fresh list of user dicts."""
        self.ui.lw_contacts.clear()
        for user in data:
            self.createItem(user)

    def createItem(self, user):
        """Add one contact row: name label plus an 'X' delete button."""
        item = QtWidgets.QListWidgetItem(self.ui.lw_contacts)
        widget = QtWidgets.QWidget()
        widgetText = QtWidgets.QLabel(user["user_name"])
        widgetButton = QtWidgets.QPushButton("X")
        # Stash the user id on the button so del_contact knows its target.
        widgetButton.setProperty("id", user["user_id"])
        widgetButton.clicked.connect(self.del_contact)
        widgetButton.setFixedWidth(24)
        widgetLayout = QtWidgets.QHBoxLayout()
        widgetLayout.addWidget(widgetText)
        widgetLayout.addWidget(widgetButton)
        widget.setLayout(widgetLayout)
        item.setSizeHint(widget.sizeHint())
        self.ui.lw_contacts.setItemWidget(item, widget)
        item.setData(QtCore.Qt.UserRole, user["user_id"])

    def add_contact(self):
        """Emit an add_contact request for the typed user name."""
        user_name = self.ui.le_add_input.text()
        self.ui.le_add_input.setText("")
        self.ui.le_add_input.setFocus()
        if user_name:
            self.handle_msg.emit(
                {
                    "action": "add_contact",
                    "user_name": user_name
                })

    def del_contact(self):
        """Emit a del_contact request for the clicked row's user id."""
        self.handle_msg.emit(
            {
                "action": "del_contact",
                "user_id": self.sender().property("id")
            })

    def chat_with(self):
        """Start a direct chat with the selected contact(s), then close.

        An empty chat_name marks a direct chat; the receiving side derives
        the displayed name from the first id in chat_user_ids.
        """
        action = "new_chat"
        selected_id = []
        user = self.ui.lw_contacts.selectedItems()
        if user:
            selected_id = [
                i.data(QtCore.Qt.UserRole)
                for i in user
            ]
        self.data = {"action": action, "chat_name": "",
                     "chat_user_ids": selected_id}
        self.handle_msg.emit(self.data)
        self.close()
class NewChatDialog(QtWidgets.QDialog):
    """Dialog for creating a named group chat from selected contacts."""

    # Emits {"action": "new_chat", "chat_name": ..., "chat_user_ids": [...]}.
    handle_msg = QtCore.pyqtSignal(dict)

    def __init__(self, parent=None, user_list=None):
        QtWidgets.QDialog.__init__(self, parent)
        self.ui = form_new_chat.Ui_dialog_new_chat()
        self.ui.setupUi(self)
        self.chat_data = {}
        user_list = user_list or []
        # Populate the selectable contact list; ids ride along as item data.
        for user in user_list:
            item = QtWidgets.QListWidgetItem(self.ui.lw_chat_contacts)
            item.setText(user["user_name"])
            item.setData(QtCore.Qt.UserRole, user["user_id"])
        self.ui.pb_chat_create.clicked.connect(self.chat_create)

    def chat_create(self):
        """Validate the form, emit the new_chat message and close.

        Requires a non-empty chat name and at least one selected user;
        otherwise shows an information box (user-facing text is Russian).
        """
        name = self.ui.le_chat_name.text()
        if not name:
            QtWidgets.QMessageBox.information(
                self, "info", "Пожалуйста, введите название чата")
            return
        users = self.ui.lw_chat_contacts.selectedItems()
        if not users:
            QtWidgets.QMessageBox.information(
                self, "info", "Пожалуйста, выберите минимум одного пользователя")
            return
        users_data = [i.data(QtCore.Qt.UserRole) for i in users]
        self.chat_data = {
            "action": "new_chat",
            "chat_name": name,
            "chat_user_ids": users_data
        }
        self.handle_msg.emit(self.chat_data)
        self.close()
###############################################################################
# ### MainWindow
###############################################################################
class MainWindow(QtWidgets.QMainWindow):
    """
    Main window view for the chat client.

    Methods intended to be overridden by an integrating subclass:
        get_handle_msg(self, data)
        get_chatlist(self) -> [{'chat_id': ..., 'chat_name': ...,}, ...]
        get_contactlist(self) -> [{'user_id': ..., 'user_name': ...}, ...]
        get_msgslist(self) -> [{'user_name':..., 'timestamp':..., 'message':...,}, ...]
    """

    # Signals decoupling widget events from data handling.
    redraw_contacts = QtCore.pyqtSignal(list)
    change_view = QtCore.pyqtSignal()
    handle_msg = QtCore.pyqtSignal(dict)
    model_changed = QtCore.pyqtSignal()

    def __init__(self, parent=None):
        QtWidgets.QMainWindow.__init__(self, parent)
        self.ui = form_main.Ui_MainWindow()
        self.ui.setupUi(self)
        # Stack page 0 is the login screen, page 1 the chat screen.
        self.ui.main_stack.setCurrentIndex(0)
        self.newchat_dialog = NewChatDialog
        self.contacts_dialog = ContactsDialog
        self.current_user = {
            "user_id": 1,
            "user_name": "Max"
        }
        self.current_chat = {
            "chat_id": None,
            "chat_name": None
        }
        # In-memory fake backend used by the default get_handle_msg.
        self.storage = {
            "contacts": [],
            "chats": [],
            "messages": []
        }
        contact_list = [
            {"user_id": i, "user_name": "contact_{}".format(i)}
            for i in range(10)
        ]
        self.storage["contacts"] = contact_list
        self.ui.pb_main_newchat.clicked.connect(self.show_newchat_dialog)
        self.ui.pb_main_contacts.clicked.connect(self.show_contacts_dialog)
        self.ui.lw_list_chats.itemClicked.connect(self.change_current_chat)
        self.ui.pb_send.clicked.connect(self.send_message)
        # Intercept key presses in the message input so Return sends.
        self.ui.te_input_msg.installEventFilter(self)
        self.ui.pb_login_submit.clicked.connect(self.login)
        self.handle_msg.connect(self.get_handle_msg)
        self.model_changed.connect(self.render)
        self.change_view.connect(self.change_current_view)

    def eventFilter(self, source, event):
        """Event filter: send the message when Return is pressed."""
        if (event.type() == QtCore.QEvent.KeyPress and
                event.key() == QtCore.Qt.Key_Return):
            self.send_message()
            return True
        return QtWidgets.QMainWindow.eventFilter(self, source, event)

    def model_is_changed(self):
        """Notify the view that the underlying data changed."""
        self.model_changed.emit()

    def render(self):
        """Redraw the whole view from the current model state."""
        if self.current_user:
            self.draw_chatlist()
            self.draw_msgslist()
        self.redraw_contacts.emit(self.get_contactlist())

    def change_current_view(self):
        """Switch from the login page to the chat page."""
        self.ui.main_stack.setCurrentIndex(1)

    def change_current_chat(self):
        """Make the chat selected in the chat list the active one."""
        item = self.ui.lw_list_chats.selectedItems()[0]
        chat_id = item.data(QtCore.Qt.UserRole)
        chat_name = item.text()
        self.current_chat = {
            "chat_id": chat_id,
            "chat_name": chat_name
        }
        self.model_is_changed()

    def login(self):
        """Validate the login form and emit an 'authenticate' message."""
        self.ui.login_error.setText("")
        login = self.ui.le_login_input.text().strip()
        password = self.ui.le_login_password.text()
        if not login:
            self.ui.login_error.setText("Error: empty login")
            return
        msg = {
            "action": "authenticate",
            "user": {
                "login": login,
                "password": password
            }
        }
        self.handle_msg.emit(msg)

    @QtCore.pyqtSlot(dict)
    def get_handle_msg(self, data):
        """Handle control messages against the in-memory fake backend.

        This method is meant to be overridden by a real client.
        """
        if data["action"] == "del_contact":
            _id = data["user_id"]
            self.storage["contacts"] = [
                i for i in self.storage["contacts"] if i["user_id"] != _id
            ]
        elif data["action"] == "add_contact":
            _id = random.randint(100, 10000)
            self.storage["contacts"].append(
                {"user_id": _id, "user_name": data["user_name"]})
        elif data["action"] == "new_chat":
            if data["chat_name"]:
                chat = {"chat_id": random.randint(
                    1, 1000), "chat_name": data["chat_name"]}
            else:
                # Unnamed (direct) chat: name it after the first selected
                # contact.  Fix: both emitters (ContactsDialog.chat_with and
                # NewChatDialog.chat_create) send ids under "chat_user_ids";
                # reading the nonexistent "contact_ids" raised KeyError.
                user_name = ""
                for user in self.storage["contacts"]:
                    if user["user_id"] == data["chat_user_ids"][0]:
                        user_name = user["user_name"]
                chat = {"chat_id": random.randint(
                    1, 1000), "chat_name": user_name}
            self.storage["chats"].append(chat)
        elif data["action"] == "msg":
            # Fake backend attributes every sent message to the current user.
            data["user_id"] = self.current_user["user_id"]
            self.storage["messages"].append(data)
        elif data["action"] == "leave":
            chats = self.storage["chats"]
            chats = [i for i in chats if i["chat_id"] != data["chat_id"]]
            self.storage["chats"] = chats
        elif data["action"] == "authenticate":
            self.ui.main_stack.setCurrentIndex(1)
        self.model_is_changed()

    def show_newchat_dialog(self):
        """Show the new-chat creation dialog."""
        dialog = self.newchat_dialog(self, self.get_contactlist())
        dialog.handle_msg.connect(self.get_handle_msg)
        dialog.exec_()

    def show_contacts_dialog(self):
        """Show the contacts dialog."""
        dialog = self.contacts_dialog(self, self.get_contactlist())
        dialog.handle_msg.connect(self.get_handle_msg)
        dialog.exec_()

    def draw_chatlist(self):
        """Redraw the chat list pane."""
        self.ui.lw_list_chats.clear()
        data = self.get_chatlist()
        for chat in data:
            self.createItem(chat)

    def createItem(self, chat):
        """Add one chat row: name label plus an 'X' leave button."""
        item = QtWidgets.QListWidgetItem(self.ui.lw_list_chats)
        widget = QtWidgets.QWidget()
        widgetText = QtWidgets.QLabel(chat["chat_name"])
        widgetButton = QtWidgets.QPushButton("X")
        # Stash the chat id on the button so del_chat knows its target.
        widgetButton.setProperty("id", chat["chat_id"])
        widgetButton.clicked.connect(self.del_chat)
        widgetButton.setFixedWidth(24)
        widgetLayout = QtWidgets.QHBoxLayout()
        widgetLayout.addWidget(widgetText)
        widgetLayout.addWidget(widgetButton)
        widget.setLayout(widgetLayout)
        item.setSizeHint(widget.sizeHint())
        self.ui.lw_list_chats.setItemWidget(item, widget)
        item.setData(QtCore.Qt.UserRole, chat["chat_id"])
        # Keep the active chat visibly selected across redraws.
        if self.current_chat["chat_id"] == chat["chat_id"]:
            self.ui.lw_list_chats.setCurrentItem(item)

    def del_chat(self):
        """Emit a message about leaving the clicked chat."""
        msg = {
            "action": "leave",
            "chat_id": self.sender().property("id")
        }
        self.handle_msg.emit(msg)

    def get_chatlist(self):
        """Return the list of chats.

        This method is meant to be overridden by a real client.
        """
        return self.storage["chats"]

    def get_contactlist(self):
        """Return the list of contacts.

        This method is meant to be overridden by a real client.
        """
        return self.storage["contacts"]

    def get_msgslist(self):
        """Return the messages of the active chat.

        This method is meant to be overridden by a real client.
        NOTE(review): the fake backend labels every message with the
        current user's name regardless of sender — confirm intended.
        """
        cur_msgs = [
            i for i in self.storage["messages"]
            if i["chat_id"] == self.current_chat["chat_id"]
        ]
        return [
            {
                "user_id": i["user_id"],
                "user_name": self.current_user["user_name"],
                "timestamp": i["timestamp"],
                "message": i["message"]
            }
            for i in cur_msgs
        ]

    def send_message(self):
        """Emit the typed message for the active chat and clear the input."""
        text = self.ui.te_input_msg.toPlainText()
        self.ui.te_input_msg.setText("")
        self.ui.te_input_msg.setFocus()
        if text:
            message = {
                "action": "msg",
                "chat_id": self.current_chat["chat_id"],
                "timestamp": time.time(),
                "message": text
            }
            self.handle_msg.emit(message)

    def draw_msgslist(self):
        """Redraw the message pane for the active chat."""
        messages = self.get_msgslist()
        chat_name = self.current_chat["chat_name"]
        self.ui.l_current_chat.setText(chat_name)
        curr_user_id = self.current_user["user_id"]
        arr = []
        for i, msg in enumerate(messages):
            formated_msg = self.format_msg(
                name=msg["user_name"],
                text=msg["message"],
                timestamp=msg["timestamp"],
                ident=msg["user_id"] == curr_user_id,
                # Collapse the header when consecutive messages share a sender.
                add=msg["user_id"] == messages[i -
                                               1]["user_id"] if i > 0 else False
            )
            arr.append(formated_msg)
        # Invisible anchor at the end lets us scroll to the latest message.
        msg_string = '<body bgcolor="#FFF">' + \
            "".join(arr) + '<a name="end" style="color:#FFF">a</a>' + '</html>'
        self.ui.te_list_msg.setHtml(msg_string)
        self.ui.te_list_msg.scrollToAnchor("end")

    def format_msg(self, name, text, timestamp, ident=False, add=False):
        """Format one message as HTML.

        :param ident: True when the message belongs to the current user
            (right-aligned margins, orange name).
        :param add: True to omit the name/timestamp header because the
            previous message came from the same sender.
        """
        template_text = '''
            <div style="margin:0 {right} 0 {left};">
                {text}
            </div>
            '''
        template_name = '''
            <div style="margin:15px {right} 0 {left};">
                <b style="color:{color};">
                    {name}
                </b>
                <i style="color:lightgrey;font-size:small;">
                    {timestamp}
                </i>
            </div>
            '''
        formated_name = template_name.format(
            left="5px" if ident else "25px",
            right="25px" if ident else "5px",
            color="orange" if ident else "blue",
            name=name,
            timestamp=time.ctime(timestamp),
        )
        formated_text = template_text.format(
            left="15px" if ident else "35px",
            right="25px" if ident else "5px",
            text=text.replace("\n", "<br>")
        )
        if add:
            return formated_text
        return formated_name + formated_text
if __name__ == "__main__":
    # Manual smoke test: launch the window standalone; it runs against its
    # built-in in-memory fake data, so no server is required.
    app = QtWidgets.QApplication(sys.argv)
    window = MainWindow()
    window.show()
    sys.exit(app.exec_())
/AutoMapDB-0.1.5-py3-none-any.whl/automapdb/db.py |
import atexit
from sqlalchemy import MetaData, create_engine
from sqlalchemy.engine import Connection, Engine
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.automap import AutomapBase, automap_base
from sqlalchemy.orm import DeclarativeMeta, Session
from sqlalchemy.orm.query import Query
from sqlalchemy.sql.base import ImmutableColumnCollection
from automapdb.utils import Singleton, log
class ProxyTable:
    # Thin convenience wrapper exposing commonly needed metadata of an
    # automapped SQLAlchemy table: name, schema, columns and dotted path.
    def __init__(self, schema: str, name: str, sa_table: DeclarativeMeta):
        """Simple wrapper around the SQLAlchemy Table"""
        self.name: str = name
        self.schema: str = schema
        self.sa_table: DeclarativeMeta = sa_table
        # Column collection of the underlying mapped Table object.
        self.columns: ImmutableColumnCollection = self.sa_table.__table__.columns
        # Dotted "schema.table" path, matching AutoMapDB.get_table's input.
        self.path: str = f"{schema}.{name}"

    def primary_keys(self) -> list:
        """Get primary keys from table metadata (as column names)"""
        return [c.name for c in self.columns if c.primary_key]

    def not_nullable(self) -> list:
        """Get not nullable fields from table metadata (as column names)"""
        return [c.name for c in self.columns if not c.nullable]
class AutoMapDB(metaclass=Singleton):
    """Manage Basic Database connection.

    Singleton wrapper around an SQLAlchemy engine/session pair with
    reflection-based automapping; one schema is mapped at a time via
    set_schema(), and CRUD helpers funnel through post_statement().
    """

    def __init__(self, db_url: str, options: dict = None, autocommit: bool = False):
        """Create basic database session through SQLAlchemy.

        :param db_url: SQLAlchemy database URL
        :param options: extra connect_args passed to create_engine
        :param autocommit: commit after every successful statement
        """
        log.debug(f"{self.__class__.__name__}")
        # When True, exceptions that triggered a rollback are re-raised.
        self.raise_on_rollback: bool = True
        self.autocommit: bool = autocommit
        self.metadata: MetaData = None
        # Set by post_statement(); makes post_db() commit at interpreter exit.
        self.commit: bool = False
        self.schema: str = None
        self.base: DeclarativeMeta = None
        # Instantiate engine, pass on options
        self.engine: Engine = create_engine(db_url, connect_args=options or {})
        log.debug(f"Connecting to database: {db_url}")
        # Open connection with engine
        self.connection: Connection = self.engine.connect()
        # Create session
        self.session: Session = Session(
            self.engine, autoflush=True, expire_on_commit=False
        )
        log.debug("Connection and Session opened.")

    def set_schema(self, schema: str):
        """
        Create metadata object from schema, map base to schema
        I found it impossible to map a base across all postgres schemas or
        use dotted table paths. Therefore an sa_table object needs to set
        which schema it belongs to before querying.
        """
        # Skip if schema is already set
        if self.schema == schema:
            return
        self.schema = schema
        # Create MetaData object with schema name
        self.metadata: MetaData = MetaData(schema=self.schema)
        # Obtain table metadata through reflection
        self.metadata.reflect(self.engine)
        # Create Base object
        self.base: AutomapBase = automap_base(metadata=self.metadata)
        # Create mapper from MetaData
        self.base.prepare()
        log.debug(f"Schema {self.schema} mapped")

    def get_table(self, path) -> ProxyTable:
        """Set schema and create ProxyTable from AutomapBase.

        :param path: dotted "schema.table" string
        :raises Exception: when the path does not split into two parts
        """
        try:
            schema, tab_name = path.split(".")
        except ValueError:
            msg = f"Invalid path: '{path}'. Does it contain 'schema.table'?"
            raise Exception(msg)
        # Update database search path
        self.set_schema(schema)
        # Get SQLAlchemy sa_table object through db object
        sa_table = getattr(self.base.classes, tab_name)
        return ProxyTable(schema, tab_name, sa_table)

    def post_statement(self, force_commit: bool = None) -> bool:
        """Configurable hook called after each CRUD operation.

        Flushes pending statements so SQL errors surface immediately and
        marks the session commit-worthy; SQLAlchemy errors are logged,
        other exceptions trigger a rollback (re-raised when
        raise_on_rollback is set).  Returns True/False only when a commit
        is attempted (force_commit or autocommit); otherwise None.
        """
        try:
            # Push pending operations to the database
            self.session.flush()
            # Set commit switch to trigger commit later
            self.commit = True
        # Catch SQL errors early
        except SQLAlchemyError as err:
            msg = f"{err.__cause__.__class__.__name__}: {err.__cause__}"
            log.error(msg.replace("\n", " "))
        except Exception as err:
            log.error(repr(err))
            self.session.rollback()
            self.commit = False
            log.warning("Rollback done!")
            if self.raise_on_rollback:
                raise err
        if force_commit or self.autocommit:
            try:
                log.info("Trying autocommit...")
                self.session.commit()
                return True
            except Exception as err:
                msg = f"{err.__cause__.__class__.__name__}: {err.__cause__}"
                log.error(msg.replace("\n", " "))
                self.session.rollback()
                return False

    def execute(self, query: Query, *args) -> bool:
        """Execute a raw statement on the session, then run post_statement()"""
        self.session.execute(query, *args)
        return self.post_statement()

    def add(self, query: Query) -> bool:
        """Add `CREATE` query to psql session and flushes handler"""
        self.session.add(query)
        return self.post_statement()

    def update(self, query: Query, data: dict) -> bool:
        """Add `UPDATE` query to the session and flushes the connection"""
        query.update(data)
        return self.post_statement()

    def delete(self, query: Query) -> bool:
        """Add `DELETE` query to psql session and flushes handler"""
        self.session.delete(query)
        return self.post_statement()
# Ugly hack to ensure transactions are committed on exit
@atexit.register
def post_db():
    """Commit the pending transaction (if any) at interpreter shutdown."""
    try:
        # Singleton metaclass returns the already-created instance, so the
        # None url is ignored in that case.
        # NOTE(review): when no instance exists, this relies on the fresh
        # construction with db_url=None failing with AttributeError —
        # confirm against automapdb.utils.Singleton / create_engine.
        db = AutoMapDB(None)
    except AttributeError:
        log.debug("Looks like no database is connected... Closing.")
        return
    log.debug(f"Closing {db.engine}...")
    # Only commit when post_statement() flagged pending, flushed work.
    if db.commit:
        try:
            db.session.commit()
        except SQLAlchemyError as err:
            log.error(err)
            print(err)
/NEMO_billing-2.6.7-py3-none-any.whl/NEMO_billing/invoices/views/project.py | from collections import defaultdict
from datetime import date, datetime, timedelta
from typing import Dict, Set
from NEMO.decorators import accounting_or_user_office_or_manager_required
from NEMO.forms import ProjectForm
from NEMO.models import Account, ActivityHistory, MembershipHistory, Project, ProjectDocuments, User
from NEMO.utilities import render_combine_responses
from NEMO.views.accounts_and_projects import select_accounts_and_projects
from NEMO.views.customization import ProjectsAccountsCustomization
from django.contrib.auth.decorators import login_required, permission_required
from django.forms import models
from django.http import HttpResponse
from django.shortcuts import redirect, render
from django.views.decorators.http import require_GET, require_http_methods
from NEMO_billing.invoices.customization import BillingCustomization
from NEMO_billing.invoices.models import ProjectBillingDetails
from NEMO_billing.invoices.utilities import render_and_send_email
from NEMO_billing.rates.models import RateCategory
class ProjectDetailsForm(models.ModelForm):
    """ModelForm for ProjectBillingDetails; the owning project is set by
    the view, so the "project" field is excluded from the form."""

    class Meta:
        model = ProjectBillingDetails
        exclude = ["project"]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Limit the selectable staff hosts to active staff members.
        self.fields["staff_host"].queryset = User.objects.filter(is_active=True, is_staff=True)
@accounting_or_user_office_or_manager_required
@require_http_methods(["GET", "POST"])
def edit_project(request, project_id=None):
    """Create (``project_id`` is None) or edit a project plus its billing details.

    On POST both the core ``ProjectForm`` and the ``ProjectDetailsForm`` must
    validate. After saving, membership history is recorded when the account
    changes (or on creation), activity history when the active flag changes
    (or on creation), and project document uploads/removals are applied.
    """
    try:
        project = Project.objects.get(id=project_id)
    except (Project.DoesNotExist, ValueError):
        project = None
    try:
        project_details = project.projectbillingdetails
    except (ProjectBillingDetails.DoesNotExist, AttributeError):
        # AttributeError covers the creation case where project is None.
        project_details = ProjectBillingDetails(project=project)
    form = ProjectForm(request.POST or None, instance=project)
    details_form = ProjectDetailsForm(request.POST or None, instance=project_details)
    dictionary = {
        "user_list": User.objects.filter(is_active=True),
        "rate_categories": RateCategory.objects.all(),
        "allow_document_upload": ProjectsAccountsCustomization.get_bool("project_allow_document_upload"),
        "form": form,
        "form_details": details_form,
    }
    if request.method == "GET":
        return render(request, "invoices/project/edit_project.html", dictionary)
    elif request.method == "POST":
        if not form.is_valid() or not details_form.is_valid():
            if request.FILES.getlist("project_documents") or request.POST.get("remove_documents"):
                # Uploaded files cannot be re-rendered into the form, so warn the user.
                form.add_error(field=None, error="Project document changes were lost, please resubmit them.")
            return render(request, "invoices/project/edit_project.html", dictionary)
        else:
            project = form.save()
            details_form.instance.project = project
            details_form.save()
            active_changed = form.initial.get("active", None) != project.active
            account_changed = form.initial.get("account", None) != project.account.id
            if not project_id or account_changed:
                if project_id and account_changed:
                    # Record removal from the previous account before adding to the new one.
                    removed_account_history = MembershipHistory()
                    removed_account_history.authorizer = request.user
                    removed_account_history.action = MembershipHistory.Action.REMOVED
                    removed_account_history.child_content_object = project
                    removed_account_history.parent_content_object = Account.objects.get(id=form.initial["account"])
                    removed_account_history.save()
                account_history = MembershipHistory()
                account_history.authorizer = request.user
                account_history.action = MembershipHistory.Action.ADDED
                account_history.child_content_object = project
                account_history.parent_content_object = project.account
                account_history.save()
            if not project_id or active_changed:
                project_history = ActivityHistory()
                project_history.authorizer = request.user
                project_history.action = project.active
                project_history.content_object = project
                project_history.save()
            # Handle file uploads
            for f in request.FILES.getlist("project_documents"):
                ProjectDocuments.objects.create(document=f, project=project)
            ProjectDocuments.objects.filter(id__in=request.POST.getlist("remove_documents")).delete()
            return redirect("project", project.id)
@accounting_or_user_office_or_manager_required
@require_http_methods(["GET", "POST"])
def custom_project_view(request, kind=None, identifier=None):
    """Render the stock accounts/projects page combined with billing extras."""
    base_response = select_accounts_and_projects(request, kind=kind, identifier=identifier)
    # Map the view "kind" to the queryset lookup selecting the relevant projects.
    lookups_by_kind = {"project": {"id": identifier}, "account": {"account_id": identifier}}
    lookup = lookups_by_kind.get(kind)
    projects = Project.objects.filter(**lookup) if lookup is not None else []
    return render_combine_responses(
        request,
        base_response,
        "invoices/project/view_project_additional_info.html",
        {"projects": projects, "rate_categories": RateCategory.objects.exists()},
    )
@login_required
@require_GET
@permission_required("NEMO.trigger_timed_services", raise_exception=True)
def deactivate_expired_projects(request):
    """Timed-service endpoint: deactivate projects whose billing expiry has passed."""
    return do_deactivate_expired_projects()
def do_deactivate_expired_projects():
    """Deactivate active projects past their billing expiration, then send reminders."""
    expired_projects = Project.objects.filter(
        active=True, projectbillingdetails__expires_on__lt=datetime.now()
    )
    for expired_project in expired_projects:
        expired_project.active = False
        expired_project.save()
    # Upcoming (not yet expired) projects get their reminder emails in the same run.
    send_project_expiration_reminders()
    return HttpResponse()
def send_project_expiration_reminders():
    """Email each project contact a reminder ahead of the project's expiration date.

    Reads the comma-separated day offsets from the
    ``billing_project_expiration_reminder_days`` customization; for every
    offset, finds active projects expiring exactly that many days from today,
    groups them per recipient, and sends one email per recipient listing all
    of their expiring projects sorted by soonest expiration.
    """
    accounting_email = BillingCustomization.get("billing_accounting_email_address")
    expiration_reminder_days = BillingCustomization.get("billing_project_expiration_reminder_days")
    if expiration_reminder_days:
        project_expiration_reminder_cc = BillingCustomization.get("billing_project_expiration_reminder_cc")
        email_projects: Dict[str, Set[Project]] = defaultdict(set)
        ccs = [e for e in project_expiration_reminder_cc.split(",") if e]
        # Skip blank segments (e.g. a trailing comma in the setting) so a sloppy
        # value cannot crash this timed service with a ValueError -- consistent
        # with how the cc list above is parsed.
        reminder_days = [int(days) for days in expiration_reminder_days.split(",") if days.strip()]
        for remaining_days in reminder_days:
            expiration_date = date.today() + timedelta(days=remaining_days)
            for project in Project.objects.filter(active=True, projectbillingdetails__expires_on=expiration_date):
                send_to = project.projectbillingdetails.email_to()
                for email_pi in send_to:
                    email_projects[email_pi].add(project)
        for pi_email, projects in email_projects.items():
            sorted_projects = sorted(projects, key=lambda x: x.projectbillingdetails.expires_on)
            render_and_send_email(
                "invoices/email/billing_project_expiration_reminder_email",
                {"projects": sorted_projects, "today": datetime.today().date()},
                to=[pi_email],
                from_email=accounting_email,
                cc=ccs,
            )
/MaterialDjango-0.2.5.tar.gz/MaterialDjango-0.2.5/bower_components/paper-dialog/.github/ISSUE_TEMPLATE.md | <!-- Instructions: https://github.com/PolymerElements/paper-dialog/CONTRIBUTING.md#filing-issues -->
### Description
<!-- Example: The `paper-foo` element causes the page to turn pink when clicked. -->
### Expected outcome
<!-- Example: The page stays the same color. -->
### Actual outcome
<!-- Example: The page turns pink. -->
### Live Demo
<!-- Example: https://jsbin.com/cagaye/edit?html,output -->
### Steps to reproduce
<!-- Example
1. Put a `paper-foo` element in the page.
2. Open the page in a web browser.
3. Click the `paper-foo` element.
-->
### Browsers Affected
<!-- Check all that apply -->
- [ ] Chrome
- [ ] Firefox
- [ ] Safari 9
- [ ] Safari 8
- [ ] Safari 7
- [ ] Edge
- [ ] IE 11
- [ ] IE 10
| PypiClean |
/OSIx-0.4.2.tar.gz/OSIx-0.4.2/README.md | # OSIx - **O**pen **S**ource **I**ntelligence e**X**plorer - V0.0.3
[](https://github.com/guibacellar/OSIx/actions/workflows/qa.yml)




<!-- TABLE OF CONTENTS -->
<details open="open">
<summary>Table of Contents</summary>
<ol>
<li>
<a href="#about-the-project">About The Project</a>
</li>
<li>
<a href="#getting-started">Getting Started</a>
<ul>
<li><a href="#prerequisites">Prerequisites</a></li>
<li><a href="#installation">Installation</a></li>
</ul>
</li>
<li><a href="#available-modules">Available Modules</a></li>
<li><a href="#usage">Usage</a></li>
<li><a href="#roadmap">Roadmap</a></li>
<li><a href="#contributing">Contributing</a></li>
<li><a href="#license">License</a></li>
<li><a href="#contact">Contact</a></li>
</ol>
</details>
<!-- ABOUT THE PROJECT -->
## About The Project
OSIx is an OSINT (Open Source Intelligence) tool created to help Researchers, Investigators and Law Enforcement Agents to Collect and Process Open Data.
Created in Python and using a Modular Architecture, OSIx easily allows adding new modules to enrich the available functionalities.
<!-- GETTING STARTED -->
## Getting Started
### Prerequisites
* Python 3.6.7+
### Installation
**Stable**
```bash
wget https://github.com/guibacellar/OSIx/archive/master.zip
unzip -o master.zip
mv OSIx-master/ OSIx
pip3 install -r OSIx/requirements.txt
rm -rf master.zip
```
**In Development**
```bash
wget https://github.com/guibacellar/OSIx/archive/develop.zip
unzip -o develop.zip
mv OSIx-develop/ OSIx
pip3 install -r OSIx/requirements.txt
rm -rf develop.zip
```
<!-- AVAILABLE MODULES -->
## Available Modules
* Username Search
* [Username Searcher](docs/module_username.md) - Allow to find the Specified Username in 150+ Websites (Including NSFW Ones) and Export a CSV file with the Founded Entries;
* GitHub Username Grabber - Download GitHub Data from Specified Username (Profile Data, Location, Repositories, Followers and Following Accounts);
* [Bitcoin Wallet Info & Transactions](docs/module_btc_wallet.md) - Download the Bitcoin Transactions from a Wallet and Generates Graphs for Visualization (Gephi and GraphML Compatible);
<!-- USAGE EXAMPLES -->
## Usage
### Command Line
```bash
usage: OSIx.py [-h] [--job_name JOB_NAME] [--purge_temp_files]
[--btc_wallet BTC_WALLET] [--btc_get_transactions]
[--export_btc_transactions_as_graphml]
[--export_btc_transactions_as_gephi] [--username USERNAME]
[--username_scan] [--username_allow_nsfw_scan]
[--username_print_result] [--username_show_all]
[--username_enable_dump_file]
optional arguments:
-h, --help show this help message and exit
--job_name JOB_NAME Job Name. Used to Save/Restore State File.
--purge_temp_files Force Delete All Temporary Files
--btc_wallet BTC_WALLET
BitCoin Wallet Address
--btc_get_transactions
Allow to Download All BitCoin Transactions from Wallet
--export_btc_transactions_as_graphml
Allow to Export the BitCoin Transactions as GraphML
--export_btc_transactions_as_gephi
Allow to Export the BitCoin Transactions as Gephi File
--username USERNAME Username to Search
--username_scan Allow the Executor to Scan the the Username in All
Social Networks and WebSites
--username_allow_nsfw_scan
Allow the Executor to Scan the NSFW WebSites
--username_print_result
Allow to Print the Result in sysout
--username_show_all Allow to Print all Results, otherwise, Print Only the
Founded Ones.
--username_enable_dump_file
Allow to Dump a Result file into data/export Folder.
```
**Jobname**
The *job_name* parameter allow to specify a job name to the executor and the executor will save a state file with all parameters and configurations.
```bash
python OSIx.py --job_name MY_JOB
```
**Purge All Temporary Files**
The *purge_temp_files* parameter tell's to the executor to cleanup all generated temporary files.
```bash
python OSIx.py --purge_temp_files
```
**Output Example**
```bash
python3 OSIx.py \
--username marcos --username_allow_nsfw_scan \
--username_print_result --username_enable_dump_file
OSIx - Open Source Intelligence eXplorer
Version: 0.0.2
By: Th3 0bservator
[*] Loading Configurations:
[*] Installed Modules:
bitcoin_wallet.py
bitcoin_wallet_graph.py
http_navigation_manager.py
input_args_handler.py
state_file_handler.py
temp_file_manager.py
username_handler.py
[*] Executing Pipeline:
[+] input_args_handler.InputArgsHandler
job_name = dev_001
purge_temp_files = False
btc_wallet =
btc_get_transactions =
export_btc_transactions_as_graphml = False
export_btc_transactions_as_gephi = True
username = marcos
username_allow_nsfw_scan = True
username_print_result = True
username_show_all = False
username_enable_dump_file = True
[+] temp_file_manager.TempFileManager
Checking Age data/temp/state for 31557600 seconds
Checking Age data/temp/bitcoin_wallet for 604800 seconds
Checking Age data/temp/username_search for 604800 seconds
[+] state_file_handler.LoadStateFileHandler
[+] http_navigation_manager.HttpNavigationManagerHandler
[+] bitcoin_wallet.BitcoinWalletInfoDownloader
Target BTC Wallet Empty.
[+] bitcoin_wallet.BitcoinWalletTransactionsDownloader
[+] bitcoin_wallet_graph.BitcoinWalletGraphGenerator
Target BTC Wallet Empty.
[+] username_handler.UsernameScanner
NSFW Sites Allowed.
Starting Scan with 20 Workers.
7Cups: Claimed > https://www.7cups.com/@marcos
9GAG: Claimed > https://www.9gag.com/u/marcos
About.me: Claimed > https://about.me/marcos
Academia.edu: Claimed > https://independent.academia.edu/marcos
Asciinema: Claimed > https://asciinema.org/~marcos
AskFM: Claimed > https://ask.fm/marcos
Atom Discussions: Claimed > https://discuss.atom.io/u/marcos/summary
Audiojungle: Claimed > https://audiojungle.net/user/marcos
Avizo: Claimed > https://www.avizo.cz/marcos/
BLIP.fm: Claimed > https://blip.fm/marcos
Bandcamp: Claimed > https://www.bandcamp.com/marcos
Behance: Claimed > https://www.behance.net/marcos
BitBucket: Claimed > https://bitbucket.org/marcos/
Blogger: Claimed > https://marcos.blogspot.com
BodyBuilding: Claimed > https://bodyspace.bodybuilding.com/marcos
Bookcrossing: Claimed > https://www.bookcrossing.com/mybookshelf/marcos/
BuzzFeed: Claimed > https://buzzfeed.com/marcos
CNET: Claimed > https://www.cnet.com/profiles/marcos/
CapFriendly: Claimed > https://www.capfriendly.com/users/marcos
Carbonmade: Claimed > https://marcos.carbonmade.com
Career.habr: Claimed > https://career.habr.com/marcos
Championat: Claimed > https://www.championat.com/user/marcos
Chatujme.cz: Claimed > https://profil.chatujme.cz/marcos
Cloob: Claimed > https://www.cloob.com/name/marcos
Codecademy: Claimed > https://www.codecademy.com/profiles/marcos
Codechef: Claimed > https://www.codechef.com/users/marcos
Coroflot: Claimed > https://www.coroflot.com/marcos
DEV Community: Claimed > https://dev.to/marcos
Designspiration: Claimed > https://www.designspiration.net/marcos/
DeviantART: Claimed > https://marcos.deviantart.com
[+] state_file_handler.SaveStateFileHandler
```
<!-- ROADMAP -->
## Roadmap
See the [open issues](https://github.com/guibacellar/OSIx/issues) for a list of proposed features (and known issues).
<!-- CONTRIBUTING -->
## Contributing
Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.
1. Fork the Project
2. Create your Feature Branch (`git checkout -b feature/AmazingFeature`)
3. Commit your Changes (`git commit -m 'Add some AmazingFeature'`)
4. Push to the Branch (`git push origin feature/AmazingFeature`)
5. Open a Pull Request
<!-- LICENSE -->
## License
Distributed under the Apache License. See `LICENSE` for more information.
<!-- CONTACT -->
## Contact
**Th3 0bservator**
[](https://www.theobservator.net/)
[](https://twitter.com/th3_0bservator)
[](https://github.com/guibacellar/)
[](https://www.linkedin.com/in/guilherme-bacellar/)
| PypiClean |
/Lin-CMS-0.4.11.tar.gz/Lin-CMS-0.4.11/src/lin/logger.py | import re
from functools import wraps
from flask import Response, request
from flask_jwt_extended import get_current_user
from sqlalchemy import Column, Integer, String, func
from .db import db
from .interface import InfoCrud
from .manager import manager
class Log(InfoCrud):
    """ORM model recording one user action (one handled HTTP request) for auditing."""

    __tablename__ = "lin_log"
    id = Column(Integer(), primary_key=True)
    message = Column(String(450), comment="日志信息")
    user_id = Column(Integer(), nullable=False, comment="用户id")
    username = Column(String(24), comment="用户当时的昵称")
    status_code = Column(Integer(), comment="请求的http返回码")
    method = Column(String(20), comment="请求方法")
    path = Column(String(50), comment="请求路径")
    permission = Column(String(100), comment="访问哪个权限")

    @property
    def time(self):
        # Creation time as milliseconds since the epoch (convenient for JS clients).
        return int(round(self.create_time.timestamp() * 1000))

    @classmethod
    def select_by_conditions(cls, **kwargs) -> list:
        """
        Select logs matching the given conditions.

        Conditions may be any column of this table, plus three special keys:
        ``start`` / ``end`` bound the creation time, and ``keyword`` performs a
        fuzzy (SQL LIKE) match against the ``message`` column.
        """
        conditions = dict()
        # Keep only recognised keys from the caller-supplied kwargs.
        avaliable_keys = [c for c in vars(Log).keys() if not c.startswith("_")] + [
            "start",
            "end",
            "keyword",
        ]
        for key, value in kwargs.items():
            if key in avaliable_keys:
                conditions[key] = value
        # NOTE(review): "soft" appears to enable Lin's soft-delete filtering -- confirm in InfoCrud.
        query = cls.query.filter_by(soft=True)
        # Apply (and consume) the special non-column filters first.
        if conditions.get("start"):
            query = query.filter(cls.create_time > conditions.get("start"))
            del conditions["start"]
        if conditions.get("end"):
            query = query.filter(cls.create_time < conditions.get("end"))
            del conditions["end"]
        if conditions.get("keyword"):
            query = query.filter(
                cls.message.like(
                    "%{keyword}%".format(keyword=conditions.get("keyword"))
                )
            )
            del conditions["keyword"]
        # Remaining keys are plain column equality filters, newest entries first.
        query = (
            query.filter_by(**conditions)
            .group_by(cls.create_time)
            .order_by(cls.create_time.desc())
        )
        logs = query.all()
        return logs

    @classmethod
    def get_usernames(cls) -> list:
        """Return the distinct usernames present in non-deleted log rows."""
        result = (
            db.session.query(cls.username)
            .filter(cls.is_deleted == False)
            .group_by(cls.username)
            .having(func.count(cls.username) > 0)
        )
        # Flatten rows of 1-tuples, e.g. [('a',), ('b',)] -> ['a', 'b'].
        usernames = [x[0] for x in result.all()]
        return usernames

    @staticmethod
    def create_log(**kwargs):
        """Build a Log from keyword args (unknown keys are ignored) and add it to the session.

        The session is committed only when ``commit=True`` is passed explicitly.
        """
        log = Log()
        for key in kwargs.keys():
            if hasattr(log, key):
                setattr(log, key, kwargs[key])
        db.session.add(log)
        if kwargs.get("commit") is True:
            db.session.commit()
        return log
# Matches the placeholder names inside "{...}" markers of a log message template.
REG_XP = r"[{](.*?)[}]"
# Objects whose attributes may be interpolated into a template placeholder.
OBJECTS = ["user", "response", "request"]
class Logger(object):
    """Decorator that records a user-behaviour log entry for the wrapped view.

    The message template may interpolate attributes of the current ``user``,
    the view ``response`` or the flask ``request`` via ``{obj.attr}`` markers.
    """

    # Subclasses may pre-set a message template instead of passing one in.
    template = None

    def __init__(self, template=None):
        if template:
            self.template: str = template
        elif self.template is None:
            raise Exception("template must not be None!")
        self.message = ""
        self.response = None
        self.user = None

    def __call__(self, func):
        @wraps(func)
        def wrap(*args, **kwargs):
            # Run the view first, then log based on its outcome.
            result: Response = func(*args, **kwargs)
            self.response = result
            self.user = get_current_user()
            if not self.user:
                raise Exception("Logger must be used in the login state")
            self.message = self._parse_template()
            self.write_log()
            return result

        return wrap

    def write_log(self):
        """Persist the rendered message together with request metadata."""
        info = manager.find_info_by_ep(request.endpoint)
        permission = info.name if info is not None else ""
        # Prefer a real HTTP status code, fall back to an APIException-style
        # ``code`` attribute, and finally to 0 when neither exists.
        status_code = getattr(self.response, "status_code", None)
        if status_code is None:
            status_code = getattr(self.response, "code", None)
        if status_code is None:
            status_code = 0
        Log.create_log(
            message=self.message,
            user_id=self.user.id,
            username=self.user.username,
            status_code=status_code,
            method=request.method,
            path=request.path,
            permission=permission,
            commit=True,
        )

    def _parse_template(self):
        """Substitute every ``{obj.prop}`` marker found in the template."""
        rendered = self.template
        for marker in re.findall(REG_XP, rendered):
            assert "." in marker, "%s中必须包含 . ,且为一个" % marker
            dot = marker.rindex(".")
            obj = marker[:dot]
            assert obj in OBJECTS, "%s只能为user,response,request中的一个" % obj
            prop = marker[dot + 1 :]
            if obj == "user":
                source = self.user
            elif obj == "response":
                source = self.response
            else:
                source = request
            rendered = rendered.replace("{%s}" % marker, str(getattr(source, prop, "")))
        return rendered
/CCC-2.0.1.tar.gz/CCC-2.0.1/ccc/static/crm/js/common/browser-calls.js | const TwilioMixin = {
data () {
return {
call_status : 'Initializing...',
in_call: false,
is_ringing: false,
outgoing_recipient: '',
// For answering call, we assign this so we can accept through it with the accept button
connection: null,
duration: 0,
timer: null,
isMute: false
}
},
computed: {
getDuration () {
let mins = (parseInt(this.duration / 60, 10)).toString()
let seconds = (this.duration % 60).toString()
mins = mins.length > 1 ? mins : ('0' + mins)
seconds = seconds.length > 1 ? seconds : ('0' + seconds)
return mins + ':' + seconds
}
},
watch: {
isMute (newValue) {
this.connection.mute(newValue)
}
},
methods: {
initTwilio () {
HTTPClient.get('/api/marketing/autodialer/dial/get_token/', {forPage: window.location.pathname})
.then(response => {
Twilio.Device.setup(response.data.token, {debug:true})
Twilio.Device.ready(device => {
this.call_status = 'Ready'
});
/* Report any errors to the call status display */
Twilio.Device.error(error => {
this.call_status = 'Error: ' + error.message
if (error.code === 31205) { // If the token is expired
window.location.reload()
}
//TODO: Check if the twilio client has encountered an error , if the error is due to network issue, pause the autodialer
});
this.listenDeviceConnect()
this.listenDeviceDisconnect()
this.listenIncomingCall()
})
},
listenDeviceDisconnect () {
/* Callback for when a call ends */
Twilio.Device.disconnect(connection => {
this.call_status = "Ready";
this.connection = null;
this.outgoing_recipient = ''
if (this.autoDialingInProgress && typeof this.autoDialPosition === 'number') {
if (this.startCountdown) {
this.startCountdown()
}
let app = this
setTimeout(() => {
if (!app.autoDialingPaused) {
app.autoDialPosition += 1
}
}, this.callInterval * 1000)
}
UIkit.modal('#callingModal').hide()
});
},
listenDeviceConnect () {
Twilio.Device.connect(connection => {
this.in_call = true
this.connection = connection
this.startTimer()
UIkit.modal('#callingModal').show()
// If phoneNumber is part of the connection, this is a call from a
// support agent to a customer's phone
if ("PhoneNumber" in connection.message) {
this.call_status = "In call with " + connection.message.PhoneNumber;
} else {
// This is a call from a website user to a support agent
this.call_status = "In call with support";
}
});
},
listenIncomingCall () {
Twilio.Device.incoming(connection => {
this.call_status = "Incoming support call";
this.is_ringing = true
UIkit.modal('#callingModal').show()
// Set a callback to be executed when the connection is accepted
connection.accept(() => {
this.call_status = "In call with customer";
});
this.connection = connection
});
},
answerPhone() {
this.connection.accept()
this.is_ringing = false
},
callCustomer (to, from) {
this.call_status = 'Calling Customer ' + to + '...'
this.outgoing_recipient = to
let params = {"PhoneNumber": to, "from_no": from};
Twilio.Device.connect(params);
},
callBackFromCampaign (to, from) {
this.call_status = 'Calling ' + to + '...'
let params = {"PhoneNumber": to, "from_no": from};
Twilio.Device.connect(params);
},
callSupport () {
Twilio.Device.connect();
},
hangUp () {
this.in_call = false
Twilio.Device.disconnectAll();
this.stopTimer()
},
rejectCall () {
this.connection.reject()
this.is_ringing = false
},
toggleMute () {
this.isMute = !this.isMute
},
startTimer () {
this.timer = setInterval(this.updateDuration, 1000)
},
stopTimer () {
clearInterval(this.timer)
this.duration = 0
},
modalToggleListener () {
let app = this
UIkit.util.on('#callingModal', 'hide', function () {
if (app.connection) {
document.querySelector('#floating-call').classList.remove('uk-hidden')
}
else {
document.querySelector('#floating-call').classList.add('uk-hidden')
}
})
UIkit.util.on('#callingModal', 'show', function () {
document.querySelector('#floating-call').classList.add('uk-hidden')
})
},
expandCall () {
UIkit.modal('#callingModal').show()
},
updateDuration () {
this.duration += 1
}
},
mounted () {
this.initTwilio()
this.modalToggleListener()
}
} | PypiClean |
/GnuCash%20Web-0.1.0rc10.tar.gz/GnuCash Web-0.1.0rc10/gnucash_web/static/bootstrap/js/bootstrap.min.js | !function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e(require("@popperjs/core")):"function"==typeof define&&define.amd?define(["@popperjs/core"],e):(t="undefined"!=typeof globalThis?globalThis:t||self).bootstrap=e(t.Popper)}(this,(function(t){"use strict";function e(t){if(t&&t.__esModule)return t;const e=Object.create(null);if(t)for(const i in t)if("default"!==i){const s=Object.getOwnPropertyDescriptor(t,i);Object.defineProperty(e,i,s.get?s:{enumerable:!0,get:()=>t[i]})}return e.default=t,Object.freeze(e)}const i=e(t),s="transitionend",n=t=>{let e=t.getAttribute("data-bs-target");if(!e||"#"===e){let i=t.getAttribute("href");if(!i||!i.includes("#")&&!i.startsWith("."))return null;i.includes("#")&&!i.startsWith("#")&&(i=`#${i.split("#")[1]}`),e=i&&"#"!==i?i.trim():null}return e},o=t=>{const e=n(t);return e&&document.querySelector(e)?e:null},r=t=>{const e=n(t);return e?document.querySelector(e):null},a=t=>{t.dispatchEvent(new Event(s))},l=t=>!(!t||"object"!=typeof t)&&(void 0!==t.jquery&&(t=t[0]),void 0!==t.nodeType),c=t=>l(t)?t.jquery?t[0]:t:"string"==typeof t&&t.length>0?document.querySelector(t):null,h=(t,e,i)=>{Object.keys(i).forEach((s=>{const n=i[s],o=e[s],r=o&&l(o)?"element":null==(a=o)?`${a}`:{}.toString.call(a).match(/\s([a-z]+)/i)[1].toLowerCase();var a;if(!new RegExp(n).test(r))throw new TypeError(`${t.toUpperCase()}: Option "${s}" provided type "${r}" but expected type "${n}".`)}))},d=t=>!(!l(t)||0===t.getClientRects().length)&&"visible"===getComputedStyle(t).getPropertyValue("visibility"),u=t=>!t||t.nodeType!==Node.ELEMENT_NODE||!!t.classList.contains("disabled")||(void 0!==t.disabled?t.disabled:t.hasAttribute("disabled")&&"false"!==t.getAttribute("disabled")),g=t=>{if(!document.documentElement.attachShadow)return null;if("function"==typeof t.getRootNode){const e=t.getRootNode();return e instanceof 
ShadowRoot?e:null}return t instanceof ShadowRoot?t:t.parentNode?g(t.parentNode):null},_=()=>{},f=t=>{t.offsetHeight},p=()=>{const{jQuery:t}=window;return t&&!document.body.hasAttribute("data-bs-no-jquery")?t:null},m=[],b=()=>"rtl"===document.documentElement.dir,v=t=>{var e;e=()=>{const e=p();if(e){const i=t.NAME,s=e.fn[i];e.fn[i]=t.jQueryInterface,e.fn[i].Constructor=t,e.fn[i].noConflict=()=>(e.fn[i]=s,t.jQueryInterface)}},"loading"===document.readyState?(m.length||document.addEventListener("DOMContentLoaded",(()=>{m.forEach((t=>t()))})),m.push(e)):e()},y=t=>{"function"==typeof t&&t()},E=(t,e,i=!0)=>{if(!i)return void y(t);const n=(t=>{if(!t)return 0;let{transitionDuration:e,transitionDelay:i}=window.getComputedStyle(t);const s=Number.parseFloat(e),n=Number.parseFloat(i);return s||n?(e=e.split(",")[0],i=i.split(",")[0],1e3*(Number.parseFloat(e)+Number.parseFloat(i))):0})(e)+5;let o=!1;const r=({target:i})=>{i===e&&(o=!0,e.removeEventListener(s,r),y(t))};e.addEventListener(s,r),setTimeout((()=>{o||a(e)}),n)},w=(t,e,i,s)=>{let n=t.indexOf(e);if(-1===n)return t[!i&&s?t.length-1:0];const o=t.length;return n+=i?1:-1,s&&(n=(n+o)%o),t[Math.max(0,Math.min(n,o-1))]},A=/[^.]*(?=\..*)\.|.*/,T=/\..*/,C=/::\d+$/,k={};let L=1;const S={mouseenter:"mouseover",mouseleave:"mouseout"},O=/^(mouseenter|mouseleave)/i,N=new Set(["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"]);function D(t,e){return e&&`${e}::${L++}`||t.uidEvent||L++}function I(t){const e=D(t);return 
t.uidEvent=e,k[e]=k[e]||{},k[e]}function P(t,e,i=null){const s=Object.keys(t);for(let n=0,o=s.length;n<o;n++){const o=t[s[n]];if(o.originalHandler===e&&o.delegationSelector===i)return o}return null}function x(t,e,i){const s="string"==typeof e,n=s?i:e;let o=H(t);return N.has(o)||(o=t),[s,n,o]}function M(t,e,i,s,n){if("string"!=typeof e||!t)return;if(i||(i=s,s=null),O.test(e)){const t=t=>function(e){if(!e.relatedTarget||e.relatedTarget!==e.delegateTarget&&!e.delegateTarget.contains(e.relatedTarget))return t.call(this,e)};s?s=t(s):i=t(i)}const[o,r,a]=x(e,i,s),l=I(t),c=l[a]||(l[a]={}),h=P(c,r,o?i:null);if(h)return void(h.oneOff=h.oneOff&&n);const d=D(r,e.replace(A,"")),u=o?function(t,e,i){return function s(n){const o=t.querySelectorAll(e);for(let{target:r}=n;r&&r!==this;r=r.parentNode)for(let a=o.length;a--;)if(o[a]===r)return n.delegateTarget=r,s.oneOff&&$.off(t,n.type,e,i),i.apply(r,[n]);return null}}(t,i,s):function(t,e){return function i(s){return s.delegateTarget=t,i.oneOff&&$.off(t,s.type,e),e.apply(t,[s])}}(t,i);u.delegationSelector=o?i:null,u.originalHandler=r,u.oneOff=n,u.uidEvent=d,c[d]=u,t.addEventListener(a,u,o)}function j(t,e,i,s,n){const o=P(e[i],s,n);o&&(t.removeEventListener(i,o,Boolean(n)),delete e[i][o.uidEvent])}function H(t){return t=t.replace(T,""),S[t]||t}const $={on(t,e,i,s){M(t,e,i,s,!1)},one(t,e,i,s){M(t,e,i,s,!0)},off(t,e,i,s){if("string"!=typeof e||!t)return;const[n,o,r]=x(e,i,s),a=r!==e,l=I(t),c=e.startsWith(".");if(void 0!==o){if(!l||!l[r])return;return void j(t,l,r,o,n?i:null)}c&&Object.keys(l).forEach((i=>{!function(t,e,i,s){const n=e[i]||{};Object.keys(n).forEach((o=>{if(o.includes(s)){const s=n[o];j(t,e,i,s.originalHandler,s.delegationSelector)}}))}(t,l,i,e.slice(1))}));const h=l[r]||{};Object.keys(h).forEach((i=>{const s=i.replace(C,"");if(!a||e.includes(s)){const e=h[i];j(t,l,r,e.originalHandler,e.delegationSelector)}}))},trigger(t,e,i){if("string"!=typeof e||!t)return null;const s=p(),n=H(e),o=e!==n,r=N.has(n);let 
a,l=!0,c=!0,h=!1,d=null;return o&&s&&(a=s.Event(e,i),s(t).trigger(a),l=!a.isPropagationStopped(),c=!a.isImmediatePropagationStopped(),h=a.isDefaultPrevented()),r?(d=document.createEvent("HTMLEvents"),d.initEvent(n,l,!0)):d=new CustomEvent(e,{bubbles:l,cancelable:!0}),void 0!==i&&Object.keys(i).forEach((t=>{Object.defineProperty(d,t,{get:()=>i[t]})})),h&&d.preventDefault(),c&&t.dispatchEvent(d),d.defaultPrevented&&void 0!==a&&a.preventDefault(),d}},B=new Map,z={set(t,e,i){B.has(t)||B.set(t,new Map);const s=B.get(t);s.has(e)||0===s.size?s.set(e,i):console.error(`Bootstrap doesn't allow more than one instance per element. Bound instance: ${Array.from(s.keys())[0]}.`)},get:(t,e)=>B.has(t)&&B.get(t).get(e)||null,remove(t,e){if(!B.has(t))return;const i=B.get(t);i.delete(e),0===i.size&&B.delete(t)}};class R{constructor(t){(t=c(t))&&(this._element=t,z.set(this._element,this.constructor.DATA_KEY,this))}dispose(){z.remove(this._element,this.constructor.DATA_KEY),$.off(this._element,this.constructor.EVENT_KEY),Object.getOwnPropertyNames(this).forEach((t=>{this[t]=null}))}_queueCallback(t,e,i=!0){E(t,e,i)}static getInstance(t){return z.get(c(t),this.DATA_KEY)}static getOrCreateInstance(t,e={}){return this.getInstance(t)||new this(t,"object"==typeof e?e:null)}static get VERSION(){return"5.1.3"}static get NAME(){throw new Error('You have to implement the static method "NAME", for each component!')}static get DATA_KEY(){return`bs.${this.NAME}`}static get EVENT_KEY(){return`.${this.DATA_KEY}`}}const F=(t,e="hide")=>{const i=`click.dismiss${t.EVENT_KEY}`,s=t.NAME;$.on(document,i,`[data-bs-dismiss="${s}"]`,(function(i){if(["A","AREA"].includes(this.tagName)&&i.preventDefault(),u(this))return;const n=r(this)||this.closest(`.${s}`);t.getOrCreateInstance(n)[e]()}))};class q extends R{static get NAME(){return"alert"}close(){if($.trigger(this._element,"close.bs.alert").defaultPrevented)return;this._element.classList.remove("show");const 
t=this._element.classList.contains("fade");this._queueCallback((()=>this._destroyElement()),this._element,t)}_destroyElement(){this._element.remove(),$.trigger(this._element,"closed.bs.alert"),this.dispose()}static jQueryInterface(t){return this.each((function(){const e=q.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}F(q,"close"),v(q);const W='[data-bs-toggle="button"]';class U extends R{static get NAME(){return"button"}toggle(){this._element.setAttribute("aria-pressed",this._element.classList.toggle("active"))}static jQueryInterface(t){return this.each((function(){const e=U.getOrCreateInstance(this);"toggle"===t&&e[t]()}))}}function K(t){return"true"===t||"false"!==t&&(t===Number(t).toString()?Number(t):""===t||"null"===t?null:t)}function V(t){return t.replace(/[A-Z]/g,(t=>`-${t.toLowerCase()}`))}$.on(document,"click.bs.button.data-api",W,(t=>{t.preventDefault();const e=t.target.closest(W);U.getOrCreateInstance(e).toggle()})),v(U);const X={setDataAttribute(t,e,i){t.setAttribute(`data-bs-${V(e)}`,i)},removeDataAttribute(t,e){t.removeAttribute(`data-bs-${V(e)}`)},getDataAttributes(t){if(!t)return{};const e={};return Object.keys(t.dataset).filter((t=>t.startsWith("bs"))).forEach((i=>{let s=i.replace(/^bs/,"");s=s.charAt(0).toLowerCase()+s.slice(1,s.length),e[s]=K(t.dataset[i])})),e},getDataAttribute:(t,e)=>K(t.getAttribute(`data-bs-${V(e)}`)),offset(t){const e=t.getBoundingClientRect();return{top:e.top+window.pageYOffset,left:e.left+window.pageXOffset}},position:t=>({top:t.offsetTop,left:t.offsetLeft})},Y={find:(t,e=document.documentElement)=>[].concat(...Element.prototype.querySelectorAll.call(e,t)),findOne:(t,e=document.documentElement)=>Element.prototype.querySelector.call(e,t),children:(t,e)=>[].concat(...t.children).filter((t=>t.matches(e))),parents(t,e){const i=[];let 
s=t.parentNode;for(;s&&s.nodeType===Node.ELEMENT_NODE&&3!==s.nodeType;)s.matches(e)&&i.push(s),s=s.parentNode;return i},prev(t,e){let i=t.previousElementSibling;for(;i;){if(i.matches(e))return[i];i=i.previousElementSibling}return[]},next(t,e){let i=t.nextElementSibling;for(;i;){if(i.matches(e))return[i];i=i.nextElementSibling}return[]},focusableChildren(t){const e=["a","button","input","textarea","select","details","[tabindex]",'[contenteditable="true"]'].map((t=>`${t}:not([tabindex^="-"])`)).join(", ");return this.find(e,t).filter((t=>!u(t)&&d(t)))}},Q="carousel",G={interval:5e3,keyboard:!0,slide:!1,pause:"hover",wrap:!0,touch:!0},Z={interval:"(number|boolean)",keyboard:"boolean",slide:"(boolean|string)",pause:"(string|boolean)",wrap:"boolean",touch:"boolean"},J="next",tt="prev",et="left",it="right",st={ArrowLeft:it,ArrowRight:et},nt="slid.bs.carousel",ot="active",rt=".active.carousel-item";class at extends R{constructor(t,e){super(t),this._items=null,this._interval=null,this._activeElement=null,this._isPaused=!1,this._isSliding=!1,this.touchTimeout=null,this.touchStartX=0,this.touchDeltaX=0,this._config=this._getConfig(e),this._indicatorsElement=Y.findOne(".carousel-indicators",this._element),this._touchSupported="ontouchstart"in document.documentElement||navigator.maxTouchPoints>0,this._pointerEvent=Boolean(window.PointerEvent),this._addEventListeners()}static get Default(){return G}static get NAME(){return Q}next(){this._slide(J)}nextWhenVisible(){!document.hidden&&d(this._element)&&this.next()}prev(){this._slide(tt)}pause(t){t||(this._isPaused=!0),Y.findOne(".carousel-item-next, 
.carousel-item-prev",this._element)&&(a(this._element),this.cycle(!0)),clearInterval(this._interval),this._interval=null}cycle(t){t||(this._isPaused=!1),this._interval&&(clearInterval(this._interval),this._interval=null),this._config&&this._config.interval&&!this._isPaused&&(this._updateInterval(),this._interval=setInterval((document.visibilityState?this.nextWhenVisible:this.next).bind(this),this._config.interval))}to(t){this._activeElement=Y.findOne(rt,this._element);const e=this._getItemIndex(this._activeElement);if(t>this._items.length-1||t<0)return;if(this._isSliding)return void $.one(this._element,nt,(()=>this.to(t)));if(e===t)return this.pause(),void this.cycle();const i=t>e?J:tt;this._slide(i,this._items[t])}_getConfig(t){return t={...G,...X.getDataAttributes(this._element),..."object"==typeof t?t:{}},h(Q,t,Z),t}_handleSwipe(){const t=Math.abs(this.touchDeltaX);if(t<=40)return;const e=t/this.touchDeltaX;this.touchDeltaX=0,e&&this._slide(e>0?it:et)}_addEventListeners(){this._config.keyboard&&$.on(this._element,"keydown.bs.carousel",(t=>this._keydown(t))),"hover"===this._config.pause&&($.on(this._element,"mouseenter.bs.carousel",(t=>this.pause(t))),$.on(this._element,"mouseleave.bs.carousel",(t=>this.cycle(t)))),this._config.touch&&this._touchSupported&&this._addTouchEventListeners()}_addTouchEventListeners(){const t=t=>this._pointerEvent&&("pen"===t.pointerType||"touch"===t.pointerType),e=e=>{t(e)?this.touchStartX=e.clientX:this._pointerEvent||(this.touchStartX=e.touches[0].clientX)},i=t=>{this.touchDeltaX=t.touches&&t.touches.length>1?0:t.touches[0].clientX-this.touchStartX},s=e=>{t(e)&&(this.touchDeltaX=e.clientX-this.touchStartX),this._handleSwipe(),"hover"===this._config.pause&&(this.pause(),this.touchTimeout&&clearTimeout(this.touchTimeout),this.touchTimeout=setTimeout((t=>this.cycle(t)),500+this._config.interval))};Y.find(".carousel-item 
img",this._element).forEach((t=>{$.on(t,"dragstart.bs.carousel",(t=>t.preventDefault()))})),this._pointerEvent?($.on(this._element,"pointerdown.bs.carousel",(t=>e(t))),$.on(this._element,"pointerup.bs.carousel",(t=>s(t))),this._element.classList.add("pointer-event")):($.on(this._element,"touchstart.bs.carousel",(t=>e(t))),$.on(this._element,"touchmove.bs.carousel",(t=>i(t))),$.on(this._element,"touchend.bs.carousel",(t=>s(t))))}_keydown(t){if(/input|textarea/i.test(t.target.tagName))return;const e=st[t.key];e&&(t.preventDefault(),this._slide(e))}_getItemIndex(t){return this._items=t&&t.parentNode?Y.find(".carousel-item",t.parentNode):[],this._items.indexOf(t)}_getItemByOrder(t,e){const i=t===J;return w(this._items,e,i,this._config.wrap)}_triggerSlideEvent(t,e){const i=this._getItemIndex(t),s=this._getItemIndex(Y.findOne(rt,this._element));return $.trigger(this._element,"slide.bs.carousel",{relatedTarget:t,direction:e,from:s,to:i})}_setActiveIndicatorElement(t){if(this._indicatorsElement){const e=Y.findOne(".active",this._indicatorsElement);e.classList.remove(ot),e.removeAttribute("aria-current");const i=Y.find("[data-bs-target]",this._indicatorsElement);for(let e=0;e<i.length;e++)if(Number.parseInt(i[e].getAttribute("data-bs-slide-to"),10)===this._getItemIndex(t)){i[e].classList.add(ot),i[e].setAttribute("aria-current","true");break}}}_updateInterval(){const t=this._activeElement||Y.findOne(rt,this._element);if(!t)return;const e=Number.parseInt(t.getAttribute("data-bs-interval"),10);e?(this._config.defaultInterval=this._config.defaultInterval||this._config.interval,this._config.interval=e):this._config.interval=this._config.defaultInterval||this._config.interval}_slide(t,e){const 
i=this._directionToOrder(t),s=Y.findOne(rt,this._element),n=this._getItemIndex(s),o=e||this._getItemByOrder(i,s),r=this._getItemIndex(o),a=Boolean(this._interval),l=i===J,c=l?"carousel-item-start":"carousel-item-end",h=l?"carousel-item-next":"carousel-item-prev",d=this._orderToDirection(i);if(o&&o.classList.contains(ot))return void(this._isSliding=!1);if(this._isSliding)return;if(this._triggerSlideEvent(o,d).defaultPrevented)return;if(!s||!o)return;this._isSliding=!0,a&&this.pause(),this._setActiveIndicatorElement(o),this._activeElement=o;const u=()=>{$.trigger(this._element,nt,{relatedTarget:o,direction:d,from:n,to:r})};if(this._element.classList.contains("slide")){o.classList.add(h),f(o),s.classList.add(c),o.classList.add(c);const t=()=>{o.classList.remove(c,h),o.classList.add(ot),s.classList.remove(ot,h,c),this._isSliding=!1,setTimeout(u,0)};this._queueCallback(t,s,!0)}else s.classList.remove(ot),o.classList.add(ot),this._isSliding=!1,u();a&&this.cycle()}_directionToOrder(t){return[it,et].includes(t)?b()?t===et?tt:J:t===et?J:tt:t}_orderToDirection(t){return[J,tt].includes(t)?b()?t===tt?et:it:t===tt?it:et:t}static carouselInterface(t,e){const i=at.getOrCreateInstance(t,e);let{_config:s}=i;"object"==typeof e&&(s={...s,...e});const n="string"==typeof e?e:s.slide;if("number"==typeof e)i.to(e);else if("string"==typeof n){if(void 0===i[n])throw new TypeError(`No method named "${n}"`);i[n]()}else s.interval&&s.ride&&(i.pause(),i.cycle())}static jQueryInterface(t){return this.each((function(){at.carouselInterface(this,t)}))}static dataApiClickHandler(t){const e=r(this);if(!e||!e.classList.contains("carousel"))return;const i={...X.getDataAttributes(e),...X.getDataAttributes(this)},s=this.getAttribute("data-bs-slide-to");s&&(i.interval=!1),at.carouselInterface(e,i),s&&at.getInstance(e).to(s),t.preventDefault()}}$.on(document,"click.bs.carousel.data-api","[data-bs-slide], [data-bs-slide-to]",at.dataApiClickHandler),$.on(window,"load.bs.carousel.data-api",(()=>{const 
t=Y.find('[data-bs-ride="carousel"]');for(let e=0,i=t.length;e<i;e++)at.carouselInterface(t[e],at.getInstance(t[e]))})),v(at);const lt="collapse",ct={toggle:!0,parent:null},ht={toggle:"boolean",parent:"(null|element)"},dt="show",ut="collapse",gt="collapsing",_t="collapsed",ft=":scope .collapse .collapse",pt='[data-bs-toggle="collapse"]';class mt extends R{constructor(t,e){super(t),this._isTransitioning=!1,this._config=this._getConfig(e),this._triggerArray=[];const i=Y.find(pt);for(let t=0,e=i.length;t<e;t++){const e=i[t],s=o(e),n=Y.find(s).filter((t=>t===this._element));null!==s&&n.length&&(this._selector=s,this._triggerArray.push(e))}this._initializeChildren(),this._config.parent||this._addAriaAndCollapsedClass(this._triggerArray,this._isShown()),this._config.toggle&&this.toggle()}static get Default(){return ct}static get NAME(){return lt}toggle(){this._isShown()?this.hide():this.show()}show(){if(this._isTransitioning||this._isShown())return;let t,e=[];if(this._config.parent){const t=Y.find(ft,this._config.parent);e=Y.find(".collapse.show, .collapse.collapsing",this._config.parent).filter((e=>!t.includes(e)))}const i=Y.findOne(this._selector);if(e.length){const s=e.find((t=>i!==t));if(t=s?mt.getInstance(s):null,t&&t._isTransitioning)return}if($.trigger(this._element,"show.bs.collapse").defaultPrevented)return;e.forEach((e=>{i!==e&&mt.getOrCreateInstance(e,{toggle:!1}).hide(),t||z.set(e,"bs.collapse",null)}));const s=this._getDimension();this._element.classList.remove(ut),this._element.classList.add(gt),this._element.style[s]=0,this._addAriaAndCollapsedClass(this._triggerArray,!0),this._isTransitioning=!0;const 
n=`scroll${s[0].toUpperCase()+s.slice(1)}`;this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(gt),this._element.classList.add(ut,dt),this._element.style[s]="",$.trigger(this._element,"shown.bs.collapse")}),this._element,!0),this._element.style[s]=`${this._element[n]}px`}hide(){if(this._isTransitioning||!this._isShown())return;if($.trigger(this._element,"hide.bs.collapse").defaultPrevented)return;const t=this._getDimension();this._element.style[t]=`${this._element.getBoundingClientRect()[t]}px`,f(this._element),this._element.classList.add(gt),this._element.classList.remove(ut,dt);const e=this._triggerArray.length;for(let t=0;t<e;t++){const e=this._triggerArray[t],i=r(e);i&&!this._isShown(i)&&this._addAriaAndCollapsedClass([e],!1)}this._isTransitioning=!0,this._element.style[t]="",this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(gt),this._element.classList.add(ut),$.trigger(this._element,"hidden.bs.collapse")}),this._element,!0)}_isShown(t=this._element){return t.classList.contains(dt)}_getConfig(t){return(t={...ct,...X.getDataAttributes(this._element),...t}).toggle=Boolean(t.toggle),t.parent=c(t.parent),h(lt,t,ht),t}_getDimension(){return this._element.classList.contains("collapse-horizontal")?"width":"height"}_initializeChildren(){if(!this._config.parent)return;const t=Y.find(ft,this._config.parent);Y.find(pt,this._config.parent).filter((e=>!t.includes(e))).forEach((t=>{const e=r(t);e&&this._addAriaAndCollapsedClass([t],this._isShown(e))}))}_addAriaAndCollapsedClass(t,e){t.length&&t.forEach((t=>{e?t.classList.remove(_t):t.classList.add(_t),t.setAttribute("aria-expanded",e)}))}static jQueryInterface(t){return this.each((function(){const e={};"string"==typeof t&&/show|hide/.test(t)&&(e.toggle=!1);const i=mt.getOrCreateInstance(this,e);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named 
"${t}"`);i[t]()}}))}}$.on(document,"click.bs.collapse.data-api",pt,(function(t){("A"===t.target.tagName||t.delegateTarget&&"A"===t.delegateTarget.tagName)&&t.preventDefault();const e=o(this);Y.find(e).forEach((t=>{mt.getOrCreateInstance(t,{toggle:!1}).toggle()}))})),v(mt);const bt="dropdown",vt="Escape",yt="Space",Et="ArrowUp",wt="ArrowDown",At=new RegExp("ArrowUp|ArrowDown|Escape"),Tt="click.bs.dropdown.data-api",Ct="keydown.bs.dropdown.data-api",kt="show",Lt='[data-bs-toggle="dropdown"]',St=".dropdown-menu",Ot=b()?"top-end":"top-start",Nt=b()?"top-start":"top-end",Dt=b()?"bottom-end":"bottom-start",It=b()?"bottom-start":"bottom-end",Pt=b()?"left-start":"right-start",xt=b()?"right-start":"left-start",Mt={offset:[0,2],boundary:"clippingParents",reference:"toggle",display:"dynamic",popperConfig:null,autoClose:!0},jt={offset:"(array|string|function)",boundary:"(string|element)",reference:"(string|element|object)",display:"string",popperConfig:"(null|object|function)",autoClose:"(boolean|string)"};class Ht extends R{constructor(t,e){super(t),this._popper=null,this._config=this._getConfig(e),this._menu=this._getMenuElement(),this._inNavbar=this._detectNavbar()}static get Default(){return Mt}static get DefaultType(){return jt}static get NAME(){return bt}toggle(){return this._isShown()?this.hide():this.show()}show(){if(u(this._element)||this._isShown(this._menu))return;const t={relatedTarget:this._element};if($.trigger(this._element,"show.bs.dropdown",t).defaultPrevented)return;const e=Ht.getParentFromElement(this._element);this._inNavbar?X.setDataAttribute(this._menu,"popper","none"):this._createPopper(e),"ontouchstart"in 
document.documentElement&&!e.closest(".navbar-nav")&&[].concat(...document.body.children).forEach((t=>$.on(t,"mouseover",_))),this._element.focus(),this._element.setAttribute("aria-expanded",!0),this._menu.classList.add(kt),this._element.classList.add(kt),$.trigger(this._element,"shown.bs.dropdown",t)}hide(){if(u(this._element)||!this._isShown(this._menu))return;const t={relatedTarget:this._element};this._completeHide(t)}dispose(){this._popper&&this._popper.destroy(),super.dispose()}update(){this._inNavbar=this._detectNavbar(),this._popper&&this._popper.update()}_completeHide(t){$.trigger(this._element,"hide.bs.dropdown",t).defaultPrevented||("ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach((t=>$.off(t,"mouseover",_))),this._popper&&this._popper.destroy(),this._menu.classList.remove(kt),this._element.classList.remove(kt),this._element.setAttribute("aria-expanded","false"),X.removeDataAttribute(this._menu,"popper"),$.trigger(this._element,"hidden.bs.dropdown",t))}_getConfig(t){if(t={...this.constructor.Default,...X.getDataAttributes(this._element),...t},h(bt,t,this.constructor.DefaultType),"object"==typeof t.reference&&!l(t.reference)&&"function"!=typeof t.reference.getBoundingClientRect)throw new TypeError(`${bt.toUpperCase()}: Option "reference" provided type "object" without a required "getBoundingClientRect" method.`);return t}_createPopper(t){if(void 0===i)throw new TypeError("Bootstrap's dropdowns require Popper (https://popper.js.org)");let e=this._element;"parent"===this._config.reference?e=t:l(this._config.reference)?e=c(this._config.reference):"object"==typeof this._config.reference&&(e=this._config.reference);const s=this._getPopperConfig(),n=s.modifiers.find((t=>"applyStyles"===t.name&&!1===t.enabled));this._popper=i.createPopper(e,this._menu,s),n&&X.setDataAttribute(this._menu,"popper","static")}_isShown(t=this._element){return t.classList.contains(kt)}_getMenuElement(){return 
Y.next(this._element,St)[0]}_getPlacement(){const t=this._element.parentNode;if(t.classList.contains("dropend"))return Pt;if(t.classList.contains("dropstart"))return xt;const e="end"===getComputedStyle(this._menu).getPropertyValue("--bs-position").trim();return t.classList.contains("dropup")?e?Nt:Ot:e?It:Dt}_detectNavbar(){return null!==this._element.closest(".navbar")}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_getPopperConfig(){const t={placement:this._getPlacement(),modifiers:[{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"offset",options:{offset:this._getOffset()}}]};return"static"===this._config.display&&(t.modifiers=[{name:"applyStyles",enabled:!1}]),{...t,..."function"==typeof this._config.popperConfig?this._config.popperConfig(t):this._config.popperConfig}}_selectMenuItem({key:t,target:e}){const i=Y.find(".dropdown-menu .dropdown-item:not(.disabled):not(:disabled)",this._menu).filter(d);i.length&&w(i,e,t===wt,!i.includes(e)).focus()}static jQueryInterface(t){return this.each((function(){const e=Ht.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}static clearMenus(t){if(t&&(2===t.button||"keyup"===t.type&&"Tab"!==t.key))return;const e=Y.find(Lt);for(let i=0,s=e.length;i<s;i++){const s=Ht.getInstance(e[i]);if(!s||!1===s._config.autoClose)continue;if(!s._isShown())continue;const n={relatedTarget:s._element};if(t){const e=t.composedPath(),i=e.includes(s._menu);if(e.includes(s._element)||"inside"===s._config.autoClose&&!i||"outside"===s._config.autoClose&&i)continue;if(s._menu.contains(t.target)&&("keyup"===t.type&&"Tab"===t.key||/input|select|option|textarea|form/i.test(t.target.tagName)))continue;"click"===t.type&&(n.clickEvent=t)}s._completeHide(n)}}static getParentFromElement(t){return r(t)||t.parentNode}static 
dataApiKeydownHandler(t){if(/input|textarea/i.test(t.target.tagName)?t.key===yt||t.key!==vt&&(t.key!==wt&&t.key!==Et||t.target.closest(St)):!At.test(t.key))return;const e=this.classList.contains(kt);if(!e&&t.key===vt)return;if(t.preventDefault(),t.stopPropagation(),u(this))return;const i=this.matches(Lt)?this:Y.prev(this,Lt)[0],s=Ht.getOrCreateInstance(i);if(t.key!==vt)return t.key===Et||t.key===wt?(e||s.show(),void s._selectMenuItem(t)):void(e&&t.key!==yt||Ht.clearMenus());s.hide()}}$.on(document,Ct,Lt,Ht.dataApiKeydownHandler),$.on(document,Ct,St,Ht.dataApiKeydownHandler),$.on(document,Tt,Ht.clearMenus),$.on(document,"keyup.bs.dropdown.data-api",Ht.clearMenus),$.on(document,Tt,Lt,(function(t){t.preventDefault(),Ht.getOrCreateInstance(this).toggle()})),v(Ht);const $t=".fixed-top, .fixed-bottom, .is-fixed, .sticky-top",Bt=".sticky-top";class zt{constructor(){this._element=document.body}getWidth(){const t=document.documentElement.clientWidth;return Math.abs(window.innerWidth-t)}hide(){const t=this.getWidth();this._disableOverFlow(),this._setElementAttributes(this._element,"paddingRight",(e=>e+t)),this._setElementAttributes($t,"paddingRight",(e=>e+t)),this._setElementAttributes(Bt,"marginRight",(e=>e-t))}_disableOverFlow(){this._saveInitialAttribute(this._element,"overflow"),this._element.style.overflow="hidden"}_setElementAttributes(t,e,i){const s=this.getWidth();this._applyManipulationCallback(t,(t=>{if(t!==this._element&&window.innerWidth>t.clientWidth+s)return;this._saveInitialAttribute(t,e);const n=window.getComputedStyle(t)[e];t.style[e]=`${i(Number.parseFloat(n))}px`}))}reset(){this._resetElementAttributes(this._element,"overflow"),this._resetElementAttributes(this._element,"paddingRight"),this._resetElementAttributes($t,"paddingRight"),this._resetElementAttributes(Bt,"marginRight")}_saveInitialAttribute(t,e){const i=t.style[e];i&&X.setDataAttribute(t,e,i)}_resetElementAttributes(t,e){this._applyManipulationCallback(t,(t=>{const i=X.getDataAttribute(t,e);void 
0===i?t.style.removeProperty(e):(X.removeDataAttribute(t,e),t.style[e]=i)}))}_applyManipulationCallback(t,e){l(t)?e(t):Y.find(t,this._element).forEach(e)}isOverflowing(){return this.getWidth()>0}}const Rt={className:"modal-backdrop",isVisible:!0,isAnimated:!1,rootElement:"body",clickCallback:null},Ft={className:"string",isVisible:"boolean",isAnimated:"boolean",rootElement:"(element|string)",clickCallback:"(function|null)"},qt="show",Wt="mousedown.bs.backdrop";class Ut{constructor(t){this._config=this._getConfig(t),this._isAppended=!1,this._element=null}show(t){this._config.isVisible?(this._append(),this._config.isAnimated&&f(this._getElement()),this._getElement().classList.add(qt),this._emulateAnimation((()=>{y(t)}))):y(t)}hide(t){this._config.isVisible?(this._getElement().classList.remove(qt),this._emulateAnimation((()=>{this.dispose(),y(t)}))):y(t)}_getElement(){if(!this._element){const t=document.createElement("div");t.className=this._config.className,this._config.isAnimated&&t.classList.add("fade"),this._element=t}return this._element}_getConfig(t){return(t={...Rt,..."object"==typeof t?t:{}}).rootElement=c(t.rootElement),h("backdrop",t,Ft),t}_append(){this._isAppended||(this._config.rootElement.append(this._getElement()),$.on(this._getElement(),Wt,(()=>{y(this._config.clickCallback)})),this._isAppended=!0)}dispose(){this._isAppended&&($.off(this._element,Wt),this._element.remove(),this._isAppended=!1)}_emulateAnimation(t){E(t,this._getElement(),this._config.isAnimated)}}const Kt={trapElement:null,autofocus:!0},Vt={trapElement:"element",autofocus:"boolean"},Xt=".bs.focustrap",Yt="backward";class 
Qt{constructor(t){this._config=this._getConfig(t),this._isActive=!1,this._lastTabNavDirection=null}activate(){const{trapElement:t,autofocus:e}=this._config;this._isActive||(e&&t.focus(),$.off(document,Xt),$.on(document,"focusin.bs.focustrap",(t=>this._handleFocusin(t))),$.on(document,"keydown.tab.bs.focustrap",(t=>this._handleKeydown(t))),this._isActive=!0)}deactivate(){this._isActive&&(this._isActive=!1,$.off(document,Xt))}_handleFocusin(t){const{target:e}=t,{trapElement:i}=this._config;if(e===document||e===i||i.contains(e))return;const s=Y.focusableChildren(i);0===s.length?i.focus():this._lastTabNavDirection===Yt?s[s.length-1].focus():s[0].focus()}_handleKeydown(t){"Tab"===t.key&&(this._lastTabNavDirection=t.shiftKey?Yt:"forward")}_getConfig(t){return t={...Kt,..."object"==typeof t?t:{}},h("focustrap",t,Vt),t}}const Gt="modal",Zt="Escape",Jt={backdrop:!0,keyboard:!0,focus:!0},te={backdrop:"(boolean|string)",keyboard:"boolean",focus:"boolean"},ee="hidden.bs.modal",ie="show.bs.modal",se="resize.bs.modal",ne="click.dismiss.bs.modal",oe="keydown.dismiss.bs.modal",re="mousedown.dismiss.bs.modal",ae="modal-open",le="show",ce="modal-static";class he extends R{constructor(t,e){super(t),this._config=this._getConfig(e),this._dialog=Y.findOne(".modal-dialog",this._element),this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._isShown=!1,this._ignoreBackdropClick=!1,this._isTransitioning=!1,this._scrollBar=new zt}static get Default(){return Jt}static get NAME(){return Gt}toggle(t){return 
this._isShown?this.hide():this.show(t)}show(t){this._isShown||this._isTransitioning||$.trigger(this._element,ie,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._isAnimated()&&(this._isTransitioning=!0),this._scrollBar.hide(),document.body.classList.add(ae),this._adjustDialog(),this._setEscapeEvent(),this._setResizeEvent(),$.on(this._dialog,re,(()=>{$.one(this._element,"mouseup.dismiss.bs.modal",(t=>{t.target===this._element&&(this._ignoreBackdropClick=!0)}))})),this._showBackdrop((()=>this._showElement(t))))}hide(){if(!this._isShown||this._isTransitioning)return;if($.trigger(this._element,"hide.bs.modal").defaultPrevented)return;this._isShown=!1;const t=this._isAnimated();t&&(this._isTransitioning=!0),this._setEscapeEvent(),this._setResizeEvent(),this._focustrap.deactivate(),this._element.classList.remove(le),$.off(this._element,ne),$.off(this._dialog,re),this._queueCallback((()=>this._hideModal()),this._element,t)}dispose(){[window,this._dialog].forEach((t=>$.off(t,".bs.modal"))),this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}handleUpdate(){this._adjustDialog()}_initializeBackDrop(){return new Ut({isVisible:Boolean(this._config.backdrop),isAnimated:this._isAnimated()})}_initializeFocusTrap(){return new Qt({trapElement:this._element})}_getConfig(t){return t={...Jt,...X.getDataAttributes(this._element),..."object"==typeof t?t:{}},h(Gt,t,te),t}_showElement(t){const 
e=this._isAnimated(),i=Y.findOne(".modal-body",this._dialog);this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE||document.body.append(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.scrollTop=0,i&&(i.scrollTop=0),e&&f(this._element),this._element.classList.add(le),this._queueCallback((()=>{this._config.focus&&this._focustrap.activate(),this._isTransitioning=!1,$.trigger(this._element,"shown.bs.modal",{relatedTarget:t})}),this._dialog,e)}_setEscapeEvent(){this._isShown?$.on(this._element,oe,(t=>{this._config.keyboard&&t.key===Zt?(t.preventDefault(),this.hide()):this._config.keyboard||t.key!==Zt||this._triggerBackdropTransition()})):$.off(this._element,oe)}_setResizeEvent(){this._isShown?$.on(window,se,(()=>this._adjustDialog())):$.off(window,se)}_hideModal(){this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._isTransitioning=!1,this._backdrop.hide((()=>{document.body.classList.remove(ae),this._resetAdjustments(),this._scrollBar.reset(),$.trigger(this._element,ee)}))}_showBackdrop(t){$.on(this._element,ne,(t=>{this._ignoreBackdropClick?this._ignoreBackdropClick=!1:t.target===t.currentTarget&&(!0===this._config.backdrop?this.hide():"static"===this._config.backdrop&&this._triggerBackdropTransition())})),this._backdrop.show(t)}_isAnimated(){return 
this._element.classList.contains("fade")}_triggerBackdropTransition(){if($.trigger(this._element,"hidePrevented.bs.modal").defaultPrevented)return;const{classList:t,scrollHeight:e,style:i}=this._element,s=e>document.documentElement.clientHeight;!s&&"hidden"===i.overflowY||t.contains(ce)||(s||(i.overflowY="hidden"),t.add(ce),this._queueCallback((()=>{t.remove(ce),s||this._queueCallback((()=>{i.overflowY=""}),this._dialog)}),this._dialog),this._element.focus())}_adjustDialog(){const t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._scrollBar.getWidth(),i=e>0;(!i&&t&&!b()||i&&!t&&b())&&(this._element.style.paddingLeft=`${e}px`),(i&&!t&&!b()||!i&&t&&b())&&(this._element.style.paddingRight=`${e}px`)}_resetAdjustments(){this._element.style.paddingLeft="",this._element.style.paddingRight=""}static jQueryInterface(t,e){return this.each((function(){const i=he.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t](e)}}))}}$.on(document,"click.bs.modal.data-api",'[data-bs-toggle="modal"]',(function(t){const e=r(this);["A","AREA"].includes(this.tagName)&&t.preventDefault(),$.one(e,ie,(t=>{t.defaultPrevented||$.one(e,ee,(()=>{d(this)&&this.focus()}))}));const i=Y.findOne(".modal.show");i&&he.getInstance(i).hide(),he.getOrCreateInstance(e).toggle(this)})),F(he),v(he);const de="offcanvas",ue={backdrop:!0,keyboard:!0,scroll:!1},ge={backdrop:"boolean",keyboard:"boolean",scroll:"boolean"},_e="show",fe=".offcanvas.show",pe="hidden.bs.offcanvas";class me extends R{constructor(t,e){super(t),this._config=this._getConfig(e),this._isShown=!1,this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._addEventListeners()}static get NAME(){return de}static get Default(){return ue}toggle(t){return 
this._isShown?this.hide():this.show(t)}show(t){this._isShown||$.trigger(this._element,"show.bs.offcanvas",{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._element.style.visibility="visible",this._backdrop.show(),this._config.scroll||(new zt).hide(),this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.classList.add(_e),this._queueCallback((()=>{this._config.scroll||this._focustrap.activate(),$.trigger(this._element,"shown.bs.offcanvas",{relatedTarget:t})}),this._element,!0))}hide(){this._isShown&&($.trigger(this._element,"hide.bs.offcanvas").defaultPrevented||(this._focustrap.deactivate(),this._element.blur(),this._isShown=!1,this._element.classList.remove(_e),this._backdrop.hide(),this._queueCallback((()=>{this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._element.style.visibility="hidden",this._config.scroll||(new zt).reset(),$.trigger(this._element,pe)}),this._element,!0)))}dispose(){this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}_getConfig(t){return t={...ue,...X.getDataAttributes(this._element),..."object"==typeof t?t:{}},h(de,t,ge),t}_initializeBackDrop(){return new Ut({className:"offcanvas-backdrop",isVisible:this._config.backdrop,isAnimated:!0,rootElement:this._element.parentNode,clickCallback:()=>this.hide()})}_initializeFocusTrap(){return new Qt({trapElement:this._element})}_addEventListeners(){$.on(this._element,"keydown.dismiss.bs.offcanvas",(t=>{this._config.keyboard&&"Escape"===t.key&&this.hide()}))}static jQueryInterface(t){return this.each((function(){const e=me.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}$.on(document,"click.bs.offcanvas.data-api",'[data-bs-toggle="offcanvas"]',(function(t){const 
e=r(this);if(["A","AREA"].includes(this.tagName)&&t.preventDefault(),u(this))return;$.one(e,pe,(()=>{d(this)&&this.focus()}));const i=Y.findOne(fe);i&&i!==e&&me.getInstance(i).hide(),me.getOrCreateInstance(e).toggle(this)})),$.on(window,"load.bs.offcanvas.data-api",(()=>Y.find(fe).forEach((t=>me.getOrCreateInstance(t).show())))),F(me),v(me);const be=new Set(["background","cite","href","itemtype","longdesc","poster","src","xlink:href"]),ve=/^(?:(?:https?|mailto|ftp|tel|file|sms):|[^#&/:?]*(?:[#/?]|$))/i,ye=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[\d+/a-z]+=*$/i,Ee=(t,e)=>{const i=t.nodeName.toLowerCase();if(e.includes(i))return!be.has(i)||Boolean(ve.test(t.nodeValue)||ye.test(t.nodeValue));const s=e.filter((t=>t instanceof RegExp));for(let t=0,e=s.length;t<e;t++)if(s[t].test(i))return!0;return!1};function we(t,e,i){if(!t.length)return t;if(i&&"function"==typeof i)return i(t);const s=(new window.DOMParser).parseFromString(t,"text/html"),n=[].concat(...s.body.querySelectorAll("*"));for(let t=0,i=n.length;t<i;t++){const i=n[t],s=i.nodeName.toLowerCase();if(!Object.keys(e).includes(s)){i.remove();continue}const o=[].concat(...i.attributes),r=[].concat(e["*"]||[],e[s]||[]);o.forEach((t=>{Ee(t,r)||i.removeAttribute(t.nodeName)}))}return s.body.innerHTML}const Ae="tooltip",Te=new Set(["sanitize","allowList","sanitizeFn"]),Ce={animation:"boolean",template:"string",title:"(string|element|function)",trigger:"string",delay:"(number|object)",html:"boolean",selector:"(string|boolean)",placement:"(string|function)",offset:"(array|string|function)",container:"(string|element|boolean)",fallbackPlacements:"array",boundary:"(string|element)",customClass:"(string|function)",sanitize:"boolean",sanitizeFn:"(null|function)",allowList:"object",popperConfig:"(null|object|function)"},ke={AUTO:"auto",TOP:"top",RIGHT:b()?"left":"right",BOTTOM:"bottom",LEFT:b()?"right":"left"},Le={animation:!0,template:'<div 
class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,selector:!1,placement:"top",offset:[0,0],container:!1,fallbackPlacements:["top","right","bottom","left"],boundary:"clippingParents",customClass:"",sanitize:!0,sanitizeFn:null,allowList:{"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","srcset","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},popperConfig:null},Se={HIDE:"hide.bs.tooltip",HIDDEN:"hidden.bs.tooltip",SHOW:"show.bs.tooltip",SHOWN:"shown.bs.tooltip",INSERTED:"inserted.bs.tooltip",CLICK:"click.bs.tooltip",FOCUSIN:"focusin.bs.tooltip",FOCUSOUT:"focusout.bs.tooltip",MOUSEENTER:"mouseenter.bs.tooltip",MOUSELEAVE:"mouseleave.bs.tooltip"},Oe="fade",Ne="show",De="show",Ie="out",Pe=".tooltip-inner",xe=".modal",Me="hide.bs.modal",je="hover",He="focus";class $e extends R{constructor(t,e){if(void 0===i)throw new TypeError("Bootstrap's tooltips require Popper (https://popper.js.org)");super(t),this._isEnabled=!0,this._timeout=0,this._hoverState="",this._activeTrigger={},this._popper=null,this._config=this._getConfig(e),this.tip=null,this._setListeners()}static get Default(){return Le}static get NAME(){return Ae}static get Event(){return Se}static get DefaultType(){return Ce}enable(){this._isEnabled=!0}disable(){this._isEnabled=!1}toggleEnabled(){this._isEnabled=!this._isEnabled}toggle(t){if(this._isEnabled)if(t){const e=this._initializeOnDelegatedTarget(t);e._activeTrigger.click=!e._activeTrigger.click,e._isWithActiveTrigger()?e._enter(null,e):e._leave(null,e)}else{if(this.getTipElement().classList.contains(Ne))return void 
this._leave(null,this);this._enter(null,this)}}dispose(){clearTimeout(this._timeout),$.off(this._element.closest(xe),Me,this._hideModalHandler),this.tip&&this.tip.remove(),this._disposePopper(),super.dispose()}show(){if("none"===this._element.style.display)throw new Error("Please use show on visible elements");if(!this.isWithContent()||!this._isEnabled)return;const t=$.trigger(this._element,this.constructor.Event.SHOW),e=g(this._element),s=null===e?this._element.ownerDocument.documentElement.contains(this._element):e.contains(this._element);if(t.defaultPrevented||!s)return;"tooltip"===this.constructor.NAME&&this.tip&&this.getTitle()!==this.tip.querySelector(Pe).innerHTML&&(this._disposePopper(),this.tip.remove(),this.tip=null);const n=this.getTipElement(),o=(t=>{do{t+=Math.floor(1e6*Math.random())}while(document.getElementById(t));return t})(this.constructor.NAME);n.setAttribute("id",o),this._element.setAttribute("aria-describedby",o),this._config.animation&&n.classList.add(Oe);const r="function"==typeof this._config.placement?this._config.placement.call(this,n,this._element):this._config.placement,a=this._getAttachment(r);this._addAttachmentClass(a);const{container:l}=this._config;z.set(n,this.constructor.DATA_KEY,this),this._element.ownerDocument.documentElement.contains(this.tip)||(l.append(n),$.trigger(this._element,this.constructor.Event.INSERTED)),this._popper?this._popper.update():this._popper=i.createPopper(this._element,n,this._getPopperConfig(a)),n.classList.add(Ne);const c=this._resolvePossibleFunction(this._config.customClass);c&&n.classList.add(...c.split(" ")),"ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach((t=>{$.on(t,"mouseover",_)}));const h=this.tip.classList.contains(Oe);this._queueCallback((()=>{const t=this._hoverState;this._hoverState=null,$.trigger(this._element,this.constructor.Event.SHOWN),t===Ie&&this._leave(null,this)}),this.tip,h)}hide(){if(!this._popper)return;const 
t=this.getTipElement();if($.trigger(this._element,this.constructor.Event.HIDE).defaultPrevented)return;t.classList.remove(Ne),"ontouchstart"in document.documentElement&&[].concat(...document.body.children).forEach((t=>$.off(t,"mouseover",_))),this._activeTrigger.click=!1,this._activeTrigger.focus=!1,this._activeTrigger.hover=!1;const e=this.tip.classList.contains(Oe);this._queueCallback((()=>{this._isWithActiveTrigger()||(this._hoverState!==De&&t.remove(),this._cleanTipClass(),this._element.removeAttribute("aria-describedby"),$.trigger(this._element,this.constructor.Event.HIDDEN),this._disposePopper())}),this.tip,e),this._hoverState=""}update(){null!==this._popper&&this._popper.update()}isWithContent(){return Boolean(this.getTitle())}getTipElement(){if(this.tip)return this.tip;const t=document.createElement("div");t.innerHTML=this._config.template;const e=t.children[0];return this.setContent(e),e.classList.remove(Oe,Ne),this.tip=e,this.tip}setContent(t){this._sanitizeAndSetContent(t,this.getTitle(),Pe)}_sanitizeAndSetContent(t,e,i){const s=Y.findOne(i,t);e||!s?this.setElementContent(s,e):s.remove()}setElementContent(t,e){if(null!==t)return l(e)?(e=c(e),void(this._config.html?e.parentNode!==t&&(t.innerHTML="",t.append(e)):t.textContent=e.textContent)):void(this._config.html?(this._config.sanitize&&(e=we(e,this._config.allowList,this._config.sanitizeFn)),t.innerHTML=e):t.textContent=e)}getTitle(){const t=this._element.getAttribute("data-bs-original-title")||this._config.title;return this._resolvePossibleFunction(t)}updateAttachment(t){return"right"===t?"end":"left"===t?"start":t}_initializeOnDelegatedTarget(t,e){return e||this.constructor.getOrCreateInstance(t.delegateTarget,this._getDelegateConfig())}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_resolvePossibleFunction(t){return"function"==typeof t?t.call(this._element):t}_getPopperConfig(t){const 
e={placement:t,modifiers:[{name:"flip",options:{fallbackPlacements:this._config.fallbackPlacements}},{name:"offset",options:{offset:this._getOffset()}},{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"arrow",options:{element:`.${this.constructor.NAME}-arrow`}},{name:"onChange",enabled:!0,phase:"afterWrite",fn:t=>this._handlePopperPlacementChange(t)}],onFirstUpdate:t=>{t.options.placement!==t.placement&&this._handlePopperPlacementChange(t)}};return{...e,..."function"==typeof this._config.popperConfig?this._config.popperConfig(e):this._config.popperConfig}}_addAttachmentClass(t){this.getTipElement().classList.add(`${this._getBasicClassPrefix()}-${this.updateAttachment(t)}`)}_getAttachment(t){return ke[t.toUpperCase()]}_setListeners(){this._config.trigger.split(" ").forEach((t=>{if("click"===t)$.on(this._element,this.constructor.Event.CLICK,this._config.selector,(t=>this.toggle(t)));else if("manual"!==t){const e=t===je?this.constructor.Event.MOUSEENTER:this.constructor.Event.FOCUSIN,i=t===je?this.constructor.Event.MOUSELEAVE:this.constructor.Event.FOCUSOUT;$.on(this._element,e,this._config.selector,(t=>this._enter(t))),$.on(this._element,i,this._config.selector,(t=>this._leave(t)))}})),this._hideModalHandler=()=>{this._element&&this.hide()},$.on(this._element.closest(xe),Me,this._hideModalHandler),this._config.selector?this._config={...this._config,trigger:"manual",selector:""}:this._fixTitle()}_fixTitle(){const t=this._element.getAttribute("title"),e=typeof 
this._element.getAttribute("data-bs-original-title");(t||"string"!==e)&&(this._element.setAttribute("data-bs-original-title",t||""),!t||this._element.getAttribute("aria-label")||this._element.textContent||this._element.setAttribute("aria-label",t),this._element.setAttribute("title",""))}_enter(t,e){e=this._initializeOnDelegatedTarget(t,e),t&&(e._activeTrigger["focusin"===t.type?He:je]=!0),e.getTipElement().classList.contains(Ne)||e._hoverState===De?e._hoverState=De:(clearTimeout(e._timeout),e._hoverState=De,e._config.delay&&e._config.delay.show?e._timeout=setTimeout((()=>{e._hoverState===De&&e.show()}),e._config.delay.show):e.show())}_leave(t,e){e=this._initializeOnDelegatedTarget(t,e),t&&(e._activeTrigger["focusout"===t.type?He:je]=e._element.contains(t.relatedTarget)),e._isWithActiveTrigger()||(clearTimeout(e._timeout),e._hoverState=Ie,e._config.delay&&e._config.delay.hide?e._timeout=setTimeout((()=>{e._hoverState===Ie&&e.hide()}),e._config.delay.hide):e.hide())}_isWithActiveTrigger(){for(const t in this._activeTrigger)if(this._activeTrigger[t])return!0;return!1}_getConfig(t){const e=X.getDataAttributes(this._element);return Object.keys(e).forEach((t=>{Te.has(t)&&delete e[t]})),(t={...this.constructor.Default,...e,..."object"==typeof t&&t?t:{}}).container=!1===t.container?document.body:c(t.container),"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),"number"==typeof t.title&&(t.title=t.title.toString()),"number"==typeof t.content&&(t.content=t.content.toString()),h(Ae,t,this.constructor.DefaultType),t.sanitize&&(t.template=we(t.template,t.allowList,t.sanitizeFn)),t}_getDelegateConfig(){const t={};for(const e in this._config)this.constructor.Default[e]!==this._config[e]&&(t[e]=this._config[e]);return t}_cleanTipClass(){const t=this.getTipElement(),e=new 
RegExp(`(^|\\s)${this._getBasicClassPrefix()}\\S+`,"g"),i=t.getAttribute("class").match(e);null!==i&&i.length>0&&i.map((t=>t.trim())).forEach((e=>t.classList.remove(e)))}_getBasicClassPrefix(){return"bs-tooltip"}_handlePopperPlacementChange(t){const{state:e}=t;e&&(this.tip=e.elements.popper,this._cleanTipClass(),this._addAttachmentClass(this._getAttachment(e.placement)))}_disposePopper(){this._popper&&(this._popper.destroy(),this._popper=null)}static jQueryInterface(t){return this.each((function(){const e=$e.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}v($e);const Be={...$e.Default,placement:"right",offset:[0,8],trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="popover-arrow"></div><h3 class="popover-header"></h3><div class="popover-body"></div></div>'},ze={...$e.DefaultType,content:"(string|element|function)"},Re={HIDE:"hide.bs.popover",HIDDEN:"hidden.bs.popover",SHOW:"show.bs.popover",SHOWN:"shown.bs.popover",INSERTED:"inserted.bs.popover",CLICK:"click.bs.popover",FOCUSIN:"focusin.bs.popover",FOCUSOUT:"focusout.bs.popover",MOUSEENTER:"mouseenter.bs.popover",MOUSELEAVE:"mouseleave.bs.popover"};class Fe extends $e{static get Default(){return Be}static get NAME(){return"popover"}static get Event(){return Re}static get DefaultType(){return ze}isWithContent(){return this.getTitle()||this._getContent()}setContent(t){this._sanitizeAndSetContent(t,this.getTitle(),".popover-header"),this._sanitizeAndSetContent(t,this._getContent(),".popover-body")}_getContent(){return this._resolvePossibleFunction(this._config.content)}_getBasicClassPrefix(){return"bs-popover"}static jQueryInterface(t){return this.each((function(){const e=Fe.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}v(Fe);const 
qe="scrollspy",We={offset:10,method:"auto",target:""},Ue={offset:"number",method:"string",target:"(string|element)"},Ke="active",Ve=".nav-link, .list-group-item, .dropdown-item",Xe="position";class Ye extends R{constructor(t,e){super(t),this._scrollElement="BODY"===this._element.tagName?window:this._element,this._config=this._getConfig(e),this._offsets=[],this._targets=[],this._activeTarget=null,this._scrollHeight=0,$.on(this._scrollElement,"scroll.bs.scrollspy",(()=>this._process())),this.refresh(),this._process()}static get Default(){return We}static get NAME(){return qe}refresh(){const t=this._scrollElement===this._scrollElement.window?"offset":Xe,e="auto"===this._config.method?t:this._config.method,i=e===Xe?this._getScrollTop():0;this._offsets=[],this._targets=[],this._scrollHeight=this._getScrollHeight(),Y.find(Ve,this._config.target).map((t=>{const s=o(t),n=s?Y.findOne(s):null;if(n){const t=n.getBoundingClientRect();if(t.width||t.height)return[X[e](n).top+i,s]}return null})).filter((t=>t)).sort(((t,e)=>t[0]-e[0])).forEach((t=>{this._offsets.push(t[0]),this._targets.push(t[1])}))}dispose(){$.off(this._scrollElement,".bs.scrollspy"),super.dispose()}_getConfig(t){return(t={...We,...X.getDataAttributes(this._element),..."object"==typeof t&&t?t:{}}).target=c(t.target)||document.documentElement,h(qe,t,Ue),t}_getScrollTop(){return this._scrollElement===window?this._scrollElement.pageYOffset:this._scrollElement.scrollTop}_getScrollHeight(){return this._scrollElement.scrollHeight||Math.max(document.body.scrollHeight,document.documentElement.scrollHeight)}_getOffsetHeight(){return this._scrollElement===window?window.innerHeight:this._scrollElement.getBoundingClientRect().height}_process(){const t=this._getScrollTop()+this._config.offset,e=this._getScrollHeight(),i=this._config.offset+e-this._getOffsetHeight();if(this._scrollHeight!==e&&this.refresh(),t>=i){const 
t=this._targets[this._targets.length-1];this._activeTarget!==t&&this._activate(t)}else{if(this._activeTarget&&t<this._offsets[0]&&this._offsets[0]>0)return this._activeTarget=null,void this._clear();for(let e=this._offsets.length;e--;)this._activeTarget!==this._targets[e]&&t>=this._offsets[e]&&(void 0===this._offsets[e+1]||t<this._offsets[e+1])&&this._activate(this._targets[e])}}_activate(t){this._activeTarget=t,this._clear();const e=Ve.split(",").map((e=>`${e}[data-bs-target="${t}"],${e}[href="${t}"]`)),i=Y.findOne(e.join(","),this._config.target);i.classList.add(Ke),i.classList.contains("dropdown-item")?Y.findOne(".dropdown-toggle",i.closest(".dropdown")).classList.add(Ke):Y.parents(i,".nav, .list-group").forEach((t=>{Y.prev(t,".nav-link, .list-group-item").forEach((t=>t.classList.add(Ke))),Y.prev(t,".nav-item").forEach((t=>{Y.children(t,".nav-link").forEach((t=>t.classList.add(Ke)))}))})),$.trigger(this._scrollElement,"activate.bs.scrollspy",{relatedTarget:t})}_clear(){Y.find(Ve,this._config.target).filter((t=>t.classList.contains(Ke))).forEach((t=>t.classList.remove(Ke)))}static jQueryInterface(t){return this.each((function(){const e=Ye.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}$.on(window,"load.bs.scrollspy.data-api",(()=>{Y.find('[data-bs-spy="scroll"]').forEach((t=>new Ye(t)))})),v(Ye);const Qe="active",Ge="fade",Ze="show",Je=".active",ti=":scope > li > .active";class ei extends R{static get NAME(){return"tab"}show(){if(this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE&&this._element.classList.contains(Qe))return;let t;const e=r(this._element),i=this._element.closest(".nav, .list-group");if(i){const e="UL"===i.nodeName||"OL"===i.nodeName?ti:Je;t=Y.find(e,i),t=t[t.length-1]}const 
s=t?$.trigger(t,"hide.bs.tab",{relatedTarget:this._element}):null;if($.trigger(this._element,"show.bs.tab",{relatedTarget:t}).defaultPrevented||null!==s&&s.defaultPrevented)return;this._activate(this._element,i);const n=()=>{$.trigger(t,"hidden.bs.tab",{relatedTarget:this._element}),$.trigger(this._element,"shown.bs.tab",{relatedTarget:t})};e?this._activate(e,e.parentNode,n):n()}_activate(t,e,i){const s=(!e||"UL"!==e.nodeName&&"OL"!==e.nodeName?Y.children(e,Je):Y.find(ti,e))[0],n=i&&s&&s.classList.contains(Ge),o=()=>this._transitionComplete(t,s,i);s&&n?(s.classList.remove(Ze),this._queueCallback(o,t,!0)):o()}_transitionComplete(t,e,i){if(e){e.classList.remove(Qe);const t=Y.findOne(":scope > .dropdown-menu .active",e.parentNode);t&&t.classList.remove(Qe),"tab"===e.getAttribute("role")&&e.setAttribute("aria-selected",!1)}t.classList.add(Qe),"tab"===t.getAttribute("role")&&t.setAttribute("aria-selected",!0),f(t),t.classList.contains(Ge)&&t.classList.add(Ze);let s=t.parentNode;if(s&&"LI"===s.nodeName&&(s=s.parentNode),s&&s.classList.contains("dropdown-menu")){const e=t.closest(".dropdown");e&&Y.find(".dropdown-toggle",e).forEach((t=>t.classList.add(Qe))),t.setAttribute("aria-expanded",!0)}i&&i()}static jQueryInterface(t){return this.each((function(){const e=ei.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}$.on(document,"click.bs.tab.data-api",'[data-bs-toggle="tab"], [data-bs-toggle="pill"], [data-bs-toggle="list"]',(function(t){["A","AREA"].includes(this.tagName)&&t.preventDefault(),u(this)||ei.getOrCreateInstance(this).show()})),v(ei);const ii="toast",si="hide",ni="show",oi="showing",ri={animation:"boolean",autohide:"boolean",delay:"number"},ai={animation:!0,autohide:!0,delay:5e3};class li extends R{constructor(t,e){super(t),this._config=this._getConfig(e),this._timeout=null,this._hasMouseInteraction=!1,this._hasKeyboardInteraction=!1,this._setListeners()}static get DefaultType(){return 
ri}static get Default(){return ai}static get NAME(){return ii}show(){$.trigger(this._element,"show.bs.toast").defaultPrevented||(this._clearTimeout(),this._config.animation&&this._element.classList.add("fade"),this._element.classList.remove(si),f(this._element),this._element.classList.add(ni),this._element.classList.add(oi),this._queueCallback((()=>{this._element.classList.remove(oi),$.trigger(this._element,"shown.bs.toast"),this._maybeScheduleHide()}),this._element,this._config.animation))}hide(){this._element.classList.contains(ni)&&($.trigger(this._element,"hide.bs.toast").defaultPrevented||(this._element.classList.add(oi),this._queueCallback((()=>{this._element.classList.add(si),this._element.classList.remove(oi),this._element.classList.remove(ni),$.trigger(this._element,"hidden.bs.toast")}),this._element,this._config.animation)))}dispose(){this._clearTimeout(),this._element.classList.contains(ni)&&this._element.classList.remove(ni),super.dispose()}_getConfig(t){return t={...ai,...X.getDataAttributes(this._element),..."object"==typeof t&&t?t:{}},h(ii,t,this.constructor.DefaultType),t}_maybeScheduleHide(){this._config.autohide&&(this._hasMouseInteraction||this._hasKeyboardInteraction||(this._timeout=setTimeout((()=>{this.hide()}),this._config.delay)))}_onInteraction(t,e){switch(t.type){case"mouseover":case"mouseout":this._hasMouseInteraction=e;break;case"focusin":case"focusout":this._hasKeyboardInteraction=e}if(e)return void this._clearTimeout();const i=t.relatedTarget;this._element===i||this._element.contains(i)||this._maybeScheduleHide()}_setListeners(){$.on(this._element,"mouseover.bs.toast",(t=>this._onInteraction(t,!0))),$.on(this._element,"mouseout.bs.toast",(t=>this._onInteraction(t,!1))),$.on(this._element,"focusin.bs.toast",(t=>this._onInteraction(t,!0))),$.on(this._element,"focusout.bs.toast",(t=>this._onInteraction(t,!1)))}_clearTimeout(){clearTimeout(this._timeout),this._timeout=null}static jQueryInterface(t){return this.each((function(){const 
e=li.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}return F(li),v(li),{Alert:q,Button:U,Carousel:at,Collapse:mt,Dropdown:Ht,Modal:he,Offcanvas:me,Popover:Fe,ScrollSpy:Ye,Tab:ei,Toast:li,Tooltip:$e}}));
//# sourceMappingURL=bootstrap.min.js.map | PypiClean |
/Djaloha-0.4.2.tar.gz/Djaloha-0.4.2/djaloha/static/aloha.0.20.20/plugins/common/undo/lib/undo-plugin.js | define(
['aloha', 'aloha/jquery', 'aloha/plugin', 'undo/vendor/undo', 'undo/vendor/diff_match_patch_uncompressed'],
function( Aloha, jQuery, Plugin) {
	// Undo/redo support for Aloha editables, built on the bundled Undo.js
	// command stack and google diff-match-patch text patches.
	var
		dmp = new diff_match_patch,
		// Guards against pushing new commands onto the stack while a
		// command is being undone/redone (the smartContentChange event
		// triggered by reset() must be ignored).
		resetFlag = false;
	// Return a deep copy of `patch` with every diff operation inverted
	// (insertions become deletions and vice versa), so that applying the
	// copy reverses the effect of the original patch.
	function reversePatch(patch) {
		var reversed = dmp.patch_deepCopy(patch);
		for (var i = 0; i < reversed.length; i++) {
			for (var j = 0; j < reversed[i].diffs.length; j++) {
				reversed[i].diffs[j][0] = -(reversed[i].diffs[j][0]);
			}
		}
		return reversed;
	}
	/**
	 * register the plugin with unique name
	 */
	return Plugin.create('undo', {
		/**
		 * Initialize the plugin and set initialize flag on true
		 */
		init: function () {
			var stack = new Undo.Stack(),
				// A single recorded edit: the editable it belongs to plus
				// the diff-match-patch patch that produced it.
				EditCommand = Undo.Command.extend({
					constructor: function(editable, patch) {
						this.editable = editable;
						this.patch = patch;
					},
					execute: function() {
						//command object is created after execution.
					},
					undo: function() {
						this.phase(reversePatch(this.patch));
					},
					redo: function() {
						this.phase(this.patch);
					},
					// Apply `patch` to the editable's current contents and
					// install the patched text.
					phase: function(patch) {
						var contents = this.editable.getContents(),
							applied = dmp.patch_apply(patch, contents),
							newValue = applied[0],
							didNotApply = applied[1];
						// didNotApply holds a boolean per hunk; a truthy
						// length means at least one hunk exists.
						if (didNotApply.length) {
							//error
						}
						this.reset(newValue);
					},
					reset: function(val) {
						//we have to trigger a smartContentChange event
						//after doing an undo or redo, but we mustn't
						//push new commands on the stack, because there
						//are no new commands, just the old commands on
						//the stack that are undone or redone.
						resetFlag = true;
						var reactivate = null;
						// Deactivate before replacing the DOM contents,
						// then restore activation afterwards.
						if (Aloha.getActiveEditable() === this.editable) {
							Aloha.deactivateEditable();
							reactivate = this.editable;
						}
						this.editable.obj.html(val);
						if (null !== reactivate) {
							reactivate.activate();
						}
						//TODO: this is a call to an internal
						//function. There should be an API to generate
						//new smartContentChangeEvents.
						this.editable.smartContentChange({type : 'blur'});
						resetFlag = false;
					}
				});
			stack.changed = function() {
				// update UI
			};
			// Meta-Z -> undo, Shift-Meta-Z -> redo (keyCode 90 is 'Z').
			jQuery(document).keydown(function(event) {
				if (!event.metaKey || event.keyCode != 90) {
					return;
				}
				event.preventDefault();
				//Before doing an undo, bring the smartContentChange
				//event up to date.
				if ( null !== Aloha.getActiveEditable() ) {
					Aloha.getActiveEditable().smartContentChange({type : 'blur'});
				}
				if (event.shiftKey) {
					stack.canRedo() && stack.redo();
				} else {
					stack.canUndo() && stack.undo();
				}
			});
			// Record an EditCommand for every real content change so it
			// can later be undone/redone.
			Aloha.bind('aloha-smart-content-changed', function(jevent, aevent) {
				if (resetFlag) {
					return;
				}
				var oldValue = aevent.snapshotContent,
					newValue = aevent.editable.getContents(),
					patch = dmp.patch_make(oldValue, newValue);
				// only push an EditCommand if something actually changed.
				if (0 !== patch.length) {
					stack.execute( new EditCommand( aevent.editable, patch ) );
				}
			});
		},
		/**
		 * toString method
		 * @return string
		 */
		toString: function () {
			return 'undo';
		}
	});
});
/GeoNode-3.2.0-py3-none-any.whl/geonode/base/management/commands/fixoauthuri.py |
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from oauth2_provider.models import Application
from oauth2_provider.generators import generate_client_id, generate_client_secret
from geonode import geoserver # noqa
from geonode.utils import check_ogc_backend
class Command(BaseCommand):
    """Creates or updates the oauth2 Application registered for GeoServer.

    The 'GeoServer' Application's redirect URIs are rewritten to point at
    the configured GeoServer locations plus the supplied target address;
    the client credentials can optionally be regenerated with --force.
    """
    # Django management-command flag: Django settings may be imported.
    can_import_settings = True
    def add_arguments(self, parser):
        """Register the command-line options accepted by this command."""
        # Named (optional) arguments
        parser.add_argument(
            '-f',
            '--force',
            action='store_true',
            dest='force_exec',
            default=False,
            help='Forces the regeneration of OAUth keys.')
        parser.add_argument(
            '--target-address',
            dest='target_address',
            help='Target Address (the one to be changed e.g. http://my-public.geonode.org)')
    def handle(self, *args, **options):
        """Create or update the 'GeoServer' oauth2 Application.

        Returns the credentials as the string '<client_id>,<client_secret>'.
        """
        force_exec = options.get('force_exec')
        target_address = options.get('target_address')
        client_id = None
        client_secret = None
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            # Imported lazily so the command loads even without GeoServer.
            from geonode.geoserver.helpers import ogc_server_settings
            # NOTE(review): if --target-address is omitted, target_address
            # is None and the literal 'None/geoserver/' ends up in the
            # redirect URIs — confirm the argument was meant to be required.
            redirect_uris = f'{ogc_server_settings.LOCATION}\n{ogc_server_settings.public_url}\n{target_address}/geoserver/'  # noqa
            if Application.objects.filter(name='GeoServer').exists():
                # Refresh the redirect URIs of the existing registration.
                Application.objects.filter(name='GeoServer').update(redirect_uris=redirect_uris)
                if force_exec:
                    # --force: discard and regenerate the old credentials.
                    Application.objects.filter(name='GeoServer').update(
                        client_id=generate_client_id(),
                        client_secret=generate_client_secret()
                    )
                # Re-read so the returned credentials reflect the database.
                app = Application.objects.filter(name='GeoServer')[0]
                client_id = app.client_id
                client_secret = app.client_secret
            else:
                # No registration yet: create one owned by a superuser.
                client_id = generate_client_id()
                client_secret = generate_client_secret()
                Application.objects.create(
                    skip_authorization=True,
                    redirect_uris=redirect_uris,
                    name='GeoServer',
                    authorization_grant_type='authorization-code',
                    client_type='confidential',
                    client_id=client_id,
                    client_secret=client_secret,
                    user=get_user_model().objects.filter(is_superuser=True)[0]
                )
        # NOTE(review): when the OGC backend is not GeoServer, this returns
        # the literal string 'None,None' — confirm that is acceptable.
        return f'{client_id},{client_secret}'
/Eupompos-0.1.1-py3-none-any.whl/eupompos/parser.py | # something's wrong with the way pylint understands the import statement :
# pylint: disable=import-error
import os.path
import re
from eupompos.cppparser import CppParser
from eupompos.pyparser import PyParser
from eupompos.filetools import normpath
from eupompos.messages import MSG
from eupompos.parserreport import ParserReport
import eupompos.settings as settings
################################################################################
class Parser(object):
    """Parse a source file or a whole directory tree.

    Construct the object with a path and the parsing starts immediately;
    results are accumulated in ``self.report``.

    instance attribute(s) :
        o report : a ParserReport object
        o cppparser : a CppParser object feeding ``report``
        o pyparser : a PyParser object feeding ``report``
    """

    def __init__(self, _src):
        """Entry point of the class : launch the parsing of ``_src``.

        PARAMETER :
            o (str)_src : either a path either a file

        RETURNED VALUE : None
        """
        self.report = ParserReport()
        self.cppparser = CppParser(self.report)
        self.pyparser = PyParser(self.report)

        target = normpath(_src)
        if not os.path.exists(target):
            MSG.error("The path \"{0}\" (full path : \"{1}\") doesn't exist.".format(_src, target))
        elif os.path.isfile(target):
            directory, basename = os.path.split(target)
            self.parse_a_file(_src, directory, basename)
        elif os.path.isdir(target):
            self.parse_a_path(target)
        else:
            MSG.error("\"{0}\" (full path : \"{1}\") is "
                      "neither a file nor a directory.".format(_src, target))

    @staticmethod
    def filename_ok(_path, _filename):
        """Say if the file named "_path,_filename" has to be parsed.

        PARAMETERS :
            o (str) _path : path to the file
            o (str) _filename : filename (without the path)

        RETURNED VALUE : ( (bool)ok, (None/str)language )
            language : either None, either "python", either "cpp"
        """
        if re.search(settings.REGEX__PYTHON_FILENAMES, _filename) is not None:
            verdict = (True, "python")
        elif re.search(settings.REGEX__CPP_CODE_FILENAMES, _filename) is not None:
            verdict = (True, "cpp")
        else:
            verdict = (False, None)

        # A blacklisted keyword anywhere in the path or the name vetoes
        # the file, whatever its extension says.
        if any(keyword in _path or keyword in _filename
               for keyword in settings.FULLNAME_BLACKLIST):
            verdict = (False, None)

        return verdict

    def parse_a_file(self, _projectsource, _root, _filename):
        """Parse the file named _root+_filename.

        PARAMETER :
            o (str) _projectsource, the project's source directory
            o (str) _root, the file's directory
            o (str) _filename, the file's name.

        RETURNED VALUE : None
        """
        MSG.debug("Parser.parse_a_file() : {0}, {1}, {2}".format(_projectsource, _root, _filename))

        to_parse, language = Parser.filename_ok(_root, _filename)
        if not to_parse:
            return

        fullname = os.path.join(_root, _filename)
        with open(fullname, 'r') as srcfile:
            # Dispatch to the parser matching the detected language.
            if language == "cpp":
                self.cppparser.parse(_root=_projectsource,
                                     _filename=fullname,
                                     _srccontent=srcfile)
            elif language == "python":
                self.pyparser.parse(_root=_projectsource,
                                    _filename=fullname,
                                    _srccontent=srcfile)

    def parse_a_path(self, _src):
        """Parse the directory named _src.

        PARAMETER :
            o (str)_src : the directory's name

        RETURNED VALUE : None
        """
        MSG.debug("Parser.parse_a_path() : " + _src)
        self.report.clear()
        for dirpath, _, filenames in os.walk(_src):
            for name in filenames:
                self.parse_a_file(_src, dirpath, name)
/NeXpy-1.0.4-py3-none-any.whl/nexpy/gui/widgets.py | import math
import warnings
import numpy as np
from matplotlib import colors
from matplotlib.patches import Ellipse, Polygon, Rectangle
from .pyqt import QtCore, QtGui, QtWidgets
from .utils import (boundaries, find_nearest, format_float, get_color,
natural_sort, report_error)
warnings.filterwarnings("ignore", category=DeprecationWarning)
bold_font = QtGui.QFont()
bold_font.setBold(True)
class NXStack(QtWidgets.QWidget):
    """A stack of widgets, one visible at a time, chosen from a dropdown.

    An NXComboBox sits above a QStackedWidget and switches which page of
    the stack is shown.

    Attributes
    ----------
    layout : QtWidgets.QVBoxLayout
        Vertical layout holding the dropdown and the stack.
    stack : QtWidgets.QStackedWidget
        Container for the stacked pages.
    box : NXComboBox
        Dropdown listing the page labels.
    widgets : dict
        Mapping of label to stacked widget.
    """

    def __init__(self, labels, widgets, parent=None):
        """Create the stack and populate it.

        Parameters
        ----------
        labels : list of str
            Labels shown in the dropdown, one per widget.
        widgets : list of QtWidgets.QWidget
            Widgets to stack, in the same order as `labels`.
        parent : QObject, optional
            Parent of the NXStack instance (the default is None).
        """
        super().__init__(parent=parent)
        self.layout = QtWidgets.QVBoxLayout()
        self.stack = QtWidgets.QStackedWidget(self)
        self.widgets = dict(zip(labels, widgets))
        # Selecting a label switches the visible page by index.
        self.box = NXComboBox(slot=self.stack.setCurrentIndex, items=labels)
        for page in widgets:
            self.stack.addWidget(page)
        for item in (self.box, self.stack):
            self.layout.addWidget(item)
        self.layout.addStretch()
        self.setLayout(self.layout)

    def add(self, label, widget):
        """Append a page to the stack.

        Parameters
        ----------
        label : str
            Label used to select the widget in the dropdown.
        widget : QtWidgets.QWidget
            Widget to append to the stack.
        """
        # NOTE(review): pages added here are not recorded in self.widgets,
        # so remove() cannot later delete them — confirm this is intended.
        self.box.addItem(label)
        self.stack.addWidget(widget)

    def remove(self, label):
        """Remove the page registered under `label`, if present."""
        if label in self.widgets:
            self.stack.removeWidget(self.widgets[label])
            del self.widgets[label]
            self.box.remove(label)
class NXSortModel(QtCore.QSortFilterProxyModel):
    """Proxy model that sorts item text in natural (human) order."""

    def __init__(self, parent=None):
        super().__init__(parent=parent)

    def lessThan(self, left, right):
        """Compare two indices by the natural sort key of their text.

        Falls back to True whenever either item's text cannot be
        retrieved or compared.
        """
        try:
            model = self.sourceModel()
            left_key = natural_sort(model.itemFromIndex(left).text())
            right_key = natural_sort(model.itemFromIndex(right).text())
            return left_key < right_key
        except Exception:
            return True
class NXScrollArea(QtWidgets.QScrollArea):
    """Scroll area embedding a widget or a layout."""

    def __init__(self, content=None, horizontal=False, parent=None):
        """Create the scroll area and optionally install its content.

        Parameters
        ----------
        content : QtWidgets.QWidget or QtWidgets.QLayout, optional
            Content to embed; a layout is wrapped in a plain widget first.
        horizontal : bool, optional
            True if a horizontal scroll bar is enabled, default False.
        parent : QObject, optional
            Parent of this widget (default None).
        """
        super().__init__(parent=parent)
        if content:
            if isinstance(content, QtWidgets.QLayout):
                container = QtWidgets.QWidget()
                container.setLayout(content)
                content = container
            if isinstance(content, QtWidgets.QWidget):
                self.setWidget(content)
        if not horizontal:
            self.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.setWidgetResizable(True)
        self.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                           QtWidgets.QSizePolicy.Expanding)

    def setWidget(self, widget):
        """Install `widget`, wrapping a layout in a plain widget if needed.

        The minimum width is padded by the vertical scroll bar width so
        the content is not obscured when the bar appears.
        """
        if isinstance(widget, QtWidgets.QLayout):
            container = QtWidgets.QWidget()
            container.setLayout(widget)
            widget = container
        super().setWidget(widget)
        widget.setMinimumWidth(widget.sizeHint().width() +
                               self.verticalScrollBar().sizeHint().width())
class NXLabel(QtWidgets.QLabel):
    """A QLabel that repaints after programmatic text changes.

    Subclassed because recent versions of PyQt5 (>11) do not always
    refresh the label after `setText` is called from code.
    """

    def __init__(self, text=None, parent=None, bold=False, width=None,
                 align='left'):
        """Create the label.

        Parameters
        ----------
        text : str, optional
            The default text.
        parent : QWidget, optional
            Parent of the label.
        bold : bool, optional
            True if the label text is bold, default False.
        width : int, optional
            Fixed width of label.
        align : {'left', 'center', 'right'}, optional
            Horizontal alignment of the text (default 'left').
        """
        super().__init__(parent=parent)
        if text:
            self.setText(text)
        if bold:
            self.setFont(bold_font)
        if width:
            self.setFixedWidth(width)
        # Dispatch table keeps the three alignment cases in one place;
        # any other value leaves the Qt default alignment untouched.
        flags = {'left': QtCore.Qt.AlignLeft,
                 'center': QtCore.Qt.AlignHCenter,
                 'right': QtCore.Qt.AlignRight}
        if align in flags:
            self.setAlignment(flags[align] | QtCore.Qt.AlignVCenter)

    def setText(self, text):
        """Set the label text (coerced to str) and force a repaint.

        Parameters
        ----------
        text : str
            Text to replace the label contents.
        """
        super().setText(str(text))
        self.repaint()
class NXLineEdit(QtWidgets.QLineEdit):
    """An editable text box that repaints after programmatic changes.

    Subclassed because recent versions of PyQt5 (>11) do not always
    refresh the box after `setText` is called from code.
    """

    def __init__(self, text=None, parent=None, slot=None, readonly=False,
                 width=None, align='left'):
        """Create the edit box.

        Parameters
        ----------
        text : str, optional
            The default text.
        parent : QWidget, optional
            Parent of the NXLineEdit box.
        slot : callable, optional
            Slot connected to the editingFinished signal.
        readonly : bool, optional
            Make the box read-only (default False).
        width : int, optional
            Fixed width of the box.
        align : {'left', 'center', 'right'}, optional
            Horizontal alignment of the text (default 'left').
        """
        super().__init__(parent=parent)
        if slot:
            self.editingFinished.connect(slot)
        if text is not None:
            self.setText(text)
        if readonly:
            self.setReadOnly(True)
        if width:
            self.setFixedWidth(width)
        # Dispatch table keeps the three alignment cases in one place;
        # any other value leaves the Qt default alignment untouched.
        flags = {'left': QtCore.Qt.AlignLeft,
                 'center': QtCore.Qt.AlignHCenter,
                 'right': QtCore.Qt.AlignRight}
        if align in flags:
            self.setAlignment(flags[align] | QtCore.Qt.AlignVCenter)

    def setText(self, text):
        """Set the box contents (coerced to str) and force a repaint.

        Parameters
        ----------
        text : str
            Text to replace the text box contents.
        """
        super().setText(str(text))
        self.repaint()
class NXTextBox(NXLineEdit):
    """NXLineEdit variant whose contents represent a floating point number."""

    def value(self):
        """Return the current contents as a float.

        Returns
        -------
        float
            Contents of the box converted to a floating point number.
        """
        return float(str(self.text()))

    def setValue(self, value):
        """Set the contents from a number, shown to 4 significant digits.

        Parameters
        ----------
        value : str or int or float
            Value to display.
        """
        rounded = float(f'{value:.4g}')
        self.setText(str(rounded))
class NXPlainTextEdit(QtWidgets.QPlainTextEdit):
    """An editable plain-text window using a fixed-width font."""

    def __init__(self, text=None, wrap=True, parent=None):
        super().__init__(parent=parent)
        self.setFont(QtGui.QFont('Courier'))
        if not wrap:
            self.setLineWrapMode(QtWidgets.QPlainTextEdit.NoWrap)
        if text:
            self.setPlainText(text)
        self.setFocusPolicy(QtCore.Qt.StrongFocus)

    def __repr__(self):
        return 'NXPlainTextEdit()'

    def setPlainText(self, text):
        """Replace the window contents (coerced to str) and repaint.

        Parameters
        ----------
        text : str
            Text to replace the window contents.
        """
        super().setPlainText(str(text))
        self.repaint()

    def get_text(self, tab_spaces=4):
        """Return the stripped text currently in the window.

        Parameters
        ----------
        tab_spaces : int, optional
            Number of spaces substituted for each tab (default 4). If 0
            or negative, tab characters are left untouched.

        Returns
        -------
        str
            Current text in the edit window.
        """
        stripped = self.document().toPlainText().strip()
        if tab_spaces > 0:
            return stripped.replace('\t', ' ' * tab_spaces)
        # NOTE(review): only this branch appends a trailing newline —
        # confirm the asymmetry with the tab-replacement branch is intended.
        return stripped + '\n'
class NXMessageBox(QtWidgets.QMessageBox):
    """A message box whose body is embedded in a scroll area."""

    def __init__(self, title, text, *args, **kwargs):
        super().__init__(*args, **kwargs)
        scroller = NXScrollArea(parent=self)
        self.content = QtWidgets.QWidget()
        scroller.setWidget(self.content)
        scroller.setWidgetResizable(True)
        body = QtWidgets.QVBoxLayout(self.content)
        body.addWidget(NXLabel(title, bold=True))
        body.addWidget(NXLabel(text, self))
        # Span the scroll area across the full grid of the message box.
        grid = self.layout()
        grid.addWidget(scroller, 0, 0, 1, grid.columnCount())
        self.setStyleSheet("QScrollArea{min-width:300 px; min-height: 400px}")
class NXComboBox(QtWidgets.QComboBox):
    """Dropdown menu for selecting a set of options."""

    def __init__(self, slot=None, items=None, default=None):
        """Initialize the dropdown menu with an initial list of items

        Parameters
        ----------
        slot : func, optional
            A function to be called when a selection is made
        items : list of str, optional
            A list of options to initialize the dropdown menu
        default : str, optional
            The option to be set as default when the menu is initialized
        """
        super().__init__()
        self.setSizeAdjustPolicy(QtWidgets.QComboBox.AdjustToContents)
        self.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.setMinimumWidth(80)
        # Bug fix: the default was previously the mutable `items=[]`;
        # `None` avoids the shared-mutable-default pitfall and behaves
        # identically for callers that omit the argument.
        if items:
            self.addItems([str(item) for item in items])
        if default:
            self.setCurrentIndex(self.findText(default))
        if slot:
            self.activated.connect(slot)

    def __iter__(self):
        """Implement key iteration."""
        return self.items().__iter__()

    def __next__(self):
        """Implements key iteration."""
        # NOTE(review): `items()` returns a list, which has no __next__;
        # this would raise AttributeError if ever called — confirm intent.
        return self.items().__next__()

    def __contains__(self, item):
        """True if the item is one of the options."""
        return item in self.items()

    def keyPressEvent(self, event):
        """Function to enable the use of cursor keys to make selections.

        `Up` and `Down` keys are used to select options in the dropdown menu.
        `Left` and `Right` keys are used to expand the dropdown menu to
        display the options.

        Parameters
        ----------
        event : QtCore.QEvent
            Keypress event that triggered the function.
        """
        if (event.key() == QtCore.Qt.Key_Up or
                event.key() == QtCore.Qt.Key_Down):
            super().keyPressEvent(event)
        elif (event.key() == QtCore.Qt.Key_Right or
                event.key() == QtCore.Qt.Key_Left):
            self.showPopup()
        else:
            # Unhandled keys are forwarded to the parent widget.
            self.parent().keyPressEvent(event)

    def findText(self, value, **kwargs):
        """Function to return the index of a text value.

        This is needed since h5py now returns byte strings, which will trigger
        ValueErrors unless they are converted to unicode strings.

        Parameters
        ----------
        value : str or bytes
            Searched value.

        Returns
        -------
        int
            Index of the searched value.
        """
        if isinstance(value, bytes):
            value = value.decode('utf-8')
        return super().findText(str(value), **kwargs)

    def add(self, *items):
        """Add items to the list of options.

        Parameters
        ----------
        *items : list of str
            List of options to be added to the dropdown menu.
        """
        for item in items:
            if item not in self:
                self.addItem(str(item))

    def insert(self, idx, item):
        """Insert item at the specified index.

        Parameters
        ----------
        idx : int
            Index of position before which to insert item
        item : str or int
            Option to be added to the dropdown menu.
        """
        if item not in self:
            self.insertItem(idx, str(item))

    def remove(self, item):
        """Remove item from the list of options.

        Parameters
        ----------
        item : str or int
            Option to be removed from the dropdown menu.
        """
        if str(item) in self:
            self.removeItem(self.findText(str(item)))

    def items(self):
        """Return a list of the dropdown menu options.

        Returns
        -------
        list of str
            The options currently listed in the dropdown menu
        """
        return [self.itemText(idx) for idx in range(self.count())]

    def sort(self):
        """Sorts the box items in alphabetical order."""
        self.model().sort(0)

    def select(self, item):
        """Select the option matching the text.

        Parameters
        ----------
        item : str
            The option to be selected in the dropdown menu.
        """
        self.setCurrentIndex(self.findText(str(item)))
        self.repaint()

    @property
    def selected(self):
        """Return the currently selected option.

        Returns
        -------
        str
            Currently selected option in the dropdown menu.
        """
        return self.currentText()
class NXCheckBox(QtWidgets.QCheckBox):
    """A checkbox with associated label and slot function."""

    def __init__(self, label=None, slot=None, checked=False):
        """Initialize the checkbox.

        Parameters
        ----------
        label : str, optional
            Text describing the checkbox.
        slot : func, optional
            Function to be called when the checkbox state is changed.
        checked : bool, optional
            Initial checkbox state (the default is False).
        """
        super().__init__(label)
        self.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.setChecked(checked)
        if slot:
            self.stateChanged.connect(slot)

    def keyPressEvent(self, event):
        """Toggle the checkbox state using the cursor keys.

        `Up` and `Down` both flip the current state; all other keys are
        forwarded to the parent widget.

        Parameters
        ----------
        event : QtCore.QEvent
            Keypress event that triggered the function.
        """
        if event.key() in (QtCore.Qt.Key_Up, QtCore.Qt.Key_Down):
            new_state = (QtCore.Qt.Unchecked if self.isChecked()
                         else QtCore.Qt.Checked)
            self.setCheckState(new_state)
        else:
            self.parent().keyPressEvent(event)
class NXPushButton(QtWidgets.QPushButton):
    """A button with associated label and slot function."""

    def __init__(self, label, slot, checkable=False, width=None, parent=None):
        """Initialize button

        Parameters
        ----------
        label : str
            Text describing the button
        slot : func
            Function to be called when the button is pressed
        checkable : bool, optional
            True if the button can be toggled on and off (default is False).
        width : int, optional
            Fixed width of the button in pixels.
        parent : QObject, optional
            Parent of button.
        """
        super().__init__(label, parent=parent)
        self.setFocusPolicy(QtCore.Qt.StrongFocus)
        # Prevent the button from being triggered by a stray Return key
        # press in the enclosing dialog.
        self.setDefault(False)
        self.setAutoDefault(False)
        self.clicked.connect(slot)
        if checkable:
            self.setCheckable(True)
        if width:
            self.setFixedWidth(width)

    def keyPressEvent(self, event):
        """Function to enable the use of keys to press the button.

        `Return`, `Enter`, and `Space` keys activate the slot function.

        Parameters
        ----------
        event : QtCore.QEvent
            Keypress event that triggered the function.
        """
        if (event.key() == QtCore.Qt.Key_Return or
                event.key() == QtCore.Qt.Key_Enter or
                event.key() == QtCore.Qt.Key_Space):
            self.clicked.emit()
        else:
            # Unhandled keys are forwarded to the parent widget.
            self.parent().keyPressEvent(event)
class NXColorButton(QtWidgets.QPushButton):
    """Push button for selecting colors.

    Clicking the button opens the system color dialog; the chosen color
    is displayed as the button icon and broadcast via `colorChanged`.
    """

    # Signal emitted whenever the stored color actually changes.
    colorChanged = QtCore.Signal(QtGui.QColor)

    def __init__(self, parent=None):
        super().__init__(parent=parent)
        self.setFixedWidth(18)
        self.setStyleSheet("width:18px; height:18px; "
                           "margin: 0px; border: 0px; padding: 0px;"
                           "background-color: white")
        self.setIconSize(QtCore.QSize(12, 12))
        self.clicked.connect(self.choose_color)
        self._color = QtGui.QColor()

    def choose_color(self):
        """Open the system color dialog and store a valid selection."""
        color = QtWidgets.QColorDialog.getColor(self._color,
                                                self.parentWidget())
        if color.isValid():
            self.set_color(color)

    def get_color(self):
        """Return the currently stored QColor."""
        return self._color

    @QtCore.Slot(QtGui.QColor)
    def set_color(self, color):
        """Store the color, update the icon, and emit `colorChanged`."""
        if color != self._color:
            self._color = color
            self.colorChanged.emit(self._color)
            # Repaint the icon as a solid swatch of the new color.
            pixmap = QtGui.QPixmap(self.iconSize())
            pixmap.fill(color)
            self.setIcon(QtGui.QIcon(pixmap))
        self.repaint()

    # Expose the color as a Qt property for use in style sheets/animations.
    color = QtCore.Property(QtGui.QColor, get_color, set_color)
class NXColorBox(QtWidgets.QWidget):
    """Text box and color square for selecting colors.

    This utilizes the ColorButton class in the formlayout package.

    Attributes
    ----------
    layout : QHBoxLayout
        Layout containing the text and color boxes.
    box : NXLineEdit
        Text box containing the string representation of the color.
    button : QPushButton
        Color button consisting of a colored icon.
    """

    def __init__(self, color='#ffffff', label=None, width=None, parent=None):
        """Initialize the text and color box.

        The selected color can be changed by entering a valid text string or
        by selecting the color using the standard system GUI.

        Valid text strings are HTML hex strings or standard Matplotlib colors.

        Parameters
        ----------
        color : str, optional
            Initial color (the default is '#ffffff', which represents 'white')
        label : str, optional
            Label displayed to the left of the text box.
        width : int, optional
            Fixed width of the text box in pixels.
        parent : QObject, optional
            Parent of the color box.
        """
        super().__init__(parent=parent)
        self.color_text = get_color(color)
        color = self.qcolor(self.color_text)
        self.layout = QtWidgets.QHBoxLayout()
        self.layout.setContentsMargins(0, 0, 0, 0)
        if label:
            self.layout.addStretch()
            self.layout.addWidget(NXLabel(label))
        self.textbox = NXLineEdit(self.color_text,
                                  parent=parent, slot=self.update_color,
                                  width=width, align='right')
        self.layout.addWidget(self.textbox)
        self.button = NXColorButton(parent=parent)
        self.button.color = color
        # Keep the text box in sync when the color dialog changes the button.
        self.button.colorChanged.connect(self.update_text)
        self.layout.addWidget(self.button)
        self.setLayout(self.layout)
        self.update_color()

    def update_color(self):
        """Set the button color following a change to the text box."""
        try:
            color = self.qcolor(get_color(self.textbox.text()))
            if color.isValid():
                self.button.color = color
                self.color_text = self.textbox.text()
        except ValueError as error:
            # Restore the last valid color when the entry cannot be parsed.
            report_error("Invalid color", error)
            self.textbox.setText(self.color_text)

    def update_text(self, color):
        """Set the text box string following a change to the color button."""
        self.color_text = colors.to_hex(color.getRgbF())
        self.textbox.setText(self.color_text)

    def qcolor(self, text):
        """Create a QColor from a Matplotlib color.

        Returns an invalid (default-constructed) QColor if the text is
        neither a 7-character hex string nor a named Qt color.
        """
        qcolor = QtGui.QColor()
        text = get_color(text)
        if text.startswith('#') and len(text) == 7:
            # Validate every character against the hex alphabet.
            correct = '#0123456789abcdef'
            for char in text:
                if char.lower() not in correct:
                    return qcolor
        elif text not in list(QtGui.QColor.colorNames()):
            return qcolor
        qcolor.setNamedColor(text)
        return qcolor
class NXSpinBox(QtWidgets.QSpinBox):
    """Subclass of QSpinBox with floating values.

    The spin box stores an integer index into `data`; `value()` maps the
    index back to the corresponding floating-point data value.

    Parameters
    ----------
    slot : function
        PyQt slot triggered by changing values
    data : array-like, optional
        Values of data to be adjusted by the spin box.

    Attributes
    ----------
    data : array-like
        Data values.
    validator : QDoubleValidator
        Function to ensure only floating point values are entered.
    old_value : float
        Previously stored value.
    diff : float
        Difference between maximum and minimum values when the box is
        locked.
    pause : bool
        Used when playing a movie with changing z-values.
    """

    def __init__(self, slot=None, data=None):
        super().__init__()
        self.data = data
        self.validator = QtGui.QDoubleValidator()
        self.old_value = None
        self.diff = None
        self.pause = False
        if slot:
            # Connect both signals so the slot fires on arrow steps and on
            # typed entries.
            self.valueChanged.connect(slot)
            self.editingFinished.connect(slot)
        self.setAlignment(QtCore.Qt.AlignRight)
        self.setFixedWidth(100)
        self.setKeyboardTracking(False)
        self.setAccelerated(False)
        self.app = QtWidgets.QApplication.instance()

    def value(self):
        """Return the value of the spin box.

        Returns
        -------
        float
            Floating point number defined by the spin box value
        """
        if self.data is not None:
            return float(self.centers[self.index])
        else:
            return 0.0

    @property
    def centers(self):
        """The values of the data points based on bin centers.

        Returns
        -------
        array-like
            Data points set by the spin box
        """
        if self.data is None:
            return None
        elif self.reversed:
            # Present the data in ascending order for the spin box.
            return self.data[::-1]
        else:
            return self.data

    @property
    def boundaries(self):
        """Bin boundaries derived from the bin centers, or None if no data."""
        if self.data is None:
            return None
        else:
            return boundaries(self.centers, self.data.shape[0])

    @property
    def index(self):
        """Return the current index of the spin box."""
        return super().value()

    @property
    def reversed(self):
        """Return `True` if the data are in reverse order."""
        # NOTE(review): assumes self.data is not None — confirm all callers
        # guard against unset data.
        if self.data[-1] < self.data[0]:
            return True
        else:
            return False

    def setValue(self, value):
        """Set the spin box index to the one closest to the given value."""
        super().setValue(self.valueFromText(value))
        self.repaint()

    def valueFromText(self, text):
        """Convert entered text to the index of the nearest data value."""
        return self.indexFromValue(float(str(text)))

    def textFromValue(self, value):
        """Return the display text for an index (4 significant figures)."""
        try:
            return format_float(float(f'{self.centers[value]:.4g}'))
        except Exception:
            # Fall back to an empty display when the index is out of range
            # or no data has been set.
            return ''

    def valueFromIndex(self, idx):
        """Return the data value at an index, clamped to the valid range."""
        if idx < 0:
            return self.centers[0]
        elif idx > self.maximum():
            return self.centers[-1]
        else:
            return self.centers[idx]

    def indexFromValue(self, value):
        """Return the index of the data value closest to `value`."""
        return (np.abs(self.centers - value)).argmin()

    def minBoundaryValue(self, idx):
        """Return the lower bin boundary for the given index, clamped."""
        if idx <= 0:
            return self.boundaries[0]
        elif idx >= len(self.centers) - 1:
            return self.boundaries[-2]
        else:
            return self.boundaries[idx]

    def maxBoundaryValue(self, idx):
        """Return the upper bin boundary for the given index, clamped."""
        if idx <= 0:
            return self.boundaries[1]
        elif idx >= len(self.centers) - 1:
            return self.boundaries[-1]
        else:
            return self.boundaries[idx+1]

    def validate(self, input_value, pos):
        """Delegate validation to the floating-point validator."""
        return self.validator.validate(input_value, pos)

    @property
    def tolerance(self):
        """Tolerance (1% of `diff`) used for locked-step range checks."""
        return self.diff / 100.0

    def stepBy(self, steps):
        """Step the value, honoring a locked step size when `diff` is set.

        Sets `pause` to True when a step would move outside the data range,
        which is used to stop movie playback at the ends.
        """
        self.pause = False
        if self.diff:
            value = self.value() + steps * self.diff
            if (value <= self.centers[-1] + self.tolerance) and \
               (value - self.diff >= self.centers[0] - self.tolerance):
                self.setValue(value)
            else:
                self.pause = True
        else:
            if self.index + steps <= self.maximum() and \
               self.index + steps >= 0:
                super().stepBy(steps)
            else:
                self.pause = True

    def timerEvent(self, event):
        """Continue auto-repeat only while the left mouse button is held."""
        self.app.processEvents()
        if self.app.mouseButtons() & QtCore.Qt.LeftButton:
            super().timerEvent(event)
class NXDoubleSpinBox(QtWidgets.QDoubleSpinBox):
    """Subclass of QDoubleSpinBox.

    Parameters
    ----------
    slot : function
        PyQt slot triggered by changing values
    editing : function, optional
        Slot connected to `editingFinished` when it differs from `slot`.

    Attributes
    ----------
    validator : QDoubleValidator
        Function to ensure only floating point values are entered.
    old_value : float
        Previously stored value.
    diff : float
        Difference between maximum and minimum values when the box is
        locked.
    """

    def __init__(self, slot=None, editing=None):
        super().__init__()
        self.validator = QtGui.QDoubleValidator()
        self.validator.setRange(-np.inf, np.inf)
        self.validator.setDecimals(1000)
        self.old_value = None
        self.diff = None
        if slot and editing:
            # Separate slots for value changes and for completed edits.
            self.valueChanged.connect(slot)
            self.editingFinished.connect(editing)
        elif slot:
            self.valueChanged.connect(slot)
            self.editingFinished.connect(slot)
        self.setAlignment(QtCore.Qt.AlignRight)
        self.setFixedWidth(100)
        self.setKeyboardTracking(False)
        self.setDecimals(2)
        # Candidate mantissas for "nice" step sizes (1, 2, 5 pattern).
        self.steps = np.array([1, 2, 5, 10])
        self.app = QtWidgets.QApplication.instance()

    def validate(self, input_value, position):
        """Delegate validation to the floating-point validator."""
        return self.validator.validate(input_value, position)

    def setSingleStep(self, value):
        """Round the requested step to the nearest 1/2/5 decade value."""
        value = abs(value)
        if value == 0:
            stepsize = 0.01
        else:
            digits = math.floor(math.log10(value))
            multiplier = 10**digits
            stepsize = find_nearest(self.steps, value/multiplier) * multiplier
        super().setSingleStep(stepsize)

    def stepBy(self, steps):
        """Step the value, honoring a locked step size when `diff` is set."""
        if self.diff:
            self.setValue(self.value() + steps * self.diff)
        else:
            super().stepBy(steps)
        self.old_value = self.text()

    def valueFromText(self, text):
        """Convert entered text to a float, widening the range if needed."""
        value = float(text)
        if value > self.maximum():
            self.setMaximum(value)
        elif value < self.minimum():
            self.setMinimum(value)
        return value

    def textFromValue(self, value):
        """Format the value for display, widening large numbers."""
        if value > 1e6:
            return format_float(value)
        else:
            return format_float(value, width=8)

    def setValue(self, value):
        """Set the value, adjusting precision and range to accommodate it."""
        if value == 0:
            self.setDecimals(2)
        else:
            # Show enough decimals to represent small magnitudes.
            digits = math.floor(math.log10(abs(value)))
            if digits < 0:
                self.setDecimals(-digits)
            else:
                self.setDecimals(2)
        if value > self.maximum():
            self.setMaximum(value)
        elif value < self.minimum():
            self.setMinimum(value)
        super().setValue(value)
        self.repaint()

    def timerEvent(self, event):
        """Continue auto-repeat only while the left mouse button is held."""
        self.app.processEvents()
        if self.app.mouseButtons() & QtCore.Qt.LeftButton:
            super().timerEvent(event)
class NXSlider(QtWidgets.QSlider):
    """Horizontal slider over the range 0-100.

    Parameters
    ----------
    slot : function
        PyQt slot triggered by changing values
    move : bool
        True if the slot is triggered by moving the slider. Otherwise,
        it is only triggered on release.
    inverse : bool
        True if the reported value runs opposite to the slider position.
    """

    def __init__(self, slot=None, move=True, inverse=False):
        super().__init__(QtCore.Qt.Horizontal)
        self.setFocusPolicy(QtCore.Qt.NoFocus)
        self.setMinimumWidth(100)
        self.setRange(0, 100)
        self.setSingleStep(5)
        self.setTracking(True)
        self.inverse = inverse
        self.setInvertedAppearance(self.inverse)
        self.setValue(100 if self.inverse else 0)
        if slot:
            self.sliderReleased.connect(slot)
            if move:
                self.sliderMoved.connect(slot)

    def value(self):
        """Return the slider value, mirrored when `inverse` is set."""
        raw = super().value()
        return self.maximum() - raw if self.inverse else raw

    def setValue(self, value):
        """Set the slider value, mirrored when `inverse` is set."""
        target = int(value)
        if self.inverse:
            target = self.maximum() - target
        super().setValue(target)
class NXpatch:
    """Class for a draggable shape on the NXPlotView canvas."""

    # Class-level lock shared by all patches (reserved for drag arbitration).
    lock = None

    def __init__(self, shape, border_tol=0.1, resize=True, plotview=None):
        """Initialize the patch and add it to the plot axes.

        Parameters
        ----------
        shape : matplotlib.patches.Patch
            The Matplotlib patch to manage.
        border_tol : float, optional
            Fraction of the shape size treated as the resize border.
        resize : bool, optional
            True if dragging near the border resizes the shape.
        plotview : NXPlotView, optional
            Plot view to draw on; defaults to the active plot view.
        """
        if plotview:
            self.plotview = plotview
        else:
            from .plotview import get_plotview
            self.plotview = get_plotview()
        self.canvas = self.plotview.canvas
        self.shape = shape
        self.border_tol = border_tol
        self.press = None
        self.background = None
        self.allow_resize = resize
        self.plotview.ax.add_patch(self.shape)

    def __getattr__(self, name):
        """Return Matplotlib attributes if not defined in the class."""
        return getattr(self.shape, name)

    def connect(self):
        """Connect the Matplotlib mouse events needed for dragging."""
        # Deactivate normal plotview interaction while the patch is active.
        self.plotview.deactivate()
        self.cidpress = self.canvas.mpl_connect(
            'button_press_event', self.on_press)
        self.cidrelease = self.canvas.mpl_connect(
            'button_release_event', self.on_release)
        self.cidmotion = self.canvas.mpl_connect(
            'motion_notify_event', self.on_motion)

    def is_inside(self, event):
        """Return True if the mouse event occurred inside the shape."""
        if event.inaxes != self.shape.axes:
            return False
        contains, _ = self.shape.contains(event)
        if contains:
            return True
        else:
            return False

    def initialize(self, xp, yp):
        """Function to be overridden by shape sub-class."""

    def update(self, x, y):
        """Function to be overridden by shape sub-class."""

    def on_press(self, event):
        """Store coordinates on button press if over the object."""
        if not self.is_inside(event):
            self.press = None
            return
        self.press = self.initialize(event.xdata, event.ydata)
        self.canvas.draw()

    def on_motion(self, event):
        """Move the object if motion activated over the object."""
        if self.press is None:
            return
        if event.inaxes != self.shape.axes:
            return
        self.update(event.xdata, event.ydata)
        self.canvas.draw()

    def on_release(self, event):
        """Reset the data when the button is released."""
        if self.press is None:
            return
        self.press = None
        self.canvas.draw()

    def disconnect(self):
        """Disconnect all the stored connection ids."""
        self.canvas.mpl_disconnect(self.cidpress)
        self.canvas.mpl_disconnect(self.cidrelease)
        self.canvas.mpl_disconnect(self.cidmotion)
        self.plotview.activate()

    def remove(self):
        """Remove the patch from the plot view and the axes."""
        if self in self.plotview.shapes:
            self.plotview.shapes.remove(self)
        self.shape.remove()
        self.plotview.draw()

    def set_facecolor(self, color):
        """Set the fill color and redraw."""
        self.shape.set_facecolor(color)
        self.plotview.draw()

    def set_edgecolor(self, color):
        """Set the outline color and redraw."""
        self.shape.set_edgecolor(color)
        self.plotview.draw()

    def set_color(self, color):
        """Set both fill and outline colors and redraw."""
        self.shape.set_facecolor(color)
        self.shape.set_edgecolor(color)
        self.plotview.draw()

    def set_alpha(self, alpha):
        """Set the transparency and redraw."""
        self.shape.set_alpha(alpha)
        self.plotview.draw()

    def set_linestyle(self, linestyle):
        """Set the outline line style and redraw."""
        self.shape.set_linestyle(linestyle)
        self.plotview.draw()

    def set_linewidth(self, linewidth):
        """Set the outline line width and redraw."""
        self.shape.set_linewidth(linewidth)
        self.plotview.draw()
class NXcircle(NXpatch):
    """A draggable, resizable circle drawn on the plot canvas.

    The circle is implemented as a Matplotlib Ellipse whose height is
    recomputed from the pixel radius, presumably so it appears circular
    even when the x- and y-axis scales differ — confirm against usage.
    """

    def __init__(self, x, y, r, border_tol=0.1, resize=True, plotview=None,
                 **opts):
        x, y, r = float(x), float(y), float(r)
        shape = Ellipse((x, y), 2*r, 2*r, **opts)
        if 'linewidth' not in opts:
            shape.set_linewidth(1.0)
        if 'color' not in opts and 'facecolor' not in opts:
            shape.set_facecolor('r')
        super().__init__(shape, border_tol, resize, plotview)
        self.shape.set_label('Circle')
        self.circle = self.shape
        self.circle.height = self.height

    def __repr__(self):
        x, y = self.circle.center
        r = abs(self.circle.width) / 2
        return f'NXcircle({x:g}, {y:g}, {r:g})'

    @property
    def transform(self):
        """Data-to-pixel coordinate transform."""
        return self.plotview.ax.transData.transform

    @property
    def inverse_transform(self):
        """Pixel-to-data coordinate transform."""
        return self.plotview.ax.transData.inverted().transform

    @property
    def center(self):
        """Center of the circle in data coordinates."""
        return self.circle.center

    @property
    def radius(self):
        """Radius in data (x-axis) coordinates."""
        return abs(self.circle.width) / 2.0

    @property
    def width(self):
        """Diameter in data (x-axis) coordinates."""
        return abs(self.circle.width)

    @property
    def height(self):
        """Ellipse height in data units matching the pixel radius."""
        return 2 * (self.inverse_transform((0, self.pixel_radius)) -
                    self.inverse_transform((0, 0)))[1]

    @property
    def pixel_radius(self):
        """Radius expressed in screen pixels."""
        return (self.transform((self.radius, 0)) - self.transform((0, 0)))[0]

    def pixel_shift(self, x, y, x0, y0):
        """Return the pixel offset of (x, y) relative to (x0, y0)."""
        return tuple(self.transform((x, y)) - self.transform((x0, y0)))

    def radius_shift(self, x, y, xp, yp, x0, y0):
        """Return the data-space change in radius between two drag points."""
        xt, yt = self.pixel_shift(x, y, x0, y0)
        r = np.sqrt(xt**2 + yt**2)
        xt, yt = self.pixel_shift(xp, yp, x0, y0)
        r0 = np.sqrt(xt**2 + yt**2)
        return (self.inverse_transform((r, 0)) -
                self.inverse_transform((r0, 0)))[0]

    def set_center(self, x, y):
        """Move the circle center and redraw."""
        self.circle.center = x, y
        self.plotview.draw()

    def set_radius(self, radius):
        """Set the circle radius and redraw."""
        self.circle.width = 2.0 * radius
        self.circle.height = self.height
        self.plotview.draw()

    def initialize(self, xp, yp):
        """Record the drag start state; decide move vs. resize.

        Returns the press tuple stored by NXpatch.on_press.
        """
        x0, y0 = self.circle.center
        w0, h0 = self.width, self.height
        xt, yt = self.pixel_shift(xp, yp, x0, y0)
        rt = self.pixel_radius
        # Clicks near the rim (within border_tol of the radius) resize.
        if (self.allow_resize and
                (np.sqrt(xt**2 + yt**2) > rt * (1-self.border_tol))):
            expand = True
        else:
            expand = False
        return x0, y0, w0, h0, xp, yp, expand

    def update(self, x, y):
        """Resize or move the circle during a drag."""
        x0, y0, w0, h0, xp, yp, expand = self.press
        if expand:
            self.circle.width = self.width + \
                self.radius_shift(x, y, xp, yp, x0, y0)
            self.circle.height = self.height
        else:
            self.circle.center = (x0+x-xp, y0+y-yp)
class NXellipse(NXpatch):
    """A draggable, resizable ellipse drawn on the plot canvas."""

    def __init__(self, x, y, dx, dy, border_tol=0.2, resize=True,
                 plotview=None, **opts):
        shape = Ellipse((float(x), float(y)), dx, dy, **opts)
        if 'linewidth' not in opts:
            shape.set_linewidth(1.0)
        if 'color' not in opts and 'facecolor' not in opts:
            shape.set_facecolor('r')
        super().__init__(shape, border_tol, resize, plotview)
        self.shape.set_label('Ellipse')
        self.ellipse = self.shape

    def __repr__(self):
        x, y = self.ellipse.center
        w, h = self.ellipse.width, self.ellipse.height
        return f'NXellipse({x:g}, {y:g}, {w:g}, {h:g})'

    @property
    def center(self):
        """Center of the ellipse in data coordinates."""
        return self.ellipse.center

    @property
    def width(self):
        """Full width of the ellipse in data coordinates."""
        return self.ellipse.width

    @property
    def height(self):
        """Full height of the ellipse in data coordinates."""
        return self.ellipse.height

    def set_center(self, x, y):
        """Move the ellipse center and redraw."""
        self.ellipse.set_center((x, y))
        self.plotview.draw()

    def set_width(self, width):
        """Set the ellipse width and redraw."""
        self.ellipse.width = width
        self.plotview.draw()

    def set_height(self, height):
        """Set the ellipse height and redraw."""
        self.ellipse.height = height
        self.plotview.draw()

    def initialize(self, xp, yp):
        """Record the drag start state; decide move vs. resize.

        A press near any of the four axis end-points of the ellipse
        (within border_tol of the width/height) starts a resize.
        """
        x0, y0 = self.ellipse.center
        w0, h0 = self.ellipse.width, self.ellipse.height
        bt = self.border_tol
        if (self.allow_resize and
            ((abs(x0-xp) < bt*w0 and
              abs(y0+np.true_divide(h0, 2)-yp) < bt*h0) or
             (abs(x0-xp) < bt*w0
              and abs(y0-np.true_divide(h0, 2)-yp) < bt*h0) or
             (abs(y0-yp) < bt*h0
              and abs(x0+np.true_divide(w0, 2)-xp) < bt*w0) or
             (abs(y0-yp) < bt*h0
              and abs(x0-np.true_divide(w0, 2)-xp) < bt*w0))):
            expand = True
        else:
            expand = False
        return x0, y0, w0, h0, xp, yp, expand

    def update(self, x, y):
        """Resize (one axis at a time) or move the ellipse during a drag."""
        x0, y0, w0, h0, xp, yp, expand = self.press
        dx, dy = (x-xp, y-yp)
        bt = self.border_tol
        if expand:
            # Each branch matches one of the four end-points tested in
            # initialize() and adjusts only that axis.
            if (abs(x0-xp) < bt*w0
                    and abs(y0+np.true_divide(h0, 2)-yp) < bt*h0):
                self.ellipse.height = h0 + dy
            elif (abs(x0-xp) < bt*w0
                    and abs(y0-np.true_divide(h0, 2)-yp) < bt*h0):
                self.ellipse.height = h0 - dy
            elif (abs(y0-yp) < bt*h0
                    and abs(x0+np.true_divide(w0, 2)-xp) < bt*w0):
                self.ellipse.width = w0 + dx
            elif (abs(y0-yp) < bt*h0
                    and abs(x0-np.true_divide(w0, 2)-xp) < bt*w0):
                self.ellipse.width = w0 - dx
        else:
            self.ellipse.set_center((x0+dx, y0+dy))
class NXrectangle(NXpatch):
    """A draggable, resizable rectangle drawn on the plot canvas."""

    def __init__(self, x, y, dx, dy, border_tol=0.1, resize=True,
                 plotview=None, **opts):
        shape = Rectangle((float(x), float(y)), float(dx), float(dy), **opts)
        if 'linewidth' not in opts:
            shape.set_linewidth(1.0)
        if 'color' not in opts and 'facecolor' not in opts:
            shape.set_facecolor('r')
        super().__init__(shape, border_tol, resize, plotview)
        self.shape.set_label('Rectangle')
        self.rectangle = self.shape

    def __repr__(self):
        x, y = self.rectangle.xy
        w, h = self.rectangle.get_width(), self.rectangle.get_height()
        return f'NXrectangle({x:g}, {y:g}, {w:g}, {h:g})'

    @property
    def width(self):
        """Rectangle width in data coordinates."""
        return self.rectangle.get_width()

    @property
    def height(self):
        """Rectangle height in data coordinates."""
        return self.rectangle.get_height()

    @property
    def xy(self):
        """Lower-left corner of the rectangle."""
        return self.rectangle.xy

    def set_bounds(self, x, y, dx, dy):
        """Set position and size in one call and redraw."""
        self.rectangle.set_bounds(x, y, dx, dy)
        self.plotview.draw()

    def set_left(self, left):
        """Move the left edge, keeping the width fixed, and redraw."""
        self.rectangle.set_x(left)
        self.plotview.draw()

    def set_right(self, right):
        """Move the right edge, keeping the width fixed, and redraw."""
        self.rectangle.set_x(right - self.rectangle.get_width())
        self.plotview.draw()

    def set_bottom(self, bottom):
        """Move the bottom edge, keeping the height fixed, and redraw."""
        self.rectangle.set_y(bottom)
        self.plotview.draw()

    def set_top(self, top):
        """Move the top edge, keeping the height fixed, and redraw."""
        self.rectangle.set_y(top - self.rectangle.get_height())
        self.plotview.draw()

    def set_width(self, width):
        """Set the rectangle width and redraw."""
        self.rectangle.set_width(width)
        self.plotview.draw()

    def set_height(self, height):
        """Set the rectangle height and redraw."""
        self.rectangle.set_height(height)
        self.plotview.draw()

    def initialize(self, xp, yp):
        """Record the drag start state; decide move vs. resize.

        A press within border_tol of any edge starts a resize.
        """
        x0, y0 = self.rectangle.xy
        w0, h0 = self.rectangle.get_width(), self.rectangle.get_height()
        bt = self.border_tol
        if (self.allow_resize and
            (abs(x0+np.true_divide(w0, 2)-xp) > np.true_divide(w0, 2)-bt*w0 or
             abs(y0+np.true_divide(h0, 2)-yp) > np.true_divide(h0, 2)-bt*h0)):
            expand = True
        else:
            expand = False
        return x0, y0, w0, h0, xp, yp, expand

    def update(self, x, y):
        """Resize the pressed edge, or move the whole rectangle."""
        x0, y0, w0, h0, xp, yp, expand = self.press
        dx, dy = (x-xp, y-yp)
        bt = self.border_tol
        if expand:
            # Determine which edge was grabbed and drag only that edge.
            if abs(x0 - xp) < bt * w0:
                self.rectangle.set_x(x0+dx)
                self.rectangle.set_width(w0-dx)
            elif abs(x0 + w0 - xp) < bt * w0:
                self.rectangle.set_width(w0+dx)
            elif abs(y0 - yp) < bt * h0:
                self.rectangle.set_y(y0+dy)
                self.rectangle.set_height(h0-dy)
            elif abs(y0 + h0 - yp) < bt * h0:
                self.rectangle.set_height(h0+dy)
        else:
            self.rectangle.set_x(x0+dx)
            self.rectangle.set_y(y0+dy)
class NXpolygon(NXpatch):
    """A draggable (but not resizable) polygon drawn on the plot canvas."""

    def __init__(self, xy, closed=True, plotview=None, **opts):
        # NOTE(review): `closed` is passed positionally; newer Matplotlib
        # versions may require it as a keyword — confirm supported versions.
        shape = Polygon(xy, closed, **opts)
        if 'linewidth' not in opts:
            shape.set_linewidth(1.0)
        if 'color' not in opts and 'facecolor' not in opts:
            shape.set_facecolor('r')
        super().__init__(shape, resize=False, plotview=plotview)
        self.shape.set_label('Polygon')
        self.polygon = self.shape

    def __repr__(self):
        xy = self.polygon.xy
        v = xy.shape[0] - 1
        return f'NXpolygon({xy[0][0]:g}, {xy[0][1]:g}, vertices={v})'

    @property
    def xy(self):
        """Vertex array of the polygon."""
        return self.polygon.xy

    def initialize(self, xp, yp):
        """Record the vertices and press point at drag start."""
        xy0 = self.polygon.xy
        return xy0, xp, yp

    def update(self, x, y):
        """Translate every vertex by the drag offset."""
        xy0, xp, yp = self.press
        dxy = (x-xp, y-yp)
        self.polygon.set_xy(xy0+dxy)
/0x-web3-5.0.0a5.tar.gz/0x-web3-5.0.0a5/web3/gas_strategies/time_based.py | import collections
import math
import operator
from eth_utils import (
to_tuple,
)
from web3._utils.math import (
percentile,
)
from web3._utils.toolz import (
curry,
groupby,
sliding_window,
)
from web3.exceptions import (
InsufficientData,
ValidationError,
)
# Per-miner summary of recently mined blocks: how many blocks the miner
# produced and the minimum/20th-percentile gas prices it accepted.
MinerData = collections.namedtuple(
    'MinerData',
    ['miner', 'num_blocks', 'min_gas_price', 'low_percentile_gas_price'])
# Probability (`prob`) that a transaction priced at `gas_price` is accepted.
Probability = collections.namedtuple('Probability', ['gas_price', 'prob'])
def _get_avg_block_time(w3, sample_size):
    """Return the mean seconds-per-block over up to `sample_size` blocks.

    The sample is clamped to the current chain height; raises
    ValidationError if no blocks are available to sample.
    """
    newest = w3.eth.getBlock('latest')
    span = min(sample_size, newest['number'])
    if span == 0:
        raise ValidationError('Constrained sample size is 0')
    oldest = w3.eth.getBlock(newest['number'] - span)
    elapsed = newest['timestamp'] - oldest['timestamp']
    return elapsed / span
def _get_raw_miner_data(w3, sample_size):
    """Yield (miner, block hash, gas price) for each transaction in up to
    `sample_size` recent blocks, newest first."""
    block = w3.eth.getBlock('latest', full_transactions=True)
    for txn in block['transactions']:
        yield (block['miner'], block['hash'], txn['gasPrice'])
    for _ in range(sample_size - 1):
        if block['number'] == 0:
            break
        # We intentionally trace backwards using parent hashes rather than
        # block numbers to make caching the data easier to implement.
        block = w3.eth.getBlock(block['parentHash'], full_transactions=True)
        for txn in block['transactions']:
            yield (block['miner'], block['hash'], txn['gasPrice'])
def _aggregate_miner_data(raw_data):
    """Yield one MinerData summary per miner from raw (miner, hash, price)
    tuples produced by _get_raw_miner_data."""
    data_by_miner = groupby(0, raw_data)
    for miner, miner_data in data_by_miner.items():
        # NOTE(review): converting gas_prices to a set deduplicates repeated
        # prices before the percentile is taken — confirm this weighting is
        # intentional.
        _, block_hashes, gas_prices = map(set, zip(*miner_data))
        try:
            price_percentile = percentile(gas_prices, percentile=20)
        except InsufficientData:
            # Too few samples for a percentile; fall back to the minimum.
            price_percentile = min(gas_prices)
        yield MinerData(
            miner,
            len(set(block_hashes)),
            min(gas_prices),
            price_percentile)
@to_tuple
def _compute_probabilities(miner_data, wait_blocks, sample_size):
    """
    Computes the probabilities that a txn will be accepted at each of the gas
    prices accepted by the miners.

    Yields Probability tuples sorted from highest gas price (highest
    acceptance probability) to lowest.
    """
    miner_data_by_price = tuple(sorted(
        miner_data,
        key=operator.attrgetter('low_percentile_gas_price'),
        reverse=True,
    ))
    for idx in range(len(miner_data_by_price)):
        low_percentile_gas_price = miner_data_by_price[idx].low_percentile_gas_price
        # Miners from idx onward all accept this price or lower.
        num_blocks_accepting_price = sum(m.num_blocks for m in miner_data_by_price[idx:])
        # Chance a single block is mined by a miner that rejects this price.
        inv_prob_per_block = (sample_size - num_blocks_accepting_price) / sample_size
        probability_accepted = 1 - inv_prob_per_block ** wait_blocks
        yield Probability(low_percentile_gas_price, probability_accepted)
def _compute_gas_price(probabilities, desired_probability):
    """
    Given a sorted range of ``Probability`` named-tuples returns a gas price
    computed based on where the ``desired_probability`` would fall within the
    range.

    :param probabilities: A sequence of `Probability` named-tuples sorted in
        reverse order (highest probability first).
    :param desired_probability: A floating point representation of the desired
        probability. (e.g. ``85% -> 0.85``)
    """
    highest = probabilities[0]
    lowest = probabilities[-1]
    # Clamp to the end-points when the target lies outside the sampled range.
    if desired_probability >= highest.prob:
        return highest.gas_price
    if desired_probability <= lowest.prob:
        return lowest.gas_price
    # Walk consecutive pairs until the window containing the target is found.
    for left, right in zip(probabilities, probabilities[1:]):
        if desired_probability < right.prob:
            continue
        if desired_probability > left.prob:
            # Unreachable with correctly sorted input: it would mean the
            # containing window was already passed.
            raise Exception('Invariant')
        # Linearly interpolate the gas price within this window.
        position = ((desired_probability - right.prob) /
                    (left.prob - right.prob))
        gas_window_size = left.gas_price - right.gas_price
        return int(math.ceil(right.gas_price + gas_window_size * position))
    # The clamping above guarantees the target lies inside the range, so
    # falling off the loop is only possible if the input was not sorted.
    raise Exception('Invariant')
@curry
def construct_time_based_gas_price_strategy(max_wait_seconds,
                                            sample_size=120,
                                            probability=98):
    """
    A gas pricing strategy that uses recently mined block data to derive a gas
    price for which a transaction is likely to be mined within X seconds with
    probability P.

    :param max_wait_seconds: The desired maximum number of seconds the
        transaction should take to mine.
    :param sample_size: The number of recent blocks to sample
    :param probability: An integer representation of the desired probability
        that the transaction will be mined within ``max_wait_seconds``. 0 means
        0% and 100 means 100%.
    """
    def time_based_gas_price_strategy(web3, transaction_params):
        # Convert the time budget into a block-count budget using the
        # observed average block time.
        avg_block_time = _get_avg_block_time(web3, sample_size=sample_size)
        wait_blocks = int(math.ceil(max_wait_seconds / avg_block_time))
        raw_miner_data = _get_raw_miner_data(web3, sample_size=sample_size)
        miner_data = _aggregate_miner_data(raw_miner_data)
        probabilities = _compute_probabilities(
            miner_data,
            wait_blocks=wait_blocks,
            sample_size=sample_size,
        )
        gas_price = _compute_gas_price(probabilities, probability / 100)
        return gas_price
    return time_based_gas_price_strategy
# Preset strategies built from the curried constructor above.
# fast: mine within 1 minute
fast_gas_price_strategy = construct_time_based_gas_price_strategy(
    max_wait_seconds=60,
    sample_size=120,
)
# medium: mine within 10 minutes
medium_gas_price_strategy = construct_time_based_gas_price_strategy(
    max_wait_seconds=600,
    sample_size=120,
)
# slow: mine within 1 hour (60 minutes)
slow_gas_price_strategy = construct_time_based_gas_price_strategy(
    max_wait_seconds=60 * 60,
    sample_size=120,
)
# glacial: mine within the next 24 hours (uses a larger block sample).
glacial_gas_price_strategy = construct_time_based_gas_price_strategy(
    max_wait_seconds=24 * 60 * 60,
    sample_size=720,
)
/GuildWars2_API_Client-0.6.0-py3-none-any.whl/gw2api/objects/api_version_2/__init__.py | from gw2api.objects.base_object import BaseAPIObject
class BaseAPIv2Object(BaseAPIObject):
    """Extends the base API handler to automatically handle pagination and
    id parameters."""

    def get(self, **kwargs):
        """Issue the request and return the decoded JSON payload."""
        params = {name: kwargs.get(name)
                  for name in ('id', 'ids', 'url', 'page', 'page_size')}
        return super().get(**params).json()
class Account(BaseAPIv2Object):
    """
    This returns information about the player's account.
    Authenticated Endpoint.
    """
    pass  # request handling inherited from BaseAPIv2Object
class AccountAchievements(BaseAPIv2Object):
    """
    This returns information about the player's progress on achievements.
    Authenticated Endpoint.
    """
    pass  # request handling inherited from BaseAPIv2Object
class AccountBank(BaseAPIv2Object):
    """
    This returns the items stored in a player's vault.
    Authenticated Endpoint.
    """
    pass  # request handling inherited from BaseAPIv2Object
class AccountBuildStorage(BaseAPIv2Object):
"""
This returns the templates stored in a player's build storage.
Authenticated Endpoint.
"""
pass
class AccountDailyCrafting(BaseAPIv2Object):
"""
This returns information about time-gated recipes that have been crafted by the account since daily-reset.
Authenticated Endpoint.
"""
pass
class AccountDungeons(BaseAPIv2Object):
"""
This resource returns the dungeons completed since daily dungeon reset.
Authenticated Endpoint.
"""
pass
class AccountDyes(BaseAPIv2Object):
"""
This returns the unlocked dyes of the account.
Authenticated Endpoint.
"""
pass
class AccountEmote(BaseAPIv2Object):
"""
This returns the player's unlocked emotes.
Authenticated Endpoint.
"""
pass
class AccountFinishers(BaseAPIv2Object):
"""
This returns information about finishers that are unlocked for an account.
Authenticated Endpoint.
"""
pass
class AccountGliders(BaseAPIv2Object):
"""
This returns information about gliders that are unlocked for an account.
Authenticated Endpoint.
"""
pass
class AccountHomeCats(BaseAPIv2Object):
"""
This returns information about unlocked home instance cats.
Authenticated Endpoint.
"""
pass
class AccountHomeNodes(BaseAPIv2Object):
"""
This returns information about unlocked home instance nodes.
Authenticated Endpoint.
"""
pass
class AccountInventory(BaseAPIv2Object):
"""
This returns the shared inventory slots in an account.
Authenticated Endpoint.
"""
pass
class AccountLuck(BaseAPIv2Object):
"""
This returns the total amount of luck consumed on an account.
Authenticated Endpoint.
"""
pass
class AccountMailCarriers(BaseAPIv2Object):
"""
This returns information about mail carriers that are unlocked for an account.
Authenticated Endpoint.
"""
pass
class AccountMapChests(BaseAPIv2Object):
"""
This returns information about Hero's Choice Chests acquired by the account since daily-reset.
Authenticated Endpoint.
"""
pass
class AccountMasteries(BaseAPIv2Object):
"""
This returns information about masteries that are unlocked for an account.
Authenticated Endpoint.
"""
pass
class AccountMasteryPoints(BaseAPIv2Object):
"""
This returns information about the total amount of masteries that are unlocked for an account.
Authenticated Endpoint.
"""
pass
class AccountMaterials(BaseAPIv2Object):
"""
This returns the materials stored in a player's vault.
Authenticated Endpoint.
"""
pass
class AccountMinis(BaseAPIv2Object):
"""
This returns the unlocked miniatures of the account.
Authenticated Endpoint.
"""
pass
class AccountMountsSkins(BaseAPIv2Object):
"""
This returns the unlocked mount skins of the account.
Authenticated Endpoint.
"""
pass
class AccountMountsTypes(BaseAPIv2Object):
"""
This returns the unlocked mounts of the account.
Authenticated Endpoint.
"""
pass
class AccountNovelties(BaseAPIv2Object):
"""
This returns information about novelties that are unlocked for an account.
Authenticated Endpoint.
"""
pass
class AccountOutfits(BaseAPIv2Object):
"""
This returns information about outfits that are unlocked for an account.
Authenticated Endpoint.
"""
pass
class AccountPvPHeroes(BaseAPIv2Object):
"""
This returns information about pvp heroes that are unlocked for an account.
Authenticated Endpoint.
"""
pass
class AccountRaids(BaseAPIv2Object):
"""
This returns the completed raid encounters since weekly raid reset.
Authenticated Endpoint.
"""
pass
class AccountRecipes(BaseAPIv2Object):
"""
This returns information about recipes that are unlocked for an account.
Authenticated Endpoint.
"""
pass
class AccountSkins(BaseAPIv2Object):
"""
This returns the unlocked skins of the account.
Authenticated Endpoint.
"""
pass
class AccountTitles(BaseAPIv2Object):
"""
This returns information about titles that are unlocked for an account.
Authenticated Endpoint.
"""
pass
class AccountWallet(BaseAPIv2Object):
"""
This returns the currencies of the account.
Authenticated Endpoint.
"""
pass
class AccountWorldBosses(BaseAPIv2Object):
"""
This returns information about which world bosses have been killed by the account since daily-reset.
Authenticated Endpoint.
"""
pass
class Achievements(BaseAPIv2Object):
"""
This returns all achievements in the game, including localized names and icons.
"""
pass
class AchievementsCategories(BaseAPIv2Object):
"""
This returns all the categories for achievements.
"""
pass
class AchievementsDaily(BaseAPIv2Object):
"""
This returns the current set of daily achievements.
"""
pass
class AchievementsDailyTomorrow(BaseAPIv2Object):
"""
This returns the next set of daily achievements.
"""
pass
class AchievementsGroups(BaseAPIv2Object):
"""
This returns all the top-level groups for achievements.
"""
pass
class BackstoryAnswers(BaseAPIv2Object):
"""
This returns information about the Biography answers that are in the game.
"""
pass
class BackstoryQuestions(BaseAPIv2Object):
"""
This returns information about the Biography questions that are in the game.
"""
pass
class Build(BaseAPIv2Object):
"""
This returns the current build id of the game.
"""
pass
class Characters(BaseAPIv2Object):
"""
This returns information about characters attached to a specific account.
Authenticated Endpoint.
"""
pass
class _CharacterSubResource(BaseAPIv2Object):
    """Shared base for the per-character endpoints below.

    Each endpoint's URL template contains a ':id' placeholder; get()
    substitutes the character id into it.  This replaces sixteen
    byte-identical copies of the same get() implementation.
    """

    def get(self, char_id, **kwargs):
        """
        This appends the 'id' to the endpoint and then passes it to the parent get() function.
        Args:
            char_id: string, the id of the character to add to the endpoint.
        Returns:
            The decoded JSON response.
        """
        endpoint_url = self._build_endpoint_base_url()
        try:
            # str.replace raises TypeError when char_id is not a string
            # (e.g. None); fall back to the un-parameterised endpoint then.
            endpoint_url = endpoint_url.replace(':id', char_id)
        except TypeError:
            return super().get(**kwargs)
        return super().get(url=endpoint_url, **kwargs)


class CharactersBackstory(_CharacterSubResource):
    """Return the backstory of a character on the account. Authenticated endpoint."""
    pass


class CharactersBuildTabs(_CharacterSubResource):
    """Return the build template tabs of a character. Authenticated endpoint."""
    pass


class CharactersBuildTabsActive(_CharacterSubResource):
    """Return only the build template tabs flagged is_active. Authenticated endpoint."""
    pass


class CharactersCore(_CharacterSubResource):
    """Return core information about a character on the account. Authenticated endpoint."""
    pass


class CharactersCrafting(_CharacterSubResource):
    """Return the crafting disciplines available to a character. Authenticated endpoint."""
    pass


class CharactersEquipment(_CharacterSubResource):
    """Return the equipment on a character. Authenticated endpoint."""
    pass


class CharactersEquipmentTabs(_CharacterSubResource):
    """Return the equipment template tabs of a character. Authenticated endpoint."""
    pass


class CharactersEquipmentTabsActive(_CharacterSubResource):
    """Return only the equipment template tabs flagged is_active. Authenticated endpoint."""
    pass


class CharactersHeroPoints(_CharacterSubResource):
    """Return the hero points obtained by a character. Authenticated endpoint."""
    pass


class CharactersInventory(_CharacterSubResource):
    """Return the inventory of a character. Authenticated endpoint."""
    pass


class CharactersQuests(_CharacterSubResource):
    """Return the quests selected by a character. Authenticated endpoint."""
    pass


class CharactersRecipes(_CharacterSubResource):
    """Return the recipes the given character can use. Authenticated endpoint."""
    pass


class CharactersSab(_CharacterSubResource):
    """Return Super Adventure Box information for a character. Authenticated endpoint."""
    pass


class CharactersSkills(_CharacterSubResource):
    """Return the skills equipped on a character. Authenticated endpoint."""
    pass


class CharactersSpecialization(_CharacterSubResource):
    """Return the specializations equipped on a character. Authenticated endpoint."""
    pass


class CharactersTraining(_CharacterSubResource):
    """Return the training of a character. Authenticated endpoint."""
    pass
class Colors(BaseAPIv2Object):
    """Return all dye colors in the game, including localized names and color component information."""
    pass
class CommerceDelivery(BaseAPIv2Object):
    """Return the items and coins currently available for pickup on this account. Authenticated endpoint."""
    pass
class CommerceExchangeCoins(BaseAPIv2Object):
    """Returns the current coins to gems exchange rate"""

    def get(self, quantity):
        """Fetch the coins -> gems exchange rate.

        Args:
            quantity: The number of coins to convert to gems

        Returns:
            The JSON response
        """
        url = '{base}?quantity={qty}'.format(
            base=self._build_endpoint_base_url(), qty=quantity)
        return super().get(url=url)
class CommerceExchangeGems(BaseAPIv2Object):
    """Returns the current gems to coins exchange rate"""

    def get(self, quantity):
        """Fetch the gems -> coins exchange rate.

        Args:
            quantity: The number of gems to convert to coins

        Returns:
            The JSON response
        """
        url = '{base}?quantity={qty}'.format(
            base=self._build_endpoint_base_url(), qty=quantity)
        return super().get(url=url)
class CommerceListings(BaseAPIv2Object):
    """Return current buy and sell listings from the trading post."""
    pass
class CommercePrices(BaseAPIv2Object):
    """Return current aggregated buy and sell listing information from the trading post."""
    pass
class CommerceTransactions(BaseAPIv2Object):
    """Returns information on an account's past and current trading post transactions"""
    @property
    def session(self):
        """The HTTP session shared by this endpoint and all nested endpoints."""
        return self._session
    @session.setter
    def session(self, val):
        # Propagate the session to every nested endpoint object so that
        # e.g. `transactions.history.buys` authenticates the same way.
        self._session = val
        self.history.session = val
        self.current.session = val
        self.history.buys.session = val
        self.history.sells.session = val
        self.current.buys.session = val
        self.current.sells.session = val
    def __init__(self, object_type):
        """
        Initializes a CommerceTransactions API object. See BaseAPIObject's __init__ documentation
        :param object_type: String indicating what type of object to
                            interface with (i.e. 'guild'). Primarily
                            acts as the relative path to the base URL
        :raises ValueError: In the event that either a `Session` object
                            or `object_type` are not set.
        """
        self._session = None
        # create second-level endpoints
        self.history = BaseAPIv2Object(object_type + "/history")
        self.current = BaseAPIv2Object(object_type + "/current")
        # create third-level endpoints (built from the second level's paths)
        self.history.buys = BaseAPIv2Object(self.history.object_type + "/buys")
        self.history.sells = BaseAPIv2Object(self.history.object_type + "/sells")
        self.current.buys = BaseAPIv2Object(self.current.object_type + "/buys")
        self.current.sells = BaseAPIv2Object(self.current.object_type + "/sells")
        # Nested endpoints must exist before the base init runs so that the
        # session setter can reach them — NOTE(review): confirm base __init__
        # assigns session; can't tell from here.
        super().__init__(object_type)
class Continents(BaseAPIv2Object):
    """
    This returns static information about the
    continents, floors, regions, maps, sectors, points of interest and tasks.
    """
    def _validate_kwargs(self, **kwargs):
        """Validates the keyword arguments.
        Since the continents endpoint is hierarchical, each level is dependent
        on its predecessor.
        Hence, the validation checks for the leaf supplied and walks up the
        tree to see if
        1. any higher level is missing
        2. any higher level supplies multiple ids
        In either case, a KeyError is raised.
        Special care is taken of the 'id' and 'ids' keywords, as those
        are synonymous for continents.
        Args:
            kwargs: The keyword arguments to validate.
        Raises:
            KeyError: if any needed level is missing, or any non-leaf level
                provides multiple IDs.
        """
        # `level` is resolved from the enclosing loop at call time
        # (late-binding closure), so the error names the level being checked.
        def raise_for_non_int(value):
            try:
                int(str(value))
            except ValueError:
                raise KeyError('too many ids supplied for {}'.format(level))
        # Ordered leaf -> root; enumerate() position tells us which levels
        # sit above the one the caller supplied.
        levels = ['sectors', 'maps', 'regions', 'floors', 'continents']
        for i, current_level in enumerate(levels):
            if current_level in kwargs:
                for level in reversed(levels[i+1:]): # All higher levels
                    if level not in kwargs: # Check if level is supplied
                        if level != 'continents': # Backwards compatibility for ids
                            raise KeyError('Please provide the {} key.'.format(level))
                        else:
                            if 'id' not in kwargs:
                                raise KeyError('Please provide the continents key.')
                    else: # Check if no higher level supplies multiple IDs
                        if level != 'continents':
                            raise_for_non_int(kwargs.get(level))
                        else: # Backwards compatibility for ids
                            value = kwargs.get(level) or kwargs.get('ids')
                            raise_for_non_int(value)
    def get(self, **kwargs):
        """Gets the continents resource.
        This resource is slightly different than other API resources, hence
        its differs slightly. Instead of using the id and ids attributes,
        this resource can walk along several levels:
            continents, floors, regions, maps, sectors
        For each layer, individual (single) IDs can be specified.
        For the leaf layer, i.e. the last specified layer, multiple IDs
        can be specified.
        If one layer is specified, all previous layers must be specified, too.
        For example, if specifying regions=38, then floors and continents need
        to be supplied, too.
        Args:
            kwargs: Can be any combination of
                - continents
                - floors
                - regions
                - maps
                - sectors
                With values being either single ints (ids), lists of ints,
                or strings. A special case is 'all', which can be used
                to get a list of all ids in a subresource.
        Returns:
            The JSON response.
        Raises:
            KeyError: if the validation of the keyword arguments fails.
        """
        request_url = self._build_endpoint_base_url()
        self._validate_kwargs(**kwargs)
        # 'id'/'ids' are accepted as synonyms for 'continents' (backwards
        # compatibility with the generic endpoint interface).
        _id = kwargs.get('id')
        ids = kwargs.get('ids')
        continents = kwargs.get('continents') or ids or _id
        floors = kwargs.get('floors')
        regions = kwargs.get('regions')
        maps = kwargs.get('maps')
        sectors = kwargs.get('sectors')
        # Render one path ('/7') or query ('?ids=1,2') fragment for a
        # level's id(s); 'all' maps to the bare collection URL.
        def id_string(value_or_values):
            if value_or_values == 'all':
                return ''
            if isinstance(value_or_values, str):
                if ',' in value_or_values:
                    return '?ids=' + value_or_values
                return '/' + value_or_values
            try:
                return '?ids=' + ','.join(map(str, value_or_values))
            except TypeError: # single values are not iterable
                return '/' + str(value_or_values)
        # Since we validate before, we just have to build the url in order
        # not nested
        if continents:
            request_url += id_string(continents)
        if floors:
            request_url += '/floors' + id_string(floors)
        if regions:
            request_url += '/regions' + id_string(regions)
        if maps:
            request_url += '/maps' + id_string(maps)
        if sectors:
            request_url += '/sectors' + id_string(sectors)
        return super().get(url=request_url)
class CreateSubToken(BaseAPIv2Object):
    """
    This allows for the creation of Subtokens; essentially API keys with a more limited set of permissions,
    which can be used as a substitute for them.
    Authenticated Endpoint.
    """

    def get(self, expire, permissions, **kwargs):
        """Request a subtoken limited to *permissions*, valid until *expire*.

        Returns None if it fails to serialise the expire date, permissions,
        or urls.  (Fixes: removed a stray class-level `pass` after this
        method and the trailing '&' previously left on the query string.)

        Args:
            expire: datetime, the date and time when the subtoken expires
            permissions: list, collection of permissions for subtoken to inherit
            **kwargs:
                urls: list, Endpoints that will be accessible using this Subtoken.
        """
        query = []
        try:
            # AttributeError signals that expire is not datetime-like.
            query.append(f'expire={expire.isoformat()}')
        except AttributeError:
            print('expire param must be of type datetime.')
            return None
        try:
            query.append('permissions=' + ','.join([str(_) for _ in permissions]))
        except TypeError:
            print("Could not add permissions because the given permissions argument is not an iterable.")
            return None
        urls = kwargs.get('urls')
        if urls:
            try:
                query.append('urls=' + ','.join([str(_) for _ in urls]))
            except TypeError:
                print("Could not add urls because the given urls argument is not an iterable.")
                return None
        endpoint_url = self._build_endpoint_base_url() + '?' + '&'.join(query)
        return super().get(url=endpoint_url, **kwargs)
class Currencies(BaseAPIv2Object):
    """Return a list of the currencies contained in the account wallet."""
    pass
class DailyCrafting(BaseAPIv2Object):
    """Return information about time-gated recipes that can be crafted in-game."""
    pass
class Dungeons(BaseAPIv2Object):
    """Return details about each dungeon and its associated paths."""
    pass
class EmblemBackgrounds(BaseAPIv2Object):
    """Return image resources needed to render the background of guild emblems."""
    pass
class EmblemForegrounds(BaseAPIv2Object):
    """Return image resources needed to render the foreground of guild emblems."""
    pass
class Emotes(BaseAPIv2Object):
    """Return information about unlockable emotes."""
    pass
class Files(BaseAPIv2Object):
    """Return commonly requested in-game assets for API-derived applications."""
    pass
class Finishers(BaseAPIv2Object):
    """Return information about finishers that are available in-game."""
    pass
class Gliders(BaseAPIv2Object):
    """Return information about gliders that are available in-game."""
    pass
class _GuildSubResource(BaseAPIv2Object):
    """Shared base for guild-scoped endpoints whose URL contains ':id'.

    Replaces eight byte-identical copies of the same get() implementation.
    """

    def get(self, guild_id, **kwargs):
        """
        This appends the 'id' to the endpoint and then passes it to the parent get() function.
        Args:
            guild_id: string, the id of the guild to add to the endpoint.
        """
        try:
            # str.replace raises TypeError when guild_id is not a string;
            # fall back to the un-parameterised endpoint in that case.
            endpoint_url = self._build_endpoint_base_url().replace(':id', guild_id)
        except TypeError:
            return super().get(**kwargs)
        return super().get(url=endpoint_url, **kwargs)


class GuildId(_GuildSubResource):
    """Return core details about a given guild; more fields are included when
    the request carries an API key of a guild member/leader with the guilds
    scope. Authenticated endpoint."""
    pass


class GuildIdLog(BaseAPIv2Object):
    """Return events from a guild's log. Requires the guilds scope and the
    guild leader's API key. Authenticated endpoint."""

    def get(self, guild_id, **kwargs):
        """
        This appends the 'id' to the endpoint and then passes it to the parent get() function.
        Args:
            guild_id: string, the id of the guild to add to the endpoint.
            **kwargs:
                since: int, the id to use as a ceiling for return values.
        """
        try:
            endpoint_url = self._build_endpoint_base_url().replace(':id', guild_id)
            since = kwargs.get('since')
            if since:
                # BUGFIX: previously emitted '?since==<id>' (doubled '='),
                # which the API does not parse as a since-filter.
                endpoint_url += f'?since={since}'
        except TypeError:
            return super().get(**kwargs)
        return super().get(url=endpoint_url, **kwargs)


class GuildIdMembers(_GuildSubResource):
    """Return the members of a guild. Requires the guilds scope and the guild
    leader's API key. Authenticated endpoint."""
    pass


class GuildIdRanks(_GuildSubResource):
    """Return the ranks of a guild. Requires the guilds scope and the guild
    leader's API key. Authenticated endpoint."""
    pass


class GuildIdStash(_GuildSubResource):
    """Return the items in a guild's vault. Requires the guilds scope and the
    guild leader's API key. Authenticated endpoint."""
    pass


class GuildIdStorage(_GuildSubResource):
    """Return the items in a guild's storage. Requires the guilds scope and
    the guild leader's API key. Authenticated endpoint."""
    pass


class GuildIdTeams(_GuildSubResource):
    """Return the teams in a guild. Requires the guilds scope and the guild
    leader's API key. Authenticated endpoint."""
    pass


class GuildIdTreasury(_GuildSubResource):
    """Return the items in a guild's treasury. Requires the guilds scope and
    the guild leader's API key. Authenticated endpoint."""
    pass


class GuildIdUpgrades(_GuildSubResource):
    """Return the guild's upgrades. Requires the guilds scope and the guild
    leader's API key. Authenticated endpoint."""
    pass
class GuildPermissions(BaseAPIv2Object):
    """Return information about all guild permissions."""
    pass
class GuildSearch(BaseAPIv2Object):
    """
    This returns an array, each value being a string of the guild id for the given name.
    """

    def get(self, name, **kwargs):
        """Look up guild ids by guild name.

        Args:
            name: string, the name of the guild to add to the endpoint.
        """
        from urllib.parse import quote

        try:
            # Percent-encode the name: guild names containing characters such
            # as '&' or '#' would otherwise corrupt the query string.
            endpoint_url = self._build_endpoint_base_url() + '?name=' + quote(str(name))
        except TypeError:
            return super().get(**kwargs)
        return super().get(url=endpoint_url, **kwargs)
class GuildUpgrades(BaseAPIv2Object):
    """Return all available Guild Hall upgrades, including scribe decorations."""
    pass
class HomeCats(BaseAPIv2Object):
    """Return an array of ids for cats available for the home instance."""
    pass
class HomeNodes(BaseAPIv2Object):
    """Return an array of ids for all currently available home instance nodes."""
    pass
class Items(BaseAPIv2Object):
    """Return information about items that were discovered by players in the game."""
    pass
class ItemStats(BaseAPIv2Object):
    """Return information about itemstats for items that are in the game."""
    pass
class Legends(BaseAPIv2Object):
    """Return information about the Revenant Legends that are in the game."""
    pass
class MailCarriers(BaseAPIv2Object):
    """Return information about mail carriers that are available in-game."""
    pass
class MapChests(BaseAPIv2Object):
    """Return information about Hero's Choice Chests that can be acquired once a day in-game."""
    pass
class Maps(BaseAPIv2Object):
    """Return details about maps in the game, including floor and translation data
    for converting between world coordinates and map coordinates."""
    pass
class Masteries(BaseAPIv2Object):
    """Return information about masteries that are available in-game."""
    pass
class Materials(BaseAPIv2Object):
    """Return information about the categories in material storage."""
    pass
class Minis(BaseAPIv2Object):
    """Return all minis in the game."""
    pass
class MountsSkins(BaseAPIv2Object):
    """Return information about mount skins that are available in-game."""
    pass
class MountsTypes(BaseAPIv2Object):
    """Return information about mount types that are available in-game."""
    pass
class Novelties(BaseAPIv2Object):
    """Return information about novelties that are available in-game."""
    pass
class Outfits(BaseAPIv2Object):
    """Return information about the outfits that are in the game."""
    pass
class Pets(BaseAPIv2Object):
    """Return information about the Ranger pets that are in the game."""
    pass
class Professions(BaseAPIv2Object):
    """Return information about professions that are in the game."""
    pass
class PvPAmulets(BaseAPIv2Object):
    """Return information about the PvP amulets that are in the game."""
    pass
class PvPGames(BaseAPIv2Object):
    """Return past PvP matches the player has participated in. Authenticated endpoint."""
    pass
class PvPHeroes(BaseAPIv2Object):
    """Return information about pvp heroes that are available in-game."""
    pass
class PvPRanks(BaseAPIv2Object):
    """Return information about the available ranks in the Player versus Player game mode."""
    pass
class PvPSeasons(BaseAPIv2Object):
    """Return information about League seasons."""
    pass
class PvPSeasonsLeaderboards(BaseAPIv2Object):
    """
    This returns information about League season leaderboards for either NA or EU.
    """
    def get(self, season_id, **kwargs):
        """
        This appends the season id to the endpoint and then passes it to the
        parent get() function. Appends board and region path segments if they
        are supplied.

        Args:
            season_id: string, the id of the PvP season to add to the endpoint.
            **kwargs
                board: string, type of leaderboard to get. I.E. 'legendary'
                region: string, the region to get leaderboard of. I.E. 'eu'/'na'

        Returns:
            The parent get() result for the built URL, or the plain endpoint
            result when season_id cannot be substituted (non-string id).
        """
        try:
            endpoint_url = self._build_endpoint_base_url().replace(':id', season_id)
        except TypeError:
            # season_id was not a string; fall back to the raw endpoint.
            return super().get(**kwargs)
        # NOTE: the original code wrapped these appends in try/except
        # TypeError, but f-string interpolation stringifies any value, so
        # those handlers were unreachable dead code and have been removed.
        board = kwargs.get('board')
        if board:
            endpoint_url += f'/{board}'
        region = kwargs.get('region')
        if region:
            endpoint_url += f'/{region}'
        return super().get(url=endpoint_url, **kwargs)
# Thin endpoint wrappers: behaviour comes entirely from BaseAPIv2Object.
class PvPStandings(BaseAPIv2Object):
    """
    This returns information about player pips.
    Authenticated Endpoint.
    """
    pass


class PvPStats(BaseAPIv2Object):
    """
    This returns information about wins and losses in the account's PvP matches.
    Authenticated Endpoint.
    """
    pass


class Quaggans(BaseAPIv2Object):
    """
    This returns quaggan images.
    """
    pass


class Quests(BaseAPIv2Object):
    """
    This resource returns information about Story Journal missions within the personal story and Living World.
    """
    pass


class Races(BaseAPIv2Object):
    """
    This returns information about the different playable races.
    """
    pass


class Raids(BaseAPIv2Object):
    """
    This returns details about each raid and its associated wings.
    """
    pass


class Recipes(BaseAPIv2Object):
    """
    This returns information about recipes that were discovered by players in the game.
    """
    pass
class RecipesSearch(BaseAPIv2Object):
    """
    This allows searching for recipe.
    To get additional information about the returned recipes, use the recipes endpoint.
    """
    def get(self, **kwargs):
        """Search for recipes by ingredient ('input') or product ('output') item id.

        When either key is present in **kwargs ('input' takes precedence),
        it is appended to the endpoint as a query-string parameter.

        Args:
            **kwargs: dict, dictionary of key word arguments.

        Returns:
            List of recipe ids that match the query.
        """
        for param in ('input', 'output'):
            if param in kwargs:
                item_id = kwargs.get(param)
                query_url = '{base}?{param}={item_id}'.format(
                    base=self._build_endpoint_base_url(),
                    param=param,
                    item_id=item_id)
                return super().get(url=query_url)
        # Fallback to let the official API handle the error cases
        return super().get(**kwargs)
# Thin endpoint wrappers: behaviour comes entirely from BaseAPIv2Object.
class Skills(BaseAPIv2Object):
    """
    This returns information about skills usable by players in the game.
    """
    pass


class Skins(BaseAPIv2Object):
    """
    This returns information about skins that were discovered by players in the game.
    """
    pass


class Specializations(BaseAPIv2Object):
    """
    This returns information on currently released specializations.
    """
    pass


class Stories(BaseAPIv2Object):
    """
    This returns information about the Story Journal stories; including the personal story and Living World.
    """
    pass


class StoriesSeasons(BaseAPIv2Object):
    """
    This returns information about the Story Journal story seasons; including the personal story and Living World.
    """
    pass


class Titles(BaseAPIv2Object):
    """
    This returns information about the titles that are in the game.
    """
    pass


class Tokeninfo(BaseAPIv2Object):
    """
    This resource returns information about the supplied API key.
    Authenticated Endpoint.
    """
    pass


class Traits(BaseAPIv2Object):
    """
    This returns information about specific traits which are contained within specializations.
    """
    pass


class WorldBosses(BaseAPIv2Object):
    """
    This returns information about scheduled World bosses in Core Tyria that reward boss chests
    that can be opened once a day in-game.
    """
    pass


class Worlds(BaseAPIv2Object):
    """
    This returns information about the available worlds, or servers.
    """
    pass


class WvwAbilities(BaseAPIv2Object):
    """
    This returns information about the available abilities in the World versus World game mode.
    """
    pass


class WvwMatches(BaseAPIv2Object):
    """
    This returns either an array of matches or further details about a specified match.
    """
    pass


class WvwMatchesOverview(BaseAPIv2Object):
    """
    This returns either an array of match overviews or further details about a specified match.
    """
    pass


class WvwMatchesScores(BaseAPIv2Object):
    """
    This returns either an array of match scores or further details about a specified match.
    """
    pass


class WvwMatchesStats(BaseAPIv2Object):
    """
    This returns either an array of match stats or further details about a specified match.
    """
    pass
class WvwMatchesStatsGuilds(BaseAPIv2Object):
    """
    This returns specific details pertaining to guilds and their WVW stats.
    """
    def get(self, match_id, guild_id, **kwargs):
        """Fetch WvW stats for one guild within one match.

        Both ids are substituted into the endpoint URL before delegating to
        the parent get(). If either id is not a string the un-substituted
        endpoint is queried instead.

        Args:
            match_id: string, the ID of the match.
            guild_id: string, the id of the guild to add to the endpoint.
        """
        url = self._build_endpoint_base_url()
        try:
            for placeholder, value in ((':id', match_id), (':guild_id', guild_id)):
                url = url.replace(placeholder, value)
        except TypeError:
            print('Failed to set match id or guild id. Ensure they are both strings.')
            return super().get(**kwargs)
        return super().get(url=url, **kwargs)
class WvwMatchesStatsTeamsKDR(BaseAPIv2Object):
    """
    This returns specific details pertaining to teams and their KDR.
    """
    def get(self, match_id, team_id, **kwargs):
        """Fetch the top-KDR stats for one team within one match.

        Both ids are substituted into the endpoint URL before delegating to
        the parent get(). If either id is not a string the un-substituted
        endpoint is queried instead.

        Args:
            match_id: string, the ID of the match.
            team_id: string, the id of the team to add to the endpoint.
        """
        url = self._build_endpoint_base_url()
        try:
            for placeholder, value in ((':id', match_id), (':team', team_id)):
                url = url.replace(placeholder, value)
        except TypeError:
            print('Failed to set match id or team id. Ensure they are both strings.')
            return super().get(**kwargs)
        return super().get(url=url, **kwargs)
class WvwMatchesStatsTeamsKills(BaseAPIv2Object):
    """
    This returns specific details pertaining to teams and their kills.
    """
    def get(self, match_id, team_id, **kwargs):
        """Fetch the top-kills stats for one team within one match.

        Both ids are substituted into the endpoint URL before delegating to
        the parent get(). If either id is not a string the un-substituted
        endpoint is queried instead.

        Args:
            match_id: string, the ID of the match.
            team_id: string, the id of the team to add to the endpoint.
        """
        url = self._build_endpoint_base_url()
        try:
            for placeholder, value in ((':id', match_id), (':team', team_id)):
                url = url.replace(placeholder, value)
        except TypeError:
            print('Failed to set match id or team id. Ensure they are both strings.')
            return super().get(**kwargs)
        return super().get(url=url, **kwargs)
# Thin endpoint wrappers: behaviour comes entirely from BaseAPIv2Object.
class WvwObjectives(BaseAPIv2Object):
    """
    This returns details about World vs. World objectives such as camps, towers, and keeps.
    """
    pass


class WvwRanks(BaseAPIv2Object):
    """
    This returns information about the available ranks in the World versus World game mode.
    """
    pass


class WvwUpgrades(BaseAPIv2Object):
    """
    This returns details about available World vs. World upgrades for objectives such as camps, towers, and keeps.
    """
    pass
# One ready-made wrapper instance per API v2 endpoint.  The constructor
# argument is the endpoint path; ':id'-style placeholders are substituted
# by the corresponding class's get() override.
API_OBJECTS = [Account('account'),
               AccountAchievements('account/achievements'),
               AccountBank('account/bank'),
               AccountBuildStorage('account/buildstorage'),
               AccountDailyCrafting('account/dailycrafting'),
               AccountDungeons('account/dungeons'),
               AccountDyes('account/dyes'),
               AccountEmote('account/emotes'),
               AccountFinishers('account/finishers'),
               AccountGliders('account/gliders'),
               AccountHomeCats('account/home/cats'),
               AccountHomeNodes('account/home/nodes'),
               AccountInventory('account/inventory'),
               AccountLuck('account/luck'),
               AccountMailCarriers('account/mailcarriers'),
               AccountMapChests('account/mapchests'),
               AccountMasteries('account/masteries'),
               AccountMasteryPoints('account/mastery/points'),
               AccountMaterials('account/materials'),
               AccountMinis('account/minis'),
               AccountMountsSkins('account/mounts/skins'),
               AccountMountsTypes('account/mounts/types'),
               AccountNovelties('account/novelties'),
               AccountOutfits('account/outfits'),
               AccountPvPHeroes('account/pvp/heroes'),
               AccountRaids('account/raids'),
               AccountRecipes('account/recipes'),
               AccountSkins('account/skins'),
               AccountTitles('account/titles'),
               AccountWallet('account/wallet'),
               AccountWorldBosses('account/worldbosses'),
               Achievements('achievements'),
               AchievementsCategories('achievements/categories'),
               AchievementsDaily('achievements/daily'),
               AchievementsDailyTomorrow('achievements/daily/tomorrow'),
               AchievementsGroups('achievements/groups'),
               BackstoryAnswers('backstory/answers'),
               BackstoryQuestions('backstory/questions'),
               Build('build'),
               Characters('characters'),
               CharactersBackstory('characters/:id/backstory'),
               CharactersBuildTabs('characters/:id/buildtabs'),
               CharactersBuildTabsActive('characters/:id/buildtabs/active'),
               CharactersCore('characters/:id/core'),
               CharactersCrafting('characters/:id/crafting'),
               CharactersEquipment('characters/:id/equipment'),
               CharactersEquipmentTabs('characters/:id/equipmenttabs'),
               CharactersEquipmentTabsActive('characters/:id/equipmenttabs/active'),
               CharactersHeroPoints('characters/:id/heropoints'),
               CharactersInventory('characters/:id/inventory'),
               CharactersQuests('characters/:id/quests'),
               CharactersRecipes('characters/:id/recipes'),
               CharactersSab('characters/:id/sab'),
               CharactersSkills('characters/:id/skills'),
               CharactersSpecialization('characters/:id/specializations'),
               CharactersTraining('characters/:id/training'),
               Colors('colors'),
               CommerceDelivery('commerce/delivery'),
               CommerceExchangeCoins('commerce/exchange/coins'),
               CommerceExchangeGems('commerce/exchange/gems'),
               CommerceListings('commerce/listings'),
               CommercePrices('commerce/prices'),
               CommerceTransactions('commerce/transactions'),
               Continents('continents'),
               CreateSubToken('createsubtoken'),
               Currencies('currencies'),
               DailyCrafting('dailycrafting'),
               Dungeons('dungeons'),
               EmblemBackgrounds('emblem/backgrounds'),
               EmblemForegrounds('emblem/foregrounds'),
               Emotes('emotes'),
               Files('files'),
               Finishers('finishers'),
               Gliders('gliders'),
               GuildId('guild/:id'),
               GuildIdLog('guild/:id/log'),
               GuildIdMembers('guild/:id/members'),
               GuildIdRanks('guild/:id/ranks'),
               GuildIdStash('guild/:id/stash'),
               GuildIdStorage('guild/:id/storage'),
               GuildIdTeams('guild/:id/teams'),
               GuildIdTreasury('guild/:id/treasury'),
               GuildIdUpgrades('guild/:id/upgrades'),
               GuildPermissions('guild/permissions'),
               GuildSearch('guild/search'),
               GuildUpgrades('guild/upgrades'),
               HomeCats('home/cats'),
               HomeNodes('home/nodes'),
               Items('items'),
               ItemStats('itemstats'),
               Legends('legends'),
               MailCarriers('mailcarriers'),
               MapChests('mapchests'),
               Maps('maps'),
               Masteries('masteries'),
               Materials('materials'),
               Minis('minis'),
               MountsSkins('mounts/skins'),
               MountsTypes('mounts/types'),
               Novelties('novelties'),
               Outfits('outfits'),
               Pets('pets'),
               Professions('professions'),
               PvPAmulets('pvp/amulets'),
               PvPGames('pvp/games'),
               PvPHeroes('pvp/heroes'),
               PvPRanks('pvp/ranks'),
               PvPSeasons('pvp/seasons'),
               PvPSeasonsLeaderboards('pvp/seasons/:id/leaderboards'),
               PvPStandings('pvp/standings'),
               PvPStats('pvp/stats'),
               Quaggans('quaggans'),
               Quests('quests'),
               Races('races'),
               Raids('raids'),
               Recipes('recipes'),
               RecipesSearch('recipes/search'),
               Skills('skills'),
               Skins('skins'),
               Specializations('specializations'),
               Stories('stories'),
               StoriesSeasons('stories/seasons'),
               Titles('titles'),
               Tokeninfo('tokeninfo'),
               Traits('traits'),
               WorldBosses('worldbosses'),
               Worlds('worlds'),
               WvwAbilities('wvw/abilities'),
               WvwMatches('wvw/matches'),
               WvwMatchesOverview('wvw/matches/overview'),
               WvwMatchesScores('wvw/matches/scores'),
               WvwMatchesStats('wvw/matches/stats'),
               WvwMatchesStatsGuilds('wvw/matches/stats/:id/guilds/:guild_id'),
               WvwMatchesStatsTeamsKDR('wvw/matches/stats/:id/teams/:team/top/kdr'),
               WvwMatchesStatsTeamsKills('wvw/matches/stats/:id/teams/:team/top/kills'),
               WvwObjectives('wvw/objectives'),
               WvwRanks('wvw/ranks'),
               WvwUpgrades('wvw/upgrades')]
/CherryPy-18.8.0.tar.gz/CherryPy-18.8.0/cherrypy/lib/sessions.py | import sys
import datetime
import os
import time
import threading
import binascii
import pickle
import zc.lockfile
import cherrypy
from cherrypy.lib import httputil
from cherrypy.lib import locking
from cherrypy.lib import is_iterator
# Sentinel used by Session.pop() to tell "no default supplied" apart from an
# explicit default of None.
missing = object()
class Session(object):
    """A CherryPy dict-like Session object (one per request).

    Storage backends subclass this and implement the private hooks
    ``_exists``, ``_load``, ``_save(expiration_time)`` and ``_delete``,
    plus ``acquire_lock``/``release_lock``.
    """

    # Backing field for the ``id`` property.
    _id = None

    id_observers = None
    "A list of callbacks to which to pass new id's."

    @property
    def id(self):
        """Return the current session id."""
        return self._id

    @id.setter
    def id(self, value):
        # Setting the id also notifies every registered observer
        # (e.g. the cookie updater installed by init()).
        self._id = value
        for o in self.id_observers:
            o(value)

    timeout = 60
    'Number of minutes after which to delete session data.'

    locked = False
    """
    If True, this session instance has exclusive read/write access
    to session data."""

    loaded = False
    """
    If True, data has been retrieved from storage. This should happen
    automatically on the first attempt to access session data."""

    clean_thread = None
    'Class-level Monitor which calls self.clean_up.'

    clean_freq = 5
    'The poll rate for expired session cleanup in minutes.'

    originalid = None
    'The session id passed by the client. May be missing or unsafe.'

    missing = False
    'True if the session requested by the client did not exist.'

    regenerated = False
    """
    True if the application called session.regenerate(). This is not set by
    internal calls to regenerate the session id."""

    debug = False
    'If True, log debug information.'

    # --------------------- Session management methods --------------------- #

    def __init__(self, id=None, **kwargs):
        """Create a session, reusing *id* when it still exists in storage.

        Extra keyword arguments (e.g. ``timeout``, ``clean_freq``,
        ``debug``, backend settings) are bound directly onto the instance.
        """
        self.id_observers = []
        self._data = {}

        # Bind tool/backend configuration as instance attributes.
        for k, v in kwargs.items():
            setattr(self, k, v)

        self.originalid = id
        self.missing = False
        if id is None:
            if self.debug:
                cherrypy.log('No id given; making a new one', 'TOOLS.SESSIONS')
            self._regenerate()
        else:
            self.id = id
            if self._exists():
                if self.debug:
                    cherrypy.log('Set id to %s.' % id, 'TOOLS.SESSIONS')
            else:
                if self.debug:
                    cherrypy.log('Expired or malicious session %r; '
                                 'making a new one' % id, 'TOOLS.SESSIONS')
                # Expired or malicious session. Make a new one.
                # See https://github.com/cherrypy/cherrypy/issues/709.
                self.id = None
                self.missing = True
                self._regenerate()

    def now(self):
        """Generate the session specific concept of 'now'.

        Other session providers can override this to use alternative,
        possibly timezone aware, versions of 'now'.
        """
        return datetime.datetime.now()

    def regenerate(self):
        """Replace the current session (with a new id)."""
        self.regenerated = True
        self._regenerate()

    def _regenerate(self):
        """Delete the current session (if any) and pick a fresh, unused id.

        Preserves the lock state: if the old session was locked, the new
        one is locked as well.
        """
        if self.id is not None:
            if self.debug:
                cherrypy.log(
                    'Deleting the existing session %r before '
                    'regeneration.' % self.id,
                    'TOOLS.SESSIONS')
            self.delete()

        old_session_was_locked = self.locked
        if old_session_was_locked:
            self.release_lock()
            if self.debug:
                cherrypy.log('Old lock released.', 'TOOLS.SESSIONS')

        self.id = None
        while self.id is None:
            self.id = self.generate_id()
            # Assert that the generated id is not already stored.
            if self._exists():
                self.id = None
        if self.debug:
            cherrypy.log('Set id to generated %s.' % self.id,
                         'TOOLS.SESSIONS')

        if old_session_was_locked:
            self.acquire_lock()
            if self.debug:
                cherrypy.log('Regenerated lock acquired.', 'TOOLS.SESSIONS')

    def clean_up(self):
        """Clean up expired sessions.

        No-op in the base class; storage backends override this.
        """
        pass

    def generate_id(self):
        """Return a new session id."""
        # 20 bytes from os.urandom -> 40 hex characters.
        return binascii.hexlify(os.urandom(20)).decode('ascii')

    def save(self):
        """Save session data."""
        try:
            # If session data has never been loaded then it's never been
            # accessed: no need to save it
            if self.loaded:
                t = datetime.timedelta(seconds=self.timeout * 60)
                expiration_time = self.now() + t
                if self.debug:
                    cherrypy.log('Saving session %r with expiry %s' %
                                 (self.id, expiration_time),
                                 'TOOLS.SESSIONS')
                self._save(expiration_time)
            else:
                if self.debug:
                    cherrypy.log(
                        'Skipping save of session %r (no session loaded).' %
                        self.id, 'TOOLS.SESSIONS')
        finally:
            if self.locked:
                # Always release the lock if the user didn't release it
                self.release_lock()
                if self.debug:
                    cherrypy.log('Lock released after save.', 'TOOLS.SESSIONS')

    def load(self):
        """Copy stored session data into this session instance."""
        data = self._load()
        # data is either None or a tuple (session_data, expiration_time)
        if data is None or data[1] < self.now():
            if self.debug:
                cherrypy.log('Expired session %r, flushing data.' % self.id,
                             'TOOLS.SESSIONS')
            self._data = {}
        else:
            if self.debug:
                cherrypy.log('Data loaded for session %r.' % self.id,
                             'TOOLS.SESSIONS')
            self._data = data[0]
        self.loaded = True

        # Stick the clean_thread in the class, not the instance.
        # The instances are created and destroyed per-request.
        cls = self.__class__
        if self.clean_freq and not cls.clean_thread:
            # clean_up is an instancemethod and not a classmethod,
            # so that tool config can be accessed inside the method.
            t = cherrypy.process.plugins.Monitor(
                cherrypy.engine, self.clean_up, self.clean_freq * 60,
                name='Session cleanup')
            t.subscribe()
            cls.clean_thread = t
            t.start()
            if self.debug:
                cherrypy.log('Started cleanup thread.', 'TOOLS.SESSIONS')

    def delete(self):
        """Delete stored session data."""
        self._delete()
        if self.debug:
            cherrypy.log('Deleted session %s.' % self.id,
                         'TOOLS.SESSIONS')

    # -------------------- Application accessor methods -------------------- #
    # Every accessor lazily loads the stored data on first touch.

    def __getitem__(self, key):
        if not self.loaded:
            self.load()
        return self._data[key]

    def __setitem__(self, key, value):
        if not self.loaded:
            self.load()
        self._data[key] = value

    def __delitem__(self, key):
        if not self.loaded:
            self.load()
        del self._data[key]

    def pop(self, key, default=missing):
        """Remove the specified key and return the corresponding value.

        If key is not found, default is returned if given,
        otherwise KeyError is raised.
        """
        if not self.loaded:
            self.load()
        # 'missing' is a module-level sentinel: it lets callers pass
        # default=None and still get the no-default KeyError behavior only
        # when they omit the argument entirely.
        if default is missing:
            return self._data.pop(key)
        else:
            return self._data.pop(key, default)

    def __contains__(self, key):
        if not self.loaded:
            self.load()
        return key in self._data

    def get(self, key, default=None):
        """D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None."""
        if not self.loaded:
            self.load()
        return self._data.get(key, default)

    def update(self, d):
        """D.update(E) -> None. Update D from E: for k in E: D[k] = E[k]."""
        if not self.loaded:
            self.load()
        self._data.update(d)

    def setdefault(self, key, default=None):
        """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D."""
        if not self.loaded:
            self.load()
        return self._data.setdefault(key, default)

    def clear(self):
        """D.clear() -> None. Remove all items from D."""
        if not self.loaded:
            self.load()
        self._data.clear()

    def keys(self):
        """D.keys() -> list of D's keys."""
        if not self.loaded:
            self.load()
        return self._data.keys()

    def items(self):
        """D.items() -> list of D's (key, value) pairs, as 2-tuples."""
        if not self.loaded:
            self.load()
        return self._data.items()

    def values(self):
        """D.values() -> list of D's values."""
        if not self.loaded:
            self.load()
        return self._data.values()
class RamSession(Session):
    """In-process session backend: data and locks live in class-level dicts."""

    # Class-level objects. Don't rebind these!
    cache = {}
    locks = {}

    def clean_up(self):
        """Clean up expired sessions."""
        now = self.now()

        # Work on a snapshot; the real dicts may change under us.
        for session_id, (data, expiration_time) in self.cache.copy().items():
            if expiration_time > now:
                continue
            try:
                del self.cache[session_id]
            except KeyError:
                # Another thread already removed it.
                pass
            try:
                if self.locks[session_id].acquire(blocking=False):
                    self.locks.pop(session_id).release()
            except KeyError:
                pass

        # added to remove obsolete lock objects
        for session_id in list(self.locks):
            is_orphaned = (
                session_id not in self.cache
                and self.locks[session_id].acquire(blocking=False)
            )
            if is_orphaned:
                self.locks.pop(session_id).release()

    def _exists(self):
        return self.id in self.cache

    def _load(self):
        return self.cache.get(self.id)

    def _save(self, expiration_time):
        self.cache[self.id] = (self._data, expiration_time)

    def _delete(self):
        self.cache.pop(self.id, None)

    def acquire_lock(self):
        """Acquire an exclusive lock on the currently-loaded session data."""
        self.locked = True
        lock = self.locks.setdefault(self.id, threading.RLock())
        lock.acquire()

    def release_lock(self):
        """Release the lock on the currently-loaded session data."""
        lock = self.locks[self.id]
        lock.release()
        self.locked = False

    def __len__(self):
        """Return the number of active sessions."""
        return len(self.cache)
class FileSession(Session):
    """Implementation of the File backend for sessions

    storage_path
        The folder where session data will be saved. Each session
        will be saved as pickle.dump(data, expiration_time) in its own file;
        the filename will be self.SESSION_PREFIX + self.id.

    lock_timeout
        A timedelta or numeric seconds indicating how long
        to block acquiring a lock. If None (default), acquiring a lock
        will block indefinitely.
    """

    SESSION_PREFIX = 'session-'
    LOCK_SUFFIX = '.lock'
    pickle_protocol = pickle.HIGHEST_PROTOCOL

    def __init__(self, id=None, **kwargs):
        """Create a file-backed session.

        Raises KeyError when 'storage_path' is not supplied and ValueError
        when 'lock_timeout' is neither numeric, a timedelta, nor None.
        """
        # The 'storage_path' arg is required for file-based sessions.
        kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])
        kwargs.setdefault('lock_timeout', None)

        Session.__init__(self, id=id, **kwargs)

        # validate self.lock_timeout
        if isinstance(self.lock_timeout, (int, float)):
            self.lock_timeout = datetime.timedelta(seconds=self.lock_timeout)
        if not isinstance(self.lock_timeout, (datetime.timedelta, type(None))):
            raise ValueError(
                'Lock timeout must be numeric seconds or a timedelta instance.'
            )

    @classmethod
    def setup(cls, **kwargs):
        """Set up the storage system for file-based sessions.

        This should only be called once per process; this will be done
        automatically when using sessions.init (as the built-in Tool does).
        """
        # The 'storage_path' arg is required for file-based sessions.
        kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])

        for k, v in kwargs.items():
            setattr(cls, k, v)

    def _get_file_path(self):
        # Reject ids (e.g. containing '..') whose resolved path would escape
        # storage_path -- path traversal via the session cookie.
        f = os.path.join(self.storage_path, self.SESSION_PREFIX + self.id)
        if not os.path.abspath(f).startswith(self.storage_path):
            raise cherrypy.HTTPError(400, 'Invalid session id in cookie.')
        return f

    def _exists(self):
        path = self._get_file_path()
        return os.path.exists(path)

    def _load(self, path=None):
        """Unpickle (data, expiration_time) from the session file.

        Returns None when the file is missing, unreadable, or truncated.
        """
        assert self.locked, ('The session load without being locked. '
                             "Check your tools' priority levels.")
        if path is None:
            path = self._get_file_path()
        try:
            with open(path, 'rb') as f:
                return pickle.load(f)
        except (IOError, EOFError):
            e = sys.exc_info()[1]
            if self.debug:
                cherrypy.log('Error loading the session pickle: %s' %
                             e, 'TOOLS.SESSIONS')
            return None

    def _save(self, expiration_time):
        assert self.locked, ('The session was saved without being locked. '
                             "Check your tools' priority levels.")
        with open(self._get_file_path(), 'wb') as f:
            pickle.dump((self._data, expiration_time), f, self.pickle_protocol)

    def _delete(self):
        assert self.locked, ('The session deletion without being locked. '
                             "Check your tools' priority levels.")
        try:
            os.unlink(self._get_file_path())
        except OSError:
            # A missing file means the session is already gone.
            pass

    def acquire_lock(self, path=None):
        """Acquire an exclusive lock on the currently-loaded session data."""
        if path is None:
            path = self._get_file_path()
        path += self.LOCK_SUFFIX
        checker = locking.LockChecker(self.id, self.lock_timeout)
        # Poll for the OS-level lock file until acquired or timed out.
        while not checker.expired():
            try:
                self.lock = zc.lockfile.LockFile(path)
            except zc.lockfile.LockError:
                time.sleep(0.1)
            else:
                break
        # NOTE(review): if the timeout expires without ever acquiring the
        # file lock, self.locked is still set to True and self.lock keeps
        # its previous value -- confirm callers tolerate this.
        self.locked = True
        if self.debug:
            cherrypy.log('Lock acquired.', 'TOOLS.SESSIONS')

    def release_lock(self, path=None):
        """Release the lock on the currently-loaded session data."""
        # The 'path' argument is accepted for symmetry with acquire_lock()
        # but is unused; closing the LockFile releases the OS lock.
        self.lock.close()
        self.locked = False

    def clean_up(self):
        """Clean up expired sessions."""
        now = self.now()
        # Iterate over all session files in self.storage_path
        for fname in os.listdir(self.storage_path):
            have_session = (
                fname.startswith(self.SESSION_PREFIX)
                and not fname.endswith(self.LOCK_SUFFIX)
            )
            if have_session:
                # We have a session file: lock and load it and check
                # if it's expired. If it fails, nevermind.
                path = os.path.join(self.storage_path, fname)
                self.acquire_lock(path)
                if self.debug:
                    # This is a bit of a hack, since we're calling clean_up
                    # on the first instance rather than the entire class,
                    # so depending on whether you have "debug" set on the
                    # path of the first session called, this may not run.
                    cherrypy.log('Cleanup lock acquired.', 'TOOLS.SESSIONS')

                try:
                    contents = self._load(path)
                    # _load returns None on IOError
                    if contents is not None:
                        data, expiration_time = contents
                        if expiration_time < now:
                            # Session expired: deleting it
                            os.unlink(path)
                finally:
                    self.release_lock(path)

    def __len__(self):
        """Return the number of active sessions."""
        return len([fname for fname in os.listdir(self.storage_path)
                    if (fname.startswith(self.SESSION_PREFIX) and
                        not fname.endswith(self.LOCK_SUFFIX))])
class MemcachedSession(Session):
    """Session backend storing (data, expiration_time) tuples in memcached."""

    # The most popular memcached client for Python isn't thread-safe.
    # Wrap all .get and .set operations in a single lock.
    mc_lock = threading.RLock()

    # This is a separate set of locks per session id.
    locks = {}

    servers = ['localhost:11211']

    @classmethod
    def setup(cls, **kwargs):
        """Set up the storage system for memcached-based sessions.

        This should only be called once per process; this will be done
        automatically when using sessions.init (as the built-in Tool does).
        """
        for k, v in kwargs.items():
            setattr(cls, k, v)

        import memcache
        cls.cache = memcache.Client(cls.servers)

    def _exists(self):
        with self.mc_lock:
            return bool(self.cache.get(self.id))

    def _load(self):
        with self.mc_lock:
            return self.cache.get(self.id)

    def _save(self, expiration_time):
        # Send the expiration time as "Unix time" (seconds since 1/1/1970)
        td = int(time.mktime(expiration_time.timetuple()))
        with self.mc_lock:
            stored = self.cache.set(self.id, (self._data, expiration_time), td)
            if not stored:
                raise AssertionError(
                    'Session data for id %r not set.' % self.id)

    def _delete(self):
        self.cache.delete(self.id)

    def acquire_lock(self):
        """Acquire an exclusive lock on the currently-loaded session data."""
        self.locked = True
        self.locks.setdefault(self.id, threading.RLock()).acquire()
        if self.debug:
            cherrypy.log('Lock acquired.', 'TOOLS.SESSIONS')

    def release_lock(self):
        """Release the lock on the currently-loaded session data."""
        self.locks[self.id].release()
        self.locked = False

    def __len__(self):
        """Return the number of active sessions."""
        raise NotImplementedError
# Hook functions (for CherryPy tools)
def save():
    """Save any changed session data."""

    if not hasattr(cherrypy.serving, 'session'):
        return
    request = cherrypy.serving.request
    response = cherrypy.serving.response

    # Guard against running twice
    if hasattr(request, '_sessionsaved'):
        return
    request._sessionsaved = True

    if not response.stream:
        # The body is not being streamed: save now so the session lock
        # can be released before the response goes out.
        if is_iterator(response.body):
            response.collapse_body()
        cherrypy.session.save()
    else:
        # The body is being streamed: the data can only be saved *after*
        # the response has been written out.
        request.hooks.attach('on_end_request', cherrypy.session.save)


save.failsafe = True
def close():
    """Close the session object for this request."""
    sess = getattr(cherrypy.serving, 'session', None)
    still_locked = getattr(sess, 'locked', False)
    if not still_locked:
        return
    # The request ended with the session still locked: release it here.
    sess.release_lock()
    if sess.debug:
        cherrypy.log('Lock released on close.', 'TOOLS.SESSIONS')


close.failsafe = True
close.priority = 90
def init(storage_type=None, path=None, path_header=None, name='session_id',
         timeout=60, domain=None, secure=False, clean_freq=5,
         persistent=True, httponly=False, debug=False,
         # Py27 compat
         # *, storage_class=RamSession,
         **kwargs):
    """Initialize session object (using cookies).

    storage_class
        The Session subclass to use. Defaults to RamSession.

    storage_type
        (deprecated)
        One of 'ram', 'file', memcached'. This will be
        used to look up the corresponding class in cherrypy.lib.sessions
        globals. For example, 'file' will use the FileSession class.

    path
        The 'path' value to stick in the response cookie metadata.

    path_header
        If 'path' is None (the default), then the response
        cookie 'path' will be pulled from request.headers[path_header].

    name
        The name of the cookie.

    timeout
        The expiration timeout (in minutes) for the stored session data.
        If 'persistent' is True (the default), this is also the timeout
        for the cookie.

    domain
        The cookie domain.

    secure
        If False (the default) the cookie 'secure' value will not
        be set. If True, the cookie 'secure' value will be set (to 1).

    clean_freq (minutes)
        The poll rate for expired session cleanup.

    persistent
        If True (the default), the 'timeout' argument will be used
        to expire the cookie. If False, the cookie will not have an expiry,
        and the cookie will be a "session cookie" which expires when the
        browser is closed.

    httponly
        If False (the default) the cookie 'httponly' value will not be set.
        If True, the cookie 'httponly' value will be set (to 1).

    Any additional kwargs will be bound to the new Session instance,
    and may be specific to the storage type. See the subclass of Session
    you're using for more information.
    """
    # Py27 compat
    storage_class = kwargs.pop('storage_class', RamSession)
    request = cherrypy.serving.request

    # Guard against running twice
    if hasattr(request, '_session_init_flag'):
        return
    request._session_init_flag = True

    # Check if request came with a session ID
    id = None
    if name in request.cookie:
        id = request.cookie[name].value
        if debug:
            cherrypy.log('ID obtained from request.cookie: %r' % id,
                         'TOOLS.SESSIONS')

    first_time = not hasattr(cherrypy, 'session')

    if storage_type:
        if first_time:
            msg = 'storage_type is deprecated. Supply storage_class instead'
            cherrypy.log(msg)
        # Resolve e.g. 'file' -> FileSession from this module's globals.
        storage_class = storage_type.title() + 'Session'
        storage_class = globals()[storage_class]

    # call setup first time only
    if first_time:
        if hasattr(storage_class, 'setup'):
            storage_class.setup(**kwargs)

    # Create and attach a new Session instance to cherrypy.serving.
    # It will possess a reference to (and lock, and lazily load)
    # the requested session data.
    kwargs['timeout'] = timeout
    kwargs['clean_freq'] = clean_freq
    cherrypy.serving.session = sess = storage_class(id, **kwargs)
    sess.debug = debug

    def update_cookie(id):
        """Update the cookie every time the session id changes."""
        cherrypy.serving.response.cookie[name] = id
    sess.id_observers.append(update_cookie)

    # Create cherrypy.session which will proxy to cherrypy.serving.session
    if not hasattr(cherrypy, 'session'):
        cherrypy.session = cherrypy._ThreadLocalProxy('session')

    if persistent:
        cookie_timeout = timeout
    else:
        # See http://support.microsoft.com/kb/223799/EN-US/
        # and http://support.mozilla.com/en-US/kb/Cookies
        cookie_timeout = None
    set_response_cookie(path=path, path_header=path_header, name=name,
                        timeout=cookie_timeout, domain=domain, secure=secure,
                        httponly=httponly)
def set_response_cookie(path=None, path_header=None, name='session_id',
                        timeout=60, domain=None, secure=False, httponly=False):
    """Set a response cookie for the client.

    path
        the 'path' value to stick in the response cookie metadata.

    path_header
        if 'path' is None (the default), then the response
        cookie 'path' will be pulled from request.headers[path_header].

    name
        the name of the cookie.

    timeout
        the expiration timeout for the cookie. If 0 or other boolean
        False, no 'expires' param will be set, and the cookie will be a
        "session cookie" which expires when the browser is closed.

    domain
        the cookie domain.

    secure
        if False (the default) the cookie 'secure' value will not
        be set. If True, the cookie 'secure' value will be set (to 1).

    httponly
        If False (the default) the cookie 'httponly' value will not be set.
        If True, the cookie 'httponly' value will be set (to 1).
    """
    # Set response cookie
    cookie = cherrypy.serving.response.cookie
    cookie[name] = cherrypy.serving.session.id
    morsel = cookie[name]

    fallback_path = cherrypy.serving.request.headers.get(path_header)
    morsel['path'] = path or fallback_path or '/'

    if timeout:
        morsel['max-age'] = timeout * 60
        _add_MSIE_max_age_workaround(morsel, timeout)
    if domain is not None:
        morsel['domain'] = domain
    if secure:
        morsel['secure'] = 1
    if httponly:
        if not morsel.isReservedKey('httponly'):
            raise ValueError('The httponly cookie token is not supported.')
        morsel['httponly'] = 1
def _add_MSIE_max_age_workaround(cookie, timeout):
    """
    Mirror 'max-age' with an absolute 'expires' date for IE.

    We'd like to use the "max-age" param as indicated in
    http://www.faqs.org/rfcs/rfc2109.html but IE doesn't
    save it to disk and the session is lost if people close
    the browser. So we have to use the old "expires" ... sigh ...
    """
    # timeout is in minutes; convert to an absolute epoch timestamp.
    cookie['expires'] = httputil.HTTPDate(time.time() + timeout * 60)
def expire():
    """Expire the current session cookie."""
    # Use the cookie name configured for this request (default 'session_id').
    name = cherrypy.serving.request.config.get(
        'tools.sessions.name', 'session_id')
    one_year = 60 * 60 * 24 * 365
    # An 'expires' date one year in the past makes the browser discard it.
    e = time.time() - one_year
    cherrypy.serving.response.cookie[name]['expires'] = httputil.HTTPDate(e)
    # 'max-age' would take precedence over 'expires', so drop it as well.
    cherrypy.serving.response.cookie[name].pop('max-age', None) | PypiClean
/FlaskCms-0.0.4.tar.gz/FlaskCms-0.0.4/flask_cms/static/js/ckeditor/plugins/a11yhelp/dialogs/lang/da.js | /*
Copyright (c) 2003-2013, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// Danish (da) localization strings for the CKEditor Accessibility Help
// (a11yhelp) plugin. Minified, generated resource: maps editor UI areas to
// keyboard-instruction text; ${...} placeholders are substituted with the
// configured keystrokes at runtime.
// NOTE(review): many "legend" strings are still in English and "Kilk"/"RIGTH"
// look like typos — presumably an incomplete translation; confirm upstream
// before editing, as this file is normally regenerated, not hand-maintained.
CKEDITOR.plugins.setLang("a11yhelp","da",{title:"Tilgængelighedsinstrukser",contents:"Onlinehjælp. For at lukke dette vindue klik ESC",legend:[{name:"Generelt",items:[{name:"Editor værktøjslinje",legend:"Press ${toolbarFocus} to navigate to the toolbar. Move to the next and previous toolbar group with TAB and SHIFT-TAB. Move to the next and previous toolbar button with RIGHT ARROW or LEFT ARROW. Press SPACE or ENTER to activate the toolbar button."},{name:"Editor Dialog",legend:"Inside a dialog, press TAB to navigate to next dialog field, press SHIFT + TAB to move to previous field, press ENTER to submit dialog, press ESC to cancel dialog. For dialogs that have multiple tab pages, press ALT + F10 to navigate to tab-list. Then move to next tab with TAB OR RIGTH ARROW. Move to previous tab with SHIFT + TAB or LEFT ARROW. Press SPACE or ENTER to select the tab page."},
{name:"Editor Context Menu",legend:"Press ${contextMenu} or APPLICATION KEY to open context-menu. Then move to next menu option with TAB or DOWN ARROW. Move to previous option with SHIFT+TAB or UP ARROW. Press SPACE or ENTER to select the menu option. Open sub-menu of current option with SPACE or ENTER or RIGHT ARROW. Go back to parent menu item with ESC or LEFT ARROW. Close context menu with ESC."},{name:"Editor List Box",legend:"Inside a list-box, move to next list item with TAB OR DOWN ARROW. Move to previous list item with SHIFT + TAB or UP ARROW. Press SPACE or ENTER to select the list option. Press ESC to close the list-box."},
{name:"Editor Element Path Bar",legend:"Press ${elementsPathFocus} to navigate to the elements path bar. Move to next element button with TAB or RIGHT ARROW. Move to previous button with SHIFT+TAB or LEFT ARROW. Press SPACE or ENTER to select the element in editor."}]},{name:"Kommandoer",items:[{name:"Fortryd kommando",legend:"Klik på ${undo}"},{name:"Gentag kommando",legend:"Klik ${redo}"},{name:" Bold command",legend:"Klik ${bold}"},{name:" Italic command",legend:"Press ${italic}"},{name:" Underline command",
legend:"Klik ${underline}"},{name:" Link command",legend:"Klik ${link}"},{name:" Toolbar Collapse command",legend:"Press ${toolbarCollapse}"},{name:" Access previous focus space command",legend:"Press ${accessPreviousSpace} to access the closest unreachable focus space before the caret, for example: two adjacent HR elements. Repeat the key combination to reach distant focus spaces."},{name:" Access next focus space command",legend:"Press ${accessNextSpace} to access the closest unreachable focus space after the caret, for example: two adjacent HR elements. Repeat the key combination to reach distant focus spaces."},
{name:" Accessibility Help",legend:"Kilk ${a11yHelp}"}]}]}); | PypiClean
/DosNa-0.1.tar.gz/DosNa-0.1/dosna/backends/base.py | """Base classes for every backend"""
import logging
from itertools import product
import numpy as np
from six.moves import range
log = logging.getLogger(__name__)
class BackendConnection(object):
    """Abstract handle to a storage backend.

    Provides the common open/close bookkeeping, the context-manager
    protocol, and dict-like access to datasets. Concrete backends implement
    the dataset CRUD methods.
    """
    def __init__(self, name, open_mode="a", *args, **kwargs):
        self._name = name
        self._connected = False
        self._mode = open_mode
        logging.getLogger(__name__).debug(
            'Extra connection options: args=%s kwargs=%s', args, kwargs)
    @property
    def name(self):
        # Identifier of this connection.
        return self._name
    @property
    def connected(self):
        # True between connect() and disconnect().
        return self._connected
    @property
    def mode(self):
        # Open mode requested at construction (default "a").
        return self._mode
    def connect(self):
        """Mark the connection as open (subclasses add real work)."""
        logging.getLogger(__name__).debug("Connecting to %s", self.name)
        self._connected = True
    def disconnect(self):
        """Mark the connection as closed (subclasses add real work)."""
        logging.getLogger(__name__).debug("Disconnecting from %s", self.name)
        self._connected = False
    def __enter__(self):
        # Idempotent: only connect when not already connected.
        if not self._connected:
            self.connect()
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._connected:
            self.disconnect()
    def __getitem__(self, name):
        return self.get_dataset(name)
    def __contains__(self, name):
        return self.has_dataset(name)
    def create_dataset(self, name, shape=None, dtype=np.float32, fillvalue=0,
                       data=None, chunk_size=None):
        raise NotImplementedError('`create_dataset` not implemented '
                                  'for this backend')
    def get_dataset(self, name):
        raise NotImplementedError('`get_dataset` not implemented '
                                  'for this backend')
    def has_dataset(self, name):
        raise NotImplementedError('`has_dataset` not implemented '
                                  'for this backend')
    def del_dataset(self, name):
        """Remove dataset metadata only"""
        raise NotImplementedError('`del_dataset` not implemented '
                                  'for this backend')
class BackendDataset(object):
    """Abstract chunked n-dimensional dataset backed by a BackendConnection.

    The data volume of ``shape`` is partitioned into a regular grid of
    ``chunk_grid`` chunks, each nominally of ``chunk_size``; storage
    backends implement the per-chunk create/get/has/del primitives.
    """
    def __init__(self, connection, name, shape, dtype, fillvalue, chunk_grid,
                 chunk_size):
        # The dataset metadata must already exist on the backend; this class
        # only wraps it.
        if not connection.has_dataset(name):
            raise Exception('Wrong initialization of a Dataset')
        self._connection = connection
        self._name = name
        self._shape = shape
        self._dtype = dtype
        self._fillvalue = fillvalue
        self._chunk_grid = chunk_grid
        self._chunk_size = chunk_size
        self._total_chunks = np.prod(chunk_grid)
        self._ndim = len(self._shape)
    @property
    def connection(self):
        # BackendConnection that owns this dataset.
        return self._connection
    @property
    def name(self):
        # Dataset identifier within the backend.
        return self._name
    @property
    def shape(self):
        # Overall dataset shape (not the per-chunk shape).
        return self._shape
    @property
    def ndim(self):
        # Number of dimensions, len(shape).
        return self._ndim
    @property
    def dtype(self):
        return self._dtype
    @property
    def fillvalue(self):
        # Value assumed for never-written elements.
        return self._fillvalue
    @property
    def chunk_grid(self):
        # Number of chunks along each dimension.
        return self._chunk_grid
    @property
    def chunk_size(self):
        # Shape of a single (non-edge) chunk.
        return self._chunk_size
    @property
    def total_chunks(self):
        # Total chunk count, prod(chunk_grid).
        return self._total_chunks
    # To be implemented by Storage Backend
    def create_chunk(self, idx, data=None, slices=None):
        raise NotImplementedError('`create_chunk` not implemented '
                                  'for this backend')
    def get_chunk(self, idx):
        raise NotImplementedError('`get_chunk` not implemented '
                                  'for this backend')
    def has_chunk(self, idx):
        raise NotImplementedError('`has_chunk` not implemented '
                                  'for this backend')
    def del_chunk(self, idx):
        raise NotImplementedError('`del_chunk` not implemented '
                                  'for this backend')
    # Standard implementations, could be overridden for more efficient access
    def get_chunk_data(self, idx, slices=None):
        """Read ``slices`` of chunk ``idx``."""
        return self.get_chunk(idx)[slices]
    def set_chunk_data(self, idx, values, slices=None):
        """Write ``values`` into ``slices`` of chunk ``idx``, creating the
        chunk first if it does not exist yet."""
        if not self.has_chunk(idx):
            self.create_chunk(idx, values, slices)
        # NOTE(review): when the chunk was just created from ``values`` above
        # the assignment below writes the same data a second time —
        # presumably harmless; confirm create_chunk semantics per backend.
        self.get_chunk(idx)[slices] = values
    # Utility methods used by all backends and engines
    def _idx_from_flat(self, idx):
        """Convert a flat chunk index into a chunk-grid coordinate tuple."""
        return tuple(map(int, np.unravel_index(idx, self.chunk_grid)))
    def _local_chunk_bounds(self, idx):
        """Slices addressing the valid region *within* chunk ``idx``
        (edge chunks may extend past the dataset shape)."""
        return tuple((slice(0, min((i + 1) * s, self.shape[j]) - i * s)
                      for j, (i, s) in enumerate(zip(idx, self.chunk_size))))
    def _global_chunk_bounds(self, idx):
        """Slices addressing chunk ``idx``'s region in dataset coordinates."""
        return tuple((slice(i * s, min((i + 1) * s, self.shape[j]))
                      for j, (i, s) in enumerate(zip(idx, self.chunk_size))))
    def _process_slices(self, slices, squeeze=False):
        """Normalise any supported index expression into a list of in-bounds,
        step-1 ``slice`` objects, one per dimension.

        When ``squeeze`` is True, also return the axes that were indexed by a
        plain integer so callers can drop them from the result afterwards.
        """
        # Promote scalar / single-object indices to a list of per-axis indices.
        if isinstance(slices, (slice, int)):
            slices = [slices]
        elif slices is Ellipsis:
            slices = [slice(None)]
        elif np.isscalar(slices):
            slices = [int(slices)]
        elif not isinstance(slices, (list, tuple)):
            raise Exception('Invalid Slicing with index of type `{}`'
                            .format(type(slices)))
        else:
            slices = list(slices)
        if len(slices) <= self.ndim:
            # Expand Ellipsis and pad missing trailing axes with full slices.
            nmiss = self.ndim - len(slices)
            while Ellipsis in slices:
                idx = slices.index(Ellipsis)
                slices = slices[:idx] + ([slice(None)] * (nmiss + 1)) \
                    + slices[idx + 1:]
            if len(slices) < self.ndim:
                slices = list(slices) + ([slice(None)] * nmiss)
        elif len(slices) > self.ndim:
            raise Exception('Invalid slicing of dataset of dimension `{}`'
                            ' with {}-dimensional slicing'
                            .format(self.ndim, len(slices)))
        final_slices = []
        shape = self.shape
        squeeze_axis = []
        for index, slice_ in enumerate(slices):
            if isinstance(slice_, int):
                # Integer index: keep as a width-1 slice, remember the axis.
                final_slices.append(slice(slice_, slice_ + 1))
                squeeze_axis.append(index)
            elif isinstance(slice_, slice):
                start = slice_.start
                stop = slice_.stop
                if start is None:
                    start = 0
                if stop is None:
                    stop = shape[index]
                elif stop < 0:
                    # A negative stop counts from the end of the axis.
                    stop = self.shape[index] + stop
                if start < 0 or start >= self.shape[index]:
                    raise Exception('Only possitive and '
                                    'in-bounds slicing supported: `{}`'
                                    .format(slices))
                if stop < 0 or stop > self.shape[index] or stop < start:
                    raise Exception('Only possitive and '
                                    'in-bounds slicing supported: `{}`'
                                    .format(slices))
                if slice_.step is not None and slice_.step != 1:
                    raise Exception('Only slicing with step 1 supported')
                final_slices.append(slice(start, stop))
            else:
                raise Exception('Invalid type `{}` in slicing, only integer or'
                                ' slices are supported'.format(type(slice_)))
        if squeeze:
            return final_slices, squeeze_axis
        return final_slices
    @staticmethod
    def _ndindex(dims):
        """Iterate every coordinate of an n-dimensional ``dims`` grid."""
        return product(*(range(d) for d in dims))
    def _chunk_slice_iterator(self, slices, ndim):
        """Yield, for every chunk touched by ``slices`` (as produced by
        ``_process_slices``), per-axis ``(chunk index, chunk-local slice,
        output slice)`` triples zipped together.

        The output slice positions the chunk's contribution inside the
        destination array; axes at position >= ``ndim`` yield ``None`` there
        (they were integer-indexed and will be squeezed away).
        """
        indexes = []
        nchunks = []
        cslices = []
        gslices = []
        chunk_size = self.chunk_size
        chunk_grid = self.chunk_grid
        for index, slc in enumerate(slices):
            # First and last chunk touched along this axis.
            sstart = slc.start // chunk_size[index]
            sstop = min((slc.stop - 1) // chunk_size[index],
                        chunk_grid[index] - 1)
            if sstop < 0:
                sstop = 0
            # Offsets of the requested range inside the first/last chunk.
            pad_start = slc.start - sstart * chunk_size[index]
            pad_stop = slc.stop - sstop * chunk_size[index]
            _i = []  # index
            _c = []  # chunk slices in current dimension
            _g = []  # global slices in current dimension
            for chunk_index in range(sstart, sstop + 1):
                start = pad_start if chunk_index == sstart else 0
                stop = pad_stop if chunk_index == sstop else chunk_size[index]
                gchunk = chunk_index * chunk_size[index] - slc.start
                _i += [chunk_index]
                _c += [slice(start, stop)]
                _g += [slice(gchunk + start, gchunk + stop)]
            nchunks += [sstop - sstart + 1]
            indexes += [_i]
            cslices += [_c]
            gslices += [_g]
        return (zip(*
                    (
                        (
                            indexes[n][i],
                            cslices[n][i],
                            (n < ndim or None) and gslices[n][i],
                        )
                        for n, i in enumerate(idx)
                    ))
                for idx in self._ndindex(nchunks))
class BackendDataChunk(object):
    """Abstract single chunk of a BackendDataset.

    Holds read-only chunk metadata (shape, dtype, size, byte count); the
    concrete read/write primitives are supplied by each storage backend.
    """
    def __init__(self, dataset, idx, name, shape, dtype, fillvalue):
        self._dataset = dataset
        self._idx = idx
        self._name = name
        self._shape = shape
        self._dtype = dtype
        self._fillvalue = fillvalue
        # Element count and the resulting storage footprint in bytes.
        self._size = np.prod(shape)
        self._byte_count = self._size * np.dtype(dtype).itemsize
    @property
    def dataset(self):
        # Owning BackendDataset.
        return self._dataset
    @property
    def name(self):
        # Backend identifier of this chunk.
        return self._name
    @property
    def shape(self):
        return self._shape
    @property
    def size(self):
        # Number of elements in the chunk.
        return self._size
    @property
    def dtype(self):
        return self._dtype
    @property
    def fillvalue(self):
        return self._fillvalue
    @property
    def byte_count(self):
        # size * itemsize, in bytes.
        return self._byte_count
    def get_data(self, slices=None):
        raise NotImplementedError('`get_data` not implemented '
                                  'for this backend')
    def set_data(self, values, slices=None):
        raise NotImplementedError('`set_data` not implemented '
                                  'for this backend')
    def __getitem__(self, slices):
        return self.get_data(slices=slices)
    def __setitem__(self, slices, values):
        self.set_data(values, slices=slices)
class ConnectionError(Exception):
    """Raised when a backend connection cannot be established or used.

    NOTE(review): this shadows the builtin ``ConnectionError`` (Python 3.3+)
    for code that imports it from this module — confirm that is intended.
    """
    pass
class DatasetNotFoundError(Exception):
    """Raised when a named dataset does not exist in the backend."""
    pass | PypiClean
/BitEx-2.0.0b3.zip/BitEx-2.0.0b3/bitex/api/REST/rest.py | import logging
# Import Third-Party
import requests
# Import Homebrew
from bitex.api.base import BaseAPI
# Init Logging Facilities
log = logging.getLogger(__name__)
class RESTAPI(BaseAPI):
    """
    Generic REST API interface. Supplies private and public query methods,
    as well as building blocks to customize the signature generation process.
    """
    def __init__(self, addr, timeout=None, key=None, secret=None, version=None,
                 config=None):
        """
        Initialize the RESTAPI instance.

        :param addr: str, API URL (excluding endpoint paths, if applicable)
        :param timeout: int or float, timeout for requests to the API
        :param key: str, API key
        :param secret: str, API secret
        :param version: str, API version segment inserted into the URI
        :param config: str, path to config file
        """
        super(RESTAPI, self).__init__(addr=addr, key=key, secret=secret,
                                      version=version, config=config)
        # Any falsy timeout falls back to the 10 second default.
        self.timeout = timeout or 10

    def generate_uri(self, endpoint):
        """
        Generate a Unique Resource Identifier (API version + endpoint).

        :param endpoint: str, endpoint path (i.e. /market/btcusd)
        :return: str, URI
        """
        if self.version:
            return '/{}/{}'.format(self.version, endpoint)
        return '/{}'.format(endpoint)

    def generate_url(self, uri):
        """
        Generate a Unique Resource Locator (API address + URI).

        :param uri: str, URI
        :return: str, URL
        """
        return '{}{}'.format(self.addr, uri)

    def sign_request_kwargs(self, endpoint, **kwargs):
        """
        Dummy request kwarg signature generator.

        Extend this to implement signing of requests for private API calls.
        By default, it only supplies a default URL built via generate_uri
        and generate_url.

        :param endpoint: str, API endpoint
        :param kwargs: kwargs meant for requests.Request()
        :return: dict, request kwargs
        """
        template = {'url': self.generate_url(self.generate_uri(endpoint)),
                    'headers': {}, 'files': {}, 'data': {}, 'params': {},
                    'auth': {}, 'cookies': {}, 'hooks': {}, 'json': {}}
        # Caller-supplied kwargs take precedence over the defaults.
        template.update(kwargs)
        return template

    def _query(self, method_verb, **request_kwargs):
        """
        Send the request to the API via requests.

        :param method_verb: valid HTTP Verb (GET, PUT, DELETE, etc.)
        :param request_kwargs: kwargs for requests.request()
        :return: requests.Response object
        """
        return requests.request(method_verb, timeout=self.timeout,
                                **request_kwargs)

    def private_query(self, method_verb, endpoint, **request_kwargs):
        """Query a private API endpoint requiring signing of the request.

        :param method_verb: valid HTTP Verb (GET, PUT, DELETE, etc.)
        :param endpoint: str, API Endpoint
        :param request_kwargs: kwargs for requests.Request()
        :return: requests.Response object
        """
        self.check_auth_requirements()
        signed_kwargs = self.sign_request_kwargs(endpoint, **request_kwargs)
        return self._query(method_verb, **signed_kwargs)

    def public_query(self, method_verb, endpoint, **request_kwargs):
        """
        Query a public (i.e. unauthenticated) API endpoint and return the result.

        :param method_verb: valid HTTP Verb (GET, PUT, DELETE, etc.)
        :param endpoint: str, API Endpoint
        :param request_kwargs: kwargs for requests.Request()
        :return: requests.Response object
        """
        request_kwargs['url'] = self.generate_url(self.generate_uri(endpoint))
        return self._query(method_verb, **request_kwargs)
/3dfin-0.2.0rc0-py3-none-any.whl/three_d_fin/cloudcompare/plugin_processing.py | import sys
from pathlib import Path
import numpy as np
import pycc
from three_d_fin.cloudcompare.plugin_progress import CloudCompareProgress
from three_d_fin.processing.abstract_processing import FinProcessing
from three_d_fin.processing.configuration import FinConfiguration
class CloudComparePluginProcessing(FinProcessing):
    """Implement the FinProcessing interface for CloudCompare in a plugin context."""
    # Group node in the CC DB tree that all 3DFin outputs are attached to.
    base_group: pycc.ccHObject
    # Display name of that group ("<cloud name>_3DFin").
    group_name: str
    @staticmethod
    def write_sf(point_cloud: pycc.ccPointCloud, scalar_field: np.ndarray, name: str):
        """Write a scalar field on a pycc.PointCloud.
        Parameters
        ----------
        point_cloud : pycc.ccPointCloud
            Point cloud targeted by the 3DFin processing.
        scalar_field : np.ndarray
            Numpy vector describing the scalar field.
        name: str
            Name of the scalar_field to write.
        """
        idx_sf = point_cloud.addScalarField(name)
        sf_array = point_cloud.getScalarField(idx_sf).asArray()
        # CC scalar fields are float32; cast and copy in place.
        sf_array[:] = scalar_field.astype(np.float32)[:]
        point_cloud.getScalarField(idx_sf).computeMinAndMax()
    def __init__(
        self,
        cc_instance: pycc.ccPythonInstance,
        point_cloud: pycc.ccPointCloud,
        config: FinConfiguration,
    ):
        """Construct the functor object.
        Parameters
        ----------
        cc_instance : pycc.ccPythonInstance
            Current cc application, wrapped by CloudCompare-PythonPlugin.
        point_cloud : pycc.ccPointCloud
            Point cloud targeted by the 3DFin processing.
        config: FinConfiguration
            The FinConfiguration object
        """
        self.base_cloud = point_cloud
        self.cc_instance = cc_instance
        self.progress = CloudCompareProgress(output=sys.stdout)
        super().__init__(config)
    def _construct_output_path(self):
        """Derive the on-disk output basename and the CC output group name."""
        # We still use the stem attribute since in CC the cloud name could be
        # based on a filename.
        self.output_basepath = (
            Path(self.config.misc.output_dir) / Path(self.base_cloud.getName()).stem
        )
        self.group_name = f"{Path(self.base_cloud.getName()).stem}_3DFin"
    def _pre_processing_hook(self):
        """Create the output group and hide the input cloud before running."""
        self.base_group = pycc.ccHObject(self.group_name)
        self.base_cloud.setEnabled(False)
    def _post_processing_hook(self):
        """Register the output group in the CC database."""
        # Could be used to delay addToDB calls.
        self.cc_instance.addToDB(self.base_group)
    def _load_base_cloud(self):
        # This is already loaded at object instantiation.
        pass
    def _get_xyz_z0_from_base(self) -> np.ndarray:
        """Return an (n, 4) array of x, y, z plus the configured z0 scalar field."""
        return np.c_[
            self.base_cloud.points(),
            self.base_cloud.getScalarField(
                self.base_cloud.getScalarFieldIndexByName(self.config.basic.z0_name)
            ).asArray(),
        ]
    def _get_xyz_from_base(self) -> np.ndarray:
        """Return the base cloud coordinates as float64."""
        # TODO(RJ) double conversion is only needed for DTM processing,
        # But maybe it's worth generalizing it.
        return self.base_cloud.points().astype(np.double)
    def _export_dtm(self, dtm: np.ndarray):
        """Add the (n, 3) DTM points as a hidden "dtm" cloud under the group."""
        cloud_dtm = pycc.ccPointCloud(dtm[:, 0], dtm[:, 1], dtm[:, 2])
        cloud_dtm.copyGlobalShiftAndScale(self.base_cloud)
        cloud_dtm.setName("dtm")
        cloud_dtm.setEnabled(False)
        self.base_group.addChild(cloud_dtm)
        self.cc_instance.addToDB(cloud_dtm)
    def _export_stripe(self, clust_stripe: np.ndarray):
        """Export the clustered stems-in-stripe cloud with a tree_ID scalar field.

        Assumes the last column of ``clust_stripe`` holds the tree ID —
        confirmed by the write_sf call below.
        """
        cloud_stripe = pycc.ccPointCloud(
            clust_stripe[:, 0], clust_stripe[:, 1], clust_stripe[:, 2]
        )
        cloud_stripe.copyGlobalShiftAndScale(self.base_cloud)
        cloud_stripe.setName("Stems in stripe")
        CloudComparePluginProcessing.write_sf(
            cloud_stripe, clust_stripe[:, -1], "tree_ID"
        )
        cloud_stripe.setCurrentDisplayedScalarField(0)
        cloud_stripe.toggleSF()
        cloud_stripe.setEnabled(False)
        self.base_group.addChild(cloud_stripe)
        self.cc_instance.addToDB(cloud_stripe)
    def _enrich_base_cloud(self, assigned_cloud: np.ndarray):
        """Export a copy of the base cloud with dist_axes / tree_ID / Z0 fields.

        NOTE(review): column layout assumed is [x, y, z, z0, tree_id,
        dist_axes] based on the indices used below — confirm against the
        processing step that produces ``assigned_cloud``.
        """
        copy_base_cloud = pycc.ccPointCloud(self.base_cloud.getName())
        copy_base_cloud.copyGlobalShiftAndScale(self.base_cloud)
        copy_base_cloud.reserve(self.base_cloud.size())
        # Could be a pycc.ccPointCloud.clone() but we do not want to clone all SFs
        copy_base_cloud.addPoints(
            assigned_cloud[:, 0], assigned_cloud[:, 1], assigned_cloud[:, 2]
        )
        CloudComparePluginProcessing.write_sf(
            copy_base_cloud, assigned_cloud[:, 5], "dist_axes"
        )
        CloudComparePluginProcessing.write_sf(
            copy_base_cloud, assigned_cloud[:, 4], "tree_ID"
        )
        # Use assigned_cloud z0 anyway
        CloudComparePluginProcessing.write_sf(
            copy_base_cloud, assigned_cloud[:, 3], "Z0"
        )
        copy_base_cloud.toggleSF()
        copy_base_cloud.setCurrentDisplayedScalarField(0)  # dist_axes
        # Set our custom color scale
        copy_base_cloud.setEnabled(False)
        color_scale_uuid = "{25ec76a1-9b8d-4e4a-a129-21ae313ef8ba}"
        color_scale_manager = pycc.ccColorScalesManager.GetUniqueInstance()
        color_scale = color_scale_manager.getScale(color_scale_uuid)
        copy_base_cloud.getCurrentDisplayedScalarField().setColorScale(color_scale)
        self.base_group.addChild(copy_base_cloud)
        self.cc_instance.addToDB(copy_base_cloud)
    def _export_tree_height(self, tree_heights: np.ndarray):
        """Export the highest point per tree, with z0/deviated fields and
        per-point 2D labels showing the height value."""
        cloud_tree_heights = pycc.ccPointCloud(
            tree_heights[:, 0], tree_heights[:, 1], tree_heights[:, 2]
        )
        cloud_tree_heights.copyGlobalShiftAndScale(self.base_cloud)
        cloud_tree_heights.setName("Highest points")
        CloudComparePluginProcessing.write_sf(
            cloud_tree_heights, tree_heights[:, 3], "z0"
        )
        CloudComparePluginProcessing.write_sf(
            cloud_tree_heights, tree_heights[:, 4], "deviated"
        )
        cloud_tree_heights.setPointSize(8)
        z0 = cloud_tree_heights.getScalarField(0)  # z0
        # Add label with z0 values
        for i in range(len(cloud_tree_heights.points())):
            hlabel = pycc.cc2DLabel(f"point{i}")
            hlabel.addPickedPoint(cloud_tree_heights, i)
            value = round(z0.asArray()[i], 2)
            hlabel.setName(f"{value:.2f}")
            hlabel.displayPointLegend(True)
            hlabel.toggleVisibility()
            hlabel.setDisplayedIn2D(False)
            cloud_tree_heights.addChild(hlabel)
            self.cc_instance.addToDB(hlabel)
        # Set black color everywhere
        cloud_tree_heights.setColor(0, 0, 0, 255)
        cloud_tree_heights.toggleColors()
        self.base_group.addChild(cloud_tree_heights)
        self.cc_instance.addToDB(cloud_tree_heights, autoExpandDBTree=False)
    def _export_circles(self, circles_coords: np.ndarray):
        """Export the fitted stem sections with their quality metrics as
        scalar fields (quality displayed by default)."""
        cloud_circles = pycc.ccPointCloud(
            circles_coords[:, 0], circles_coords[:, 1], circles_coords[:, 2]
        )
        cloud_circles.copyGlobalShiftAndScale(self.base_cloud)
        cloud_circles.setName("Fitted sections")
        CloudComparePluginProcessing.write_sf(
            cloud_circles, circles_coords[:, 4], "tree_ID"
        )
        CloudComparePluginProcessing.write_sf(
            cloud_circles, circles_coords[:, 5], "sector_occupancy_percent"
        )
        CloudComparePluginProcessing.write_sf(
            cloud_circles, circles_coords[:, 6], "pts_inner_circle"
        )
        CloudComparePluginProcessing.write_sf(cloud_circles, circles_coords[:, 7], "Z0")
        CloudComparePluginProcessing.write_sf(
            cloud_circles, circles_coords[:, 8], "Diameter"
        )
        CloudComparePluginProcessing.write_sf(
            cloud_circles, circles_coords[:, 9], "outlier_prob"
        )
        CloudComparePluginProcessing.write_sf(
            cloud_circles, circles_coords[:, 10], "quality"
        )
        cloud_circles.toggleSF()
        cloud_circles.setCurrentDisplayedScalarField(6)  # = quality
        self.base_group.addChild(cloud_circles)
        self.cc_instance.addToDB(cloud_circles)
    def _export_axes(self, axes_points: np.ndarray, tilt: np.ndarray):
        """Export the tree axes as a hidden cloud colored by tilting degree."""
        cloud_axes = pycc.ccPointCloud(
            axes_points[:, 0], axes_points[:, 1], axes_points[:, 2]
        )
        cloud_axes.copyGlobalShiftAndScale(self.base_cloud)
        cloud_axes.setName("Axes")
        CloudComparePluginProcessing.write_sf(cloud_axes, tilt, "tilting_degree")
        cloud_axes.toggleSF()
        cloud_axes.setCurrentDisplayedScalarField(0)  # = tilting_degree
        cloud_axes.setEnabled(False)
        self.base_group.addChild(cloud_axes)
        self.cc_instance.addToDB(cloud_axes)
    def _export_tree_locations(
        self, tree_locations: np.ndarray, dbh_values: np.ndarray
    ):
        """Export tree locator points with a dbh scalar field and per-point
        labels; a dbh of 0.0 is rendered as "Non Reliable"."""
        cloud_tree_locations = pycc.ccPointCloud(
            tree_locations[:, 0],
            tree_locations[:, 1],
            tree_locations[:, 2],
        )
        cloud_tree_locations.copyGlobalShiftAndScale(self.base_cloud)
        cloud_tree_locations.setName("Tree locator")
        cloud_tree_locations.setPointSize(8)
        CloudComparePluginProcessing.write_sf(
            cloud_tree_locations, dbh_values.reshape(-1), "dbh"
        )
        cloud_tree_locations.setColor(255, 0, 255, 255)
        cloud_tree_locations.toggleColors()
        dbh = cloud_tree_locations.getScalarField(0)  # dbh
        for i in range(len(cloud_tree_locations.points())):
            dlabel = pycc.cc2DLabel(f"point{i}")
            dlabel.addPickedPoint(cloud_tree_locations, i)
            value = round(dbh.asArray()[i], 3)
            if value == 0.0:
                dlabel.setName("Non Reliable")
            else:
                dlabel.setName(f"{value:.3f}")
            dlabel.displayPointLegend(True)
            dlabel.toggleVisibility()
            dlabel.setDisplayedIn2D(False)
            cloud_tree_locations.addChild(dlabel)
            self.cc_instance.addToDB(dlabel)
        self.base_group.addChild(cloud_tree_locations)
        self.cc_instance.addToDB(cloud_tree_locations, autoExpandDBTree=False)
/DLTA-AI-1.1.tar.gz/DLTA-AI-1.1/additional_scripts/coco_eval.py | import sys
import os
import json
import argparse
from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval
# Define a class to suppress print statements
class HiddenPrints:
    """
    A context manager that silences print statements by redirecting
    sys.stdout to the null device while the body runs.
    """
    def __enter__(self):
        # Remember the real stream so it can be restored on exit.
        self._saved_stdout = sys.stdout
        sys.stdout = open(os.devnull, 'w')

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close the devnull handle and restore the original stream,
        # even if the body raised.
        sys.stdout.close()
        sys.stdout = self._saved_stdout
# Define the function to evaluate coco
def evaluate_coco(gt_file: str, pred_file: str, task: str = "bbox", evaluation_type: str = "full") -> None:
    """
    Evaluate the performance of a COCO object detection model.

    Args:
        gt_file (str): Path to the ground truth file.
        pred_file (str): Path to the prediction file (JSON whose first entry
            holds an "annotations" list).
        task (str, optional): The type of task to evaluate (bbox or segm). Defaults to "bbox".
        evaluation_type (str, optional): The type of evaluation to perform (full or mAP). Defaults to "full".
    """
    # Run the whole pycocotools pipeline silently; it prints progress
    # unconditionally otherwise.
    with HiddenPrints():
        coco_gt = COCO(gt_file)
        with open(pred_file, 'r') as f:
            predictions = json.load(f)[0]['annotations']
        coco_dt = coco_gt.loadRes(predictions)
        evaluator = COCOeval(coco_gt, coco_dt, task)
        evaluator.evaluate()
        evaluator.accumulate()
        # summarize() also populates evaluator.stats, needed for the mAP path.
        evaluator.summarize()
    if evaluation_type == "full":
        # Re-run summarize outside the suppressed block to print the table.
        evaluator.summarize()
    elif evaluation_type == "mAP":
        print(f"{task} mAP: {evaluator.stats[0]:.3f}")
# Command-line entry: parse arguments and run the evaluation.
# NOTE(review): this executes at import time — wrapping it in an
# `if __name__ == "__main__":` guard would make the module importable
# without side effects; confirm no caller relies on the current behavior.
parser = argparse.ArgumentParser()
parser.add_argument("--gt_file", required=True, help="ground truth file")
parser.add_argument("--pred_file", required=True, help="prediction file")
parser.add_argument("--task", default="bbox",
                    choices=["bbox", "segm"], help="task (bbox or segm)")
parser.add_argument("--evaluation_type", default="full",
                    choices=["full", "mAP"], help="evaluation type (full or mAP)")
# Parse the arguments
args = parser.parse_args()
# Run the function with the arguments
evaluate_coco(args.gt_file, args.pred_file, args.task, args.evaluation_type)
/DESTNOSIM-1.3.3.tar.gz/DESTNOSIM-1.3.3/destnosim/des/decam.py | from __future__ import print_function
import numpy as np
from itertools import chain
from .ccd import *
import os
import subprocess
from scipy.spatial import cKDTree
import astropy.table as tb
from astropy.time import Time
from astropy import units as u
import spacerocks
from spacerocks.units import Units
from spacerocks.simulation import Simulation
from spacerocks.model import PerturberModel, builtin_models
from spacerocks.cbindings import correct_for_ltt_destnosim
from spacerocks.observer import Observer
from spacerocks.constants import epsilon
class DECamExposure:
    '''
    Base class for a DECam exposure. Contains a bunch of useful functions to
    deal with coordinates near the exposure.
    '''
    def __init__(self, expnum, ra, dec, mjd_mid, band):
        '''
        Initialization function

        Arguments:
        - expnum: Exposure number
        - ra: R.A. pointing (degrees); wrapped into (-180, 180]
        - dec: Declination pointing (degrees)
        - mjd_mid: midpoint time of the exposure (mjd)
        - band: exposure filter
        '''
        self.expnum = expnum
        self.ra = ra - 360 if ra > 180 else ra
        self.dec = dec
        self.mjd = mjd_mid
        self.band = band
        # CCD corner table, filled later (e.g. by Survey.collectCorners).
        self.corners = {}
    def gnomonicProjection(self, ra, dec):
        '''
        Computes the Gnomonic projection of the positions supplied, centered
        on this exposure.

        Arguments:
        - ra: list (or numpy array) of R.A.s (degrees)
        - dec: list (or numpy array) of Decs (degrees)
        Returns Gnomonic x and y, in degrees
        '''
        ra_arr = np.array(ra)
        dec_arr = np.array(dec)
        # Wrap RAs into (-180, 180] so offsets from the tangent point stay small.
        ra_arr[ra_arr > 180] -= 360
        sin_d0 = np.sin(np.radians(self.dec))
        cos_d0 = np.cos(np.radians(self.dec))
        sin_d = np.sin(np.radians(dec_arr))
        cos_d = np.cos(np.radians(dec_arr))
        delta_ra = np.radians(ra_arr - self.ra)
        cos_a = np.cos(delta_ra)
        sin_a = np.sin(delta_ra)
        # Cosine of the angular distance to the tangent point.
        cos_c = sin_d0 * sin_d + cos_d0 * cos_d * cos_a
        x = cos_d * sin_a / cos_c
        y = (cos_d0 * sin_d - sin_d0 * cos_d * cos_a) / cos_c
        return np.degrees(x), np.degrees(y)
    def inverseGnomonic(self, x, y):
        '''
        Inverts Gnomonic positions from a projection centered on this exposure.

        Arguments:
        - x: list or array of Gnomonic x (degrees)
        - y: list or array of Gnomonic y (degrees)
        Returns R.A. and Decs, in degrees
        '''
        x_rad = np.radians(np.array(x))
        y_rad = np.radians(np.array(y))
        dec0 = np.radians(self.dec)
        norm = np.sqrt(1 + x_rad**2 + y_rad**2)
        sin_dec = (np.sin(dec0) + y_rad * np.cos(dec0)) / norm
        sin_ra = x_rad / (np.cos(dec0) * norm)
        return np.degrees(np.arcsin(sin_ra)) + self.ra, np.degrees(np.arcsin(sin_dec))
    def checkInCCDFast(self, ra_list, dec_list, ccd_tree = None, ccd_keys = None, ccdsize = 0.149931):
        '''
        Checks if a list of RAs and Decs are inside a DECam CCD in an
        approximate way; returns indices of positions that are inside a CCD
        and which CCD they belong to.

        Arguments:
        - ra_list: list of R.A.s (degrees)
        - dec_list: list of Decs (degrees)
        - ccd_tree: kD tree of the CCD positions. Can be generated using ccd.create_ccdtree()
        - ccd_keys: list of CCD correspondences with the kD tree. Can be generated using ccd.create_ccdtree()
        - ccdsize: Size, in degrees, of the Gnomonic x direction (smallest side).
        Returns:
        - List of CCD positions for each RA/Dec pair (empty if there is no CCD match)
        - List of CCDs in which each point belongs to
        '''
        if ccd_tree is None:
            ccd_tree, ccd_keys = create_ccdtree()
        x, y = self.gnomonicProjection(ra_list, dec_list)
        if len(x) == 0:
            return [], None
        # Doubling y makes an L-infinity ball of radius ccdsize match the
        # 1:2 aspect ratio of a DECam CCD.
        position_tree = cKDTree(np.column_stack((x, 2 * y)))
        ## CCD size = 0.149931 deg
        matches = ccd_tree.query_ball_tree(position_tree, ccdsize, p=np.inf)
        if matches is None:
            return [], None
        # For each CCD with hits, repeat its CCD number once per matched point.
        ccd_id = [[ccdnums[ccd_keys[i]]] * len(found)
                  for i, found in enumerate(matches) if len(found) > 0]
        flat_matches = np.array(list(chain(*matches)))
        if len(flat_matches) == 0:
            return [], None
        return np.arange(len(ra_list))[flat_matches], list(chain(*ccd_id))
    def __str__(self):
        '''
        String representation function
        '''
        return 'DECam exposure {} taken with {} band. RA: {} Dec: {} MJD: {}'.format(
            self.expnum, self.band, self.ra, self.dec, self.mjd)
    def checkInCCDRigorous(self, ra_list, dec_list, ccd_list):
        '''
        Checks if the object is really inside the CCD using the corners table
        and a ray tracing algorithm.

        Arguments:
        - ra_list: list of R.A.s (degrees)
        - dec_list: list of Decs (degrees)
        - ccd_list: list of CCDs to be checked by the ray tracing algorithm (coming, eg, from self.checkInCCDFast). Must match RA/Dec shape
        Returns:
        - List of booleans if the RA/Dec pair belongs to its correspondent CCD.
        '''
        if not hasattr(self, 'corners'):
            raise AttributeError("Exposure doesn't have dict of corners. Perhaps run Survey.collectCorners?")
        results = []
        for ra_i, dec_i, ccd_i in zip(ra_list, dec_list, ccd_list):
            try:
                results.append(ray_tracing(ra_i, dec_i, self.corners[ccd_i]))
            except KeyError:
                # No corner entry for this CCD: count the point as outside.
                results.append(False)
        return results
class Survey:
'''
A survey is just a series of exposures. Ideally, we'd have one `exposure.positions.fits` file for DESTracks usage
This function includes convenience calls for $ORBITSPP to generate positions of a population of objects
'''
def __init__(self, expnum, ra, dec, mjd, band, track = None, corners = None):
'''
Initialization function
Arguments:
- expnum: list of exposure numbers
- ra: list of R.A. pointings
- dec: list of Dec pointings
- mjd: list of midpoint MJDs for the exposures
- band: list of filters for each exposure
- track: Path of FITS file containing the exposures for use with $ORBITSPP
- corners: Path of CCD corners FITS file containing the CCD corners of all exposures for use with $ORBITSPP
'''
self.ra = ra
self.ra[self.ra > 180] -= 360
self.dec = dec
self.mjd = mjd
self.expnum = expnum
self.band = band
self.track = track
self.corners = corners
self._hascorners = False
def createExposures(self):
'''
Creates a dictionary of DECamExposures for the Survey inside self.exposures
'''
self.exposures = {}
for ra,dec,mjd,n,b in zip(self.ra, self.dec, self.mjd, self.expnum, self.band):
self.exposures[n] = DECamExposure(n, ra, dec, mjd, b)
    def _createEarthSpaceRock(self):
        '''
        Creates the pre-computed Earth positions from SpaceRocks.

        Reads the exposure track file (``self.track``) and builds:
        - ``self.times``: astropy Time array of exposure midpoints (TDB, MJD)
        - ``self.earth``: spacerocks.SpaceRock holding the observatory state
          vectors at those times
        '''
        import astropy.units as u
        table = tb.Table.read(self.track)
        self.times = Time(self.mjd, format='mjd', scale='tdb')
        # SpaceRocks unit bookkeeping: TDB MJD epochs, velocities in au/yr.
        units = spacerocks.Units()
        units.timescale = 'tdb'
        units.timeformat = 'mjd'
        units.speed = u.au/u.yr
        # NOTE(review): assumes the track table stores barycentric ('ssb'),
        # J2000-frame observatory position/velocity columns — confirm the
        # file format produced by $ORBITSPP.
        self.earth = spacerocks.SpaceRock(x = table['observatory'][:,0], y = table['observatory'][:,1], z = table['observatory'][:,2],
                                          vx = table['velocity'][:,0], vy = table['velocity'][:,1], vz = table['velocity'][:,2],
                                          epoch = self.times, units = units, origin = 'ssb', frame='J2000')
def createObservations(self, population, outputfile, useold = False, ra0 = 10, dec0 = -20, radius = 85):
'''
Calls $ORBITSPP/DESTracks to generate observations for the input population, saves them in the outputfile
and returns this table
Arguments:
- population: Population object from tno/population containing the input orbits
- outputfile: Path for the output FITS file where the observations will be saved, .fits extension will be appended
- useold: boolean, if True will check if outputfile already exists and read it, skipping the DESTracks call
- ra0: R.A. center of the observation field
- dec0: Dec center of the observation
- radius: search radius for the exposures
For DES usage, the last three parameters should remain constant!
Results are stored in population.observations
'''
if useold and os.path.exists(outputfile + '.fits'):
population.observations = tb.Table.read(outputfile + '.fits')
return None
if population.heliocentric:
raise ValueError("Please use barycentric elements!")
orbitspp = os.getenv('ORBITSPP')
with open('{}.txt'.format(outputfile), 'w') as f:
for j,i in enumerate(population.elements):
print(j, i[0],i[1],i[2],i[3],i[4],i[5], file = f)
with open('{}.txt'.format(outputfile), 'r') as f:
print(' '.join([orbitspp + '/DESTracks', '-cornerFile={}'.format(self.corners),
'-exposureFile={}'.format(self.track), '-tdb0={}'.format(population.epoch), '-positionFile={}.fits'.format(outputfile)
,'-readState={}'.format(population.state) , '-ra0={}'.format(ra0),
'-dec0={}'.format(dec0),'-radius={}'.format(radius), '< {}.txt'.format(outputfile)]))
subprocess.run(' '.join([orbitspp + '/DESTracks', '-cornerFile={}'.format(self.corners),
'-exposureFile={}'.format(self.track), '-tdb0={}'.format(population.epoch), '-positionFile={}.fits'.format(outputfile)
,'-readState={}'.format(population.state) , '-ra0={}'.format(ra0),
'-dec0={}'.format(dec0),'-radius={}'.format(radius), ' < {}.txt'.format(outputfile)]), stdin = f, shell = True)
if not os.path.exists(outputfile + '.fits'):
raise ValueError("$ORBITSPP call did not terminate succesfully!")
population.observations = tb.Table.read(outputfile + '.fits')
def createObservationsSpacerocks(self, population, progress=True):
'''
Calls the Spacerocks backend to generate observations for the input population
Arguments:
- population: Population object for the input orbits
'''
## first set up times and do spacerock stuff
#self.createEarthSpaceRock()
self.times = Time(self.mjd, format='mjd', scale='utc')
rocks = population.generateSpaceRocks()
units = Units()
units.timescale = 'utc'
units.timeformat = 'jd'
units.mass = u.M_sun
spiceids, kernel, masses = builtin_models['ORBITSPP']
model = PerturberModel(spiceids=spiceids, masses=masses)
sim = Simulation(model=model, epoch=self.times.jd[0], units=units)
sim.add_spacerocks(rocks)
sim.integrator = 'leapfrog'
ras = np.array([])
decs = np.array([])
orbitid = np.array([])
expnum = np.array([])
oidlist = np.arange(len(population))
if progress == True:
from rich.progress import track
epochs = track(range(len(self.times.jd)))
else:
epochs = range(len(self.times.jd))
for i in epochs:
sim.integrate(self.times.jd[i], exact_finish_time=1)
a = np.zeros((sim.N, 3), dtype=np.double)
b = np.zeros((sim.N, 3), dtype=np.double)
sim.serialize_particle_data(xyz=a, vxvyvz=b)
x, y, z = a.T
vx, vy, vz = b.T
x = np.ascontiguousarray(x)[sim.N_active:]
y = np.ascontiguousarray(y)[sim.N_active:]
z = np.ascontiguousarray(z)[sim.N_active:]
vx = np.ascontiguousarray(vx)[sim.N_active:]
vy = np.ascontiguousarray(vy)[sim.N_active:]
vz = np.ascontiguousarray(vz)[sim.N_active:]
observer = Observer(epoch=self.times.jd[i], obscode='W84', units=units)
ox = observer.x.au.astype(np.double)
oy = observer.y.au.astype(np.double)
oz = observer.z.au.astype(np.double)
ovx = observer.vx.value.astype(np.double)
ovy = observer.vy.value.astype(np.double)
ovz = observer.vz.value.astype(np.double)
# Compute ltt-corrected topocentroc Ecliptic coordinates
xt, yt, zt = correct_for_ltt_destnosim(x, y, z, vx, vy, vz, ox, oy, oz, ovx, ovy, ovz)
lon = np.arctan2(yt, xt)
lat = np.arcsin(zt / np.sqrt(xt**2 + yt**2 + zt**2))
dec = np.degrees(np.arcsin(np.sin(lat) * np.cos(epsilon) + np.cos(lat) * np.sin(lon) * np.sin(epsilon)))
ra = np.degrees(np.arctan2((np.cos(lat) * np.cos(epsilon) * np.sin(lon) - np.sin(lat) * np.sin(epsilon)), np.cos(lon) * np.cos(lat)))
ra[ra>180] -= 360
dist_center = np.sqrt( ((ra - self.ra[i]) * np.cos(self.dec[i] * np.pi/180))**2 + (dec - self.dec[i])**2)
keep = dist_center < 1.1
ras = np.append(ras, ra[keep])
decs = np.append(decs, dec[keep])
orbitid = np.append(orbitid, oidlist[keep])
expnum = np.append(expnum, len(oidlist[keep]) * [self.expnum[i]])
del x, y, z, vx, vy, vz, a, b, sim, xt, yt, zt, ox, oy, oz, observer
## gather data into something useable
t = tb.Table()
t['RA'] = ras
del ras
#t['RA'][t['RA'] > 180] -= 360
t['DEC'] = decs
del decs
t['EXPNUM'] = expnum
t['EXPNUM'] = t['EXPNUM'].astype('int32')
del expnum
t['ORBITID'] = orbitid
t['ORBITID'] = t['ORBITID'].astype('int64')
del orbitid
exp = tb.Table()
exp['EXPNUM'] = np.array(self.expnum)
exp['RA_CENTER'] = np.array(self.ra)
#exp['RA_CENTER'][exp['RA_CENTER'] > 180] -= 360
exp['DEC_CENTER'] = np.array(self.dec)
#return t, exp
t = tb.join(t, exp)
x, y = bulk_gnomonic(np.array(t['RA']), np.array(t['DEC']), np.array(t['RA_CENTER']), np.array(t['DEC_CENTER']))
#rescale for kD tree
theta = np.array([x, 2*y]).T
del t['RA_CENTER', 'DEC_CENTER']
ccd_tree, ccd_keys = create_ccdtree()
tree = cKDTree(theta)
# kD tree ccd checker
inside_ccd = ccd_tree.query_ball_tree(tree, 0.149931 * 1.01, p = np.inf)
if inside_ccd != None:
ccd_id = [len(inside_ccd[i])*[ccdnums[ccd_keys[i]]] for i in range(len(inside_ccd)) if len(inside_ccd[i]) > 0]
inside_ccd = np.array(list(chain(*inside_ccd)))
if len(inside_ccd) > 0:
ccdlist = list(chain(*ccd_id))
else:
print('No observations!')
self.population = tb.Table(column=['RA', 'DEC', 'EXPNUM', 'ORBITID'])
return None
else:
print('No observations!')
self.population = tb.Table(column=['RA', 'DEC', 'EXPNUM', 'ORBITID'])
return None
outside_ccd = np.arange(len(t))
outside_ccd = outside_ccd[np.isin(outside_ccd, inside_ccd, invert = True)]
outccd = t[outside_ccd]
t = t[inside_ccd]
t['CCDNUM'] = ccdlist
# we need the exposure objects here for the proper CCD stuff
try:
self.exposures
except AttributeError:
self.createExposures()
if not self._hascorners:
self.collectCorners()
inside_ccd = np.zeros(len(t), dtype='bool')
for i in range(len(t)):
# proper corners
if t[i]['CCDNUM'] not in self.exposures[t[i]['EXPNUM']].corners:
ins = False
else:
ins = ray_tracing(t[i]['RA'], t[i]['DEC'], self.exposures[t[i]['EXPNUM']].corners[t[i]['CCDNUM']])
inside_ccd[i] = ins
outccd = tb.vstack([outccd, t[~inside_ccd]])
outccd['CCDNUM'] = 0
outccd = tb.unique(outccd)
obs = t[inside_ccd]
obs = tb.vstack([t, outccd])
obs.sort(['ORBITID','EXPNUM'])
population.observations = obs['RA', 'DEC', 'EXPNUM', 'CCDNUM', 'ORBITID']
def __getitem__(self, key):
'''
Allows DECamExposures to be accessed by indexing the Survey object
'''
try:
return self.exposures[key]
except AttributeError:
raise AttributeError("Survey does not have a list of DECamExposures!")
except KeyError:
raise KeyError("Exposure {} not in survey".format(key))
def __len__(self):
'''
Returns the number of exposures
'''
return len(self.expnum)
def collectCorners(self):
'''
Uses the CCD corners table to build a list of CCDs
'''
if self.corners == None:
raise ValueError("No table of CCD corners! Set Survey.corners first.")
else:
corners = tb.Table.read(self.corners)
try:
self.exposures
except AttributeError:
self.createExposures()
corners = corners[np.isin(corners['expnum'], self.expnum)]
#corners.add_index('expnum')
for i in corners:
rac = i['ra'][:-1]
rac[rac>180] -= 360
decc = i['dec'][:-1]
self.exposures[i['expnum']].corners[i['ccdnum']] = np.array([rac,decc]).T
self._hascorners = True | PypiClean |
/Agora-Scholar-0.2.0.tar.gz/Agora-Scholar-0.2.0/agora/scholar/actions/query.py | import calendar
import json
import logging
import traceback
from datetime import datetime
import networkx as nx
from agora.scholar.actions import FragmentConsumerResponse
from agora.scholar.daemons.fragment import fragment_lock, fragment_graph, fragments_key, fragment_updated_on, \
FragmentPlugin
from agora.stoa.actions.core import STOA
from agora.stoa.actions.core.fragment import FragmentRequest, FragmentAction, FragmentSink
from agora.stoa.actions.core.utils import chunks, tp_parts
from agora.stoa.store import r
from agora.stoa.store.tables import db
# Module author metadata.
__author__ = 'Fernando Serena'
# Module-wide logger shared by the query plugin, action and response classes below.
log = logging.getLogger('agora.scholar.actions.query')
def fragment_has_result_set(fid):
    """Return True when a materialized result set is flagged in Redis for fragment *fid*."""
    rs_flag_key = '{}:{}:rs'.format(fragments_key, fid)
    return r.get(rs_flag_key) is not None
def _update_result_set(fid, gp):
    """Recompute and persist the result set of fragment *fid* for graph pattern *gp*.

    The fragment's Mongo collection is emptied, the fragment graph is re-queried,
    the distinct rows are bulk-inserted, and the Redis flag '<prefix>:<fid>:rs'
    is set to mark that a result set is available. Errors are logged, not raised.
    """
    try:
        result_gen = _query(fid, gp)
        # solutions = _build_solutions(fid, gp)
        # for s in solutions:
        #     print s
        # Clear the previous result set before inserting the fresh one
        removed = db[fid].delete_many({}).deleted_count
        log.info('{} rows removed from fragment {} result set'.format(removed, fid))
        table = db[fid]
        # Deduplicate result rows before the bulk insertion
        rows = set(result_gen)
        if rows:
            # Each row is stored as a document keyed by its SPARQL variable labels
            table.insert_many([{label: row[row.labels[label]] for label in row.labels} for row in rows])
            log.info('{} rows inserted into fragment {} result set'.format(len(rows), fid))
        # Atomically flag the fragment as having a materialized result set
        with r.pipeline(transaction=True) as p:
            p.multi()
            p.set('{}:{}:rs'.format(fragments_key, fid), True)
            p.execute()
    except Exception as e:
        traceback.print_exc()
        log.error(e.message)
# def _build_solutions(fid, gp):
# gp_parts = [tp_parts(tp) for tp in gp]
# gp_graph = nx.DiGraph()
# for gp_part in gp_parts:
# gp_graph.add_edge(gp_part[0], gp_part[2], predicate=gp_part[1])
#
# roots = filter(lambda x: gp_graph.in_degree(x) == 0, gp_graph.nodes())
#
# sorted_pairs = []
# gp_graph.edges()
# for root in roots:
# succs = gp_graph.successors(root)
# sort
# yield fid
def _query(fid, gp):
"""
Query the fragment using the original request graph pattern
:param gp:
:param fid:
:return: The query result
"""
def __build_query_from(x, depth=0):
def build_pattern_query((u, v, data)):
return '\nOPTIONAL { %s %s %s %s }' % (u, data['predicate'], v, __build_query_from(v, depth + 1))
out_edges = list(gp_graph.out_edges_iter(x, data=True))
out_edges = reversed(sorted(out_edges, key=lambda x: gp_graph.out_degree))
if out_edges:
return ' '.join([build_pattern_query(x) for x in out_edges])
return ''
gp_parts = [tp_parts(tp) for tp in gp]
blocks = []
gp_graph = nx.DiGraph()
for gp_part in gp_parts:
gp_graph.add_edge(gp_part[0], gp_part[2], predicate=gp_part[1])
roots = filter(lambda x: gp_graph.in_degree(x) == 0, gp_graph.nodes())
blocks += ['%s a stoa:Root\nOPTIONAL { %s }' % (root, __build_query_from(root)) for root in roots]
where_gp = ' .\n'.join(blocks)
q = """SELECT DISTINCT * WHERE { %s }""" % where_gp
result = []
try:
log.info('Querying fragment {}:\n{}'.format(fid, q))
result = fragment_graph(fid).query(q)
except Exception as e: # ParseException from query
traceback.print_exc()
log.warning(e.message)
return result
class QueryPlugin(FragmentPlugin):
    """Fragment plugin that refreshes a query's cached result set when its fragment finishes syncing."""

    @property
    def sink_class(self):
        # Sink type this plugin is associated with
        return QuerySink

    def consume(self, fid, quad, graph, *args):
        # Per-triple consumption is a no-op: results are recomputed in bulk in complete()
        pass
        # (subj, _, obj) = quad[0]
        # collection_name = '{}:{}:{}:{}'.format(fragments_key, fid, subj, obj)
        # db[collection_name].insert({subj: str(quad[1]), obj: str(quad[3])})

    @property
    def sink_aware(self):
        # This plugin does not require per-request sink instances
        return False

    def complete(self, fid, *args):
        # args[0] carries the fragment's graph pattern
        fragment_gp = args[0]
        try:
            # Only refresh fragments that had a result set materialized before
            if fragment_has_result_set(fid):
                _update_result_set(fid, fragment_gp)
        except Exception, e:
            traceback.print_exc()
            log.error(e.message)
        # collection_prefix = '{}:{}'.format(fragments_key, fid)
        # for c in filter(lambda x: x.startswith(collection_prefix),
        #                 db.collection_names(include_system_collections=False)):
        #     db.drop_collection(c)
        # collection_name = '{}:{}:{}:{}'.format(fragments_key, fid, subj, obj)
        # # intermediate_fid_keys = r.keys('{}:{}:int*'.format(fragments_key, fid))
        # with r.pipeline() as p:
        #     for ifk in intermediate_fid_keys:
        #         p.delete(ifk)
        #     p.execute()
# Make the scholar aware of query requests by registering the plugin above
FragmentPlugin.register(QueryPlugin)
class QueryRequest(FragmentRequest):
    """Fragment request specialised for query operations.

    Queries carry no payload beyond the fragment pattern itself, so both
    methods simply delegate to the base class.
    """

    def __init__(self):
        super(QueryRequest, self).__init__()

    def _extract_content(self, request_type=STOA.QueryRequest):
        # No query-specific fields to parse; defer to the generic extractor
        super(QueryRequest, self)._extract_content(request_type=request_type)
class QueryAction(FragmentAction):
    """Action that resolves a query request against a cached fragment."""

    def __init__(self, message):
        # Build the request/sink pair first: the base initializer expects
        # both to be available while it processes the incoming message.
        self.__request = QueryRequest()
        self.__sink = QuerySink()
        super(QueryAction, self).__init__(message)

    @classmethod
    def response_class(cls):
        return QueryResponse

    @property
    def request(self):
        return self.__request

    @property
    def sink(self):
        return self.__sink

    def submit(self):
        """Submit the action; if the fragment is already synced, mark delivery as ready."""
        super(QueryAction, self).submit()
        if fragment_updated_on(self.sink.fragment_id) is not None:
            self.sink.delivery = 'ready'
class QuerySink(FragmentSink):
    """
    Sink for query requests.

    Queries need no persistence behaviour beyond the generic fragment sink,
    so the inherited __init__, _save, _load and _remove implementations are
    used unchanged (the previous overrides delegated straight to FragmentSink).
    """
class QueryResponse(FragmentConsumerResponse):
    """Streams the cached result set of a query request back to the requester as chunked JSON."""

    def __init__(self, rid):
        # The creation of a response always require to load its corresponding sink
        self.__sink = QuerySink()
        self.__sink.load(rid)
        super(QueryResponse, self).__init__(rid)
        # Guards the fragment store against concurrent modification while the response is built
        self.__fragment_lock = fragment_lock(self.__sink.fragment_id)

    @property
    def sink(self):
        return self.__sink

    def _build(self):
        """Generator yielding (payload, headers) pairs: JSON chunks of up to 1000
        result rows each, then a final end-of-stream marker."""
        self.__fragment_lock.acquire()
        result = self.result_set()
        log.debug('Building a query result for request number {}'.format(self._request_id))
        try:
            # Query result chunking, yields JSON
            for ch in chunks(result, 1000):
                result_rows = []
                for t in ch:
                    if any(t):
                        # Map internal variable names back to the requester's own labels
                        result_row = {self.sink.map('?' + v).lstrip('?'): t[v] for v in t}
                        result_rows.append(result_row)
                if result_rows:
                    yield json.dumps(result_rows), {'state': 'streaming', 'source': 'store',
                                                    'response_to': self.sink.message_id,
                                                    'submitted_on': calendar.timegm(datetime.utcnow().timetuple()),
                                                    'submitted_by': self.sink.submitted_by,
                                                    'format': 'json'}
        except Exception, e:
            log.error(e.message)
            raise
        finally:
            # Always release the lock, whether streaming succeeded or not
            self.__fragment_lock.release()
        yield [], {'state': 'end', 'format': 'json'}
        # Just after sending the state:end message, the request delivery state switches to sent
        self.sink.delivery = 'sent'

    def result_set(self):
        """Return a generator over the distinct projected rows of the cached result
        set, (re)computing the result set first if it is not materialized yet."""
        def extract_fields(result):
            # Each Mongo aggregation row nests the projected values under '_id'
            for r in result:
                yield r['_id']

        # (Re)build the cached result set if the fragment has none yet
        if not r.exists('{}:{}:rs'.format(fragments_key, self.sink.fragment_id)):
            _update_result_set(self.sink.fragment_id, self.sink.fragment_gp)

        pattern = {}
        projection = {}
        # Only variable mappings ('?...') are relevant here
        mapping = filter(lambda x: x.startswith('?'), self.sink.mapping)
        for v in mapping:
            value = self.sink.map(v, fmap=True)
            if not value.startswith('?'):
                # The variable was bound to a constant: strip quoting/brackets and filter rows on it
                if value.startswith('"'):
                    value = value.strip('"')
                else:
                    value = value.lstrip('<').rstrip('>')
                pattern[v.lstrip('?')] = value
            elif not value.startswith('?_'):
                # All those variables that start with '_' won't be projected
                projection[v.lstrip('?')] = True

        table = db[self.sink.fragment_id]
        # Match the constant bindings, then deduplicate on the projected variables
        pipeline = [{"$match": {v: pattern[v] for v in pattern}},
                    {"$group": {'_id': {v: '$' + v for v in projection}}}]
        return extract_fields(table.aggregate(pipeline))
/BTEdb-7.0.1.tar.gz/BTEdb-7.0.1/README.txt | ###########
## BTEdb ##
###########
BTEdb (Better Than Ethan's database) is a project created because Ethan thought he could write a database in Python, and mine has to be better than his
It is a schemaless database like MongoDB that serializes to JSON. It can either serialize to a file, or an object that you pass it.
An example:
db = BTEdb.Database("filename.json")
Or:
class MyClass:
def seek(self,position,mode):
pass
def truncate(self):
return False
def __init__(self):
self.data = ""
def write(self,data):
self.data = data
def flush(self):
pass
def read(self):
return self.data
def close(self):
pass
MyObject = MyClass()
db = BTEdb.Database(MyObject)
You don't need to specify an object at all when initiating a database, but you will still need to create one. For example:
db = BTEdb.Database("filename.json")
does the same thing as:
db = BTEdb.Database()
db.OpenDatabase("filename.json")
This allows you to switch files on-the-fly
BTEdb also supports pretty-print. Specify any number of spaces for the database to use, for example:
db = BTEdb.Database("filename.json", 4)
or:
db = BTEdb.Database()
db.OpenDatabase("filename.json", 4)
You can safely close file descriptors and return the database to a just-created state like this:
db.Destroy()
Creating a table looks like this:
db.Create("Table name")
If the table exists, it will be truncated
If the table does not exist, most of the following methods will raise a TableDoesNotExistException
Drop the bass:
db.Drop("Table name")
Truncate a table like this:
db.Truncate("Table name")
Dump the entire database:
db.Dump()
Or a specific table:
db.Dump("Table name")
Check if a table exists like this:
db.TableExists("Table name")
Insert like this:
db.Insert("Table name", Username = "Niles", UID = 1000, favColour = 0xffffff)
As with all schemaless databases, you are responsible for sanitizing the input and output to the database. It is entirely possible to store that entry and {x = 12, y = 24, pointColour = "green"} in the same table.
Select data like this:
db.Select("Table name", UID = 1000)
This selects all entries with the UID of 1000 and the favourite colour of white:
db.Select("Table name", UID = 1000, favColour = 0xffffff)
This selects all entries with the favourite colour of white and who have an E in their username, case insensitive (note that the function or lambda must come before any keyword arguments):
db.Select("Table name", lambda x: "e" in x["username"].lower(), favColour = 0xffffff)
You may use a lambda or a pre-defined function. For example:
import urllib
import urllib2
def CheckUser(Datapoint):
    values = { "UID" : Datapoint["UID"], "username" : Datapoint["username"] }
    postdata = urllib.urlencode(values)
    request = urllib2.Request("http://example.com/CheckUser.php", postdata)
    response = urllib2.urlopen(request).read()
    if response == "User accepted":
        return True
    else:
        return False
db.Select("Table name", CheckUser)
That would query an external server to check the username and UID to determine if each row should be selected or not. Be warned that this will generate a LOT of network traffic
Deleting is similar. The following will delete any user with the UID of 1000:
db.Delete("Table name", UID = 1000)
Lambdas and functions may also be used here.
It is important to note that the first non-keyword argument is the table name. Non-function or lambda non-keyword arguments after the table name will raise a TypeError
Update is a little bit more complicated. Here is an example to get you going
db.Update("Table name", db.Select("Table name", favColour = 0xffffff), UID = 12, Username = "test")
This is the same as the SQL statement:
UPDATE `Table name` SET `UID` = 12, `Username` = 'test' WHERE `favColour` = 0xffffff;
Lambdas may be used in the select statement, but not the update statement. To achieve something similar, you can do this:
for x in db.Select("Table name", favColour = 0xffffff):
db.Update("Table name", [x], UID = x["UID"] + 1)
That would increment the UIDs of every user with the favourite colour of white.
It can also be combined with the Dump method
db.Update("Table name", db.Dump("Table name"), username = "newusername")
That would change every user's username to "newusername"
Saving a savepoint is as easy as this:
db.Save("Savepoint name")
Reverting back to a savepoint:
db.Revert("Savepoint name")
Saving a specific table:
db.Save("Savepoint name", "Table name")
Recovering a specific table from a savepoint:
db.Revert("Savepoint name", "Table name")
Deleting a savepoint
db.RemoveSave("Savepoint name")
If you only saved a few tables, only those tables will be recovered if you attempt to recover the entire save.
You can manually retrieve or insert a savepoint with these methods:
db.GetSave("Savepoint name")
db.GetSave()
db.PutSave(data,"Savepoint name")
db.PutSave(data)
Under most circumstances, you should never need to use those methods. If for some reason you do, DO NOT mix them up: a save retrieved with a savepoint name must be put back with a savepoint name, and one retrieved without a name must be put back without one.
Triggers are identified by name, which can be a string, integer, float, or a few other datatypes. The name must be unique, however you may have infinite triggers.
You can add a trigger something like this:
import time
def handler(db, datapoint, table, event):
print("Handler being called on event " + event + " on datapoint " + str(datapoint) + " in table " + table)
db.Save(str(time.time()),table)
db.AddTrigger("Trigger Name", "AFTER UPDATE", "Table name", handler)
That will make any update to something in the "Table name" table print a message and make a new savepoint. Beware that this will run for each individual datapoint.
You may also use lambda expressions.
Valid events are "BEFORE INSERT", "AFTER INSERT", "BEFORE DELETE", "AFTER DELETE", "BEFORE UPDATE" and "AFTER UPDATE"
With INSERT and DELETE, the same datapoint is passed to both BEFORE and AFTER handlers. With UPDATE, the old and new datapoints are passed to the BEFORE and AFTER handlers, respectively.
Remove like this:
db.RemoveTrigger("Trigger name")
You may have multiple handlers for a single event.
Transactions are necessary for an efficient database.
While not inside a transaction, methods that modify the database write out the database to the disk before they return. This is good if you don't have a lot of requests, but if you are going to execute two or more statements that modify data in a row, you should execute them within a transaction.
Begin a transaction like this:
db.BeginTransaction()
However, that will create a savepoint called "transaction". To begin a transaction without that, (you will be unable to revert the transaction) do this:
db.BeginTransaction(False)
To commit the transaction and write to the disk:
db.CommitTransaction()
If you chose to make a save, you can revert a transaction:
db.RevertTransaction()
You can always tell if a transaction is in progress with the boolean value ``db.TransactionInProgress``
Other less used methods:
db.Vacuum() # Forces a write to the disk
db.ListSaves() # Lists savepoints
db.ListTables() # Guess
db.ListTriggers() # Figure it out yourself
db.TriggerExists(name) # Who even reads these
| PypiClean |
/NetGrasp-0.9.4.tar.gz/NetGrasp-0.9.4/README.txt | ========
NetGrasp
========
Netgrasp tracks IP and MAC address pairs seen on the network while it runs,
optionally generating notifications. For example, it can notify you when a new
device joins your network. It can also send daily and weekly emails summarizing
the devices using your network.
Netgrasp currently requires Python 2.
============
Installation
============
Run:
pip install netgrasp
Or:
sudo ./setup.py install
To configure netgrasp, save the configuration template to any of
the following paths, as preferred for your local configuration:
* /etc/netgrasp.cfg
* /usr/local/etc/netgrasp.cfg
* ~/.netgrasp.cfg
* ./netgrasp.cfg
For example:
sudo netgrasp template > /usr/local/etc/netgrasp.cfg
Then edit the configuration file to suit your environment.
To start netgrasp:
sudo netgrasp start
To stop netgrasp:
sudo netgrasp stop
Some built-in documentation on using Netgrasp:
* netgrasp -h
* netgrasp list -h
* netgrasp identify -h
Complete documentation can be found in docs/README.md:
https://github.com/jeremyandrews/netgrasp/blob/master/docs/README.md
| PypiClean |
/HPexome-1.2.1.tar.gz/HPexome-1.2.1/README.md | # An automated tool for processing whole-exome sequencing data
Whole-exome sequencing has been widely used in clinical applications for the identification of the genetic causes of several diseases.
_HPexome_ automates many data processing tasks for exome-sequencing data analysis of large-scale cohorts.
Given ready-analysis alignment files it is capable of breaking input data into small genomic regions to efficiently process in parallel on cluster-computing environments.
It relies on Queue workflow execution engine and GATK variant calling tool and its best practices to output high-confident unified variant calling file.
Our workflow is shipped as Python command line tool making it easy to install and use.
``` bash
hpexome [OPTIONS] [DESTINATION]
```
`OPTIONS`
__Required arguments__
- `-I, --bam` One or more sequence alignment files in BAM format _or_ directories containing `*.bam` files.
- `-R, --genome` Reference genome in single FASTA file.
- `--dbsnp` dbSNP file in VCF format. See [dbSNP FTP](ftp://ftp.ncbi.nih.gov/snp/).
- `--sites` VCF files containing known polymorphic sites to skip over in the recalibration algorithm.
__Optional arguments__
- `--indels` Inputs the VCF file with known insertions and deletions (indels) to be used.
- `-L, --intervals` One or more genomic intervals over which to operate.
- `--unified_vcf` Unify VCF files into a single one.
- `-O, --output_file_name` Output file name for unified VCF. Default is `unified.vcf`.
- `--min_prunning` Minimum support to not prune paths in the graph. Default value is `2`.
- `-stand_call_conf` Minimum phred-scaled confidence threshold at which variants should be called. Default is `30`.
__Performance-specific arguments__
- `-nt, --num_data_threads` Controls the number of data consecutive threads sent to the processor that are used in the parallelization process. It is used in the Realigner Target Creator, and may not be used together with the scattercount option. If not set, the walker will run in serial.
- `-nct, --num_threads_per_data_thread` Controls the number of CPU threads allocated to each data thread. It is used with the Base Recalibrator and the Print Reads, and may not be used together with the `scattercount` option. If not set, the walkers will run in serial.
- ` --job_runner` Job executor engine (eg. Lsf706, Grid, PbsEngine).
- `--scatter_count` Controls the number of parts in which the genetic sequence will be divided when sent to be parallelized by the Job executor engine. It is used in all walkers. It must be used with the `--job_runner` option, or else it will not use the GridEngine and the process will be run in serial.
__System path to required software__
- `--java_path` Path to java. Use this to pass JVM-specific arguments. Default is `java`.
`DESTINATION` Sets the directory in which the outputs will be saved. If not set, the outputs will be saved in the directory in which the process is running.
## Reproducible example
In this example we will download and process a [whole-exome sequence sample](https://www.internationalgenome.org/data-portal/sample/HG00114) from the 1000 Genomes Project and required reference files, as well as required software.
Let's create a directory to write all files.
```bash
mkdir hpexome
cd hpexome
```
**HPexome** only requires Python 3 and Java 8 to run, optionally DMRAA-supported batch processing system such as SGE.
However, to create input data it is required to align raw sequencing reads to reference genome and sort those reads by coordinate.
The required software are: BWA to align reads, amtools to convert output to BAM, and Picard to sort reads and fix tags.
```bash
# HPexome
pip3 install hpexome
# BWA
wget https://github.com/lh3/bwa/releases/download/v0.7.17/bwa-0.7.17.tar.bz2
tar xf bwa-0.7.17.tar.bz2
make -C bwa-0.7.17
# Samtools
wget https://github.com/samtools/samtools/releases/download/1.10/samtools-1.10.tar.bz2
tar xf samtools-1.10.tar.bz2
make -C samtools-1.10
# Picard
wget https://github.com/broadinstitute/picard/releases/download/2.21.7/picard.jar
```
Download raw sequencing data as FASTQ files.
```bash
wget \
ftp://ftp.1000genomes.ebi.ac.uk/vol1/ftp/phase3/data/NA12878/sequence_read/SRR098401_1.filt.fastq.gz \
ftp://ftp.1000genomes.ebi.ac.uk/vol1/ftp/phase3/data/NA12878/sequence_read/SRR098401_2.filt.fastq.gz
```
Download required reference files.
```bash
wget \
https://storage.googleapis.com/gatk-legacy-bundles/hg19/ucsc.hg19.fasta \
https://storage.googleapis.com/gatk-legacy-bundles/hg19/ucsc.hg19.fasta.fai \
https://storage.googleapis.com/gatk-legacy-bundles/hg19/ucsc.hg19.dict \
ftp://gsapubftp-anonymous@ftp.broadinstitute.org/bundle/hg19/dbsnp_138.hg19.vcf.gz \
ftp://gsapubftp-anonymous@ftp.broadinstitute.org/bundle/hg19/dbsnp_138.hg19.vcf.idx.gz \
ftp://gsapubftp-anonymous@ftp.broadinstitute.org/bundle/hg19/Mills_and_1000G_gold_standard.indels.hg19.sites.vcf.gz \
ftp://gsapubftp-anonymous@ftp.broadinstitute.org/bundle/hg19/Mills_and_1000G_gold_standard.indels.hg19.sites.vcf.idx.gz \
ftp://gsapubftp-anonymous@ftp.broadinstitute.org/bundle/hg19/1000G_phase1.indels.hg19.sites.vcf.gz \
ftp://gsapubftp-anonymous@ftp.broadinstitute.org/bundle/hg19/1000G_phase1.indels.hg19.sites.vcf.idx.gz \
ftp://gsapubftp-anonymous@ftp.broadinstitute.org/bundle/hg19/1000G_phase1.snps.high_confidence.hg19.sites.vcf.gz \
ftp://gsapubftp-anonymous@ftp.broadinstitute.org/bundle/hg19/1000G_phase1.snps.high_confidence.hg19.sites.vcf.idx.gz \
ftp://gsapubftp-anonymous@ftp.broadinstitute.org/bundle/hg19/1000G_omni2.5.hg19.sites.vcf.gz \
ftp://gsapubftp-anonymous@ftp.broadinstitute.org/bundle/hg19/1000G_omni2.5.hg19.sites.vcf.idx.gz
gunzip dbsnp_138.hg19.vcf.gz \
dbsnp_138.hg19.vcf.idx.gz \
Mills_and_1000G_gold_standard.indels.hg19.sites.vcf.gz \
Mills_and_1000G_gold_standard.indels.hg19.sites.vcf.idx.gz \
1000G_phase1.indels.hg19.sites.vcf.gz \
1000G_phase1.indels.hg19.sites.vcf.idx.gz \
1000G_phase1.snps.high_confidence.hg19.sites.vcf.gz \
1000G_phase1.snps.high_confidence.hg19.sites.vcf.idx.gz \
1000G_omni2.5.hg19.sites.vcf.gz \
1000G_omni2.5.hg19.sites.vcf.idx.gz
```
Index reference genome.
```bash
bwa-0.7.17/bwa index ucsc.hg19.fasta
```
Align raw sequencing reads to the human reference genome.
```bash
bwa-0.7.17/bwa mem \
-K 100000000 -t 16 -Y ucsc.hg19.fasta \
SRR098401_1.filt.fastq.gz SRR098401_2.filt.fastq.gz \
| samtools-1.10/samtools view -1 - > NA12878.bam
```
Sort aligned reads by genomic coordinates.
```bash
java -jar picard.jar SortSam \
INPUT=NA12878.bam \
OUTPUT=NA12878.sorted.bam \
SORT_ORDER=coordinate \
CREATE_INDEX=true
```
Fix RG tags.
```bash
java -jar picard.jar AddOrReplaceReadGroups \
I=NA12878.sorted.bam \
O=NA12878.sorted.rgfix.bam \
RGID=NA12878 \
RGSM=NA12878 \
RGLB=1kgenomes \
RGPL=Illumina \
PU=Unit1 \
CREATE_INDEX=true
```
In some computing setups it will require to set `SGE_ROOT` environment variable.
```bash
export SGE_ROOT=/var/lib/gridengine
```
Run **HPexome**.
```bash
hpexome \
--bam NA12878.sorted.rgfix.bam \
--genome ucsc.hg19.fasta \
--dbsnp dbsnp_138.hg19.vcf \
--indels Mills_and_1000G_gold_standard.indels.hg19.sites.vcf \
--indels 1000G_phase1.indels.hg19.sites.vcf \
--sites 1000G_phase1.snps.high_confidence.hg19.sites.vcf \
--sites 1000G_omni2.5.hg19.sites.vcf \
--scatter_count 16 \
--job_runner GridEngine \
result_files
```
It is expected the following files.
result_files/
├── HPexome.jobreport.txt
├── NA12878.sorted.rgfix.HC.raw.vcf
├── NA12878.sorted.rgfix.HC.raw.vcf.idx
├── NA12878.sorted.rgfix.HC.raw.vcf.out
├── NA12878.sorted.rgfix.intervals
├── NA12878.sorted.rgfix.intervals.out
├── NA12878.sorted.rgfix.realn.bai
├── NA12878.sorted.rgfix.realn.bam
├── NA12878.sorted.rgfix.realn.bam.out
├── NA12878.sorted.rgfix.recal.bai
├── NA12878.sorted.rgfix.recal.bam
├── NA12878.sorted.rgfix.recal.bam.out
├── NA12878.sorted.rgfix.recal.cvs
└── NA12878.sorted.rgfix.recal.cvs.out
See [hpexome-paper repository](https://github.com/labbcb/hpexome-paper) for information about performance and validation tests.
## Development
Clone git repository from GitHub.
```bash
git clone https://github.com/labbcb/hpexome.git
cd hpexome
```
Create virtual environment and install development version.
```bash
python3 -m venv venv
source venv/bin/activate
pip install --requirement requirements.txt
```
Publish new hpexome version to Pypi.
```bash
pip install setuptools wheel twine
python setup.py sdist bdist_wheel
twine upload -u $PYPI_USER -p $PYPI_PASS dist/*
```
| PypiClean |
/CEDApy-0.3.6.tar.gz/CEDApy-0.3.6/CEDA/economic/XinHua.py | import ast
import json
import requests
import pandas as pd
from tqdm import tqdm
from pygtrans import Translate
def translate(text:str=None):
client = Translate()
text = client.translate(text, target="en")
return text.translatedText
url = {
"CNFIN": "https://api.cnfin.com/roll/charts/"
}
class XHData(object):
def __init__(self, country:str=None):
self.country = country
pass
def toc(self):
urls, tid, titles, titles_en = [], [], [], []
if self.country == "CN":
for i in tqdm(range(12005, 12100)):
url = "https://api.cnfin.com/roll/charts/getContent?ids={}".format(i)
r = requests.get(url)
if r.ok:
data = r.json()
if data["data"] == "图表数据不存在":
pass
else:
urls.append(url)
tid.append(i)
title = json.loads(data["data"]["list"][0]["modelCode"])["title"]["text"]
titles.append(title)
titles_en.append(translate(text=title))
elif self.country == "USA":
for i in tqdm(range(6361, 6394)):
url = "https://api.cnfin.com/roll/charts/getContent?ids={}".format(i)
r = requests.get(url)
if r.ok:
data = r.json()
if data["data"] == "图表数据不存在":
pass
else:
urls.append(url)
tid.append(i)
title = json.loads(data["data"]["list"][0]["modelCode"])["title"]["text"]
titles.append(title)
titles_en.append(translate(text=title))
elif self.country == "UK":
for i in tqdm(range(6539, 6566)):
url = "https://api.cnfin.com/roll/charts/getContent?ids={}".format(i)
r = requests.get(url)
if r.ok:
data = r.json()
if data["data"] == "图表数据不存在":
pass
else:
urls.append(url)
tid.append(i)
title = json.loads(data["data"]["list"][0]["modelCode"])["title"]["text"]
titles.append(title)
titles_en.append(translate(text=title))
elif self.country == "Japan":
for i in tqdm(range(6394, 6425)):
url = "https://api.cnfin.com/roll/charts/getContent?ids={}".format(i)
r = requests.get(url)
if r.ok:
data = r.json()
if data["data"] == "图表数据不存在":
pass
else:
urls.append(url)
tid.append(i)
title = json.loads(data["data"]["list"][0]["modelCode"])["title"]["text"]
titles.append(title)
titles_en.append(translate(text=title))
return pd.DataFrame({"urls":urls, "id":tid, "title_zh":titles, "title_en":titles_en})
def download_data(self, iid:int=None):
tmp_url = url["CNFIN"] + "getContent?ids={}".format(iid)
r = requests.get(tmp_url)
if r.ok:
raw_data = r.json()
data = pd.DataFrame(ast.literal_eval(raw_data["data"]["list"][0]["content"]))
data.columns = ["date", "data"]
return data
else:
return ValueError("Something went wrong, try again later")
if __name__ == "__main__":
xhdata = XHData(country="CN")
toc = xhdata.toc()
data = xhdata.download_data(iid=12006) # GDP | PypiClean |
/CNVpytor-1.3.1.tar.gz/CNVpytor-1.3.1/cnvpytor/export.py | import json
from pathlib import Path
from .io import *
from .genome import *
_logger = logging.getLogger("cnvpytor.export")
class Wiggle:
def __init__(self, filename):
"""
creates bigwig file
Parameters
----------
filename : str
Path for the bigwig filename
"""
self.filename = filename
self.file = None
import pyBigWig
if not Path(filename).exists():
try:
self.file = pyBigWig.open(filename, 'w')
except IOError as e:
print("Unable to open file {}! Error: {}".format(filename, e))
except RuntimeError as e:
print("Unable to open file {}! Error: {}".format(filename, e))
else:
self.file = pyBigWig.open(filename)
def add_header_list(self, chr_len_list):
"""
Add header to the bigwig file
Parameters
----------
chr_len_list : list of tuple
chromosome name and length list.
"""
self.file.addHeader(chr_len_list)
def add_fixedstep(self, chrom, position_int, value_list, span=1, step=1):
"""
Add fixed step formatted data
Parameters
----------
chrom : str
chromosome name
position_int : int
start position
value_list : list of values
input values
span : int
step : int
"""
self.file.addEntries(chrom, position_int, values=value_list, span=span, step=step)
def get_cnvpytor_signal(self, md5, chrom, bin_size, signal, flag):
"""
Get a signal from pytor file
parameters
-------------
md5:
chrom: str
chromosome name
bin_size: int
bin size
signal: str
name of the cnvpytor signal
flag: int
Binary flag
returns
-------------
signal_details : numpy.nparray
Array contains data
"""
signal_details = md5.get_signal(chrom, bin_size, signal, flag)
return signal_details
def get_chrom_list(self, md5):
"""
Get list of chromosome name and its length
parameters
------------
md5: cnvpyto object
returns
------------
chr_len_list: list of tuples
list contain tuples with chr name and length
"""
chr_len = md5.get_signal(None, None, "chromosome lengths")
chr_len_list = list(zip(chr_len[::2].astype(str), chr_len[1::2].astype(int)))
return chr_len_list
def create_wig_offset_transform(self, md5, chr_list, bin_size, signal, flag, offset):
"""
parameters
-------------
md5:
chr_list: list
list of chromosomes
bin_size: int
bin size
signal: str
name of the signal
flag: int
Binary flag
offset:
returns
-------------
"""
# add chr_list to add wig header
self.add_header_list(chr_list)
# add the data
for (chrom, length) in chr_list:
signal_details = md5.get_signal(chrom, bin_size, signal, flag)
if isinstance(signal_details, np.ndarray):
signal_value_list = signal_details[()]
signal_value_list[signal_value_list != 0] += offset
signal_value_list = np.absolute(signal_value_list)
self.add_fixedstep(chrom, 0, signal_value_list, span=bin_size, step=bin_size)
def create_wig(self, md5, chr_list, bin_size, signal, flag):
"""
parameters
-------------
md5:
chr_list: list
list of chromosome
bin_size: int
bin size
signal: str
signal name
flag: int
Binary flag
"""
# add chr_list to add wig header
self.add_header_list(chr_list)
# add the data
for (chrom, length) in chr_list:
signal_details = md5.get_signal(chrom, bin_size, signal, flag)
if isinstance(signal_details, np.ndarray):
signal_value_list = signal_details[()]
self.add_fixedstep(chrom, 0, signal_value_list, span=bin_size, step=bin_size)
def __del__(self):
if self.file:
self.file.close()
class ExportJBrowse:
rd_signal_dct = {
"RD": {
"FLAG": [0, 0x0010],
"color": ["gray", "black"]
},
"RD partition": {
"FLAG": [0x0010],
"color": ["red"]
},
"RD call": {
"FLAG": [0x0010],
"color": ["green"]
}
}
snp_signal_dct = {
"SNP baf": {
"FLAG": [0x0100],
"color": ["gray"],
"nonCont": [True],
},
"SNP i1": {
"FLAG": [0x0100, 0x0100],
"color": ["red", "red"],
"nonCont": [True, True],
"offset": [0.5, -0.5]
},
}
signal_dct = {
"RD": "his_rd_p_%(bin_size)d%(rd_flag)s",
"RD partition": "his_rd_p_%(bin_size)d_partition%(rd_flag)s",
"RD call": "his_rd_p_%(bin_size)d_partition%(rd_flag)s_merge",
"SNP baf": "snp_baf_%(bin_size)d%(snp_flag)s",
"SNP maf": "snp_maf_%(bin_size)d%(snp_flag)s",
"SNP i1": "snp_i1_%(bin_size)d%(snp_flag)s",
"SNP i1 partition": "snp_i1_%(bin_size)d%(snp_flag)s_partition",
}
def __init__(self, files, dir_name):
"""
Exports CNVpytor data to a directory
Parameters
----------
files : str
CNVpytor files path
dir_name: str
Export directory path
"""
self.files = files
self.dir = Path(dir_name)
self.io = [IO(f, ro=True) for f in files]
self.export_dir = self.export_create_dir()
@property
def pytor_names(self):
"""
Get name list for for pytor files
return
-------
name_list: list
filename for the pytor files
"""
name_list = []
for filename in self.files:
name_list.append(Path(filename).resolve().stem)
return name_list
@property
def export_directory(self):
"""
return export directory name
return
-------
export directory path
"""
if self.dir.is_dir():
if len(self.files) > 1:
# for multiple input file
default_name = self.dir.joinpath("cnvpytor_jbrowse_export")
else:
# for single_input_file
default_name = self.dir.joinpath("jbrowse_{}".format(self.pytor_names[0]))
if default_name.exists():
tmp_name = default_name
i = 1
while default_name.exists():
update_name = "{}({})".format(tmp_name.name, i)
default_name = default_name.with_name(update_name)
i = i+1
return default_name
else:
if self.dir.parent.exists():
return self.dir
else:
_logger.error("Error: incorrect export path: {}".format(self.dir))
exit(0)
def export_create_dir(self):
"""
create export directory
return
-------
main_dir: str
export directory path
"""
main_dir = self.export_directory
main_dir.mkdir(parents=True, exist_ok=True)
_logger.info("CNVpytor data exporting for JBrowse view in {}".format(main_dir))
return main_dir
@property
def export_data_dir_list(self):
"""
create "bw" directory
return
---------
data_dir_list: list
list of filenames
"""
data_dir = self.export_dir.joinpath("bw")
data_dir.mkdir(parents=True, exist_ok=True)
data_dir_list = []
for root_name in self.pytor_names:
root_data = data_dir.joinpath(root_name)
root_data.mkdir(parents=True, exist_ok=True)
data_dir_list.append(root_data)
return data_dir_list
@property
def export_seq_dir(self):
"""
create "seq" directory
return
---------
seq_dir: path
'seq" directory path
"""
seq_dir = self.export_dir.joinpath("seq")
seq_dir.mkdir(parents=True, exist_ok=True)
return seq_dir
@property
def export_tracklist_file(self):
"""
Get the path for trackList.json file
return
---------
track_list: path
path for trackList.json file
"""
track_list = self.export_dir.joinpath("trackList.json")
return track_list
@property
def export_ref_file(self):
"""
Get the path for refSeqs.json file
return
---------
ref_file : path
path for refSeqs.json file
"""
ref_file = self.export_seq_dir.joinpath("refSeqs.json")
return ref_file
def signal_name(self, bin_size, signal, flags=0):
"""
Read data from the pytor file using bin_size, signal name and data flag
parameter
------------
:param bin_size: int
:param signal: string
:param flags: Flag
return
------------
"""
if signal in self.signal_dct:
try:
return self.signal_dct[signal] % {"bin_size": bin_size, "rd_flag": Signals().suffix_rd_flag(flags),
"snp_flag": Signals().suffix_snp_flag(flags),
"flag": Signals().suffix_flag(flags)}
except TypeError:
return None
else:
return None
def rd_chr_bin(self, root_io):
"""
Read 'RD' data from pytor file
parameter
-----------
root_io: io object
cnvpytor io object
return
-----------
chrs: string
chromosome names
bss: int
length of the chromosome
"""
chr_bs = root_io.chromosomes_bin_sizes_with_signal("RD")
chrs = {}
bss = []
for c, b in chr_bs:
if c not in chrs:
chrs[c] = []
chrs[c].append(int(b))
if int(b) not in bss:
bss.append(int(b))
return chrs, bss
def snp_chr_bin(self, root_io):
"""
read snp likelihood information pytor pytor file
parameter
-----------
root_io: io object
cnvpytor io object
return
-----------
chrs: string
chromosome names
bss: int
length of the chromosome
"""
chr_bs = root_io.chromosomes_bin_sizes_with_signal("SNP likelihood", FLAG_USEMASK)
chrs = {}
bss = []
for c, b in chr_bs:
if c not in chrs:
chrs[c] = []
chrs[c].append(int(b))
if int(b) not in bss:
bss.append(int(b))
return chrs, bss
@staticmethod
def create_bigwig(root_io, bigwig_file, chr_list, bin_size, signal_name, flag, offset=None):
"""
Creates big wig file using the following criteria
parameter
-----------
root_io: io object
cnvpytor io object
bigwig_file: str
Name of the bigwig wilf
chr_list: list
chromosome names
bin_size: int
bin size
signal_name: string
name of the singal
flag: int
Binary flag
offset:
return
-----------
"""
wig = None
for (chrom, length) in chr_list:
signal_details = root_io.get_signal(chrom, bin_size, signal_name, flag)
if isinstance(signal_details, np.ndarray):
signal_value_list = signal_details[()]
if offset is not None:
signal_value_list[signal_value_list != 0] += offset
signal_value_list = np.absolute(signal_value_list)
if not isinstance(wig, Wiggle):
wig = Wiggle(bigwig_file)
wig.add_header_list(chr_list)
wig.add_fixedstep(chrom, 0, signal_value_list, span=bin_size, step=bin_size)
def rd_signal(self):
"""
Get read depth signal and write to bigwig file
"""
_logger.debug("Create Read depth related signals")
for root_index, root_io in enumerate(self.io):
_logger.info("JBrowse export: RD related data for {}".format(self.pytor_names[root_index]))
rd_chr, rd_bin = self.rd_chr_bin(root_io)
# get chr list
chr_len = root_io.get_signal(None, None, "chromosome lengths")
chr_list = list(zip(chr_len[::2].astype(str), chr_len[1::2].astype(int)))
for signal_name, signal_dct in self.rd_signal_dct.items():
_logger.info("JBrowse export: RD signal {}".format(signal_name))
for index, flag in enumerate(signal_dct['FLAG']):
for bin_size in rd_bin:
signal = self.signal_name(bin_size, signal_name, flag)
bigwig_filename = "{}.bw".format(signal)
bigwig_file = self.export_data_dir_list[root_index].joinpath(bigwig_filename)
bigwig_file = str(bigwig_file)
self.create_bigwig(root_io, bigwig_file, chr_list, bin_size, signal_name, flag)
def snp_signal(self):
"""
Get signal and write to file
"""
_logger.debug("Create SNP related signals")
for root_index, root_io in enumerate(self.io):
_logger.info("JBrowse export: SNP related data for {}".format(self.pytor_names[root_index]))
snp_chr, snp_bin = self.snp_chr_bin(root_io)
# get chr list
chr_len = root_io.get_signal(None, None, "chromosome lengths")
chr_list = list(zip(chr_len[::2].astype(str), chr_len[1::2].astype(int)))
for signal_name, signal_dct in self.snp_signal_dct.items():
_logger.info("JBrowse export: SNP signal {}".format(signal_name))
for index, flag in enumerate(signal_dct['FLAG']):
if "offset" in signal_dct:
offset = signal_dct['offset'][index]
for bin_size in snp_bin:
signal = self.signal_name(bin_size, signal_name, flag)
bigwig_filename = "{}_offset{}.bw".format(signal, offset)
bigwig_file = self.export_data_dir_list[root_index].joinpath(bigwig_filename)
bigwig_file = str(bigwig_file)
self.create_bigwig(root_io, bigwig_file, chr_list, bin_size, signal_name, flag,
offset=offset)
else:
for bin_size in snp_bin:
signal = self.signal_name(bin_size, signal_name, flag)
bigwig_filename = "{}.bw".format(signal)
bigwig_file = self.export_data_dir_list[root_index].joinpath(bigwig_filename)
bigwig_file = str(bigwig_file)
self.create_bigwig(root_io, bigwig_file, chr_list, bin_size, signal_name, flag)
@staticmethod
def add_config_reference():
track_dct = {'formatVersion': 1, "plugins": ["MultiBigWig", "MultiScaleBigWig"], 'tracks': []}
track_dct['tracks'].append({
"category": "Reference sequence",
"chunkSize": 20000,
"key": "Reference sequence",
"label": "DNA",
"seqType": "dna",
"storeClass": "JBrowse/Store/Sequence/StaticChunked",
"type": "SequenceTrack",
"urlTemplates": "seq/{refseq_dirpath}/{refseq}-"
})
return track_dct
def add_rd_config_track(self):
"""
Add read depth tracks
returns
-----------
track_dct_list: dict
read depth config setings
"""
_logger.debug("Get RD config track")
track_dct_list = []
for root_index, root_io in enumerate(self.io):
rd_chr, rd_bin = self.rd_chr_bin(root_io)
url_template_dct = []
for signal_name, signal_dct in self.rd_signal_dct.items():
if 'FLAG' in signal_dct:
for index, flag in enumerate(signal_dct['FLAG']):
suffix_rd_flag = Signals.suffix_rd_flag(flag)
signal_id = "{}_{}{}".format(self.pytor_names[root_index], signal_name, suffix_rd_flag)
scales = {}
for bin_size in rd_bin:
signal = self.signal_name(bin_size, signal_name, flag)
bigwig_filename = "{}.bw".format(signal)
bigwig_file = self.export_data_dir_list[root_index].joinpath(bigwig_filename)
bigwig_current_path = Path(bigwig_file.parent.parent.name).joinpath(bigwig_file.parent.name, bigwig_file.name).as_posix()
if bigwig_file.exists():
scales[bin_size] = bigwig_current_path
if len(scales) > 0:
url_template_dct.append({
"storeClass": "MultiScaleBigWig/Store/SeqFeature/MultiScaleBigWig",
"scales": scales,
"name": signal_id,
"color": signal_dct['color'][index],
})
if len(url_template_dct) > 0:
track_dct = {
"category": self.pytor_names[root_index],
'autoscale': 'local',
"storeClass": "MultiBigWig/Store/SeqFeature/MultiBigWig",
"showTooltips": True,
"showLabels": True,
"clickTooltips": True,
"key": "RD",
"label": "RD {}".format(self.pytor_names[root_index]),
"type": "MultiBigWig/View/Track/MultiWiggle/MultiXYPlot",
'useStdDev': True,
'urlTemplates': url_template_dct
}
track_dct_list.append(track_dct)
return track_dct_list
def add_snp_config_track(self):
"""
Add snp track
return
---------
track_dct_list: dict
settings for snp config
"""
_logger.debug("Get SNP config track info")
track_dct_list = []
for root_index, root_io in enumerate(self.io):
snp_url_dct_list = []
snp_chr, snp_bin = self.snp_chr_bin(root_io)
for signal_name, signal_dct in self.snp_signal_dct.items():
for index, flag in enumerate(signal_dct['FLAG']):
suffix_flag = Signals.suffix_snp_flag(flag)
scales = {}
if "offset" in signal_dct:
offset = signal_dct['offset'][index]
signal_id = "{}{}{}".format(signal_name, suffix_flag, offset)
for bin_size in snp_bin:
signal = self.signal_name(bin_size, signal_name, flag)
bigwig_filename = "{}_offset{}.bw".format(signal, offset)
bigwig_file = self.export_data_dir_list[root_index].joinpath(bigwig_filename)
bigwig_current_path = Path(bigwig_file.parent.parent.name).joinpath(bigwig_file.parent.name, bigwig_file.name).as_posix()
if bigwig_file.exists():
scales[bin_size] = bigwig_current_path
else:
signal_id = "{}{}".format(signal_name, suffix_flag)
for bin_size in snp_bin:
signal = self.signal_name(bin_size, signal_name, flag)
bigwig_filename = "{}.bw".format(signal)
bigwig_file = self.export_data_dir_list[root_index].joinpath(bigwig_filename)
bigwig_current_path = Path(bigwig_file.parent.parent.name).joinpath(bigwig_file.parent.name, bigwig_file.name).as_posix()
if bigwig_file.exists():
scales[bin_size] = bigwig_current_path
if len(scales) > 0:
snp_url_dct_list.append({
"storeClass": "MultiScaleBigWig/Store/SeqFeature/MultiScaleBigWig",
"scales": scales,
"name": signal_id,
"color": signal_dct['color'][index],
"nonCont": signal_dct['nonCont'][index]
})
if len(snp_url_dct_list) > 0:
track_dct = {
"category": self.pytor_names[root_index],
'autoscale': 'local',
"storeClass": "MultiBigWig/Store/SeqFeature/MultiBigWig",
"showTooltips": True,
"showLabels": True,
"clickTooltips": True,
"max_score": 1,
"key": "SNP",
"label": "SNP {}".format(self.pytor_names[root_index]),
"type": "MultiBigWig/View/Track/MultiWiggle/MultiXYPlot",
'urlTemplates': snp_url_dct_list,
}
track_dct_list.append(track_dct)
return track_dct_list
def create_tracklist_json(self):
"""
create track list file
return
----------
track_dct: dict
tracklist configuration
"""
_logger.debug("Creates config file: {}".format(self.export_tracklist_file))
# reference config
track_dct = self.add_config_reference()
# create rd config
rd_track_list = self.add_rd_config_track()
for rd_track in rd_track_list:
track_dct['tracks'].append(rd_track)
# create SNP config
snp_track_list = self.add_snp_config_track()
for snp_track in snp_track_list:
track_dct['tracks'].append(snp_track)
with open(self.export_tracklist_file, 'w') as f:
json.dump(track_dct, f, indent=2)
return track_dct
def create_reference_json(self):
_logger.debug("Exporting reference details")
# get signal details
chr_len = list(np.array(self.io[0].get_signal(None, None, "chromosome lengths")).astype("str"))
chr_dct = dict(zip(chr_len[::2], chr_len[1::2]))
# create signal list in proper format
chr_dct_list = []
for chr, length in chr_dct.items():
tmp_dct = {"end": length, "length": length, "name": chr, "start": 0}
chr_dct_list.append(tmp_dct)
# save it to file
with open(self.export_ref_file, 'w') as f:
json.dump(chr_dct_list, f, indent=2)
def __del__(self):
_logger.info("JBrowse export: complete")
_logger.info("Copy this directory to jbrowse directory if export path is not set to JBrowse path, "
"To access this via localhost: http://localhost/jbrowse/?data={}"
.format(self.export_directory.parent.name)) | PypiClean |
/BGWpy-3.2.2.tar.gz/BGWpy-3.2.2/Documentation/Examples/Abinit/v42-Workflow-GW.py | from os.path import join as pjoin
from BGWpy import Structure, Workflow, AbinitScfTask, AbinitWfnTask, Abi2BgwTask, EpsilonTask, SigmaTask
workflow = Workflow(dirname='42-Workflow')
# Common arguments for tasks.
kwargs = dict(
structure = Structure.from_file('../../Data/Structures/GaAs.json'),
prefix = 'GaAs',
pseudo_dir = '../../Data/Pseudos',
pseudos = ['31-Ga.pspnc', '33-As.pspnc'],
ngkpt = [2,2,2],
ecut = 5.0,
nband = 9,
# Parameters for the MPI runner
nproc = 8,
nproc_per_node = 8,
mpirun = 'mpirun',
nproc_flag = '-n',
nproc_per_node_flag = '--npernode',
)
# Ground state density calculation (SCF)
scftask = AbinitScfTask(
dirname = pjoin(workflow.dirname, 'density'),
kshift = [.5,.5,.5],
**kwargs)
# Wavefunctions and eigenvalues calculation (NSCF) on a k-shifted grid
wfntask_ksh = AbinitWfnTask(
dirname = pjoin(workflow.dirname, 'wfn'),
kshift = [.5,.5,.5],
charge_density_fname = scftask.charge_density_fname,
**kwargs)
# Interfacing Abinit with BerkeleyGW.
abi2bgwtask_ksh = Abi2BgwTask(
dirname = wfntask_ksh.dirname,
kshift = [.5,.5,.5],
wfn_fname = wfntask_ksh.wfn_fname,
**kwargs)
# Wavefunctions and eigenvalues calculation (NSCF) on a k+q-shifted grid
wfntask_qsh = AbinitWfnTask(
dirname = pjoin(workflow.dirname, 'wfnq'),
kshift = [.5,.5,.5],
qshift = [.001,.0,.0],
charge_density_fname = scftask.charge_density_fname,
**kwargs)
# Interfacing Abinit with BerkeleyGW.
abi2bgwtask_qsh = Abi2BgwTask(
dirname = wfntask_qsh.dirname,
kshift = [.5,.5,.5],
qshift = [.001,.0,.0],
wfn_fname = wfntask_qsh.wfn_fname,
**kwargs)
# Wavefunctions and eigenvalues calculation (NSCF) on an unshifted grid
wfntask_ush = AbinitWfnTask(
dirname = pjoin(workflow.dirname, 'wfn_co'),
kshift = [.0,.0,.0],
charge_density_fname = scftask.charge_density_fname,
**kwargs)
# Interfacing Abinit with BerkeleyGW.
abi2bgwtask_ush = Abi2BgwTask(
dirname = wfntask_ush.dirname,
kshift = [.0,.0,.0],
wfn_fname = wfntask_ush.wfn_fname,
rho_fname = scftask.rho_fname,
vxc_fname = scftask.vxc_fname,
rhog_flag = True,
vxcg_flag = True,
**kwargs)
# Dielectric matrix computation and inversion (epsilon)
epsilontask = EpsilonTask(
dirname = pjoin(workflow.dirname, 'epsilon'),
qshift = [.001,.0,.0],
ecuteps = 10.0,
wfn_fname = abi2bgwtask_ksh.wfn_fname,
wfnq_fname = abi2bgwtask_qsh.wfn_fname,
**kwargs)
# Self-energy calculation (sigma)
sigmatask = SigmaTask(
dirname = pjoin(workflow.dirname, 'sigma'),
ibnd_min = 1,
ibnd_max = 8,
extra_lines = ['screening_semiconductor'],
#extra_variables = {'number_of_frequencies' : 10},
wfn_co_fname = abi2bgwtask_ush.wfn_fname,
rho_fname = abi2bgwtask_ush.rho_fname,
vxc_fname = abi2bgwtask_ush.vxc_fname,
eps0mat_fname = epsilontask.eps0mat_fname,
epsmat_fname = epsilontask.epsmat_fname,
**kwargs)
# Add all task without merging (executed from a sub-directory)
workflow.add_tasks([scftask,
wfntask_ksh, abi2bgwtask_ksh,
wfntask_qsh, abi2bgwtask_qsh,
wfntask_ush, abi2bgwtask_ush,
epsilontask, sigmatask,
], merge=False)
# Execution
workflow.write()
for task in workflow.tasks:
task.run()
task.report()
# The previous iteration also could have been performed as
#workflow.run()
#workflow.report() | PypiClean |
/Dendrite_Neural_Networks-0.0.9-py3-none-any.whl/DMN.py |
# In[2]:
from tensorflow.keras.layers import Layer
from keras import backend as K
from keras.initializers import Constant
from keras.initializers import RandomUniform
from keras import activations
import numpy as np
class DMNlayer(Layer):
def __init__(self, units,dendrites = [], activation=None, **kwargs):
self.units = units #Number of dendrites
self.dendrites = dendrites
self.activation = activations.get(activation)
if len(dendrites) == 2:
self.units = int(len(dendrites[0]))
super(DMNlayer, self).__init__(**kwargs)
def build(self, input_shape):
if len(self.dendrites) == 0:
self.Wmin = self.add_weight(name='Wmin',
shape=(self.units, input_shape[1]),
initializer=RandomUniform(minval=-2, maxval=-0.1, seed=None),
trainable=True)
self.Wmax = self.add_weight(name='Wmax',
shape=(self.units, input_shape[1]),
initializer=RandomUniform(minval=0.1, maxval=0.5, seed=None),
trainable=True)
if len(self.dendrites) == 2:
self.Wmin = self.add_weight(name='Wmin',
shape=(int(self.units/input_shape[1]), input_shape[1]),
initializer = Constant(value = self.dendrites[0]),
trainable=True)
self.Wmax = self.add_weight(name='Wmax',
shape=(int(self.units/input_shape[1]), input_shape[1]),
initializer= Constant(value = self.dendrites[1]),
trainable=True)
super(DMNlayer, self).build(input_shape)
def call(self, x):
Q = K.int_shape(x)[0]
if Q is None: Q = 1
X = K.repeat(x,np.shape(self.Wmin)[0])
Wmin = K.permute_dimensions(K.repeat(self.Wmin, Q), (1,0,2))
L1 = K.min(X - Wmin, axis=2)
Wmax = K.permute_dimensions(K.repeat(self.Wmax, Q), (1,0,2))
L2 = K.min(Wmax - X, axis=2)
output = K.minimum(L1,L2)
if self.activation is not None:
output = self.activation(output)
return output
def compute_output_shape(self, input_shape):
return (input_shape[0],self.units)
def get_config(self):
config = super(DMNlayer, self).get_config()
config.update({"units": self.units,
"activation":self.activation})
return config
def valWmin(self):
print("Wmin:",self.Wmin)
def valWmax(self):
print("Wmax:",self.Wmax) | PypiClean |
/liteclashproman-0.2.1.tar.gz/liteclashproman-0.2.1/README.md | # LiteClashProMan
生成并更新 clash 配置文件,并提供 http 下载和规则文件镜像下载。
## 说明
本项目使用 `配置模板` 以及节点的 `订阅链接` 生成多种不同的配置文件,也可以将多个订阅中的节点整合至一个配置文件中~~但会导致部分功能丧失~~。
## 快速上手
安装依赖(推荐使用pipx)
```bash
pip install liteclashproman
```
运行程序
```bash
# 可以通过 -c 指定特定的配置文件
# lcpm -c specific_config.yaml
# 未添加 -c 时,默认读取当前目录下的 config.yaml 文件
lcpm # 等效于 lcpm -c config.yaml
```
## 项目配置文件
参考源代码中的 [模板](./LiteClashProMan/static/config.exp.yaml)
### 配置模板(template)
位于 `/data/template` 文件夹的半成品配置文件,在每日的更新中会被填入订阅节点并生成配置文件,存放于 `/data` 文件夹下。
您可以仿照 `/static/template` 中预设的默认模板文件和 [clash文档](https://github.com/Dreamacro/clash/wiki/Configuration) 创建自己的模板。
### 订阅链接(subscribe)
通常由服务商所提供,以获取节点信息的订阅链接。如果您愿意临时提供订阅链接以供开发,可以联系开发者进行更多服务商适配。
目前支持的服务商/订阅方式:
- [Just My Socks](https://justmysocks3.net/members/index.php)
- 类型: `jms`
- 特殊配置项:
- counter:节点的剩余流量API
- 通用的Clash订阅地址,通过联网下载获取 (ClashSub)
- 通用的Clash配置文件,通过本地文件获取 (ClashFile)
计划支持的提供商/订阅方式:
- 单独的ss节点
- 单独的ssr节点
### 规则集(ruleset)
或称规则提供者(rule-provider),是一系列域名或应用程序的列表,可以用于规则的编写。
本项目默认模板使用的规则集来源于 [@Loyalsoldier/clash-rules](https://github.com/Loyalsoldier/clash-rules),于每日 22:30(UTC) / 06:30(UTC+8) 使用 GitHub Action 自动生成,因此本项目的定时更新也设定为其后5分钟更新。
### 特色功能
#### 规则集的本地缓存
在部分地区,直接访问 GitHub 获取规则集是较为困难和耗时的行为,因此由配置模板生成配置文件时会将其中的规则集下载至本地并替换配置文件中的下载链接,使规则文件的下载更加高效稳定。
当然,您可以自由的在配置文件中添加不属于默认规则集以外的链接,只需要注意:在启用的规则文件中的规则集若出现**重复的文件名**,将会只保留**靠后**的规则集的文件,因此请务必注意不要出现**不同的文件但文件名相同**的情况。
### 剩余流量及租期
部分服务商会提供接口供用户查询剩余流量及到期时间,在 [Clash for Windows](https://github.com/Fndroid/clash_for_windows_pkg/releases) 或 [Clash Meta for Android](https://github.com/MetaCubeX/ClashMetaForAndroid) 中可以通过 `Header` 中的信息将上述信息展示在配置文件界面,若您的服务商提供了接口且在配置文件中**仅启用了一个订阅**,那么您可以在获取配置文件时自动额外取得这些信息。
| PypiClean |
/Flask-libsass-1.1.0.tar.gz/Flask-libsass-1.1.0/flask_libsass.py | import os
import posixpath
import hashlib
from werkzeug.exceptions import NotFound
from flask import current_app, request, Response
from flask import _app_ctx_stack as stack
import sass
class Sass(object):
"""
`files`
A dictionary mapping output file path (without extension) to the
root input sass or scss file. The input file is searched for
relative to the application resource root.
`app`
A flask application to bind the extension to.
`url_path`
A prefix to add to the path of the url of the generated css.
`endpoint`
A string that can be passed to `url_for` to find the url for a
generated css file. Defaults to `sass`.
`include_paths`
A list of directories for scss to search for included files.
Relative paths are resolved from pwd. Using
`pkg_resources.resource_filename` is recommended. The directory
containing the root input file takes priority.
`output_style`
A string specifiying how the generated css should appear. One of
`"nested"`, `"expanded"` `"compact"` or `"compressed"`. Defaults
to `"nested"`. See the libsass documentation for details.
"""
_output_styles = {
'nested': sass.SASS_STYLE_NESTED,
'expanded': sass.SASS_STYLE_EXPANDED,
'compact': sass.SASS_STYLE_COMPACT,
'compressed': sass.SASS_STYLE_COMPRESSED,
}
def __init__(self, files, app=None,
url_path='/css', endpoint='sass',
include_paths=None, output_style=None):
self._files = files
self._url_path = url_path
self._endpoint = endpoint
self._include_paths = ','.join(include_paths).encode()
self._output_style = self._output_styles.get(
output_style, sass.SASS_STYLE_NESTED
)
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app):
app.add_url_rule(
posixpath.join(self._url_path, '<path:filename>.css'),
endpoint=self._endpoint,
view_func=self.send_css
)
def _compile(self, filename):
input_file = os.path.join(
current_app.root_path,
self._files[filename]
).encode()
return sass.compile_file(
input_file,
include_paths=self._include_paths,
output_style=self._output_style
)
def send_css(self, filename):
if filename not in self._files:
raise NotFound()
rebuild = current_app.config.get('SASS_REBUILD', False)
if not rebuild:
if not hasattr(stack.top, 'sass_cache'):
stack.top.sass_cache = {}
cache = stack.top.sass_cache
if filename not in cache:
css = self._compile(filename)
etag = hashlib.sha1(css).hexdigest()
cache[filename] = (css, etag)
else:
css, etag = cache[filename]
response = Response(css, content_type='text/css')
response.set_etag(etag)
response.make_conditional(request)
return response
else:
css = self._compile(filename)
return Response(css, content_type='text/css') | PypiClean |
/FreeGS-0.8.0-py3-none-any.whl/freegs/critical.py | from scipy import interpolate
from numpy import zeros
from numpy.linalg import inv
from numpy import (
dot,
linspace,
argmax,
argmin,
abs,
clip,
sin,
cos,
pi,
amax,
arctan2,
sqrt,
sum,
)
import numpy as np
from warnings import warn
def find_critical(R, Z, psi, discard_xpoints=True):
"""
Find critical points
Inputs
------
R - R(nr, nz) 2D array of major radii
Z - Z(nr, nz) 2D array of heights
psi - psi(nr, nz) 2D array of psi values
Returns
-------
Two lists of critical points
opoint, xpoint
Each of these is a list of tuples with (R, Z, psi) points
The first tuple is the primary O-point (magnetic axis)
and primary X-point (separatrix)
"""
# Get a spline interpolation function
f = interpolate.RectBivariateSpline(R[:, 0], Z[0, :], psi)
# Find candidate locations, based on minimising Bp^2
Bp2 = (f(R, Z, dx=1, grid=False) ** 2 + f(R, Z, dy=1, grid=False) ** 2) / R ** 2
# Get grid resolution, which determines a reasonable tolerance
# for the Newton iteration search area
dR = R[1, 0] - R[0, 0]
dZ = Z[0, 1] - Z[0, 0]
radius_sq = 9 * (dR ** 2 + dZ ** 2)
# Find local minima
J = zeros([2, 2])
xpoint = []
opoint = []
nx, ny = Bp2.shape
for i in range(2, nx - 2):
for j in range(2, ny - 2):
if (
(Bp2[i, j] < Bp2[i + 1, j + 1])
and (Bp2[i, j] < Bp2[i + 1, j])
and (Bp2[i, j] < Bp2[i + 1, j - 1])
and (Bp2[i, j] < Bp2[i - 1, j + 1])
and (Bp2[i, j] < Bp2[i - 1, j])
and (Bp2[i, j] < Bp2[i - 1, j - 1])
and (Bp2[i, j] < Bp2[i, j + 1])
and (Bp2[i, j] < Bp2[i, j - 1])
):
# Found local minimum
R0 = R[i, j]
Z0 = Z[i, j]
# Use Newton iterations to find where
# both Br and Bz vanish
R1 = R0
Z1 = Z0
count = 0
while True:
Br = -f(R1, Z1, dy=1, grid=False) / R1
Bz = f(R1, Z1, dx=1, grid=False) / R1
if Br ** 2 + Bz ** 2 < 1e-6:
# Found a minimum. Classify as either
# O-point or X-point
dR = R[1, 0] - R[0, 0]
dZ = Z[0, 1] - Z[0, 0]
d2dr2 = (psi[i + 2, j] - 2.0 * psi[i, j] + psi[i - 2, j]) / (
2.0 * dR
) ** 2
d2dz2 = (psi[i, j + 2] - 2.0 * psi[i, j] + psi[i, j - 2]) / (
2.0 * dZ
) ** 2
d2drdz = (
(psi[i + 2, j + 2] - psi[i + 2, j - 2]) / (4.0 * dZ)
- (psi[i - 2, j + 2] - psi[i - 2, j - 2]) / (4.0 * dZ)
) / (4.0 * dR)
D = d2dr2 * d2dz2 - d2drdz ** 2
if D < 0.0:
# Found X-point
xpoint.append((R1, Z1, f(R1, Z1)[0][0]))
else:
# Found O-point
opoint.append((R1, Z1, f(R1, Z1)[0][0]))
break
# Jacobian matrix
# J = ( dBr/dR, dBr/dZ )
# ( dBz/dR, dBz/dZ )
J[0, 0] = -Br / R1 - f(R1, Z1, dy=1, dx=1)[0][0] / R1
J[0, 1] = -f(R1, Z1, dy=2)[0][0] / R1
J[1, 0] = -Bz / R1 + f(R1, Z1, dx=2) / R1
J[1, 1] = f(R1, Z1, dx=1, dy=1)[0][0] / R1
d = dot(inv(J), [Br, Bz])
R1 = R1 - d[0]
Z1 = Z1 - d[1]
count += 1
# If (R1,Z1) is too far from (R0,Z0) then discard
# or if we've taken too many iterations
if ((R1 - R0) ** 2 + (Z1 - Z0) ** 2 > radius_sq) or (count > 100):
# Discard this point
break
# Remove duplicates
def remove_dup(points):
    """Remove near-duplicate (R, Z, psi) critical points.

    Two points whose squared R-Z distance is below 1e-5 are considered
    the same critical point; the first occurrence is kept.
    (Fix: the previous version requested an index from enumerate() that
    was never used.)
    """
    result = []
    for p in points:
        # Keep p only if it is not within tolerance of an already accepted point
        if not any((p[0] - p2[0]) ** 2 + (p[1] - p2[1]) ** 2 < 1e-5 for p2 in result):
            result.append(p)
    return result
xpoint = remove_dup(xpoint)
opoint = remove_dup(opoint)
if len(opoint) == 0:
# Can't order primary O-point, X-point so return
print("Warning: No O points found")
return opoint, xpoint
# Find primary O-point by sorting by distance from middle of domain
Rmid = 0.5 * (R[-1, 0] + R[0, 0])
Zmid = 0.5 * (Z[0, -1] + Z[0, 0])
opoint.sort(key=lambda x: (x[0] - Rmid) ** 2 + (x[1] - Zmid) ** 2)
# Draw a line from the O-point to each X-point. Psi should be
# monotonic; discard those which are not
if discard_xpoints:
Ro, Zo, Po = opoint[0] # The primary O-point
xpt_keep = []
for xpt in xpoint:
Rx, Zx, Px = xpt
rline = linspace(Ro, Rx, num=50)
zline = linspace(Zo, Zx, num=50)
pline = f(rline, zline, grid=False)
if Px < Po:
pline *= -1.0 # Reverse, so pline is maximum at X-point
# Now check that pline is monotonic
# Tried finding maximum (argmax) and testing
# how far that is from the X-point. This can go
# wrong because psi can be quite flat near the X-point
# Instead here look for the difference in psi
# rather than the distance in space
maxp = amax(pline)
if (maxp - pline[-1]) / (maxp - pline[0]) > 0.001:
# More than 0.1% drop in psi from maximum to X-point
# -> Discard
continue
ind = argmin(pline) # Should be at O-point
if (rline[ind] - Ro) ** 2 + (zline[ind] - Zo) ** 2 > 1e-4:
# Too far, discard
continue
xpt_keep.append(xpt)
xpoint = xpt_keep
# Sort X-points by distance to primary O-point in psi space
psi_axis = opoint[0][2]
xpoint.sort(key=lambda x: (x[2] - psi_axis) ** 2)
return opoint, xpoint
def core_mask(R, Z, psi, opoint, xpoint=[], psi_bndry=None):
    """
    Mark the parts of the domain which are in the core

    Inputs
    ------
    R[nx,ny] - 2D array of major radius (R) values
    Z[nx,ny] - 2D array of height (Z) values
    psi[nx,ny] - 2D array of poloidal flux
    opoint, xpoint - Values returned by find_critical

    If psi_bndry is not None, then that is used to find the
    separatrix, not the X-points.

    Returns
    -------
    A 2D array [nx,ny] which is 1 inside the core, 0 outside

    Notes
    -----
    Uses a scan-line flood fill starting from the primary O-point,
    restricted to cells with normalised psi < 1.
    NOTE(review): ``xpoint=[]`` is a mutable default argument; it is only
    read here, never mutated, so it is harmless in practice.
    """
    mask = zeros(psi.shape)
    nx, ny = psi.shape

    # Start and end points
    Ro, Zo, psi_axis = opoint[0]
    if psi_bndry is None:
        # No explicit boundary psi given: use the primary X-point's psi
        _, _, psi_bndry = xpoint[0]

    # Normalise psi: 0 on the magnetic axis, 1 on the separatrix/boundary
    psin = (psi - psi_axis) / (psi_bndry - psi_axis)

    # Need some care near X-points to avoid flood filling through saddle point
    # Here we first set the x-points regions to a value, to block the flood fill
    # then later return to handle these more difficult cases
    #
    xpt_inds = []
    for rx, zx, _ in xpoint:
        # Find nearest index
        ix = argmin(abs(R[:, 0] - rx))
        jx = argmin(abs(Z[0, :] - zx))
        xpt_inds.append((ix, jx))
        # Fill this point and all around with '2' (neither 0 nor 1) so the
        # flood fill below treats these cells as already visited
        for i in np.clip([ix - 1, ix, ix + 1], 0, nx - 1):
            for j in np.clip([jx - 1, jx, jx + 1], 0, ny - 1):
                mask[i, j] = 2

    # Find nearest index to start (the primary O-point is inside the core)
    rind = argmin(abs(R[:, 0] - Ro))
    zind = argmin(abs(Z[0, :] - Zo))

    stack = [(rind, zind)]  # List of points to inspect in future

    while stack:  # Whilst there are any points left
        i, j = stack.pop()  # Remove from list

        # Check the point to the left (i,j-1)
        if (j > 0) and (psin[i, j - 1] < 1.0) and (mask[i, j - 1] < 0.5):
            stack.append((i, j - 1))

        # Scan along a row to the right, queueing neighbours above and below
        while True:
            mask[i, j] = 1  # Mark as in the core

            if (i < nx - 1) and (psin[i + 1, j] < 1.0) and (mask[i + 1, j] < 0.5):
                stack.append((i + 1, j))
            if (i > 0) and (psin[i - 1, j] < 1.0) and (mask[i - 1, j] < 0.5):
                stack.append((i - 1, j))

            if j == ny - 1:  # End of the row
                break
            if (psin[i, j + 1] >= 1.0) or (mask[i, j + 1] > 0.5):
                break  # Finished this row
            j += 1  # Move to next point along

    # Now return to X-point locations: classify the blocked cells (value 2)
    # purely by their normalised psi value
    for ix, jx in xpt_inds:
        for i in np.clip([ix - 1, ix, ix + 1], 0, nx - 1):
            for j in np.clip([jx - 1, jx, jx + 1], 0, ny - 1):
                if psin[i, j] < 1.0:
                    mask[i, j] = 1
                else:
                    mask[i, j] = 0
    return mask
def find_psisurface(eq, psifunc, r0, z0, r1, z1, psival=1.0, n=100, axis=None):
    """Locate the point where normalised psi reaches ``psival`` on a line.

    eq      - Equilibrium object (supplies the Rmin/Rmax/Zmin/Zmax limits)
    psifunc - Callable ``psifunc(r, z, grid=False)`` returning normalised psi
    (r0,z0) - Start location inside separatrix
    (r1,z1) - Location outside separatrix
    psival  - Normalised psi value of the surface to find
    n       - Number of starting points to use
    axis    - Optional matplotlib axis; sampled line and result are plotted

    Returns the interpolated (r, z) crossing for a scalar ``psival``; for an
    array-like ``psival`` the full sampled line is returned unchanged.
    """
    # Clip the outer end point (r1, z1) to the computational domain while
    # keeping the direction of the line unchanged (shorten, don't rotate).
    if abs(r1 - r0) > 1e-6:
        r_edge = clip(r1, eq.Rmin, eq.Rmax)
        z1 = z0 + (z1 - z0) * abs((r_edge - r0) / (r1 - r0))
        r1 = r_edge
    if abs(z1 - z0) > 1e-6:
        z_edge = clip(z1, eq.Zmin, eq.Zmax)
        r1 = r0 + (r1 - r0) * abs((z_edge - z0) / (z1 - z0))
        z1 = z_edge

    r = linspace(r0, r1, n)
    z = linspace(z0, z1, n)

    if axis is not None:
        axis.plot(r, z)

    pnorm = psifunc(r, z, grid=False)

    if hasattr(psival, "__len__"):
        # Array of target values requested: hand back the whole sampled line.
        pass
    else:
        # Single target value: find the first sample past psival, then
        # linearly interpolate back towards the previous sample.
        # (Edited by Bhavin 31/07/18: interpolate to psival, not 1.0)
        ind = argmax(pnorm > psival)
        frac = (pnorm[ind] - psival) / (pnorm[ind] - pnorm[ind - 1])
        r = (1.0 - frac) * r[ind] + frac * r[ind - 1]
        z = (1.0 - frac) * z[ind] + frac * z[ind - 1]

    if axis is not None:
        axis.plot(r, z, "bo")

    return r, z
def find_separatrix(
    eq, opoint=None, xpoint=None, ntheta=20, psi=None, axis=None, psival=1.0
):
    """Find the R, Z coordinates of the separatrix for equilbrium
    eq. Returns a list of (R, Z, R_X, Z_X) tuples, where R_X, Z_X are the
    coordinates of the X-point on the separatrix. Points are equally
    spaced in geometric poloidal angle.

    If opoint, xpoint or psi are not given, they are calculated from eq

    eq - Equilibrium object
    opoint - List of O-point tuples of (R, Z, psi)
    xpoint - List of X-point tuples of (R, Z, psi)
    ntheta - Number of points to find
    psi - Grid of psi on (R, Z)
    axis - A matplotlib axis object to plot points on
    """
    if psi is None:
        psi = eq.psi()
    if (opoint is None) or (xpoint is None):
        opoint, xpoint = find_critical(eq.R, eq.Z, psi)

    # Normalised flux: 0 on the magnetic axis, 1 on the boundary
    psi_norm = (psi - opoint[0][2]) / (eq.psi_bndry - opoint[0][2])
    psifunc = interpolate.RectBivariateSpline(eq.R[:, 0], eq.Z[0, :], psi_norm)

    r0, z0 = opoint[0][0:2]

    thetas = linspace(0, 2 * pi, ntheta, endpoint=False)
    step = thetas[1] - thetas[0]

    # Poloidal angle of the primary X-point, wrapped into [0, 2*pi)
    theta_x = arctan2(xpoint[0][0] - r0, xpoint[0][1] - z0)
    theta_x = theta_x * (theta_x >= 0) + (theta_x + 2 * pi) * (theta_x < 0)

    # Avoid putting theta grid points exactly on the X-point
    XPOINT_TOL = 1.0e-3
    if any(abs(thetas - theta_x) < XPOINT_TOL):
        warn("Theta grid too close to X-point, shifting by half-step")
        thetas += step / 2

    rx, zx = xpoint[0][0], xpoint[0][1]
    # One separatrix point per poloidal angle; each entry also carries the
    # primary X-point position (find_psisurface returns an (r, z) tuple,
    # so tuple concatenation yields (r, z, rx, zx)).
    return [
        find_psisurface(
            eq,
            psifunc,
            r0,
            z0,
            r0 + 10.0 * sin(theta),
            z0 + 10.0 * cos(theta),
            psival=psival,
            axis=axis,
            n=1000,
        )
        + (rx, zx)
        for theta in thetas
    ]
def find_safety(
    eq, npsi=1, psinorm=None, ntheta=128, psi=None, opoint=None, xpoint=None, axis=None
):
    """Find the safety factor for each value of psi

    Calculates equally spaced flux surfaces. Points on
    each flux surface are equally spaced in poloidal angle
    Performs line integral around flux surface to get q

    eq - The equilibrium object
    psinorm - flux surface(s) to calculate it for (scalar or array-like)
    npsi - Number of flux surface values to find q for
    ntheta - Number of poloidal points to find it on

    If opoint, xpoint or psi are not given, they are calculated from eq

    returns safety factor for npsi points in normalised psi
    """
    if psi is None:
        psi = eq.psi()

    if (opoint is None) or (xpoint is None):
        opoint, xpoint = find_critical(eq.R, eq.Z, psi)

    if (xpoint is None) or (len(xpoint) == 0):
        # No X-point
        raise ValueError("No X-point so no separatrix")
    else:
        # Normalise psi: 0 on the magnetic axis, 1 at the X-point
        psinormal = (psi - opoint[0][2]) / (xpoint[0][2] - opoint[0][2])

    psifunc = interpolate.RectBivariateSpline(eq.R[:, 0], eq.Z[0, :], psinormal)

    r0, z0 = opoint[0][0:2]

    theta_grid = linspace(0, 2 * pi, ntheta, endpoint=False)
    dtheta = theta_grid[1] - theta_grid[0]

    # Avoid putting theta grid points exactly on the X-points
    xpoint_theta = arctan2(xpoint[0][0] - r0, xpoint[0][1] - z0)
    xpoint_theta = xpoint_theta * (xpoint_theta >= 0) + (xpoint_theta + 2 * pi) * (
        xpoint_theta < 0
    )  # let's make it between 0 and 2*pi
    # How close in theta to allow theta grid points to the X-point
    TOLERANCE = 1.0e-3

    if any(abs(theta_grid - xpoint_theta) < TOLERANCE):
        warn("Theta grid too close to X-point, shifting by half-step")
        theta_grid += dtheta / 2

    if psinorm is None:
        # No surfaces specified: sample 100 surfaces equally spaced in
        # normalised psi, excluding both the axis and the separatrix
        npsi = 100
        psirange = linspace(1.0 / (npsi + 1), 1.0, npsi, endpoint=False)
    else:
        try:
            psirange = psinorm
            npsi = len(psinorm)
        except TypeError:
            # psinorm is a single scalar value
            npsi = 1
            psirange = [psinorm]

    psisurf = zeros([npsi, ntheta, 2])

    # Calculate flux surface positions
    for i in range(npsi):
        psin = psirange[i]
        for j in range(ntheta):
            theta = theta_grid[j]
            r, z = find_psisurface(
                eq,
                psifunc,
                r0,
                z0,
                r0 + 8.0 * sin(theta),
                z0 + 8.0 * cos(theta),
                psival=psin,
                axis=axis,
            )
            psisurf[i, j, :] = [r, z]

    # Get variables for loop integral around flux surface
    r = psisurf[:, :, 0]
    z = psisurf[:, :, 1]
    fpol = eq.fpol(psirange[:]).reshape(npsi, 1)
    Br = eq.Br(r, z)
    Bz = eq.Bz(r, z)
    Bthe = sqrt(Br ** 2 + Bz ** 2)

    # Differentiate location w.r.t. index (central difference; np.roll makes
    # the stencil periodic, appropriate for a closed flux surface)
    dr_di = (np.roll(r, 1, axis=1) - np.roll(r, -1, axis=1)) / 2.0
    dz_di = (np.roll(z, 1, axis=1) - np.roll(z, -1, axis=1)) / 2.0

    # Distance between points
    dl = sqrt(dr_di ** 2 + dz_di ** 2)

    # Integrand - Btor/(R*Bthe) = Fpol/(R**2*Bthe)
    qint = fpol / (r ** 2 * Bthe)

    # Integral
    q = sum(qint * dl, axis=1) / (2 * pi)

    return q
def Diamond():
    """Return the SVG fragment for the 'Diamond' clothing graphic.

    The result is a single string (built via implicit string concatenation)
    containing four <path> elements, intended to be embedded inside an
    <svg> element. The path data is kept verbatim from the exported asset.
    """
    return (
        '<path fill-rule="evenodd" clip-rule="evenodd" d="M63.7805 29.7365C61.391 32.3934 58.9226 34.9802 56.4446 37.5565C57.4301 34.0044 58.1846 29.9786 59.649 26.5995C60.0655 25.6371 59.7486 25.5153 60.5761 25.1057C61.1848 24.8045 62.6537 25.1495 63.3826 25.1679C64.9405 25.2066 66.5336 25.3597 68.0789 25.1187C66.6282 26.6437 65.187 28.1712 63.7805 29.7365M48.5162 33.7981C45.8174 30.5164 43.1322 27.2302 39.8579 24.4814C41.1467 24.6176 42.5537 24.4377 43.8163 24.6872C45.7596 25.0714 45.6534 25.6779 46.6359 27.6458C48.0866 30.5522 49.5248 33.4348 50.6858 36.4689C49.975 35.5692 49.2446 34.6844 48.5162 33.7981M39.4182 21.2668C40.3811 19.6921 40.5616 18.2948 42.3374 17.4865C44.3988 16.5481 47.6379 16.7454 49.8256 16.5709C48.4553 18.094 46.7888 19.6737 46.2224 21.6928C45.3481 19.9749 44.26 18.519 42.5522 17.5223C42.3369 17.416 43.7102 22.0726 43.8898 22.3927C42.1744 22.2968 40.3685 21.9851 38.6652 22.3012C38.9162 21.9563 39.1672 21.6113 39.4182 21.2668M55.8244 16.2593C57.4155 16.2946 58.994 16.4183 60.5635 16.6783C59.2969 18.3535 58.9151 20.3104 58.4653 22.3087C57.9869 20.051 56.2565 17.9473 54.7313 16.2608C55.0955 16.2603 55.4597 16.2598 55.8244 16.2593M52.3257 17.3334C52.5602 17.1749 56.7827 22.419 57.3924 22.8893C54.0798 22.8813 50.7744 22.7193 47.4659 22.5736C49.3729 21.0869 50.4886 18.8863 52.3257 17.3334M55.0125 34.4289C54.6105 35.778 54.2353 37.137 53.7976 38.4756C52.2432 33.7444 49.9358 29.2881 47.7898 24.802C51.1295 24.89 54.4657 25.0108 57.8059 25.0575C56.9864 28.2164 55.9441 31.3033 55.0125 34.4289M62.2417 16.9656C63.5189 18.094 65.3389 20.4064 65.9284 21.1833C65.9868 21.2604 66.0462 21.3369 66.107 21.4125C66.3178 21.6744 67.0734 22.6681 67.3702 23.0568C65.1427 22.8306 62.8252 22.9271 60.5902 22.9067C61.0882 20.8528 61.6939 18.8953 61.1737 16.7787C61.5309 16.8364 61.8865 16.8985 62.2417 16.9656M69.8899 22.5831C67.7323 19.9068 65.52 15.3248 62.0259 14.5056C58.5539 13.6919 54.4782 14.0518 50.9519 14.2754C47.8774 14.4703 42.8691 14.0721 40.1572 15.8691C38.7638 16.7921 37.7315 18.8739 37.2491 20.4203C36.8145 21.8131 37.5152 22.7049 36.0026 22.7049C35.9065 22.7049 38.5067 26.3778 38.8131 26.6666C41.0853 28.8065 43.2574 30.8688 45.2701 33.2643C48.1047 36.6379 50.6748 41.6022 54.4149 43C53.7292 42.661 57.2067 40.0334 57.6243 39.5811C59.7355 37.2931 61.8709 35.0269 63.9968 32.7518C65.935 30.678 68.0472 28.721 69.9271 26.6004C71.3618 24.9825 71.3658 24.4262 69.8899 22.5831" fill="white"/>'
        '<path fill-rule="evenodd" clip-rule="evenodd" d="M35.9986 13.9999C36.0416 14.0142 35.0904 11.9548 35.1739 12.0812C34.6993 11.3625 34.1283 10.8402 33.4357 10.3535C32.7079 9.84262 29.1271 9.25422 29 9C30.4936 11.6522 33.1672 13.1277 35.9986 13.9999" fill="white"/>'
        '<path fill-rule="evenodd" clip-rule="evenodd" d="M51.3553 9.19614C51.528 9.8484 51.7926 10.4493 52.1491 10.9989C52.2187 11.0774 53.0077 6.81518 52.9999 6.45201C52.9866 5.83171 52.6121 2 51.8141 2C50.477 2 51.1397 8.46931 51.3553 9.19614" fill="white"/>'
        '<path fill-rule="evenodd" clip-rule="evenodd" d="M77.9692 7C74.9776 7 68.2896 12.8536 69.0616 15.9998C69.0673 16.0104 69.509 15.6497 69.4129 15.7172C69.4321 15.7032 69.4509 15.6888 69.4692 15.6738C70.7405 14.6492 72.1873 13.6545 73.4606 12.4683C74.3724 11.6187 75.4476 10.8564 76.2773 9.92728C76.3619 9.83277 78.2629 7 77.9692 7" fill="white"/>'
    )
/MindsDB-23.8.3.0.tar.gz/MindsDB-23.8.3.0/mindsdb/utilities/cache.py | import os
import time
from abc import ABC
from pathlib import Path
import hashlib
import typing as t
import pandas as pd
import walrus
from mindsdb.utilities.config import Config
from mindsdb.utilities.json_encoder import CustomJSONEncoder
from mindsdb.interfaces.storage.fs import FileLock
from mindsdb.utilities.context import context as ctx
def dataframe_checksum(df: pd.DataFrame):
    """Checksum of a DataFrame, derived from its JSON representation."""
    return str_checksum(df.to_json())
def json_checksum(obj: t.Union[dict, list]):
    """Checksum of a JSON-serialisable object (dict or list)."""
    return str_checksum(CustomJSONEncoder().encode(obj))
def str_checksum(obj: str):
    """SHA-256 hex digest of a string (encoded to bytes first)."""
    digest = hashlib.sha256(obj.encode())
    return digest.hexdigest()
class BaseCache(ABC):
    """Common base for all cache backends.

    Resolves the maximum cache size and the serializer from the config;
    subclasses implement the actual storage (``get``, ``set``, ...).
    """

    def __init__(self, max_size=None, serializer=None):
        self.config = Config()
        # Upper bound on the number of cached entries; default comes from
        # the 'cache.max_size' config option (50 if unset).
        if max_size is None:
            max_size = self.config["cache"].get("max_size", 50)
        self.max_size = max_size
        if serializer is None:
            # Pick the serializer module from config: 'pickle' or the
            # default, dill.
            serializer_module = self.config["cache"].get('serializer')
            if serializer_module == 'pickle':
                import pickle as serializer
            else:
                import dill as serializer
        # Fix: previously the assignment only happened when no serializer
        # was passed, so an explicitly supplied serializer was ignored and
        # self.serializer was left unset. Now it is always assigned.
        self.serializer = serializer

    # default functions
    def set_df(self, name, df):
        """Store a DataFrame; backends may override with a faster path."""
        return self.set(name, df)

    def get_df(self, name):
        """Load a DataFrame; backends may override with a faster path."""
        return self.get(name)

    def serialize(self, value):
        """Serialize ``value`` to bytes using the configured serializer."""
        return self.serializer.dumps(value)

    def deserialize(self, value):
        """Inverse of :meth:`serialize`."""
        return self.serializer.loads(value)
class FileCache(BaseCache):
    """Cache backend storing every entry as a file on disk.

    Entries live below ``<cache path>/<category>[/<company_id>]``; eviction
    is based on file modification time, oldest first.
    """

    def __init__(self, category, path=None, **kwargs):
        super().__init__(**kwargs)
        if path is None:
            path = self.config['paths']['cache']
        cache_path = Path(path) / category
        company_id = ctx.company_id
        if company_id is not None:
            # Keep the caches of different companies separated.
            cache_path = cache_path / str(company_id)
        cache_path.mkdir(parents=True, exist_ok=True)
        self.path = cache_path

    def clear_old_cache(self):
        """Evict the oldest files once the cache grows past max_size."""
        with FileLock(self.path):
            if self.max_size is None:
                return
            # Allow a few entries above the limit so eviction does not run
            # on every single insert.
            slack = 5
            file_count = len(os.listdir(self.path))
            if file_count > self.max_size + slack:
                by_age = sorted(Path(self.path).iterdir(), key=os.path.getmtime)
                for stale in by_age[:file_count - self.max_size]:
                    self.delete_file(stale)

    def file_path(self, name):
        """Path of the file backing the entry ``name``."""
        return self.path / name

    def set_df(self, name, df):
        # NOTE(review): writes happen without FileLock while reads take it;
        # presumably acceptable for this workload - confirm.
        df.to_pickle(self.file_path(name))
        self.clear_old_cache()

    def set(self, name, value):
        serialized = self.serialize(value)
        with open(self.file_path(name), 'wb') as fd:
            fd.write(serialized)
        self.clear_old_cache()

    def get_df(self, name):
        target = self.file_path(name)
        with FileLock(self.path):
            if not os.path.exists(target):
                return None
            return pd.read_pickle(target)

    def get(self, name):
        target = self.file_path(name)
        with FileLock(self.path):
            if not os.path.exists(target):
                return None
            with open(target, 'rb') as fd:
                raw = fd.read()
            return self.deserialize(raw)

    def delete(self, name):
        self.delete_file(self.file_path(name))

    def delete_file(self, path):
        os.unlink(path)
class RedisCache(BaseCache):
    """Cache backend storing serialized entries in redis.

    A redis hash named after ``category`` records the write timestamp of
    every entry, so the oldest entries can be evicted once ``max_size``
    is exceeded.
    """

    def __init__(self, category, connection_info=None, **kwargs):
        super().__init__(**kwargs)
        self.category = category
        if connection_info is None:
            # Fall back to the connection settings from config; an empty
            # dict connects to a local redis instance.
            connection_info = self.config["cache"].get("connection", {})
        self.client = walrus.Database(**connection_info)

    def clear_old_cache(self, key_added):
        """Evict the oldest entries once the cache grows past max_size.

        ``key_added`` is accepted for interface compatibility but unused.
        """
        if self.max_size is None:
            return
        # Allow a few entries above the limit so eviction does not run on
        # every single insert.
        slack = 5
        entry_count = self.client.hlen(self.category)
        if entry_count > self.max_size + slack:
            # Timestamps of all entries, oldest first.
            stamped_keys = list(self.client.hgetall(self.category).items())
            stamped_keys.sort(key=lambda item: item[1])
            for key, _ in stamped_keys[:entry_count - self.max_size]:
                self.delete_key(key)

    def redis_key(self, name):
        """Redis key for an entry of this cache category."""
        return f'{self.category}_{name}'

    def set(self, name, value):
        key = self.redis_key(name)
        self.client.set(key, self.serialize(value))
        # Record the modification time (ms) in the per-category hash.
        self.client.hset(self.category, key, int(time.time() * 1000))
        self.clear_old_cache(key)

    def get(self, name):
        raw = self.client.get(self.redis_key(name))
        if raw is None:
            # no value in cache
            return None
        return self.deserialize(raw)

    def delete(self, name):
        self.delete_key(self.redis_key(name))

    def delete_key(self, key):
        self.client.delete(key)
        self.client.hdel(self.category, key)
class NoCache:
    """No-op cache backend used when caching is disabled.

    Accepts (and ignores) any constructor arguments so it can stand in
    for the other cache classes.
    """

    def __init__(self, *args, **kwargs):
        pass

    def get(self, name):
        """Always report a cache miss."""
        return None

    def set(self, name, value):
        """Discard the value."""
        pass
def get_cache(category, **kwargs):
    """Build the cache backend selected by the 'cache.type' config option.

    'redis' -> RedisCache, 'none' -> NoCache (caching disabled),
    anything else -> FileCache (the default file-based backend).
    """
    cache_type = Config().get('cache')['type']
    if cache_type == 'redis':
        return RedisCache(category, **kwargs)
    if cache_type == 'none':
        return NoCache(category, **kwargs)
    return FileCache(category, **kwargs)
/CartiMorph_nnUNet-1.7.14.tar.gz/CartiMorph_nnUNet-1.7.14/CartiMorph_nnUNet/postprocessing/consolidate_postprocessing.py |
import shutil
from typing import Tuple
from batchgenerators.utilities.file_and_folder_operations import *
from CartiMorph_nnUNet.configuration import default_num_threads
from CartiMorph_nnUNet.evaluation.evaluator import aggregate_scores
from CartiMorph_nnUNet.postprocessing.connected_components import determine_postprocessing
import argparse
def collect_cv_niftis(cv_folder: str, output_folder: str, validation_folder_name: str = 'validation_raw',
                      folds: tuple = (0, 1, 2, 3, 4)):
    """Copy the validation niftis of all requested folds into one folder.

    :param cv_folder: folder containing the fold_X subfolders
    :param output_folder: destination for the copied .nii.gz files
    :param validation_folder_name: name of the validation subfolder per fold
    :param folds: fold ids to collect
    :raises RuntimeError: if any fold's validation folder is missing
    """
    validation_raw_folders = [join(cv_folder, "fold_%d" % i, validation_folder_name) for i in folds]
    exist = [isdir(i) for i in validation_raw_folders]

    if not all(exist):
        raise RuntimeError("some folds are missing. Please run the full 5-fold cross-validation. "
                           "The following folds seem to be missing: %s" %
                           [i for j, i in enumerate(folds) if not exist[j]])

    # now copy all raw niftis into cv_niftis_raw
    maybe_mkdir_p(output_folder)
    # Fix: iterate the fold folders directly. Indexing
    # validation_raw_folders[f] with the fold *id* is wrong whenever
    # folds != (0, 1, 2, 3, 4) (IndexError or the wrong folder).
    for fold_folder in validation_raw_folders:
        niftis = subfiles(fold_folder, suffix=".nii.gz")
        for n in niftis:
            shutil.copy(n, join(output_folder))
def consolidate_folds(output_folder_base, validation_folder_name: str = 'validation_raw',
                      advanced_postprocessing: bool = False, folds: Tuple[int] = (0, 1, 2, 3, 4)):
    """
    Used to determine the postprocessing for an experiment after all five folds have been completed. In the validation of
    each fold, the postprocessing can only be determined on the cases within that fold. This can result in different
    postprocessing decisions for different folds. In the end, we can only decide for one postprocessing per experiment,
    so we have to rerun it
    :param folds:
    :param advanced_postprocessing:
    :param output_folder_base:experiment output folder (fold_0, fold_1, etc must be subfolders of the given folder)
    :param validation_folder_name: dont use this
    :return:
    """
    output_folder_raw = join(output_folder_base, "cv_niftis_raw")
    # Start from a clean slate: stale predictions would corrupt the result
    if isdir(output_folder_raw):
        shutil.rmtree(output_folder_raw)

    output_folder_gt = join(output_folder_base, "gt_niftis")
    collect_cv_niftis(output_folder_base, output_folder_raw, validation_folder_name,
                      folds)

    # Sanity check: every ground truth case must have a collected prediction
    num_niftis_gt = len(subfiles(output_folder_gt, suffix='.nii.gz'))
    # count niftis in there
    num_niftis = len(subfiles(output_folder_raw, suffix='.nii.gz'))
    if num_niftis != num_niftis_gt:
        # (message fixed: used to read "If does not seem like ...")
        raise AssertionError("It does not seem like you trained all the folds! Train all folds first!")

    # load a summary file so that we can know what class labels to expect
    summary_fold0 = load_json(join(output_folder_base, "fold_0", validation_folder_name, "summary.json"))['results'][
        'mean']
    classes = [int(i) for i in summary_fold0.keys()]
    niftis = subfiles(output_folder_raw, join=False, suffix=".nii.gz")

    # Pair every prediction with its ground truth file (same file name)
    test_pred_pairs = [(join(output_folder_raw, i), join(output_folder_gt, i)) for i in niftis]

    # determine_postprocessing needs a summary.json file in the folder where the raw predictions are. We could compute
    # that from the summary files of the five folds but I am feeling lazy today
    aggregate_scores(test_pred_pairs, labels=classes, json_output_file=join(output_folder_raw, "summary.json"),
                     num_threads=default_num_threads)

    determine_postprocessing(output_folder_base, output_folder_gt, 'cv_niftis_raw',
                             final_subf_name="cv_niftis_postprocessed", processes=default_num_threads,
                             advanced_postprocessing=advanced_postprocessing)
    # determine_postprocessing will create a postprocessing.json file that can be used for inference
# Command line interface: consolidate the postprocessing of a completed
# cross-validation when this module is executed as a script.
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", type=str, required=True, help="experiment output folder (fold_0, fold_1, "
                                                            "etc must be subfolders of the given folder)")
    cli_args = parser.parse_args()
    consolidate_folds(cli_args.f)
/MedPy-0.4.0.tar.gz/MedPy-0.4.0/bin/medpy_extract_sub_volume.py | # build-in modules
from argparse import RawTextHelpFormatter
import argparse
import logging
import sys
import os
# third-party modules
import scipy
# path changes
# own modules
from medpy.core import ArgumentError, Logger
from medpy.io import load, save
# information
__author__ = "Oskar Maier"
__version__ = "r0.3.0, 2011-12-11"
__email__ = "oskar.maier@googlemail.com"
__status__ = "Release"
__description__ = """
Takes a medical image of arbitrary dimensions and the dimensions
of a sub-volume that lies inside the dimensions of this images.
Extracts the sub-volume from the supplied image and saves it.
The volume to be extracted is defined by its slices, the syntax is the same as
for numpy array indexes (i.e. starting with zero-index, the first literal (x) of any
x:y included and the second (y) excluded).
E.g. '2:3,4:6' would extract the slice no. 3 in X and 5, 6 in Y direction of a 2D image.
E.g. '99:199,149:199,99:249' would extract the respective slices in X,Y and Z direction of a 3D image.
This could, for example, be used to extract the area of the liver form a CT scan.
To keep all slices in one direction just omit the respective value:
E.g. '99:199,149:199,' would work ust as example II, but extract all Z slices.
Note here the trailing colon.
Note to take into account the input images orientation when supplying the sub-volume.
Copyright (C) 2013 Oskar Maier
This program comes with ABSOLUTELY NO WARRANTY; This is free software,
and you are welcome to redistribute it under certain conditions; see
the LICENSE file or <http://www.gnu.org/licenses/> for details.
"""
# code
def main():
    """Command line entry point: extract a sub-volume from a medical image.

    Parses the arguments, loads the source image, extracts the requested
    slice ranges per dimension and saves the result in the same format
    as the input image.
    """
    # parse cmd arguments
    parser = getParser()
    parser.parse_args()
    args = getArguments(parser)

    # prepare logger
    logger = Logger.getInstance()
    if args.debug:
        logger.setLevel(logging.DEBUG)
    elif args.verbose:
        logger.setLevel(logging.INFO)

    # check if output image exists (unless -f/force was supplied)
    if not args.force:
        if os.path.exists(args.output + args.image[-4:]):
            logger.warning('The output file {} already exists. Breaking.'.format(args.output + args.image[-4:]))
            exit(1)

    # load images
    image_data, image_header = load(args.image)

    # check image dimensions against sub-volume dimensions
    if len(image_data.shape) != len(args.volume):
        logger.critical('The supplied input image is of different dimension as the sub volume requested ({} to {})'.format(len(image_data.shape), len(args.volume)))
        raise ArgumentError('The supplied input image is of different dimension as the sub volume requested ({} to {})'.format(len(image_data.shape), len(args.volume)))

    # execute extraction of the sub-area
    logger.info('Extracting sub-volume...')
    # Fix: NumPy requires a *tuple* of slice objects for multi-dimensional
    # indexing; indexing with a list of slices is an error on modern NumPy.
    index = tuple(slice(lower, upper) for lower, upper in args.volume)
    volume = image_data[index]

    # check if the output image contains data
    if 0 == len(volume):
        logger.exception('The extracted sub-volume is of zero-size. This usual means that the supplied volume coordinates and the image coordinates do not intersect. Exiting the application.')
        sys.exit(-1)

    # squeeze extracted sub-volume for the case in which one dimensions has been eliminated
    volume = scipy.squeeze(volume)
    logger.debug('Extracted volume is of shape {}.'.format(volume.shape))

    # save results in same format as input image
    save(volume, args.output, image_header, args.force)

    logger.info('Successfully terminated.')
def getArguments(parser):
    """Run argparse and post-process the collected arguments.

    Converts the volume string (e.g. "2:3,4:6") into a list of
    (lower, upper) tuples, one per image dimension. An empty literal
    (as in "99:199,149:199,") maps to None, i.e. an open slice end.

    :raises ArgumentError: if the volume string is malformed
    """
    args = parser.parse_args()
    try:
        def _to_int_or_none(token):
            # '' means "no bound given" -> None (open slice end)
            return int(token) if token else None

        def _parse_axis(spec):
            if not spec:
                return (None, None)
            bounds = [_to_int_or_none(t) for t in spec.split(':')]
            # Only the first two fields are used; a missing second field
            # surfaces as IndexError and is reported below.
            return (bounds[0], bounds[1])

        args.volume = [_parse_axis(spec) for spec in args.volume.split(',')]
    except (ValueError, IndexError) as e:
        # (message fixed: used to read "Maleformed")
        raise ArgumentError('Malformed volume parameter "{}", see description with -h flag.'.format(args.volume), e)
    return args
def getParser():
    """Create and return the argparse parser object.

    Positional arguments: source image, target image and the sub-volume
    specification; optional flags control overwriting and log verbosity.
    """
    parser = argparse.ArgumentParser(description=__description__, formatter_class=RawTextHelpFormatter)
    # Positional arguments
    parser.add_argument('image', help='The source volume.')
    parser.add_argument('output', help='The target volume.')
    parser.add_argument('volume', help='The coordinated of the sub-volume of the images that should be extracted.\nExample: 30:59,40:67,45:75 for a 3D image.\nSee -h for more information.')
    # Optional flags
    parser.add_argument('-f', dest='force', action='store_true', help='Set this flag to silently override files that exist.')
    parser.add_argument('-v', dest='verbose', action='store_true', help='Display more information.')
    parser.add_argument('-d', dest='debug', action='store_true', help='Display debug information.')
    return parser
# Script entry point: delegate to main() when executed directly.
if __name__ == "__main__":
    main()
/Misago-0.36.1.tar.gz/Misago-0.36.1/misago/conf/defaults.py | # Permissions system extensions
# https://misago.readthedocs.io/en/latest/developers/acls.html#extending-permissions-system
MISAGO_ACL_EXTENSIONS = [
"misago.users.permissions.account",
"misago.users.permissions.profiles",
"misago.users.permissions.moderation",
"misago.users.permissions.delete",
"misago.categories.permissions",
"misago.threads.permissions.attachments",
"misago.threads.permissions.polls",
"misago.threads.permissions.threads",
"misago.threads.permissions.privatethreads",
"misago.threads.permissions.bestanswers",
"misago.search.permissions",
]
# Path to the directory that Misago should use to prepare user data downloads.
# Should not be accessible from internet.
MISAGO_USER_DATA_DOWNLOADS_WORKING_DIR = None
# Custom markup extensions
MISAGO_MARKUP_EXTENSIONS = []
# Custom post validators
MISAGO_POST_VALIDATORS = []
# Post search filters
MISAGO_POST_SEARCH_FILTERS = []
# Posting middlewares
# https://misago.readthedocs.io/en/latest/developers/posting_process.html
MISAGO_POSTING_MIDDLEWARES = [
# Always keep FloodProtectionMiddleware middleware first one
"misago.threads.api.postingendpoint.floodprotection.FloodProtectionMiddleware",
"misago.threads.api.postingendpoint.category.CategoryMiddleware",
"misago.threads.api.postingendpoint.privatethread.PrivateThreadMiddleware",
"misago.threads.api.postingendpoint.reply.ReplyMiddleware",
"misago.threads.api.postingendpoint.moderationqueue.ModerationQueueMiddleware",
"misago.threads.api.postingendpoint.attachments.AttachmentsMiddleware",
"misago.threads.api.postingendpoint.participants.ParticipantsMiddleware",
"misago.threads.api.postingendpoint.pin.PinMiddleware",
"misago.threads.api.postingendpoint.close.CloseMiddleware",
"misago.threads.api.postingendpoint.hide.HideMiddleware",
"misago.threads.api.postingendpoint.protect.ProtectMiddleware",
"misago.threads.api.postingendpoint.recordedit.RecordEditMiddleware",
"misago.threads.api.postingendpoint.updatestats.UpdateStatsMiddleware",
"misago.threads.api.postingendpoint.mentions.MentionsMiddleware",
"misago.threads.api.postingendpoint.syncprivatethreads.SyncPrivateThreadsMiddleware",
# Always keep SaveChangesMiddleware middleware after all state-changing middlewares
"misago.threads.api.postingendpoint.savechanges.SaveChangesMiddleware",
# Those middlewares are last because they don't change app state
"misago.threads.api.postingendpoint.notifications.NotificationsMiddleware",
]
# Configured thread types
MISAGO_THREAD_TYPES = [
"misago.threads.threadtypes.thread.Thread",
"misago.threads.threadtypes.privatethread.PrivateThread",
]
# Search extensions
MISAGO_SEARCH_EXTENSIONS = [
"misago.threads.search.SearchThreads",
"misago.users.search.SearchUsers",
]
# Additional registration validators
# https://misago.readthedocs.io/en/latest/developers/validating_registrations.html
MISAGO_NEW_REGISTRATIONS_VALIDATORS = [
"misago.users.validators.validate_gmail_email",
"misago.users.validators.validate_with_sfs",
]
# Custom profile fields
MISAGO_PROFILE_FIELDS = []
# Login API URL
MISAGO_LOGIN_API_URL = "auth"
# Misago Admin Path
# Omit starting and trailing slashes. To disable Misago admin, empty this value.
MISAGO_ADMIN_PATH = "admincp"
# Admin urls namespaces that Misago's AdminAuthMiddleware should protect
MISAGO_ADMIN_NAMESPACES = ["admin", "misago:admin"]
# How long (in minutes) since previous request to admin namespace should admin session last.
MISAGO_ADMIN_SESSION_EXPIRATION = 60
# Display threads on forum index
# Change this to false to display categories list instead
MISAGO_THREADS_ON_INDEX = True
# How many notifications may be retrieved from the API in single request?
MISAGO_NOTIFICATIONS_PAGE_LIMIT = 50
# How many unread notifications to track
# Misago will not report report unread notifications count bigger than this
# Example: if limit 50 and user has 56 unread notifications, UI will show "50+"
# Also used by the notifications healing mechanism
MISAGO_UNREAD_NOTIFICATIONS_LIMIT = 50
# Function used for generating individual avatar for user
MISAGO_DYNAMIC_AVATAR_DRAWER = "misago.users.avatars.dynamic.draw_default"
# Path to directory containing avatar galleries
# Those galleries can be loaded by running loadavatargallery command
MISAGO_AVATAR_GALLERY = None
# Save user avatars for sizes
# Keep sizes ordered from greatest to smallest
# Max size also controls min size of uploaded image as well as crop size
MISAGO_AVATARS_SIZES = [400, 200, 150, 128, 100, 64, 50, 40, 32, 20]
# Path to blank avatar image used for guests and removed users.
MISAGO_BLANK_AVATAR = "misago/img/blank-avatar.png"
# Max allowed size of image before Misago will generate thumbnail for it
MISAGO_ATTACHMENT_IMAGE_SIZE_LIMIT = (500, 500)
# Length of secret used for attachments url tokens and filenames
MISAGO_ATTACHMENT_SECRET_LENGTH = 64
# Names of files served when user requests file that doesn't exist or is unavailable
MISAGO_ATTACHMENT_403_IMAGE = "misago/img/attachment-403.png"
MISAGO_ATTACHMENT_404_IMAGE = "misago/img/attachment-404.png"
# Available Moment.js locales
MISAGO_MOMENT_JS_LOCALES = [
"af",
"ar-ma",
"ar-sa",
"ar-tn",
"ar",
"az",
"be",
"bg",
"bn",
"bo",
"br",
"bs",
"ca",
"cs",
"cv",
"cy",
"da",
"de-at",
"de",
"el",
"en-au",
"en-ca",
"en-gb",
"eo",
"es",
"et",
"eu",
"fa",
"fi",
"fo",
"fr-ca",
"fr",
"fy",
"gl",
"he",
"hi",
"hr",
"hu",
"hy-am",
"id",
"is",
"it",
"ja",
"ka",
"km",
"ko",
"lb",
"lt",
"lv",
"mk",
"ml",
"mr",
"ms-my",
"my",
"nb",
"ne",
"nl",
"nn",
"pl",
"pt-br",
"pt",
"ro",
"ru",
"sk",
"sl",
"sq",
"sr-cyrl",
"sr",
"sv",
"ta",
"th",
"tl-ph",
"tr",
"tzm-latn",
"tzm",
"uk",
"uz",
"vi",
"zh-cn",
"zh-hans",
"zh-tw",
] | PypiClean |
/GitPython-3.1.34-py3-none-any.whl/git/config.py | import sys
import abc
from functools import wraps
import inspect
from io import BufferedReader, IOBase
import logging
import os
import re
import fnmatch
from git.compat import (
defenc,
force_text,
is_win,
)
from git.util import LockFile
import os.path as osp
import configparser as cp
# typing-------------------------------------------------------
from typing import (
Any,
Callable,
Generic,
IO,
List,
Dict,
Sequence,
TYPE_CHECKING,
Tuple,
TypeVar,
Union,
cast,
)
from git.types import Lit_config_levels, ConfigLevels_Tup, PathLike, assert_never, _T
if TYPE_CHECKING:
from git.repo.base import Repo
from io import BytesIO
T_ConfigParser = TypeVar("T_ConfigParser", bound="GitConfigParser")
T_OMD_value = TypeVar("T_OMD_value", str, bytes, int, float, bool)
if sys.version_info[:3] < (3, 7, 2):
# typing.Ordereddict not added until py 3.7.2
from collections import OrderedDict
OrderedDict_OMD = OrderedDict
else:
from typing import OrderedDict
OrderedDict_OMD = OrderedDict[str, List[T_OMD_value]] # type: ignore[assignment, misc]
# -------------------------------------------------------------
# Public API of this module.
__all__ = ("GitConfigParser", "SectionConstraint")
log = logging.getLogger("git.config")
# Library code must not emit output unless the application configures handlers.
log.addHandler(logging.NullHandler())
# invariants
# represents the configuration level of a configuration file
CONFIG_LEVELS: ConfigLevels_Tup = ("system", "user", "global", "repository")
# Section pattern to detect conditional includes.
# https://git-scm.com/docs/git-config#_conditional_includes
CONDITIONAL_INCLUDE_REGEXP = re.compile(r"(?<=includeIf )\"(gitdir|gitdir/i|onbranch):(.+)\"")
class MetaParserBuilder(abc.ABCMeta):  # noqa: B024
    """Utility class wrapping base-class methods into decorators that assure read-only properties"""

    def __new__(cls, name: str, bases: Tuple, clsdict: Dict[str, Any]) -> "MetaParserBuilder":
        """
        Equip all base-class methods with a needs_values decorator, and all non-const methods
        with a set_dirty_and_flush_changes decorator in addition to that."""
        kmm = "_mutating_methods_"
        # Only classes that declare _mutating_methods_ opt in to the wrapping.
        if kmm in clsdict:
            mutating_methods = clsdict[kmm]
            for base in bases:
                # Public routines only; dunder/private helpers are left untouched.
                methods = (t for t in inspect.getmembers(base, inspect.isroutine) if not t[0].startswith("_"))
                for name, method in methods:
                    # A method explicitly (re)defined on the new class wins over the wrapped base version.
                    if name in clsdict:
                        continue
                    # Every inherited public method reads config values lazily.
                    method_with_values = needs_values(method)
                    if name in mutating_methods:
                        # Mutators additionally mark the instance dirty and flush to disk.
                        method_with_values = set_dirty_and_flush_changes(method_with_values)
                    # END mutating methods handling

                    clsdict[name] = method_with_values
                # END for each name/method pair
            # END for each base
        # END if mutating methods configuration is set

        new_type = super(MetaParserBuilder, cls).__new__(cls, name, bases, clsdict)
        return new_type
def needs_values(func: Callable[..., _T]) -> Callable[..., _T]:
    """Return *func* wrapped so that configuration values are read (on demand)
    before any attempt to access them."""

    @wraps(func)
    def _lazy_read_wrapper(self: "GitConfigParser", *args: Any, **kwargs: Any) -> _T:
        # read() is idempotent - it returns immediately once initialized.
        self.read()
        return func(self, *args, **kwargs)

    return _lazy_read_wrapper
def set_dirty_and_flush_changes(non_const_func: Callable[..., _T]) -> Callable[..., _T]:
    """Return method that checks whether given non constant function may be called.

    If so, the instance will be set dirty.
    Additionally, we flush the changes right to disk.

    :param non_const_func: a mutating method of :class:`GitConfigParser`
    :return: the wrapping method"""

    # FIX: use functools.wraps instead of only copying __name__ by hand - this
    # also preserves __doc__, __module__, __qualname__ and __wrapped__, and is
    # consistent with how needs_values() wraps methods.
    @wraps(non_const_func)
    def flush_changes(self: "GitConfigParser", *args: Any, **kwargs: Any) -> _T:
        rval = non_const_func(self, *args, **kwargs)
        # Mark dirty first so write() actually persists the change.
        self._dirty = True
        self.write()
        return rval

    # END wrapper method
    return flush_changes
class SectionConstraint(Generic[T_ConfigParser]):
    """Constrains a ConfigParser to only option commands which are constrained to
    always use the section we have been initialized with.

    It supports all ConfigParser methods that operate on an option.

    :note:
        If used as a context manager, will release the wrapped ConfigParser."""

    __slots__ = ("_config", "_section_name")
    # Methods that are forwarded to the wrapped parser with the constrained
    # section injected as first argument; everything else resolves normally.
    _valid_attrs_ = (
        "get_value",
        "set_value",
        "get",
        "set",
        "getint",
        "getfloat",
        "getboolean",
        "has_option",
        "remove_section",
        "remove_option",
        "options",
    )

    def __init__(self, config: T_ConfigParser, section: str) -> None:
        self._config = config
        self._section_name = section

    def __del__(self) -> None:
        # Yes, for some reason, we have to call it explicitly for it to work in PY3 !
        # Apparently __del__ doesn't get call anymore if refcount becomes 0
        # Ridiculous ... .
        self._config.release()

    def __getattr__(self, attr: str) -> Any:
        # Forwarded methods become lambdas that prepend our section name;
        # anything else falls back to normal attribute lookup.
        if attr in self._valid_attrs_:
            return lambda *args, **kwargs: self._call_config(attr, *args, **kwargs)
        return super(SectionConstraint, self).__getattribute__(attr)

    def _call_config(self, method: str, *args: Any, **kwargs: Any) -> Any:
        """Call the configuration at the given method which must take a section name
        as first argument"""
        return getattr(self._config, method)(self._section_name, *args, **kwargs)

    @property
    def config(self) -> T_ConfigParser:
        """return: Configparser instance we constrain"""
        return self._config

    def release(self) -> None:
        """Equivalent to GitConfigParser.release(), which is called on our underlying parser instance"""
        return self._config.release()

    def __enter__(self) -> "SectionConstraint[T_ConfigParser]":
        self._config.__enter__()
        return self

    def __exit__(self, exception_type: str, exception_value: str, traceback: str) -> None:
        self._config.__exit__(exception_type, exception_value, traceback)
class _OMD(OrderedDict_OMD):
"""Ordered multi-dict."""
def __setitem__(self, key: str, value: _T) -> None:
super(_OMD, self).__setitem__(key, [value])
def add(self, key: str, value: Any) -> None:
if key not in self:
super(_OMD, self).__setitem__(key, [value])
return None
super(_OMD, self).__getitem__(key).append(value)
def setall(self, key: str, values: List[_T]) -> None:
super(_OMD, self).__setitem__(key, values)
def __getitem__(self, key: str) -> Any:
return super(_OMD, self).__getitem__(key)[-1]
def getlast(self, key: str) -> Any:
return super(_OMD, self).__getitem__(key)[-1]
def setlast(self, key: str, value: Any) -> None:
if key not in self:
super(_OMD, self).__setitem__(key, [value])
return
prior = super(_OMD, self).__getitem__(key)
prior[-1] = value
def get(self, key: str, default: Union[_T, None] = None) -> Union[_T, None]:
return super(_OMD, self).get(key, [default])[-1]
def getall(self, key: str) -> List[_T]:
return super(_OMD, self).__getitem__(key)
def items(self) -> List[Tuple[str, _T]]: # type: ignore[override]
"""List of (key, last value for key)."""
return [(k, self[k]) for k in self]
def items_all(self) -> List[Tuple[str, List[_T]]]:
"""List of (key, list of values for key)."""
return [(k, self.getall(k)) for k in self]
def get_config_path(config_level: Lit_config_levels) -> str:
    """Return the file-system path of the git configuration file for *config_level*.

    :raise ValueError: for the "repository" level, which needs a Repo instance
        (use ``Repo._get_config_path`` instead)."""
    # Windows has no system-wide /etc/gitconfig we could honour; treat a
    # "system" request as "global" there.
    if is_win and config_level == "system":
        config_level = "global"

    if config_level == "system":
        return "/etc/gitconfig"
    if config_level == "user":
        xdg_base = os.environ.get("XDG_CONFIG_HOME") or osp.join(os.environ.get("HOME", "~"), ".config")
        return osp.normpath(osp.expanduser(osp.join(xdg_base, "git", "config")))
    if config_level == "global":
        return osp.normpath(osp.expanduser("~/.gitconfig"))
    if config_level == "repository":
        raise ValueError("No repo to get repository configuration from. Use Repo._get_config_path")

    # Should not reach here. Will raise ValueError if does. Static typing will warn missing elifs
    assert_never(  # type: ignore[unreachable]
        config_level,
        ValueError(f"Invalid configuration level: {config_level!r}"),
    )
class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
    """Implements specifics required to read git style configuration files.

    This variation behaves much like the git.config command such that the configuration
    will be read on demand based on the filepath given during initialization.

    The changes will automatically be written once the instance goes out of scope, but
    can be triggered manually as well.

    The configuration file will be locked if you intend to change values preventing other
    instances to write concurrently.

    :note:
        The config is case-sensitive even when queried, hence section and option names
        must match perfectly.
        If used as a context manager, will release the locked file."""

    # FIX: a stray " | PypiClean" dataset artifact trailing the final line of
    # this class was removed - it was not valid Python.

    # { Configuration
    # The lock type determines the type of lock to use in new configuration readers.
    # They must be compatible to the LockFile interface.
    # A suitable alternative would be the BlockingLockFile
    t_lock = LockFile
    re_comment = re.compile(r"^\s*[#;]")

    # } END configuration

    optvalueonly_source = r"\s*(?P<option>[^:=\s][^:=]*)"

    OPTVALUEONLY = re.compile(optvalueonly_source)

    OPTCRE = re.compile(optvalueonly_source + r"\s*(?P<vi>[:=])\s*" + r"(?P<value>.*)$")

    del optvalueonly_source

    # list of RawConfigParser methods able to change the instance
    _mutating_methods_ = ("add_section", "remove_section", "remove_option", "set")

    def __init__(
        self,
        file_or_files: Union[None, PathLike, "BytesIO", Sequence[Union[PathLike, "BytesIO"]]] = None,
        read_only: bool = True,
        merge_includes: bool = True,
        config_level: Union[Lit_config_levels, None] = None,
        repo: Union["Repo", None] = None,
    ) -> None:
        """Initialize a configuration reader to read the given file_or_files and to
        possibly allow changes to it by setting read_only False

        :param file_or_files:
            A single file path or file objects or multiple of these

        :param read_only:
            If True, the ConfigParser may only read the data , but not change it.
            If False, only a single file path or file object may be given. We will write back the changes
            when they happen, or when the ConfigParser is released. This will not happen if other
            configuration files have been included

        :param merge_includes: if True, we will read files mentioned in [include] sections and merge their
            contents into ours. This makes it impossible to write back an individual configuration file.
            Thus, if you want to modify a single configuration file, turn this off to leave the original
            dataset unaltered when reading it.

        :param repo: Reference to repository to use if [includeIf] sections are found in configuration files.
        """
        cp.RawConfigParser.__init__(self, dict_type=_OMD)
        self._dict: Callable[..., _OMD]  # type: ignore # mypy/typeshed bug?
        self._defaults: _OMD
        self._sections: _OMD  # type: ignore # mypy/typeshed bug?

        # Used in python 3, needs to stay in sync with sections for underlying implementation to work
        if not hasattr(self, "_proxies"):
            self._proxies = self._dict()

        if file_or_files is not None:
            self._file_or_files: Union[PathLike, "BytesIO", Sequence[Union[PathLike, "BytesIO"]]] = file_or_files
        else:
            if config_level is None:
                if read_only:
                    # Read-only with no explicit target: read all non-repository levels.
                    self._file_or_files = [
                        get_config_path(cast(Lit_config_levels, f)) for f in CONFIG_LEVELS if f != "repository"
                    ]
                else:
                    raise ValueError("No configuration level or configuration files specified")
            else:
                self._file_or_files = [get_config_path(config_level)]

        self._read_only = read_only
        self._dirty = False
        self._is_initialized = False
        self._merge_includes = merge_includes
        self._repo = repo
        self._lock: Union["LockFile", None] = None
        self._acquire_lock()

    def _acquire_lock(self) -> None:
        if not self._read_only:
            if not self._lock:
                if isinstance(self._file_or_files, (str, os.PathLike)):
                    file_or_files = self._file_or_files
                elif isinstance(self._file_or_files, (tuple, list, Sequence)):
                    raise ValueError(
                        "Write-ConfigParsers can operate on a single file only, multiple files have been passed"
                    )
                else:
                    file_or_files = self._file_or_files.name

                # END get filename from handle/stream
                # initialize lock base - we want to write
                self._lock = self.t_lock(file_or_files)
            # END lock check

            self._lock._obtain_lock()
        # END read-only check

    def __del__(self) -> None:
        """Write pending changes if required and release locks"""
        # NOTE: only consistent in PY2
        self.release()

    def __enter__(self) -> "GitConfigParser":
        self._acquire_lock()
        return self

    def __exit__(self, *args: Any) -> None:
        self.release()

    def release(self) -> None:
        """Flush changes and release the configuration write lock. This instance must not be used anymore afterwards.

        In Python 3, it's required to explicitly release locks and flush changes, as __del__ is not called
        deterministically anymore."""
        # checking for the lock here makes sure we do not raise during write()
        # in case an invalid parser was created who could not get a lock
        if self.read_only or (self._lock and not self._lock._has_lock()):
            return

        try:
            try:
                self.write()
            except IOError:
                log.error("Exception during destruction of GitConfigParser", exc_info=True)
        except ReferenceError:
            # This happens in PY3 ... and usually means that some state cannot be written
            # as the sections dict cannot be iterated
            # Usually when shutting down the interpreter, don't know how to fix this
            pass
        finally:
            if self._lock is not None:
                self._lock._release_lock()

    def optionxform(self, optionstr: str) -> str:
        """Do not transform options in any way when writing"""
        return optionstr

    def _read(self, fp: Union[BufferedReader, IO[bytes]], fpname: str) -> None:
        """A direct copy of the py2.4 version of the super class's _read method
        to assure it uses ordered dicts. Had to change one line to make it work.

        Future versions have this fixed, but in fact its quite embarrassing for the
        guys not to have done it right in the first place !

        Removed big comments to make it more compact.

        Made sure it ignores initial whitespace as git uses tabs"""
        cursect = None  # None, or a dictionary
        optname = None
        lineno = 0
        is_multi_line = False
        e = None  # None, or an exception

        def string_decode(v: str) -> str:
            if v[-1] == "\\":
                v = v[:-1]
            # end cut trailing escapes to prevent decode error

            return v.encode(defenc).decode("unicode_escape")
            # end

        # end

        while True:
            # we assume to read binary !
            line = fp.readline().decode(defenc)
            if not line:
                break
            lineno = lineno + 1
            # comment or blank line?
            if line.strip() == "" or self.re_comment.match(line):
                continue
            if line.split(None, 1)[0].lower() == "rem" and line[0] in "rR":
                # no leading whitespace
                continue

            # is it a section header?
            mo = self.SECTCRE.match(line.strip())
            if not is_multi_line and mo:
                sectname: str = mo.group("header").strip()
                if sectname in self._sections:
                    cursect = self._sections[sectname]
                elif sectname == cp.DEFAULTSECT:
                    cursect = self._defaults
                else:
                    cursect = self._dict((("__name__", sectname),))
                    self._sections[sectname] = cursect
                    self._proxies[sectname] = None
                # So sections can't start with a continuation line
                optname = None
            # no section header in the file?
            elif cursect is None:
                raise cp.MissingSectionHeaderError(fpname, lineno, line)
            # an option line?
            elif not is_multi_line:
                mo = self.OPTCRE.match(line)
                if mo:
                    # We might just have handled the last line, which could contain a quotation we want to remove
                    optname, vi, optval = mo.group("option", "vi", "value")
                    if vi in ("=", ":") and ";" in optval and not optval.strip().startswith('"'):
                        pos = optval.find(";")
                        if pos != -1 and optval[pos - 1].isspace():
                            optval = optval[:pos]
                    optval = optval.strip()
                    if optval == '""':
                        optval = ""
                    # end handle empty string
                    optname = self.optionxform(optname.rstrip())
                    # An opening quote without a closing one starts a multi-line value.
                    if len(optval) > 1 and optval[0] == '"' and optval[-1] != '"':
                        is_multi_line = True
                        optval = string_decode(optval[1:])
                    # end handle multi-line
                    # preserves multiple values for duplicate optnames
                    cursect.add(optname, optval)
                else:
                    # check if it's an option with no value - it's just ignored by git
                    if not self.OPTVALUEONLY.match(line):
                        if not e:
                            e = cp.ParsingError(fpname)
                        e.append(lineno, repr(line))
                    continue
            else:
                # Continuation of a multi-line value; a trailing quote ends it.
                line = line.rstrip()
                if line.endswith('"'):
                    is_multi_line = False
                    line = line[:-1]
                # end handle quotations
                optval = cursect.getlast(optname)
                cursect.setlast(optname, optval + string_decode(line))
            # END parse section or option
        # END while reading

        # if any parsing errors occurred, raise an exception
        if e:
            raise e

    def _has_includes(self) -> Union[bool, int]:
        return self._merge_includes and len(self._included_paths())

    def _included_paths(self) -> List[Tuple[str, str]]:
        """Return List all paths that must be included to configuration
        as Tuples of (option, value).
        """
        paths = []

        for section in self.sections():
            if section == "include":
                paths += self.items(section)

            match = CONDITIONAL_INCLUDE_REGEXP.search(section)
            if match is None or self._repo is None:
                continue

            keyword = match.group(1)
            value = match.group(2).strip()

            if keyword in ["gitdir", "gitdir/i"]:
                value = osp.expanduser(value)

                # Normalize the glob the way git does before matching the git dir.
                if not any(value.startswith(s) for s in ["./", "/"]):
                    value = "**/" + value
                if value.endswith("/"):
                    value += "**"

                # Ensure that glob is always case insensitive if required.
                if keyword.endswith("/i"):
                    value = re.sub(
                        r"[a-zA-Z]",
                        lambda m: "[{}{}]".format(m.group().lower(), m.group().upper()),
                        value,
                    )

                if self._repo.git_dir:
                    if fnmatch.fnmatchcase(str(self._repo.git_dir), value):
                        paths += self.items(section)

            elif keyword == "onbranch":
                try:
                    branch_name = self._repo.active_branch.name
                except TypeError:
                    # Ignore section if active branch cannot be retrieved.
                    continue

                if fnmatch.fnmatchcase(branch_name, value):
                    paths += self.items(section)

        return paths

    def read(self) -> None:  # type: ignore[override]
        """Reads the data stored in the files we have been initialized with. It will
        ignore files that cannot be read, possibly leaving an empty configuration

        :return: Nothing
        :raise IOError: if a file cannot be handled"""
        if self._is_initialized:
            return None
        self._is_initialized = True

        files_to_read: List[Union[PathLike, IO]] = [""]
        if isinstance(self._file_or_files, (str, os.PathLike)):
            # for str or Path, as str is a type of Sequence
            files_to_read = [self._file_or_files]
        elif not isinstance(self._file_or_files, (tuple, list, Sequence)):
            # could merge with above isinstance once runtime type known
            files_to_read = [self._file_or_files]
        else:  # for lists or tuples
            files_to_read = list(self._file_or_files)
        # end assure we have a copy of the paths to handle

        seen = set(files_to_read)
        num_read_include_files = 0
        while files_to_read:
            file_path = files_to_read.pop(0)
            file_ok = False

            if hasattr(file_path, "seek"):
                # must be a file objectfile-object
                file_path = cast(IO[bytes], file_path)  # replace with assert to narrow type, once sure
                self._read(file_path, file_path.name)
            else:
                # assume a path if it is not a file-object
                file_path = cast(PathLike, file_path)
                try:
                    with open(file_path, "rb") as fp:
                        file_ok = True
                        self._read(fp, fp.name)
                except IOError:
                    continue

            # Read includes and append those that we didn't handle yet
            # We expect all paths to be normalized and absolute (and will assure that is the case)
            if self._has_includes():
                for _, include_path in self._included_paths():
                    if include_path.startswith("~"):
                        include_path = osp.expanduser(include_path)
                    if not osp.isabs(include_path):
                        if not file_ok:
                            continue
                        # end ignore relative paths if we don't know the configuration file path
                        file_path = cast(PathLike, file_path)
                        assert osp.isabs(file_path), "Need absolute paths to be sure our cycle checks will work"
                        include_path = osp.join(osp.dirname(file_path), include_path)
                    # end make include path absolute
                    include_path = osp.normpath(include_path)
                    if include_path in seen or not os.access(include_path, os.R_OK):
                        continue
                    seen.add(include_path)
                    # insert included file to the top to be considered first
                    files_to_read.insert(0, include_path)
                    num_read_include_files += 1
                # each include path in configuration file
            # end handle includes
        # END for each file object to read

        # If there was no file included, we can safely write back (potentially) the configuration file
        # without altering it's meaning
        if num_read_include_files == 0:
            self._merge_includes = False
        # end

    def _write(self, fp: IO) -> None:
        """Write an .ini-format representation of the configuration state in
        git compatible format"""

        def write_section(name: str, section_dict: _OMD) -> None:
            fp.write(("[%s]\n" % name).encode(defenc))

            values: Sequence[str]  # runtime only gets str in tests, but should be whatever _OMD stores
            v: str
            for key, values in section_dict.items_all():
                if key == "__name__":
                    continue

                for v in values:
                    fp.write(("\t%s = %s\n" % (key, self._value_to_string(v).replace("\n", "\n\t"))).encode(defenc))
                # END if key is not __name__
            # END section writing

        if self._defaults:
            write_section(cp.DEFAULTSECT, self._defaults)
        value: _OMD
        for name, value in self._sections.items():
            write_section(name, value)

    def items(self, section_name: str) -> List[Tuple[str, str]]:  # type: ignore[override]
        """:return: list((option, value), ...) pairs of all items in the given section"""
        return [(k, v) for k, v in super(GitConfigParser, self).items(section_name) if k != "__name__"]

    def items_all(self, section_name: str) -> List[Tuple[str, List[str]]]:
        """:return: list((option, [values...]), ...) pairs of all items in the given section"""
        rv = _OMD(self._defaults)

        for k, vs in self._sections[section_name].items_all():
            if k == "__name__":
                continue

            if k in rv and rv.getall(k) == vs:
                continue

            for v in vs:
                rv.add(k, v)

        return rv.items_all()

    @needs_values
    def write(self) -> None:
        """Write changes to our file, if there are changes at all

        :raise IOError: if this is a read-only writer instance or if we could not obtain
            a file lock"""
        self._assure_writable("write")
        if not self._dirty:
            return None

        if isinstance(self._file_or_files, (list, tuple)):
            raise AssertionError(
                "Cannot write back if there is not exactly a single file to write to, have %i files"
                % len(self._file_or_files)
            )
        # end assert multiple files

        if self._has_includes():
            log.debug(
                "Skipping write-back of configuration file as include files were merged in."
                + "Set merge_includes=False to prevent this."
            )
            return None
        # end

        fp = self._file_or_files

        # we have a physical file on disk, so get a lock
        is_file_lock = isinstance(fp, (str, os.PathLike, IOBase))  # can't use Pathlike until 3.5 dropped
        if is_file_lock and self._lock is not None:  # else raise Error?
            self._lock._obtain_lock()

        if not hasattr(fp, "seek"):
            fp = cast(PathLike, fp)
            with open(fp, "wb") as fp_open:
                self._write(fp_open)
        else:
            fp = cast("BytesIO", fp)
            fp.seek(0)
            # make sure we do not overwrite into an existing file
            if hasattr(fp, "truncate"):
                fp.truncate()
            self._write(fp)

    def _assure_writable(self, method_name: str) -> None:
        if self.read_only:
            raise IOError("Cannot execute non-constant method %s.%s" % (self, method_name))

    def add_section(self, section: str) -> None:
        """Assures added options will stay in order"""
        return super(GitConfigParser, self).add_section(section)

    @property
    def read_only(self) -> bool:
        """:return: True if this instance may change the configuration file"""
        return self._read_only

    def get_value(
        self,
        section: str,
        option: str,
        default: Union[int, float, str, bool, None] = None,
    ) -> Union[int, float, str, bool]:
        # can default or return type include bool?
        """Get an option's value.

        If multiple values are specified for this option in the section, the
        last one specified is returned.

        :param default:
            If not None, the given default value will be returned in case
            the option did not exist
        :return: a properly typed value, either int, float or string

        :raise TypeError: in case the value could not be understood
            Otherwise the exceptions known to the ConfigParser will be raised."""
        try:
            valuestr = self.get(section, option)
        except Exception:
            if default is not None:
                return default
            raise

        return self._string_to_value(valuestr)

    def get_values(
        self,
        section: str,
        option: str,
        default: Union[int, float, str, bool, None] = None,
    ) -> List[Union[int, float, str, bool]]:
        """Get an option's values.

        If multiple values are specified for this option in the section, all are
        returned.

        :param default:
            If not None, a list containing the given default value will be
            returned in case the option did not exist
        :return: a list of properly typed values, either int, float or string

        :raise TypeError: in case the value could not be understood
            Otherwise the exceptions known to the ConfigParser will be raised."""
        try:
            self.sections()
            lst = self._sections[section].getall(option)
        except Exception:
            if default is not None:
                return [default]
            raise

        return [self._string_to_value(valuestr) for valuestr in lst]

    def _string_to_value(self, valuestr: str) -> Union[int, float, str, bool]:
        types = (int, float)
        for numtype in types:
            try:
                val = numtype(valuestr)
                # truncated value ?
                if val != float(valuestr):
                    continue
                return val
            except (ValueError, TypeError):
                continue
        # END for each numeric type

        # try boolean values as git uses them
        vl = valuestr.lower()
        if vl == "false":
            return False
        if vl == "true":
            return True

        if not isinstance(valuestr, str):
            raise TypeError(
                "Invalid value type: only int, long, float and str are allowed",
                valuestr,
            )

        return valuestr

    def _value_to_string(self, value: Union[str, bytes, int, float, bool]) -> str:
        if isinstance(value, (int, float, bool)):
            return str(value)
        return force_text(value)

    @needs_values
    @set_dirty_and_flush_changes
    def set_value(self, section: str, option: str, value: Union[str, bytes, int, float, bool]) -> "GitConfigParser":
        """Sets the given option in section to the given value.

        It will create the section if required, and will not throw as opposed to the default
        ConfigParser 'set' method.

        :param section: Name of the section in which the option resides or should reside
        :param option: Name of the options whose value to set
        :param value: Value to set the option to. It must be a string or convertible
            to a string
        :return: this instance"""
        if not self.has_section(section):
            self.add_section(section)
        self.set(section, option, self._value_to_string(value))
        return self

    @needs_values
    @set_dirty_and_flush_changes
    def add_value(self, section: str, option: str, value: Union[str, bytes, int, float, bool]) -> "GitConfigParser":
        """Adds a value for the given option in section.

        It will create the section if required, and will not throw as opposed to the default
        ConfigParser 'set' method. The value becomes the new value of the option as returned
        by 'get_value', and appends to the list of values returned by 'get_values`'.

        :param section: Name of the section in which the option resides or should reside
        :param option: Name of the option
        :param value: Value to add to option. It must be a string or convertible
            to a string
        :return: this instance"""
        if not self.has_section(section):
            self.add_section(section)
        self._sections[section].add(option, self._value_to_string(value))
        return self

    def rename_section(self, section: str, new_name: str) -> "GitConfigParser":
        """rename the given section to new_name

        :raise ValueError: if section doesn't exit
        :raise ValueError: if a section with new_name does already exist
        :return: this instance
        """
        if not self.has_section(section):
            raise ValueError("Source section '%s' doesn't exist" % section)
        if self.has_section(new_name):
            raise ValueError("Destination section '%s' already exists" % new_name)

        super(GitConfigParser, self).add_section(new_name)
        new_section = self._sections[new_name]
        for k, vs in self.items_all(section):
            new_section.setall(k, vs)
        # end for each value to copy

        # This call writes back the changes, which is why we don't have the respective decorator
        self.remove_section(section)
        return self
/OctoBot-Services-1.6.2.tar.gz/OctoBot-Services-1.6.2/octobot_services/services/service_factory.py |
import octobot_commons.logging as logging
import octobot_services.constants as constants
import octobot_services.services as services
class ServiceFactory:
    """Creates, configures and health-checks service singletons from their classes."""

    def __init__(self, config):
        # Global bot configuration dict; service instances get registered into it.
        self.config = config
        self.logger = logging.get_logger(self.__class__.__name__)

    @staticmethod
    def get_available_services() -> list:
        # All direct subclasses of AbstractService are considered available
        # (NOTE(review): only direct subclasses - grandchildren are not listed).
        return [service_class for service_class in services.AbstractService.__subclasses__()]

    async def create_or_get_service(self, service_class, backtesting_enabled, edited_config) -> bool:
        """
        create_or_get_service will create a service instance if it doesn't exist, check the existing one otherwise
        :param service_class: the class of the service to create
        :return: True if the created service is working properly, False otherwise
        """
        # service_class.instance() is presumably a singleton accessor - the same
        # object is returned on every call (TODO confirm against AbstractService).
        service_instance = service_class.instance()
        if service_instance.get_has_been_created():
            return service_instance.is_healthy()
        else:
            return await self._create_service(service_instance, backtesting_enabled, edited_config)

    async def _create_service(self, service, backtesting_enabled, edited_config) -> bool:
        # Populate the bare singleton with runtime context before its checkup.
        service.is_backtesting_enabled = backtesting_enabled
        service.set_has_been_created(True)
        service.logger = logging.get_logger(service.get_name())
        service.config = self.config
        service.edited_config = edited_config
        if service.has_required_configuration():
            return await self._perform_checkup(service)
        else:
            if service.get_should_warn():
                self.logger.info(f"{service.get_name()} can't be initialized: configuration "
                                 f"is missing, wrong or incomplete. This is normal if you did not configure it yet.")
            return False

    async def _perform_checkup(self, service) -> bool:
        """Prepare the service, register it in the config and verify it responds.

        Any exception during preparation or checkup is logged and reported as
        an unhealthy service (returns False) rather than propagated."""
        try:
            await service.prepare()
            # Ensure the nested config structure exists before registering the instance.
            if constants.CONFIG_CATEGORY_SERVICES not in self.config:
                self.config[constants.CONFIG_CATEGORY_SERVICES] = {}
            if service.get_type() not in self.config[constants.CONFIG_CATEGORY_SERVICES]:
                self.config[constants.CONFIG_CATEGORY_SERVICES][service.get_type()] = {}
            self.config[constants.CONFIG_CATEGORY_SERVICES][service.get_type()][constants.CONFIG_SERVICE_INSTANCE] = \
                service
            if await service.say_hello():
                return service.is_healthy()
            else:
                self.logger.warning(f"{service.get_name()} initial checkup failed.")
        except Exception as e:
            self.logger.exception(e, True, f"{service.get_name()} preparation produced the following error: {e}")
        # Reached when say_hello() failed or an exception occurred.
        return False

    @staticmethod
    def has_already_been_created(service_class):
        # True once create_or_get_service() has initialized this service's singleton.
        return service_class.instance().get_has_been_created()
/CorePost-0.0.16.tar.gz/CorePost-0.0.16/corepost/routing.py | from collections import defaultdict
from corepost import Response, RESTException
from corepost.enums import Http, HttpHeader
from corepost.utils import getMandatoryArgumentNames, convertToJson, safeDictUpdate
from corepost.convert import convertForSerialization, generateXml
from corepost.filters import IRequestFilter, IResponseFilter
from enums import MediaType
from twisted.internet import defer
from twisted.web.http import parse_qs
from twisted.python import log
import re, copy, exceptions, json, yaml, logging
from xml.etree import ElementTree
class UrlRouter:
    ''' Common class for containing info related to routing a request to a function '''

    # Matches dynamic URL path segments like <name>, <int:name> or <float:name>.
    __urlMatcher = re.compile(r"<(int|float|):?([^/]+)>")
    # Maps the declared segment type to the regex fragment that captures it
    # ("arg" is replaced with the actual argument name at compile time).
    __urlRegexReplace = {"":r"(?P<arg>([^/]+))","int":r"(?P<arg>\d+)","float":r"(?P<arg>\d+.?\d*)"}
    # Converters applied to captured string values for typed segments.
    __typeConverters = {"int":int,"float":float}

    def __init__(self,f,url,methods,accepts,produces,cache):
        # f: the handler function this router dispatches to.
        self.__f = f
        self.__url = url
        # Normalize single values to tuples so iteration is uniform.
        self.__methods = methods if isinstance(methods,tuple) else (methods,)
        self.__accepts = accepts if isinstance(accepts,tuple) else (accepts,)
        self.__produces = produces
        self.__cache = cache
        self.__argConverters = {} # dict of arg names -> group index
        self.__validators = {}
        # Mandatory handler arguments, skipping 'self' and 'request'.
        self.__mandatory = getMandatoryArgumentNames(f)[2:]

    def compileMatcherForFullUrl(self):
        """Compiles the regex matches once the URL has been updated to include the full path from the parent class"""
        #parse URL into regex used for matching
        m = UrlRouter.__urlMatcher.findall(self.url)
        self.__matchUrl = "^%s$" % self.url
        for match in m:
            if len(match[0]) == 0:
                # string
                self.__argConverters[match[1]] = None
                self.__matchUrl = self.__matchUrl.replace("<%s>" % match[1],
                                    UrlRouter.__urlRegexReplace[match[0]].replace("arg",match[1]))
            else:
                # non string
                self.__argConverters[match[1]] = UrlRouter.__typeConverters[match[0]]
                self.__matchUrl = self.__matchUrl.replace("<%s:%s>" % match,
                                    UrlRouter.__urlRegexReplace[match[0]].replace("arg",match[1]))

        self.__matcher = re.compile(self.__matchUrl)

    @property
    def cache(self):
        '''Indicates if this URL should be cached or not'''
        return self.__cache

    @property
    def methods(self):
        # Tuple of HTTP methods this route responds to.
        return self.__methods

    @property
    def url(self):
        # The (possibly templated) URL pattern.
        return self.__url

    @property
    def accepts(self):
        # Tuple of content types this route accepts.
        return self.__accepts

    def addValidator(self,fieldName,validator):
        '''Adds additional field-specific formencode validators'''
        self.__validators[fieldName] = validator

    def getArguments(self,url):
        '''
        Returns None if nothing matched (i.e. URL does not match), empty dict if no args found (i,e, static URL)
        or dict with arg/values for dynamic URLs
        '''
        g = self.__matcher.search(url)
        if g != None:
            args = g.groupdict()
            # convert to expected datatypes
            if len(args) > 0:
                for name in args.keys():
                    converter = self.__argConverters[name]
                    if converter != None:
                        args[name] = converter(args[name])
            return args
        else:
            return None

    def call(self,instance,request,**kwargs):
        '''Forwards call to underlying method'''
        # Fail fast with a clear error before invoking the handler.
        for arg in self.__mandatory:
            if arg not in kwargs:
                raise TypeError("Missing mandatory argument '%s'" % arg)
        return self.__f(instance,request,**kwargs)

    def __str__(self):
        return "%s %s" % (self.url, self.methods)
class UrlRouterInstance():
    """Pairs a UrlRouter definition with the service instance it executes against"""
    def __init__(self, clazz, urlRouter):
        # service object the routed function will be invoked on
        self.clazz = clazz
        # routing metadata (URL template, methods, argument converters)
        self.urlRouter = urlRouter
    def __str__(self):
        # render as the route's URL for readable logging
        return self.urlRouter.url
class CachedUrl:
    '''
    Memoizes a previously routed URL (resolved instance plus extracted path
    arguments) so commonly accessed REST URLs skip regex matching on every call.
    '''
    def __init__(self, urlRouterInstance, args):
        # keep the resolved pair together; exposed read-only via the properties
        self.__entry = (urlRouterInstance, args)
    @property
    def urlRouterInstance(self):
        '''The UrlRouterInstance this cache entry resolves to'''
        return self.__entry[0]
    @property
    def args(self):
        '''Path arguments extracted when the URL was first routed'''
        return self.__entry[1]
class RequestRouter:
    '''
    Class that handles request->method routing functionality to any type of resource

    Builds per-HTTP-method lookup tables (URL -> content type -> UrlRouterInstance),
    resolves incoming requests against them (with caching via CachedUrl), invokes the
    routed function and renders the result into a Response. Request/response filters
    are applied around the dispatch.
    '''
    def __init__(self,restServiceContainer,schema=None,filters=()):
        '''
        Constructor

        :param restServiceContainer: container whose .services expose routed functions
        :param schema: optional schema object (stored, not used in this class)
        :param filters: iterable of objects implementing IRequestFilter and/or IResponseFilter
        :raises RuntimeError: if a filter implements neither interface
        '''
        # method -> url -> content type -> UrlRouterInstance
        self.__urls = {Http.GET: defaultdict(dict),Http.POST: defaultdict(dict),Http.PUT: defaultdict(dict),Http.DELETE: defaultdict(dict)}
        # same shape, but holding CachedUrl entries for already-resolved routes
        self.__cachedUrls = {Http.GET: defaultdict(dict),Http.POST: defaultdict(dict),Http.PUT: defaultdict(dict),Http.DELETE: defaultdict(dict)}
        self.__urlRouterInstances = {}
        self.__schema = schema
        self.__registerRouters(restServiceContainer)
        self.__urlContainer = restServiceContainer
        self.__requestFilters = []
        self.__responseFilters = []
        #filters
        if filters != None:
            for webFilter in filters:
                valid = False
                if IRequestFilter.providedBy(webFilter):
                    self.__requestFilters.append(webFilter)
                    valid = True
                if IResponseFilter.providedBy(webFilter):
                    self.__responseFilters.append(webFilter)
                    valid = True
                if not valid:
                    raise RuntimeError("filter %s must implement IRequestFilter or IResponseFilter" % webFilter.__class__.__name__)
    @property
    def path(self):
        # NOTE(review): self.__path is never assigned anywhere in this class, so
        # accessing this property raises AttributeError - confirm whether it is
        # dead code or set externally
        return self.__path
    def __registerRouters(self,restServiceContainer):
        """Main method responsible for registering routers"""
        from types import FunctionType
        for service in restServiceContainer.services:
            # check if the service has a root path defined, which is optional
            rootPath = service.__class__.path if "path" in service.__class__.__dict__ else ""
            for key in service.__class__.__dict__:
                func = service.__class__.__dict__[key]
                # handle REST resources directly on the CorePost resource
                if type(func) == FunctionType and hasattr(func,'corepostRequestRouter'):
                    # if specified, add class path to each function's path
                    rq = func.corepostRequestRouter
                    rq.url = "%s%s" % (rootPath,rq.url)
                    # remove first and trailing '/' to standardize URLs
                    start = 1 if rq.url[0:1] == "/" else 0
                    end = -1 if rq.url[len(rq.url) -1] == '/' else len(rq.url)
                    rq.url = rq.url[start:end]
                    # now that the full URL is set, compile the matcher for it
                    rq.compileMatcherForFullUrl()
                    for method in rq.methods:
                        for accepts in rq.accepts:
                            urlRouterInstance = UrlRouterInstance(service,rq)
                            self.__urls[method][rq.url][accepts] = urlRouterInstance
                            self.__urlRouterInstances[func] = urlRouterInstance # needed so that we can lookup the urlRouterInstance for a specific function
    def getResponse(self,request):
        """Finds the appropriate instance and dispatches the request to the registered function. Returns the appropriate Response object"""
        # see if already cached
        response = None
        try:
            if len(self.__requestFilters) > 0:
                self.__filterRequests(request)
            # standardize URL and remove trailing "/" if necessary
            standardized_postpath = request.postpath if (request.postpath[-1] != '' or request.postpath == ['']) else request.postpath[:-1]
            path = '/'.join(standardized_postpath)
            # missing Content-Type header is treated as a wildcard match
            contentType = MediaType.WILDCARD if HttpHeader.CONTENT_TYPE not in request.received_headers else request.received_headers[HttpHeader.CONTENT_TYPE]
            urlRouterInstance, pathargs = None, None
            # fetch URL arguments <-> function from cache if hit at least once before
            if contentType in self.__cachedUrls[request.method][path]:
                cachedUrl = self.__cachedUrls[request.method][path][contentType]
                urlRouterInstance,pathargs = cachedUrl.urlRouterInstance, cachedUrl.args
            else:
                # first time this URL is called
                instance = None
                # go through all the URLs, pick up the ones matching by content type
                # and then validate which ones match by path/argument to a particular UrlRouterInstance
                for contentTypeInstances in self.__urls[request.method].values():
                    if contentType in contentTypeInstances:
                        # there is an exact function for this incoming content type
                        instance = contentTypeInstances[contentType]
                    elif MediaType.WILDCARD in contentTypeInstances:
                        # fall back to any wildcard method
                        instance = contentTypeInstances[MediaType.WILDCARD]
                    if instance != None:
                        # see if the path arguments match up against any function @route definition
                        args = instance.urlRouter.getArguments(path)
                        if args != None:
                            if instance.urlRouter.cache:
                                self.__cachedUrls[request.method][path][contentType] = CachedUrl(instance, args)
                            urlRouterInstance,pathargs = instance,args
                            break
            #actual call
            if urlRouterInstance != None and pathargs != None:
                allargs = copy.deepcopy(pathargs)
                try:
                    # if POST/PUT, check if we need to automatically parse JSON, YAML, XML
                    self.__parseRequestData(request)
                    # parse request arguments from form or JSON docss
                    self.__addRequestArguments(request, allargs)
                    urlRouter = urlRouterInstance.urlRouter
                    val = urlRouter.call(urlRouterInstance.clazz,request,**allargs)
                    #handle Deferreds natively
                    if isinstance(val,defer.Deferred):
                        # add callback to finish the request
                        val.addCallback(self.__finishDeferred,request)
                        val.addErrback(self.__finishDeferredError,request)
                        return val
                    else:
                        #special logic for POST to return 201 (created)
                        if request.method == Http.POST:
                            if hasattr(request, 'code'):
                                if request.code == 200:
                                    request.setResponseCode(201)
                            else:
                                request.setResponseCode(201)
                        response = self.__generateResponse(request, val, request.code)
                # NOTE(review): exceptions.TypeError comes from the Python-2-only
                # 'exceptions' module; this branch will not import on Python 3
                except exceptions.TypeError as ex:
                    log.msg(ex,logLevel=logging.WARN)
                    response = self.__createErrorResponse(request,400,"%s" % ex)
                except RESTException as ex:
                    """Convert REST exceptions to their responses. Input errors log at a lower level to avoid overloading logs"""
                    if (ex.response.code in (400,404)):
                        log.msg(ex,logLevel=logging.WARN)
                    else:
                        log.err(ex)
                    response = ex.response
                except Exception as ex:
                    log.err(ex)
                    response = self.__createErrorResponse(request,500,"Unexpected server error: %s\n%s" % (type(ex),ex))
            else:
                log.msg("URL %s not found" % path,logLevel=logging.WARN)
                response = self.__createErrorResponse(request,404,"URL '%s' not found\n" % request.path)
        except Exception as ex:
            log.err(ex)
            response = self.__createErrorResponse(request,500,"Internal server error: %s" % ex)
        # response handling
        if response != None and len(self.__responseFilters) > 0:
            self.__filterResponses(request,response)
        return response
    def __generateResponse(self,request,response,code=200):
        """
        Takes care of automatically rendering the response and converting it to appropriate format (text,XML,JSON,YAML)
        depending on what the caller can accept. Returns Response
        """
        if isinstance(response, str):
            return Response(code,response,{HttpHeader.CONTENT_TYPE: MediaType.TEXT_PLAIN})
        elif isinstance(response, Response):
            return response
        else:
            (content,contentType) = self.__convertObjectToContentType(request, response)
            return Response(code,content,{HttpHeader.CONTENT_TYPE:contentType})
    def __convertObjectToContentType(self,request,obj):
        """
        Takes care of converting an object (non-String) response to the appropriate format, based on the what the caller can accept.
        Returns a tuple of (content,contentType)
        """
        obj = convertForSerialization(obj)
        if HttpHeader.ACCEPT in request.received_headers:
            accept = request.received_headers[HttpHeader.ACCEPT]
            if MediaType.APPLICATION_JSON in accept:
                return (convertToJson(obj),MediaType.APPLICATION_JSON)
            elif MediaType.TEXT_YAML in accept:
                return (yaml.dump(obj),MediaType.TEXT_YAML)
            elif MediaType.APPLICATION_XML in accept or MediaType.TEXT_XML in accept:
                return (generateXml(obj),MediaType.APPLICATION_XML)
            else:
                # no idea, let's do JSON
                return (convertToJson(obj),MediaType.APPLICATION_JSON)
        else:
            # called has no accept header, let's default to JSON
            return (convertToJson(obj),MediaType.APPLICATION_JSON)
    def __finishDeferred(self,val,request):
        """Finishes any Defered/inlineCallback methods. Returns Response"""
        if isinstance(val,Response):
            return val
        elif val != None:
            try:
                return self.__generateResponse(request,val)
            except Exception as ex:
                msg = "Unexpected server error: %s\n%s" % (type(ex),ex)
                return self.__createErrorResponse(request, 500, msg)
        else:
            # no payload: respond with an empty body (code 209 as used by this framework)
            return Response(209,None)
    def __finishDeferredError(self,error,request):
        """Finishes any Defered/inlineCallback methods that raised an error. Returns Response"""
        log.err(error, "Deferred failed")
        return self.__createErrorResponse(request, 500,"Internal server error")
    def __createErrorResponse(self,request,code,message):
        """Common method for rendering errors"""
        return Response(code=code, entity=message, headers={"content-type": MediaType.TEXT_PLAIN})
    def __parseRequestData(self,request):
        '''Automatically parses JSON,XML,YAML if present

        Attaches the parsed document to the request as request.json / request.xml /
        request.yaml; raises TypeError when the body cannot be parsed.
        '''
        if request.method in (Http.POST,Http.PUT) and HttpHeader.CONTENT_TYPE in request.received_headers.keys():
            contentType = request.received_headers["content-type"]
            if contentType == MediaType.APPLICATION_JSON:
                try:
                    request.json = json.loads(request.content.read())
                except Exception as ex:
                    raise TypeError("Unable to parse JSON body: %s" % ex)
            elif contentType in (MediaType.APPLICATION_XML,MediaType.TEXT_XML):
                try:
                    request.xml = ElementTree.XML(request.content.read())
                except Exception as ex:
                    raise TypeError("Unable to parse XML body: %s" % ex)
            elif contentType == MediaType.TEXT_YAML:
                try:
                    request.yaml = yaml.safe_load(request.content.read())
                except Exception as ex:
                    raise TypeError("Unable to parse YAML body: %s" % ex)
    def __addRequestArguments(self,request,allargs):
        """Parses the request form arguments OR JSON document root elements to build the list of arguments to a method"""
        # handler for weird Twisted logic where PUT does not get form params
        # see: http://twistedmatrix.com/pipermail/twisted-web/2007-March/003338.html
        requestargs = request.args
        if request.method == Http.PUT and HttpHeader.CONTENT_TYPE in request.received_headers.keys() \
            and request.received_headers[HttpHeader.CONTENT_TYPE] == MediaType.APPLICATION_FORM_URLENCODED:
            requestargs = parse_qs(request.content.read(), 1)
        #merge form args
        if len(requestargs.keys()) > 0:
            for arg in requestargs.keys():
                # maintain first instance of an argument always
                safeDictUpdate(allargs,arg,requestargs[arg][0])
        elif hasattr(request,'json'):
            # if JSON, parse root elements instead of form elements
            for key in request.json.keys():
                safeDictUpdate(allargs, key, request.json[key])
        elif hasattr(request,'yaml'):
            # if YAML parse root elements instead of form elements
            for key in request.yaml.keys():
                safeDictUpdate(allargs, key, request.yaml[key])
        elif hasattr(request,'xml'):
            # if XML, parse attributes first, then root nodes
            for key in request.xml.attrib:
                safeDictUpdate(allargs, key, request.xml.attrib[key])
            for el in request.xml.findall("*"):
                safeDictUpdate(allargs, el.tag,el.text)
    def __filterRequests(self,request):
        """Filters incoming requests"""
        for webFilter in self.__requestFilters:
            webFilter.filterRequest(request)
    def __filterResponses(self,request,response):
        """Filters outgoing responses"""
        for webFilter in self.__responseFilters:
            webFilter.filterResponse(request,response)
/INDIpy-0.4.0.tar.gz/INDIpy-0.4.0/indi/client/client.py | from __future__ import annotations
import asyncio
import logging
import uuid
from typing import Any, Callable, Dict, Iterable, List, Optional, Type
import indi
from indi import message
from indi.client import events
from indi.client.device import Device
from indi.message import IndiMessage, const
logger = logging.getLogger(__name__)
class _CallbackConfig:
def __init__(
self,
device: Optional[str],
vector: Optional[str],
element: Optional[str],
event_type: Type[events.BaseEvent],
callback: Callable,
uuid: uuid.UUID,
):
self.device = device
self.vector = vector
self.element = element
self.event_type = event_type
self.callback = callback
self.uuid = uuid
def accepts_event(self, event: events.BaseEvent) -> bool:
"""Checks if event should be processed by callbacked associated with this configuration.
:param event: An event
:type event: events.BaseEvent
:return: True if event should be processed
:rtype: bool
"""
return (
self.device
in (
None,
event.device.name if event.device else None,
)
and self.vector
in (
None,
event.vector.name if event.vector else None,
)
and self.element
in (
None,
event.element.name if event.element else None,
)
and isinstance(event, self.event_type)
)
class _EventWaitResult:
def __init__(self) -> None:
self.event: Optional[events.BaseEvent] = None
self.timeout = False
class BaseClient:
    """Transport-agnostic INDI client: device registry plus event subscription machinery.

    Subclasses must implement send_message() for their transport (see Client below).
    """
    def __init__(self) -> None:
        """Constructor for INDI client."""
        # device name -> Device
        self.devices: Dict[str, Device] = {}
        # all registered event-callback configurations
        self.callbacks: List[_CallbackConfig] = []
    def __getitem__(self, key: str) -> Device:
        return self.devices[key]
    def __contains__(self, key: str) -> bool:
        return key in self.devices
    def get_device(self, name: str) -> Optional[Device]:
        """Returns the named device, or None if unknown."""
        return self.devices.get(name)
    def list_devices(self) -> Iterable[str]:
        """Lists all known device names.
        :return: List of all known device names
        :rtype: Iterable[str]
        """
        return self.devices.keys()
    def set_device(self, name: str, device: Device):
        """Registers a device and performs the BLOB handshake for it."""
        self.devices[name] = device
        self.blob_handshake(name)
    def process_message(self, msg: IndiMessage):
        """Routes an incoming INDI message to the relevant device, creating it on DefVector."""
        device = None
        if isinstance(msg, message.DefVector):
            device = self.get_device(msg.device)
            # a DefVector for an unknown device implicitly defines it
            if not device:
                device = Device(self, msg.device)
                self.set_device(msg.device, device)
        if isinstance(msg, message.SetVector):
            device = self.get_device(msg.device)
        if isinstance(msg, message.DelProperty):
            device = self.get_device(msg.device)
        if device:
            device.process_message(msg)
    def send_message(self, msg: IndiMessage):
        """Sends INDI message to server using control connection.
        :param msg: INDI message to be sent
        :type msg: IndiMessage
        """
        raise NotImplementedError()
    def onevent(
        self,
        *,
        callback: Callable,
        device: Optional[str] = None,
        vector: Optional[str] = None,
        element: Optional[str] = None,
        event_type: Type[events.BaseEvent] = events.BaseEvent,
    ) -> uuid.UUID:
        """Attaches event callback.
        :param callback: Callback
        :type callback: Callable
        :param device: Optional device name, defaults to None
        :type device: str, optional
        :param vector: Optional vector name, defaults to None
        :type vector: str, optional
        :param element: Optional element name, defaults to None
        :type element: str, optional
        :param event_type: Optional event type, defaults to events.BaseEvent
        :type event_type: Type[events.BaseEvent], optional
        :return: UUID of created event attachment
        :rtype: uuid
        """
        uid = uuid.uuid4()
        callback_config = _CallbackConfig(
            device=device,
            vector=vector,
            element=element,
            event_type=event_type,
            callback=callback,
            uuid=uid,
        )
        self.callbacks.append(callback_config)
        return uid
    def rmonevent(
        self,
        uuid: Optional[uuid.UUID] = None,
        device: Optional[str] = None,
        vector: Optional[str] = None,
        element: Optional[str] = None,
        event_type: Optional[Type[events.BaseEvent]] = None,
        callback: Optional[Callable] = None,
    ):
        """Removes callback registrations; a None criterion matches any value, so
        all registrations matching every passed criterion are removed."""
        to_rm = list()
        for cb in self.callbacks:
            if (
                uuid
                in (
                    None,
                    cb.uuid,
                )
                and device
                in (
                    None,
                    cb.device,
                )
                and vector
                in (
                    None,
                    cb.vector,
                )
                and element
                in (
                    None,
                    cb.element,
                )
                and event_type
                in (
                    None,
                    cb.event_type,
                )
                and callback in (None, cb.callback)
            ):
                to_rm.append(cb)
        # remove after iteration to avoid mutating the list while scanning it
        for cb in to_rm:
            self.callbacks.remove(cb)
    async def waitforevent(
        self,
        device: Optional[str] = None,
        vector: Optional[str] = None,
        element: Optional[str] = None,
        event_type: Type[events.BaseEvent] = events.BaseEvent,
        expect: Any = None,
        initial: Any = None,
        check: Optional[Callable] = None,
        timeout: Optional[float] = None,
        polling_enabled: bool = True,
        polling_delay: float = 1.0,
        polling_interval: float = 1.0,
    ):
        """Waits until a matching event satisfies the condition, with optional
        GetProperties polling and optional timeout (raises Exception on timeout)."""
        # exactly one stop-condition may be given: a value to expect, an initial
        # value to move away from, or an arbitrary check callable
        assert 1 == sum(
            1
            for _ in filter(
                None.__ne__,
                (
                    expect,
                    initial,
                    check,
                ),
            )
        ), "Exactly one of `expect`, `initial`, `check` has to be passed"
        lock = asyncio.Event()
        result = _EventWaitResult()
        def cb(event: events.BaseEvent):
            # decide whether this event satisfies the configured stop-condition
            release = False
            if check is not None:
                if check(event):
                    release = True
            if expect is not None:
                if isinstance(event, events.ValueUpdate):
                    if event.new_value == expect:
                        release = True
                if isinstance(event, events.StateUpdate):
                    if event.new_state == expect:
                        release = True
            if initial is not None:
                if isinstance(event, events.ValueUpdate):
                    if event.new_value != initial:
                        release = True
                if isinstance(event, events.StateUpdate):
                    if event.new_state != initial:
                        release = True
            if release:
                result.event = event
                lock.set()
        if polling_enabled:
            async def poll():
                # periodically re-request properties until the wait is released
                kwargs = {}
                if device:
                    kwargs["device"] = device
                if vector:
                    kwargs["name"] = vector
                msg = message.GetProperties(version=indi.__protocol_version__, **kwargs)
                await asyncio.sleep(polling_delay)
                while not lock.is_set():
                    self.send_message(msg)
                    await asyncio.sleep(polling_interval)
            asyncio.get_running_loop().create_task(poll())
        async def timeout_check():
            await asyncio.sleep(timeout)
            if not lock.is_set():
                result.timeout = True
                lock.set()
        uid = self.onevent(
            device=device,
            vector=vector,
            element=element,
            event_type=event_type,
            callback=cb,
        )
        if timeout is not None and timeout > 0:
            asyncio.get_running_loop().create_task(timeout_check())
        await lock.wait()
        # detach the temporary callback regardless of outcome
        self.rmonevent(uuid=uid)
        if result.timeout:
            raise Exception("Timeout occurred")
        return result.event
    def trigger_event(self, event: events.BaseEvent):
        """Dispatches an event to every matching callback; coroutine callbacks are
        scheduled as tasks, exceptions in handlers are logged and swallowed."""
        for callback in self.callbacks:
            if callback.accepts_event(event):
                try:
                    if asyncio.iscoroutinefunction(callback.callback):
                        asyncio.get_running_loop().create_task(callback.callback(event))
                    else:
                        callback.callback(event)
                except:
                    logger.exception("Error in event handler")
    def handshake(self, device=None, name=None, version=indi.__protocol_version__):
        """Sends the initial GetProperties message (optionally scoped to a device/property)."""
        self.send_message(
            message.GetProperties(version=version, device=device, name=name)
        )
    def blob_handshake(self, device):
        """Disables BLOB transfer for the device on this (control) connection."""
        self.send_message(
            message.EnableBLOB(device=device, value=const.BLOBEnable.NEVER)
        )
class Client(BaseClient):
    """INDI client bound to two concrete connections: one for control messages and
    one dedicated to BLOB transfers."""
    def __init__(self, control_connection, blob_connection):
        """
        :param control_connection: connection used for all non-BLOB INDI traffic
        :param blob_connection: separate connection used only for BLOB transfers
        """
        super().__init__()
        self.control_connection = control_connection
        self.blob_connection = blob_connection
        # handlers are created in start(); None until then
        self.control_connection_handler = None
        self.blob_connection_handler = None
    def send_message(self, msg: IndiMessage):
        """Sends INDI message to server using control connection.
        :param msg: INDI message to be sent
        :type msg: IndiMessage
        """
        self.control_connection_handler.send_message(msg)
    def blob_handshake(self, device):
        """Disables BLOBs on the control connection (via super), then enables
        BLOB-only mode for the device on the dedicated BLOB connection."""
        super().blob_handshake(device)
        self.blob_connection_handler.send_message(
            message.EnableBLOB(device=device, value=const.BLOBEnable.ONLY)
        )
    async def start(self):
        """Starts client and connects to the server.
        Connects both connections (control and blob) and sends initial GetProperties message to the server.
        """
        self.control_connection_handler = await self.control_connection.connect(
            self.process_message
        )
        self.blob_connection_handler = await self.blob_connection.connect(
            self.process_message, for_blobs=True
        )
        # start background readers for both connections
        asyncio.get_running_loop().create_task(
            self.control_connection_handler.wait_for_messages()
        )
        asyncio.get_running_loop().create_task(
            self.blob_connection_handler.wait_for_messages()
        )
        self.handshake()
    def stop(self):
        """Closes both connections."""
        self.control_connection_handler.close()
        self.blob_connection_handler.close()
/NetworkSim-0.2.2.tar.gz/NetworkSim-0.2.2/docs/source/_templates/module.rst | .. _mod-{{ fullname }}:
{{ fullname | underline }}
.. automodule:: {{ fullname }}
{% block functions %}
{% if functions %}
.. rubric:: Functions
.. autosummary::
:toctree: {{ objname }}
:template: function.rst
{% for item in functions %}
{{ item }}
{%- endfor %}
{% endif %}
{% endblock %}
{% block classes %}
{% if classes %}
.. rubric:: Classes
.. autosummary::
:toctree: {{ objname }}
:template: class.rst
{% for item in classes %}
{{ item }}
{%- endfor %}
{% endif %}
{% endblock %}
{% block exceptions %}
{% if exceptions %}
.. rubric:: Exceptions
.. autosummary::
{% for item in exceptions %}
{{ item }}
{%- endfor %}
{% endif %}
{% endblock %}
| PypiClean |
/IdracRedfishSupportTest-0.0.7.tar.gz/IdracRedfishSupportTest-0.0.7/FirmwareUpdateLocalRepoREDFISH.py |
import argparse
import getpass
import glob
import json
import logging
import os
import platform
import re
import requests
import subprocess
import sys
import time
import warnings
from datetime import datetime
from pprint import pprint
warnings.filterwarnings("ignore")
# Command-line interface: connection options (-ip/-u/-p/-x/--ssl) plus the two
# actions (--get inventory, --location <repo dir> to apply updates).
parser=argparse.ArgumentParser(description="Python script using Redfish API to update multiple devices using a local directory which contains all Dell Update packages only. Script will first update all devices except for iDRAC that do not require a reboot (examples: DIAGs or DriverPack). Next will download and create update jobs for all devices that need a reboot to apply (examples: BIOS, RAID, NIC). Once all jobs created, server will reboot to execute all of them. Last script will run iDRAC update if detected. iDRAC update will run last since the iDRAC reboots after the update is complete.")
parser.add_argument('-ip',help='iDRAC IP address', required=False)
parser.add_argument('-u', help='iDRAC username', required=False)
parser.add_argument('-p', help='iDRAC password. If you do not pass in argument -p, script will prompt to enter user password which will not be echoed to the screen.', required=False)
parser.add_argument('-x', help='Pass in X-Auth session token for executing Redfish calls. All Redfish calls will use X-Auth token instead of username/password', required=False)
parser.add_argument('--ssl', help='SSL cert verification for all Redfish calls, pass in value \"true\" or \"false\". By default, this argument is not required and script ignores validating SSL cert for all Redfish calls.', required=False)
parser.add_argument('--script-examples', action="store_true", help='Prints script examples')
parser.add_argument('--get', help='Get current supported devices for firmware updates and their current firmware versions', action="store_true", required=False)
parser.add_argument('--location', help='Pass in the full directory path location of the directory which contains all Dell update packages (DUP). Note: only Windows DUPs are supported by iDRAC interfaces to perform updates. Note: make sure only DUPs are in this directory and no other files, directories. Note: If planning to update iDRAC, make sure the DUP name package contains the word idrac (default DUP name does contain wording iDRAC, recommended not to change it)', required=False)
# parsed arguments are consumed as a plain dict throughout the script
args = vars(parser.parse_args())
logging.basicConfig(format='%(message)s', stream=sys.stdout, level=logging.INFO)
def script_examples():
    """Print a usage example for this script, then exit."""
    examples = """\n- FirmwareUpdateLocalRepoREDFISH.py -ip 192.168.0.120 -u root -p calvin --location C:\\Users\\administrator\\Downloads\\R740xd_repo, this example will apply updates for all DUP packages detected in this directory path."""
    print(examples)
    sys.exit(0)
# Example of local directory contents containing Dell DUPs:
#>>> glob.glob("C://Users//administrator//Downloads//R740xd_repo/*")
#['C://Users//administrator//Downloads//R740xd_repo\\BIOS_W77H1_WN64_2.16.1.EXE',
#'C://Users//administrator//Downloads//R740xd_repo\\Diagnostics_Application_R30YT_WN64_4301A73_4301.74_01.EXE',
#'C://Users//administrator//Downloads//R740xd_repo\\Firmware_60K1J_WN32_2.52_A00.EXE',
#'C://Users//administrator//Downloads//R740xd_repo\\iDRAC-with-Lifecycle-Controller_Firmware_KMYV9_WN64_6.10.00.00_A00.EXE',
#'C://Users//administrator//Downloads//R740xd_repo\\Network_Firmware_T3KH2_WN64_15.05.12_A00-00_02.EXE',
#'C://Users//administrator//Downloads//R740xd_repo\\SAS-RAID_Firmware_NYKX7_WN32_25.5.9.0001_A15.EXE']
def check_supported_idrac_version():
    """Verify the target iDRAC supports multipart HTTP push updates.

    Exits the script when credentials are rejected (401) or when the
    UpdateService resource does not expose 'MultipartHttpPushUri'
    (iDRAC firmware too old for this feature).
    Relies on module globals: args, idrac_ip, verify_cert, idrac_username, idrac_password.
    """
    if args["x"]:
        response = requests.get('https://%s/redfish/v1/UpdateService' % idrac_ip, verify=verify_cert, headers={'X-Auth-Token': args["x"]})
    else:
        response = requests.get('https://%s/redfish/v1/UpdateService' % idrac_ip, verify=verify_cert, auth=(idrac_username, idrac_password))
    data = response.json()
    if response.status_code == 401:
        logging.warning("\n- WARNING, status code %s returned, check your iDRAC username/password is correct or iDRAC user has correct privileges to execute Redfish commands" % response.status_code)
        sys.exit(0)
    if 'MultipartHttpPushUri' not in data.keys():
        logging.warning("\n- WARNING, iDRAC version installed does not support this feature using Redfish API")
        sys.exit(0)
def get_FW_inventory():
    """Print the current firmware inventory: every updatable device and its version.

    Performs a GET on the expanded FirmwareInventory collection and pretty-prints
    each member. Exits on a non-200 response.
    Relies on module globals: args, idrac_ip, verify_cert, idrac_username, idrac_password.
    """
    logging.info("\n- INFO, getting current firmware inventory for iDRAC %s -\n" % idrac_ip)
    if args["x"]:
        response = requests.get('https://%s/redfish/v1/UpdateService/FirmwareInventory?$expand=*($levels=1)' % idrac_ip, verify=verify_cert, headers={'X-Auth-Token': args["x"]})
    else:
        response = requests.get('https://%s/redfish/v1/UpdateService/FirmwareInventory?$expand=*($levels=1)' % idrac_ip, verify=verify_cert, auth=(idrac_username, idrac_password))
    data = response.json()
    if response.status_code != 200:
        logging.error("\n- ERROR, GET request failed to get firmware inventory, error: \n%s" % data)
        sys.exit(0)
    # print each inventory entry; the unused 'installed_devices' accumulator
    # from the original implementation has been removed
    for fw_entry in data['Members']:
        pprint(fw_entry)
        print("\n")
def download_image_create_update_job(firmware_image_device):
    """Upload one DUP via multipart push and create its update job.

    iDRAC packages are not uploaded here: they are deferred by setting the
    module globals idrac_update_flag/idrac_dup_package so the iDRAC update
    (which reboots the iDRAC) runs last. For all other packages the job ID of
    the created update job is stored in the module global job_id.

    :param firmware_image_device: full path to the DUP file
    """
    global job_id
    global idrac_dup_package
    global idrac_update_flag
    if "idrac" in firmware_image_device.lower():
        logging.info("- INFO, iDRAC firmware package detected, this update will get applied at the end due to iDRAC reboot")
        idrac_update_flag = True
        idrac_dup_package = firmware_image_device
    else:
        logging.info("- INFO, uploading update package \"%s\" to create update job, this may take a few minutes depending on firmware image size" % firmware_image_device.split("\\")[-1])
        url = "https://%s/redfish/v1/UpdateService/MultipartUpload" % idrac_ip
        # OnReset: the update is staged and applied on the next server reboot
        payload = {"Targets": [], "@Redfish.OperationApplyTime": "OnReset", "Oem": {}}
        files = {
             'UpdateParameters': (None, json.dumps(payload), 'application/json'),
             'UpdateFile': (os.path.basename(firmware_image_device), open(firmware_image_device, 'rb'), 'application/octet-stream')
        }
        if args["x"]:
            headers = {'X-Auth-Token': args["x"]}
            response = requests.post(url, files=files, headers=headers, verify=verify_cert)
        else:
            response = requests.post(url, files=files, verify=verify_cert,auth=(idrac_username,idrac_password))
        if response.status_code != 202:
            data = response.json()
            logging.error("- FAIL, status code %s returned, detailed error: %s" % (response.status_code,data))
            sys.exit(0)
        try:
            # the created job/task URI is returned in the Location response header
            job_id = response.headers['Location'].split("/")[-1]
        except:
            logging.error("- FAIL, unable to locate job ID in header")
            sys.exit(0)
        logging.info("- PASS, update job ID %s successfully created for firmware package \"%s\"" % (job_id, firmware_image_device.split("\\")[-1]))
def idrac_update(firmware_image_device):
    """Upload the iDRAC DUP via multipart push and create its update job.

    Stores the created job ID in the module global idrac_update_job_id. Run
    last by the caller because the iDRAC reboots after applying this update.

    :param firmware_image_device: full path to the iDRAC DUP file
    """
    global idrac_update_job_id
    # message fixed: this function uploads the package (multipart push), it does
    # not download it - wording now matches download_image_create_update_job()
    logging.info("- INFO, uploading update package \"%s\" to create update job, this may take a few minutes depending on firmware image size" % firmware_image_device.split("\\")[-1])
    url = "https://%s/redfish/v1/UpdateService/MultipartUpload" % idrac_ip
    # OnReset apply time; for the iDRAC package the iDRAC itself restarts to apply
    payload = {"Targets": [], "@Redfish.OperationApplyTime": "OnReset", "Oem": {}}
    files = {
         'UpdateParameters': (None, json.dumps(payload), 'application/json'),
         'UpdateFile': (os.path.basename(firmware_image_device), open(firmware_image_device, 'rb'), 'application/octet-stream')
    }
    if args["x"]:
        headers = {'X-Auth-Token': args["x"]}
        response = requests.post(url, files=files, headers=headers, verify=verify_cert)
    else:
        response = requests.post(url, files=files, verify=verify_cert,auth=(idrac_username,idrac_password))
    if response.status_code != 202:
        data = response.json()
        logging.error("- FAIL, status code %s returned, detailed error: %s" % (response.status_code,data))
        sys.exit(0)
    try:
        # the created job/task URI is returned in the Location response header
        idrac_update_job_id = response.headers['Location'].split("/")[-1]
    except:
        logging.error("- FAIL, unable to locate job ID in header")
        sys.exit(0)
    logging.info("- PASS, update job ID %s successfully created for firmware package \"%s\"" % (idrac_update_job_id, firmware_image_device.split("\\")[-1]))
def check_job_status(download_job_id):
    """Poll an update task until it completes, is scheduled, fails or times out.

    Jobs that reach "scheduled" state (need a server reboot to apply) are
    appended to the module-global list update_jobs_need_server_reboot.
    Exits the script on failure, on 50 minutes elapsed, or after 10 failed
    GET attempts. check_idrac_connection() is defined elsewhere in this file.

    :param download_job_id: job/task ID returned from the multipart upload
    """
    retry_count = 1
    start_time = datetime.now()
    while True:
        check_idrac_connection()
        if retry_count == 10:
            logging.warning("- WARNING, GET command retry count of 10 has been reached, script will exit")
            sys.exit(0)
        try:
            if args["x"]:
                response = requests.get('https://%s/redfish/v1/TaskService/Tasks/%s' % (idrac_ip, download_job_id), verify=verify_cert, headers={'X-Auth-Token': args["x"]})
            else:
                response = requests.get('https://%s/redfish/v1/TaskService/Tasks/%s' % (idrac_ip, download_job_id), verify=verify_cert, auth=(idrac_username, idrac_password))
        except requests.ConnectionError as error_message:
            # transient network failure (e.g. iDRAC busy): wait and retry
            logging.info("- INFO, GET request failed due to connection error, retry")
            time.sleep(10)
            retry_count += 1
            continue
        data = response.json()
        current_time = str(datetime.now()-start_time)[0:7]
        message_string = data["Messages"]
        if response.status_code == 200 or response.status_code == 202:
            time.sleep(1)
        else:
            logging.error("\n- ERROR, GET request failed to get job ID details, status code %s returned, error: \n%s" % (response.status_code, data))
            sys.exit(0)
        if "fail" in data['Oem']['Dell']['Message'].lower() or "error" in data['Oem']['Dell']['Message'].lower() or "fail" in data['Oem']['Dell']['JobState'].lower():
            logging.error("- FAIL: Job ID %s failed, current message: %s" % (download_job_id, data['Oem']['Dell']['Message']))
            sys.exit(0)
        elif data["TaskState"] == "Completed" and data["Oem"]["Dell"]["JobState"] or data["TaskState"] == "Completed" or "completed successfully" in data['Oem']['Dell']['Message'].lower():
            logging.info("- PASS, job ID %s successfully marked completed" % download_job_id)
            time.sleep(15)
            break
        elif str(current_time)[0:7] >= "0:50:00":
            logging.error("\n- FAIL: Timeout of 50 minutes has been hit, update job should of already been marked completed. Check the iDRAC job queue and LC logs to debug the issue\n")
            sys.exit(0)
        elif "schedule" in data['Oem']['Dell']['Message'].lower():
            # staged update: remember the job so the caller can reboot once for all of them
            print("- PASS, job ID %s successfully marked as scheduled, server reboot needed to apply the update" % data["Id"])
            update_jobs_need_server_reboot.append(download_job_id)
            break
        else:
            logging.info("- INFO: %s job status: %s" % (download_job_id, message_string[0]["Message"].rstrip(".")))
            time.sleep(2)
            continue
def loop_check_final_job_status(reboot_update_job_id):
    """Poll a scheduled update job until it completes after the server reboot.

    Exits the script on job failure, on 50 minutes elapsed, or after 20 failed
    GET attempts. check_idrac_connection() is defined elsewhere in this file.

    :param reboot_update_job_id: ID of the scheduled job being monitored
    """
    start_time = datetime.now()
    retry_count = 1
    while True:
        if retry_count == 20:
            logging.warning("- WARNING, GET command retry count of 20 has been reached, script will exit")
            sys.exit(0)
        check_idrac_connection()
        try:
            if args["x"]:
                response = requests.get('https://%s/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/%s' % (idrac_ip, reboot_update_job_id), verify=verify_cert, headers={'X-Auth-Token': args["x"]})
            else:
                response = requests.get('https://%s/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/%s' % (idrac_ip, reboot_update_job_id), verify=verify_cert,auth=(idrac_username, idrac_password))
        except requests.ConnectionError as error_message:
            # transient network failure (e.g. server still rebooting): wait and retry
            logging.info("- INFO, GET request failed due to connection error, retry")
            time.sleep(10)
            retry_count += 1
            continue
        current_time = str((datetime.now()-start_time))[0:7]
        if response.status_code != 200:
            logging.error("\n- FAIL, GET command failed to check job status, return code %s" % response.status_code)
            logging.error("Extended Info Message: {0}".format(response.json()))
            sys.exit(0)
        data = response.json()
        if str(current_time)[0:7] >= "0:50:00":
            logging.error("\n- FAIL: Timeout of 50 minutes has been hit, script stopped\n")
            sys.exit(0)
        elif "fail" in data['Message'].lower() or "fail" in data['JobState'].lower():
            # BUGFIX: previously this message interpolated the unrelated module
            # global 'job_id' instead of the job actually being monitored
            logging.error("- FAIL: job ID %s failed, error results: \n%s" % (reboot_update_job_id, data['Message']))
            sys.exit(0)
        elif "completed successfully" in data['Message']:
            logging.info("- PASS, job ID %s successfully marked completed" % reboot_update_job_id)
            break
        else:
            logging.info("- INFO, %s job status not completed, current status: \"%s\"" % (reboot_update_job_id, data['Message'].rstrip(".")))
            time.sleep(60)
def reboot_server():
    """
    Power-cycle the server via the Redfish ComputerSystem.Reset action so
    that scheduled firmware update jobs get executed.

    If the server is ON: request a graceful shutdown, poll up to 5 minutes
    for the OFF state, escalate to a forced shutdown on timeout, then power
    the server back ON. If the server is already OFF: simply power it ON.

    Reads module-level globals: idrac_ip, idrac_username, idrac_password,
    verify_cert and the parsed ``args`` dict (``args["x"]`` holds an
    X-Auth-Token when session token auth is used). Exits the script on any
    unrecoverable HTTP failure.
    """
    logging.info("- INFO, rebooting the server now to apply firmware update(s)")
    # Read the current power state to decide between reboot and power-on.
    if args["x"]:
        response = requests.get('https://%s/redfish/v1/Systems/System.Embedded.1' % idrac_ip, verify=verify_cert, headers={'X-Auth-Token': args["x"]})
    else:
        response = requests.get('https://%s/redfish/v1/Systems/System.Embedded.1' % idrac_ip, verify=verify_cert,auth=(idrac_username, idrac_password))
    data = response.json()
    logging.info("- INFO, Current server power state: %s" % data['PowerState'])
    if data['PowerState'] == "On":
        # Server is running: try a graceful (OS-level) shutdown first.
        url = 'https://%s/redfish/v1/Systems/System.Embedded.1/Actions/ComputerSystem.Reset' % idrac_ip
        payload = {'ResetType': 'GracefulShutdown'}
        if args["x"]:
            headers = {'content-type': 'application/json', 'X-Auth-Token': args["x"]}
            response = requests.post(url, data=json.dumps(payload), headers=headers, verify=verify_cert)
        else:
            headers = {'content-type': 'application/json'}
            response = requests.post(url, data=json.dumps(payload), headers=headers, verify=verify_cert,auth=(idrac_username,idrac_password))
        if response.status_code == 204:
            logging.info("- PASS, POST command passed to gracefully power OFF server")
            logging.info("- INFO, script will now verify the server was able to perform a graceful shutdown. If the server was unable to perform a graceful shutdown, forced shutdown will be invoked in 5 minutes")
            time.sleep(15)
            start_time = datetime.now()
        else:
            logging.error("\n- FAIL, Command failed to gracefully power OFF server, status code is: %s\n" % response.status_code)
            logging.error("Extended Info Message: {0}".format(response.json()))
            sys.exit(0)
        # Poll the power state until OFF, escalating to a forced shutdown
        # once 5 minutes have elapsed.
        while True:
            if args["x"]:
                response = requests.get('https://%s/redfish/v1/Systems/System.Embedded.1' % idrac_ip, verify=verify_cert, headers={'X-Auth-Token': args["x"]})
            else:
                response = requests.get('https://%s/redfish/v1/Systems/System.Embedded.1' % idrac_ip, verify=verify_cert,auth=(idrac_username, idrac_password))
            data = response.json()
            # Elapsed time truncated to "H:MM:SS"; compared as a string below.
            current_time = str(datetime.now() - start_time)[0:7]
            if data['PowerState'] == "Off":
                logging.info("- PASS, GET command passed to verify graceful shutdown was successful and server is in OFF state")
                break
            elif current_time >= "0:05:00":
                # Graceful shutdown timed out -- force the power off.
                logging.info("- INFO, unable to perform graceful shutdown, server will now perform forced shutdown")
                payload = {'ResetType': 'ForceOff'}
                if args["x"]:
                    headers = {'content-type': 'application/json', 'X-Auth-Token': args["x"]}
                    response = requests.post(url, data=json.dumps(payload), headers=headers, verify=verify_cert)
                else:
                    headers = {'content-type': 'application/json'}
                    response = requests.post(url, data=json.dumps(payload), headers=headers, verify=verify_cert,auth=(idrac_username,idrac_password))
                if response.status_code == 204:
                    logging.info("- PASS, POST command passed to perform forced shutdown")
                    time.sleep(15)
                    if args["x"]:
                        response = requests.get('https://%s/redfish/v1/Systems/System.Embedded.1' % idrac_ip, verify=verify_cert, headers={'X-Auth-Token': args["x"]})
                    else:
                        response = requests.get('https://%s/redfish/v1/Systems/System.Embedded.1' % idrac_ip, verify=verify_cert,auth=(idrac_username, idrac_password))
                    data = response.json()
                    if data['PowerState'] == "Off":
                        logging.info("- PASS, GET command passed to verify forced shutdown was successful and server is in OFF state")
                        break
                    else:
                        logging.error("- FAIL, server not in OFF state, current power status is %s" % data['PowerState'])
                        sys.exit(0)
            else:
                # Still shutting down and under the 5 minute limit -- poll again.
                continue
        # Server confirmed OFF -- power it back ON so the updates apply.
        payload = {'ResetType': 'On'}
        if args["x"]:
            headers = {'content-type': 'application/json', 'X-Auth-Token': args["x"]}
            response = requests.post(url, data=json.dumps(payload), headers=headers, verify=verify_cert)
        else:
            headers = {'content-type': 'application/json'}
            response = requests.post(url, data=json.dumps(payload), headers=headers, verify=verify_cert,auth=(idrac_username,idrac_password))
        if response.status_code == 204:
            logging.info("- PASS, POST command passed to power ON server")
            time.sleep(15)
        else:
            logging.error("\n- FAIL, Command failed to power ON server, status code is: %s\n" % response.status_code)
            logging.error("Extended Info Message: {0}".format(response.json()))
            sys.exit(0)
    elif data['PowerState'] == "Off":
        # Server already OFF -- just power it ON.
        url = 'https://%s/redfish/v1/Systems/System.Embedded.1/Actions/ComputerSystem.Reset' % idrac_ip
        payload = {'ResetType': 'On'}
        if args["x"]:
            headers = {'content-type': 'application/json', 'X-Auth-Token': args["x"]}
            response = requests.post(url, data=json.dumps(payload), headers=headers, verify=verify_cert)
        else:
            headers = {'content-type': 'application/json'}
            response = requests.post(url, data=json.dumps(payload), headers=headers, verify=verify_cert,auth=(idrac_username,idrac_password))
        if response.status_code == 204:
            logging.info("- PASS, Command passed to power ON server, code return is %s" % response.status_code)
        else:
            logging.error("\n- FAIL, Command failed to power ON server, status code is: %s\n" % response.status_code)
            logging.error("Extended Info Message: {0}".format(response.json()))
            sys.exit(0)
    else:
        logging.error("- FAIL, unable to get current server power state to perform either reboot or power on")
        sys.exit(0)
def check_idrac_connection():
    """
    Verify network connectivity to the iDRAC, blocking until it responds.

    Pings the iDRAC IP using OS-specific ping syntax; if the ping fails,
    waits and keeps retrying until the ping succeeds, then confirms the
    Redfish HTTP interface answers by polling the current task. On an
    unsupported/unknown OS the connectivity check is skipped entirely.

    Reads module-level globals: ``idrac_ip``, ``idrac_username``,
    ``idrac_password``, ``verify_cert``, ``job_id`` and the parsed
    ``args`` dict.
    """
    run_network_connection_function = ""
    if platform.system().lower() == "windows":
        ping_command = "ping -n 3 %s" % idrac_ip
    elif platform.system().lower() == "linux":
        ping_command = "ping -c 3 %s" % idrac_ip
    else:
        logging.error("- FAIL, unable to determine OS type, check iDRAC connection function will not execute")
        run_network_connection_function = "fail"
    # Bug fix: the original unconditionally ran subprocess.call(ping_command, ...)
    # which raised NameError when the OS could not be determined, because
    # ping_command was never assigned. Skip the connectivity check instead.
    if run_network_connection_function == "fail":
        return
    execute_command = subprocess.call(ping_command, stdout=subprocess.PIPE, shell=True)
    if execute_command != 0:
        ping_status = "lost"
    else:
        ping_status = "good"
    if ping_status == "lost":
        logging.info("- INFO, iDRAC network connection lost due to slow network response, waiting 30 seconds to access iDRAC again")
        time.sleep(30)
        # Keep pinging until the iDRAC answers again.
        while True:
            execute_command = subprocess.call(ping_command, stdout=subprocess.PIPE, shell=True)
            if execute_command != 0:
                ping_status = "lost"
            else:
                ping_status = "good"
            if ping_status == "lost":
                logging.info("- INFO, unable to ping iDRAC IP, script will wait 30 seconds and try again")
                time.sleep(30)
                continue
            else:
                break
        # Ping is back -- confirm the Redfish HTTP interface also responds.
        while True:
            try:
                if args["x"]:
                    response = requests.get('https://%s/redfish/v1/TaskService/Tasks/%s' % (idrac_ip, job_id), verify=verify_cert, headers={'X-Auth-Token': args["x"]})
                else:
                    response = requests.get('https://%s/redfish/v1/TaskService/Tasks/%s' % (idrac_ip, job_id), verify=verify_cert, auth=(idrac_username, idrac_password))
            except requests.ConnectionError as error_message:
                logging.info("- INFO, GET request failed due to connection error, retry")
                time.sleep(10)
                continue
            break
if __name__ == "__main__":
    # Entry point: validate CLI arguments, resolve credentials / TLS settings,
    # then either dump the firmware inventory or apply every Dell Update
    # Package (DUP) found in the given directory.
    if args["script_examples"]:
        script_examples()
    if args["ip"] or args["ssl"] or args["u"] or args["p"] or args["x"]:
        idrac_ip = args["ip"]
        idrac_username = args["u"]
        if args["p"]:
            idrac_password = args["p"]
        # Prompt for the password when neither -p nor an X-Auth-Token was supplied.
        if not args["p"] and not args["x"] and args["u"]:
            idrac_password = getpass.getpass("\n- Argument -p not detected, pass in iDRAC user %s password: " % args["u"])
        # TLS certificate verification is enabled only for an explicit "true";
        # any other value (or no --ssl argument) disables it.
        if args["ssl"]:
            if args["ssl"].lower() == "true":
                verify_cert = True
            else:
                verify_cert = False
        else:
            verify_cert = False
        check_supported_idrac_version()
    else:
        logging.error("\n- FAIL, invalid argument values or not all required parameters passed in. See help text or argument --script-examples for more details.")
        sys.exit(0)
    if args["get"]:
        get_FW_inventory()
    elif args["location"]:
        update_jobs_need_server_reboot = []
        idrac_update_flag = False
        # Bug fix: use a raw string -- "\*" is an invalid escape sequence that
        # raises a SyntaxWarning on modern Python (slated to become an error).
        # NOTE(review): the backslash separator assumes a Windows-style path;
        # confirm whether Linux support is needed here.
        directory_dups = glob.glob(r"%s\*" % args["location"])
        for dup_path in directory_dups:
            download_image_create_update_job(dup_path)
            check_job_status(job_id)
        if update_jobs_need_server_reboot == []:
            logging.info("- INFO, no scheduled update jobs detected, server will not reboot")
        else:
            # Reboot once, then wait for every scheduled job to finish.
            reboot_server()
            for scheduled_job_id in update_jobs_need_server_reboot:
                loop_check_final_job_status(scheduled_job_id)
        if idrac_update_flag:
            logging.info("- INFO, iDRAC update detected, update will now get applied and once completed iDRAC will reboot")
            idrac_update(idrac_dup_package)
            check_job_status(idrac_update_job_id)
    else:
        logging.error("\n- FAIL, invalid argument values or not all required parameters passed in. See help text or argument --script-examples for more details.")
        sys.exit(0)
/1000pipClimber-1.tar.gz/1000pipClimber-1/README.md | <h1></h1>
<p>
<p></p><div class="separator" style="clear: both; text-align: center;"><a href="https://858d0aqdynn98w4ucl3agpav9m.hop.clickbank.net/?tid=pydownload" rel="nofollow" style="margin-left: 1em; margin-right: 1em;" target="_blank"><img border="0" data-original-height="66" data-original-width="372" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEg2_xrkLyf7pD0cFAe7B6aA4MPA5gI-Q4-OixeSQ10oz7vYlWLw1z8w-m8RnzChqAhtZttNWDpVnqyJKayuz47CFcCJzRXgAgtNKNXY3oij1iXLGVJOUDcENyjgcw6tCstE9hp7csPxXx47yJmo7dU91OrhZdCjRl-3xIqWTeKmsDY5ECyaun56gdpR/s16000/1000pip%20download.png" /></a></div><br /> <p></p><div class="separator" style="clear: both; text-align: center;"><a href="https://858d0aqdynn98w4ucl3agpav9m.hop.clickbank.net/?tid=pydownload" rel="nofollow" style="margin-left: 1em; margin-right: 1em;" target="_blank"><img border="0" data-original-height="391" data-original-width="1024" height="244" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEhLg81Wxod8kgdXOh19gbTstCP_94Ar2QVH01EULV18vOGopBuPKmq1aJxLJRa0pUCcxULM6oPa-6Y2gOuP3Ls_FDHzpzy4Gk9xmXBu992zJX3K7RZiAwuhUzw2xH1XmwYUw-HEnTh9GXoFtJoVMzshRpNkK5w-_5rdxU31W4umNefXnyqdxwVAD3C6/w640-h244/1000pip%20climber%20system%20download.png" width="640" /></a></div><br /><div><span style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; orphans: 2; text-decoration-thickness: initial; widows: 2;">The 1000pip Climber Forex System is a state of the art algorithm, designed to make it as easy as possible to succeed at Forex. The Forex system continuously analyses the FX market, looking for potentially high probability price movements. 
Once identified the software will notify you visually, audibly, and via email.</span><br style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; line-height: 1.45; orphans: 2; text-decoration-thickness: initial; widows: 2;" /><br style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; line-height: 1.45; orphans: 2; text-decoration-thickness: initial; widows: 2;" /><strong style="background-color: white; border: 0px; color: #555555; font-family: Lato; font-size: 18px; font-stretch: inherit; font-variant-east-asian: inherit; font-variant-ligatures: normal; font-variant-numeric: inherit; line-height: 1.45; margin: 0px; orphans: 2; padding: 0px; text-decoration-thickness: initial; vertical-align: baseline; widows: 2;">ALL key parameters are provided</strong><span style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; orphans: 2; text-decoration-thickness: initial; widows: 2;">; entry price, take profit and stop loss. The Forex system is easy to set up and is designed to be followed 100% mechanically – just try the Forex system and see the results. 
This Forex system really is the simplest way to follow the FX market.</span></div><div><span style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; orphans: 2; text-decoration-thickness: initial; widows: 2;"><br /></span></div><div><span style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; orphans: 2; text-decoration-thickness: initial; widows: 2;"><br /></span></div><div><span style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; orphans: 2; text-decoration-thickness: initial; widows: 2;"><br /></span></div><div><br /></div><div><span style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; orphans: 2; text-decoration-thickness: initial; widows: 2;"><br /></span></div>
<iframe allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen="" frameborder="0" height="315" src="https://www.youtube.com/embed/13VUj7r_IUU" title="YouTube video player" width="560"></iframe>
# 1000pip Climber System Download
```bash
pip3 1000pip Climber System Download | PypiClean |
/Hydrawiser-0.2.tar.gz/Hydrawiser-0.2/hydrawiser/core.py | import time
from hydrawiser.helpers import customer_details, status_schedule, set_zones
class Hydrawiser():
    """
    Client object for a Hydrawise irrigation controller account.

    :param user_token: User account API key
    :type user_token: string
    :returns: Hydrawiser object.
    :rtype: object
    """

    def __init__(self, user_token):
        self._user_token = user_token
        # Cached state pulled from the Hydrawise API; refreshed by
        # update_controller_info(). (The original assigned ``status``
        # twice -- the duplicate has been removed.)
        self.controller_info = []
        self.controller_status = []
        self.current_controller = []
        self.status = None
        self.controller_id = None
        self.customer_id = None
        self.num_relays = None
        self.relays = []
        self.name = None
        self.sensors = []
        self.running = None
        self.update_controller_info()

    def update_controller_info(self):
        """
        Pull controller details and schedule status from the API and
        refresh all cached attributes.

        :returns: True if successful, otherwise False.
        :rtype: boolean
        """
        # Read the controller information.
        self.controller_info = customer_details(self._user_token)
        self.controller_status = status_schedule(self._user_token)
        if self.controller_info is None or self.controller_status is None:
            return False
        # Only supports one controller right now.
        # Use the first one from the array.
        self.current_controller = self.controller_info['controllers'][0]
        self.status = self.current_controller['status']
        self.controller_id = self.current_controller['controller_id']
        self.customer_id = self.controller_info['customer_id']
        self.num_relays = len(self.controller_status['relays'])
        self.relays = self.controller_status['relays']
        self.name = self.controller_info['controllers'][0]['name']
        self.sensors = self.controller_status['sensors']
        try:
            self.running = self.controller_status['running']
        except KeyError:
            # 'running' is absent from the payload when no zone is active.
            self.running = None
        return True

    def controller(self):
        """
        Check if multiple controllers are connected.

        :returns: Return the controller_id of the active controller.
        :rtype: string
        :raises TypeError: if the account has more than one controller.
        :raises AttributeError: if no controller is assigned.
        """
        if hasattr(self, 'controller_id'):
            if len(self.controller_info['controllers']) > 1:
                raise TypeError(
                    'Only one controller per account is supported.'
                )
            return self.controller_id
        raise AttributeError('No controllers assigned to this account.')

    def __repr__(self):
        """
        Object representation.

        :returns: Object name
        :rtype: string
        """
        return "<{0}: {1}>".format(self.__class__.__name__,
                                   self.controller_id)

    def relay_info(self, relay, attribute=None):
        """
        Return information about a relay.

        :param relay: The relay being queried.
        :type relay: int
        :param attribute: The attribute being queried, or all attributes for
                          that relay if None is specified.
        :type attribute: string or None
        :returns: The attribute being queried or None if not found.
        :rtype: string or int
        """
        # Check if the relay number is valid.
        if (relay < 0) or (relay > (self.num_relays - 1)):
            # Invalid relay index specified.
            return None
        if attribute is None:
            # Return all the relay attributes.
            return self.relays[relay]
        try:
            return self.relays[relay][attribute]
        except KeyError:
            # Invalid key specified.
            return None

    def suspend_zone(self, days, zone=None):
        """
        Suspend or unsuspend a zone or all zones for an amount of time.

        :param days: Number of days to suspend the zone(s); 0 or negative
                     removes any existing suspension.
        :type days: int
        :param zone: The zone to suspend. If no zone is specified then suspend
                     all zones
        :type zone: int or None
        :returns: The response from set_zones() or None if there was an error.
        :rtype: None or string
        """
        if zone is None:
            zone_cmd = 'suspendall'
            relay_id = None
        else:
            if zone < 0 or zone > (len(self.relays) - 1):
                return None
            zone_cmd = 'suspend'
            relay_id = self.relays[zone]['relay_id']
        # If days is 0 then remove suspension
        if days <= 0:
            time_cmd = 0
        else:
            # 1 day = 60 * 60 * 24 seconds = 86400
            time_cmd = time.mktime(time.localtime()) + (days * 86400)
        return set_zones(self._user_token, zone_cmd, relay_id, time_cmd)

    def run_zone(self, minutes, zone=None):
        """
        Run or stop a zone or all zones for an amount of time.

        :param minutes: The number of minutes to run; 0 or negative stops
                        the zone(s) instead.
        :type minutes: int
        :param zone: The zone number to run. If no zone is specified then run
                     all zones.
        :type zone: int or None
        :returns: The response from set_zones() or None if there was an error.
        :rtype: None or string
        """
        if zone is None:
            zone_cmd = 'runall'
            relay_id = None
        else:
            if zone < 0 or zone > (len(self.relays) - 1):
                return None
            zone_cmd = 'run'
            relay_id = self.relays[zone]['relay_id']
        if minutes <= 0:
            time_cmd = 0
            if zone is None:
                zone_cmd = 'stopall'
            else:
                zone_cmd = 'stop'
        else:
            time_cmd = minutes * 60
        return set_zones(self._user_token, zone_cmd, relay_id, time_cmd)

    def list_running_zones(self):
        """
        Returns the currently active relay.

        :returns: Returns the running relay number or None if no relays are
                  active.
        :rtype: string
        """
        self.update_controller_info()
        if self.running is None or not self.running:
            return None
        return int(self.running[0]['relay'])

    def is_zone_running(self, zone):
        """
        Returns the state of the specified zone.

        :param zone: The zone to check.
        :type zone: int
        :returns: Returns True if the zone is currently running, otherwise
                  returns False if the zone is not running.
        :rtype: boolean
        """
        self.update_controller_info()
        if self.running is None or not self.running:
            return False
        if int(self.running[0]['relay']) == zone:
            return True
        return False

    def time_remaining(self, zone):
        """
        Returns the amount of watering time left in seconds.

        :param zone: The zone to check.
        :type zone: int
        :returns: If the zone is not running returns 0. If the zone doesn't
                  exist returns None. Otherwise returns number of seconds left
                  in the watering cycle.
        :rtype: None or seconds left in the waterting cycle.
        """
        self.update_controller_info()
        if zone < 0 or zone > (self.num_relays - 1):
            return None
        if self.is_zone_running(zone):
            return int(self.running[0]['time_left'])
        return 0
/MagsToolCommand-0.0.6.tar.gz/MagsToolCommand-0.0.6/README.md | # MagsMLTool
[]()
MagsMLTool is a Python Application that uses various libraries to process a PDF file.
The script converts the PDF to images, extracts frames from each image, and then applies object detection models to each frame to detect if the image is incorrectly oriented or contains explicit content.
Additionally, the script detects and crops any faces present in the images, and checks if they are clipped.
Finally, the results are written to an Excel file and HTML file.
## Features
- Detecting Explicit Images in Pdf files
- Detecting Wrong Direction Image in Pdf files
- Detecting Clipped Face Image in Pdf files
## Technical
MagsMLTool uses a number of open source projects to work properly:
The script imports the following libraries:
- os.path: provides a way of interacting with the file system
- ssl: provides access to SSL certificates
- sys: provides access to some variables used or maintained by the interpreter and to functions that interact strongly with the interpreter
- time: provides various time-related functions
- datetime: provides classes for working with dates and times
- random: provides functions for generating random numbers
- numpy: provides support for arrays and mathematical operations on arrays
- tensorflow: an open source platform for machine learning
- xlsxwriter: a Python module for creating Excel XLSX files
- PIL: a Python Imaging Library that adds image processing capabilities to Python interpreter
- pdf2image: a Python module that converts PDF pages to JPEG format
- six: a Python module that helps in writing code that is compatible with both Python 2 and Python 3
- retinaface: a Python module for face detection
- cv2: a Python module for computer vision
- torch: a Python module for machine learning
...
## Installation
MagsMLTool requires [Python](https://www.python.org/) v3.7 to run.
Create virtual environment python and active it.
```sh
cd magstoolnewversion
python3 -m venv venv
source venv/bin/activate
```
Install Libraries
```sh
cd magstoolnewversion
python3 -m pip install ultralytics
python3 -m pip install -r requirements.txt
python3 -m pip install retina-face
cd yolov5
python3 -m pip install -r requirements.txt
```
## Run
```sh
cd magstoolnewversion
python3 BatchCheck.py [Put your PDF folder here]
```
## Docker
- https://hub.docker.com/r/hien240891/magstool
| PypiClean |
/Foundations-2.1.0.tar.gz/Foundations-2.1.0/docs/help/Foundations_Manual.rst | Foundations - Manual - Help File
================================
.. raw:: html
<br/>
Table Of Content
=================
.. .tocTree
- `Introduction`_
- `Installation`_
- `Usage`_
- `Api`_
- `Changes`_
- `About`_
.. raw:: html
<br/>
.. .introduction
_`Introduction`
===============
**Foundations** is the core package of `Oncilla <http://github.com/KelSolaar/Oncilla>`_, `Color <http://github.com/KelSolaar/Color>`_, `Manager <http://github.com/KelSolaar/Manager>`_, `Umbra <http://github.com/KelSolaar/Umbra>`_, `sIBL_GUI <http://github.com/KelSolaar/sIBL_GUI>`_, `sIBL_Reporter <http://github.com/KelSolaar/sIBL_Reporter>`_. It provides modules defining various utilities objects used in those packages.
.. raw:: html
<br/>
.. .installation
_`Installation`
===============
The following dependencies are needed:
- **Python 2.6.7** or **Python 2.7.3**: http://www.python.org/
- **PyQt**: http://www.riverbankcomputing.co.uk/
To install **Foundations** from the `Python Package Index <http://pypi.python.org/pypi/Foundations>`_ you can issue this command in a shell::
pip install Foundations
or this alternative command::
easy_install Foundations
You can also directly install from `Github <http://github.com/KelSolaar/Foundations>`_ source repository::
git clone git://github.com/KelSolaar/Foundations.git
cd Foundations
python setup.py install
If you want to build the documentation you will also need:
- **Tidy** http://tidy.sourceforge.net/
.. raw:: html
<br/>
.. .usage
_`Usage`
========
Given the large spectrum of the objects defined in **Foundations** package, please refer to `Foundations - Api <http://thomasmansencal.com/Sharing/Foundations/Support/Documentation/Api/index.html>`_ for precise usage examples about each modules. Here are listed a few non exhaustive usage examples.
- **foundations.dataStructures.Structure**:
.. code:: python
>>> person = Structure(firstName="Doe", lastName="John", gender="male")
>>> person.firstName
'Doe'
>>> person.keys()
['gender', 'firstName', 'lastName']
>>> person["gender"]
'male'
>>> del(person["gender"])
>>> person["gender"]
Traceback (most recent call last):
File "<console>", line 1, in <module>
KeyError: 'gender'
>>> person.gender
Traceback (most recent call last):
File "<console>", line 1, in <module>
AttributeError: 'Structure' object has no attribute 'gender'
- **foundations.dataStructures.Lookup**:
.. code:: python
>>> person = Lookup(firstName="Doe", lastName="John", gender="male")
>>> person.getFirstKeyFromValue("Doe")
'firstName'
>>> persons = foundations.foundations.dataStructures.Lookup(John="Doe", Jane="Doe", Luke="Skywalker")
>>> persons.getKeysFromValue("Doe")
['Jane', 'John']
- **foundations.environment.Environment**:
.. code:: python
>>> environment = Environment(JOHN="DOE", DOE="JOHN")
>>> environment.setValues()
True
>>> import os
>>> os.environ["JOHN"]
'DOE'
>>> os.environ["DOE"]
'JOHN'
- **foundations.strings.getNiceName**:
.. code:: python
>>> getNiceName("getMeANiceName")
'Get Me A Nice Name'
>>> getNiceName("__getMeANiceName")
'__Get Me A Nice Name'
- **foundations.strings.getSplitextBasename**:
.. code:: python
>>> getSplitextBasename("/Users/JohnDoe/Documents/Test.txt")
'Test'
- **foundations.strings.getCommonPathsAncestor**:
.. code:: python
>>> getCommonPathsAncestor("/Users/JohnDoe/Documents", "/Users/JohnDoe/Documents/Test.txt")
'/Users/JohnDoe/Documents'
- **foundations.walkers.filesWalker**:
.. code:: python
>>> for file in filesWalker("./foundations/tests/testsFoundations/resources/standard/level_0"):
... print(file)
...
./foundations/tests/testsFoundations/resources/standard/level_0/level_1/level_2/standard.sIBLT
./foundations/tests/testsFoundations/resources/standard/level_0/level_1/loremIpsum.txt
./foundations/tests/testsFoundations/resources/standard/level_0/level_1/standard.rc
./foundations/tests/testsFoundations/resources/standard/level_0/standard.ibl
>>> for file in filesWalker("./foundations/tests/testsFoundations/resources/standard/level_0", ("\.sIBLT",)):
... print(file)
...
./foundations/tests/testsFoundations/resources/standard/level_0/level_1/level_2/standard.sIBLT
.. raw:: html
<br/>
.. .api
_`Api`
======
**Foundations** Api documentation is available here: `Foundations - Api <http://thomasmansencal.com/Sharing/Foundations/Support/Documentation/Api/index.html>`_
.. raw:: html
<br/>
.. .changes
_`Changes`
==========
**Foundations** Changes file is available here: `Foundations - Changes <http://thomasmansencal.com/Sharing/Foundations/Changes/Changes.html>`_
.. raw:: html
<br/>
.. .about
_`About`
========
| **Foundations** by Thomas Mansencal - 2008 - 2014
| Copyright © 2008 - 2014 - Thomas Mansencal - `thomas.mansencal@gmail.com <mailto:thomas.mansencal@gmail.com>`_
| This software is released under terms of GNU GPL V3 license: http://www.gnu.org/licenses/
| http://www.thomasmansencal.com/ | PypiClean |
/ConcurrentDatabase-0.0.4.tar.gz/ConcurrentDatabase-0.0.4/README.md | [](https://github.com/JayFromProgramming/ConcurrentDatabase/actions/workflows/python-package.yml)
[](https://github.com/JayFromProgramming/ConcurrentDatabase/actions/workflows/python-publish.yml)
# ConcurrentDatabase
A simple SQL wrapper that makes working with a database object-oriented
## Installation
```bash
pip install ConcurrentDatabase
```
## Database Initialization
```python
from ConcurrentDatabase.Database import Database
db = Database("test.db")
table = db.create_table("example_table", {
"id": "INTEGER PRIMARY KEY",
"name": "TEXT",
"location": "TEXT"
}, primary_keys=["id"])
```
## Inserting Data
```python
table = db.get_table("example_table")
table.add(name="Jay", location="USA")
table.add(name="John", location="USA")
```
## Updating Data
```python
table = db.get_table("example_table")
row = table.get_row(name="Jay")
row["name"] = "JayFromProgramming" # Changes are saved in memory until you call row.flush()
row.flush()
# or
row.set(name="JayFromProgramming") # Flushes immediately
```
## Deleting Data
```python
table = db.get_table("example_table")
row = table.get_row(name="Jay")
row.delete()
# or
table.delete(name="Jay")
``` | PypiClean |
/Newcalls-0.0.1-cp37-cp37m-win_amd64.whl/newcalls/node_modules/balanced-match/README.md | # balanced-match
Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well!
[](http://travis-ci.org/juliangruber/balanced-match)
[](https://www.npmjs.org/package/balanced-match)
[](https://ci.testling.com/juliangruber/balanced-match)
## Example
Get the first matching pair of braces:
```js
var balanced = require('balanced-match');
console.log(balanced('{', '}', 'pre{in{nested}}post'));
console.log(balanced('{', '}', 'pre{first}between{second}post'));
console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post'));
```
The matches are:
```bash
$ node example.js
{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' }
{ start: 3,
end: 9,
pre: 'pre',
body: 'first',
post: 'between{second}post' }
{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' }
```
## API
### var m = balanced(a, b, str)
For the first non-nested matching pair of `a` and `b` in `str`, return an
object with those keys:
* **start** the index of the first match of `a`
* **end** the index of the matching `b`
* **pre** the preamble, `a` and `b` not included
* **body** the match, `a` and `b` not included
* **post** the postscript, `a` and `b` not included
If there's no match, `undefined` will be returned.
If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`.
### var r = balanced.range(a, b, str)
For the first non-nested matching pair of `a` and `b` in `str`, return an
array with indexes: `[ <a index>, <b index> ]`.
If there's no match, `undefined` will be returned.
If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`.
## Installation
With [npm](https://npmjs.org) do:
```bash
npm install balanced-match
```
## Security contact information
To report a security vulnerability, please use the
[Tidelift security contact](https://tidelift.com/security).
Tidelift will coordinate the fix and disclosure.
## License
(MIT)
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| PypiClean |
/Fabric-with-working-dependencies-1.0.1.tar.gz/Fabric-with-working-dependencies-1.0.1/docs/tutorial.rst | =====================
Overview and Tutorial
=====================
Welcome to Fabric!
This document is a whirlwind tour of Fabric's features and a quick guide to its
use. Additional documentation (which is linked to throughout) can be found in
the :ref:`usage documentation <usage-docs>` -- please make sure to check it out.
What is Fabric?
===============
As the ``README`` says:
.. include:: ../README
:end-before: It provides
More specifically, Fabric is:
* A tool that lets you execute **arbitrary Python functions** via the **command
line**;
* A library of subroutines (built on top of a lower-level library) to make
executing shell commands over SSH **easy** and **Pythonic**.
Naturally, most users combine these two things, using Fabric to write and
execute Python functions, or **tasks**, to automate interactions with remote
servers. Let's take a look.
Hello, ``fab``
==============
This wouldn't be a proper tutorial without "the usual"::
def hello():
print("Hello world!")
Placed in a Python module file named ``fabfile.py``, that function can be
executed with the ``fab`` tool (installed as part of Fabric) and does just what
you'd expect::
$ fab hello
Hello world!
Done.
That's all there is to it. This functionality allows Fabric to be used as a
(very) basic build tool even without importing any of its API.
.. note::
The ``fab`` tool simply imports your fabfile and executes the function or
functions you instruct it to. There's nothing magic about it -- anything
you can do in a normal Python script can be done in a fabfile!
.. seealso:: :ref:`execution-strategy`, :ref:`tasks-and-imports`, :doc:`usage/fab`
Task arguments
==============
It's often useful to pass runtime parameters into your tasks, just as you might
during regular Python programming. Fabric has basic support for this using a
shell-compatible notation: ``<task name>:<arg>,<kwarg>=<value>,...``. It's
contrived, but let's extend the above example to say hello to you personally::
def hello(name="world"):
print("Hello %s!" % name)
By default, calling ``fab hello`` will still behave as it did before; but now
we can personalize it::
$ fab hello:name=Jeff
Hello Jeff!
Done.
Those already used to programming in Python might have guessed that this
invocation behaves exactly the same way::
$ fab hello:Jeff
Hello Jeff!
Done.
For the time being, your argument values will always show up in Python as
strings and may require a bit of string manipulation for complex types such
as lists. Future versions may add a typecasting system to make this easier.
.. seealso:: :ref:`task-arguments`
Local commands
==============
As used above, ``fab`` only really saves a couple lines of
``if __name__ == "__main__"`` boilerplate. It's mostly designed for use with
Fabric's API, which contains functions (or **operations**) for executing shell
commands, transferring files, and so forth.
Let's build a hypothetical Web application fabfile. Fabfiles usually work best
at the root of a project::
.
|-- __init__.py
|-- app.wsgi
|-- fabfile.py <-- our fabfile!
|-- manage.py
`-- my_app
|-- __init__.py
|-- models.py
|-- templates
| `-- index.html
|-- tests.py
|-- urls.py
`-- views.py
.. note::
We're using a Django application here, but only as an example -- Fabric is
not tied to any external codebase, save for its SSH library.
For starters, perhaps we want to run our tests and commit to our VCS so we're
ready for a deploy::
from fabric.api import local
def prepare_deploy():
local("./manage.py test my_app")
local("git add -p && git commit")
The output of which might look a bit like this::
$ fab prepare_deploy
[localhost] run: ./manage.py test my_app
Creating test database...
Creating tables
Creating indexes
..........................................
----------------------------------------------------------------------
Ran 42 tests in 9.138s
OK
Destroying test database...
[localhost] run: git add -p && git commit
<interactive Git add / git commit edit message session>
Done.
The code itself is straightforward: import a Fabric API function,
`~fabric.operations.local`, and use it to run and interact with local shell
commands. The rest of Fabric's API is similar -- it's all just Python.
.. seealso:: :doc:`api/core/operations`, :ref:`fabfile-discovery`
Organize it your way
====================
Because Fabric is "just Python" you're free to organize your fabfile any way
you want. For example, it's often useful to start splitting things up into
subtasks::
from fabric.api import local
def test():
local("./manage.py test my_app")
def commit():
local("git add -p && git commit")
def prepare_deploy():
test()
commit()
The ``prepare_deploy`` task can be called just as before, but now you can make
a more granular call to one of the sub-tasks, if desired.
Failure
=======
Our base case works fine now, but what happens if our tests fail? Chances are
we want to put on the brakes and fix them before deploying.
Fabric checks the return value of programs called via operations and will abort
if they didn't exit cleanly. Let's see what happens if one of our tests
encounters an error::
$ fab prepare_deploy
[localhost] run: ./manage.py test my_app
Creating test database...
Creating tables
Creating indexes
.............E............................
======================================================================
ERROR: testSomething (my_project.my_app.tests.MainTests)
----------------------------------------------------------------------
Traceback (most recent call last):
[...]
----------------------------------------------------------------------
Ran 42 tests in 9.138s
FAILED (errors=1)
Destroying test database...
Fatal error: local() encountered an error (return code 2) while executing './manage.py test my_app'
Aborting.
Great! We didn't have to do anything ourselves: Fabric detected the failure and
aborted, never running the ``commit`` task.
.. seealso:: :ref:`Failure handling (usage documentation) <failures>`
Failure handling
----------------
But what if we wanted to be flexible and give the user a choice? A setting
(or **environment variable**, usually shortened to **env var**) called
:ref:`warn_only` lets you turn aborts into warnings, allowing flexible error
handling to occur.
Let's flip this setting on for our ``test`` function, and then inspect the
result of the `~fabric.operations.local` call ourselves::
from __future__ import with_statement
from fabric.api import local, settings, abort
from fabric.contrib.console import confirm
def test():
with settings(warn_only=True):
result = local('./manage.py test my_app', capture=True)
if result.failed and not confirm("Tests failed. Continue anyway?"):
abort("Aborting at user request.")
[...]
In adding this new feature we've introduced a number of new things:
* The ``__future__`` import required to use ``with:`` in Python 2.5;
* Fabric's `contrib.console <fabric.contrib.console>` submodule, containing the
`~fabric.contrib.console.confirm` function, used for simple yes/no prompts;
* The `~fabric.context_managers.settings` context manager, used to apply
settings to a specific block of code;
* Command-running operations like `~fabric.operations.local` can return objects
containing info about their result (such as ``.failed``, or
``.return_code``);
* And the `~fabric.utils.abort` function, used to manually abort execution.
However, despite the additional complexity, it's still pretty easy to follow,
and is now much more flexible.
.. seealso:: :doc:`api/core/context_managers`, :ref:`env-vars`
Making connections
==================
Let's start wrapping up our fabfile by putting in the keystone: a ``deploy``
task that ensures the code on our server is up to date::
def deploy():
code_dir = '/srv/django/myproject'
with cd(code_dir):
run("git pull")
run("touch app.wsgi")
Here again, we introduce a handful of new concepts:
* Fabric is just Python -- so we can make liberal use of regular Python code
constructs such as variables and string interpolation;
* `~fabric.context_managers.cd`, an easy way of prefixing commands with a
``cd /to/some/directory`` call.
* `~fabric.operations.run`, which is similar to `~fabric.operations.local` but
runs remotely instead of locally.
We also need to make sure we import the new functions at the top of our file::
from __future__ import with_statement
from fabric.api import local, settings, abort, run, cd
from fabric.contrib.console import confirm
With these changes in place, let's deploy::
$ fab deploy
No hosts found. Please specify (single) host string for connection: my_server
[my_server] run: git pull
[my_server] out: Already up-to-date.
[my_server] out:
[my_server] run: touch app.wsgi
Done.
We never specified any connection info in our fabfile, so Fabric prompted us at
runtime. Connection definitions use SSH-like "host strings" (e.g.
``user@host:port``) and will use your local username as a default -- so in this
example, we just had to specify the hostname, ``my_server``.
Remote interactivity
--------------------
``git pull`` works fine if you've already got a checkout of your source code --
but what if this is the first deploy? It'd be nice to handle that case too and
do the initial ``git clone``::
def deploy():
code_dir = '/srv/django/myproject'
with settings(warn_only=True):
if run("test -d %s" % code_dir).failed:
run("git clone user@vcshost:/path/to/repo/.git %s" % code_dir)
with cd(code_dir):
run("git pull")
run("touch app.wsgi")
As with our calls to `~fabric.operations.local` above, `~fabric.operations.run`
also lets us construct clean Python-level logic based on executed shell
commands. However, the interesting part here is the ``git clone`` call: since
we're using Git's SSH method of accessing the repository on our Git server,
this means our remote `~fabric.operations.run` call will need to authenticate
itself.
Older versions of Fabric (and similar high level SSH libraries) run remote
programs in limbo, unable to be touched from the local end. This is
problematic when you have a serious need to enter passwords or otherwise
interact with the remote program.
Fabric 1.0 and later breaks down this wall and ensures you can always talk to
the other side. Let's see what happens when we run our updated ``deploy`` task
on a new server with no Git checkout::
$ fab deploy
No hosts found. Please specify (single) host string for connection: my_server
[my_server] run: test -d /srv/django/myproject
Warning: run() encountered an error (return code 1) while executing 'test -d /srv/django/myproject'
[my_server] run: git clone user@vcshost:/path/to/repo/.git /srv/django/myproject
[my_server] out: Cloning into /srv/django/myproject...
[my_server] out: Password: <enter password>
[my_server] out: remote: Counting objects: 6698, done.
[my_server] out: remote: Compressing objects: 100% (2237/2237), done.
[my_server] out: remote: Total 6698 (delta 4633), reused 6414 (delta 4412)
[my_server] out: Receiving objects: 100% (6698/6698), 1.28 MiB, done.
[my_server] out: Resolving deltas: 100% (4633/4633), done.
[my_server] out:
[my_server] run: git pull
[my_server] out: Already up-to-date.
[my_server] out:
[my_server] run: touch app.wsgi
Done.
Notice the ``Password:`` prompt -- that was our remote ``git`` call on our Web server, asking for the password to the Git server. We were able to type it in and the clone continued normally.
.. seealso:: :doc:`/usage/interactivity`
.. _defining-connections:
Defining connections beforehand
-------------------------------
Specifying connection info at runtime gets old real fast, so Fabric provides a
handful of ways to do it in your fabfile or on the command line. We won't cover
all of them here, but we will show you the most common one: setting the global
host list, :ref:`env.hosts <hosts>`.
:doc:`env <usage/env>` is a global dictionary-like object driving many of
Fabric's settings, and can be written to with attributes as well (in fact,
`~fabric.context_managers.settings`, seen above, is simply a wrapper for this.)
Thus, we can modify it at module level near the top of our fabfile like so::
from __future__ import with_statement
from fabric.api import *
from fabric.contrib.console import confirm
env.hosts = ['my_server']
def test():
do_test_stuff()
When ``fab`` loads up our fabfile, our modification of ``env`` will execute,
storing our settings change. The end result is exactly as above: our ``deploy``
task will run against the ``my_server`` server.
This is also how you can tell Fabric to run on multiple remote systems at once:
because ``env.hosts`` is a list, ``fab`` iterates over it, calling the given
task once for each connection.
.. seealso:: :doc:`usage/env`, :ref:`host-lists`
Conclusion
==========
Our completed fabfile is still pretty short, as such things go. Here it is in
its entirety::
from __future__ import with_statement
from fabric.api import *
from fabric.contrib.console import confirm
env.hosts = ['my_server']
def test():
with settings(warn_only=True):
result = local('./manage.py test my_app', capture=True)
if result.failed and not confirm("Tests failed. Continue anyway?"):
abort("Aborting at user request.")
def pack():
local('tar czf /tmp/my_project.tgz .')
def prepare_deploy():
test()
pack()
def deploy():
put('/tmp/my_project.tgz', '/tmp/')
with cd('/srv/django/my_project/'):
run('tar xzf /tmp/my_project.tgz')
run('touch app.wsgi')
This fabfile makes use of a large portion of Fabric's feature set:
* defining fabfile tasks and running them with :doc:`fab <usage/fab>`;
* calling local shell commands with `~fabric.operations.local`;
* modifying env vars with `~fabric.context_managers.settings`;
* handling command failures, prompting the user, and manually aborting;
* and defining host lists and `~fabric.operations.run`-ning remote commands.
However, there's still a lot more we haven't covered here! Please make sure you
follow the various "see also" links, and check out the documentation table of
contents on :ref:`the main index page <documentation-index>`.
Thanks for reading!
| PypiClean |
/Clique-blockchain-0.2.3.zip/Clique-blockchain-0.2.3/clique/app/_contract_example.py | import json
from uuid import uuid4
import requests
from clique.blockchain import BlockChain, Block, Identity
from clique import keystore
def contract_example(args):
    """Walk through a record-label contract scenario on a block chain.

    Creates identities for the label Ipecac and several artists, uploads
    their keys to the key store, then exercises the BlockChain API:
    adding contract blocks, serialize/deserialize round trips, validation
    against the genesis hash, ack/deny responses to offers and, when
    ``args.server`` is set, uploading every block to a block server.

    :param args: parsed CLI arguments; only ``args.server`` (base URL of a
        block server, or falsy to skip the upload step) is used here.
    """
    # One signing identity per participant.
    ipecac = Identity("label:Ipecac", Identity.generateKey())
    patton = Identity("artist:Mike Patton", Identity.generateKey())
    melvins = Identity("artist:Melvins", Identity.generateKey())
    fantômas = Identity("artist:Fantômas", Identity.generateKey())
    buzzo = Identity("artist:King Buzzo", Identity.generateKey())
    unsane = Identity("artist:Unsane", Identity.generateKey())
    fnm = Identity("artist:Faith No More", Identity.generateKey())
    # Publish every public key so other parties can verify signatures.
    for k in [i.key for i in [ipecac, patton, melvins, fantômas, buzzo,
                              unsane, fnm]]:
        keystore().upload(k)
    c = BlockChain(ipecac)
    # Genesis ("GOD") block anchoring the whole chain.
    godblock = c.addBlock(ipecac,
                          sub="Ipecac recording artists: " + str(uuid4()),
                          tid="GOD")
    godblock.verify(ipecac.key)
    contract = c.addBlock(patton, thing="contract", blahblah="....")
    contract.verify(patton.key)
    # Multiple signers
    c.addBlock(fantômas, thing="contract", blahblah="....")
    # XXX: Multi signing not yet supported
    '''
    fantômas_contract.sign(patton.key)
    fantômas_contract.sign(melvins.key)
    fantômas_contract.sign(buzzo.key)
    '''
    print(c)
    GHASH = godblock.hash
    ######################################################
    CONTRACT_BLOCK_CHAIN = c.serialize()
    print(CONTRACT_BLOCK_CHAIN)
    ipecac_contracts = BlockChain.deserialize(CONTRACT_BLOCK_CHAIN)
    ipecac_contracts.addBlock(buzzo, thing="contract", blahblah="....")
    ipecac_contracts.addBlock(melvins, thing="contract", blahblah="....")
    NEW_CHAIN = ipecac_contracts.serialize()
    for new_block in ipecac_contracts[-2:]:
        # upload to block server, for example
        pass
    ######################################################
    download = NEW_CHAIN
    melvins_crew = BlockChain.deserialize(download)
    melvins_crew.validate(GHASH)
    print(melvins_crew)
    # += instead of addBlock, antecedents are computed as with addBlock
    melvins_crew += Block(ipecac, None, ack=True,
                          ptk="FIXME: get fprint from block being acked")
    melvins_crew += Block(ipecac, None, ack=True,
                          ptk="FIXME: get fprint from block being acked")
    print(melvins_crew)
    CONTRACT_BLOCK_CHAIN = melvins_crew.serialize()
    master = BlockChain.deserialize(CONTRACT_BLOCK_CHAIN)
    # Offers that the named artists must answer with ack/deny blocks.
    master.addBlock(ipecac, thing="contract:offer", new_signing="Unsane",
                    blahblah="....")
    master.addBlock(ipecac, thing="contract:offer", new_signing="Faith No More",
                    blahblah="....")
    CONTRACT_BLOCK_CHAIN = master.serialize()
    ######################################################
    download = CONTRACT_BLOCK_CHAIN
    fnm_offer = BlockChain.deserialize(download)
    print(fnm_offer)
    fnm_offer.validate(GHASH)
    # Faith No More declines the offer.
    fnm_offer.addBlock(fnm, ack=False)
    deny_upload = fnm_offer.serialize()
    #####################################################
    download = CONTRACT_BLOCK_CHAIN
    unsane_offer = BlockChain.deserialize(download)
    print(unsane_offer)
    unsane_offer.validate(GHASH)
    # Unsane accepts the offer.
    unsane_offer.addBlock(unsane, ack=True)
    accept_upload = unsane_offer.serialize()
    ######################################################
    yes_from_unsane = BlockChain.deserialize(accept_upload)
    yes_from_unsane.validate(GHASH)
    no_from_ftm = BlockChain.deserialize(deny_upload)
    # NOTE(review): yes_from_unsane is validated twice here -- presumably
    # this second call was meant to be no_from_ftm.validate(GHASH); confirm.
    yes_from_unsane.validate(GHASH)
    # XXX: at this point there is a merge op
    print(yes_from_unsane)
    print(no_from_ftm)
    with open("sample.json", "w") as fp:
        fp.write(CONTRACT_BLOCK_CHAIN)
    if args.server:
        h = {"content-type": "application/jose"}
        new_chain = BlockChain.deserialize(CONTRACT_BLOCK_CHAIN)
        for block in new_chain:
            print("UPLOADING:", block)
            resp = requests.post(args.server + "/blocks", headers=h,
                                 data=block.serialize(),
                                 timeout=5)
            if resp.status_code != 201:
                print(resp)
                raise requests.RequestException(response=resp)
        resp = requests.get(args.server + "/chains/" +
                            new_chain[0].payload["sub"])
        downloaded_chain = BlockChain.deserialize(json.dumps(resp.json()))
        downloaded_chain.validate(new_chain[0].hash)
/DMT_core-2.0.0-py3-none-any.whl/DMT/external/os.py | # DMT_core
# Copyright (C) from 2022 SemiMod
# Copyright (C) until 2021 Markus Müller, Mario Krattenmacher and Pascal Kuthe
# <https://gitlab.com/dmt-development/dmt-core>
#
# This file is part of DMT_core.
#
# DMT_core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DMT_core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
import os
import shutil
import re
from pathlib import Path
## MARKUS CONTEXT MANAGER AWESOMENESS ###############
# A "cd" helper that supports Python's context-manager ("with") protocol.
class cd:
    """Context manager that temporarily switches the working directory.

    On entry the current directory is remembered and the process moves to
    ``newPath`` (with ``~`` expanded); on exit the original directory is
    restored, even if the body raised.

    Usage::

        with cd(some_dir):
            ...  # runs inside some_dir
    """

    def __init__(self, newPath):
        # Resolve "~" immediately; the actual chdir happens in __enter__.
        self.newPath = os.path.expanduser(newPath)
        self.savedPath = ""

    def __enter__(self):
        self.savedPath = os.getcwd()
        os.chdir(self.newPath)

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Always hop back, regardless of whether an exception occurred.
        os.chdir(self.savedPath)
def recursive_copy(src, dst, force=False):
    """Recursively copy the contents of ``src`` into ``dst``.

    Parameters
    ----------
    src : str or os.Pathlike
        Path to the source folder.
    dst : str or os.Pathlike
        Path to the destination folder (must already exist).
    force : bool, optional
        When True, existing files are overwritten; otherwise already
        existing files are kept. Applies at every nesting level.

    Raises
    ------
    ValueError
        If a directory entry is neither a regular file nor a directory.
    """
    if not isinstance(src, Path):
        src = Path(src)
    if not isinstance(dst, Path):
        dst = Path(dst)
    for item_src in src.iterdir():
        item_dst = dst / item_src.name
        if item_src.is_file():
            if not item_dst.exists() or force:
                shutil.copy(item_src, dst)
        elif item_src.is_dir():
            item_dst.mkdir(exist_ok=True)
            # Bug fix: propagate ``force`` so overwriting also happens in
            # nested directories (the flag was silently dropped before).
            recursive_copy(item_src, item_dst, force=force)
        else:
            raise ValueError("DMT->recursive_copy: I do not know this file type.")
def rmtree(root):
    """Recursively delete the directory ``root``.

    Missing directories are ignored. Equivalent in effect to
    ``shutil.rmtree`` but implemented on top of ``pathlib`` only.

    Parameters
    ----------
    root : str or os.Pathlike
        Directory to remove.
    """
    root = root if isinstance(root, Path) else Path(root)
    if not root.exists():
        return  # nothing to delete
    for child in root.iterdir():
        if child.is_dir():
            rmtree(child)
        else:
            child.unlink()
    root.rmdir()
def slugify(s: str) -> str:
    """Turn *s* into a string that is safe to use as a filename.

    Surrounding whitespace is trimmed, spaces become underscores, ``.``
    becomes ``_dot_`` and ``@`` becomes ``_at_``; every remaining character
    that is not a word character, hyphen or dot is dropped.

    Based on:
    https://stackoverflow.com/questions/295135/turn-a-string-into-a-valid-filename
    """
    cleaned = str(s).strip()
    for old, new in ((" ", "_"), (".", "_dot_"), ("@", "_at_")):
        cleaned = cleaned.replace(old, new)
    return re.sub(r"(?u)[^-\w.]", "", cleaned)
/ChadBot2-0.1.tar.gz/ChadBot2-0.1/ChadBot/generation/rajat_work/qgen/generator/eda.py |
import random
import re
from random import shuffle
from nltk.corpus import wordnet
from tqdm import tqdm
from .base import BaseGenerator
random.seed(42)
# English stop words that are never replaced/considered during synonym
# replacement: these high-frequency function words carry little meaning, so
# augmenting them would only add noise.  The trailing '' entry also filters
# out empty tokens.
STOP_WORDS = ['i', 'me', 'my', 'myself', 'we', 'our',
              'ours', 'ourselves', 'you', 'your', 'yours',
              'yourself', 'yourselves', 'he', 'him', 'his',
              'himself', 'she', 'her', 'hers', 'herself',
              'it', 'its', 'itself', 'they', 'them', 'their',
              'theirs', 'themselves', 'what', 'which', 'who',
              'whom', 'this', 'that', 'these', 'those', 'am',
              'is', 'are', 'was', 'were', 'be', 'been', 'being',
              'have', 'has', 'had', 'having', 'do', 'does', 'did',
              'doing', 'a', 'an', 'the', 'and', 'but', 'if', 'or',
              'because', 'as', 'until', 'while', 'of', 'at',
              'by', 'for', 'with', 'about', 'against', 'between',
              'into', 'through', 'during', 'before', 'after',
              'above', 'below', 'to', 'from', 'up', 'down', 'in',
              'out', 'on', 'off', 'over', 'under', 'again',
              'further', 'then', 'once', 'here', 'there', 'when',
              'where', 'why', 'how', 'all', 'any', 'both', 'each',
              'few', 'more', 'most', 'other', 'some', 'such', 'no',
              'nor', 'not', 'only', 'own', 'same', 'so', 'than', 'too',
              'very', 's', 't', 'can', 'will', 'just', 'don',
              'should', 'now', '']
class EDAGenerator(BaseGenerator):
    """Generate question variants via Easy Data Augmentation Techniques.

    Reference: https://arxiv.org/abs/1901.11196 -- synonym replacement,
    random insertion, random swap and random deletion.
    """

    def __init__(self, alpha_sr=0.1, alpha_ri=0.1, alpha_rs=0.1, p_rd=0.1, num_aug=9):
        """
        :param alpha_sr: ratio of words to be replaced by synonyms
        :param alpha_ri: ratio of words to be inserted
        :param alpha_rs: ratio of words to be swapped
        :param p_rd: probability that a word will be deleted
        :param num_aug: number of augmentations
        """
        super().__init__("Easy Data Augmentation Techniques")
        self.alpha_sr = alpha_sr
        self.alpha_ri = alpha_ri
        self.alpha_rs = alpha_rs
        self.p_rd = p_rd
        self.num_aug = num_aug

    @staticmethod
    def _get_only_chars(line):
        """Lower-case *line* and keep only ascii letters and single spaces."""
        clean_line = ""
        line = line.replace("’", "")
        line = line.replace("'", "")
        line = line.replace("-", " ")  # replace hyphens with spaces
        line = line.replace("\t", " ")
        line = line.replace("\n", " ")
        line = line.lower()
        for char in line:
            if char in 'qwertyuiopasdfghjklzxcvbnm ':
                clean_line += char
            else:
                clean_line += ' '
        clean_line = re.sub(' +', ' ', clean_line)  # delete extra spaces
        # Robustness fix: clean_line may be empty (symbol-only input);
        # indexing clean_line[0] used to raise IndexError in that case.
        if clean_line.startswith(' '):
            clean_line = clean_line[1:]
        return clean_line

    @staticmethod
    def _get_synonyms(word):
        """Return WordNet synonyms of *word*, ascii-filtered, excluding *word* itself."""
        synonyms = set()
        for syn in wordnet.synsets(word):
            for l in syn.lemmas():
                synonym = l.name().replace("_", " ").replace("-", " ").lower()
                synonym = "".join([char for char in synonym if char in ' qwertyuiopasdfghjklzxcvbnm'])
                synonyms.add(synonym)
        if word in synonyms:
            synonyms.remove(word)
        return list(synonyms)

    @staticmethod
    def _synonym_replacement(words, n):
        """Replace up to *n* non-stop-words in the sentence with synonyms from WordNet."""
        new_words = words.copy()
        random_word_list = list(set([word for word in words if word not in STOP_WORDS]))
        random.shuffle(random_word_list)
        num_replaced = 0
        for random_word in random_word_list:
            synonyms = EDAGenerator._get_synonyms(random_word)
            if len(synonyms) >= 1:
                synonym = random.choice(list(synonyms))
                new_words = [synonym if word == random_word else word for word in new_words]
                num_replaced += 1
            if num_replaced >= n:  # only replace up to n words
                break
        # this is stupid but we need it, trust me
        sentence = ' '.join(new_words)
        new_words = sentence.split(' ')
        return new_words

    @staticmethod
    def _random_deletion(words, p):
        """Randomly delete words from the sentence with probability *p*."""
        # obviously, if there's only one word, don't delete it
        if len(words) == 1:
            return words
        # randomly delete words with probability p
        new_words = []
        for word in words:
            r = random.uniform(0, 1)
            if r > p:
                new_words.append(word)
        # if you end up deleting all words, just return a random word
        if len(new_words) == 0:
            rand_int = random.randint(0, len(words) - 1)
            return [words[rand_int]]
        return new_words

    @staticmethod
    def _swap_word(new_words):
        """Swap two random positions in *new_words*; give up after 3 index collisions."""
        random_idx_1 = random.randint(0, len(new_words) - 1)
        random_idx_2 = random_idx_1
        counter = 0
        while random_idx_2 == random_idx_1:
            random_idx_2 = random.randint(0, len(new_words) - 1)
            counter += 1
            if counter > 3:
                return new_words
        new_words[random_idx_1], new_words[random_idx_2] = new_words[random_idx_2], new_words[random_idx_1]
        return new_words

    @staticmethod
    def _random_swap(words, n):
        """Randomly swap two words in the sentence *n* times."""
        new_words = words.copy()
        for _ in range(n):
            new_words = EDAGenerator._swap_word(new_words)
        return new_words

    @staticmethod
    def _add_word(new_words):
        """Insert a synonym of a random word at a random position (in place)."""
        synonyms = []
        counter = 0
        while len(synonyms) < 1:
            random_word = new_words[random.randint(0, len(new_words) - 1)]
            synonyms = EDAGenerator._get_synonyms(random_word)
            counter += 1
            if counter >= 10:
                return
        random_synonym = synonyms[0]
        random_idx = random.randint(0, len(new_words) - 1)
        new_words.insert(random_idx, random_synonym)

    @staticmethod
    def _random_insertion(words, n):
        """Randomly insert *n* synonym words into the sentence."""
        new_words = words.copy()
        for _ in range(n):
            EDAGenerator._add_word(new_words)
        return new_words

    def generate(self, sentence):
        """Return a list of augmented variants of *sentence* (cleaned original appended last)."""
        sentence = self._get_only_chars(sentence)
        words = sentence.split(' ')
        # Bug fix: use equality, not identity ('is not'), to drop empty
        # tokens -- identity tests against str literals rely on interning
        # and raise a SyntaxWarning on CPython >= 3.8.
        words = [word for word in words if word != '']
        num_words = len(words)
        augmented_sentences = []
        num_new_per_technique = int(self.num_aug / 4) + 1
        n_sr = max(1, int(self.alpha_sr * num_words))
        n_ri = max(1, int(self.alpha_ri * num_words))
        n_rs = max(1, int(self.alpha_rs * num_words))
        # sr
        for _ in range(num_new_per_technique):
            a_words = self._synonym_replacement(words, n_sr)
            augmented_sentences.append(' '.join(a_words))
        # ri
        for _ in range(num_new_per_technique):
            a_words = self._random_insertion(words, n_ri)
            augmented_sentences.append(' '.join(a_words))
        # rs
        for _ in range(num_new_per_technique):
            a_words = self._random_swap(words, n_rs)
            augmented_sentences.append(' '.join(a_words))
        # rd
        for _ in range(num_new_per_technique):
            a_words = self._random_deletion(words, self.p_rd)
            augmented_sentences.append(' '.join(a_words))
        augmented_sentences = [self._get_only_chars(sentence) for sentence in augmented_sentences]
        shuffle(augmented_sentences)
        # trim so that we have the desired number of augmented sentences
        if self.num_aug >= 1:
            augmented_sentences = augmented_sentences[:self.num_aug]
        else:
            keep_prob = self.num_aug / len(augmented_sentences)
            augmented_sentences = [s for s in augmented_sentences if random.uniform(0, 1) < keep_prob]
        # append the original sentence
        augmented_sentences.append(sentence)
        return augmented_sentences

    def batch_generate(self, sentences):
        """Map each input sentence to its generate() output; returns a dict."""
        results = dict()
        for sentence in tqdm(sentences):
            results[sentence] = self.generate(sentence)
        return results
/Gribble-1.0.0.tar.gz/Gribble-1.0.0/gribble/transports/http_transport.py | import traceback
import time
import requests
from gribble.transports.base_transport import BaseTransport
from gribble.transports.exception import TransportException
class HttpTransport(BaseTransport):
    """Transport that ships log lines to an HTTP endpoint via POST requests.

    The destination URL comes from the ``http_url`` key of the gribble
    configuration; the endpoint is probed (with retries) during start-up.
    """

    def __init__(self, gribble_config, logger=None):
        super(HttpTransport, self).__init__(gribble_config, logger=logger)
        self._url = gribble_config.get('http_url')
        self._logger.info('Initializing with url of: {0}'.format(self._url))
        self._is_valid = False
        self._connect()

    def _connect(self):
        """Validate the configured URL, retrying with linear back-off.

        Sleeps ``attempt`` seconds before each try and gives up after 20
        attempts. Returns True once the endpoint answers with a success
        status, False if every attempt fails.
        """
        wait = -1
        while True:
            wait += 1
            time.sleep(wait)
            if wait == 20:
                self._logger.error('Giving up validating url connection after {0} attempts'.format(wait))
                return False
            if wait > 0:
                self._logger.info('Retrying connection, attempt {0}'.format(wait + 1))
            try:
                # Check that the url answers with a success status. Fixes:
                # previously any response (including 404/500) was accepted,
                # and the request had no timeout so it could hang forever.
                self._logger.info('connect: {0}'.format(self._url))
                r = requests.get(self._url, timeout=10)
                r.raise_for_status()
            except Exception as e:
                self._logger.error('Exception caught validating url connection: ' + str(e))
            else:
                self._logger.info('Connection validated')
                self._is_valid = True
                return True

    def reconnect(self):
        """Re-run connection validation against the configured URL."""
        self._connect()

    def invalidate(self):
        """Invalidates the current transport"""
        super(HttpTransport, self).invalidate()
        return False

    def callback(self, filename, lines, **kwargs):
        """POST each formatted line from *filename* to the configured URL.

        Non-2xx responses are logged; any exception aborts the remaining
        lines of this call but is swallowed after logging (best effort).
        """
        timestamp = self.get_timestamp(**kwargs)
        if kwargs.get('timestamp', False):
            del kwargs['timestamp']
        try:
            for line in lines:
                # Escape any tab in the message field, assuming json payload.
                jsonline = self.format(filename, line, timestamp, **kwargs)
                edata = jsonline.replace('\t', '\\t')
                self._logger.debug('writing to : {0}'.format(self._url))
                self._logger.debug('writing data: {0}'.format(edata))
                # Timeout added so one stuck request cannot stall the
                # transport indefinitely.
                r = requests.post(url=self._url, data=edata, timeout=30)
                if not (200 <= r.status_code < 300):
                    self._logger.error('Post returned non 2xx http status: {0}/{1}'.format(r.status_code, r.reason))
        except Exception as e:
            self._logger.error('Exception caught in urlopen connection: ' + str(e))
/Flask-Celery-py3-0.2.4.tar.gz/Flask-Celery-py3-0.2.4/README.rst | =============================
Flask Celery 3.0+ Integration
=============================
.. image:: https://img.shields.io/pypi/v/Flask-Celery-py3.svg
:target: https://pypi.python.org/pypi/Flask-Celery-py3/
.. image:: https://img.shields.io/pypi/dm/Flask-Celery-py3.svg
:target: https://pypi.python.org/pypi/Flask-Celery-py3/
.. image:: https://img.shields.io/pypi/l/Flask-Celery-py3.svg
:target: https://pypi.python.org/pypi/Flask-Celery-py3
.. image:: https://img.shields.io/pypi/pyversions/Flask-Celery-py3.svg
:target: https://pypi.python.org/pypi/Flask-Celery-py3/
.. image:: https://img.shields.io/pypi/status/Flask-Celery-py3.svg
:target: https://pypi.python.org/pypi/Flask-Celery-py3/
Celery: http://celeryproject.org
Using Flask-Celery
==================
You can easily add Celery to your flask application like this:
``app.py``::
from flask_celery import Celery
celery = Celery()
def create_app():
app = Flask(__name__)
celery.init_app(app)
return app
@celery.task
def add(x, y):
return x + y
To start the worker you can then launch the ``celery worker`` command
by pointing to your ``celery`` app instance::
$ celery -A app:celery worker -l info
If your Flask application has a more complex setup, refer to the example at https://github.com/taogeT/flask-celery .
| PypiClean |
/Nuitka_fixed-1.1.2-cp310-cp310-win_amd64.whl/nuitka/build/inline_copy/lib/scons-4.4.0/SCons/Executor.py | import collections
import SCons.Errors
import SCons.Memoize
import SCons.Util
from SCons.compat import NoSlotsPyPy
import SCons.Debug
from SCons.Debug import logInstanceCreation
class Batch:
    """Remembers the exact association between the targets and sources of
    one executor invocation."""

    __slots__ = ('targets',
                 'sources')

    def __init__(self, targets=None, sources=None):
        # Bug fix: the old defaults (targets=[], sources=[]) were stored
        # directly, so every default-constructed Batch shared the same two
        # module-level lists -- mutating one batch leaked into all others.
        self.targets = targets if targets is not None else []
        self.sources = sources if sources is not None else []
class TSList(collections.UserList):
"""A class that implements $TARGETS or $SOURCES expansions by wrapping
an executor Method. This class is used in the Executor.lvars()
to delay creation of NodeList objects until they're needed.
Note that we subclass collections.UserList purely so that the
is_Sequence() function will identify an object of this class as
a list during variable expansion. We're not really using any
collections.UserList methods in practice.
"""
def __init__(self, func):
self.func = func
def __getattr__(self, attr):
nl = self.func()
return getattr(nl, attr)
def __getitem__(self, i):
nl = self.func()
return nl[i]
def __str__(self):
nl = self.func()
return str(nl)
def __repr__(self):
nl = self.func()
return repr(nl)
class TSObject:
    """Lazy $TARGET/$SOURCE expansion: wraps an executor method.

    Attribute access is forwarded to whatever object ``func`` returns;
    the string forms collapse to '' when the wrapped method yields a
    false value (e.g. there is no target yet).
    """

    def __init__(self, func):
        self.func = func

    def __getattr__(self, attr):
        return getattr(self.func(), attr)

    def __str__(self):
        n = self.func()
        return str(n) if n else ''

    def __repr__(self):
        n = self.func()
        return repr(n) if n else ''
def rfile(node):
    """Return ``node.rfile()`` when the node provides it, else the node itself.

    Objects without an ``rfile`` method (e.g. Value nodes) pass through
    unchanged.
    """
    if not hasattr(node, 'rfile'):
        return node
    return node.rfile()
def execute_nothing(obj, target, kw):
    # No-op execution strategy: report success (0) without running any
    # actions.  Selected through _do_execute_map when execution is disabled.
    return 0
def execute_action_list(obj, target, kw):
    """Actually execute the action list.

    Runs every action of the executor *obj* in order using the executor's
    build environment and keyword arguments.  A BuildError result is tied
    to this executor and re-raised; any other truthy status is wrapped in
    a new BuildError.  Returns the status of the last action (0 if none).
    """
    env = obj.get_build_env()
    kw = obj.get_kw(kw)
    status = 0
    for act in obj.get_action_list():
        # NOTE(review): targets/sources are passed as empty lists here, so
        # the action presumably obtains them from the executor itself --
        # confirm against the Action call interface before relying on this.
        args = ([], [], env)
        status = act(*args, **kw)
        if isinstance(status, SCons.Errors.BuildError):
            # Attach ourselves so error reporting can name the executor.
            status.executor = obj
            raise status # TODO pylint E0702: raising int not allowed
        elif status:
            msg = "Error %s" % status
            raise SCons.Errors.BuildError(
                errstr=msg,
                node=obj.batches[0].targets,
                executor=obj,
                action=act)
    return status
# Dispatch table for Executor._do_execute: 0 disables execution entirely,
# 1 runs the real action list.
_do_execute_map = {0 : execute_nothing,
                   1 : execute_action_list}
def execute_actions_str(obj):
    """Render the executor's whole action list as one newline-joined string.

    Each action contributes its genstring() form, evaluated against the
    executor's full target/source lists and build environment.
    """
    env = obj.get_build_env()
    parts = []
    for action in obj.get_action_list():
        parts.append(action.genstring(obj.get_all_targets(),
                                      obj.get_all_sources(),
                                      env))
    return "\n".join(parts)
def execute_null_str(obj):
    # String form of the "execute nothing" strategy: there is nothing to
    # show, so return the empty string.
    return ''
# Dispatch table for Executor._execute_str: 0 maps to the empty-string
# renderer, 1 to the real action-list renderer.
_execute_str_map = {0 : execute_null_str,
                    1 : execute_actions_str}
class Executor(object, metaclass=NoSlotsPyPy):
    """A class for controlling instances of executing an action.

    This largely exists to hold a single association of an action,
    environment, list of environment override dictionaries, targets
    and sources for later processing as needed.
    """

    # Fixed attribute set (no per-instance __dict__); the NoSlotsPyPy
    # metaclass drops the slots declaration on PyPy, where it is not
    # beneficial.
    __slots__ = ('pre_actions',
                 'post_actions',
                 'env',
                 'overridelist',
                 'batches',
                 'builder_kw',
                 '_memo',
                 'lvars',
                 '_changed_sources_list',
                 '_changed_targets_list',
                 '_unchanged_sources_list',
                 '_unchanged_targets_list',
                 'action_list',
                 '_do_execute',
                 '_execute_str')
def __init__(self, action, env=None, overridelist=[{}],
targets=[], sources=[], builder_kw={}):
if SCons.Debug.track_instances: logInstanceCreation(self, 'Executor.Executor')
self.set_action_list(action)
self.pre_actions = []
self.post_actions = []
self.env = env
self.overridelist = overridelist
if targets or sources:
self.batches = [Batch(targets[:], sources[:])]
else:
self.batches = []
self.builder_kw = builder_kw
self._do_execute = 1
self._execute_str = 1
self._memo = {}
def get_lvars(self):
try:
return self.lvars
except AttributeError:
self.lvars = {
'CHANGED_SOURCES' : TSList(self._get_changed_sources),
'CHANGED_TARGETS' : TSList(self._get_changed_targets),
'SOURCE' : TSObject(self._get_source),
'SOURCES' : TSList(self._get_sources),
'TARGET' : TSObject(self._get_target),
'TARGETS' : TSList(self._get_targets),
'UNCHANGED_SOURCES' : TSList(self._get_unchanged_sources),
'UNCHANGED_TARGETS' : TSList(self._get_unchanged_targets),
}
return self.lvars
def _get_changes(self):
cs = []
ct = []
us = []
ut = []
for b in self.batches:
# don't add targets marked always build to unchanged lists
# add to changed list as they always need to build
if not b.targets[0].always_build and b.targets[0].is_up_to_date():
us.extend(list(map(rfile, b.sources)))
ut.extend(b.targets)
else:
cs.extend(list(map(rfile, b.sources)))
ct.extend(b.targets)
self._changed_sources_list = SCons.Util.NodeList(cs)
self._changed_targets_list = SCons.Util.NodeList(ct)
self._unchanged_sources_list = SCons.Util.NodeList(us)
self._unchanged_targets_list = SCons.Util.NodeList(ut)
def _get_changed_sources(self, *args, **kw):
try:
return self._changed_sources_list
except AttributeError:
self._get_changes()
return self._changed_sources_list
def _get_changed_targets(self, *args, **kw):
try:
return self._changed_targets_list
except AttributeError:
self._get_changes()
return self._changed_targets_list
def _get_source(self, *args, **kw):
return rfile(self.batches[0].sources[0]).get_subst_proxy()
def _get_sources(self, *args, **kw):
return SCons.Util.NodeList([rfile(n).get_subst_proxy() for n in self.get_all_sources()])
def _get_target(self, *args, **kw):
return self.batches[0].targets[0].get_subst_proxy()
def _get_targets(self, *args, **kw):
return SCons.Util.NodeList([n.get_subst_proxy() for n in self.get_all_targets()])
def _get_unchanged_sources(self, *args, **kw):
try:
return self._unchanged_sources_list
except AttributeError:
self._get_changes()
return self._unchanged_sources_list
def _get_unchanged_targets(self, *args, **kw):
try:
return self._unchanged_targets_list
except AttributeError:
self._get_changes()
return self._unchanged_targets_list
def get_action_targets(self):
if not self.action_list:
return []
targets_string = self.action_list[0].get_targets(self.env, self)
if targets_string[0] == '$':
targets_string = targets_string[1:]
return self.get_lvars()[targets_string]
def set_action_list(self, action):
if not SCons.Util.is_List(action):
if not action:
raise SCons.Errors.UserError("Executor must have an action.")
action = [action]
self.action_list = action
def get_action_list(self):
if self.action_list is None:
return []
return self.pre_actions + self.action_list + self.post_actions
def get_all_targets(self):
"""Returns all targets for all batches of this Executor."""
result = []
for batch in self.batches:
result.extend(batch.targets)
return result
def get_all_sources(self):
"""Returns all sources for all batches of this Executor."""
result = []
for batch in self.batches:
result.extend(batch.sources)
return result
def get_all_children(self):
"""Returns all unique children (dependencies) for all batches
of this Executor.
The Taskmaster can recognize when it's already evaluated a
Node, so we don't have to make this list unique for its intended
canonical use case, but we expect there to be a lot of redundancy
(long lists of batched .cc files #including the same .h files
over and over), so removing the duplicates once up front should
save the Taskmaster a lot of work.
"""
result = SCons.Util.UniqueList([])
for target in self.get_all_targets():
result.extend(target.children())
return result
def get_all_prerequisites(self):
"""Returns all unique (order-only) prerequisites for all batches
of this Executor.
"""
result = SCons.Util.UniqueList([])
for target in self.get_all_targets():
if target.prerequisites is not None:
result.extend(target.prerequisites)
return result
def get_action_side_effects(self):
"""Returns all side effects for all batches of this
Executor used by the underlying Action.
"""
result = SCons.Util.UniqueList([])
for target in self.get_action_targets():
result.extend(target.side_effects)
return result
@SCons.Memoize.CountMethodCall
def get_build_env(self):
"""Fetch or create the appropriate build Environment
for this Executor.
"""
try:
return self._memo['get_build_env']
except KeyError:
pass
# Create the build environment instance with appropriate
# overrides. These get evaluated against the current
# environment's construction variables so that users can
# add to existing values by referencing the variable in
# the expansion.
overrides = {}
for odict in self.overridelist:
overrides.update(odict)
import SCons.Defaults
env = self.env or SCons.Defaults.DefaultEnvironment()
build_env = env.Override(overrides)
self._memo['get_build_env'] = build_env
return build_env
def get_build_scanner_path(self, scanner):
"""Fetch the scanner path for this executor's targets and sources.
"""
env = self.get_build_env()
try:
cwd = self.batches[0].targets[0].cwd
except (IndexError, AttributeError):
cwd = None
return scanner.path(env, cwd,
self.get_all_targets(),
self.get_all_sources())
def get_kw(self, kw={}):
result = self.builder_kw.copy()
result.update(kw)
result['executor'] = self
return result
# use extra indirection because with new-style objects (Python 2.2
# and above) we can't override special methods, and nullify() needs
# to be able to do this.
def __call__(self, target, **kw):
return _do_execute_map[self._do_execute](self, target, kw)
def cleanup(self):
self._memo = {}
def add_sources(self, sources):
"""Add source files to this Executor's list. This is necessary
for "multi" Builders that can be called repeatedly to build up
a source file list for a given target."""
# TODO(batch): extend to multiple batches
assert (len(self.batches) == 1)
# TODO(batch): remove duplicates?
sources = [x for x in sources if x not in self.batches[0].sources]
self.batches[0].sources.extend(sources)
def get_sources(self):
return self.batches[0].sources
def add_batch(self, targets, sources):
"""Add pair of associated target and source to this Executor's list.
This is necessary for "batch" Builders that can be called repeatedly
to build up a list of matching target and source files that will be
used in order to update multiple target files at once from multiple
corresponding source files, for tools like MSVC that support it."""
self.batches.append(Batch(targets, sources))
def prepare(self):
"""
Preparatory checks for whether this Executor can go ahead
and (try to) build its targets.
"""
for s in self.get_all_sources():
if s.missing():
msg = "Source `%s' not found, needed by target `%s'."
raise SCons.Errors.StopError(msg % (s, self.batches[0].targets[0]))
def add_pre_action(self, action):
self.pre_actions.append(action)
def add_post_action(self, action):
self.post_actions.append(action)
# another extra indirection for new-style objects and nullify...
def __str__(self):
return _execute_str_map[self._execute_str](self)
def nullify(self):
self.cleanup()
self._do_execute = 0
self._execute_str = 0
@SCons.Memoize.CountMethodCall
def get_contents(self):
"""Fetch the signature contents. This is the main reason this
class exists, so we can compute this once and cache it regardless
of how many target or source Nodes there are.
Returns bytes
"""
try:
return self._memo['get_contents']
except KeyError:
pass
env = self.get_build_env()
action_list = self.get_action_list()
all_targets = self.get_all_targets()
all_sources = self.get_all_sources()
result = bytearray("",'utf-8').join([action.get_contents(all_targets,
all_sources,
env)
for action in action_list])
self._memo['get_contents'] = result
return result
def get_timestamp(self):
"""Fetch a time stamp for this Executor. We don't have one, of
course (only files do), but this is the interface used by the
timestamp module.
"""
return 0
def scan_targets(self, scanner):
# TODO(batch): scan by batches
self.scan(scanner, self.get_all_targets())
def scan_sources(self, scanner):
# TODO(batch): scan by batches
if self.batches[0].sources:
self.scan(scanner, self.get_all_sources())
def scan(self, scanner, node_list):
"""Scan a list of this Executor's files (targets or sources) for
implicit dependencies and update all of the targets with them.
This essentially short-circuits an N*M scan of the sources for
each individual target, which is a hell of a lot more efficient.
"""
env = self.get_build_env()
path = self.get_build_scanner_path
kw = self.get_kw()
# TODO(batch): scan by batches)
deps = []
for node in node_list:
node.disambiguate()
deps.extend(node.get_implicit_deps(env, scanner, path, kw))
deps.extend(self.get_implicit_deps())
for tgt in self.get_all_targets():
tgt.add_to_implicit(deps)
def _get_unignored_sources_key(self, node, ignore=()):
return (node,) + tuple(ignore)
@SCons.Memoize.CountDictCall(_get_unignored_sources_key)
def get_unignored_sources(self, node, ignore=()):
key = (node,) + tuple(ignore)
try:
memo_dict = self._memo['get_unignored_sources']
except KeyError:
memo_dict = {}
self._memo['get_unignored_sources'] = memo_dict
else:
try:
return memo_dict[key]
except KeyError:
pass
if node:
# TODO: better way to do this (it's a linear search,
# but it may not be critical path)?
sourcelist = []
for b in self.batches:
if node in b.targets:
sourcelist = b.sources
break
else:
sourcelist = self.get_all_sources()
if ignore:
idict = {}
for i in ignore:
idict[i] = 1
sourcelist = [s for s in sourcelist if s not in idict]
memo_dict[key] = sourcelist
return sourcelist
def get_implicit_deps(self):
"""Return the executor's implicit dependencies, i.e. the nodes of
the commands to be executed."""
result = []
build_env = self.get_build_env()
for act in self.get_action_list():
deps = act.get_implicit_deps(self.get_all_targets(),
self.get_all_sources(),
build_env)
result.extend(deps)
return result
# Module-level registry of batched Executors, keyed by the builder's
# batch key so repeated builder calls can share one Executor.
_batch_executors = {}
def GetBatchExecutor(key):
    # Raises KeyError if no executor was registered under key.
    return _batch_executors[key]
def AddBatchExecutor(key, executor):
    # Each batch key may only ever be registered once.
    assert key not in _batch_executors
    _batch_executors[key] = executor
# Cached singleton instance handed out by get_NullEnvironment().
nullenv = None
class NullEnvironment(SCons.Util.Null):
    # A do-nothing stand-in for a construction Environment; the only
    # method with real behavior is get_CacheDir().
    import SCons.CacheDir
    _CacheDir_path = None
    _CacheDir = SCons.CacheDir.CacheDir(None)
    def get_CacheDir(self):
        return self._CacheDir
def get_NullEnvironment():
    """Use singleton pattern for Null Environments."""
    global nullenv
    if nullenv is None:
        nullenv = NullEnvironment()
    return nullenv
class Null(object, metaclass=NoSlotsPyPy):
    """A null Executor, with a null build Environment, that does
    nothing when the rest of the methods call it.
    This might be able to disappear when we refactor things to
    disassociate Builders from Nodes entirely, so we're not
    going to worry about unit tests for this--at least for now.
    """
    # Same slot layout as Executor so _morph() can swap __class__.
    __slots__ = ('pre_actions',
                 'post_actions',
                 'env',
                 'overridelist',
                 'batches',
                 'builder_kw',
                 '_memo',
                 'lvars',
                 '_changed_sources_list',
                 '_changed_targets_list',
                 '_unchanged_sources_list',
                 '_unchanged_targets_list',
                 'action_list',
                 '_do_execute',
                 '_execute_str')
    def __init__(self, *args, **kw):
        # Only the 'targets' keyword is honored; sources are always empty.
        if SCons.Debug.track_instances:
            logInstanceCreation(self, 'Executor.Null')
        self.batches = [Batch(kw['targets'][:], [])]
    def get_build_env(self):
        return get_NullEnvironment()
    def get_build_scanner_path(self):
        return None
    def cleanup(self):
        pass
    def prepare(self):
        pass
    def get_unignored_sources(self, *args, **kw):
        return tuple(())
    def get_action_targets(self):
        return []
    def get_action_list(self):
        return []
    def get_all_targets(self):
        return self.batches[0].targets
    def get_all_sources(self):
        return self.batches[0].targets[0].sources
    def get_all_children(self):
        return self.batches[0].targets[0].children()
    def get_all_prerequisites(self):
        return []
    def get_action_side_effects(self):
        return []
    def __call__(self, *args, **kw):
        return 0
    def get_contents(self):
        # NOTE(review): returns a str, while Executor.get_contents
        # documents bytes -- confirm callers tolerate the mismatch.
        return ''
    def _morph(self):
        """Morph this Null executor to a real Executor object."""
        # Swap the class in place, re-run Executor.__init__ with an empty
        # action list, and restore the batches we already carry.
        batches = self.batches
        self.__class__ = Executor
        self.__init__([])
        self.batches = batches
    # The following methods require morphing this Null Executor to a
    # real Executor object.
    def add_pre_action(self, action):
        self._morph()
        self.add_pre_action(action)
    def add_post_action(self, action):
        self._morph()
        self.add_post_action(action)
    def set_action_list(self, action):
        self._morph()
        self.set_action_list(action)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: | PypiClean |
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dijit/form/_ExpandingTextAreaMixin.js | define("dijit/form/_ExpandingTextAreaMixin",["dojo/_base/declare","dojo/dom-construct","dojo/_base/lang","dojo/_base/window"],function(_1,_2,_3,_4){
// dijit/form/_ExpandingTextAreaMixin (compressed Dojo build output):
// auto-grows/shrinks a textarea to fit its content.  The obfuscated
// names (_1.._e) come from the Dojo build; the code is kept verbatim.
// _5: lazily-detected browser quirk flag -- true when a hidden probe
// textarea reports scrollHeight >= clientHeight (computed once).
var _5;
return _1("dijit.form._ExpandingTextAreaMixin",null,{_setValueAttr:function(){
this.inherited(arguments);
this.resize();
},postCreate:function(){
this.inherited(arguments);
var _6=this.textbox;
if(_5==undefined){
var te=_2.create("textarea",{rows:"5",cols:"20",value:" ",style:{zoom:1,overflow:"hidden",visibility:"hidden",position:"absolute",border:"0px solid black",padding:"0px"}},_4.body(),"last");
_5=te.scrollHeight>=te.clientHeight;
_4.body().removeChild(te);
}
this.connect(_6,"onscroll","_resizeLater");
this.connect(_6,"onresize","_resizeLater");
this.connect(_6,"onfocus","_resizeLater");
_6.style.overflowY="hidden";
this._estimateHeight();
this._resizeLater();
},_onInput:function(e){
this.inherited(arguments);
this.resize();
},_estimateHeight:function(){
// Rough first guess: one row per newline, plus two spare rows.
var _7=this.textbox;
_7.style.height="auto";
_7.rows=(_7.value.match(/\n/g)||[]).length+2;
},_resizeLater:function(){
setTimeout(_3.hitch(this,"resize"),0);
},resize:function(){
// _8(): measure scrollHeight, temporarily giving an empty textarea a
// single-space value so the measurement is meaningful.
function _8(){
var _9=false;
if(_a.value===""){
_a.value=" ";
_9=true;
}
var sh=_a.scrollHeight;
if(_9){
_a.value="";
}
return sh;
};
var _a=this.textbox;
if(_a.style.overflowY=="hidden"){
_a.scrollTop=0;
}
if(this.resizeTimer){
clearTimeout(this.resizeTimer);
}
this.resizeTimer=null;
if(this.busyResizing){
return;
}
this.busyResizing=true;
if(_8()||_a.offsetHeight){
var _b=_a.style.height;
if(!(/px/.test(_b))){
_b=_8();
_a.rows=1;
_a.style.height=_b+"px";
}
var _c=Math.max(parseInt(_b)-_a.clientHeight,0)+_8();
var _d=_c+"px";
if(_d!=_a.style.height){
_a.rows=1;
_a.style.height=_d;
}
if(_5){
var _e=_8();
_a.style.height="auto";
if(_8()<_e){
_d=_c-_e+_8()+"px";
}
_a.style.height=_d;
}
_a.style.overflowY=_8()>_a.clientHeight?"auto":"hidden";
}else{
this._estimateHeight();
}
this.busyResizing=false;
},destroy:function(){
if(this.resizeTimer){
clearTimeout(this.resizeTimer);
}
if(this.shrinkTimer){
clearTimeout(this.shrinkTimer);
}
this.inherited(arguments);
}});
}); | PypiClean |
/Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/glosser/GlosserWidget.py | import gtk
from matplotlib.backends.backend_cairo import RendererCairo
import pangocairo
from aobject.utils import *
import pango
import gobject
from .. import glypher
import copy
from lxml import etree
import cairo
from aobject import aobject
from aobject.paths import *
from ..tablemaker import *
import rsvg
import StringIO
def render_stock(style, stockid) :
    """Render the given GTK stock icon id to a pixbuf at small-toolbar size."""
    icon_set = style.lookup_icon_set(stockid)
    pixbuf = icon_set.render_icon(style,
            gtk.TEXT_DIR_NONE,
            gtk.STATE_NORMAL,
            gtk.ICON_SIZE_SMALL_TOOLBAR,
            None,
            None)
    return pixbuf
class GlosserWidget(aobject.AObject) :
    """Pairs a design-view subwidget with a presentation-view subwidget
    and keeps their position and size synchronised across both layouts.
    Coordinates (x, y, w, h) are stored in layout-independent "body"
    units and translated per layout on demand."""
    x = 0
    y = 0
    h = 0
    w = 0
    design_widget = None
    presentation_widget = None
    __gsignals__ = {
        "redraw-request" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, () )}
    current_scaling = 1.
    attached = True
    # Re-entrancy guard: set while move/resize propagate between views.
    suspend = False
    visible = True
    container = False
    initial_spronk_fns = None
    initial_hide = False
    def get_auto_aesthete_properties(self) :
        return {
            'x' : (float,), 'y' : (float,), 'h' : (float,), 'w' : (float,),
        }
    def aes_get_parameters(self) :
        return { 'on_slide' : self.slide.num }
    def initial_spronk(self) :
        # Apply the widget's initial visibility and run any deferred hooks.
        debug_print(self.initial_hide)
        if self.initial_hide :
            self.hide()
        for fn in self.initial_spronk_fns :
            fn()
    def show(self) :
        self.presentation_widget.show()
    def hide(self) :
        self.presentation_widget.hide()
    def get_visible(self) :
        # NOTE(review): show()/hide() never update self.visible, so this
        # always returns the class default (True) -- confirm intent.
        return self.visible
    def remove_from_layouts(self) :
        # Detach both subwidgets from their layouts (e.g. when leaving a
        # slide); restore_to_layouts() reverses this.
        if not self.attached :
            return
        self.attached = False
        for i in (0,1) :
            self.layouts[i].remove(self.subwidgets[i])
    def restore_to_layouts(self) :
        if self.attached :
            return
        for i in (0,1) :
            to_pos = map(int, self.layouts[i].translate_body_pos(self.x,
                                                                 self.y))
            self.layouts[i].put(self.subwidgets[i], *to_pos)
        self.attached = True
    def __init__(self, slide, design_layout, presentation_layout, name_root='GlosserWidget', env=None) :
        aobject.AObject.__init__(self, name_root=name_root,
                                 env=env,
                                 view_object=False,
                                 elevate=False)
        self.slide = slide
        # Per-subwidget scaling: [presentation, design].
        self.current_scaling = [1.,1.]
        self.layout_conn = [-1, -1]
        self.initial_spronk_fns = []
        #[presentation_layout.translate_dist(1.),
        # design_layout.translate_dist(1.)]
        #[presentation_layout.translate_pos(presentation_layout.body_x,presentation_layout.body_y,rev=True),
        # design_layout.translate_pos(design_layout.body_x,design_layout.body_y,rev=True)]
        self.layouts = [presentation_layout, design_layout]
        # Any redraw of the design view also refreshes the presentation.
        self.design_widget.connect_after("expose-event", lambda w, e :
                                         self.presentation_widget.queue_draw())
        self.subwidgets = [self.presentation_widget, self.design_widget]
        for i in (0, 1) :
            self.layout_conn[i] = self.layouts[i].connect_after("size-allocate",
                                                                self.do_layout_size_allocate, i)
            self.layouts[i].put(self.subwidgets[i], 0, 0)
            self.subwidgets[i].show()
            self.move(0, 0, i)
        self.design_widget.connect("size-allocate", lambda w, a :
                                   self.update_from_design_widget())
    def move(self, x=None, y=None, subwidget=None) :
        # Move to body coordinates (x, y); when subwidget is None both
        # views are updated.  Guarded by self.suspend against recursion.
        if self.suspend :
            return
        self.suspend = True
        if x is None :
            x = self.x
        if y is None :
            y = self.y
        self.x = x
        self.y = y
        do_redraw = False
        for i in (subwidget,) if subwidget is not None else (1,0) :
            sw = self.subwidgets[i]
            self.layouts[i].handler_block(self.layout_conn[i])
            x, y = map(int, self.layouts[i].translate_body_pos(self.x,self.y))
            if self.attached and (x != sw.allocation.x or y != sw.allocation.y) :
                self.layouts[i].move(sw, x, y)
                do_redraw = True
            self.layouts[i].handler_unblock(self.layout_conn[i])
        self.suspend = False
        if do_redraw :
            self.emit("redraw-request")
    def update_from_design_widget(self) :
        # Push a user-driven design-view allocation back into body
        # coordinates and mirror it onto the presentation view.
        if self.suspend :
            return
        al = self.design_widget.get_allocation()
        layout = self.layouts[GLOSSER_WIDGET_DESIGN]
        if al.x > 0 or al.y > 0 :
            x, y = layout.translate_body_pos(al.x, al.y, rev=True)
            self.move(x, y, subwidget=GLOSSER_WIDGET_PRESENTATION)
        if al.width > 0 or al.height > 0:
            w = layout.translate_dist(al.width, rev=True)
            h = layout.translate_dist(al.height, rev=True)
            self.resize(w, h, subwidget=GLOSSER_WIDGET_PRESENTATION)
    def move_resize(self, subwidget=None) :
        self.resize(self.w, self.h, subwidget=subwidget)
        self.move(self.x, self.y, subwidget=subwidget)
    def resize(self, w=None, h=None, subwidget=None) :
        # Resize to body dimensions (w, h); mirrors move()'s structure.
        if self.suspend :
            return
        self.suspend = True
        if w is None :
            w = self.w
        if h is None :
            h = self.h
        self.w = w
        self.h = h
        do_redraw = False
        for i in (subwidget,) if subwidget is not None else (1,0) :
            self.layouts[i].handler_block(self.layout_conn[i])
            sw = self.subwidgets[i]
            w = self.layouts[i].translate_dist(self.w)
            h = self.layouts[i].translate_dist(self.h)
            if int(w) != sw.allocation.width or int(h) != sw.allocation.height :
                sw.set_size_request(int(w), int(h))
                do_redraw = True
            self.layouts[i].handler_unblock(self.layout_conn[i])
        if do_redraw :
            self.emit("redraw-request")
        self.suspend = False
    # Last seen layout origin/ratio, used to skip redundant allocations.
    old_origin = None
    old_rat = None
    def do_layout_size_allocate(self, layout, allocation, subwidget) :
        origin = layout.translate_body_pos(0, 0)
        rat = layout.translate_dist(1)
        if origin == self.old_origin and rat == self.old_rat :
            return
        self.old_origin = origin
        self.old_rat = rat
        if rat != self.current_scaling[subwidget] :
            self.rescale(subwidget, rat)
        else :
            self.move_resize(subwidget)
    def rescale_action(self, subwidget, rat) :
        # Hook for subclasses; called after the scaling factor changes.
        pass
    def rescale(self, subwidget, rat) :
        if self.suspend :
            return
        self.current_scaling[subwidget] = rat
        self.move_resize(subwidget)
        self.rescale_action(subwidget, rat)
    def get_action_panel(self) :
        return None
    def presentation_draw(self, cr, scaling=None, ignore_spronks=True,
                          final=None) :
        # Render the presentation view onto cr; returns False when the
        # widget is hidden and spronks (visibility effects) are honoured.
        if not ignore_spronks and not self.presentation_widget.get_visible() :
            return False
        if final is None :
            self.do_presentation_draw(cr, scaling=scaling)
        else :
            self.do_presentation_draw(cr, scaling=scaling, final=final)
        return True
class GlosserPresentationImage(gtk.DrawingArea) :
    """Drawing area that paints via a caller-supplied draw function,
    applying the current presentation scaling first."""
    scaling = 1
    draw_fn = None
    __gsignals__ = { "expose-event" : "override"}
    def __init__(self, draw_fn) :
        # draw_fn(cr, scaling, final=True) paints the widget content.
        self.draw_fn = draw_fn
        gtk.DrawingArea.__init__(self)
    def rescale(self, rat) :
        # Record the new zoom factor and request a repaint.
        self.scaling = rat
        self.queue_draw()
    def do_expose_event(self, event):
        # Clip to the exposed region, apply the zoom, then delegate.
        cr = self.window.cairo_create()
        cr.rectangle ( event.area.x, event.area.y, event.area.width, event.area.height)
        cr.clip()
        cr.scale(self.scaling, self.scaling)
        self.draw_fn(cr, self.scaling, final=True)
GLOSSER_WIDGET_PRESENTATION = 0
GLOSSER_WIDGET_DESIGN = 1 | PypiClean |
/CommenlyzerEngine-1.0.0.tar.gz/CommenlyzerEngine-1.0.0/README.md | <img alt="PyPI - License" src="https://img.shields.io/pypi/l/CubaCrawler.svg"> <img alt="PyPI - Python Version" src="https://img.shields.io/pypi/pyversions/CubaCrawler.svg"> <img alt="PyPI" src="https://img.shields.io/pypi/v/CubaCrawler.svg"> <img alt="Travis (.org)" src="https://img.shields.io/travis/fsadannn/CubaCrawler/master.svg"> <img alt="Codecov" src="https://img.shields.io/codecov/c/github/fsadannn/CubaCrawler.svg">
# CommenlyzerEngine
Esta biblioteca contiene
## Cómo se usa
Por escribir
Esta biblioteca es desarrollada por GIA (Grupo de Inteligencia Artificial), cualquier contribución o referencia es agradecida.
thanks,
Frank Sadan Naranjo Noda <fsadannn@gmail.com>
Hian Cañizares Díaz <hiancdtrsnm@gmail.com> | PypiClean |
/Eureqa-1.76.0.tar.gz/Eureqa-1.76.0/eureqa/analysis/components/table_column.py | import uuid
import itertools
import math
import formatted_text
from base import _Component
class TableColumn(object):
    """ Represent a column in table

    :param ~eureqa.analysis.components.table_builder.TableBuilder parent_table: the containing table
    :param list col_data: data for this column. Can be either a list of str or a list of float
    :param str col_name: name of this column
    """
    # NOTE: this module targets Python 2 (xrange, itertools.izip below),
    # so no type annotations are added here.
    def __init__(self, parent_table, col_data, col_name):
        self._parent_table = parent_table
        self._col_data = col_data
        self._column_name = col_name
        self.column_header = col_name
        # Separate sort/render column names let the frontend sort or
        # render by auxiliary columns distinct from the data column.
        self._sort_values = None
        self._sort_col_name = col_name
        self._rendered_values = None
        self._rendered_col_name = col_name
        self.searchable = True
        self._width = 1
        self.filterable = False
        self._filter_only = False
        self.filter_name = col_name
    @property
    def column_name(self):
        """The name of this column"""
        return self._column_name
    @column_name.setter
    def column_name(self, new_name):
        """Change the name of this column

        :param str new_name: The new name of the column
        """
        try:
            self._parent_table._change_col_name(self._column_name, new_name)
        except KeyError:
            pass # if this is an orphan column, ignore and continue
        # Keep the derived sort/render names in sync when they still
        # mirror the data column's name.
        if self._rendered_col_name == self._column_name:
            self._rendered_col_name = new_name
        if self._sort_col_name == self._column_name:
            self._sort_col_name = new_name
        self._column_name = new_name
    @property
    def sort_values(self):
        """A list of values specifying how the column is sorted"""
        return self._sort_values
    @sort_values.setter
    def sort_values(self, sort_values):
        """Change how the column is sorted

        Note the input must be a list of numbers. The input could be a pandas.Series only if the index runs from 0 to (n-1)

        :param a list of numbers sort_values: the value for each row used for sorting
        """
        # A fresh uuid decouples the sort column from the data column.
        self._sort_col_name = str(uuid.uuid4())
        # Backend expects no nans/infs, so we replace nan/-inf with (min(values)-1) and +inf with (max(values)+1).
        # Also we cap the magnitude of all values at a value which is smaller than the largest value JS can represent.
        self._sort_values = sort_values
        # 2**52 is safely below JavaScript's Number.MAX_SAFE_INTEGER.
        largest_value = 2**52
        smallest_value = -largest_value
        set_to_min = []
        set_to_max = []
        for i in xrange(len(self._sort_values)):
            value = self._sort_values[i]
            # ignore strings -- the processing we need to do only applies to numeric values
            if not isinstance(value, float) and not isinstance(value, int):
                continue
            if (math.isnan(value) or (math.isinf(value) and value < 0)) or (value < smallest_value):
                set_to_min.append(i)
            elif (math.isinf(value) and value > 0) or (value > largest_value):
                set_to_max.append(i)
        for i in set_to_min:
            self._sort_values[i] = smallest_value
        for i in set_to_max:
            self._sort_values[i] = largest_value
    @property
    def rendered_values(self):
        """A list of values specifying how the column is rendered"""
        return self._rendered_values
    @rendered_values.setter
    def rendered_values(self, rendered_values):
        """Change how the column is rendered

        :param a list of str or numbers: the value for each row used for rendering
        """
        self._rendered_values = rendered_values
        self._rendered_col_name = str(uuid.uuid4())
    @property
    def width(self):
        """ The width of this column"""
        return self._width
    @width.setter
    def width(self, width):
        """ Change the width of this column

        :param double width: a number representing the percentage of whole table width for this column
        """
        if width <= 0:
            raise RuntimeError("Column width must be larger than zero")
        self._width = width
    @property
    def filter_only(self):
        """ Whether this column is only for filtering, if True this column doesn't appear in the table"""
        return self._filter_only
    @filter_only.setter
    def filter_only(self, filter_only):
        """ Change whether this column is only for filtering

        :param bool filter_only: whether or not this column should be set to filter only
        """
        self._filter_only = filter_only
        self.filterable = True
        # filter_only columns don't show up in the table component, it would be
        # very confusing if you can search on them
        self.searchable = False
    def _get_data_columns(self, analysis):
        # Build the parallel (data, name, component-definition) column
        # lists consumed by the table serializer.
        cols_name = [str(self._column_name)]
        cols_data = [self._col_data]
        cols_comp = []
        if self._sort_values is not None:
            cols_name.append(self._sort_col_name)
            cols_data.append(self._sort_values)
        if self._rendered_values is not None:
            comp_ref_and_comp_def_by_row = [
                formatted_text._get_component_ref_and_defs_for_value(rendered_value, analysis, associate_with_analysis=False)
                for rendered_value in self._rendered_values]
            comp_ref, comp_def = itertools.izip(*comp_ref_and_comp_def_by_row) # transpose it into 2 columns
            cols_name.append(self._rendered_col_name)
            cols_data.append(comp_ref)
            cols_comp = comp_def
return cols_data, cols_name, cols_comp | PypiClean |
/CommentGPT-0.2.tar.gz/CommentGPT-0.2/README.md | # CommentGPT
This python script/command line tool is able to comment code using ChatGPT. The results are sometimes a little *odd*, so a human will need to look at them, but the tool can be very efficient at
commenting under-maintained files.
You'll never have to write comments again!
## Installation
### Download
#### Clone the repository
```bash
git clone https://github.com/brendankane04/CommentGPT.git
```
#### Download the packages with pip
```bash
pip3 install -r requirements.txt
```
### Installation of ChatGPT-wrapper
This module needed by the script must be installed in its own way. You will need to run these commands.
```bash
pip install git+https://github.com/mmabrouk/chatgpt-wrapper
pip install playwright
playwright install firefox
```
After running this command, you will reach a web page run by OpenAI.
You will need to log in with your OpenAI username & password
```bash
chatgpt install
```
Afterwards, exit the webpage.
## How to use
#### script in terminal
The python script can be called from the terminal and used with the following script
Clone the script using git, then access the file `__main__.py`.
```bash
python3 __main__.py -i test_file.cpp -o test_file_commented.cpp
```
The script will comment the file
#### python package
import the package with pip, then apply this to your own python script
```python
from CommentGPT import commenter as c
combined_response = c.comment_code(snippet, section_size)
```
## Future work
The tool currently uses the python ChatGPT-interfacing library [chatgpt-wrapper](https://github.com/mmabrouk/chatgpt-wrapper).
Unfortunately, it is slow, and has an involved configuration process, so that should be changed.
The code also sometimes modifies the code being commented on a rare occasion. Using some diff tool & comment verifier in the script would prevent this.
Until then, take care to look at the modifications made in the code. | PypiClean |
/Leye-0.0.1.tar.gz/Leye-0.0.1/sentileye/polarity.py | import csv
import re
import ast
import pandas as pd
from csv import reader
from sentileye.booster import booster
from sentileye.emoticon import emoticon
def get_user_data():
    """Prompt the user for input and return a DataFrame with a 'text' column.

    The user may either supply a CSV file (which must contain a column
    named ``text``) or type a single sentence to classify.

    Returns:
        pandas.DataFrame: a single column named ``text`` holding the
        document(s) to classify.

    Raises:
        ValueError: if the yes/no answer is not recognised.
    """
    message = 'Hello, you are welcome to SentiLEYE sentiment classifier'
    print('{:^80}'.format(message))
    # Read in file to classify
    user = input('Do you have a csv file to classify? Enter yes/no ').lower()
    print("\n")
    if user in ('yes', 'y'):
        message1 = '*****Please read instructions carefully*****'
        print('{:^80}'.format(message1))
        print("\n")
        print('''
        1. Load file in .csv format
        2. Input csv file header as text - This means document or sentences to classify should have column name = text
        ''')
        print("\n")
        fileinput = str(input('Your filename - kindly include .csv extension: '))
        print("\n")
        data = pd.read_csv(fileinput)
        data = pd.DataFrame(data['text'])
    elif user in ('no', 'n'):
        print("\n")
        message = [input('Do you want to classify raw text or sentence? If yes, enter text here ')]
        print("\n")
        data = pd.DataFrame(list(reader(message)))
        data['text'] = data[0]
        data = pd.DataFrame(data['text'])
    else:
        # The original third branch tested `user != 'yes' or user != 'y' or
        # ...`, which is always true, so its trailing `else: raise
        # NameError` was unreachable.  Any unrecognised answer is rejected
        # here instead, preserving the observable behavior (ValueError).
        raise ValueError('Please enter yes or no')
    return data
# Emotion lexicon: term -> integer sentiment score.
emotion_df= pd.read_csv('emotion_new.csv')
emotion = dict(zip(emotion_df.term, emotion_df.score))
# SentiLEYE wordlist (labelled to reflect bank context): term -> score.
sentileye_df= pd.read_csv('sentileye_list.csv')
sentileye = dict(zip(sentileye_df.term, sentileye_df.score))
# Slang map: term -> replacement (stored in the 'score' column); used as
# substitution text by expand_slang below.
slang_df= pd.read_csv('slang.csv')
slang = dict(zip(slang_df.term, slang_df.score))
# Contraction map stored as a Python dict literal, e.g. "don't" -> "do not".
with open("neg.txt", "r") as dat:
    neg = ast.literal_eval(dat.read())
# Second contraction map covering punctuation variants.
with open("neg1.txt", "r") as dat1:
    neg1 = ast.literal_eval(dat1.read())
# NOTE(review): all of the above runs at import time and reads data files
# from the current working directory -- importing this module elsewhere
# will fail; confirm before packaging.
contractions_re=re.compile('(%s)' % '|'.join(neg.keys()))
# Function for expanding contractions
def expand_contractions(text,contractions_dict=neg):
def replace(match):
return contractions_dict[match.group(0)]
return contractions_re.sub(replace, text)
# Regular expression for finding contractions if punctuation exist
contractions_re1=re.compile('(%s)' % '|'.join(neg1.keys()))
# Function for expanding contractions
def expand_contractions_punct(text,contractions_dict=neg1):
def replace(match):
return contractions_dict[match.group(0)]
return contractions_re1.sub(replace, text)
# Regular expression for finding slang
contractions_re_slang = re.compile('('+'|'.join(slang.keys())+')')
# Function for expanding contractions
def expand_slang(text,contractions_dict=slang):
def replace(match):
return contractions_dict[match.group(0)]
return contractions_re_slang.sub(replace, text)
def get_score(s):
    """Return an integer sentiment score for the string s.

    Per result() below, a positive total is labelled 'positive', zero
    'neutral' and a negative total 'negative'.
    """
    value = 0
    result = 0
    negation = 0
    booster_word_count = 0
    pattern = '('+'|'.join(emotion.keys())+')'#Regular expression over all emotion terms
    #sum values of exact word matched
    for z in s.split():
        # Look the token up in the lexicons, most specific first.
        if z in sentileye:
            value += int(sentileye[z])
        elif z in re.findall(pattern, s):
            value += int(emotion[z])
        elif z in emoticon:
            value += int(emoticon[z])
        # A preceding booster word doubles the running score once.
        if booster_word_count > 0 and value != 0:
            value = value * 2
            booster_word_count -= 1
        if negation > 0 and value == 0:
            value = 0
        # A preceding negation flips the sign of the running score.
        if negation > 0 and value != 0:
            value = -1 * value
            negation -= 2
        if z in booster:
            booster_word_count += 2
        elif z in ["no", "not", "but", "however", "nothing", "meanwhile", "yet", "without", "witout", "never", "although"]:
            negation += 2
        if negation >= 2:
            result -= 1
        # NOTE(review): `value` is never reset between tokens, so each
        # word's contribution is re-added to `result` on every later
        # iteration; magnitudes grow with sentence length.  Confirm
        # whether this accumulation is intentional before changing.
        result += value
    return result
def result():
    """Run the full pipeline: load input, normalise, score, classify, export.

    Reads user data via get_user_data(), lower-cases and expands the text,
    scores each row with get_score, writes 'sentileyeresult.csv', and prints
    the resulting DataFrame.
    """
    data = get_user_data()
    # Lower-case every token while preserving single-space separation.
    data['text'] = data['text'].apply(lambda x: " ".join(x.lower() for x in x.split()))
    # Expand contractions (plain and punctuation variants), then slang.
    data['text'] = data['text'].apply(expand_contractions)
    data['text'] = data['text'].apply(expand_contractions_punct)
    data['text'] = data['text'].apply(expand_slang)
    # Score each row; map the sign of the score to a class label.
    data['score'] = data['text'].apply(get_score)
    data['class'] = data['score'].apply(
        lambda x: 'positive' if x > 0 else ('neutral' if x == 0 else 'negative'))
    data.to_csv('sentileyeresult.csv')
    # FIX: was `return print(data)`, which confusingly returned None from
    # print; print and return separately (still returns None).
    print(data)
result()
/ChatExchange-0.0.4-py3-none-any.whl/chatexchange/_utils.py | import sys
if sys.version_info[0] == 2:
from HTMLParser import HTMLParser
import htmlentitydefs
else:
from html.parser import HTMLParser
from html import entities as htmlentitydefs
import functools
import logging
import weakref
def log_and_ignore_exceptions(
    f, exceptions=Exception, logger=logging.getLogger('exceptions')
):
    """Return a wrapper around *f* that catches *exceptions*, logs them with
    a traceback, and yields None instead of propagating."""
    @functools.wraps(f)
    def safe_call(*args, **kwargs):
        try:
            result = f(*args, **kwargs)
        except exceptions:
            logger.exception("ignored unhandled exception in %s", f)
            return None
        return result
    return safe_call
class HTMLTextExtractor(HTMLParser):
    """Accumulate the plain-text content of an HTML document fed to it."""
    # Originally posted at http://stackoverflow.com/a/7778368.
    # by Søren Løvborg (http://stackoverflow.com/u/13679) and Eloff.

    def __init__(self):
        HTMLParser.__init__(self)
        self.result = []

    def handle_data(self, d):
        # Plain text between tags is kept verbatim.
        self.result.append(d)

    def handle_charref(self, number):
        # Numeric references: &#65; (decimal) or &#x41; (hexadecimal).
        if number[0] in ('x', 'X'):
            codepoint = int(number[1:], 16)
        else:
            codepoint = int(number, 10)
        self.result.append(chr(codepoint))

    def handle_entityref(self, name):
        # Named references such as &amp; resolve through the entity table.
        self.result.append(chr(htmlentitydefs.name2codepoint[name]))

    def get_text(self):
        return ''.join(self.result)
def html_to_text(html):
    """Strip markup from *html* and return only its text content."""
    extractor = HTMLTextExtractor()
    extractor.feed(html)
    return extractor.get_text()
# Number of seconds since the user was last seen, based on <12d ago> data.
def parse_last_seen(text):
    """Convert an '<n><unit> ago' string into seconds.

    Returns 0 for 'just now' and -1 (an error code) for 'n/a'.
    """
    if text == "n/a":
        return -1  # Take this as an error code if you want
    if text == "just now":
        return 0
    unit_seconds = {
        's': 1,
        'm': 60,
        'h': 3600,
        'd': 86400,
        'y': 31536000,
    }
    parts = text.split(' ')
    assert len(parts) == 2, "text doesn't appear to be in <x ago> format"
    quantity = parts[0]
    unit = quantity[-1]
    assert unit in unit_seconds, "suffix char unrecognized"
    return int(quantity[:-1]) * unit_seconds[unit]
class LazyFrom(object):
    """
    Descriptor used when multiple lazy attributes depend on one common
    source of data: all are populated by a single loader method.
    """
    def __init__(self, method_name):
        """
        method_name names the instance method invoked on first access. It
        must assign a value for this attribute (through this descriptor).
        """
        self.method_name = method_name
        # Weak keys so the descriptor never keeps owner instances alive.
        self.values = weakref.WeakKeyDictionary()

    def __get__(self, obj, cls):
        if obj is None:
            # Class-level access returns the descriptor itself.
            return self
        if obj in self.values:
            return self.values[obj]
        loader = getattr(obj, self.method_name)
        loader()
        assert obj in self.values, "method failed to populate attribute"
        return self.values[obj]

    def __set__(self, obj, value):
        self.values[obj] = value

    def __delete__(self, obj):
        # Silently ignore deletes when no value was ever stored.
        self.values.pop(obj, None)
/Misago-0.36.1.tar.gz/Misago-0.36.1/misago/static/misago/admin/momentjs/se.js |
//! moment.js locale configuration for Northern Sami [se].
//! UMD wrapper: resolves moment via CommonJS, AMD, or the global object.
;(function (global, factory) {
   typeof exports === 'object' && typeof module !== 'undefined'
       && typeof require === 'function' ? factory(require('../moment')) :
   typeof define === 'function' && define.amd ? define(['../moment'], factory) :
   factory(global.moment)
}(this, (function (moment) { 'use strict';

    var se = moment.defineLocale('se', {
        months : 'ođđajagemánnu_guovvamánnu_njukčamánnu_cuoŋománnu_miessemánnu_geassemánnu_suoidnemánnu_borgemánnu_čakčamánnu_golggotmánnu_skábmamánnu_juovlamánnu'.split('_'),
        monthsShort : 'ođđj_guov_njuk_cuo_mies_geas_suoi_borg_čakč_golg_skáb_juov'.split('_'),
        weekdays : 'sotnabeaivi_vuossárga_maŋŋebárga_gaskavahkku_duorastat_bearjadat_lávvardat'.split('_'),
        weekdaysShort : 'sotn_vuos_maŋ_gask_duor_bear_láv'.split('_'),
        weekdaysMin : 's_v_m_g_d_b_L'.split('_'),
        // Date/time output formats used by moment's localized formatting tokens.
        longDateFormat : {
            LT : 'HH:mm',
            LTS : 'HH:mm:ss',
            L : 'DD.MM.YYYY',
            LL : 'MMMM D. [b.] YYYY',
            LLL : 'MMMM D. [b.] YYYY [ti.] HH:mm',
            LLLL : 'dddd, MMMM D. [b.] YYYY [ti.] HH:mm'
        },
        // Calendar-relative phrasings (today / tomorrow / yesterday / etc.).
        calendar : {
            sameDay: '[otne ti] LT',
            nextDay: '[ihttin ti] LT',
            nextWeek: 'dddd [ti] LT',
            lastDay: '[ikte ti] LT',
            lastWeek: '[ovddit] dddd [ti] LT',
            sameElse: 'L'
        },
        // Relative-time strings (%s / %d are substituted by moment).
        relativeTime : {
            future : '%s geažes',
            past : 'maŋit %s',
            s : 'moadde sekunddat',
            ss: '%d sekunddat',
            m : 'okta minuhta',
            mm : '%d minuhtat',
            h : 'okta diimmu',
            hh : '%d diimmut',
            d : 'okta beaivi',
            dd : '%d beaivvit',
            M : 'okta mánnu',
            MM : '%d mánut',
            y : 'okta jahki',
            yy : '%d jagit'
        },
        dayOfMonthOrdinalParse: /\d{1,2}\./,
        ordinal : '%d.',
        week : {
            dow : 1, // Monday is the first day of the week.
            doy : 4  // The week that contains Jan 4th is the first week of the year.
        }
    });

    return se;

})));
/Gribble-1.0.0.tar.gz/Gribble-1.0.0/gribble/worker/tail.py | import collections
import datetime
import errno
import gzip
import io
import os
import sqlite3
import time
from gribble.utils import IS_GZIPPED_FILE, REOPEN_FILES, multiline_merge
from gribble.unicode_dammit import ENCODINGS
from gribble.base_log import BaseLog
class Tail(BaseLog):
    """Follows a single file and outputs new lines from it to a callback.

    Handles gzipped files, encoding fallback, multi-line events, rotation
    and truncation detection, and position persistence via a sqlite
    "sincedb" database.
    """
    def __init__(self, filename, callback, position="end", logger=None, gribble_config=None, file_config=None):
        """Set up tailing state for *filename*.

        callback receives ('callback', payload) tuples for each batch of
        lines; per-file settings come from gribble_config.
        NOTE(review): `position` and `file_config` appear unused here.
        """
        super(Tail, self).__init__(logger=logger)
        self.active = False
        self._callback = callback
        self._fid = None
        self._file = None
        self._filename = filename
        self._last_sincedb_write = None
        self._last_file_mapping_update = None
        self._line_count = 0
        self._line_count_sincedb = 0
        self._log_template = '[' + self._filename + '] - {0}'
        self._sincedb_path = gribble_config.get('sincedb_path')
        self._debug = gribble_config.get_field('debug', filename)  # TODO: Implement me
        self._encoding = gribble_config.get_field('encoding', filename)
        self._fields = gribble_config.get_field('fields', filename)
        self._format = gribble_config.get_field('format', filename)
        self._ignore_empty = gribble_config.get_field('ignore_empty', filename)
        self._ignore_truncate = gribble_config.get_field('ignore_truncate', filename)
        self._message_format = gribble_config.get_field('message_format', filename)  # TODO: Implement me
        self._sincedb_write_interval = gribble_config.get_field('sincedb_write_interval', filename)
        self._start_position = gribble_config.get_field('start_position', filename)
        self._stat_interval = gribble_config.get_field('stat_interval', filename)
        self._tail_lines = gribble_config.get_field('tail_lines', filename)
        self._tags = gribble_config.get_field('tags', filename)
        self._type = gribble_config.get_field('type', filename)
        # The following is for the buffered tokenization.
        # Store the specified delimiter.
        self._delimiter = gribble_config.get_field("delimiter", filename)
        # Store the specified size limitation.
        self._size_limit = gribble_config.get_field("size_limit", filename)
        # The input buffer is a deque of data segments that are only joined
        # once a complete token is reached, keeping per-read appends cheap.
        self._input = collections.deque([])
        # Size of the input buffer.
        self._input_size = 0
        # Pending lines of a multi-line event that is still being assembled.
        self._current_event = collections.deque([])
        self._last_activity = time.time()
        self._multiline_regex_after = gribble_config.get_field('multiline_regex_after', filename)
        self._multiline_regex_before = gribble_config.get_field('multiline_regex_before', filename)
        self._update_file()
        if self.active:
            self._log_info("watching logfile")
    def __del__(self):
        """Closes all files"""
        self.close()
    def open(self, encoding=None):
        """Open the tailed file; returns the file object or None on IOError.

        Gzipped files are opened in binary mode; everything else is opened
        as text with undecodable bytes replaced.
        """
        try:
            if IS_GZIPPED_FILE.search(self._filename):
                _file = gzip.open(self._filename, 'rb')
            else:
                # Prefer the explicit override, then the configured encoding,
                # then the platform default.
                if encoding:
                    _file = io.open(self._filename, 'r', encoding=encoding, errors='replace')
                elif self._encoding:
                    _file = io.open(self._filename, 'r', encoding=self._encoding, errors='replace')
                else:
                    _file = io.open(self._filename, 'r', errors='replace')
        except IOError, e:
            self._log_warning(str(e))
            _file = None
            self.close()
        return _file
    def close(self):
        """Closes all currently open file pointers"""
        if not self.active:
            return
        self.active = False
        if self._file:
            self._file.close()
        # Persist the final read position before shutting down.
        self._sincedb_update_position(force_update=True)
        # Flush any pending (possibly partial) multi-line event.
        if self._current_event:
            event = '\n'.join(self._current_event)
            self._current_event.clear()
            self._callback_wrapper([event])
    def run(self, once=False):
        """Main loop: read new data, then re-validate the tailed file.

        With once=True a single iteration is performed (used for polling).
        """
        while self.active:
            current_time = time.time()
            self._run_pass()
            self._ensure_file_is_good(current_time=current_time)
            self._log_debug('Iteration took {0:.6f}'.format(time.time() - current_time))
            if once:
                break
        if not once:
            self._log_debug('file closed')
    def fid(self):
        """Return the cached file id (device/inode signature) of the file."""
        return self._fid
def _buffer_extract(self, data):
"""
Extract takes an arbitrary string of input data and returns an array of
tokenized entities, provided there were any available to extract. This
makes for easy processing of datagrams using a pattern like:
tokenizer.extract(data).map { |entity| Decode(entity) }.each do ..."""
# Extract token-delimited entities from the input string with the split command.
# There's a bit of craftiness here with the -1 parameter. Normally split would
# behave no differently regardless of if the token lies at the very end of the
# input buffer or not (i.e. a literal edge case) Specifying -1 forces split to
# return "" in this case, meaning that the last entry in the list represents a
# new segment of data where the token has not been encountered
entities = collections.deque(data.split(self._delimiter, -1))
# Check to see if the buffer has exceeded capacity, if we're imposing a limit
if self._size_limit:
if self.input_size + len(entities[0]) > self._size_limit:
raise Exception('input buffer full')
self._input_size += len(entities[0])
# Move the first entry in the resulting array into the input buffer. It represents
# the last segment of a token-delimited entity unless it's the only entry in the list.
first_entry = entities.popleft()
if len(first_entry) > 0:
self._input.append(first_entry)
# If the resulting array from the split is empty, the token was not encountered
# (not even at the end of the buffer). Since we've encountered no token-delimited
# entities this go-around, return an empty array.
if len(entities) == 0:
return []
# At this point, we've hit a token, or potentially multiple tokens. Now we can bring
# together all the data we've buffered from earlier calls without hitting a token,
# and add it to our list of discovered entities.
entities.appendleft(''.join(self._input))
# Now that we've hit a token, joined the input buffer and added it to the entities
# list, we can go ahead and clear the input buffer. All of the segments that were
# stored before the join can now be garbage collected.
self._input.clear()
# The last entity in the list is not token delimited, however, thanks to the -1
# passed to split. It represents the beginning of a new list of as-yet-untokenized
# data, so we add it to the start of the list.
self._input.append(entities.pop())
# Set the new input buffer size, provided we're keeping track
if self._size_limit:
self._input_size = len(self._input[0])
# Now we're left with the list of extracted token-delimited entities we wanted
# in the first place. Hooray!
return entities
# Flush the contents of the input buffer, i.e. return the input buffer even though
# a token has not yet been encountered
def _buffer_flush(self):
buf = ''.join(self._input)
self._input.clear
return buf
    # Is the buffer empty?
    def _buffer_empty(self):
        # NOTE(review): despite the name, this returns True when the buffer is
        # NON-empty (len > 0). Left unchanged here to avoid breaking callers -
        # verify intent against call sites before "fixing" it.
        return len(self._input) > 0
def _ensure_file_is_good(self, current_time):
"""Every N seconds, ensures that the file we are tailing is the file we expect to be tailing"""
if self._last_file_mapping_update and current_time - self._last_file_mapping_update <= self._stat_interval:
return
self._last_file_mapping_update = time.time()
try:
st = os.stat(self._filename)
except EnvironmentError, err:
if err.errno == errno.ENOENT:
self._log_info('file removed')
self.close()
fid = self.get_file_id(st)
if fid != self._fid:
self._log_info('file rotated')
self.close()
elif self._file.tell() > st.st_size:
if st.st_size == 0 and self._ignore_truncate:
self._logger.info("[{0}] - file size is 0 {1}. ".format(fid, self._filename) +
"If you use another tool (i.e. logrotate) to truncate " +
"the file, your application may continue to write to " +
"the offset it last wrote later. In such a case, we'd " +
"better do nothing here")
return
self._log_info('file truncated')
self._update_file(seek_to_end=False)
elif REOPEN_FILES:
self._log_debug('file reloaded (non-linux)')
position = self._file.tell()
self._update_file(seek_to_end=False)
if self.active:
self._file.seek(position, os.SEEK_SET)
def _run_pass(self):
"""Read lines from a file and performs a callback against them"""
while True:
try:
data = self._file.read(4096)
except IOError, e:
if e.errno == errno.ESTALE:
self.active = False
return False
lines = self._buffer_extract(data)
if not lines:
# Before returning, check if an event (maybe partial) is waiting for too long.
if self._current_event and time.time() - self._last_activity > 1:
event = '\n'.join(self._current_event)
self._current_event.clear()
self._callback_wrapper([event])
break
self._last_activity = time.time()
if self._multiline_regex_after or self._multiline_regex_before:
# Multiline is enabled for this file.
events = multiline_merge(
lines,
self._current_event,
self._multiline_regex_after,
self._multiline_regex_before)
else:
events = lines
if events:
self._callback_wrapper(events)
if self._sincedb_path:
current_line_count = len(lines)
self._sincedb_update_position(lines=current_line_count)
self._sincedb_update_position()
    def _callback_wrapper(self, lines):
        """Wrap *lines* with file metadata and a UTC timestamp, then invoke
        the registered callback."""
        # ISO-8601 UTC timestamp with millisecond precision and 'Z' suffix.
        now = datetime.datetime.utcnow()
        timestamp = now.strftime("%Y-%m-%dT%H:%M:%S") + ".%03d" % (now.microsecond / 1000) + "Z"
        self._callback(('callback', {
            'fields': self._fields,
            'filename': self._filename,
            'format': self._format,
            'ignore_empty': self._ignore_empty,
            'lines': lines,
            'timestamp': timestamp,
            'tags': self._tags,
            'type': self._type,
        }))
    def _seek_to_end(self):
        """Position the file handle according to start_position / sincedb and
        optionally replay the last `tail_lines` lines through the callback."""
        self._log_debug('seek_to_end')
        # A persisted sincedb position overrides the configured start.
        if self._sincedb_path:
            sincedb_start_position = self._sincedb_start_position()
            if sincedb_start_position:
                self._start_position = sincedb_start_position
        if self._start_position == 'beginning':
            self._log_debug('no start_position specified')
            return
        line_count = 0
        if str(self._start_position).isdigit():
            self._log_debug('going to start position {0}'.format(self._start_position))
            self._start_position = int(self._start_position)
            # Try each known encoding until one reads the file cleanly.
            for encoding in ENCODINGS:
                line_count, encoded = self._seek_to_position(encoding=encoding, position=True)
                if line_count is None and encoded is None:
                    return
                if encoded:
                    break
        if self._start_position == 'beginning':
            self._log_debug('Bad start position specified')
            return
        if self._start_position == 'end':
            self._log_debug('getting end position')
            for encoding in ENCODINGS:
                line_count, encoded = self._seek_to_position(encoding=encoding)
                if line_count is None and encoded is None:
                    return
                if encoded:
                    break
        current_position = self._file.tell()
        self._log_debug('line count {0}'.format(line_count))
        self._log_debug('current position {0}'.format(current_position))
        self._sincedb_update_position(lines=line_count, force_update=True)
        # Reset this, so line added processed just after this initialization
        # will update the sincedb. Without this, if gribble run for less than
        # sincedb_write_interval it will always re-process the last lines.
        self._last_sincedb_write = 0
        if self._tail_lines:
            self._log_debug('tailing {0} lines'.format(self._tail_lines))
            lines = self.tail(self._filename, encoding=self._encoding, window=self._tail_lines, position=current_position)
            if lines:
                if self._multiline_regex_after or self._multiline_regex_before:
                    # Multiline is enabled for this file.
                    events = multiline_merge(
                        lines,
                        self._current_event,
                        self._multiline_regex_after,
                        self._multiline_regex_before)
                else:
                    events = lines
                self._callback_wrapper(events)
        return
def _seek_to_position(self, encoding=None, position=None):
line_count = 0
encoded = False
try:
while self._file.readline():
line_count += 1
if position and line_count == self._start_position:
encoded = True
break
if not position:
encoded = True
except UnicodeDecodeError:
self._log_debug('UnicodeDecodeError raised with encoding {0}'.format(self._encoding))
self._file = self.open(encoding=encoding)
self._encoding = encoding
if not self._file:
return None, None
if position and line_count != self._start_position:
self._log_debug('file at different position than {0}, assuming manual truncate'.format(self._start_position))
self._file.seek(0, os.SEEK_SET)
self._start_position == 'beginning'
return line_count, encoded
    def _sincedb_init(self):
        """Initializes the sincedb schema in an sqlite db (idempotent: only
        creates the database file when it does not already exist)."""
        if not self._sincedb_path:
            return
        if not os.path.exists(self._sincedb_path):
            self._log_debug('initializing sincedb sqlite schema')
            conn = sqlite3.connect(self._sincedb_path, isolation_level=None)
            conn.execute("""
            create table sincedb (
                fid      text primary key,
                filename text,
                position integer default 1
            );
            """)
            conn.close()
    def _sincedb_update_position(self, lines=0, force_update=False):
        """Persist the current line position for this file to the sincedb.

        Returns True when a row was written, False when writing is disabled,
        throttled by sincedb_write_interval, or the position is unchanged.
        """
        if not self._sincedb_path:
            return False
        self._line_count = self._line_count + lines
        old_count = self._line_count_sincedb
        lines = self._line_count
        current_time = int(time.time())
        if not force_update:
            # Throttle: at most one write per sincedb_write_interval, and
            # skip entirely when the position has not moved.
            if self._last_sincedb_write and current_time - self._last_sincedb_write <= self._sincedb_write_interval:
                return False
            if old_count == lines:
                return False
        self._sincedb_init()
        self._last_sincedb_write = current_time
        self._log_debug('updating sincedb to {0}'.format(lines))
        # isolation_level=None puts sqlite in autocommit mode.
        conn = sqlite3.connect(self._sincedb_path, isolation_level=None)
        cursor = conn.cursor()
        query = 'insert or ignore into sincedb (fid, filename) values (:fid, :filename);'
        cursor.execute(query, {
            'fid': self._fid,
            'filename': self._filename
        })
        query = 'update sincedb set position = :position where fid = :fid and filename = :filename'
        cursor.execute(query, {
            'fid': self._fid,
            'filename': self._filename,
            'position': lines,
        })
        conn.close()
        self._line_count_sincedb = lines
        return True
    def _sincedb_start_position(self):
        """Retrieves the starting position from the sincedb sql db
        for a given file. Returns None when no sincedb is configured or no
        row exists for this file id/filename pair.
        """
        if not self._sincedb_path:
            return None
        self._sincedb_init()
        self._log_debug('retrieving start_position from sincedb')
        conn = sqlite3.connect(self._sincedb_path, isolation_level=None)
        cursor = conn.cursor()
        cursor.execute('select position from sincedb where fid = :fid and filename = :filename', {
            'fid': self._fid,
            'filename': self._filename
        })
        start_position = None
        for row in cursor.fetchall():
            start_position, = row
        return start_position
def _update_file(self, seek_to_end=True):
"""Open the file for tailing"""
try:
self.close()
self._file = self.open()
except IOError:
pass
else:
if not self._file:
return
self.active = True
try:
st = os.stat(self._filename)
except EnvironmentError, err:
if err.errno == errno.ENOENT:
self._log_info('file removed')
self.close()
fid = self.get_file_id(st)
if not self._fid:
self._fid = fid
if fid != self._fid:
self._log_info('file rotated')
self.close()
elif seek_to_end:
self._seek_to_end()
    def tail(self, fname, encoding, window, position=None):
        """Read last N lines from file fname.

        Tries the configured encoding first, then falls back through the
        known ENCODINGS list until one decodes cleanly. Returns the list of
        lines, [] when the file vanished, False when it could not be opened,
        or None when every encoding failed with UnicodeDecodeError.
        """
        if window <= 0:
            raise ValueError('invalid window %r' % window)
        encodings = ENCODINGS
        if encoding:
            encodings = [encoding] + ENCODINGS
        for enc in encodings:
            try:
                f = self.open(encoding=enc)
                if f:
                    return self.tail_read(f, window, position=position)
                return False
            except IOError, err:
                if err.errno == errno.ENOENT:
                    return []
                raise
            except UnicodeDecodeError:
                # Wrong guess: try the next encoding.
                pass
    @staticmethod
    def get_file_id(st):
        # Device + inode uniquely identify a file across renames/rotation.
        return "%xg%x" % (st.st_dev, st.st_ino)
    @classmethod
    def tail_read(cls, f, window, position=None):
        """Read backwards from *position* (default: EOF) in BUFSIZ chunks
        until at least *window* line breaks have been collected."""
        BUFSIZ = 1024
        # open() was overridden and file was opened in text
        # mode; read() will return a string instead bytes.
        encoded = getattr(f, 'encoding', False)
        CR = '\n' if encoded else b'\n'
        data = '' if encoded else b''
        f.seek(0, os.SEEK_END)
        if position is None:
            position = f.tell()
        block = -1
        exit = False
        read = BUFSIZ
        while not exit:
            step = (block * BUFSIZ) + position
            if step < 0:
                # Reached the start of the file: read the final partial chunk
                # and stop after this iteration.
                step = 0
                read = ((block + 1) * BUFSIZ) + position
                exit = True
            f.seek(step, os.SEEK_SET)
            newdata = f.read(read)
            data = newdata + data
            if data.count(CR) > window:
                break
            else:
                block -= 1
        return data.splitlines()[-window:]
/Euphorie-15.0.2.tar.gz/Euphorie-15.0.2/src/euphorie/client/resources/oira/script/chunks/30308.112c9fad0e442466d866.min.js | (self.webpackChunk_patternslib_patternslib=self.webpackChunk_patternslib_patternslib||[]).push([[30308],{30308:function(e){function n(e){return function(...e){return e.map((e=>function(e){return e?"string"==typeof e?e:e.source:null}(e))).join("")}("(?=",e,")")}e.exports=function(e){const a={$pattern:/[A-Za-z]\w+|__\w+__/,keyword:["and","as","assert","async","await","break","class","continue","def","del","elif","else","except","finally","for","from","global","if","import","in","is","lambda","nonlocal|10","not","or","pass","raise","return","try","while","with","yield"],built_in:["__import__","abs","all","any","ascii","bin","bool","breakpoint","bytearray","bytes","callable","chr","classmethod","compile","complex","delattr","dict","dir","divmod","enumerate","eval","exec","filter","float","format","frozenset","getattr","globals","hasattr","hash","help","hex","id","input","int","isinstance","issubclass","iter","len","list","locals","map","max","memoryview","min","next","object","oct","open","ord","pow","print","property","range","repr","reversed","round","set","setattr","slice","sorted","staticmethod","str","sum","super","tuple","type","vars","zip"],literal:["__debug__","Ellipsis","False","None","NotImplemented","True"],type:["Any","Callable","Coroutine","Dict","List","Literal","Generic","Optional","Sequence","Set","Tuple","Type","Union"]},i={className:"meta",begin:/^(>>>|\.\.\.) 
/},s={className:"subst",begin:/\{/,end:/\}/,keywords:a,illegal:/#/},t={begin:/\{\{/,relevance:0},r={className:"string",contains:[e.BACKSLASH_ESCAPE],variants:[{begin:/([uU]|[bB]|[rR]|[bB][rR]|[rR][bB])?'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,i],relevance:10},{begin:/([uU]|[bB]|[rR]|[bB][rR]|[rR][bB])?"""/,end:/"""/,contains:[e.BACKSLASH_ESCAPE,i],relevance:10},{begin:/([fF][rR]|[rR][fF]|[fF])'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,i,t,s]},{begin:/([fF][rR]|[rR][fF]|[fF])"""/,end:/"""/,contains:[e.BACKSLASH_ESCAPE,i,t,s]},{begin:/([uU]|[rR])'/,end:/'/,relevance:10},{begin:/([uU]|[rR])"/,end:/"/,relevance:10},{begin:/([bB]|[bB][rR]|[rR][bB])'/,end:/'/},{begin:/([bB]|[bB][rR]|[rR][bB])"/,end:/"/},{begin:/([fF][rR]|[rR][fF]|[fF])'/,end:/'/,contains:[e.BACKSLASH_ESCAPE,t,s]},{begin:/([fF][rR]|[rR][fF]|[fF])"/,end:/"/,contains:[e.BACKSLASH_ESCAPE,t,s]},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},l="[0-9](_?[0-9])*",b=`(\\b(${l}))?\\.(${l})|\\b(${l})\\.`,o={className:"number",relevance:0,variants:[{begin:`(\\b(${l})|(${b}))[eE][+-]?(${l})[jJ]?\\b`},{begin:`(${b})[jJ]?`},{begin:"\\b([1-9](_?[0-9])*|0+(_?0)*)[lLjJ]?\\b"},{begin:"\\b0[bB](_?[01])+[lL]?\\b"},{begin:"\\b0[oO](_?[0-7])+[lL]?\\b"},{begin:"\\b0[xX](_?[0-9a-fA-F])+[lL]?\\b"},{begin:`\\b(${l})[jJ]\\b`}]},c={className:"comment",begin:n(/# type:/),end:/$/,keywords:a,contains:[{begin:/# type:/},{begin:/#/,end:/\b\B/,endsWithParent:!0}]},d={className:"params",variants:[{className:"",begin:/\(\s*\)/,skip:!0},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:a,contains:["self",i,o,r,e.HASH_COMMENT_MODE]}]};return 
s.contains=[r,o,i],{name:"Python",aliases:["py","gyp","ipython"],keywords:a,illegal:/(<\/|->|\?)|=>/,contains:[i,o,{begin:/\bself\b/},{beginKeywords:"if",relevance:0},r,c,e.HASH_COMMENT_MODE,{variants:[{className:"function",beginKeywords:"def"},{className:"class",beginKeywords:"class"}],end:/:/,illegal:/[${=;\n,]/,contains:[e.UNDERSCORE_TITLE_MODE,d,{begin:/->/,endsWithParent:!0,keywords:a}]},{className:"meta",begin:/^[\t ]*@/,end:/(?=#)|$/,contains:[o,d,r]}]}}}}]);
//# sourceMappingURL=30308.112c9fad0e442466d866.min.js.map | PypiClean |
/ConGen-0.0.5.tar.gz/ConGen-0.0.5/src/congen/layers/Layer.py | import numpy as np
from layers.LayerUsage import LayerUsage
from layers.Parameter import ParameterDict, Parameter
class Layer:
    """Base class for a configurable map layer.

    Holds a ParameterDict of user-editable settings plus cached derived data
    (points, pixels, graph, ...) that is invalidated when cache-invalidating
    parameters change.
    """

    def __init__(self, enabled=True, **kwargs):
        # Subclasses may have pre-populated these before calling super().
        if not hasattr(self, "parameters"):
            self.parameters = ParameterDict()
        if not hasattr(self, "dependentLayers"):
            self.dependentLayers = list()
        self.parameters.extend([
            Parameter(name="name", desc="Name of the Layer", parameterType=str, default="New Layer"),
            Parameter(name="seed", desc="Random seed for the layer",
                      parameterType=int, min=0, max=100, default=0, invalidates_cache=True),
            Parameter(name="weight", desc="Weight of the layer", parameterType=int, min=1, max=10, default=5,
                      invalidates_cache=True),
            Parameter(name="res", desc="Resolution of the layer in pixels (affects width and height)",
                      parameterType=int, min=10, max=200, default=100, invalidates_cache=True),
            Parameter(name="xshift", desc="Amount to shift this layer left / right",
                      parameterType=int, min=0, max=100, default=0),
            Parameter(name="yshift", desc="Amount to shift this layer top / down",
                      parameterType=int, min=0, max=100, default=0),
            Parameter(name="layer_usage", desc="Usage of this layer in exported files",
                      parameterType=LayerUsage, min=None, max=None, default=None)
        ])
        self.update(enabled=enabled, **kwargs)
        self.clear_cache()

    def clear_cache(self):
        """Drop all derived data; cascades to layers depending on this one."""
        self.points = None
        self.pixels = None
        self.graph = None
        self.generator = None
        self.importer = None
        for layer in self.dependentLayers:
            layer.clear_cache()

    def update(self, **kwargs):
        """Apply keyword settings to parameters (leftovers go to __dict__).

        Invalidates the cache when any cache-invalidating parameter changes,
        and adds feature-specific parameters when layer_usage becomes FEATURE.
        """
        if any(item in kwargs for item in self.parameters.cache_invalidators):
            self.clear_cache()
        if "layer_usage" in list(kwargs.keys()):
            if kwargs["layer_usage"] == LayerUsage.LAYER_USAGE_FEATURE:
                self.parameters.extend([
                    Parameter(name="target", desc="Minimum amount of the feature to be in the solution",
                              parameterType=int, min=0, max=100, default=0),
                    Parameter(name="prop", desc="Minimum proportion of the feature to be in the solution",
                              parameterType=int, min=0, max=100, default=0)
                ])
        for key in list(kwargs.keys()):
            if key in self.parameters:
                self.parameters[key].value = kwargs.pop(key)
        self.__dict__.update(kwargs)
        # Shifts can never exceed the layer's resolution.
        self.parameters["xshift"].max = self.res
        self.parameters["yshift"].max = self.res

    # Only gets called for attributes that are not found otherwise; resolves
    # them against the parameter table.
    def __getattr__(self, item):
        # FIX: guard against recursive lookup of `parameters` itself (it may
        # be missing during unpickling, and looking it up here would recurse
        # back into __getattr__).
        if item == "parameters":
            raise AttributeError(f"{self.__class__.__name__} has no attribute {item}.")
        try:
            return self.parameters[item].value
        except Exception:
            raise AttributeError(f"{self.__class__.__name__} has no attribute {item}.")

    def get_generator(self):
        """Return (lazily creating) the layer's seeded numpy random generator."""
        if not hasattr(self, "generator") or self.generator is None:
            self.generator = np.random.default_rng(self.seed)
        return self.generator

    def __str__(self):
        return self.name

    def toJSON(self):
        """Return the serialisable parameter set.

        FIX: removed leftover debug print() calls that polluted stdout on
        every serialisation.
        """
        return self.parameters

    def __getstate__(self):
        # Drop caches and dependent-layer links so pickles stay small and
        # free of reference cycles.
        self.clear_cache()
        # FIX: renamed local `dict` (shadowed the builtin) to `state`.
        state = self.__dict__.copy()
        state["dependentLayers"] = []
        return state

    def __setstate__(self, state):
        # FIX: removed leftover debug print() of the layer name.
        self.__dict__.update(state)
        # Re-register with the layer this one depends on, if any.
        if hasattr(self, "dependingLayer") and self.dependingLayer is not None:
            self.dependingLayer.dependentLayers.append(self)
/IQA_pytorch-0.1.tar.gz/IQA_pytorch-0.1/IQA_pytorch/SteerPyrUtils.py | import math
import torch
from torch.autograd import Variable
import numpy as np
def L(r):
    """Low-pass radial window: 2 below pi/4, 0 above pi/2, raised-cosine between."""
    quarter_pi = math.pi / 4
    half_pi = math.pi / 2
    if r <= quarter_pi:
        return 2
    if r >= half_pi:
        return 0
    return 2 * math.cos(half_pi * math.log(4 * r / math.pi) / math.log(2))
def H(r):
    """High-pass radial window: 0 below pi/4, 1 above pi/2, cosine transition."""
    if r >= math.pi / 2:
        return 1
    if r <= math.pi / 4:
        return 0
    return math.cos(math.pi / 2 * math.log(2 * r / math.pi) / math.log(2))
def G(t, k, K):
    """Angular gain for orientation band k of K: normalised cos^(K-1) lobe,
    recentred recursively so it is pi-periodic."""
    center = math.pi * k / K
    norm = 2 ** (K - 1) * math.factorial(K - 1) / math.sqrt(K * math.factorial(2 * (K - 1)))
    offset = t - center
    if offset > math.pi / 2:
        return G(t - math.pi, k, K)
    if offset < -math.pi / 2:
        return G(t + math.pi, k, K)
    return norm * (math.cos(offset)) ** (K - 1)
def S(t, k, K):
    """Sign of orientation band k at angle t: +1 inside the half-width,
    0 exactly on the edge, -1 outside."""
    delta = abs(t - math.pi * k / K)
    half_pi = math.pi / 2
    if delta < half_pi:
        return 1
    if delta == half_pi:
        return 0
    return -1
def L0(r):
    """Low-pass window for the initial (level-0) split."""
    halved = L(r / 2)
    return halved / 2
def H0(r):
    """High-pass window for the initial (level-0) split."""
    return H(r / 2)
def polar_map(s):
    """Return (radius, angle) grids of shape s over [0, pi] x [-pi, pi]."""
    rows, cols = s[0], s[1]
    x = torch.linspace(0, math.pi, cols).view(1, cols).expand(s)
    if rows % 2 == 0:
        # Even height: sample one extra row and drop the first so the grid
        # stays symmetric around zero.
        y = torch.linspace(-math.pi, math.pi, rows + 1).narrow(0, 1, rows)
    else:
        y = torch.linspace(-math.pi, math.pi, rows)
    y = y.view(rows, 1).expand(s).mul(-1)
    radius = (x ** 2 + y ** 2).sqrt()
    angle = torch.atan2(y, x)
    return radius, angle
def S_matrix(K, s):
    """Per-orientation sign maps: out[k, i, j] = S(angle[i, j], k, K)."""
    _, angle = polar_map(s)
    out = torch.Tensor(K, s[0], s[1])
    for k in range(K):
        for row in range(s[0]):
            for col in range(s[1]):
                out[k][row][col] = S(angle[row][col], k, K)
    return out
def G_matrix(K, s):
    """Angular gain maps: out[k, i, j] = G(angle[i, j], k, K)."""
    _, angle = polar_map(s)
    out = torch.Tensor(K, s[0], s[1])
    for k in range(K):
        for row in range(s[0]):
            for col in range(s[1]):
                out[k][row][col] = G(angle[row][col], k, K)
    return out
def B_matrix(K, s):
    """Oriented band-pass masks: high-pass radial window times each angular gain."""
    angular = G_matrix(K, s)
    radius, _ = polar_map(s)
    highpass = radius.apply_(H).unsqueeze(0)
    return highpass * angular
def L_matrix(s):
    """Low-pass radial mask of shape s (applies L element-wise, in place)."""
    radius, _ = polar_map(s)
    return radius.apply_(L)
def LB_matrix(K, s):
    """Stack the low-pass mask with the K band-pass masks: shape (K+1, *s)."""
    lowpass = L_matrix(s).unsqueeze(0)
    bands = B_matrix(K, s)
    return torch.cat((lowpass, bands), 0)
def HL0_matrix(s):
    """Stack the level-0 high-pass and low-pass masks into a (2, *s) tensor."""
    radius, _ = polar_map(s)
    high = radius.clone().apply_(H0).view(1, s[0], s[1])
    low = radius.clone().apply_(L0).view(1, s[0], s[1])
    return torch.cat((high, low), 0)
def central_crop(x):
    """Crop a full-frequency map down to its central half-plane block."""
    rows = x.size(-2) // 2
    cols = x.size(-1) // 2 + 1
    return x.narrow(-2, cols - 1, rows).narrow(-1, 0, cols)
def cropped_size(s):
    """Spatial size produced by central_crop for an input of size s."""
    return [s[0] // 2, s[1] // 2 + 1]
def L_matrix_cropped(s):
    """Low-pass mask restricted to the centrally cropped half-plane."""
    full = L_matrix(s)
    target = cropped_size(s)
    return full.narrow(0, target[1] - 1, target[0]).narrow(1, 0, target[1])
def freq_shift(imgSize, fwd, device):
    """Index permutation implementing an fftshift (fwd=True) or its inverse."""
    indices = torch.LongTensor(imgSize).to(device)
    half = (imgSize - 1) // 2
    shift = -half if fwd else half
    for i in range(imgSize):
        indices[i] = (i + shift) % imgSize
    return Variable(indices)
##########
def sp5_filters():
    """Hard-coded filter bank for a 5th-order (6-orientation) steerable pyramid.

    Returns a dict with keys (all numpy arrays of fixed tap weights):
      - 'harmonics': angular harmonics present in the basis ([1, 3, 5])
      - 'mtx': 6x6 steering matrix combining basis responses
      - 'hi0filt' / 'lo0filt': initial high-/low-pass 2-D kernels
      - 'lofilt': low-pass kernel applied between pyramid levels
      - 'bfilts': oriented band-pass kernels (transposed so each column
        holds one orientation's flattened kernel)
    """
    filters = {}
    filters['harmonics'] = np.array([1, 3, 5])
    filters['mtx'] = (
np.array([[0.3333, 0.2887, 0.1667, 0.0000, -0.1667, -0.2887],
[0.0000, 0.1667, 0.2887, 0.3333, 0.2887, 0.1667],
[0.3333, -0.0000, -0.3333, -0.0000, 0.3333, -0.0000],
[0.0000, 0.3333, 0.0000, -0.3333, 0.0000, 0.3333],
[0.3333, -0.2887, 0.1667, -0.0000, -0.1667, 0.2887],
[-0.0000, 0.1667, -0.2887, 0.3333, -0.2887, 0.1667]]))
filters['hi0filt'] = (
np.array([[-0.00033429, -0.00113093, -0.00171484,
-0.00133542, -0.00080639, -0.00133542,
-0.00171484, -0.00113093, -0.00033429],
[-0.00113093, -0.00350017, -0.00243812,
0.00631653, 0.01261227, 0.00631653,
-0.00243812, -0.00350017, -0.00113093],
[-0.00171484, -0.00243812, -0.00290081,
-0.00673482, -0.00981051, -0.00673482,
-0.00290081, -0.00243812, -0.00171484],
[-0.00133542, 0.00631653, -0.00673482,
-0.07027679, -0.11435863, -0.07027679,
-0.00673482, 0.00631653, -0.00133542],
[-0.00080639, 0.01261227, -0.00981051,
-0.11435863, 0.81380200, -0.11435863,
-0.00981051, 0.01261227, -0.00080639],
[-0.00133542, 0.00631653, -0.00673482,
-0.07027679, -0.11435863, -0.07027679,
-0.00673482, 0.00631653, -0.00133542],
[-0.00171484, -0.00243812, -0.00290081,
-0.00673482, -0.00981051, -0.00673482,
-0.00290081, -0.00243812, -0.00171484],
[-0.00113093, -0.00350017, -0.00243812,
0.00631653, 0.01261227, 0.00631653,
-0.00243812, -0.00350017, -0.00113093],
[-0.00033429, -0.00113093, -0.00171484,
-0.00133542, -0.00080639, -0.00133542,
-0.00171484, -0.00113093, -0.00033429]]))
filters['lo0filt'] = (
np.array([[0.00341614, -0.01551246, -0.03848215, -0.01551246,
0.00341614],
[-0.01551246, 0.05586982, 0.15925570, 0.05586982,
-0.01551246],
[-0.03848215, 0.15925570, 0.40304148, 0.15925570,
-0.03848215],
[-0.01551246, 0.05586982, 0.15925570, 0.05586982,
-0.01551246],
[0.00341614, -0.01551246, -0.03848215, -0.01551246,
0.00341614]]))
filters['lofilt'] = (
2 * np.array([[0.00085404, -0.00244917, -0.00387812, -0.00944432,
-0.00962054, -0.00944432, -0.00387812, -0.00244917,
0.00085404],
[-0.00244917, -0.00523281, -0.00661117, 0.00410600,
0.01002988, 0.00410600, -0.00661117, -0.00523281,
-0.00244917],
[-0.00387812, -0.00661117, 0.01396746, 0.03277038,
0.03981393, 0.03277038, 0.01396746, -0.00661117,
-0.00387812],
[-0.00944432, 0.00410600, 0.03277038, 0.06426333,
0.08169618, 0.06426333, 0.03277038, 0.00410600,
-0.00944432],
[-0.00962054, 0.01002988, 0.03981393, 0.08169618,
0.10096540, 0.08169618, 0.03981393, 0.01002988,
-0.00962054],
[-0.00944432, 0.00410600, 0.03277038, 0.06426333,
0.08169618, 0.06426333, 0.03277038, 0.00410600,
-0.00944432],
[-0.00387812, -0.00661117, 0.01396746, 0.03277038,
0.03981393, 0.03277038, 0.01396746, -0.00661117,
-0.00387812],
[-0.00244917, -0.00523281, -0.00661117, 0.00410600,
0.01002988, 0.00410600, -0.00661117, -0.00523281,
-0.00244917],
[0.00085404, -0.00244917, -0.00387812, -0.00944432,
-0.00962054, -0.00944432, -0.00387812, -0.00244917,
0.00085404]]))
filters['bfilts'] = (
np.array([[0.00277643, 0.00496194, 0.01026699, 0.01455399, 0.01026699,
0.00496194, 0.00277643, -0.00986904, -0.00893064,
0.01189859, 0.02755155, 0.01189859, -0.00893064,
-0.00986904, -0.01021852, -0.03075356, -0.08226445,
-0.11732297, -0.08226445, -0.03075356, -0.01021852,
0.00000000, 0.00000000, 0.00000000, 0.00000000, 0.00000000,
0.00000000, 0.00000000, 0.01021852, 0.03075356, 0.08226445,
0.11732297, 0.08226445, 0.03075356, 0.01021852, 0.00986904,
0.00893064, -0.01189859, -0.02755155, -0.01189859,
0.00893064, 0.00986904, -0.00277643, -0.00496194,
-0.01026699, -0.01455399, -0.01026699, -0.00496194,
-0.00277643],
[-0.00343249, -0.00640815, -0.00073141, 0.01124321,
0.00182078, 0.00285723, 0.01166982, -0.00358461,
-0.01977507, -0.04084211, -0.00228219, 0.03930573,
0.01161195, 0.00128000, 0.01047717, 0.01486305,
-0.04819057, -0.12227230, -0.05394139, 0.00853965,
-0.00459034, 0.00790407, 0.04435647, 0.09454202,
-0.00000000, -0.09454202, -0.04435647, -0.00790407,
0.00459034, -0.00853965, 0.05394139, 0.12227230,
0.04819057, -0.01486305, -0.01047717, -0.00128000,
-0.01161195, -0.03930573, 0.00228219, 0.04084211,
0.01977507, 0.00358461, -0.01166982, -0.00285723,
-0.00182078, -0.01124321, 0.00073141, 0.00640815,
0.00343249],
[0.00343249, 0.00358461, -0.01047717, -0.00790407,
-0.00459034, 0.00128000, 0.01166982, 0.00640815,
0.01977507, -0.01486305, -0.04435647, 0.00853965,
0.01161195, 0.00285723, 0.00073141, 0.04084211, 0.04819057,
-0.09454202, -0.05394139, 0.03930573, 0.00182078,
-0.01124321, 0.00228219, 0.12227230, -0.00000000,
-0.12227230, -0.00228219, 0.01124321, -0.00182078,
-0.03930573, 0.05394139, 0.09454202, -0.04819057,
-0.04084211, -0.00073141, -0.00285723, -0.01161195,
-0.00853965, 0.04435647, 0.01486305, -0.01977507,
-0.00640815, -0.01166982, -0.00128000, 0.00459034,
0.00790407, 0.01047717, -0.00358461, -0.00343249],
[-0.00277643, 0.00986904, 0.01021852, -0.00000000,
-0.01021852, -0.00986904, 0.00277643, -0.00496194,
0.00893064, 0.03075356, -0.00000000, -0.03075356,
-0.00893064, 0.00496194, -0.01026699, -0.01189859,
0.08226445, -0.00000000, -0.08226445, 0.01189859,
0.01026699, -0.01455399, -0.02755155, 0.11732297,
-0.00000000, -0.11732297, 0.02755155, 0.01455399,
-0.01026699, -0.01189859, 0.08226445, -0.00000000,
-0.08226445, 0.01189859, 0.01026699, -0.00496194,
0.00893064, 0.03075356, -0.00000000, -0.03075356,
-0.00893064, 0.00496194, -0.00277643, 0.00986904,
0.01021852, -0.00000000, -0.01021852, -0.00986904,
0.00277643],
[-0.01166982, -0.00128000, 0.00459034, 0.00790407,
0.01047717, -0.00358461, -0.00343249, -0.00285723,
-0.01161195, -0.00853965, 0.04435647, 0.01486305,
-0.01977507, -0.00640815, -0.00182078, -0.03930573,
0.05394139, 0.09454202, -0.04819057, -0.04084211,
-0.00073141, -0.01124321, 0.00228219, 0.12227230,
-0.00000000, -0.12227230, -0.00228219, 0.01124321,
0.00073141, 0.04084211, 0.04819057, -0.09454202,
-0.05394139, 0.03930573, 0.00182078, 0.00640815,
0.01977507, -0.01486305, -0.04435647, 0.00853965,
0.01161195, 0.00285723, 0.00343249, 0.00358461,
-0.01047717, -0.00790407, -0.00459034, 0.00128000,
0.01166982],
[-0.01166982, -0.00285723, -0.00182078, -0.01124321,
0.00073141, 0.00640815, 0.00343249, -0.00128000,
-0.01161195, -0.03930573, 0.00228219, 0.04084211,
0.01977507, 0.00358461, 0.00459034, -0.00853965,
0.05394139, 0.12227230, 0.04819057, -0.01486305,
-0.01047717, 0.00790407, 0.04435647, 0.09454202,
-0.00000000, -0.09454202, -0.04435647, -0.00790407,
0.01047717, 0.01486305, -0.04819057, -0.12227230,
-0.05394139, 0.00853965, -0.00459034, -0.00358461,
-0.01977507, -0.04084211, -0.00228219, 0.03930573,
0.01161195, 0.00128000, -0.00343249, -0.00640815,
-0.00073141, 0.01124321, 0.00182078, 0.00285723,
0.01166982]]).T)
return filters | PypiClean |
/DobotRPC-4.8.8.tar.gz/DobotRPC-4.8.8/README.rst | ***This version is relevant for Dobotlink 5.0.0 and python3.5+***
DobotRPC is a dobotlink communication module based on websocket and
JSON-RPC . It provides python ports to communicate with dobotlink and
allows developers to communicate with the GUI.
APIS
-----
- RPCClient: RPCClient is a class that allows users to instantiate their own client,
which uses WebSocket protocol to connect to a target IP and port, and then uses JSON format for data communication.
   - set_ip: Available in version 4.7.4 and above. Sets the target IP address and port.
- RPCServer: RPCServer is a class that allows users to instantiate their own server,
which waits for a client to connect using the WebSocket protocol, and then uses JSON format for data communication.
The default port number is 9091.
- loggers: Provide an instantiated loggers interface. The user can call the following interfaces to do the configuration.
- set_filename: Set log file name.
- set_level: Set the log output level.
- set_use_console: Set log output to the console.
- set_use_file: Set log output to a file.
- DobotlinkAdapter: Provides an adapter for DobotLink RPC communication.
- NormalAdapter: Provides an adapter for normal RPC communication
Examples
--------
- Users can communicate synchronously or asynchronously.The
asynchronous mode is as follows:
```python
# Async demo
from DobotRPC import DobotlinkAdapter, RPCClient, loggers
# The asyncio module provides infrastructure for writing single-threaded concurrent code using coroutines, multiplexing I/O access over sockets and other resources, running network clients and servers, and other related primitives.
import asyncio
# Coroutines function
async def main(dobotlink_async):
# Display information with Dobotlink
await dobotlink_async.api.ShowMessage(title="Async Demo Message",
message="Async Demo is running.")
# Search for available ports
res = await dobotlink_async.Magician.SearchDobot()
# Get ports
if len(res) < 1:
return
port_name = res[0]["portName"]
# Connect
await dobotlink_async.Magician.ConnectDobot(portName=port_name)
# PTP
await dobotlink_async.Magician.SetPTPCmd(portName=port_name,
ptpMode=0,
x=230,
y=50,
z=0,
r=20)
# Disconnect
await dobotlink_async.Magician.DisconnectDobot(portName=port_name,
queueStop=True,
queueClear=True)
if __name__ == "__main__":
loggers.set_level(loggers.DEBUG)
# Get the Eventloop reference
loop = asyncio.get_event_loop()
# Initializes, connects to dobotlink, and is executed before the Loop runs
dobotlink_async = DobotlinkAdapter(RPCClient(loop=loop), is_sync=False)
# Perform coroutines
loop.run_until_complete(main(dobotlink_async))
```
- The synchronization mode is as follows:
```python
# Sync Demo
from DobotRPC import RPCClient, DobotlinkAdapter, loggers
def main(dobotlink_sync):
# Display information with Dobotlink
dobotlink_sync.api.ShowMessage(title="Sync Demo Message",
message="Sync Demo is running.")
# Search for available ports
res = dobotlink_sync.Magician.SearchDobot()
# Get ports
if len(res) < 1:
return
port_name = res[0]["portName"]
# Connect
dobotlink_sync.Magician.ConnectDobot(portName=port_name)
# PTP
dobotlink_sync.Magician.SetPTPCmd(portName=port_name,
ptpMode=0,
x=230,
y=50,
z=0,
r=20)
# Disconnect
dobotlink_sync.Magician.DisconnectDobot(portName=port_name)
if __name__ == "__main__":
loggers.set_level(loggers.DEBUG)
# Initialize, connect to dobotlink
dobotlink_sync = DobotlinkAdapter(RPCClient(), is_sync=True)
main(dobotlink_sync)
```
Installation
------------
To install DobotRPC, type:
```python
pip install DobotRPC
```
DobotRPC is free software distributed under the Apache license.
Usage
-----
- Users can use the API:
   loggers, RPCClient, DobotlinkAdapter, NetworkError, client, api
- Install Dobotlink [32bit](https://cdn.dobotlab.dobot.cc/release/DobotLinkSetup_32.exe) [64bit](https://cdn.dobotlab.dobot.cc/release/DobotLinkSetup_64.exe)
- Right-click the Dobotlink icon and click ``help``, pop up a
``Dobotlink help documentation``.
- You can guide by ``examples``, reference the
``Dobotlink help documentation``.
- Then go ahead and develop your first python script. | PypiClean |
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojox/highlight/languages/pygments/javascript.js | if(!dojo._hasResource["dojox.highlight.languages.pygments.javascript"]){
// Flag checked by the surrounding double-load guard of the Dojo build system.
dojo._hasResource["dojox.highlight.languages.pygments.javascript"]=true;
dojo.provide("dojox.highlight.languages.pygments.javascript");
dojo.require("dojox.highlight._base");
// Registers a Pygments-style JavaScript lexer definition (keywords, literal
// classes, and comment/string/number/regex sub-modes) on dojox.highlight.
(function(){
var dh=dojox.highlight,_1=dh.constants;
dh.languages.javascript={defaultMode:{lexems:["\\b[a-zA-Z]+"],keywords:{"keyword":{"for":1,"in":1,"while":1,"do":1,"break":1,"return":1,"continue":1,"if":1,"else":1,"throw":1,"try":1,"catch":1,"var":1,"with":1,"const":1,"label":1,"function":1,"new":1,"typeof":1,"instanceof":1},"keyword constant":{"true":1,"false":1,"null":1,"NaN":1,"Infinity":1,"undefined":1},"name builtin":{"Array":1,"Boolean":1,"Date":1,"Error":1,"Function":1,"Math":1,"netscape":1,"Number":1,"Object":1,"Packages":1,"RegExp":1,"String":1,"sun":1,"decodeURI":1,"decodeURIComponent":1,"encodeURI":1,"encodeURIComponent":1,"Error":1,"eval":1,"isFinite":1,"isNaN":1,"parseFloat":1,"parseInt":1,"document":1,"window":1},"name builtin pseudo":{"this":1}},contains:["comment single","comment multiline","number integer","number oct","number hex","number float","string single","string double","string regex","operator","punctuation","_function"]},modes:[{className:"comment single",begin:"//",end:"$",relevance:0},{className:"comment multiline",begin:"/\\*",end:"\\*/"},{className:"number integer",begin:"0|([1-9][0-9]*)",end:"^",relevance:0},{className:"number oct",begin:"0[0-9]+",end:"^",relevance:0},{className:"number hex",begin:"0x[0-9a-fA-F]+",end:"^",relevance:0},{className:"number float",begin:"([1-9][0-9]*\\.[0-9]*([eE][\\+-]?[0-9]+)?)|(\\.[0-9]+([eE][\\+-]?[0-9]+)?)|([0-9]+[eE][\\+-]?[0-9]+)",end:"^",relevance:0},{className:"string single",begin:"'",end:"'",illegal:"\\n",contains:["string escape"],relevance:0},{className:"string double",begin:"\"",end:"\"",illegal:"\\n",contains:["string escape"],relevance:0},{className:"string escape",begin:"\\\\.",end:"^",relevance:0},{className:"string 
regex",begin:"/.*?[^\\\\/]/[gim]*",end:"^"},{className:"operator",begin:"\\|\\||&&|\\+\\+|--|-=|\\+=|/=|\\*=|==|[-\\+\\*/=\\?:~\\^]",end:"^",relevance:0},{className:"punctuation",begin:"[{}\\(\\)\\[\\]\\.;]",end:"^",relevance:0},{className:"_function",begin:"function\\b",end:"{",lexems:[_1.UNDERSCORE_IDENT_RE],keywords:{keyword:{"function":1}},contains:["name function","_params"],relevance:5},{className:"name function",begin:_1.UNDERSCORE_IDENT_RE,end:"^"},{className:"_params",begin:"\\(",end:"\\)",contains:["comment single","comment multiline"]}]};
})();
} | PypiClean |
/DjangoDjangoAppCenter-0.0.11-py3-none-any.whl/DjangoAppCenter/simpleui/static/admin/simpleui-x/elementui/dropdown-menu.js | module.exports =
/******/ (function (modules) { // webpackBootstrap
/******/ // The module cache
/******/
var installedModules = {};
/******/
/******/ // The require function
/******/
function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/
if (installedModules[moduleId]) {
/******/
return installedModules[moduleId].exports;
/******/
}
/******/ // Create a new module (and put it into the cache)
/******/
var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/
};
/******/
/******/ // Execute the module function
/******/
modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/
module.l = true;
/******/
/******/ // Return the exports of the module
/******/
return module.exports;
/******/
}
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/
__webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/
__webpack_require__.c = installedModules;
/******/
/******/ // define getter function for harmony exports
/******/
__webpack_require__.d = function (exports, name, getter) {
/******/
if (!__webpack_require__.o(exports, name)) {
/******/
Object.defineProperty(exports, name, {enumerable: true, get: getter});
/******/
}
/******/
};
/******/
/******/ // define __esModule on exports
/******/
__webpack_require__.r = function (exports) {
/******/
if (typeof Symbol !== 'undefined' && Symbol.toStringTag) {
/******/
Object.defineProperty(exports, Symbol.toStringTag, {value: 'Module'});
/******/
}
/******/
Object.defineProperty(exports, '__esModule', {value: true});
/******/
};
/******/
/******/ // create a fake namespace object
/******/ // mode & 1: value is a module id, require it
/******/ // mode & 2: merge all properties of value into the ns
/******/ // mode & 4: return value when already ns object
/******/ // mode & 8|1: behave like require
/******/
__webpack_require__.t = function (value, mode) {
/******/
if (mode & 1) value = __webpack_require__(value);
/******/
if (mode & 8) return value;
/******/
if ((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
/******/
var ns = Object.create(null);
/******/
__webpack_require__.r(ns);
/******/
Object.defineProperty(ns, 'default', {enumerable: true, value: value});
/******/
if (mode & 2 && typeof value != 'string') for (var key in value) __webpack_require__.d(ns, key, function (key) {
return value[key];
}.bind(null, key));
/******/
return ns;
/******/
};
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/
__webpack_require__.n = function (module) {
/******/
var getter = module && module.__esModule ?
/******/ function getDefault() {
return module['default'];
} :
/******/ function getModuleExports() {
return module;
};
/******/
__webpack_require__.d(getter, 'a', getter);
/******/
return getter;
/******/
};
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/
__webpack_require__.o = function (object, property) {
return Object.prototype.hasOwnProperty.call(object, property);
};
/******/
/******/ // __webpack_public_path__
/******/
__webpack_require__.p = "/dist/";
/******/
/******/
/******/ // Load entry module and return exports
/******/
return __webpack_require__(__webpack_require__.s = 78);
/******/
})
/************************************************************************/
/******/({
/***/ 0:
/***/ (function (module, __webpack_exports__, __webpack_require__) {
"use strict";
/* harmony export (binding) */
__webpack_require__.d(__webpack_exports__, "a", function () {
return normalizeComponent;
});
/* globals __VUE_SSR_CONTEXT__ */
// IMPORTANT: Do NOT use ES2015 features in this file (except for modules).
// This module is a runtime utility for cleaner component module output and will
// be included in the final webpack user bundle.
function normalizeComponent(
scriptExports,
render,
staticRenderFns,
functionalTemplate,
injectStyles,
scopeId,
moduleIdentifier, /* server only */
shadowMode /* vue-cli only */
) {
// Vue.extend constructor export interop
var options = typeof scriptExports === 'function'
? scriptExports.options
: scriptExports
// render functions
if (render) {
options.render = render
options.staticRenderFns = staticRenderFns
options._compiled = true
}
// functional template
if (functionalTemplate) {
options.functional = true
}
// scopedId
if (scopeId) {
options._scopeId = 'data-v-' + scopeId
}
var hook
if (moduleIdentifier) { // server build
hook = function (context) {
// 2.3 injection
context =
context || // cached call
(this.$vnode && this.$vnode.ssrContext) || // stateful
(this.parent && this.parent.$vnode && this.parent.$vnode.ssrContext) // functional
// 2.2 with runInNewContext: true
if (!context && typeof __VUE_SSR_CONTEXT__ !== 'undefined') {
context = __VUE_SSR_CONTEXT__
}
// inject component styles
if (injectStyles) {
injectStyles.call(this, context)
}
// register component module identifier for async chunk inferrence
if (context && context._registeredComponents) {
context._registeredComponents.add(moduleIdentifier)
}
}
// used by ssr in case component is cached and beforeCreate
// never gets called
options._ssrRegister = hook
} else if (injectStyles) {
hook = shadowMode
? function () {
injectStyles.call(this, this.$root.$options.shadowRoot)
}
: injectStyles
}
if (hook) {
if (options.functional) {
// for template-only hot-reload because in that case the render fn doesn't
// go through the normalizer
options._injectStyles = hook
// register for functioal component in vue file
var originalRender = options.render
options.render = function renderWithStyleInjection(h, context) {
hook.call(context)
return originalRender(h, context)
}
} else {
// inject component registration as beforeCreate hook
var existing = options.beforeCreate
options.beforeCreate = existing
? [].concat(existing, hook)
: [hook]
}
}
return {
exports: scriptExports,
options: options
}
}
/***/
}),
/***/ 5:
/***/ (function (module, exports) {
module.exports = require("element-ui/lib/utils/vue-popper");
/***/
}),
/***/ 78:
/***/ (function (module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
// CONCATENATED MODULE: ./node_modules/_vue-loader@15.7.1@vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/_vue-loader@15.7.1@vue-loader/lib??vue-loader-options!./packages/dropdown/src/dropdown-menu.vue?vue&type=template&id=0da6b714&
var render = function () {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c(
"transition",
{attrs: {name: "el-zoom-in-top"}, on: {"after-leave": _vm.doDestroy}},
[
_c(
"ul",
{
directives: [
{
name: "show",
rawName: "v-show",
value: _vm.showPopper,
expression: "showPopper"
}
],
staticClass: "el-dropdown-menu el-popper",
class: [_vm.size && "el-dropdown-menu--" + _vm.size]
},
[_vm._t("default")],
2
)
]
)
}
var staticRenderFns = []
render._withStripped = true
// CONCATENATED MODULE: ./packages/dropdown/src/dropdown-menu.vue?vue&type=template&id=0da6b714&
// EXTERNAL MODULE: external "element-ui/lib/utils/vue-popper"
var vue_popper_ = __webpack_require__(5);
var vue_popper_default = /*#__PURE__*/__webpack_require__.n(vue_popper_);
// CONCATENATED MODULE: ./node_modules/_babel-loader@7.1.5@babel-loader/lib!./node_modules/_vue-loader@15.7.1@vue-loader/lib??vue-loader-options!./packages/dropdown/src/dropdown-menu.vue?vue&type=script&lang=js&
//
//
//
//
//
//
//
/* harmony default export */
var dropdown_menuvue_type_script_lang_js_ = ({
name: 'ElDropdownMenu',
componentName: 'ElDropdownMenu',
mixins: [vue_popper_default.a],
props: {
visibleArrow: {
type: Boolean,
default: true
},
arrowOffset: {
type: Number,
default: 0
}
},
    // Snapshot the injected parent dropdown's size once at creation so the
    // template can apply the matching "el-dropdown-menu--<size>" modifier.
    data: function data() {
        return {
            size: this.dropdown.dropdownSize
        };
    },
inject: ['dropdown'],
created: function created() {
var _this = this;
this.$on('updatePopper', function () {
if (_this.showPopper) _this.updatePopper();
});
this.$on('visible', function (val) {
_this.showPopper = val;
});
},
    mounted: function mounted() {
        // Wire up vue-popper: this menu element is the popper, the parent
        // dropdown's root element is the positioning reference.
        this.dropdown.popperElm = this.popperElm = this.$el;
        this.referenceElm = this.dropdown.$el;
        // compatible with 2.6 new v-slot syntax
        // issue link https://github.com/ElemeFE/element/issues/14345
        this.dropdown.initDomOperation();
    },
watch: {
'dropdown.placement': {
immediate: true,
handler: function handler(val) {
this.currentPlacement = val;
}
}
}
});
// CONCATENATED MODULE: ./packages/dropdown/src/dropdown-menu.vue?vue&type=script&lang=js&
/* harmony default export */
var src_dropdown_menuvue_type_script_lang_js_ = (dropdown_menuvue_type_script_lang_js_);
// EXTERNAL MODULE: ./node_modules/_vue-loader@15.7.1@vue-loader/lib/runtime/componentNormalizer.js
var componentNormalizer = __webpack_require__(0);
// CONCATENATED MODULE: ./packages/dropdown/src/dropdown-menu.vue
/* normalize component */
var component = Object(componentNormalizer["a" /* default */])(
src_dropdown_menuvue_type_script_lang_js_,
render,
staticRenderFns,
false,
null,
null,
null
)
/* hot reload */
if (false) {
var api;
}
component.options.__file = "packages/dropdown/src/dropdown-menu.vue"
/* harmony default export */
var dropdown_menu = (component.exports);
// CONCATENATED MODULE: ./packages/dropdown-menu/index.js
/* istanbul ignore next */
dropdown_menu.install = function (Vue) {
Vue.component(dropdown_menu.name, dropdown_menu);
};
/* harmony default export */
var packages_dropdown_menu = __webpack_exports__["default"] = (dropdown_menu);
/***/
})
/******/
}); | PypiClean |
/Finance-Jindowin-1.3.3.tar.gz/Finance-Jindowin-1.3.3/jdw/mfc/entropy/catalyst/geneticist/stock.py | from jdw.data.SurfaceAPI.stock.yields import StkYields
from jdw.data.SurfaceAPI.stock.factors import StkFactors
from jdw.data.SurfaceAPI.universe import StkUniverse
from jdw.data.SurfaceAPI.stock.industry import Industry
from jdw.data.SurfaceAPI.dummy import Dummy
from jdw.mfc.entropy.catalyst.geneticist.base import Base
class StockGeneticist(Base):
def __init__(self,
offset,
horizon,
factor_columns,
universe,
industry_name,
industry_level,
dummy_name=None,
is_loop=False,
operators=None,
factors_normal=True,
callback_save=None,
yield_name='returns'):
super(StockGeneticist, self).__init__(offset=offset,
horizon=horizon,
factor_columns=factor_columns,
universe=universe,
dummy_name=dummy_name,
yields_class=StkYields,
universe_class=StkUniverse,
dummy_class=Dummy,
industry_class=Industry,
factors_class=StkFactors,
industry_name=industry_name,
industry_level=industry_level,
operators=operators,
factors_normal=factors_normal,
callback_save=callback_save,
is_loop=is_loop,
yield_name=yield_name)
    def industry_fillna(self, industry_data, factors_data):
        """Attach industry labels to the factor frame and apply the base
        class's industry-median transform; returns the resulting frame.
        """
        # Join each stock's industry classification onto its factor rows.
        factors_data = factors_data.merge(industry_data,
                                          on=['trade_date', 'code'])
        # industry_median is defined on the Base class (not visible here);
        # presumably imputes missing values with industry-wise medians -- TODO confirm.
        factors_data = self.industry_median(factors_data)
return factors_data | PypiClean |
/Bambanta-0.1.4.tar.gz/Bambanta-0.1.4/README.md | [](https://travis-ci.org/CS207-Project-Group-9/cs207-FinalProject)
[](https://coveralls.io/github/CS207-Project-Group-9/cs207-FinalProject?branch=master)
# Bambanta
### CS207 Project Group 9:
**Authors:** Qiansha (Karina) Huang, Rong Liu, Rory Maizels
**Project Page:** https://pypi.org/project/Bambanta/
This automatic differentiation package `Bambanta` performs both forward-mode and reverse-mode automatic differentiation. For details about the project, please consult the [documentation](https://github.com/CS207-Project-Group-9/cs207-FinalProject/blob/master/docs/Final.ipynb).
### Installation Guide
We recommend installing our package in a virtual environment. Please ensure that a version of `virtualenv` is already installed in your machine, and follow the steps:
```
virtualenv env
source env/bin/activate
pip install Bambanta
```
### Testing `Bambanta`
Our module may be tested by running `pytest` on `test_AutoDiff.py`, or by running `doctest` on the examples in `AutoDiff.py`.
| PypiClean |
/LOCATE_model-0.0.3-py3-none-any.whl/LOCATE/LOCATE_model.py | import torch
import pytorch_lightning as pl
from torch import nn
from torch.nn import functional as F
class GeneralModel(pl.LightningModule):
"""
LOCATE class
"""
def __init__(self, train_metab, train_micro, input_size, representation_size, weight_decay_rep=0.999,
weight_decay_dis=0.999, lr_rep=0.001, lr_dis=0.001, rep_coef=1.0, dis_coef=0.0, activation_rep="relu",
activation_dis="relu", neurons=50, neurons2=50, dropout=0.0):
"""
Initializes the model with the model hyperparameters.
:param train_metab: Known metabolites of the training for data we have both microbiome and metabolites (Tensor)
:param train_micro: Tensor of the microbial features (Tensor)
:param input_size: train_micro.shape[1] (int)
:param representation_size: Size of the intermediate representstion z, (int)
:param weight_decay_rep: L2 regularization coefficient of the representation network (float)
:param weight_decay_dis: L2 regularization of the optional discriminator, is not used in the paper (float)
:param lr_rep: Leaning rate of the representation network (float)
:param lr_dis: Learning rate of the optional discriminator network, is not used in the paper (float)
:param rep_coef: Weight of the loss upgrades of the representation network, is set to 1, when no discriminator is used (float)
:param dis_coef: Weight of the loss upgrades of the discriminator network, is set to 0, when no discriminator is used (float)
:param activation_rep: Activation function of the representation network, one of: {relu,elu,tanh}
:param activation_dis: Activation function of the discriminator network, one of: {relu,elu,tanh}
:param neurons: Number of neurons in the first layer of the representation network (int)
:param neurons2: Number of neurons in the second layer of the representation network (int)
:param dropout: Dropout parameter (float)
"""
super().__init__()
self.neurons = neurons
self.neurons2 = neurons2
if activation_rep == "relu":
self.activation_rep = nn.ReLU
elif activation_rep == "elu":
self.activation_rep = nn.ELU
elif activation_rep == "tanh":
self.activation_rep = nn.Tanh
if activation_dis == "relu":
self.activation_dis = nn.ReLU
elif activation_dis == "elu":
self.activation_dis = nn.ELU
elif activation_dis == "tanh":
self.activation_dis = nn.Tanh
self.linear_representation = nn.Sequential(nn.Linear(input_size, representation_size),
self.activation_rep(),
nn.Dropout(dropout),
nn.Linear(representation_size, self.neurons),
self.activation_rep(),
nn.Dropout(dropout),
nn.Linear(self.neurons, representation_size)
)
self.discriminator = nn.Sequential(nn.Linear(representation_size, self.neurons2), self.activation_dis(),
nn.Linear(self.neurons2, 1), nn.Sigmoid())
self.train_metab = train_metab
self.train_micro = train_micro
self.weight_decay_rep = weight_decay_rep
self.weight_decay_dis = weight_decay_dis
self.lr_rep = lr_rep
self.lr_dis = lr_dis
self.find_transformer(self.linear_representation(train_micro))
self.rep_coef = rep_coef
self.dis_coef = dis_coef
def find_transformer(self, Z):
"""
Finds the approximated A* to relate the intermediate representation of the microbiome to the training metabolites
:param Z: Intermediate representation of the microbiome
:return: Approximated A*
"""
X = torch.linalg.lstsq(Z, self.train_metab)
a, b, c = torch.svd_lowrank(X.solution, q=min(6, self.train_metab.shape[1]))
b = torch.diag(b)
self.X = a @ b @ c.T
def forward(self, micro, train=False):
"""
LOCATE's forward function
:param micro: microbial features (Tensor)
:param train: Binary mode (True = training mode, False = test mode)
:return: Predicted metaolites and the intermediate representation
"""
Z = self.linear_representation(micro)
if train:
self.find_transformer(Z)
metab = Z @ self.X
return metab, Z
def configure_optimizers(self):
optimizer_g = torch.optim.Adam(self.linear_representation.parameters(), lr=self.lr_rep,
weight_decay=self.weight_decay_rep)
optimizer_d = torch.optim.Adam(self.discriminator.parameters(), lr=self.lr_dis,
weight_decay=self.weight_decay_dis)
return [optimizer_g, optimizer_d]
def loss_g(self, metab, y, mode="valid"):
loss = torch.tensor(0., requires_grad=True)
loss = loss + F.mse_loss(metab, y)
self.log(f"mse loss {mode}", loss, prog_bar=True)
return loss
def training_step(self, train_batch, batch_idx, optimizer_idx):
metab, Z = self.forward(self.train_micro, train=True)
if optimizer_idx == 0:
return self.loss_g(metab, self.train_metab, mode="train")
def validation_step(self, train_batch, batch_idx):
x, y, cond = train_batch
metab, Z = self.forward(x)
return self.loss_g(metab, y)
    def backward(self, loss, optimizer, optimizer_idx, *args, **kwargs):
        # retain_graph=True: presumably required because the same computation
        # graph is reused across the two optimizer steps Lightning runs per
        # batch — TODO confirm against the Lightning version in use.
        loss.backward(retain_graph=True)
/Mathics_Django-6.0.0-py3-none-any.whl/mathics_django/web/media/js/mathjax/jax/output/HTML-CSS/fonts/STIX/General/Italic/Main.js | MathJax.OutputJax["HTML-CSS"].FONTDATA.FONTS["STIXGeneral-italic"]={directory:"General/Italic",family:"STIXGeneral",style:"italic",Ranges:[[160,255,"Latin1Supplement"],[256,383,"LatinExtendedA"],[384,591,"LatinExtendedB"],[592,687,"IPAExtensions"],[688,767,"SpacingModLetters"],[880,1023,"GreekAndCoptic"],[1024,1279,"Cyrillic"],[7680,7935,"LatinExtendedAdditional"],[8192,8303,"GeneralPunctuation"],[8352,8399,"CurrencySymbols"],[8400,8447,"CombDiactForSymbols"],[8448,8527,"LetterlikeSymbols"],[8704,8959,"MathOperators"],[9216,9279,"ControlPictures"],[9312,9471,"EnclosedAlphanum"],[9472,9599,"BoxDrawing"],[64256,64335,"AlphaPresentForms"],[119860,119911,"MathItalic"],[119964,120015,"MathScript"],[120328,120379,"MathSSItalic"],[120484,120485,"ij"],[120546,120603,"GreekItalic"]],32:[0,0,250,0,0],33:[667,11,333,39,304],34:[666,-421,420,144,432],35:[676,0,501,2,540],36:[731,89,500,32,497],37:[706,19,755,80,705],38:[666,18,778,76,723],39:[666,-421,214,132,241],40:[669,181,333,42,315],41:[669,180,333,16,289],42:[666,-255,500,128,492],43:[506,0,675,86,590],44:[101,129,250,-5,135],45:[255,-192,333,49,282],46:[100,11,250,27,138],47:[666,18,278,-65,386],48:[676,7,500,32,497],49:[676,0,500,50,409],50:[676,0,500,12,452],51:[676,7,500,16,465],52:[676,0,500,1,479],53:[666,7,500,15,491],54:[686,7,500,30,521],55:[666,8,500,75,537],56:[676,7,500,30,493],57:[676,17,500,23,492],58:[441,11,333,50,261],59:[441,129,333,26,261],60:[516,10,675,84,592],61:[386,-120,675,86,590],62:[516,10,675,84,592],63:[664,12,500,132,472],64:[666,18,920,118,806],65:[668,0,611,-51,564],66:[653,0,611,-8,588],67:[666,18,667,66,689],68:[653,0,722,-8,700],69:[653,0,611,-1,634],70:[653,0,611,8,645],71:[666,18,722,52,722],72:[653,0,722,-8,769],73:[653,0,333,-8,384],74:[653,18,444,-6,491],75:[653,0,667,7,722],76:[653,0,556,-8,559],77:[653,0,833,-18,872],78:[653,15,667,-20,727],79:[667,18,
722,60,699],80:[653,0,611,0,605],81:[666,182,722,59,699],82:[653,0,611,-13,588],83:[667,18,500,17,508],84:[653,0,556,59,633],85:[653,18,722,102,765],86:[653,18,611,76,688],87:[653,18,833,71,906],88:[653,0,611,-29,655],89:[653,0,556,78,633],90:[653,0,556,-6,606],91:[663,153,389,21,391],92:[666,18,278,-41,319],93:[663,153,389,12,382],94:[666,-301,422,0,422],95:[-75,125,500,0,500],96:[664,-492,333,120,311],97:[441,11,501,17,476],98:[683,11,500,23,473],99:[441,11,444,30,425],100:[683,13,500,15,527],101:[441,11,444,31,412],102:[678,207,278,-147,424],103:[441,206,500,8,471],104:[683,9,500,19,478],105:[654,11,278,49,264],106:[652,207,278,-124,279],107:[683,11,444,14,461],108:[683,11,278,41,279],109:[441,9,722,12,704],110:[441,9,500,14,474],111:[441,11,500,27,468],112:[441,205,504,-75,472],113:[441,209,500,25,484],114:[441,0,389,45,412],115:[442,13,389,16,366],116:[546,11,278,38,296],117:[441,11,500,42,475],118:[441,18,444,20,426],119:[441,18,667,15,648],120:[441,11,444,-27,447],121:[441,206,444,-24,426],122:[428,81,389,-2,380],123:[687,177,400,51,407],124:[666,18,275,105,171],125:[687,177,400,-7,349],126:[323,-183,541,40,502],305:[441,11,278,47,235],567:[441,207,278,-124,246],915:[653,0,611,8,645],916:[668,0,611,-32,526],920:[667,18,722,60,699],923:[668,0,611,-51,564],926:[653,0,651,-6,680],928:[653,0,722,-8,769],931:[653,0,620,-6,659],933:[668,0,556,78,648],934:[653,0,741,50,731],936:[667,0,675,77,778],937:[666,0,762,-6,739],945:[441,11,552,27,549],946:[678,205,506,-40,514],947:[435,206,410,19,438],948:[668,11,460,24,460],949:[441,11,444,30,425],950:[683,185,454,30,475],951:[441,205,474,14,442],952:[678,11,480,27,494],953:[441,11,278,49,235],954:[441,13,444,14,465],955:[678,16,458,-12,431],956:[428,205,526,-33,483],957:[441,18,470,20,459],958:[683,185,454,30,446],959:[441,11,500,27,468],960:[428,18,504,19,536],961:[441,205,504,-40,471],962:[441,185,454,30,453],963:[428,11,498,27,531],964:[428,11,410,12,426],965:[441,10,478,19,446],966:[441,205,622,27,590],967:[441,207,457
,-108,498],968:[441,205,584,15,668],969:[439,11,686,27,654],977:[678,10,556,19,526],981:[683,205,627,27,595],982:[428,11,792,17,832],1009:[441,205,516,27,484],1013:[441,11,444,30,420],8467:[687,11,579,48,571]};MathJax.OutputJax["HTML-CSS"].initFont("STIXGeneral-italic");MathJax.Ajax.loadComplete(MathJax.OutputJax["HTML-CSS"].fontDir+"/General/Italic/Main.js"); | PypiClean |
# cnn_archs/baseline_v2.py
import tensorflow as tf
def conv2d(x, W, b, strides=1):
    """SAME-padded 2-D convolution followed by bias add and ReLU."""
    conv = tf.nn.conv2d(x, W, strides=[1, strides, strides, 1], padding='SAME')
    biased = tf.nn.bias_add(conv, b)
    return tf.nn.relu(biased)
def maxpool2d(x, k=2):
    """k-by-k max pooling with stride k and SAME padding."""
    window = [1, k, k, 1]
    return tf.nn.max_pool(x, ksize=window, strides=window, padding='SAME')
def conv2d_transpose(x, W, b, output, strides=2):
    """Transposed convolution to the explicit `output` shape, with bias add and ReLU."""
    upsampled = tf.nn.conv2d_transpose(
        x, W, output_shape=output, strides=[1, strides, strides, 1], padding='SAME')
    return tf.nn.relu(tf.nn.bias_add(upsampled, b))
def conv_net(x, weights, biases):
    """Encoder-decoder CNN: three conv blocks separated by 2x2 max-pools, three
    stride-2 transposed convolutions back up, then a 1x1 conv to one channel.

    NOTE(review): the transposed-conv output sizes (215x163 -> 430x325 ->
    860x650) hard-code an input resolution of 860x650 — confirm with callers.
    """
    # block1
    conv1 = conv2d(x, weights['wc1'], biases['b1'])
    conv2 = conv2d(conv1, weights['wc2'], biases['b2'])
    conv3 = conv2d(conv2, weights['wc3'], biases['b3'])
    # block2
    mp1 = maxpool2d(conv3, 2)
    conv4 = conv2d(mp1, weights['wc4'], biases['b4'])
    conv5 = conv2d(conv4, weights['wc5'], biases['b5'])
    conv6 = conv2d(conv5, weights['wc6'], biases['b6'])
    # block3
    mp2 = maxpool2d(conv6, 2)
    conv7 = conv2d(mp2, weights['wc7'], biases['b7'])
    conv8 = conv2d(conv7, weights['wc8'], biases['b8'])
    conv9 = conv2d(conv8, weights['wc9'], biases['b9'])
    # block4
    mp3 = maxpool2d(conv9, 2)
    conv10 = conv2d(mp3, weights['wc10'], biases['b10'])
    conv11 = conv2d(conv10, weights['wc11'], biases['b11'])
    conv12 = conv2d(conv11, weights['wc12'], biases['b12'])
    # up block 1 (batch dimension is taken dynamically from an earlier activation)
    uconv1 = conv2d_transpose(conv12, weights['wuc1'], biases['ub1'], [tf.shape(conv3)[0], 215, 163, 128], strides=2)
    # up block 2
    uconv2 = conv2d_transpose(uconv1, weights['wuc2'], biases['ub2'], [tf.shape(conv3)[0], 430, 325, 128], strides=2)
    # up block 3
    uconv3 = conv2d_transpose(uconv2, weights['wuc3'], biases['ub3'], [tf.shape(conv3)[0], 860, 650, 128], strides=2)
    # final 1x1 projection to a single-channel output map
    conv22 = conv2d(uconv3, weights['wc22'], biases['b22'])
    out = conv22
    return out
def wts_and_bias():
    """Create every layer's weight and bias tf variables, Xavier-initialized.

    NOTE(review): relies on tf.contrib, i.e. requires TensorFlow 1.x.
    """
    init = tf.contrib.layers.xavier_initializer
    weights = {
        'wc1': tf.get_variable('W0', shape=(3, 3, 1, 32), initializer=init()),
        'wc2': tf.get_variable('W1', shape=(3, 3, 32, 32), initializer=init()),
        'wc3': tf.get_variable('W2', shape=(3, 3, 32, 32), initializer=init()),
        'wc4': tf.get_variable('W3', shape=(3, 3, 32, 64), initializer=init()),
        'wc5': tf.get_variable('W4', shape=(3, 3, 64, 64), initializer=init()),
        'wc6': tf.get_variable('W5', shape=(3, 3, 64, 64), initializer=init()),
        'wc7': tf.get_variable('W6', shape=(3, 3, 64, 128), initializer=init()),
        'wc8': tf.get_variable('W7', shape=(3, 3, 128, 128), initializer=init()),
        'wc9': tf.get_variable('W8', shape=(3, 3, 128, 128), initializer=init()),
        'wc10': tf.get_variable('W9', shape=(3, 3, 128, 256), initializer=init()),
        'wc11': tf.get_variable('W10', shape=(3, 3, 256, 256), initializer=init()),
        'wc12': tf.get_variable('W11', shape=(3, 3, 256, 256), initializer=init()),
        'wuc1': tf.get_variable('W12', shape=(3, 3, 128, 256), initializer=init()),
        'wuc2': tf.get_variable('W16', shape=(3, 3, 128, 128), initializer=init()),
        'wuc3': tf.get_variable('W20', shape=(3, 3, 128, 128), initializer=init()),
        'wc22': tf.get_variable('W24', shape=(1, 1, 128, 1), initializer=init()),
    }
    biases = {
        'b1': tf.get_variable('B0', shape=(32), initializer=init()),
        'b2': tf.get_variable('B1', shape=(32), initializer=init()),
        'b3': tf.get_variable('B2', shape=(32), initializer=init()),
        'b4': tf.get_variable('B3', shape=(64), initializer=init()),
        'b5': tf.get_variable('B4', shape=(64), initializer=init()),
        'b6': tf.get_variable('B5', shape=(64), initializer=init()),
        'b7': tf.get_variable('B6', shape=(128), initializer=init()),
        'b8': tf.get_variable('B7', shape=(128), initializer=init()),
        'b9': tf.get_variable('B8', shape=(128), initializer=init()),
        'b10': tf.get_variable('B9', shape=(256), initializer=init()),
        'b11': tf.get_variable('B10', shape=(256), initializer=init()),
        'b12': tf.get_variable('B11', shape=(256), initializer=init()),
        'ub1': tf.get_variable('B12', shape=(128), initializer=init()),
        'ub2': tf.get_variable('B16', shape=(128), initializer=init()),
        'ub3': tf.get_variable('B20', shape=(128), initializer=init()),
        'b22': tf.get_variable('B24', shape=(1), initializer=init()),
    }
    return weights, biases
# kiosk_client/utils.py
"""Utility files for batch file processing"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from PIL import Image
from twisted.internet import reactor
from twisted.internet.task import deferLater
def get_download_path():
    """Return the default downloads directory on Windows or POSIX.

    https://stackoverflow.com/a/48706260
    """
    if os.name == 'nt':
        # pylint: disable=E0401,C0415
        import winreg
        shell_folders = r'SOFTWARE\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders'
        downloads_guid = '{374DE290-123F-4565-9164-39C4925E467B}'
        with winreg.OpenKey(winreg.HKEY_CURRENT_USER, shell_folders) as key:
            return winreg.QueryValueEx(key, downloads_guid)[0]
    candidate = os.path.join(os.path.expanduser('~'), 'Downloads')
    # Walk up the tree until we reach a path that actually exists.
    while not os.path.exists(candidate):
        candidate = os.path.abspath(os.path.join(candidate, '..'))
    return candidate
def strip_bucket_prefix(prefix):
    """Remove leading, trailing, and duplicated "/" separators."""
    return '/'.join(filter(None, prefix.split('/')))
def sleep(seconds):
    """Simple helper to delay asynchronously for some number of seconds.

    Returns a twisted Deferred that fires with None after `seconds`.
    """
    return deferLater(reactor, seconds, lambda: None)
def is_image_file(filepath):
    """Return True if `filepath` can be opened and verified as an image."""
    try:
        with Image.open(filepath) as im:
            im.verify()
        return True
    # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt/SystemExit;
    # PIL raises several error types, so any Exception means "not an image".
    except Exception:
        return False
def iter_image_files(path, include_archives=True):
    """Yield paths of image files (and optionally .zip archives) under `path`.

    :param path: a single file, or a directory walked recursively.
    :param include_archives: when True, also yield files with an archive
        extension (currently only .zip) without opening them.
    """
    archive_extensions = {'.zip'}

    def _wanted(filepath):
        # Selection rule: archives match by extension (when enabled);
        # everything else must verify as an image via PIL.
        _, ext = os.path.splitext(filepath.lower())
        if ext in archive_extensions and include_archives:
            return True
        return is_image_file(filepath)

    if os.path.isfile(path):
        if _wanted(path):
            yield path
        # BUG FIX: previously fell through to os.walk() on a non-directory,
        # which silently yielded nothing; make the early exit explicit.
        return

    for dirpath, _, filenames in os.walk(path):
        for filename in filenames:
            filepath = os.path.join(dirpath, filename)
            if _wanted(filepath):
                yield filepath
# Infineon_DPS310.py
import smbus
from time import sleep
def getTwosComplement(raw_val, length):
    """Interpret `raw_val` as a `length`-bit two's-complement integer.

    Args:
        raw_val (int): Raw value
        length (int): Max bit length

    Returns:
        int: Two's complement
    """
    sign_bit = 1 << (length - 1)
    if raw_val & sign_bit:
        return raw_val - (1 << length)
    return raw_val
class DPS310:
    """Class of DPS310, Pressure and Temperature sensor (I2C, address 0x77).
    """
    __bus = smbus.SMBus(1)
    __addr = 0x77
    # Compensation Scale Factors
    # Oversampling Rate | Scale Factor (kP or kT)
    # ---------------------------|------------------------
    # 1 (single) | 524288
    # 2 times (Low Power) | 1572864
    # 4 times | 3670016
    # 8 times | 7864320
    # 16 times (Standard) | 253952
    # 32 times | 516096
    # 64 times (High Precision) | 1040384 <- Configured
    # 128 times | 2088960
    __kP = 1040384
    __kT = 1040384
    def __init__(self):
        """Initial setting.
        Execute `self.correctTemperature()` and `self.setOversamplingRate()`.
        """
        self.__correctTemperature()
        self.__setOversamplingRate()
    def __correctTemperature(self):
        """Correct temperature.
        DPS310 sometimes indicates a temperature over 60 degree Celsius
        although room temperature is around 20-30 degree Celsius.
        Call this function to fix.
        """
        # Correct Temp
        DPS310.__bus.write_byte_data(DPS310.__addr, 0x0E, 0xA5)
        DPS310.__bus.write_byte_data(DPS310.__addr, 0x0F, 0x96)
        DPS310.__bus.write_byte_data(DPS310.__addr, 0x62, 0x02)
        DPS310.__bus.write_byte_data(DPS310.__addr, 0x0E, 0x00)
        DPS310.__bus.write_byte_data(DPS310.__addr, 0x0F, 0x00)
    def __setOversamplingRate(self):
        """Set oversampling rate.
        Pressure measurement rate     : 4 Hz
        Pressure oversampling rate    : 64 times
        Temperature measurement rate  : 4 Hz
        Temperature oversampling rate : 64 times
        """
        # Oversampling Rate Setting (64time)
        DPS310.__bus.write_byte_data(DPS310.__addr, 0x06, 0x26)
        DPS310.__bus.write_byte_data(DPS310.__addr, 0x07, 0xA6)
        DPS310.__bus.write_byte_data(DPS310.__addr, 0x08, 0x07)
        # Oversampling Rate Configuration
        DPS310.__bus.write_byte_data(DPS310.__addr, 0x09, 0x0C)
    def __getRawPressure(self):
        """Get raw pressure from sensor.
        Returns:
            int: Raw pressure (24-bit two's complement)
        """
        p1 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x00)
        p2 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x01)
        p3 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x02)
        p = (p1 << 16) | (p2 << 8) | p3
        p = getTwosComplement(p, 24)
        return p
    def __getRawTemperature(self):
        """Get raw temperature from sensor.
        Returns:
            int: Raw temperature (24-bit two's complement)
        """
        t1 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x03)
        t2 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x04)
        t3 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x05)
        t = (t1 << 16) | (t2 << 8) | t3
        t = getTwosComplement(t, 24)
        return t
    def __getPressureCalibrationCoefficients(self):
        """Get pressure calibration coefficients from sensor.
        Returns:
            int: Pressure calibration coefficient (c00)
            int: Pressure calibration coefficient (c10)
            int: Pressure calibration coefficient (c20)
            int: Pressure calibration coefficient (c30)
            int: Pressure calibration coefficient (c01)
            int: Pressure calibration coefficient (c11)
            int: Pressure calibration coefficient (c21)
        """
        src13 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x13)
        src14 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x14)
        src15 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x15)
        src16 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x16)
        src17 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x17)
        src18 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x18)
        src19 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x19)
        src1A = DPS310.__bus.read_byte_data(DPS310.__addr, 0x1A)
        src1B = DPS310.__bus.read_byte_data(DPS310.__addr, 0x1B)
        src1C = DPS310.__bus.read_byte_data(DPS310.__addr, 0x1C)
        src1D = DPS310.__bus.read_byte_data(DPS310.__addr, 0x1D)
        src1E = DPS310.__bus.read_byte_data(DPS310.__addr, 0x1E)
        src1F = DPS310.__bus.read_byte_data(DPS310.__addr, 0x1F)
        src20 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x20)
        src21 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x21)
        c00 = (src13 << 12) | (src14 << 4) | (src15 >> 4)
        c00 = getTwosComplement(c00, 20)
        c10 = ((src15 & 0x0F) << 16) | (src16 << 8) | src17
        c10 = getTwosComplement(c10, 20)
        c20 = (src1C << 8) | src1D
        c20 = getTwosComplement(c20, 16)
        c30 = (src20 << 8) | src21
        c30 = getTwosComplement(c30, 16)
        c01 = (src18 << 8) | src19
        c01 = getTwosComplement(c01, 16)
        c11 = (src1A << 8) | src1B
        c11 = getTwosComplement(c11, 16)
        # BUG FIX: was `(src1E < 8)` — a boolean comparison instead of the
        # left shift used by every other coefficient, so c21 was built from
        # True/False rather than the high byte.
        c21 = (src1E << 8) | src1F
        c21 = getTwosComplement(c21, 16)
        return c00, c10, c20, c30, c01, c11, c21
    def __getTemperatureCalibrationCoefficients(self):
        """Get temperature calibration coefficients from sensor.
        Returns:
            int: Temperature calibration coefficient (c0)
            int: Temperature calibration coefficient (c1)
        """
        src10 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x10)
        src11 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x11)
        src12 = DPS310.__bus.read_byte_data(DPS310.__addr, 0x12)
        c0 = (src10 << 4) | (src11 >> 4)
        c0 = getTwosComplement(c0, 12)
        c1 = ((src11 & 0x0F) << 8) | src12
        c1 = getTwosComplement(c1, 12)
        return c0, c1
    def calcScaledPressure(self):
        """Calculate scaled pressure.
        Returns:
            float: Scaled pressure (raw / kP)
        """
        raw_p = self.__getRawPressure()
        scaled_p = raw_p / DPS310.__kP
        return scaled_p
    def calcScaledTemperature(self):
        """Calculate scaled temperature.
        Returns:
            float: Scaled temperature (raw / kT)
        """
        raw_t = self.__getRawTemperature()
        scaled_t = raw_t / DPS310.__kT
        return scaled_t
    def calcCompTemperature(self, scaled_t):
        """Calculate compensated temperature.
        Args:
            scaled_t (float): Scaled temperature
        Returns:
            float: Compensated temperature [C]
        """
        c0, c1 = self.__getTemperatureCalibrationCoefficients()
        comp_t = c0 * 0.5 + scaled_t * c1
        return comp_t
    def calcCompPressure(self, scaled_p, scaled_t):
        """Calculate compensated pressure.
        Args:
            scaled_p (float): Scaled pressure
            scaled_t (float): Scaled temperature
        Returns:
            float: Compensated pressure [Pa]
        """
        c00, c10, c20, c30, c01, c11, c21 = self.__getPressureCalibrationCoefficients()
        comp_p = (c00 + scaled_p * (c10 + scaled_p * (c20 + scaled_p * c30))
                  + scaled_t * (c01 + scaled_p * (c11 + scaled_p * c21)))
        return comp_p
// mode/plsql/plsql.js — CodeMirror syntax-highlighting mode for Oracle PL/SQL.
CodeMirror.defineMode("plsql", function(config, parserConfig) {
  var indentUnit = config.indentUnit,
      keywords = parserConfig.keywords,
      functions = parserConfig.functions,
      types = parserConfig.types,
      sqlplus = parserConfig.sqlplus,
      multiLineStrings = parserConfig.multiLineStrings;
  var isOperatorChar = /[+\-*&%=<>!?:\/|]/;
  // Switch the tokenizer to `f` and delegate the current token to it.
  function chain(stream, state, f) {
    state.tokenize = f;
    return f(stream, state);
  }
  var type;
  function ret(tp, style) {
    type = tp;
    return style;
  }
  function tokenBase(stream, state) {
    var ch = stream.next();
    // start of string?
    if (ch == '"' || ch == "'")
      return chain(stream, state, tokenString(ch));
    // is it one of the special signs []{}().,;? Seperator?
    else if (/[\[\]{}\(\),;\.]/.test(ch))
      return ret(ch);
    // start of a number value?
    else if (/\d/.test(ch)) {
      stream.eatWhile(/[\w\.]/);
      return ret("number", "number");
    }
    // multi line comment or simple operator?
    else if (ch == "/") {
      if (stream.eat("*")) {
        return chain(stream, state, tokenComment);
      }
      else {
        stream.eatWhile(isOperatorChar);
        return ret("operator", "operator");
      }
    }
    // single line comment or simple operator?
    else if (ch == "-") {
      if (stream.eat("-")) {
        stream.skipToEnd();
        return ret("comment", "comment");
      }
      else {
        stream.eatWhile(isOperatorChar);
        return ret("operator", "operator");
      }
    }
    // pl/sql variable?
    else if (ch == "@" || ch == "$") {
      stream.eatWhile(/[\w\d\$_]/);
      return ret("word", "variable");
    }
    // is it a operator?
    else if (isOperatorChar.test(ch)) {
      stream.eatWhile(isOperatorChar);
      return ret("operator", "operator");
    }
    else {
      // get the whole word
      stream.eatWhile(/[\w\$_]/);
      // is it one of the listed keywords?
      if (keywords && keywords.propertyIsEnumerable(stream.current().toLowerCase())) return ret("keyword", "keyword");
      // is it one of the listed functions?
      if (functions && functions.propertyIsEnumerable(stream.current().toLowerCase())) return ret("keyword", "builtin");
      // is it one of the listed types?
      if (types && types.propertyIsEnumerable(stream.current().toLowerCase())) return ret("keyword", "variable-2");
      // is it one of the listed sqlplus keywords?
      if (sqlplus && sqlplus.propertyIsEnumerable(stream.current().toLowerCase())) return ret("keyword", "variable-3");
      // default: just a "variable"
      return ret("word", "variable");
    }
  }
  // Tokenizer for a string delimited by `quote`; handles backslash escapes.
  function tokenString(quote) {
    return function(stream, state) {
      var escaped = false, next, end = false;
      while ((next = stream.next()) != null) {
        if (next == quote && !escaped) {end = true; break;}
        escaped = !escaped && next == "\\";
      }
      if (end || !(escaped || multiLineStrings))
        state.tokenize = tokenBase;
      return ret("string", "plsql-string");
    };
  }
  // Tokenizer inside a /* ... */ comment; returns to tokenBase at "*/".
  function tokenComment(stream, state) {
    var maybeEnd = false, ch;
    while (ch = stream.next()) {
      if (ch == "/" && maybeEnd) {
        state.tokenize = tokenBase;
        break;
      }
      maybeEnd = (ch == "*");
    }
    return ret("comment", "plsql-comment");
  }
  // Interface
  return {
    startState: function(basecolumn) {
      return {
        tokenize: tokenBase,
        startOfLine: true
      };
    },
    token: function(stream, state) {
      if (stream.eatSpace()) return null;
      var style = state.tokenize(stream, state);
      return style;
    }
  };
});
// MIME registration: word lists for PL/SQL keywords, built-in functions,
// types, and SQL*Plus commands, registered under "text/x-plsql".
(function() {
  // Turn a space-separated word list into a lookup object (word -> true).
  function keywords(str) {
    var obj = {}, words = str.split(" ");
    for (var i = 0; i < words.length; ++i) obj[words[i]] = true;
    return obj;
  }
  var cKeywords = "abort accept access add all alter and any array arraylen as asc assert assign at attributes audit " +
  "authorization avg " +
  "base_table begin between binary_integer body boolean by " +
  "case cast char char_base check close cluster clusters colauth column comment commit compress connect " +
  "connected constant constraint crash create current currval cursor " +
  "data_base database date dba deallocate debugoff debugon decimal declare default definition delay delete " +
  "desc digits dispose distinct do drop " +
  "else elsif enable end entry escape exception exception_init exchange exclusive exists exit external " +
  "fast fetch file for force form from function " +
  "generic goto grant group " +
  "having " +
  "identified if immediate in increment index indexes indicator initial initrans insert interface intersect " +
  "into is " +
  "key " +
  "level library like limited local lock log logging long loop " +
  "master maxextents maxtrans member minextents minus mislabel mode modify multiset " +
  "new next no noaudit nocompress nologging noparallel not nowait number_base " +
  "object of off offline on online only open option or order out " +
  "package parallel partition pctfree pctincrease pctused pls_integer positive positiven pragma primary prior " +
  "private privileges procedure public " +
  "raise range raw read rebuild record ref references refresh release rename replace resource restrict return " +
  "returning reverse revoke rollback row rowid rowlabel rownum rows run " +
  "savepoint schema segment select separate session set share snapshot some space split sql start statement " +
  "storage subtype successful synonym " +
  "tabauth table tables tablespace task terminate then to trigger truncate type " +
  "union unique unlimited unrecoverable unusable update use using " +
  "validate value values variable view views " +
  "when whenever where while with work";
  var cFunctions = "abs acos add_months ascii asin atan atan2 average " +
  "bfilename " +
  "ceil chartorowid chr concat convert cos cosh count " +
  "decode deref dual dump dup_val_on_index " +
  "empty error exp " +
  "false floor found " +
  "glb greatest " +
  "hextoraw " +
  "initcap instr instrb isopen " +
  "last_day least lenght lenghtb ln lower lpad ltrim lub " +
  "make_ref max min mod months_between " +
  "new_time next_day nextval nls_charset_decl_len nls_charset_id nls_charset_name nls_initcap nls_lower " +
  "nls_sort nls_upper nlssort no_data_found notfound null nvl " +
  "others " +
  "power " +
  "rawtohex reftohex round rowcount rowidtochar rpad rtrim " +
  "sign sin sinh soundex sqlcode sqlerrm sqrt stddev substr substrb sum sysdate " +
  "tan tanh to_char to_date to_label to_multi_byte to_number to_single_byte translate true trunc " +
  "uid upper user userenv " +
  "variance vsize";
  var cTypes = "bfile blob " +
  "character clob " +
  "dec " +
  "float " +
  "int integer " +
  "mlslabel " +
  "natural naturaln nchar nclob number numeric nvarchar2 " +
  "real rowtype " +
  "signtype smallint string " +
  "varchar varchar2";
  var cSqlplus = "appinfo arraysize autocommit autoprint autorecovery autotrace " +
  "blockterminator break btitle " +
  "cmdsep colsep compatibility compute concat copycommit copytypecheck " +
  "define describe " +
  "echo editfile embedded escape exec execute " +
  "feedback flagger flush " +
  "heading headsep " +
  "instance " +
  "linesize lno loboffset logsource long longchunksize " +
  "markup " +
  "native newpage numformat numwidth " +
  "pagesize pause pno " +
  "recsep recsepchar release repfooter repheader " +
  "serveroutput shiftinout show showmode size spool sqlblanklines sqlcase sqlcode sqlcontinue sqlnumber " +
  "sqlpluscompatibility sqlprefix sqlprompt sqlterminator suffix " +
  "tab term termout time timing trimout trimspool ttitle " +
  "underline " +
  "verify version " +
  "wrap";
  CodeMirror.defineMIME("text/x-plsql", {
    name: "plsql",
    keywords: keywords(cKeywords),
    functions: keywords(cFunctions),
    types: keywords(cTypes),
    sqlplus: keywords(cSqlplus)
  });
}());
# hebo/optimizers/nomr.py
# This program is free software; you can redistribute it and/or modify it under
# the terms of the MIT license.
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the MIT License for more details.
"""
Bayesian optimisation collaborated with NJU
"""
import numpy as np
import pandas as pd
import torch
from hebo.design_space import DesignSpace
from hebo.acquisitions.acq import SingleObjectiveAcq
from .abstract_optimizer import AbstractOptimizer
from .bo import BO
from .hebo import HEBO
class AbsEtaDifference(SingleObjectiveAcq):
    """LCB-style acquisition on |mu - eta|: favors points whose posterior mean
    is close to the target value `eta`, discounted by `kappa` posterior
    standard deviations to encourage exploration."""

    def __init__(self, model, kappa=3.0, eta=0.7, **conf):
        super().__init__(model, **conf)
        self.kappa = kappa
        self.eta = eta
        assert model.num_out == 1

    def eval(self, x: torch.FloatTensor, xe: torch.LongTensor) -> torch.FloatTensor:
        mu, var = self.model.predict(x, xe)
        distance = (mu - self.eta).abs()
        return distance - self.kappa * var.sqrt()
class NoMR_BO(AbstractOptimizer):
    """Two-stage optimizer: explore with `opt1` until an observation beats the
    threshold `eta`, then switch to the exploitation-leaning `opt2`."""
    support_parallel_opt = False
    support_combinatorial = True
    support_contextual = False
    def __init__(self,
            space : DesignSpace,
            eta : float = None,
            opt1 : AbstractOptimizer = None,
            opt2 : AbstractOptimizer = None
            ):
        super().__init__(space)
        self.eta = eta
        self.opt1 = opt1
        self.opt2 = opt2
        if self.eta is None:
            self.eta = np.inf # prior optimum unknown: stay in stage one forever
        if self.opt1 is None:
            # NOTE: optimizer for stage one, vanilla BO
            self.opt1 = HEBO(space)
        if self.opt2 is None:
            # NOTE: optimizer for stage two, focus more on exploitation
            self.opt2 = BO(space, acq_conf = {'kappa' : 0.6})
    def observe(self, x : pd.DataFrame, y : np.ndarray):
        # Both stages see every observation so either can take over cleanly.
        self.opt1.observe(x, y)
        self.opt2.observe(x, y)
    def suggest(self, n_suggestions = 1, fix_input : dict = None):
        assert n_suggestions == 1
        # Stage one until at least one observation is better than eta.
        if self.opt1.y is None or self.opt1.y.shape[0] == 0 or self.opt1.y.min() > self.eta:
            return self.opt1.suggest(n_suggestions, fix_input)
        return self.opt2.suggest(n_suggestions, fix_input)
    @property
    def best_x(self) -> pd.DataFrame:
        # Report the incumbent of whichever stage currently has the lower y.
        return self.opt1.best_x if self.opt1.best_y < self.opt2.best_y else self.opt2.best_x
    @property
    def best_y(self) -> float:
        return self.opt1.best_y if self.opt1.best_y < self.opt2.best_y else self.opt2.best_y
# dotconfig.py
import ConfigParser
class Config(object):
    """Read a config file once and expose its sections as attributes.

    e.g. ``config.my_section.my_value`` where ``my_section`` and ``my_value``
    are data in your config file.
    """

    def __init__(self, config_file):
        """Create the parser and read the config file.

        Args:
            config_file: path to your config file.

        Raises:
            TypeError: if config_file is not a string.
        """
        self._parser = ConfigParser.ConfigParser()
        self._parser.read(config_file)

    def __getattr__(self, name):
        """Look up `name` as a config section.

        Args:
            name: section name to fetch.

        Returns:
            a Section object with that section's options.

        Raises:
            ConfigParser.NoSectionError: if the section can't be found.
        """
        if not self._parser.has_section(name):
            raise ConfigParser.NoSectionError(name)
        return Section(name, self._parser.items(name))
# TODO: Could we use __type here to avoid casting later?
class Section(object):
    """Expose the options of one config section as attributes.

    e.g. ``config.my_section.my_option`` returns the value of ``my_option``
    within section ``my_section``; values are always strings.
    """

    def __init__(self, section_name, items):
        """Store the section name and build an option lookup dict.

        Args:
            section_name: the name of your section
            items: [(name: string, value: string)]
        """
        self._section_name = section_name
        self._items = dict(items)

    def __getattr__(self, name):
        """Return the option `name` as a string.

        Args:
            name: option name to fetch.

        Raises:
            ConfigParser.NoOptionError: if the option can't be found.
        """
        if name not in self._items:
            raise ConfigParser.NoOptionError(name, self._section_name)
        return self._items[name]
# pip/_vendor/requests/cookies.py
import time
import collections
from .compat import cookielib, urlparse, urlunparse, Morsel
try:
import threading
# grr, pyflakes: this fixes "redefinition of unused 'threading'"
threading
except ImportError:
import dummy_threading as threading
class MockRequest(object):
    """Wraps a `requests.Request` to mimic a `urllib2.Request`.

    The code in `cookielib.CookieJar` expects this interface in order to correctly
    manage cookie policies, i.e., determine whether a cookie can be set, given the
    domains of the request and the cookie.

    The original request object is read-only. The client is responsible for collecting
    the new headers via `get_new_headers()` and interpreting them appropriately. You
    probably want `get_cookie_header`, defined below.
    """

    def __init__(self, request):
        self._r = request
        self._new_headers = {}
        # URL scheme ("http"/"https"); cookielib reads this as `.type`.
        self.type = urlparse(self._r.url).scheme

    def get_type(self):
        return self.type

    def get_host(self):
        # "host[:port]" portion of the request URL.
        return urlparse(self._r.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # Only return the response's URL if the user hadn't set the Host
        # header
        if not self._r.headers.get('Host'):
            return self._r.url
        # If they did set it, retrieve it and reconstruct the expected domain
        host = self._r.headers['Host']
        parsed = urlparse(self._r.url)
        # Reconstruct the URL as we expect it
        return urlunparse([
            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
            parsed.fragment
        ])

    def is_unverifiable(self):
        # Always "unverifiable" in RFC 2965 terms: the user didn't type the
        # cookie-setting URL directly, so third-party cookie rules apply.
        return True

    def has_header(self, name):
        return name in self._r.headers or name in self._new_headers

    def get_header(self, name, default=None):
        # New (not-yet-applied) headers take effect only as a fallback.
        return self._r.headers.get(name, self._new_headers.get(name, default))

    def add_header(self, key, val):
        """cookielib has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")

    def add_unredirected_header(self, name, value):
        # cookielib calls this to attach the Cookie header; collect it here.
        self._new_headers[name] = value

    def get_new_headers(self):
        return self._new_headers

    @property
    def unverifiable(self):
        # Property aliases below mirror the urllib2.Request attribute API.
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()
class MockResponse(object):
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `cookielib` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookielib` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        return self._headers

    def getheaders(self, name):
        # BUG FIX: the result was computed but never returned, so callers
        # always received None.
        return self._headers.getheaders(name)
def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.
    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # _original_response is the wrapped httplib.HTTPResponse; without it
    # there are no parsed headers to harvest.
    raw = getattr(response, '_original_response', None)
    if not raw:
        return
    # Adapt request/response to the interfaces cookielib expects; the
    # response's msg attribute carries the parsed headers.
    mock_req = MockRequest(request)
    mock_res = MockResponse(raw.msg)
    jar.extract_cookies(mock_res, mock_req)
def get_cookie_header(jar, request):
    """Produce an appropriate Cookie header string to be sent with `request`, or None."""
    mock = MockRequest(request)
    # cookielib writes the Cookie header into the mock's new-header store.
    jar.add_cookie_header(mock)
    new_headers = mock.get_new_headers()
    return new_headers.get('Cookie')
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.
    Wraps CookieJar.clear(), is O(n).
    """
    # Collect first, then clear: CookieJar.clear() mutates the jar, so we
    # must not remove entries while iterating it.
    clearables = [
        (c.domain, c.path, c.name)
        for c in cookiejar
        if c.name == name
        and (domain is None or domain == c.domain)
        and (path is None or path == c.path)
    ]
    for dom, pth, nm in clearables:
        cookiejar.clear(dom, pth, nm)
class CookieConflictError(RuntimeError):
    """There are two cookies that meet the criteria specified in the cookie jar.
    Use .get and .set and include domain and path args in order to be more specific."""
class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
    """Compatibility class; is a cookielib.CookieJar, but exposes a dict interface.
    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.
    Don't use the dict interface internally; it's just for compatibility
    with external client code. All `requests` code should work out of the box
    with externally provided instances of CookieJar, e.g., LWPCookieJar and
    FileCookieJar.
    Caution: dictionary operations that are normally O(1) may be O(n).
    Unlike a regular CookieJar, this class is pickleable.
    """
    def get(self, name, default=None, domain=None, path=None):
        """Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains. Caution: operation is O(n), not O(1)."""
        try:
            return self._find_no_duplicates(name, domain, path)
        except KeyError:
            return default
    def set(self, name, value, **kwargs):
        """Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains."""
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
            return
        if isinstance(value, Morsel):
            c = morsel_to_cookie(value)
        else:
            c = create_cookie(name, value, **kwargs)
        self.set_cookie(c)
        return c
    def iterkeys(self):
        """Dict-like iterkeys() that returns an iterator of names of cookies from the jar.
        See itervalues() and iteritems()."""
        for cookie in iter(self):
            yield cookie.name
    def keys(self):
        """Dict-like keys() that returns a list of names of cookies from the jar.
        See values() and items()."""
        return list(self.iterkeys())
    def itervalues(self):
        """Dict-like itervalues() that returns an iterator of values of cookies from the jar.
        See iterkeys() and iteritems()."""
        for cookie in iter(self):
            yield cookie.value
    def values(self):
        """Dict-like values() that returns a list of values of cookies from the jar.
        See keys() and items()."""
        return list(self.itervalues())
    def iteritems(self):
        """Dict-like iteritems() that returns an iterator of name-value tuples from the jar.
        See iterkeys() and itervalues()."""
        for cookie in iter(self):
            yield cookie.name, cookie.value
    def items(self):
        """Dict-like items() that returns a list of name-value tuples from the jar.
        See keys() and values(). Allows client-code to call "dict(RequestsCookieJar)
        and get a vanilla python dict of key value pairs."""
        return list(self.iteritems())
    def list_domains(self):
        """Utility method to list all the domains in the jar."""
        domains = []
        for cookie in iter(self):
            if cookie.domain not in domains:
                domains.append(cookie.domain)
        return domains
    def list_paths(self):
        """Utility method to list all the paths in the jar."""
        paths = []
        for cookie in iter(self):
            if cookie.path not in paths:
                paths.append(cookie.path)
        return paths
    def multiple_domains(self):
        """Returns True if there are multiple domains in the jar.
        Returns False otherwise."""
        domains = []
        for cookie in iter(self):
            if cookie.domain is not None and cookie.domain in domains:
                return True
            domains.append(cookie.domain)
        return False  # there is only one domain in jar
    def get_dict(self, domain=None, path=None):
        """Takes as an argument an optional domain and path and returns a plain old
        Python dict of name-value pairs of cookies that meet the requirements."""
        dictionary = {}
        for cookie in iter(self):
            if ((domain is None or cookie.domain == domain) and
                    (path is None or cookie.path == path)):
                dictionary[cookie.name] = cookie.value
        return dictionary
    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws exception
        if there are more than one cookie with name. In that case, use the more
        explicit get() method instead. Caution: operation is O(n), not O(1)."""
        return self._find_no_duplicates(name)
    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws exception
        if there is already a cookie of that name in the jar. In that case, use the more
        explicit set() method instead."""
        self.set(name, value)
    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name()."""
        remove_cookie_by_name(self, name)
    def set_cookie(self, cookie, *args, **kwargs):
        # Strip escaped embedded quotes from server-quoted cookie values
        # before handing off to the base-class implementation.
        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
            cookie.value = cookie.value.replace('\\"', '')
        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
        if isinstance(other, cookielib.CookieJar):
            for cookie in other:
                self.set_cookie(cookie)
        else:
            super(RequestsCookieJar, self).update(other)
    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values. Takes as args name
        and optional domain and path. Returns a cookie.value. If there are conflicting cookies,
        _find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown
        if there are conflicting cookies."""
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        return cookie.value
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
    def _find_no_duplicates(self, name, domain=None, path=None):
        """__get_item__ and get call _find_no_duplicates -- never used in Requests internally.
        Takes as args name and optional domain and path. Returns a cookie.value.
        Throws KeyError if cookie is not found and CookieConflictError if there are
        multiple cookies that match name and optionally domain and path."""
        toReturn = None
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict
        # BUG FIX: compare against None rather than truthiness -- '' is a
        # legitimate cookie value and previously raised a spurious KeyError.
        if toReturn is not None:
            return toReturn
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state
    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock()
    def copy(self):
        """Return a copy of this RequestsCookieJar."""
        new_cj = RequestsCookieJar()
        new_cj.update(self)
        return new_cj
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.
    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    # Defaults for every constructor argument cookielib.Cookie requires.
    spec = {
        'version': 0,
        'name': name,
        'value': value,
        'port': None,
        'domain': '',
        'path': '/',
        'secure': False,
        'expires': None,
        'discard': True,
        'comment': None,
        'comment_url': None,
        'rest': {'HttpOnly': None},
        'rfc2109': False,
    }
    unexpected = set(kwargs) - set(spec)
    if unexpected:
        raise TypeError(
            'create_cookie() got unexpected keyword arguments: %s'
            % list(unexpected))
    spec.update(kwargs)
    # Derive the *_specified flags the Cookie constructor also expects.
    spec['port_specified'] = bool(spec['port'])
    spec['domain_specified'] = bool(spec['domain'])
    spec['domain_initial_dot'] = spec['domain'].startswith('.')
    spec['path_specified'] = bool(spec['path'])
    return cookielib.Cookie(**spec)
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""
    expires = None
    if morsel['max-age']:
        try:
            # BUG FIX: Morsel values are strings; the original added a str
            # to time.time() and raised TypeError for every max-age cookie.
            expires = int(time.time() + int(morsel['max-age']))
        except ValueError:
            raise TypeError('max-age: %s must be integer' % morsel['max-age'])
    elif morsel['expires']:
        time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
        expires = time.mktime(
            time.strptime(morsel['expires'], time_template)) - time.timezone
    return create_cookie(
        comment=morsel['comment'],
        comment_url=bool(morsel['comment']),
        discard=False,
        domain=morsel['domain'],
        expires=expires,
        name=morsel.key,
        path=morsel['path'],
        port=None,
        rest={'HttpOnly': morsel['httponly']},
        rfc2109=False,
        secure=bool(morsel['secure']),
        value=morsel.value,
        version=morsel['version'] or 0,
    )
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    """
    jar = RequestsCookieJar() if cookiejar is None else cookiejar
    if cookie_dict is None:
        return jar
    # Snapshot existing names up front so overwrite=False only protects
    # cookies that were in the jar before this call.
    existing = [cookie.name for cookie in jar]
    for name, value in cookie_dict.items():
        if not overwrite and name in existing:
            continue
        jar.set_cookie(create_cookie(name, value))
    return jar
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.
    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')
    if isinstance(cookies, dict):
        return cookiejar_from_dict(cookies, cookiejar=cookiejar,
                                   overwrite=False)
    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain cookielib jars have no update(); copy cookie by cookie.
            for cookie_in_jar in cookies:
                cookiejar.set_cookie(cookie_in_jar)
    return cookiejar
return cookiejar | PypiClean |
/Electrum-Zcash-Random-Fork-3.1.3b5.tar.gz/Electrum-Zcash-Random-Fork-3.1.3b5/lib/transaction.py |
# Note: The deserialization code originally comes from ABE.
from .util import print_error, profiler
from . import bitcoin
from .bitcoin import *
import struct
import traceback
import sys
#
# Workalike python implementation of Bitcoin's CDataStream class.
#
from .keystore import xpubkey_to_address, xpubkey_to_pubkey
from pyblake2 import blake2b
NO_SIGNATURE = 'ff'  # placeholder byte marking a missing signature in partially-signed txns
# Zcash consensus constants: version-group ids and branch ids for the
# Overwinter (tx v3) and Sapling (tx v4) network upgrades.
OVERWINTERED_VERSION_GROUP_ID = 0x03C48270
OVERWINTER_BRANCH_ID = 0x5BA81B19
SAPLING_VERSION_GROUP_ID = 0x892F2085
SAPLING_BRANCH_ID = 0x76B809BB
# Raised by deserialize() when the header/versionGroupId combination is invalid.
class TransactionVersionError(Exception):
    """ Thrown when there's a problem with transaction versioning """
# Raised by BCDataStream on buffer under/overruns and malformed data.
class SerializationError(Exception):
    """ Thrown when there's a problem deserializing or serializing """
class UnknownTxinType(Exception):
    """Raised when an input's script type cannot be determined."""
    pass
class NotRecognizedRedeemScript(Exception):
    """Raised when a p2sh redeem script is not a recognized multisig script."""
    pass
class BCDataStream(object):
    """Workalike of Bitcoin's CDataStream: a growable byte buffer with a
    read cursor plus helpers for the Bitcoin wire (de)serialization format."""
    def __init__(self):
        self.input = None       # bytearray backing store (None until first write)
        self.read_cursor = 0    # offset of the next unread byte
    def clear(self):
        """Drop the buffer and reset the read cursor."""
        self.input = None
        self.read_cursor = 0
    def write(self, _bytes):  # Initialize with string of _bytes
        """Append raw bytes, creating the buffer on first use."""
        if self.input is None:
            self.input = bytearray(_bytes)
        else:
            self.input += bytearray(_bytes)
    def read_string(self, encoding='ascii'):
        # Strings are encoded depending on length:
        # 0 to 252 : 1-byte-length followed by bytes (if any)
        # 253 to 65,535 : byte'253' 2-byte-length followed by bytes
        # 65,536 to 4,294,967,295 : byte '254' 4-byte-length followed by bytes
        # ... and the Bitcoin client is coded to understand:
        # greater than 4,294,967,295 : byte '255' 8-byte-length followed by bytes of string
        # ... but I don't think it actually handles any strings that big.
        if self.input is None:
            raise SerializationError("call write(bytes) before trying to deserialize")
        length = self.read_compact_size()
        return self.read_bytes(length).decode(encoding)
    def write_string(self, string, encoding='ascii'):
        """Write a compact-size length prefix followed by the encoded string."""
        string = to_bytes(string, encoding)
        # Length-encoded as with read-string
        self.write_compact_size(len(string))
        self.write(string)
    def read_bytes(self, length):
        """Read exactly `length` bytes.
        BUG FIX: slicing never raises IndexError, so the original's handler
        (and its unreachable `return ''`) were dead code and short reads
        were returned silently; a truncated buffer now raises."""
        if self.input is None or self.read_cursor + length > len(self.input):
            raise SerializationError("attempt to read past end of buffer")
        result = self.input[self.read_cursor:self.read_cursor+length]
        self.read_cursor += length
        return result
    def read_boolean(self):
        # py3 fix: bytearray indexing yields an int, so compare with 0
        # (the old `!= chr(0)` compared int to str and was always True).
        return self.read_bytes(1)[0] != 0
    def read_int16(self): return self._read_num('<h')
    def read_uint16(self): return self._read_num('<H')
    def read_int32(self): return self._read_num('<i')
    def read_uint32(self): return self._read_num('<I')
    def read_int64(self): return self._read_num('<q')
    def read_uint64(self): return self._read_num('<Q')
    def write_boolean(self, val):
        # py3 fix: write a bytes literal; bytearray(chr(...)) raises TypeError.
        return self.write(b'\x01' if val else b'\x00')
    def write_int16(self, val): return self._write_num('<h', val)
    def write_uint16(self, val): return self._write_num('<H', val)
    def write_int32(self, val): return self._write_num('<i', val)
    def write_uint32(self, val): return self._write_num('<I', val)
    def write_int64(self, val): return self._write_num('<q', val)
    def write_uint64(self, val): return self._write_num('<Q', val)
    def read_compact_size(self):
        """Read a Bitcoin varint (1, 3, 5 or 9 bytes on the wire)."""
        try:
            size = self.input[self.read_cursor]
            self.read_cursor += 1
            if size == 253:
                size = self._read_num('<H')
            elif size == 254:
                size = self._read_num('<I')
            elif size == 255:
                size = self._read_num('<Q')
            return size
        except IndexError:
            raise SerializationError("attempt to read past end of buffer")
    def write_compact_size(self, size):
        """Write a Bitcoin varint; the prefix byte selects the width."""
        if size < 0:
            raise SerializationError("attempt to write size < 0")
        elif size < 253:
            self.write(bytes([size]))
        elif size < 2**16:
            self.write(b'\xfd')
            self._write_num('<H', size)
        elif size < 2**32:
            self.write(b'\xfe')
            self._write_num('<I', size)
        elif size < 2**64:
            self.write(b'\xff')
            self._write_num('<Q', size)
    def _read_num(self, format):
        """Unpack one number at the cursor; wraps struct errors."""
        try:
            (i,) = struct.unpack_from(format, self.input, self.read_cursor)
            self.read_cursor += struct.calcsize(format)
        except Exception as e:
            raise SerializationError(e)
        return i
    def _write_num(self, format, num):
        s = struct.pack(format, num)
        self.write(s)
# enum-like type
# From the Python Cookbook, downloaded from http://code.activestate.com/recipes/67107/
class EnumException(Exception):
    """Raised by Enumeration for invalid, duplicate, or non-integer entries."""
    pass
class Enumeration:
    """Name<->value mapping built from a list of names and optional
    (name, value) tuples; plain names continue sequentially from the
    previous value (Python Cookbook recipe 67107)."""
    def __init__(self, name, enumList):
        self.__doc__ = name
        lookup = { }
        reverseLookup = { }
        i = 0
        uniqueNames = [ ]
        uniqueValues = [ ]
        for x in enumList:
            if isinstance(x, tuple):
                x, i = x
            # BUG FIX: the original concatenated non-string values into the
            # message and raised TypeError instead of EnumException.
            if not isinstance(x, str):
                raise EnumException("enum name is not a string: " + str(x))
            if not isinstance(i, int):
                raise EnumException("enum value is not an integer: " + str(i))
            if x in uniqueNames:
                raise EnumException("enum name is not unique: " + x)
            if i in uniqueValues:
                raise EnumException("enum value is not unique for " + x)
            uniqueNames.append(x)
            uniqueValues.append(i)
            lookup[x] = i
            reverseLookup[i] = x
            i = i + 1
        self.lookup = lookup
        self.reverseLookup = reverseLookup
    def __getattr__(self, attr):
        """Attribute access returns the numeric value of an enum name."""
        if attr not in self.lookup:
            raise AttributeError
        return self.lookup[attr]
    def whatis(self, value):
        """Reverse lookup: numeric value -> name."""
        return self.reverseLookup[value]
# This function comes from bitcointools, bct-LICENSE.txt.
def long_hex(bytes):
    """Full hex encoding of a bytes-like value.
    BUG FIX: py3 bytes objects have no .encode('hex_codec'); use .hex()."""
    return bytes.hex()
# This function comes from bitcointools, bct-LICENSE.txt.
def short_hex(bytes):
    """Abbreviated hex of a bytes-like value: full string when shorter than
    11 hex chars, else the first 4 + '...' + last 4.
    BUG FIX: py3 bytes objects have no .encode('hex_codec'); use .hex()."""
    t = bytes.hex()
    if len(t) < 11:
        return t
    return t[0:4]+"..."+t[-4:]
# Bitcoin/Zcash script opcode table; (name, value) tuples pin explicit
# values and plain names continue sequentially from the previous value.
opcodes = Enumeration("Opcodes", [
    ("OP_0", 0), ("OP_PUSHDATA1",76), "OP_PUSHDATA2", "OP_PUSHDATA4", "OP_1NEGATE", "OP_RESERVED",
    "OP_1", "OP_2", "OP_3", "OP_4", "OP_5", "OP_6", "OP_7",
    "OP_8", "OP_9", "OP_10", "OP_11", "OP_12", "OP_13", "OP_14", "OP_15", "OP_16",
    "OP_NOP", "OP_VER", "OP_IF", "OP_NOTIF", "OP_VERIF", "OP_VERNOTIF", "OP_ELSE", "OP_ENDIF", "OP_VERIFY",
    "OP_RETURN", "OP_TOALTSTACK", "OP_FROMALTSTACK", "OP_2DROP", "OP_2DUP", "OP_3DUP", "OP_2OVER", "OP_2ROT", "OP_2SWAP",
    "OP_IFDUP", "OP_DEPTH", "OP_DROP", "OP_DUP", "OP_NIP", "OP_OVER", "OP_PICK", "OP_ROLL", "OP_ROT",
    "OP_SWAP", "OP_TUCK", "OP_CAT", "OP_SUBSTR", "OP_LEFT", "OP_RIGHT", "OP_SIZE", "OP_INVERT", "OP_AND",
    "OP_OR", "OP_XOR", "OP_EQUAL", "OP_EQUALVERIFY", "OP_RESERVED1", "OP_RESERVED2", "OP_1ADD", "OP_1SUB", "OP_2MUL",
    "OP_2DIV", "OP_NEGATE", "OP_ABS", "OP_NOT", "OP_0NOTEQUAL", "OP_ADD", "OP_SUB", "OP_MUL", "OP_DIV",
    "OP_MOD", "OP_LSHIFT", "OP_RSHIFT", "OP_BOOLAND", "OP_BOOLOR",
    "OP_NUMEQUAL", "OP_NUMEQUALVERIFY", "OP_NUMNOTEQUAL", "OP_LESSTHAN",
    "OP_GREATERTHAN", "OP_LESSTHANOREQUAL", "OP_GREATERTHANOREQUAL", "OP_MIN", "OP_MAX",
    "OP_WITHIN", "OP_RIPEMD160", "OP_SHA1", "OP_SHA256", "OP_HASH160",
    "OP_HASH256", "OP_CODESEPARATOR", "OP_CHECKSIG", "OP_CHECKSIGVERIFY", "OP_CHECKMULTISIG",
    "OP_CHECKMULTISIGVERIFY",
    ("OP_NOP1", 0xB0),
    ("OP_CHECKLOCKTIMEVERIFY", 0xB1), ("OP_CHECKSEQUENCEVERIFY", 0xB2),
    "OP_NOP4", "OP_NOP5", "OP_NOP6", "OP_NOP7", "OP_NOP8", "OP_NOP9", "OP_NOP10",
    ("OP_INVALIDOPCODE", 0xFF),
])
def script_GetOp(_bytes):
    """Iterate over a raw script, yielding (opcode, pushed_data, next_offset).
    pushed_data is None for non-push opcodes."""
    i = 0
    while i < len(_bytes):
        vch = None
        opcode = _bytes[i]
        i += 1
        if opcode <= opcodes.OP_PUSHDATA4:
            # Push opcode: determine how many data bytes follow. For
            # opcodes < OP_PUSHDATA1 the opcode itself is the length.
            nSize = opcode
            if opcode == opcodes.OP_PUSHDATA1:
                nSize = _bytes[i]
                i += 1
            elif opcode == opcodes.OP_PUSHDATA2:
                (nSize,) = struct.unpack_from('<H', _bytes, i)
                i += 2
            elif opcode == opcodes.OP_PUSHDATA4:
                (nSize,) = struct.unpack_from('<I', _bytes, i)
                i += 4
            vch = _bytes[i:i + nSize]
            i += nSize
        yield opcode, vch, i
def script_GetOpName(opcode):
    """Human-readable opcode name without the 'OP_' prefix."""
    return (opcodes.whatis(opcode)).replace("OP_", "")
def decode_script(bytes):
    """Render a raw script as a readable string (push lengths + opcode names)."""
    result = ''
    for (opcode, vch, i) in script_GetOp(bytes):
        if len(result) > 0: result += " "
        if opcode <= opcodes.OP_PUSHDATA4:
            # data push: show "<length>:" followed by abbreviated hex
            result += "%d:"%(opcode,)
            result += short_hex(vch)
        else:
            result += script_GetOpName(opcode)
    return result
def match_decoded(decoded, to_match):
    """Check a decoded script (list of (opcode, data, offset) tuples from
    script_GetOp) against a template list of opcodes; any data-push opcode
    matches an OP_PUSHDATA4 placeholder in the template.
    (Also removes a stray semicolon from the original.)"""
    if len(decoded) != len(to_match):
        return False
    for item, expected in zip(decoded, to_match):
        op = item[0]
        if expected == opcodes.OP_PUSHDATA4 and 0 < op <= opcodes.OP_PUSHDATA4:
            # Opcodes below OP_PUSHDATA4 all just push data onto stack, and are equivalent.
            continue
        if expected != op:
            return False
    return True
def parse_sig(x_sig):
    """Replace NO_SIGNATURE placeholders with None; keep real sig hex as-is."""
    parsed = []
    for sig in x_sig:
        parsed.append(None if sig == NO_SIGNATURE else sig)
    return parsed
def safe_parse_pubkey(x):
    """Best effort: convert an extended pubkey to a plain pubkey, returning
    the input unchanged if conversion fails."""
    try:
        return xpubkey_to_pubkey(x)
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed
        return x
def parse_scriptSig(d, _bytes):
    """Decode an input's scriptSig into the dict d: fills type, address,
    signatures, (x_)pubkeys, num_sig and, for p2sh, the redeem script.
    Leaves d at its defaults if the script is unrecognized."""
    try:
        decoded = [ x for x in script_GetOp(_bytes) ]
    except Exception as e:
        # coinbase transactions raise an exception
        print_error("parse_scriptSig: cannot find address in input script (coinbase?)",
                    bh2u(_bytes))
        return
    # single data push -> pay-to-pubkey spend (the push is the signature)
    match = [ opcodes.OP_PUSHDATA4 ]
    if match_decoded(decoded, match):
        item = decoded[0][1]
        if item[0] != 0:
            # assert item[0] == 0x30
            # pay-to-pubkey
            d['type'] = 'p2pk'
            d['address'] = "(pubkey)"
            d['signatures'] = [bh2u(item)]
            d['num_sig'] = 1
            d['x_pubkeys'] = ["(pubkey)"]
            d['pubkeys'] = ["(pubkey)"]
        return
    # p2pkh TxIn transactions push a signature
    # (71-73 bytes) and then their public key
    # (33 or 65 bytes) onto the stack:
    match = [ opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4 ]
    if match_decoded(decoded, match):
        sig = bh2u(decoded[0][1])
        x_pubkey = bh2u(decoded[1][1])
        try:
            signatures = parse_sig([sig])
            pubkey, address = xpubkey_to_address(x_pubkey)
        except:
            print_error("parse_scriptSig: cannot find address in input script (p2pkh?)",
                        bh2u(_bytes))
            return
        d['type'] = 'p2pkh'
        d['signatures'] = signatures
        d['x_pubkeys'] = [x_pubkey]
        d['num_sig'] = 1
        d['pubkeys'] = [pubkey]
        d['address'] = address
        return
    # p2sh transaction, m of n
    # (OP_0 absorbs the historical CHECKMULTISIG off-by-one, then one push
    # per signature, and finally the redeem script itself)
    match = [ opcodes.OP_0 ] + [ opcodes.OP_PUSHDATA4 ] * (len(decoded) - 1)
    if match_decoded(decoded, match):
        x_sig = [bh2u(x[1]) for x in decoded[1:-1]]
        try:
            m, n, x_pubkeys, pubkeys, redeemScript = parse_redeemScript(decoded[-1][1])
        except NotRecognizedRedeemScript:
            print_error("parse_scriptSig: cannot find address in input script (p2sh?)",
                        bh2u(_bytes))
            # we could still guess:
            # d['address'] = hash160_to_p2sh(hash_160(decoded[-1][1]))
            return
        # write result in d
        d['type'] = 'p2sh'
        d['num_sig'] = m
        d['signatures'] = parse_sig(x_sig)
        d['x_pubkeys'] = x_pubkeys
        d['pubkeys'] = pubkeys
        d['redeemScript'] = redeemScript
        d['address'] = hash160_to_p2sh(hash_160(bfh(redeemScript)))
        return
    print_error("parse_scriptSig: cannot find address in input script (unknown)",
                bh2u(_bytes))
def parse_redeemScript(s):
    """Parse an m-of-n CHECKMULTISIG redeem script.
    Returns (m, n, x_pubkeys, pubkeys, redeemScript-hex); raises
    NotRecognizedRedeemScript for anything else."""
    dec2 = [ x for x in script_GetOp(s) ]
    try:
        # OP_m <n pubkeys> OP_n OP_CHECKMULTISIG; m/n encoded relative to OP_1
        m = dec2[0][0] - opcodes.OP_1 + 1
        n = dec2[-2][0] - opcodes.OP_1 + 1
    except IndexError:
        raise NotRecognizedRedeemScript()
    op_m = opcodes.OP_1 + m - 1
    op_n = opcodes.OP_1 + n - 1
    match_multisig = [ op_m ] + [opcodes.OP_PUSHDATA4]*n + [ op_n, opcodes.OP_CHECKMULTISIG ]
    if not match_decoded(dec2, match_multisig):
        raise NotRecognizedRedeemScript()
    x_pubkeys = [bh2u(x[1]) for x in dec2[1:-2]]
    pubkeys = [safe_parse_pubkey(x) for x in x_pubkeys]
    # canonical re-serialization of the script from the parsed pubkeys
    redeemScript = multisig_script(pubkeys, m)
    return m, n, x_pubkeys, pubkeys, redeemScript
def get_address_from_output_script(_bytes, *, net=None):
    """Classify a scriptPubKey; returns (TYPE_PUBKEY, pubkey-hex),
    (TYPE_ADDRESS, address) or (TYPE_SCRIPT, script-hex) as a fallback."""
    decoded = [x for x in script_GetOp(_bytes)]
    # The Genesis Block, self-payments, and pay-by-IP-address payments look like:
    # 65 BYTES:... CHECKSIG
    match = [ opcodes.OP_PUSHDATA4, opcodes.OP_CHECKSIG ]
    if match_decoded(decoded, match):
        return TYPE_PUBKEY, bh2u(decoded[0][1])
    # Pay-by-Bitcoin-address TxOuts look like:
    # DUP HASH160 20 BYTES:... EQUALVERIFY CHECKSIG
    match = [ opcodes.OP_DUP, opcodes.OP_HASH160, opcodes.OP_PUSHDATA4, opcodes.OP_EQUALVERIFY, opcodes.OP_CHECKSIG ]
    if match_decoded(decoded, match):
        return TYPE_ADDRESS, hash160_to_p2pkh(decoded[2][1], net=net)
    # p2sh
    match = [ opcodes.OP_HASH160, opcodes.OP_PUSHDATA4, opcodes.OP_EQUAL ]
    if match_decoded(decoded, match):
        return TYPE_ADDRESS, hash160_to_p2sh(decoded[1][1], net=net)
    return TYPE_SCRIPT, bh2u(_bytes)
def parse_input(vds):
    """Read one transaction input from the stream and decode its scriptSig.
    Returns a dict with the outpoint, sequence and signature/pubkey info;
    coinbase inputs (null outpoint) are typed 'coinbase' and left undecoded."""
    d = {}
    prevout_hash = hash_encode(vds.read_bytes(32))
    prevout_n = vds.read_uint32()
    scriptSig = vds.read_bytes(vds.read_compact_size())
    sequence = vds.read_uint32()
    d['prevout_hash'] = prevout_hash
    d['prevout_n'] = prevout_n
    d['sequence'] = sequence
    d['x_pubkeys'] = []
    d['pubkeys'] = []
    d['signatures'] = {}
    d['address'] = None
    d['num_sig'] = 0
    if prevout_hash == '00'*32:
        # null previous outpoint marks a coinbase input
        d['type'] = 'coinbase'
        d['scriptSig'] = bh2u(scriptSig)
    else:
        d['type'] = 'unknown'
        if scriptSig:
            d['scriptSig'] = bh2u(scriptSig)
            try:
                parse_scriptSig(d, scriptSig)
            except BaseException:
                # unparseable script: keep the raw hex and carry on
                traceback.print_exc(file=sys.stderr)
                print_error('failed to parse scriptSig', bh2u(scriptSig))
        else:
            d['scriptSig'] = ''
    return d
def parse_output(vds, i):
    """Read one transaction output: value (in zatoshis), scriptPubKey and
    its decoded type/address; i is the output's index in the transaction."""
    d = {}
    d['value'] = vds.read_int64()
    scriptPubKey = vds.read_bytes(vds.read_compact_size())
    d['type'], d['address'] = get_address_from_output_script(scriptPubKey)
    d['scriptPubKey'] = bh2u(scriptPubKey)
    d['prevout_n'] = i
    return d
def parse_join_split(vds):
    """Read one Sprout JoinSplit description from the stream.
    Field widths follow the Zcash v3 transaction format."""
    # Dict literal values are evaluated left to right, so the stream is
    # consumed in exactly the original field order.
    return {
        'vpub_old': vds.read_uint64(),
        'vpub_new': vds.read_uint64(),
        'anchor': vds.read_bytes(32),
        'nullifiers': vds.read_bytes(64),
        'commitments': vds.read_bytes(64),
        'ephemeralKey': vds.read_bytes(32),
        'randomSeed': vds.read_bytes(32),
        'vmacs': vds.read_bytes(64),
        'zkproof': vds.read_bytes(296),
        'encCiphertexts': vds.read_bytes(1202),
    }
def deserialize(raw):
    """Deserialize a raw hex transaction into a dict.
    Handles legacy (v1/v2) and Zcash Overwinter (v3) / Sapling (v4)
    formats; overwintered transactions carry extra shielded fields."""
    len_raw = len(raw) / 2  # NOTE(review): unused; looks vestigial
    vds = BCDataStream()
    vds.write(bfh(raw))
    d = {}
    start = vds.read_cursor  # NOTE(review): unused
    header = vds.read_uint32()
    # the high bit of the header flags an "overwintered" transaction
    overwintered = True if header & 0x80000000 else False
    version = header & 0x7FFFFFFF
    if overwintered:
        if version not in [3, 4]:
            raise TransactionVersionError('Overwintered transaction'
                                          ' with invalid version=%d' % version)
        # versionGroupId must match the version (Overwinter vs Sapling)
        ver_group_id = vds.read_uint32()
        if (version == 3 and ver_group_id != OVERWINTERED_VERSION_GROUP_ID or
                version == 4 and ver_group_id != SAPLING_VERSION_GROUP_ID):
            raise TransactionVersionError('Overwintered transaction with wrong'
                                          ' versionGroupId=%X' % ver_group_id)
        d['versionGroupId'] = ver_group_id
    d['overwintered'] = overwintered
    d['version'] = version
    n_vin = vds.read_compact_size()
    d['inputs'] = [parse_input(vds) for i in range(n_vin)]
    n_vout = vds.read_compact_size()
    d['outputs'] = [parse_output(vds, i) for i in range(n_vout)]
    d['lockTime'] = vds.read_uint32()
    if overwintered:
        d['expiryHeight'] = vds.read_uint32()
        if version == 4:
            # Sapling value balance plus shielded spend/output descriptions
            d['valueBalance'] = vds.read_int64()
            n_sh_sp = vds.read_compact_size()
            if n_sh_sp > 0:
                d['shieldedSpends'] = vds.read_bytes(n_sh_sp*384)
            n_sh_out = vds.read_compact_size()
            if n_sh_out > 0:
                d['shieldedOutputs'] = vds.read_bytes(n_sh_out*948)
        n_js = vds.read_compact_size()
        if n_js > 0:
            if version == 3:
                d['joinSplits'] = [parse_join_split(vds) for i in range(n_js)]
            else:
                # v4 JoinSplits use Groth16 proofs (1698 bytes each); kept opaque
                d['joinSplits'] = vds.read_bytes(n_js*1698)
            d['joinSplitPubKey'] = vds.read_bytes(32)
            d['joinSplitSig'] = vds.read_bytes(64)
        if version == 4:
            d['bindingSig'] = vds.read_bytes(64)
    return d
# pay & redeem scripts
def multisig_script(public_keys, m):
    """Build the m-of-n CHECKMULTISIG redeem script (hex) for the given
    hex-encoded public keys."""
    n = len(public_keys)
    assert n <= 15
    assert m <= n
    parts = [format(opcodes.OP_1 + m - 1, 'x')]
    for key in public_keys:
        # push each pubkey (op_push takes the byte length, hex is 2 chars/byte)
        parts.append(op_push(len(key) // 2) + key)
    parts.append(format(opcodes.OP_1 + n - 1, 'x'))
    parts.append('ae')  # OP_CHECKMULTISIG
    return ''.join(parts)
class Transaction:
    def __str__(self):
        # Lazily serialize and cache the raw hex form.
        if self.raw is None:
            self.raw = self.serialize()
        return self.raw
    def __init__(self, raw):
        """Accept the raw tx as a hex string, a dict with a 'hex' key, or
        None (an empty transaction, e.g. to be filled via from_io())."""
        if raw is None:
            self.raw = None
        elif isinstance(raw, str):
            self.raw = raw.strip() if raw else None
        elif isinstance(raw, dict):
            self.raw = raw['hex']
        else:
            raise Exception("cannot initialize transaction", raw)
        self._inputs = None
        self._outputs = None
        self.locktime = 0
        # Default to a Zcash Sapling (v4, overwintered) transaction.
        self.version = 4
        self.overwintered = True
        self.versionGroupId = SAPLING_VERSION_GROUP_ID
        self.expiryHeight = 0
        # Shielded components; 0/None for fully transparent transactions.
        self.valueBalance = 0
        self.shieldedSpends = None
        self.shieldedOutputs = None
        self.joinSplits = None
        self.joinSplitPubKey = None
        self.joinSplitSig = None
        self.bindingSig = None
    def update(self, raw):
        """Replace the raw tx hex and re-deserialize it immediately."""
        self.raw = raw
        self._inputs = None
        self.deserialize()
    def inputs(self):
        # Deserialize lazily on first access.
        if self._inputs is None:
            self.deserialize()
        return self._inputs
    def outputs(self):
        # Deserialize lazily on first access.
        if self._outputs is None:
            self.deserialize()
        return self._outputs
    @classmethod
    def get_sorted_pubkeys(self, txin):
        # sort pubkeys and x_pubkeys, using the order of pubkeys
        # (derives missing pubkeys from x_pubkeys on first call and caches
        # the sorted lists back into txin)
        if txin['type'] == 'coinbase':
            return [], []
        x_pubkeys = txin['x_pubkeys']
        pubkeys = txin.get('pubkeys')
        if pubkeys is None:
            pubkeys = [xpubkey_to_pubkey(x) for x in x_pubkeys]
            pubkeys, x_pubkeys = zip(*sorted(zip(pubkeys, x_pubkeys)))
            txin['pubkeys'] = pubkeys = list(pubkeys)
            txin['x_pubkeys'] = x_pubkeys = list(x_pubkeys)
        return pubkeys, x_pubkeys
    def update_signatures(self, raw):
        """Add new signatures to a transaction"""
        # Merge signatures from another serialization of the same tx (e.g.
        # signed by a cosigner); each sig's pubkey is recovered from the
        # signature itself to find its slot.
        d = deserialize(raw)
        for i, txin in enumerate(self.inputs()):
            pubkeys, x_pubkeys = self.get_sorted_pubkeys(txin)
            sigs1 = txin.get('signatures')
            sigs2 = d['inputs'][i].get('signatures')
            for sig in sigs2:
                if sig in sigs1:
                    continue
                pre_hash = Hash(bfh(self.serialize_preimage(i)))
                # der to string
                order = ecdsa.ecdsa.generator_secp256k1.order()
                r, s = ecdsa.util.sigdecode_der(bfh(sig[:-2]), order)
                sig_string = ecdsa.util.sigencode_string(r, s, order)
                compressed = True
                # try all four ECDSA public-key recovery ids
                for recid in range(4):
                    public_key = MyVerifyingKey.from_signature(sig_string, recid, pre_hash, curve = SECP256k1)
                    pubkey = bh2u(point_to_ser(public_key.pubkey.point, compressed))
                    if pubkey in pubkeys:
                        # confirm the recovered key actually verifies the sig
                        public_key.verify_digest(sig_string, pre_hash, sigdecode = ecdsa.util.sigdecode_string)
                        j = pubkeys.index(pubkey)
                        print_error("adding sig", i, j, pubkey, sig)
                        self._inputs[i]['signatures'][j] = sig
                        #self._inputs[i]['x_pubkeys'][j] = pubkey
                        break
        # redo raw
        self.raw = self.serialize()
    def deserialize(self):
        """Parse self.raw into _inputs/_outputs and the Zcash version and
        shielded fields. No-op if raw is unset or already deserialized."""
        if self.raw is None:
            return
            #self.raw = self.serialize()
        if self._inputs is not None:
            return
        d = deserialize(self.raw)
        self._inputs = d['inputs']
        # outputs are stored as (type, address, value) tuples
        self._outputs = [(x['type'], x['address'], x['value']) for x in d['outputs']]
        self.locktime = d['lockTime']
        self.version = d['version']
        self.overwintered = d['overwintered']
        self.versionGroupId = d.get('versionGroupId')
        self.expiryHeight = d.get('expiryHeight', 0)
        self.valueBalance = d.get('valueBalance', 0)
        self.shieldedSpends = d.get('shieldedSpends')
        self.shieldedOutputs = d.get('shieldedOutputs')
        self.joinSplits = d.get('joinSplits')
        self.joinSplitPubKey = d.get('joinSplitPubKey')
        self.joinSplitSig = d.get('joinSplitSig')
        self.bindingSig = d.get('bindingSig')
        return d
    @classmethod
    def from_io(klass, inputs, outputs, locktime=0):
        """Build an unsigned transaction directly from input/output lists."""
        self = klass(None)
        self._inputs = inputs
        self._outputs = outputs
        self.locktime = locktime
        return self
    @classmethod
    def pay_script(self, output_type, addr):
        """Return the scriptPubKey hex paying to `addr` for the output type."""
        if output_type == TYPE_SCRIPT:
            # addr already is a raw script in hex
            return addr
        elif output_type == TYPE_ADDRESS:
            return bitcoin.address_to_script(addr)
        elif output_type == TYPE_PUBKEY:
            return bitcoin.public_key_to_p2pk_script(addr)
        else:
            raise TypeError('Unknown output type')
@classmethod
def estimate_pubkey_size_from_x_pubkey(cls, x_pubkey):
try:
if x_pubkey[0:2] in ['02', '03']: # compressed pubkey
return 0x21
elif x_pubkey[0:2] == '04': # uncompressed pubkey
return 0x41
elif x_pubkey[0:2] == 'ff': # bip32 extended pubkey
return 0x21
elif x_pubkey[0:2] == 'fe': # old electrum extended pubkey
return 0x41
except Exception as e:
pass
return 0x21 # just guess it is compressed
@classmethod
def estimate_pubkey_size_for_txin(cls, txin):
pubkeys = txin.get('pubkeys', [])
x_pubkeys = txin.get('x_pubkeys', [])
if pubkeys and len(pubkeys) > 0:
return cls.estimate_pubkey_size_from_x_pubkey(pubkeys[0])
elif x_pubkeys and len(x_pubkeys) > 0:
return cls.estimate_pubkey_size_from_x_pubkey(x_pubkeys[0])
else:
return 0x21 # just guess it is compressed
    @classmethod
    def get_siglist(self, txin, estimate_size=False):
        # if we have enough signatures, we use the actual pubkeys
        # otherwise, use extended pubkeys (with bip32 derivation)
        # Returns (pk_list, sig_list) as parallel hex-string lists.
        if txin['type'] == 'coinbase':
            return [], []
        num_sig = txin.get('num_sig', 1)
        if estimate_size:
            # placeholder zero-bytes of the right lengths for fee estimation
            pubkey_size = self.estimate_pubkey_size_for_txin(txin)
            pk_list = ["00" * pubkey_size] * len(txin.get('x_pubkeys', [None]))
            # we assume that signature will be 0x48 bytes long
            sig_list = [ "00" * 0x48 ] * num_sig
        else:
            pubkeys, x_pubkeys = self.get_sorted_pubkeys(txin)
            x_signatures = txin['signatures']
            signatures = list(filter(None, x_signatures))
            is_complete = len(signatures) == num_sig
            if is_complete:
                pk_list = pubkeys
                sig_list = signatures
            else:
                # incomplete: keep placeholders so cosigners can fill slots
                pk_list = x_pubkeys
                sig_list = [sig if sig else NO_SIGNATURE for sig in x_signatures]
        return pk_list, sig_list
    @classmethod
    def input_script(self, txin, estimate_size=False):
        """Build the scriptSig hex for an input (placeholder signatures when
        estimate_size is True)."""
        _type = txin['type']
        if _type == 'coinbase':
            return txin['scriptSig']
        pubkeys, sig_list = self.get_siglist(txin, estimate_size)
        script = ''.join(push_script(x) for x in sig_list)
        if _type == 'p2pk':
            # p2pk spends contain only the signature push
            pass
        elif _type == 'p2sh':
            # put op_0 before script
            script = '00' + script
            redeem_script = multisig_script(pubkeys, txin['num_sig'])
            script += push_script(redeem_script)
        elif _type == 'p2pkh':
            script += push_script(pubkeys[0])
        elif _type == 'address':
            script += push_script(pubkeys[0])
        elif _type == 'unknown':
            return txin['scriptSig']
        return script
@classmethod
def is_txin_complete(cls, txin):
if txin['type'] == 'coinbase':
return True
num_sig = txin.get('num_sig', 1)
x_signatures = txin['signatures']
signatures = list(filter(None, x_signatures))
return len(signatures) == num_sig
@classmethod
def get_preimage_script(cls, txin):
    """Return the script (scriptCode) that is hashed and signed for *txin*.

    :raises TypeError: for input types that cannot be signed here.
    """
    # Fix: first parameter renamed `self` -> `cls` for consistency with
    # the other classmethods in this class.
    pubkeys, x_pubkeys = cls.get_sorted_pubkeys(txin)
    if txin['type'] == 'p2pkh':
        return bitcoin.address_to_script(txin['address'])
    elif txin['type'] in ['p2sh']:
        return multisig_script(pubkeys, txin['num_sig'])
    elif txin['type'] == 'p2pk':
        pubkey = pubkeys[0]
        return bitcoin.public_key_to_p2pk_script(pubkey)
    else:
        raise TypeError('Unknown txin type', txin['type'])
@classmethod
def serialize_outpoint(cls, txin):
    """Serialize the prevout as hex: reversed txid + 4-byte LE index."""
    # Fix: first parameter renamed `self` -> `cls` for consistency with
    # the other classmethods in this class.
    return bh2u(bfh(txin['prevout_hash'])[::-1]) + int_to_hex(txin['prevout_n'], 4)
@classmethod
def get_outpoint_from_txin(cls, txin):
    """Return the outpoint as ``'txid:n'``, or None for a coinbase input."""
    if txin['type'] == 'coinbase':
        return None
    return '%s:%d' % (txin['prevout_hash'], txin['prevout_n'])
@classmethod
def serialize_input(cls, txin, script):
    """Serialize one input: outpoint, script length, script, sequence."""
    # Fix: first parameter renamed `self` -> `cls` for consistency with
    # the other classmethods in this class.
    # Prev hash and index
    s = cls.serialize_outpoint(txin)
    # Script length, script, sequence
    s += var_int(len(script)//2)
    s += script
    s += int_to_hex(txin.get('sequence', 0xffffffff - 1), 4)
    return s
def BIP_LI01_sort(self):
    """Deterministically sort inputs and outputs per BIP-LI01.

    Inputs are ordered by (prevout_hash, prevout_n); outputs by
    (amount, scriptPubKey).  Sorting is in place.
    """
    # See https://github.com/kristovatlas/rfc/blob/master/bips/bip-li01.mediawiki
    self._inputs.sort(key = lambda i: (i['prevout_hash'], i['prevout_n']))
    self._outputs.sort(key = lambda o: (o[2], self.pay_script(o[0], o[1])))
def serialize_output(self, output):
    """Serialize one ``(type, address, amount)`` output to hex."""
    otype, addr, amount = output
    script = self.pay_script(otype, addr)
    return (int_to_hex(amount, 8)          # 8-byte LE value
            + var_int(len(script) // 2)    # script length in bytes
            + script)
def serialize_join_split(self, js):
    """Serialize a JoinSplit description dict to hex, field by field."""
    parts = [
        int_to_hex(js['vpub_old'], 8),
        int_to_hex(js['vpub_new'], 8),
        js['anchor'],
        js['nullifiers'],
        js['commitments'],
        js['ephemeralKey'],
        js['randomSeed'],
        js['vmacs'],
        js['zkproof'],
        js['encCiphertexts'],
    ]
    return ''.join(parts)
def serialize_preimage(self, i):
    """Return the hex sighash preimage for input *i* (SIGHASH_ALL).

    Overwintered transactions follow the ZIP-143-style BLAKE2b scheme
    with per-field personalization strings; legacy transactions use the
    original Bitcoin-style serialization where only input *i* carries
    its scriptCode.
    """
    overwintered = self.overwintered
    version = self.version
    nHashType = int_to_hex(1, 4)  # SIGHASH_ALL
    nLocktime = int_to_hex(self.locktime, 4)
    inputs = self.inputs()
    outputs = self.outputs()
    txin = inputs[i]
    # TODO: py3 hex
    if overwintered:
        # Header is the version with the "overwintered" bit set.
        nHeader = int_to_hex(0x80000000 | version, 4)
        nVersionGroupId = int_to_hex(self.versionGroupId, 4)
        # Digests over all prevouts / sequences / outputs, each with its
        # own BLAKE2b personalization string.
        s_prevouts = bfh(''.join(self.serialize_outpoint(txin) for txin in inputs))
        hashPrevouts = blake2b(s_prevouts, digest_size=32, person=b'ZcashPrevoutHash').hexdigest()
        s_sequences = bfh(''.join(int_to_hex(txin.get('sequence', 0xffffffff - 1), 4) for txin in inputs))
        hashSequence = blake2b(s_sequences, digest_size=32, person=b'ZcashSequencHash').hexdigest()
        s_outputs = bfh(''.join(self.serialize_output(o) for o in outputs))
        hashOutputs = blake2b(s_outputs, digest_size=32, person=b'ZcashOutputsHash').hexdigest()
        joinSplits = self.joinSplits
        #if joinSplits is None:
        #    hashJoinSplits = '00'*32
        #else:
        #    s_joinSplits = bfh(''.join(self.serialize_join_split(j) for j in joinSplits))
        #    s_joinSplits += self.joinSplitPubKey
        #    hashJoinSplits = blake2b(s_joinSplits, digest_size=32, person=b'ZcashJSplitsHash').hexdigest()
        # Shielded components are not produced here, so their hashes are
        # the empty-vector value (32 zero bytes).
        hashJoinSplits = '00'*32
        hashShieldedSpends = '00'*32
        hashShieldedOutputs = '00'*32
        nExpiryHeight = int_to_hex(self.expiryHeight, 4)
        nValueBalance = int_to_hex(self.valueBalance, 8)
        txin = inputs[i]
        preimage_script = self.get_preimage_script(txin)
        scriptCode = var_int(len(preimage_script) // 2) + preimage_script
        preimage = (
            nHeader + nVersionGroupId + hashPrevouts + hashSequence + hashOutputs
            + hashJoinSplits + hashShieldedSpends + hashShieldedOutputs + nLocktime
            + nExpiryHeight + nValueBalance + nHashType
            + self.serialize_outpoint(txin)
            + scriptCode
            + int_to_hex(txin['value'], 8)
            + int_to_hex(txin.get('sequence', 0xffffffff - 1), 4)
        )
    else:
        # Legacy scheme: every input is serialized, but only input i
        # gets its scriptCode; all other inputs get an empty script.
        nVersion = int_to_hex(version, 4)
        txins = var_int(len(inputs)) + ''.join(self.serialize_input(txin, self.get_preimage_script(txin) if i==k else '') for k, txin in enumerate(inputs))
        txouts = var_int(len(outputs)) + ''.join(self.serialize_output(o) for o in outputs)
        preimage = nVersion + txins + txouts + nLocktime + nHashType
    return preimage
def serialize(self, estimate_size=False):
    """Serialize the whole transaction to a hex string.

    :param estimate_size: when True, input scripts are filled with
        placeholder bytes of the expected length (for fee estimation).
    """
    nVersion = int_to_hex(self.version, 4)
    nLocktime = int_to_hex(self.locktime, 4)
    inputs = self.inputs()
    outputs = self.outputs()
    txins = var_int(len(inputs)) + ''.join(self.serialize_input(txin, self.input_script(txin, estimate_size)) for txin in inputs)
    txouts = var_int(len(outputs)) + ''.join(self.serialize_output(o) for o in outputs)
    if self.overwintered:
        # Overwintered format: header has the MSB set, then the version
        # group id; after locktime come expiry height, value balance and
        # three zero-length vectors (shielded spends/outputs, joinsplits).
        nVersion = int_to_hex(0x80000000 | self.version, 4)
        nVersionGroupId = int_to_hex(self.versionGroupId, 4)
        nExpiryHeight = int_to_hex(self.expiryHeight, 4)
        nValueBalance = int_to_hex(self.valueBalance, 8)
        return (nVersion + nVersionGroupId + txins + txouts + nLocktime
                + nExpiryHeight + nValueBalance + '00' + '00' + '00')
    else:
        return nVersion + txins + txouts + nLocktime
def hash(self):
    """Deprecated alias of txid(); prints a warning and delegates."""
    print("warning: deprecated tx.hash()")
    return self.txid()
def txid(self):
    """Return the hex txid (double-SHA256 of the serialization, byte
    reversed), or None while signatures are still missing."""
    if not self.is_complete():
        return None
    return bh2u(Hash(bfh(self.serialize()))[::-1])
def add_inputs(self, inputs):
    """Append *inputs* and invalidate the cached serialization."""
    self._inputs += inputs
    self.raw = None
def add_outputs(self, outputs):
    """Append *outputs* and invalidate the cached serialization."""
    self._outputs += outputs
    self.raw = None
def input_value(self):
    """Total value (in satoshis) of all inputs."""
    return sum(txin['value'] for txin in self.inputs())
def output_value(self):
    """Total value (in satoshis) of all outputs."""
    return sum(output[2] for output in self.outputs())
def get_fee(self):
    """Transaction fee = sum of input values minus sum of output values."""
    return self.input_value() - self.output_value()
def is_final(self):
    """True unless any input opts into replacement (sequence below the
    default of 0xfffffffe)."""
    # Idiom fix: any() over a generator instead of building a throwaway list.
    default_seq = 0xffffffff - 1
    return not any(txin.get('sequence', default_seq) < default_seq
                   for txin in self.inputs())
@profiler
def estimated_size(self):
    """Return an estimated virtual tx size in vbytes.

    BIP-0141 defines 'Virtual transaction size' as weight/4 rounded up.
    This definition is only for humans, and has little meaning otherwise.
    If we wanted sub-byte precision, fee calculation should use
    transaction weights, but for simplicity we approximate that with
    (virtual_size) x 4.
    """
    return self.virtual_size_from_weight(self.estimated_weight())
@classmethod
def estimated_input_weight(cls, txin):
    """Return an estimate of the serialized input weight in weight units."""
    script = cls.input_script(txin, True)
    serialized = cls.serialize_input(txin, script)
    # hex string -> bytes, then 4 weight units per non-witness byte
    return 4 * (len(serialized) // 2)
@classmethod
def estimated_output_size(cls, address):
    """Return an estimate of serialized output size in bytes."""
    script = bitcoin.address_to_script(address)
    # 8 byte value + 1 byte script len + script
    # NOTE(review): assumes the script length fits in a 1-byte var_int
    # (script < 253 bytes) — true for standard scripts; confirm for others.
    return 9 + len(script) // 2
@classmethod
def virtual_size_from_weight(cls, weight):
    """vsize = ceil(weight / 4), per BIP-141."""
    # Ceiling division via negated floor division.
    return -(-weight // 4)
def estimated_total_size(self):
    """Return an estimated total transaction size in bytes."""
    if not self.is_complete() or self.raw is None:
        return len(self.serialize(True)) // 2
    # self.raw is an ASCII hex string: two characters per byte.
    return len(self.raw) // 2
def estimated_base_size(self):
    """Return an estimated base transaction size in bytes."""
    # No segwit data here, so base size equals total size.
    return self.estimated_total_size()
def estimated_weight(self):
    """Return an estimate of transaction weight (3*base + total)."""
    return 3 * self.estimated_base_size() + self.estimated_total_size()
def signature_count(self):
    """Return ``(present, required)`` signature counts over all inputs.

    Coinbase inputs are skipped; inputs without 'num_sig' count as -1
    required, which keeps the transaction reported as incomplete.
    """
    present = 0
    required = 0
    for txin in self.inputs():
        if txin['type'] == 'coinbase':
            continue
        present += len([sig for sig in txin.get('signatures', []) if sig])
        required += txin.get('num_sig', -1)
    return present, required
def is_complete(self):
    """True when every input has all of its required signatures."""
    present, required = self.signature_count()
    return present == required
def sign(self, keypairs):
    """Sign every signable input with ``{x_pubkey: (secret, compressed)}``.

    Mutates ``self._inputs`` in place and re-serializes into ``self.raw``.
    """
    for i, txin in enumerate(self.inputs()):
        num = txin['num_sig']
        pubkeys, x_pubkeys = self.get_sorted_pubkeys(txin)
        for j, x_pubkey in enumerate(x_pubkeys):
            signatures = list(filter(None, txin['signatures']))
            if len(signatures) == num:
                # txin is complete
                break
            if x_pubkey in keypairs.keys():
                print_error("adding signature for", x_pubkey)
                sec, compressed = keypairs.get(x_pubkey)
                pubkey = public_key_from_private_key(sec, compressed)
                # add signature
                if self.overwintered:
                    # Overwintered sighash: BLAKE2b over the preimage with
                    # a personalization embedding the consensus branch id.
                    data = bfh(self.serialize_preimage(i))
                    person = b'ZcashSigHash' + SAPLING_BRANCH_ID.to_bytes(4, 'little')
                    pre_hash = blake2b(data, digest_size=32, person=person).digest()
                else:
                    pre_hash = Hash(bfh(self.serialize_preimage(i)))
                pkey = regenerate_key(sec)
                secexp = pkey.secret
                private_key = bitcoin.MySigningKey.from_secret_exponent(secexp, curve = SECP256k1)
                public_key = private_key.get_verifying_key()
                # Deterministic (RFC 6979) ECDSA, canonical DER encoding.
                sig = private_key.sign_digest_deterministic(pre_hash, hashfunc=hashlib.sha256, sigencode = ecdsa.util.sigencode_der_canonize)
                if not public_key.verify_digest(sig, pre_hash, sigdecode = ecdsa.util.sigdecode_der):
                    raise Exception('Sanity check verifying our own signature failed.')
                # Append the SIGHASH_ALL byte (0x01) to the DER signature.
                txin['signatures'][j] = bh2u(sig) + '01'
                #txin['x_pubkeys'][j] = pubkey
                txin['pubkeys'][j] = pubkey # needed for fd keys
                self._inputs[i] = txin
    print_error("is_complete", self.is_complete())
    self.raw = self.serialize()
def get_outputs(self):
    """Return ``[(address, value)]`` pairs, converting pubkeys to addresses."""
    # Fix: loop variable renamed from `type`, which shadowed the builtin.
    result = []
    for otype, x, v in self.outputs():
        if otype == TYPE_ADDRESS:
            addr = x
        elif otype == TYPE_PUBKEY:
            addr = bitcoin.public_key_to_p2pkh(bfh(x))
        else:
            addr = 'SCRIPT ' + x
        result.append((addr, v))  # consider using yield (addr, v)
    return result
def get_output_addresses(self):
    """Return just the addresses of all outputs."""
    return [addr for addr, _val in self.get_outputs()]
def has_address(self, addr):
    """True if *addr* appears among the outputs or the input addresses."""
    if addr in self.get_output_addresses():
        return True
    return addr in (txin.get("address") for txin in self.inputs())
def as_dict(self):
    """Return a JSON-friendly summary: hex, completeness and finality."""
    if self.raw is None:
        self.raw = self.serialize()
    self.deserialize()
    return {
        'hex': self.raw,
        'complete': self.is_complete(),
        'final': self.is_final(),
    }
def tx_from_str(txt):
    """Accept raw hexadecimal or JSON with a "hex" field; return the hex.

    :raises ValueError: for an empty input string.
    """
    # Fix: the bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # narrowed to Exception.  Also dropped the redundant `.keys()`.
    import json
    txt = txt.strip()
    if not txt:
        raise ValueError("empty string")
    try:
        bfh(txt)
        is_hex = True
    except Exception:
        is_hex = False
    if is_hex:
        return txt
    tx_dict = json.loads(str(txt))
    assert "hex" in tx_dict
    return tx_dict["hex"]
/HSTools-0.0.3-py3-none-any.whl/hstools/progress.py | import sys
import itertools
class progressBar(object):
    """Simple animated console progress indicator written to stdout.

    Three frame styles are supported: 'pulse' (a /\\ marker bouncing along
    a track), 'dial' (a spinning bar) and 'dots' (a growing dot row).
    """

    def __init__(self, progress_message, type='pulse', refresh_delay=0.25,
                 finish_message='Finished', error_message='An error has occurred'):
        # Endless cycle over the animation frames for the chosen style.
        self.barArray = itertools.cycle(self._pulseArrays(type))
        self.refreshDelay = float(refresh_delay)
        self.messagelen = 0
        # Each message is prefixed with '\r' so writes overwrite the line.
        self.msg = '\r' + progress_message
        self.fin = '\r' + finish_message
        self.err = '\r' + error_message
        self.overwrite_progress_length = len(self.msg) + 21

    def _pulseArrays(self, ptype='pulse'):
        """Return the list of frames for *ptype*; unknown types fall back
        to 'pulse'."""
        if ptype not in ('pulse', 'dial', 'dots'):
            ptype = 'pulse'
        if ptype == 'dial':
            return ['-', '\\', '|', '/', '-', '\\', '|', '/']
        if ptype == 'dots':
            # '.' column growing left to right across a 19-char field.
            return ['.' * i + ' ' * (19 - i) for i in range(19)]
        # 'pulse': slide a '/\' marker along a 19-underscore track, then
        # append the reversed middle so the marker bounces back.
        track = '_' * 19
        frames = [track[:i] + '/\\' + track[i:] for i in range(20)]
        return frames + frames[-2:0:-1]

    def _clearLine(self):
        # Blank out the current line (message plus frame padding).
        sys.stdout.write('\r' + ' ' * (len(self.msg) + 27))
        sys.stdout.flush()

    def updateProgressMessage(self, msg):
        """Replace the message shown by subsequent progress writes."""
        self.msg = '\r' + msg

    def writeprogress(self):
        """Redraw the progress line with the next animation frame."""
        sys.stdout.write('\r' + self.msg + ' ' + next(self.barArray))
        sys.stdout.flush()

    def success(self):
        """Erase the progress line and print the finish message."""
        self._clearLine()
        sys.stdout.write(self.fin + '\n')
        sys.stdout.flush()

    def error(self):
        """Erase the progress line and print the error message."""
        self._clearLine()
        sys.stdout.write(self.err + '\n')
        sys.stdout.flush()

    def update(self, *args):
        """Redraw with *args* interpolated into the message followed by
        the next animation frame."""
        self._clearLine()
        template = self.msg + ' %s '
        values = args + (next(self.barArray),)
        sys.stdout.write(template % values)
        sys.stdout.flush()
/Bot-Base-1.7.1.tar.gz/Bot-Base-1.7.1/bot_base/paginators/disnake_paginator.py | from typing import List, Union, TypeVar, Optional, Callable
import disnake
from disnake.ext import commands
# Inspired by https://github.com/nextcord/nextcord-ext-menus
T = TypeVar("T")
class PaginationView(disnake.ui.View):
    """Button row driving a :class:`DisnakePaginator`.

    Five buttons (first/previous/next/last/stop) are wired to the
    paginator's navigation callbacks; only the paginator's author may use
    them.
    """

    # Unicode emoji labels for the five navigation buttons.
    FIRST_PAGE = "\N{BLACK LEFT-POINTING DOUBLE TRIANGLE WITH VERTICAL BAR}\ufe0f"
    PREVIOUS_PAGE = "\N{BLACK LEFT-POINTING TRIANGLE}\ufe0f"
    NEXT_PAGE = "\N{BLACK RIGHT-POINTING TRIANGLE}\ufe0f"
    LAST_PAGE = "\N{BLACK RIGHT-POINTING DOUBLE TRIANGLE WITH VERTICAL BAR}\ufe0f"
    STOP = "\N{BLACK SQUARE FOR STOP}\ufe0f"

    def __init__(
        self,
        author_id: int,
        paginator: "DisnakePaginator",
        *,
        timeout: Optional[float] = 180,
    ):
        super().__init__(timeout=timeout)
        self.author_id: int = author_id
        self._paginator: "DisnakePaginator" = paginator
        # Default to disabled, we change them later anyway if actually required.
        self.first_page_button = disnake.ui.Button(label=self.FIRST_PAGE, disabled=True)
        self.previous_page_button = disnake.ui.Button(
            label=self.PREVIOUS_PAGE, disabled=True
        )
        self.next_page_button = disnake.ui.Button(label=self.NEXT_PAGE, disabled=True)
        self.last_page_button = disnake.ui.Button(label=self.LAST_PAGE, disabled=True)
        self.stop_button = disnake.ui.Button(label=self.STOP, disabled=True)
        # Route each button press to the paginator's navigation coroutine.
        self.first_page_button.callback = self._paginator.go_to_first_page
        self.previous_page_button.callback = self._paginator.go_to_previous_page
        self.next_page_button.callback = self._paginator.go_to_next_page
        self.last_page_button.callback = self._paginator.go_to_last_page
        self.stop_button.callback = self._paginator.stop_pages
        self.add_item(self.first_page_button)
        self.add_item(self.previous_page_button)
        self.add_item(self.next_page_button)
        self.add_item(self.last_page_button)
        self.add_item(self.stop_button)

    async def interaction_check(self, interaction: disnake.MessageInteraction) -> bool:
        # Only the user who started the paginator may press the buttons.
        return interaction.user.id == self.author_id

    async def on_timeout(self) -> None:
        # Stop both the view and the paginator (disables/removes buttons).
        self.stop()
        await self._paginator.stop()
class DisnakePaginator:
    """A simplistic button-based paginator built for Disnake."""

    def __init__(
        self,
        items_per_page: int,
        input_data: List[T],
        *,
        try_ephemeral: bool = True,
        delete_buttons_on_stop: bool = False,
        page_formatter: Optional[Callable] = None,
    ):
        """
        A simplistic paginator built for Disnake.

        Parameters
        ----------
        items_per_page: int
            How many items to show per page.
        input_data: List[Any]
            The data to be paginated.
        try_ephemeral: bool
            Whether or not to try send the interaction
            as ephemeral. Defaults to ``True``
        delete_buttons_on_stop: bool
            When the paginator is stopped, should
            the buttons be deleted? Defaults to ``False``
            which merely disables them.
        page_formatter: Callable
            An inline formatter to save the need to
            subclass/override ``format_page``
        """
        self._current_page_index = 0
        self._items_per_page: int = items_per_page
        self.__input_data: List[T] = input_data
        self._try_ephemeral: bool = try_ephemeral
        self._delete_buttons_on_stop: bool = delete_buttons_on_stop
        self._inline_format_page: Optional[Callable] = page_formatter
        if items_per_page <= 0:
            raise ValueError("items_per_page must be 1 or higher.")
        if self._items_per_page == 1:
            # With one item per page, each page is the bare item (not a list).
            self._paged_data: List[T] = self.__input_data
        else:
            # Chunk the data into lists of up to items_per_page items.
            self._paged_data: List[List[T]] = [
                self.__input_data[i : i + self._items_per_page]
                for i in range(0, len(self.__input_data), self._items_per_page)
            ]
        self._is_done: bool = False
        self._message: Optional[disnake.Message] = None
        self._pagination_view: Optional[PaginationView] = None

    @property
    def current_page(self) -> int:
        """The current page for this paginator (1-based)."""
        return self._current_page_index + 1

    @current_page.setter
    def current_page(self, value) -> None:
        # NOTE(review): only the upper bound is validated; values < 1 are
        # accepted and index from the end — confirm whether that is intended.
        if value > self.total_pages:
            raise ValueError(
                "Cannot change current page to a page bigger than this paginator."
            )
        self._current_page_index = value - 1

    @property
    def total_pages(self) -> int:
        """How many pages exist in this paginator."""
        return len(self._paged_data)

    @property
    def requires_pagination(self) -> bool:
        """Does this paginator have more than 1 page."""
        return len(self._paged_data) != 1

    @property
    def has_prior_page(self) -> bool:
        """Can we move backwards pagination wise."""
        return self.current_page != 1

    @property
    def has_next_page(self) -> bool:
        """Can we move forward pagination wise."""
        return self.current_page != self.total_pages

    async def start(
        self,
        *,
        interaction: disnake.Interaction = None,
        context: commands.Context = None,
    ):
        """
        Start paginating this paginator.

        Parameters
        ----------
        interaction: disnake.Interaction
            The Interaction to start
            this pagination on.
        context: commands.Context
            The Context to start paginating on.
        """
        first_page: Union[str, disnake.Embed] = await self.format_page(
            self._paged_data[self._current_page_index], self.current_page
        )
        # Embeds go in the 'embed' kwarg, plain strings in 'content'.
        send_kwargs = {}
        if isinstance(first_page, disnake.Embed):
            send_kwargs["embed"] = first_page
        else:
            send_kwargs["content"] = first_page
        if interaction:
            self._pagination_view = PaginationView(interaction.user.id, self)
            # NOTE(review): relies on the private attribute
            # `interaction.response._responded` to detect a prior response.
            if interaction.response._responded:
                # Already responded: edit the original response in place.
                self._message = await interaction.original_message()
                if self.requires_pagination:
                    await self._message.edit(**send_kwargs, view=self._pagination_view)
                else:
                    await self._message.edit(**send_kwargs)
            else:
                # First response: send (possibly ephemeral), with buttons
                # only when there is more than one page.
                if self.requires_pagination:
                    await interaction.send(
                        **send_kwargs,
                        ephemeral=self._try_ephemeral,
                        view=self._pagination_view,
                    )
                else:
                    await interaction.send(
                        **send_kwargs,
                        ephemeral=self._try_ephemeral,
                    )
                self._message = await interaction.original_message()
        elif context:
            self._pagination_view = PaginationView(context.author.id, self)
            if self.requires_pagination:
                self._message = await context.channel.send(
                    **send_kwargs,
                    view=self._pagination_view,
                )
            else:
                self._message = await context.channel.send(**send_kwargs)
        else:
            raise RuntimeError("Context or Interaction is required.")
        await self._set_buttons()

    async def stop(self):
        """Stop paginating this paginator."""
        self._is_done = True
        await self._set_buttons()

    async def _set_buttons(self) -> disnake.Message:
        """Sets buttons based on current page."""
        if not self.requires_pagination:
            # No pagination required
            return await self._message.edit(view=None)
        if self._is_done:
            # Disable all buttons
            if self._delete_buttons_on_stop:
                return await self._message.edit(view=None)
            self._pagination_view.stop_button.disabled = True
            self._pagination_view.next_page_button.disabled = True
            self._pagination_view.last_page_button.disabled = True
            self._pagination_view.first_page_button.disabled = True
            self._pagination_view.previous_page_button.disabled = True
            return await self._message.edit(view=self._pagination_view)
        # Toggle buttons
        if self.has_prior_page:
            self._pagination_view.first_page_button.disabled = False
            self._pagination_view.previous_page_button.disabled = False
        else:
            # Cannot go backwards
            self._pagination_view.first_page_button.disabled = True
            self._pagination_view.previous_page_button.disabled = True
        if self.has_next_page:
            self._pagination_view.next_page_button.disabled = False
            self._pagination_view.last_page_button.disabled = False
        else:
            self._pagination_view.next_page_button.disabled = True
            self._pagination_view.last_page_button.disabled = True
        self._pagination_view.stop_button.disabled = False
        return await self._message.edit(view=self._pagination_view)

    async def show_page(self, page_number: int):
        """
        Change to the given page.

        Parameters
        ----------
        page_number: int
            The page you wish to see.

        Raises
        ------
        ValueError
            Page number is too big for this paginator.
        """
        self.current_page = page_number
        page: Union[str, disnake.Embed] = await self.format_page(
            self._paged_data[self._current_page_index], self.current_page
        )
        if isinstance(page, disnake.Embed):
            await self._message.edit(embed=page)
        else:
            await self._message.edit(content=page)
        await self._set_buttons()

    async def go_to_first_page(self, interaction: disnake.MessageInteraction):
        """Paginate to the first page."""
        await interaction.response.defer()
        await self.show_page(1)

    async def go_to_previous_page(self, interaction: disnake.Interaction):
        """Paginate to the previous viewable page."""
        await interaction.response.defer()
        await self.show_page(self.current_page - 1)

    async def go_to_next_page(self, interaction: disnake.Interaction):
        """Paginate to the next viewable page."""
        await interaction.response.defer()
        await self.show_page(self.current_page + 1)

    async def go_to_last_page(self, interaction: disnake.Interaction):
        """Paginate to the last viewable page."""
        await interaction.response.defer()
        await self.show_page(self.total_pages)

    async def stop_pages(self, interaction: disnake.Interaction):
        """Stop paginating this paginator."""
        await interaction.response.defer()
        await self.stop()

    async def format_page(
        self, page_items: Union[T, List[T]], page_number: int
    ) -> Union[str, disnake.Embed]:
        """Given the page items, format them how you wish.

        Calls the inline formatter if not overridden,
        otherwise returns ``page_items`` as a string.

        Parameters
        ----------
        page_items: Union[T, List[T]]
            The items for this page.
            If ``items_per_page`` is ``1`` then this
            will be a singular item.
        page_number: int
            This pages number.
        """
        if self._inline_format_page:
            return self._inline_format_page(self, page_items, page_number)
        return str(page_items)
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/angular/i18n/angular-locale_ru.js | 'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"AM",
"PM"
],
"DAY": [
"\u0432\u043e\u0441\u043a\u0440\u0435\u0441\u0435\u043d\u044c\u0435",
"\u043f\u043e\u043d\u0435\u0434\u0435\u043b\u044c\u043d\u0438\u043a",
"\u0432\u0442\u043e\u0440\u043d\u0438\u043a",
"\u0441\u0440\u0435\u0434\u0430",
"\u0447\u0435\u0442\u0432\u0435\u0440\u0433",
"\u043f\u044f\u0442\u043d\u0438\u0446\u0430",
"\u0441\u0443\u0431\u0431\u043e\u0442\u0430"
],
"MONTH": [
"\u044f\u043d\u0432\u0430\u0440\u044f",
"\u0444\u0435\u0432\u0440\u0430\u043b\u044f",
"\u043c\u0430\u0440\u0442\u0430",
"\u0430\u043f\u0440\u0435\u043b\u044f",
"\u043c\u0430\u044f",
"\u0438\u044e\u043d\u044f",
"\u0438\u044e\u043b\u044f",
"\u0430\u0432\u0433\u0443\u0441\u0442\u0430",
"\u0441\u0435\u043d\u0442\u044f\u0431\u0440\u044f",
"\u043e\u043a\u0442\u044f\u0431\u0440\u044f",
"\u043d\u043e\u044f\u0431\u0440\u044f",
"\u0434\u0435\u043a\u0430\u0431\u0440\u044f"
],
"SHORTDAY": [
"\u0432\u0441",
"\u043f\u043d",
"\u0432\u0442",
"\u0441\u0440",
"\u0447\u0442",
"\u043f\u0442",
"\u0441\u0431"
],
"SHORTMONTH": [
"\u044f\u043d\u0432.",
"\u0444\u0435\u0432\u0440.",
"\u043c\u0430\u0440\u0442\u0430",
"\u0430\u043f\u0440.",
"\u043c\u0430\u044f",
"\u0438\u044e\u043d\u044f",
"\u0438\u044e\u043b\u044f",
"\u0430\u0432\u0433.",
"\u0441\u0435\u043d\u0442.",
"\u043e\u043a\u0442.",
"\u043d\u043e\u044f\u0431.",
"\u0434\u0435\u043a."
],
"fullDate": "EEEE, d MMMM y '\u0433'.",
"longDate": "d MMMM y '\u0433'.",
"medium": "d MMM y '\u0433'. H:mm:ss",
"mediumDate": "d MMM y '\u0433'.",
"mediumTime": "H:mm:ss",
"short": "dd.MM.yy H:mm",
"shortDate": "dd.MM.yy",
"shortTime": "H:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "\u0440\u0443\u0431.",
"DECIMAL_SEP": ",",
"GROUP_SEP": "\u00a0",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-",
"negSuf": "\u00a0\u00a4",
"posPre": "",
"posSuf": "\u00a0\u00a4"
}
]
},
"id": "ru",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (vf.v == 0 && i % 10 == 1 && i % 100 != 11) { return PLURAL_CATEGORY.ONE; } if (vf.v == 0 && i % 10 >= 2 && i % 10 <= 4 && (i % 100 < 12 || i % 100 > 14)) { return PLURAL_CATEGORY.FEW; } if (vf.v == 0 && i % 10 == 0 || vf.v == 0 && i % 10 >= 5 && i % 10 <= 9 || vf.v == 0 && i % 100 >= 11 && i % 100 <= 14) { return PLURAL_CATEGORY.MANY; } return PLURAL_CATEGORY.OTHER;}
});
}]); | PypiClean |
/ARGs_OAP-2.3.2.tar.gz/ARGs_OAP-2.3.2/ARGs_OAP/bin/bbmap/pipelines/covid/recal.sh |
##Written by Brian Bushnell
##Last modified May 4, 2020
##Description: Creates a recalibration matrix for Illumina data.
##Usage: recal.sh <prefix>
##For example, "recal.sh Sample1" if the data is in Sample1.fq.gz
##This script creates quality-score recalibration matrices for processing Illumina PE reads.
##It needs to be run ONCE on a single library (preferably a large one) from a sequencing run.
##Then the primary script will use the recalibration matrices for all of the libraries,
##assuming all the processing is done in the same directory.
##This script assumes input data is single-ended or paired and interleaved.
##If data is paired in twin files, you can run "reformat.sh in1=r1.fq.gz in2=r2.fq.gz out=both.fq.gz" to interleave it.
##Grab the sample name from the command line
NAME="$1"
##Specify the viral reference file.
##NC_045512.fasta contains the SARS-CoV-2 genome, equivalent to bbmap/resources/Covid19_ref.fa
REF="NC_045512.fasta"
##Discover adapter sequence for this library based on read overlap.
##This step should be skipped for single-ended reads.
bbmerge.sh in="$NAME".fq.gz outa="$NAME"_adapters.fa ow reads=1m
##Adapter-trim and discard everything with adapter sequence so the reads are uniform length.
##This assumes PE 2x150bp reads; minlen should be set to read length
bbduk.sh -Xmx1g in="$NAME".fq.gz out=recal.fq.gz minlen=150 ktrim=r k=21 mink=9 hdist=2 hdist2=1 ref="$NAME"_adapters.fa ow tbo tpe
##Note - if the reads are single-ended, use this command instead:
#bbduk.sh -Xmx1g in="$NAME".fq.gz out=trimmed.fq.gz minlen=150 ktrim=r k=21 mink=9 hdist=2 hdist2=1 ref=adapters ow
##Map the reads with very high sensitivity.
bbmap.sh ref="$REF" in=trimmed.fq.gz outm=mapped.sam.gz vslow -Xmx6g ow
#Discover true variants.
callvariants.sh in=mapped.sam.gz ref="$REF" out=recal.vcf -Xmx6g ow
##Generate recalibration matrices.
calctruequality.sh in=mapped.sam.gz vcf=recal.vcf
##Now the recalibration matrices are stored in ./ref
##BBDuk can be run with the 'recal' flag to recalibrate data (mapped or unmapped).
##It should be run from the directory containing /ref | PypiClean |
/Newcalls-0.0.1-cp37-cp37m-win_amd64.whl/newcalls/node_modules/console-control-strings/README.md~ |
A library of cross-platform tested terminal/console command strings for
doing things like color and cursor positioning. This is a subset of both
ansi and vt100. All control codes included work on both Windows & Unix-like
OSes, except where noted.
## Usage
```js
var consoleControl = require('console-control-strings')
console.log(consoleControl.color('blue','bgRed', 'bold') + 'hi there' + consoleControl.color('reset'))
process.stdout.write(consoleControl.goto(75, 10))
```
## Why Another?
There are tons of libraries similar to this one. I wanted one that was:
1. Very clear about compatibility goals.
2. Could emit, for instance, a start color code without an end one.
3. Returned strings w/o writing to streams.
4. Was not weighed down with other unrelated baggage.
## Functions
### var code = consoleControl.up(_num = 1_)
Returns the escape sequence to move _num_ lines up.
### var code = consoleControl.down(_num = 1_)
Returns the escape sequence to move _num_ lines down.
### var code = consoleControl.forward(_num = 1_)
Returns the escape sequence to move _num_ lines right.
### var code = consoleControl.back(_num = 1_)
Returns the escape sequence to move _num_ lines left.
### var code = consoleControl.nextLine(_num = 1_)
Returns the escape sequence to move _num_ lines down and to the beginning of
the line.
### var code = consoleControl.previousLine(_num = 1_)
Returns the escape sequence to move _num_ lines up and to the beginning of
the line.
### var code = consoleControl.eraseData()
Returns the escape sequence to erase everything from the current cursor
position to the bottom right of the screen. This is line based, so it
erases the remainder of the current line and all following lines.
### var code = consoleControl.eraseLine()
Returns the escape sequence to erase to the end of the current line.
### var code = consoleControl.goto(_x_, _y_)
Returns the escape sequence to move the cursor to the designated position.
Note that the origin is _1, 1_ not _0, 0_.
### var code = consoleControl.gotoSOL()
Returns the escape sequence to move the cursor to the beginning of the
current line. (That is, it returns a carriage return, `\r`.)
### var code = consoleControl.hideCursor()
Returns the escape sequence to hide the cursor.
### var code = consoleControl.showCursor()
Returns the escape sequence to show the cursor.
### var code = consoleControl.color(_colors = []_)
### var code = consoleControl.color(_color1_, _color2_, _…_, _colorn_)
Returns the escape sequence to set the current terminal display attributes
(mostly colors). Arguments can either be a list of attributes or an array
of attributes. The difference between passing in an array or list of colors
and calling `.color` separately for each one, is that in the former case a
single escape sequence will be produced whereas in the latter each change
will have its own distinct escape sequence. Each attribute can be one of:
* Reset:
* **reset** – Reset all attributes to the terminal default.
* Styles:
* **bold** – Display text as bold. In some terminals this means using a
bold font, in others this means changing the color. In some it means
both.
* **italic** – Display text as italic. This is not available in most Windows terminals.
* **underline** – Underline text. This is not available in most Windows Terminals.
* **inverse** – Invert the foreground and background colors.
* **stopBold** – Do not display text as bold.
* **stopItalic** – Do not display text as italic.
* **stopUnderline** – Do not underline text.
* **stopInverse** – Do not invert foreground and background.
* Colors:
* **white**
* **black**
* **blue**
* **cyan**
* **green**
* **magenta**
* **red**
* **yellow**
* **grey** / **brightBlack**
* **brightRed**
* **brightGreen**
* **brightYellow**
* **brightBlue**
* **brightMagenta**
* **brightCyan**
* **brightWhite**
* Background Colors:
* **bgWhite**
* **bgBlack**
* **bgBlue**
* **bgCyan**
* **bgGreen**
* **bgMagenta**
* **bgRed**
* **bgYellow**
* **bgGrey** / **bgBrightBlack**
* **bgBrightRed**
* **bgBrightGreen**
* **bgBrightYellow**
* **bgBrightBlue**
* **bgBrightMagenta**
* **bgBrightCyan**
* **bgBrightWhite**
/Flask-MongoSet-0.1.8.tar.gz/Flask-MongoSet-0.1.8/flask_mongoset.py | from __future__ import absolute_import
import copy
import operator
import trafaret as t
from flask import abort
from flask.signals import _signals
from importlib import import_module
from pymongo import Connection, ASCENDING
from pymongo.cursor import Cursor
from pymongo.database import Database
from pymongo.collection import Collection
from pymongo.son_manipulator import (SONManipulator, AutoReference,
NamespaceInjector)
# Collections whose documents receive an auto-incremented integer id
# (consumed by AutoincrementId); models register themselves here.
inc_collections = set()  # idiom fix: set() instead of set([])

# Names of the mongo lifecycle events and their blinker signals.
after_insert = 'after_insert'
after_update = 'after_update'
after_delete = 'after_delete'
signal_map = {after_insert: _signals.signal('mongo_after_insert'),
              after_update: _signals.signal('mongo_after_update'),
              after_delete: _signals.signal('mongo_after_delete')}
def resolve_class(class_path):
    """Import and return the object named by a dotted path.

    E.g. ``resolve_class('my_project.account.User')`` imports the
    ``my_project.account`` module and returns its ``User`` attribute.
    """
    module_name, class_name = class_path.rsplit('.', 1)
    module = import_module(module_name)
    return getattr(module, class_name)
class AuthenticationError(Exception):
    """Raised when authenticating against the MongoDB database fails."""
    pass
class InitDataError(Exception):
    """Raised when the extension is initialized with invalid data/config."""
    pass
class ClassProperty(property):
    """Implements the :@classproperty: decorator — like ``@property`` but
    resolved on the class itself rather than on an instance."""

    def __init__(self, method, *args, **kwargs):
        # Wrap the getter as a classmethod so __get__ can bind it to the
        # owning class instead of an instance.
        wrapped = classmethod(method)
        super(ClassProperty, self).__init__(wrapped, *args, **kwargs)

    def __get__(self, cls, owner):
        bound_getter = self.fget.__get__(None, owner)
        return bound_getter()


classproperty = ClassProperty
class AttrDict(dict):
    """
    Base object that represents a MongoDB document. The object behaves
    both like a dict ``x['y']`` and like an object ``x.y``.

    :param initial: a new instance can be defined via a dictionary::
                        AttrDict({'a': 'one', 'b': 'two'})
                    or by passing the data as kwargs::
                        AttrDict(a='one', b='two')

    NOTE(review): this class targets Python 2 — it uses ``dict.iteritems``
    and relies on py2 ``map`` returning a list; confirm before using on
    Python 3.
    """
    def __init__(self, initial=None, **kwargs):
        # Merge the optional initial mapping into kwargs, then set all
        # pairs through __setattr__ so nested dicts are converted too.
        initial and kwargs.update(**initial)
        self._setattrs(**kwargs)

    def __getattr__(self, attr):
        return self._change_method('__getitem__', attr)

    def __setattr__(self, attr, value):
        # Convert nested dicts/lists so attribute access works recursively.
        value = self._make_attr_dict(value)
        return self.__setitem__(attr, value)

    def __delattr__(self, attr):
        return self._change_method('__delitem__', attr)

    def _make_attr_dict(self, value):
        """Recursively convert dicts (and dicts inside lists) to AttrDict.

        NOTE(review): assumes py2 ``map`` (returns a list); on py3 this
        would store a lazy map object.
        """
        if isinstance(value, list):
            value = map(self._make_attr_dict, value)
        elif isinstance(value, dict) and not isinstance(value, AttrDict):
            value = AttrDict(value)
        return value

    def _change_method(self, method, *args, **kwargs):
        """Delegate to a base dict method to implement dot notation,
        raising AttributeError instead of KeyError.
        """
        try:
            callmethod = operator.methodcaller(method, *args, **kwargs)
            return callmethod(super(AttrDict, self))
        except KeyError as ex:
            raise AttributeError(ex)

    def _setattrs(self, **kwargs):
        # py2-only: dict.iteritems does not exist on Python 3.
        for key, value in kwargs.iteritems():
            setattr(self, key, value)
class AutoincrementId(SONManipulator):
    """ Assigns an autoincrementing integer id (``_int_id``) to incoming
    documents of registered collections (see :data:`inc_collections`).

    Not useful with DBRefs: DBRefs cannot be based on this id.
    """
    def transform_incoming(self, son, collection):
        """Attach the next integer id to *son* when it has none yet.

        Only touches the counter when ``_int_id`` is absent; the previous
        ``son.get('_int_id', self._get_next_id(collection))`` form evaluated
        the default eagerly, so every insert consumed (and wasted) a sequence
        number even when the document already carried an id.
        """
        if collection.name in inc_collections and '_int_id' not in son:
            son["_int_id"] = self._get_next_id(collection)
        return son

    def _get_next_id(self, collection):
        """Atomically increment and return the per-collection counter kept
        in the ``_autoincrement_ids`` collection of the same database.
        """
        database = collection.database
        result = database._autoincrement_ids.find_and_modify(
            query={"id": collection.name},
            update={"$inc": {"next": 1}},
            upsert=True,
            new=True)
        return result["next"]
class SavedObject(SONManipulator):
    """
    Converts fetched documents into class instances. The dotted import path
    of the class is kept in the document's ``_class`` field, e.g.::

        {'name': 'John', 'age': 18, '_class': 'my_project.account.User'}

    is converted into a ``User`` instance. Embedded documents are converted
    too when they carry their own ``_class`` field.

    TODO: this only works for documents that are in the same database. To fix
    this we'll need to add a DatabaseInjector that adds `_db` and then make
    use of the optional `database` support for DBRefs.
    """
    def will_copy(self):
        # tell pymongo this manipulator returns a new object
        return True

    def _transform_value(self, value):
        if isinstance(value, list):
            # list comprehension instead of map(): on Python 3 map() returns
            # a lazy iterator, which would leak out of transform_outgoing
            return [self._transform_value(item) for item in value]
        if isinstance(value, dict):
            if value.get('_class'):
                # resolve the stored dotted path and instantiate it with
                # the (recursively transformed) document
                cls = resolve_class(value['_class'])
                return cls(self._transform_dict(value))
            return self._transform_dict(value)
        return value

    def _transform_dict(self, object):
        # transform every value in place, recursing into nested containers
        for (key, value) in object.items():
            object[key] = self._transform_value(value)
        return object

    def transform_outgoing(self, son, collection):
        """Entry point called by pymongo for every outgoing document."""
        return self._transform_value(son)
class MongoCursor(Cursor):
    """
    A cursor that returns instances of the :as_class: parameter (built with
    the provided :_lang: language) instead of plain dicts.
    """
    def __init__(self, *args, **kwargs):
        # both keys are always supplied by BaseQuery.find
        self._lang = kwargs.pop('_lang')
        self.as_class = kwargs.pop('as_class')
        super(MongoCursor, self).__init__(*args, **kwargs)

    def next(self):
        """Return the next document wrapped into ``as_class``."""
        data = super(MongoCursor, self).next()
        return self.as_class(data, _lang=self._lang, from_db=True)

    # Python 3 iterator protocol: without this alias iteration would fall
    # back to the base Cursor.__next__ and yield raw dicts
    __next__ = next

    def __getitem__(self, index):
        item = super(MongoCursor, self).__getitem__(index)
        if isinstance(index, slice):
            # slicing yields a cursor; its items are wrapped lazily by next()
            return item
        else:
            return self.as_class(item, _lang=self._lang, from_db=True)
class BaseQuery(Collection):
    """
    `BaseQuery` extends :class:`pymongo.Collection`, adding a :_lang:
    parameter to returned instances via MongoCursor and firing
    after_insert/after_update/after_delete signals on writes.
    If the model has no ``i18n`` attr it needs no translation and plain
    pymongo.Collection behaviour is used.

    :param document_class: to return data from db as instance of this class
    :param i18n: to change translatable attributes in the search query
    """
    def __init__(self, *args, **kwargs):
        # model class used to wrap every returned document
        self.document_class = kwargs.pop('document_class')
        # list of translatable field names, or None if the model has none
        self.i18n = getattr(self.document_class, 'i18n', None)
        super(BaseQuery, self).__init__(*args, **kwargs)

    def find(self, *args, **kwargs):
        """Query the collection, returning a MongoCursor that wraps every
        document into ``document_class`` for the requested language.
        """
        spec = args and args[0]
        kwargs['as_class'] = self.document_class
        # language defaults to the model's fallback language
        kwargs['_lang'] = lang = kwargs.pop('_lang',
                                            self.document_class._fallback_lang)
        # rewrite the spec so translatable fields query the per-language
        # sub-document (``field`` -> ``field.<lang>``)
        if self.i18n and spec:
            if not isinstance(spec, dict):
                raise TypeError("The first argument must be an instance of "
                                "dict")
            # NOTE(review): _insert_lang mutates the spec dict in place, so
            # this rebinding is cosmetic -- args[0] already carries the change
            spec = self._insert_lang(spec, lang)
        return MongoCursor(self, *args, **kwargs)

    def insert(self, doc_or_docs, manipulate=True,
               safe=None, check_keys=True, continue_on_error=False, **kwargs):
        """ Overrided method for sending :after_insert: signal
        """
        _id = super(BaseQuery, self).insert(doc_or_docs, manipulate, safe,
                                            check_keys, continue_on_error,
                                            **kwargs)
        signal_map[after_insert].send(self.document_class.__name__, _id=_id,
                                      collection=self, signal=after_insert)
        return _id

    def update(self, spec, document, **kwargs):
        """Update documents and fire the :after_update: signal.

        For i18n models a ``_lang`` kwarg is required: plain (non-``$``)
        top-level fields are rewritten to ``{lang: value}`` sub-documents,
        while ``$`` operators get their inner keys language-qualified.
        """
        if self.i18n:
            lang = kwargs.pop('_lang')
            for attr, value in document.items():
                if attr.startswith('$'):
                    document[attr] = self._insert_lang(value, lang)
                else:
                    document[attr] = {lang: value}
        _id = spec.get('_id')
        result = super(BaseQuery, self).update(spec, document, **kwargs)
        signal_map[after_update].send(self.document_class.__name__, _id=_id,
                                      collection=self, signal=after_update)
        return result

    def remove(self, spec_or_id=None, safe=None, **kwargs):
        """Remove documents. The :after_delete: signal is sent *before* the
        actual removal, so receivers can still inspect the data.
        """
        signal_map[after_delete].send(self.document_class.__name__,
                                      _id=spec_or_id, collection=self,
                                      signal=after_delete)
        return super(BaseQuery, self).remove(spec_or_id, safe, **kwargs)

    def get(self, id):
        # try the native ObjectId first, then the autoincrement integer id
        return self.find_one({'_id': id}) or self.find_one({'_int_id': id})

    def get_or_404(self, id):
        # like get(), but aborts with HTTP 404 when nothing matches
        return self.get(id) or abort(404)

    def find_one_or_404(self, *args, **kwargs):
        # like find_one(), but aborts with HTTP 404 when nothing matches
        return self.find_one(*args, **kwargs) or abort(404)

    def find_or_404(self, *args, **kwargs):
        # like find(), but aborts with HTTP 404 on an empty result set
        cursor = self.find(*args, **kwargs)
        return not cursor.count() == 0 and cursor or abort(404)

    def _insert_lang(self, document, lang):
        """Rewrite translatable keys of *document* in place: ``title``
        becomes ``title.<lang>`` so the query matches the stored
        sub-document. ``$`` operators (except ``$where``) are recursed into.
        """
        for attr in document.copy():
            if attr.startswith('$') and attr != '$where':
                # assumes the operator value is a list of sub-specs
                # (e.g. $or/$and) -- TODO confirm for scalar-valued operators
                document[attr] = map(lambda a: self._insert_lang(a, lang),
                                     document[attr])
            else:
                attrs = attr.split('.')
                if attrs[0] in self.i18n and '$' not in attr:
                    attrs.insert(1, lang)
                    document['.'.join(attrs)] = document.pop(attr)
        return document

    def delete(self):
        # drops the whole collection
        return self.drop()

    def all(self):
        return self.find()
class ModelType(type):
    """ Metaclass for :class:`Model`.

    - changes validation rules (structure keys) for translated attrs
    - implements inheritance of :i18n:, :indexes:, :required_fields: and
      :structure: from an ``__abstract__`` base model
    - computes :_protected_field_names: for each class and registers
      :indexes: in MongoDB
    """
    def __new__(cls, name, bases, dct):
        structure = dct.get('structure')
        if structure is not None:
            # service fields added by the SON manipulators must always pass
            # trafaret validation
            structure.allow_extra('_class', '_id', '_ns', '_int_id')
        # inheritance from abstract models:
        for model in bases:
            if getattr(model, '__abstract__', None) is True:
                # a concrete subclass of an abstract model is itself
                # concrete unless it says otherwise
                if '__abstract__' not in dct:
                    dct['__abstract__'] = False
                # union the list-valued attrs of parent and child
                key_attrs = ['i18n', 'indexes', 'required_fields']
                for attr in key_attrs:
                    base_attrs = set(getattr(model, attr, []))
                    child_attrs = set(dct.get(attr, []))
                    dct.update({attr: list(base_attrs | child_attrs)})
                if model.structure and structure is not None:
                    # merge the parent trafaret structure into the child's:
                    # union keys and combine the extras/ignore policies
                    base_structure = set(model.structure.keys)
                    child_structure = set(structure.keys)
                    structure.keys = list(base_structure | child_structure)
                    structure.allow_any = structure.allow_any \
                        or model.structure.allow_any
                    structure.ignore_any = structure.ignore_any \
                        or model.structure.ignore_any
                    if not structure.allow_any:
                        structure.extras = list(set(model.structure.extras) |
                                                set(structure.extras))
                    if not structure.ignore_any:
                        structure.ignore = list(set(model.structure.ignore) |
                                                set(structure.ignore))
                elif model.structure:
                    # child has no structure of its own: inherit verbatim
                    dct['structure'] = model.structure
                # only the first abstract base is merged
                break
        # change structure for translated fields: each i18n key becomes a
        # mapping of language code -> original trafaret
        if not dct.get('__abstract__') and structure and dct.get('i18n'):
            for key in structure.keys[:]:
                if key.name in dct['i18n']:
                    dct['structure'].keys.remove(key)
                    dct['structure'].keys.append(t.Key(key.name,
                        trafaret=t.Mapping(t.String, key.trafaret),
                        default=key.default, optional=key.optional,
                        to_name=key.to_name))
        # add required_fields: every structure key NOT listed there is
        # made optional; without a structure a permissive one is synthesized
        if 'required_fields' in dct:
            required_fields = dct.get('required_fields')
            if dct.get('structure') is not None:
                optional = filter(lambda key: key.name not in dct['required_fields'],
                                  dct['structure'].keys)
                optional = map(operator.attrgetter('name'), optional)
                dct['structure'] = dct['structure'].make_optional(*optional)
            else:
                struct = dict.fromkeys(required_fields, t.Any)
                dct['structure'] = t.Dict(struct).allow_extra('*')
        return type.__new__(cls, name, bases, dct)

    def __init__(cls, name, bases, dct):
        # set protected_field_names: every attribute name defined anywhere
        # in the MRO is forbidden as a document field (see Model.__init__)
        protected_field_names = set(['_protected_field_names'])
        names = [model.__dict__.keys() for model in cls.__mro__]
        cls._protected_field_names = list(protected_field_names.union(*names))
        if not cls.__abstract__:
            # add model into autoincrement_id register:
            if cls.inc_id:
                inc_collections.add(cls.__collection__)
            # add indexes: bare field names become (name, ASCENDING) pairs
            if cls.indexes:
                for index in cls.indexes[:]:
                    if isinstance(index, str):
                        cls.indexes.remove(index)
                        cls.indexes.append((index, ASCENDING))
                # NOTE(review): ensure_index only runs when the db is already
                # bound; MongoSet.register repeats it for late-bound models
                if cls.db:
                    cls.query.ensure_index(cls.indexes)
class Model(AttrDict):
    """ Base class for custom user models. Provide convenience ActiveRecord
    methods such as :attr:`save`, :attr:`create`, :attr:`update`,
    :attr:`delete`.

    :param __collection__: name of mongo collection
    :param __abstract__: if True - there is an abstract Model,
                         so :param i18n:, :param structure: and
                         :param indexes: shall be added for submodels
    :param _protected_field_names: fields names that can not be added like
                         dict items, generated automatically by ModelType
                         metaclass
    :param _lang: optional, language for model, by default it is
                         the same as :param _fallback_lang:
    :param _fallback_lang: fallback model language, by default it is
                         app.config.MONGODB_FALLBACK_LANG
    :param i18n: optional, list of fields that need to translate
    :param db: MongoDB database, it is defined by MongoSet
    :param indexes: optional, list of fields that need to index
    :param query_class: class that makes queries to MongoDB,
                         by default it is :BaseQuery:
    :param structure: optional, a structure of mongo document, will be
                      validated by trafaret https://github.com/nimnull/trafaret
    :param required_fields: optional, list of required fields
    :param use_autorefs: optional, if it is True - AutoReference manipulator
                         will be used for queries, by default is True
    :param inc_id: optional, if it is True - AutoincrementId
                         will be used for queries, by default is False
    :param from_db: attr marking objects loaded from db,
                    sets automatically
    """
    __metaclass__ = ModelType
    __collection__ = None
    __abstract__ = False
    _protected_field_names = None
    _lang = None
    _fallback_lang = None
    i18n = []
    db = None
    indexes = []
    query_class = BaseQuery
    structure = None
    required_fields = []
    use_autorefs = True
    inc_id = False
    from_db = False

    def __init__(self, initial=None, **kwargs):
        """Build a document from *initial* dict and/or kwargs.

        Raises AttributeError when a supplied field name collides with a
        class attribute (unless that attribute is a property).
        """
        self.from_db = kwargs.pop('from_db', False)
        self._lang = kwargs.pop('_lang', self._fallback_lang)
        if not self.from_db:
            # remember the dotted class path so SavedObject can rebuild us
            self._class = ".".join([self.__class__.__module__,
                                    self.__class__.__name__])
        dct = kwargs.copy()
        if initial and isinstance(initial, dict):
            dct.update(**initial)
        for field in self._protected_field_names:
            if field in dct and not isinstance(getattr(self.__class__,
                                               field, None), property):
                raise AttributeError("Forbidden attribute name {} for"
                                     " model {}".format(field,
                                                        self.__class__.__name__))
        super(Model, self).__init__(initial, **kwargs)

    def __setattr__(self, attr, value):
        # protected names are real object attributes, not document fields
        if attr in self._protected_field_names:
            return dict.__setattr__(self, attr, value)
        # translatable fields are stored as {lang: value} sub-documents;
        # documents coming from the db already have that shape
        if attr in self.i18n and not self.from_db:
            if attr not in self:
                if not isinstance(value, dict) or self._lang not in value:
                    value = {self._lang: value}
            else:
                attrs = self[attr].copy()
                attrs.update({self._lang: value})
                value = attrs
        return super(Model, self).__setattr__(attr, value)

    def __getattr__(self, attr):
        value = super(Model, self).__getattr__(attr)
        if attr in self.i18n:
            # pick the current language, falling back to the fallback
            # language, falling back to the raw stored value
            value = value.get(self._lang,
                              value.get(self._fallback_lang, value))
        return value

    @classproperty
    def query(cls):
        """A fresh query object bound to this model's collection."""
        return cls.query_class(database=cls.db, name=cls.__collection__,
                               document_class=cls)

    def save(self, *args, **kwargs):
        """Validate against :structure: (if any) and save the document."""
        data = self.structure and self.structure.check(self) or self
        return self.query.save(data, *args, **kwargs)

    def save_with_reload(self, *args, **kwargs):
        """ returns self with autorefs after save
        """
        _id = self.save(*args, **kwargs)
        return self.query.find_one({'_id': _id}, _lang=self._lang)

    def update(self, data=None, **kwargs):
        """Update this document in MongoDB.

        Keyword arguments that are not pymongo update options are collected
        into a ``$set`` modifier. A *data* modifier document, if given, is
        honoured and merged with that ``$set``. (Previously *data* was
        unconditionally overwritten by the ``$set`` built from kwargs, so
        ``update(data)`` and ``update_with_reload(data)`` silently did
        nothing with it.)
        """
        if data is None:
            data = {}
        update_options = set(['upsert', 'manipulate', 'safe', 'multi',
                              '_check_keys'])
        # set(kwargs) works on Python 2 and 3 (viewkeys() is py2-only)
        new_attrs = list(set(kwargs) - update_options)
        if new_attrs or not data:
            data.setdefault('$set', {}).update(
                dict((k, kwargs.pop(k)) for k in new_attrs))
        if self.i18n:
            kwargs['_lang'] = self._lang
        return self.query.update({"_id": self._id}, data, **kwargs)

    def update_with_reload(self, data=None, **kwargs):
        """ returns self with autorefs after update
        """
        self.update(data, **kwargs)
        result = self.query.find_one({'_id': self._id})
        if self.i18n:
            result._lang = self._lang
        return result

    def delete(self):
        """Remove this document from the collection."""
        return self.query.remove(self._id)

    @classmethod
    def create(cls, *args, **kwargs):
        """Instantiate, save and return the freshly loaded document."""
        instance = cls(*args, **kwargs)
        return instance.save_with_reload()

    @classmethod
    def get_or_create(cls, *args, **kwargs):
        """Return the first matching document, creating it from the spec
        dict (first positional argument) when nothing matches.
        """
        # deep-copy the spec before find_one's i18n rewriting mutates it
        spec = copy.deepcopy(args)
        instance = cls.query.find_one(*args, **kwargs)
        if not instance:
            if not spec or not isinstance(spec[0], dict):
                raise InitDataError("first argument must be an instance of "
                                    "dict with init data")
            instance = cls.create(spec[0], **kwargs)
        return instance

    def __repr__(self):
        return "<%s:%s>" % (self.__class__.__name__,
                            super(Model, self).__repr__())

    def __unicode__(self):
        # Python 2 only: py3 str has no decode()
        return str(self).decode('utf-8')
def get_state(app):
    """Return the :class:`MongoSet` state previously registered on *app*.

    ``MongoSet.init_app`` stores the extension instance in
    ``app.extensions['mongoset']``; asking for it before that is a
    programming error.
    """
    assert 'mongoset' in app.extensions, (
        'The mongoset extension was not registered to the current '
        'application. Please make sure to call init_app() first.')
    return app.extensions['mongoset']
class MongoSet(object):
    """ This class is used to control the MongoSet integration
    to a Flask application.
    Adds :param db: and :param _fallback_lang: into Model

    Usage:

        app = Flask(__name__)
        mongo = MongoSet(app)

    This class also provides access to the mongo Model:

        class Product(mongo.Model):
            structure = t.Dict({
                'title': t.String,
                'quantity': t.Int,
                'attrs': t.Mapping(t.String, t.Or(t.Int, t.Float, t.String)),
            }).allow_extra('*')
            indexes = ['id']

    via the register method:

        mongo = MongoSet(app)
        mongo.register(Product, OtherModel)

    or via decorator:

        from flask.ext.mongoset import Model

        @mongo.register
        class Product(Model):
            pass
    """
    def __init__(self, app=None):
        self.Model = Model
        if app is not None:
            self.init_app(app)
        else:
            self.app = None

    def init_app(self, app):
        """Configure *app*: set config defaults, register the extension
        state, open the connection and install teardown cleanup.
        """
        app.config.setdefault('MONGODB_HOST', "localhost")
        app.config.setdefault('MONGODB_PORT', 27017)
        app.config.setdefault('MONGODB_USERNAME', '')
        app.config.setdefault('MONGODB_PASSWORD', '')
        app.config.setdefault('MONGODB_DATABASE', "")
        app.config.setdefault('MONGODB_AUTOREF', False)
        app.config.setdefault('MONGODB_AUTOINCREMENT', False)
        app.config.setdefault('MONGODB_FALLBACK_LANG', 'en')
        app.config.setdefault('MONGODB_SLAVE_OKAY', False)

        self.app = app
        if not hasattr(app, 'extensions'):
            app.extensions = {}
        app.extensions['mongoset'] = self
        self.connect()

        @app.teardown_appcontext
        def close_connection(response):
            # return the connection's socket to the pool after each request
            state = get_state(app)
            if state.connection is not None:
                state.connection.end_request()
            return response

        self.Model.db = self.session
        self.Model._fallback_lang = app.config.get('MONGODB_FALLBACK_LANG')

    def connect(self):
        """Connect to the MongoDB server and register the documents from
        :attr:`registered_documents`. If you set ``MONGODB_USERNAME`` and
        ``MONGODB_PASSWORD`` then you will be authenticated at the
        ``MONGODB_DATABASE``.

        :raises RuntimeError: when no app was bound via init_app()
        :raises AuthenticationError: when credentials are rejected
        """
        # ``self.app`` is set to None by __init__ when no app was given, so
        # a plain hasattr() check would never fire and we would crash later
        # with an AttributeError on ``None.config`` instead
        if getattr(self, 'app', None) is None:
            raise RuntimeError('The mongoset extension was not init to '
                               'the current application. Please make sure '
                               'to call init_app() first.')

        if not hasattr(self, 'connection'):
            self.connection = Connection(
                host=self.app.config.get('MONGODB_HOST'),
                port=self.app.config.get('MONGODB_PORT'),
                slave_okay=self.app.config.get('MONGODB_SLAVE_OKAY', False))

        # equality, not identity: ``is not ''`` compared object identity
        # with a literal and is not a reliable emptiness test
        if self.app.config.get('MONGODB_USERNAME') != '':
            auth_success = self.session.authenticate(
                self.app.config.get('MONGODB_USERNAME'),
                self.app.config.get('MONGODB_PASSWORD'))
            if not auth_success:
                raise AuthenticationError("can't connect to data base,"
                                          " wrong user_name or password")

    def register(self, *models):
        """Register one or more :class:`mongoset.Model` instances to the
        connection. Binds the database, ensures indexes and sets the
        fallback language on each model.
        """
        for model in models:
            if not model.db or not isinstance(model.db, Database):
                setattr(model, 'db', self.session)
                model.indexes and model.query.ensure_index(model.indexes)
            setattr(model, '_fallback_lang',
                    self.app.config['MONGODB_FALLBACK_LANG'])
        # single-model form supports decorator usage: @mongo.register
        return len(models) == 1 and models[0] or models

    @property
    def session(self):
        """ Returns the MongoDB database, lazily created with the SON
        manipulators requested by the app config.
        """
        if not hasattr(self, "db"):
            self.db = self.connection[self.app.config['MONGODB_DATABASE']]
            # we need namespaces in any case
            self.db.add_son_manipulator(NamespaceInjector())
            if self.app.config['MONGODB_AUTOREF']:
                self.db.add_son_manipulator(AutoReference(self.db))
            if self.app.config['MONGODB_AUTOINCREMENT']:
                self.db.add_son_manipulator(AutoincrementId())
            self.db.add_son_manipulator(SavedObject())
        return self.db

    def clear(self):
        """Drop the configured database and release the connection."""
        self.connection.drop_database(self.app.config['MONGODB_DATABASE'])
        self.connection.end_request()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.