hexsha
stringlengths 40
40
| size
int64 4
996k
| ext
stringclasses 8
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
245
| max_stars_repo_name
stringlengths 6
130
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
245
| max_issues_repo_name
stringlengths 6
130
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
245
| max_forks_repo_name
stringlengths 6
130
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 4
996k
| avg_line_length
float64 1.33
58.2k
| max_line_length
int64 2
323k
| alphanum_fraction
float64 0
0.97
| content_no_comment
stringlengths 0
946k
| is_comment_constant_removed
bool 2
classes | is_sharp_comment_removed
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
790a221827a535b0708b790e6c2386085beca3fa
| 1,228
|
py
|
Python
|
backend/tests/email_generators/test_event_location_changed.py
|
fjacob21/mididecweb
|
b65f28eb6fdeafa265796b6190a4264a5eac54ce
|
[
"MIT"
] | null | null | null |
backend/tests/email_generators/test_event_location_changed.py
|
fjacob21/mididecweb
|
b65f28eb6fdeafa265796b6190a4264a5eac54ce
|
[
"MIT"
] | 88
|
2016-11-12T14:54:38.000Z
|
2018-08-02T00:25:07.000Z
|
backend/tests/email_generators/test_event_location_changed.py
|
mididecouverte/mididecweb
|
b65f28eb6fdeafa265796b6190a4264a5eac54ce
|
[
"MIT"
] | null | null | null |
import pytest
from datetime import datetime, timedelta
import pytz
from bs4 import BeautifulSoup
from src.events import Events
from src.users import Users
from src.user import USER_ACCESS_MANAGER
from src.stores import MemoryStore
from src.email_generators import EventLocationChangedEmail
def test_event_location_changed_email():
    """EventLocationChangedEmail.generate() should render HTML that embeds
    the recipient's name plus the event's link, location and description.
    """
    store = MemoryStore()
    events = Events(store)
    users = Users(store)
    # Event start must be timezone-aware for the email templates.
    start = datetime.now(pytz.timezone("America/New_York"))
    dur = timedelta(hours=1)
    # NOTE(review): the trailing 8 is presumably an access level — confirm
    # against the Users.add signature.
    u = users.add("test@test.com", 'name', 'alias', 'psw', 8)
    e = events.add('test', 'test', 30, start, dur, 'test', 'test',
                   'test@test.com', 'test', u)
    # Same event passed as both old and new; only the rendering is under test.
    email = EventLocationChangedEmail(e, e, '', root='./src')
    html = email.generate(u)
    soup = BeautifulSoup(html, 'html.parser')
    assert html
    assert isinstance(html, str)
    assert bool(soup.find())  # parses as non-empty markup
    assert soup.find("div", {"class": "user"}).string.strip() == 'name'
    assert soup.find("a", {"class": "event-link"}).string.strip() == 'test'
    assert soup.find("td", {"class": "event-location-text"}).string.strip() == 'test'
    assert soup.find("div", {"class": "event-description"}).string.strip() == 'test'
| 38.375
| 85
| 0.664495
|
import pytest
from datetime import datetime, timedelta
import pytz
from bs4 import BeautifulSoup
from src.events import Events
from src.users import Users
from src.user import USER_ACCESS_MANAGER
from src.stores import MemoryStore
from src.email_generators import EventLocationChangedEmail
def test_event_location_changed_email():
store = MemoryStore()
events = Events(store)
users = Users(store)
start = datetime.now(pytz.timezone("America/New_York"))
dur = timedelta(hours=1)
end = start + dur
u = users.add("test@test.com", 'name', 'alias', 'psw', 8)
e = events.add('test', 'test', 30, start, dur, 'test', 'test',
'test@test.com', 'test', u)
email = EventLocationChangedEmail(e, e, '', root='./src')
html = email.generate(u)
soup = BeautifulSoup(html, 'html.parser')
assert html
assert type(html) == str
assert bool(soup.find())
assert soup.find("div", {"class": "user"}).string.strip() == 'name'
assert soup.find("a", {"class": "event-link"}).string.strip() == 'test'
assert soup.find("td", {"class": "event-location-text"}).string.strip() == 'test'
assert soup.find("div", {"class": "event-description"}).string.strip() == 'test'
| true
| true
|
790a2249caf62e7895bf6298fd64cbf42158f83a
| 980
|
py
|
Python
|
tests/mimetest.py
|
seantis/mailthon
|
610e29fd3d89267f9b9fd7ba0e18e312a1c1c0f6
|
[
"MIT"
] | 230
|
2015-05-21T04:44:20.000Z
|
2021-12-01T00:47:05.000Z
|
tests/mimetest.py
|
seantis/mailthon
|
610e29fd3d89267f9b9fd7ba0e18e312a1c1c0f6
|
[
"MIT"
] | 33
|
2015-05-22T18:48:59.000Z
|
2020-12-18T09:37:33.000Z
|
tests/mimetest.py
|
seantis/mailthon
|
610e29fd3d89267f9b9fd7ba0e18e312a1c1c0f6
|
[
"MIT"
] | 33
|
2015-05-23T10:49:54.000Z
|
2020-12-11T11:05:35.000Z
|
from re import search
from base64 import b64decode
from email.message import Message
class mimetest:
    """Assertion-friendly wrapper around an ``email.message.Message``.

    Exposes the MIME attributes tests commonly inspect (headers, charset,
    content type, decoded payload, subparts) as simple properties.
    """

    def __init__(self, mime):
        self.mime = mime
        # A well-formed message must carry no parsing defects.
        assert not mime.defects

    def __getitem__(self, header):
        # Header lookup delegates straight to the wrapped message.
        return self.mime[header]

    @property
    def transfer_encoding(self):
        """Value of the Content-Transfer-Encoding header, or None."""
        return self['Content-Transfer-Encoding']

    @property
    def encoding(self):
        """Declared content charset, or None when absent."""
        return self.mime.get_content_charset(None)

    @property
    def mimetype(self):
        """Full content type, e.g. ``text/plain``."""
        return self.mime.get_content_type()

    @property
    def payload(self):
        """Payload as bytes, base64-decoded when so transfer-encoded."""
        raw = self.mime.get_payload().encode(self.encoding or 'ascii')
        if self.transfer_encoding == 'base64':
            return b64decode(raw)
        return raw

    @property
    def parts(self):
        """Subparts wrapped as mimetest objects; TypeError if not multipart."""
        inner = self.mime.get_payload()
        if not isinstance(inner, list):
            raise TypeError
        return [mimetest(part) for part in inner]
def blank():
    """Return a brand-new, empty MIME message for use as a test fixture."""
    message = Message()
    return message
| 22.790698
| 74
| 0.642857
|
from re import search
from base64 import b64decode
from email.message import Message
class mimetest:
def __init__(self, mime):
self.mime = mime
assert not mime.defects
def __getitem__(self, header):
return self.mime[header]
@property
def transfer_encoding(self):
return self['Content-Transfer-Encoding']
@property
def encoding(self):
return self.mime.get_content_charset(None)
@property
def mimetype(self):
return self.mime.get_content_type()
@property
def payload(self):
payload = self.mime.get_payload().encode(self.encoding or 'ascii')
if self.transfer_encoding == 'base64':
return b64decode(payload)
return payload
@property
def parts(self):
payload = self.mime.get_payload()
if not isinstance(payload, list):
raise TypeError
return [mimetest(k) for k in payload]
def blank():
return Message()
| true
| true
|
790a26b2ecc1936ebb0585bcc8ec4ce375423bfc
| 9,144
|
py
|
Python
|
fairseq_cli/eval_lm.py
|
liangan1/fairseq
|
31b54e8ec03824bad61a80bea376c987e2e7c721
|
[
"MIT"
] | null | null | null |
fairseq_cli/eval_lm.py
|
liangan1/fairseq
|
31b54e8ec03824bad61a80bea376c987e2e7c721
|
[
"MIT"
] | null | null | null |
fairseq_cli/eval_lm.py
|
liangan1/fairseq
|
31b54e8ec03824bad61a80bea376c987e2e7c721
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3 -u
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Evaluate the perplexity of a trained language model.
"""
import logging
import math
import os
import torch
from fairseq import checkpoint_utils, options, tasks, utils
from fairseq.data import LMContextWindowDataset
from fairseq.logging import progress_bar
from fairseq.logging.meters import StopwatchMeter, TimeMeter
from fairseq.sequence_scorer import SequenceScorer
from fairseq import distributed_utils
logging.basicConfig(
format='%(asctime)s | %(levelname)s | %(name)s | %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO,
)
logger = logging.getLogger('fairseq_cli.eval_lm')
class WordStat(object):
    """Accumulates per-word scoring statistics across evaluated samples."""

    def __init__(self, word, is_bpe):
        self.word = word
        self.is_bpe = is_bpe
        # Running sums / counters, all updated via add().
        self.log_prob = 0
        self.next_word_prob = 0
        self.count = 0
        self.missing_next_words = 0

    def add(self, log_prob, next_word_prob):
        """Accumulate the log-prob of this word and, when available, of the
        next word (given context ending at the current word).

        The next word's probability may be unavailable — e.g. the word sits
        at the end of the example, or the following token is a non-final
        subword unit — so a separate counter tracks how often it is missing.
        """
        if next_word_prob is None:
            self.missing_next_words += 1
        else:
            self.next_word_prob += next_word_prob
        self.log_prob += log_prob
        self.count += 1

    def __str__(self):
        fields = (
            self.word,
            self.count,
            self.log_prob,
            self.is_bpe,
            self.next_word_prob,
            self.count - self.missing_next_words,
        )
        return '\t'.join(str(f) for f in fields)
def main(parsed_args, **unused_kwargs):
    """Evaluate the perplexity of a trained language model.

    Loads the model ensemble from ``parsed_args.path``, scores the chosen
    dataset split with a SequenceScorer, and logs base-2 loss / perplexity
    (plus optional per-word probabilities and statistics).
    """
    assert parsed_args.path is not None, '--path required for evaluation!'

    if torch.cuda.is_available() and not parsed_args.cpu:
        torch.cuda.set_device(parsed_args.device_id)

    utils.import_user_module(parsed_args)

    logger.info(parsed_args)

    if parsed_args.ipex:
        import intel_pytorch_extension as ipex
        # BUGFIX: these flags must be read from parsed_args — `args` is only
        # bound further down by load_model_ensemble, so the original
        # `args.dnnl` / `args.mix_precision` raised NameError here.
        if parsed_args.dnnl:
            ipex.core.enable_auto_dnnl()
        else:
            ipex.core.disable_auto_dnnl()
        if parsed_args.mix_precision:
            ipex.core.enable_mix_bf16_fp32()

    use_cuda = torch.cuda.is_available() and not parsed_args.cpu

    task = tasks.setup_task(parsed_args)

    # Load ensemble
    logger.info('loading model(s) from {}'.format(parsed_args.path))
    # NOTE: eval() on --model-overrides trusts CLI input (upstream fairseq
    # behavior); do not feed it untrusted strings.
    models, args = checkpoint_utils.load_model_ensemble(
        parsed_args.path.split(os.pathsep),
        arg_overrides=eval(parsed_args.model_overrides),
        task=task,
        suffix=getattr(parsed_args, "checkpoint_suffix", ""),
    )

    # Copy CLI arguments over the checkpoint args, except those that describe
    # the data/model layout baked into the stored checkpoint.
    for arg in vars(parsed_args).keys():
        if arg not in {
            'self_target', 'future_target', 'past_target', 'tokens_per_sample',
            'output_size_dictionary', 'add_bos_token',
        }:
            setattr(args, arg, getattr(parsed_args, arg))

    # reduce tokens per sample by the required context window size
    args.tokens_per_sample -= args.context_window
    task = tasks.setup_task(args)

    # Load dataset splits
    task.load_dataset(args.gen_subset)
    dataset = task.dataset(args.gen_subset)
    if args.context_window > 0:
        dataset = LMContextWindowDataset(
            dataset=dataset,
            tokens_per_sample=args.tokens_per_sample,
            context_window=args.context_window,
            pad_idx=task.source_dictionary.pad(),
        )
    logger.info('{} {} {} examples'.format(args.data, args.gen_subset, len(dataset)))

    # Optimize ensemble for generation and set the source and dest dicts on
    # the model (required by scorer).
    for model in models:
        model.prepare_for_inference_(args)
        if args.fp16:
            model.half()
        if use_cuda:
            model.cuda()
        if args.ipex:
            model = model.to(device=ipex.DEVICE)

    assert len(models) > 0

    logger.info('num. model params: {}'.format(sum(p.numel() for p in models[0].parameters())))

    itr = task.get_batch_iterator(
        dataset=dataset,
        max_tokens=args.max_tokens or 36000,
        max_sentences=args.max_sentences,
        max_positions=utils.resolve_max_positions(*[
            model.max_positions() for model in models
        ]),
        ignore_invalid_inputs=True,
        num_shards=args.num_shards,
        shard_id=args.shard_id,
        num_workers=args.num_workers,
    ).next_epoch_itr(shuffle=False)
    progress = progress_bar.progress_bar(
        itr,
        log_format=args.log_format,
        log_interval=args.log_interval,
        default_log_format=('tqdm' if not args.no_progress_bar else 'none'),
    )

    gen_timer = StopwatchMeter()
    scorer = SequenceScorer(task.target_dictionary, args.softmax_batch)

    score_sum = 0.
    count = 0

    if args.remove_bpe is not None:
        if args.remove_bpe == 'sentencepiece':
            raise NotImplementedError
        else:
            bpe_cont = args.remove_bpe.rstrip()
            # Token ids whose surface form ends with the BPE continuation
            # marker; their scores are merged into the following token.
            bpe_toks = {
                i
                for i in range(len(task.source_dictionary))
                if task.source_dictionary[i].endswith(bpe_cont)
            }
            bpe_len = len(bpe_cont)
    else:
        bpe_toks = None
        bpe_len = 0

    word_stats = dict()

    wps_meter = TimeMeter()

    for sample in progress:
        if 'net_input' not in sample:
            continue

        sample = utils.move_to_cuda(sample) if use_cuda else sample
        sample = utils.move_to_ipex(sample) if args.ipex else sample

        gen_timer.start()
        hypos = scorer.generate(models, sample)
        gen_timer.stop(sample['ntokens'])

        for i, hypos_i in enumerate(hypos):
            hypo = hypos_i[0]
            sample_id = sample['id'][i]

            tokens = hypo['tokens']
            tgt_len = tokens.numel()
            pos_scores = hypo['positional_scores'].float()

            if args.add_bos_token:
                assert hypo['tokens'][0].item() == task.target_dictionary.bos()
                tokens = tokens[1:]
                pos_scores = pos_scores[1:]

            # Fold scores of BPE continuation tokens into the next token so
            # perplexity is counted per word, not per subword.
            skipped_toks = 0
            if bpe_toks is not None:
                for i in range(tgt_len - 1):
                    if tokens[i].item() in bpe_toks:
                        skipped_toks += 1
                        pos_scores[i + 1] += pos_scores[i]
                        pos_scores[i] = 0

            inf_scores = pos_scores.eq(float('inf')) | pos_scores.eq(float('-inf'))
            if inf_scores.any():
                # BUGFIX: logger.info takes a %-format string; the original
                # call passed the token string as an unused extra argument,
                # so it never appeared in the log.
                logger.info(
                    'skipping tokens with inf scores: %s',
                    task.target_dictionary.string(tokens[inf_scores.nonzero()])
                )
                pos_scores = pos_scores[(~inf_scores).nonzero()]
            score_sum += pos_scores.sum().cpu()
            count += pos_scores.numel() - skipped_toks

            if args.output_word_probs or args.output_word_stats:
                w = ''
                word_prob = []
                is_bpe = False
                for i in range(len(tokens)):
                    w_ind = tokens[i].item()
                    w += task.source_dictionary[w_ind]
                    if bpe_toks is not None and w_ind in bpe_toks:
                        # Continuation piece: strip the marker and keep
                        # accumulating the surface form.
                        w = w[:-bpe_len]
                        is_bpe = True
                    else:
                        word_prob.append((w, pos_scores[i].item()))

                        # Find the probability of the next scored (non-zero)
                        # position, if any.
                        next_prob = None
                        ind = i + 1
                        while ind < len(tokens):
                            if pos_scores[ind].item() != 0:
                                next_prob = pos_scores[ind]
                                break
                            ind += 1

                        word_stats.setdefault(w, WordStat(w, is_bpe)).add(pos_scores[i].item(), next_prob)
                        is_bpe = False
                        w = ''
                if args.output_word_probs:
                    logger.info(
                        str(int(sample_id)) + " "
                        + ('\t'.join('{} [{:2f}]'.format(x[0], x[1]) for x in word_prob))
                    )

        wps_meter.update(sample['ntokens'])
        progress.log({'wps': round(wps_meter.avg)})

    avg_nll_loss = -score_sum / count / math.log(2)  # convert to base 2
    logger.info('Evaluated {} tokens in {:.1f}s ({:.2f} tokens/s)'.format(
        gen_timer.n, gen_timer.sum, 1. / gen_timer.avg
    ))
    logger.info('Loss (base 2): {:.4f}, Perplexity: {:.2f}'.format(
        avg_nll_loss, 2**avg_nll_loss
    ))

    if args.output_word_stats:
        for ws in sorted(word_stats.values(), key=lambda x: x.count, reverse=True):
            logger.info(ws)
def cli_main():
    """Command-line entry point: parse eval-lm args and dispatch to main()."""
    eval_parser = options.get_eval_lm_parser()
    cli_args = options.parse_args_and_arch(eval_parser)
    distributed_utils.call_main(cli_args, main)
if __name__ == '__main__':
cli_main()
| 34.119403
| 112
| 0.585302
|
import logging
import math
import os
import torch
from fairseq import checkpoint_utils, options, tasks, utils
from fairseq.data import LMContextWindowDataset
from fairseq.logging import progress_bar
from fairseq.logging.meters import StopwatchMeter, TimeMeter
from fairseq.sequence_scorer import SequenceScorer
from fairseq import distributed_utils
logging.basicConfig(
format='%(asctime)s | %(levelname)s | %(name)s | %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO,
)
logger = logging.getLogger('fairseq_cli.eval_lm')
class WordStat(object):
def __init__(self, word, is_bpe):
self.word = word
self.is_bpe = is_bpe
self.log_prob = 0
self.next_word_prob = 0
self.count = 0
self.missing_next_words = 0
def add(self, log_prob, next_word_prob):
if next_word_prob is not None:
self.next_word_prob += next_word_prob
else:
self.missing_next_words += 1
self.log_prob += log_prob
self.count += 1
def __str__(self):
return '{}\t{}\t{}\t{}\t{}\t{}'.format(self.word, self.count, self.log_prob, self.is_bpe,
self.next_word_prob, self.count - self.missing_next_words)
def main(parsed_args, **unused_kwargs):
assert parsed_args.path is not None, '--path required for evaluation!'
if torch.cuda.is_available() and not parsed_args.cpu:
torch.cuda.set_device(parsed_args.device_id)
utils.import_user_module(parsed_args)
logger.info(parsed_args)
if parsed_args.ipex:
import intel_pytorch_extension as ipex
if args.dnnl:
ipex.core.enable_auto_dnnl()
else:
ipex.core.disable_auto_dnnl()
if args.mix_precision:
ipex.core.enable_mix_bf16_fp32()
use_cuda = torch.cuda.is_available() and not parsed_args.cpu
task = tasks.setup_task(parsed_args)
logger.info('loading model(s) from {}'.format(parsed_args.path))
models, args = checkpoint_utils.load_model_ensemble(
parsed_args.path.split(os.pathsep),
arg_overrides=eval(parsed_args.model_overrides),
task=task,
suffix=getattr(parsed_args, "checkpoint_suffix", ""),
)
for arg in vars(parsed_args).keys():
if arg not in {
'self_target', 'future_target', 'past_target', 'tokens_per_sample',
'output_size_dictionary', 'add_bos_token',
}:
setattr(args, arg, getattr(parsed_args, arg))
args.tokens_per_sample -= args.context_window
task = tasks.setup_task(args)
task.load_dataset(args.gen_subset)
dataset = task.dataset(args.gen_subset)
if args.context_window > 0:
dataset = LMContextWindowDataset(
dataset=dataset,
tokens_per_sample=args.tokens_per_sample,
context_window=args.context_window,
pad_idx=task.source_dictionary.pad(),
)
logger.info('{} {} {} examples'.format(args.data, args.gen_subset, len(dataset)))
for model in models:
model.prepare_for_inference_(args)
if args.fp16:
model.half()
if use_cuda:
model.cuda()
if args.ipex:
model = model.to(device = ipex.DEVICE)
assert len(models) > 0
logger.info('num. model params: {}'.format(sum(p.numel() for p in models[0].parameters())))
itr = task.get_batch_iterator(
dataset=dataset,
max_tokens=args.max_tokens or 36000,
max_sentences=args.max_sentences,
max_positions=utils.resolve_max_positions(*[
model.max_positions() for model in models
]),
ignore_invalid_inputs=True,
num_shards=args.num_shards,
shard_id=args.shard_id,
num_workers=args.num_workers,
).next_epoch_itr(shuffle=False)
progress = progress_bar.progress_bar(
itr,
log_format=args.log_format,
log_interval=args.log_interval,
default_log_format=('tqdm' if not args.no_progress_bar else 'none'),
)
gen_timer = StopwatchMeter()
scorer = SequenceScorer(task.target_dictionary, args.softmax_batch)
score_sum = 0.
count = 0
if args.remove_bpe is not None:
if args.remove_bpe == 'sentencepiece':
raise NotImplementedError
else:
bpe_cont = args.remove_bpe.rstrip()
bpe_toks = {
i
for i in range(len(task.source_dictionary))
if task.source_dictionary[i].endswith(bpe_cont)
}
bpe_len = len(bpe_cont)
else:
bpe_toks = None
bpe_len = 0
word_stats = dict()
wps_meter = TimeMeter()
for sample in progress:
if 'net_input' not in sample:
continue
sample = utils.move_to_cuda(sample) if use_cuda else sample
sample = utils.move_to_ipex(sample) if args.ipex else sample
gen_timer.start()
hypos = scorer.generate(models, sample)
gen_timer.stop(sample['ntokens'])
for i, hypos_i in enumerate(hypos):
hypo = hypos_i[0]
sample_id = sample['id'][i]
tokens = hypo['tokens']
tgt_len = tokens.numel()
pos_scores = hypo['positional_scores'].float()
if args.add_bos_token:
assert hypo['tokens'][0].item() == task.target_dictionary.bos()
tokens = tokens[1:]
pos_scores = pos_scores[1:]
skipped_toks = 0
if bpe_toks is not None:
for i in range(tgt_len - 1):
if tokens[i].item() in bpe_toks:
skipped_toks += 1
pos_scores[i + 1] += pos_scores[i]
pos_scores[i] = 0
inf_scores = pos_scores.eq(float('inf')) | pos_scores.eq(float('-inf'))
if inf_scores.any():
logger.info(
'skipping tokens with inf scores:',
task.target_dictionary.string(tokens[inf_scores.nonzero()])
)
pos_scores = pos_scores[(~inf_scores).nonzero()]
score_sum += pos_scores.sum().cpu()
count += pos_scores.numel() - skipped_toks
if args.output_word_probs or args.output_word_stats:
w = ''
word_prob = []
is_bpe = False
for i in range(len(tokens)):
w_ind = tokens[i].item()
w += task.source_dictionary[w_ind]
if bpe_toks is not None and w_ind in bpe_toks:
w = w[:-bpe_len]
is_bpe = True
else:
word_prob.append((w, pos_scores[i].item()))
next_prob = None
ind = i + 1
while ind < len(tokens):
if pos_scores[ind].item() != 0:
next_prob = pos_scores[ind]
break
ind += 1
word_stats.setdefault(w, WordStat(w, is_bpe)).add(pos_scores[i].item(), next_prob)
is_bpe = False
w = ''
if args.output_word_probs:
logger.info(
str(int(sample_id)) + " "
+ ('\t'.join('{} [{:2f}]'.format(x[0], x[1]) for x in word_prob))
)
wps_meter.update(sample['ntokens'])
progress.log({'wps': round(wps_meter.avg)})
avg_nll_loss = -score_sum / count / math.log(2)
logger.info('Evaluated {} tokens in {:.1f}s ({:.2f} tokens/s)'.format(
gen_timer.n, gen_timer.sum, 1. / gen_timer.avg
))
logger.info('Loss (base 2): {:.4f}, Perplexity: {:.2f}'.format(
avg_nll_loss, 2**avg_nll_loss
))
if args.output_word_stats:
for ws in sorted(word_stats.values(), key=lambda x: x.count, reverse=True):
logger.info(ws)
def cli_main():
parser = options.get_eval_lm_parser()
args = options.parse_args_and_arch(parser)
distributed_utils.call_main(args, main)
if __name__ == '__main__':
cli_main()
| true
| true
|
790a2738b083790991dabb936f5922bade827531
| 26,978
|
py
|
Python
|
tests/flytekit/unit/core/test_type_hints.py
|
sbrunk/flytekit
|
0aa9cdb1be928f799170da61f1135121ccb64657
|
[
"Apache-2.0"
] | null | null | null |
tests/flytekit/unit/core/test_type_hints.py
|
sbrunk/flytekit
|
0aa9cdb1be928f799170da61f1135121ccb64657
|
[
"Apache-2.0"
] | null | null | null |
tests/flytekit/unit/core/test_type_hints.py
|
sbrunk/flytekit
|
0aa9cdb1be928f799170da61f1135121ccb64657
|
[
"Apache-2.0"
] | null | null | null |
import datetime
import os
import typing
from dataclasses import dataclass
import pandas
import pytest
from dataclasses_json import dataclass_json
import flytekit
from flytekit import ContainerTask, SQLTask, dynamic, kwtypes, maptask
from flytekit.common.translator import get_serializable
from flytekit.core import context_manager, launch_plan, promise
from flytekit.core.condition import conditional
from flytekit.core.context_manager import ExecutionState, Image, ImageConfig
from flytekit.core.node import Node
from flytekit.core.promise import NodeOutput, Promise, VoidPromise
from flytekit.core.resources import Resources
from flytekit.core.task import TaskMetadata, task
from flytekit.core.testing import patch, task_mock
from flytekit.core.type_engine import RestrictedTypeError, TypeEngine
from flytekit.core.workflow import workflow
from flytekit.interfaces.data.data_proxy import FileAccessProvider
from flytekit.models.core import types as _core_types
from flytekit.models.interface import Parameter
from flytekit.models.task import Resources as _resource_models
from flytekit.models.types import LiteralType
from flytekit.types.schema import FlyteSchema, SchemaOpenMode
def test_default_wf_params_works():
@task
def my_task(a: int):
wf_params = flytekit.current_context()
assert wf_params.execution_id == "ex:local:local:local"
my_task(a=3)
def test_simple_input_output():
@task
def my_task(a: int) -> typing.NamedTuple("OutputsBC", b=int, c=str):
ctx = flytekit.current_context()
assert ctx.execution_id == "ex:local:local:local"
return a + 2, "hello world"
assert my_task(a=3) == (5, "hello world")
def test_simple_input_no_output():
@task
def my_task(a: int):
pass
assert my_task(a=3) is None
ctx = context_manager.FlyteContext.current_context()
with ctx.new_compilation_context() as ctx:
outputs = my_task(a=3)
assert isinstance(outputs, VoidPromise)
def test_single_output():
@task
def my_task() -> str:
return "Hello world"
assert my_task() == "Hello world"
ctx = context_manager.FlyteContext.current_context()
with ctx.new_compilation_context() as ctx:
outputs = my_task()
assert ctx.compilation_state is not None
nodes = ctx.compilation_state.nodes
assert len(nodes) == 1
assert outputs.is_ready is False
assert outputs.ref.node is nodes[0]
def test_engine_file_output():
basic_blob_type = _core_types.BlobType(format="", dimensionality=_core_types.BlobType.BlobDimensionality.SINGLE,)
fs = FileAccessProvider(local_sandbox_dir="/tmp/flytetesting")
with context_manager.FlyteContext.current_context().new_file_access_context(file_access_provider=fs) as ctx:
# Write some text to a file not in that directory above
test_file_location = "/tmp/sample.txt"
with open(test_file_location, "w") as fh:
fh.write("Hello World\n")
lit = TypeEngine.to_literal(ctx, test_file_location, os.PathLike, LiteralType(blob=basic_blob_type))
# Since we're using local as remote, we should be able to just read the file from the 'remote' location.
with open(lit.scalar.blob.uri, "r") as fh:
assert fh.readline() == "Hello World\n"
# We should also be able to turn the thing back into regular python native thing.
redownloaded_local_file_location = TypeEngine.to_python_value(ctx, lit, os.PathLike)
with open(redownloaded_local_file_location, "r") as fh:
assert fh.readline() == "Hello World\n"
def test_wf1():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str, b: str) -> str:
return b + a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a)
d = t2(a=y, b=b)
return x, d
assert len(my_wf._nodes) == 2
assert my_wf._nodes[0].id == "n0"
assert my_wf._nodes[1]._upstream_nodes[0] is my_wf._nodes[0]
assert len(my_wf._output_bindings) == 2
assert my_wf._output_bindings[0].var == "o0"
assert my_wf._output_bindings[0].binding.promise.var == "t1_int_output"
nt = typing.NamedTuple("SingleNT", t1_int_output=float)
@task
def t3(a: int) -> nt:
return (a + 2,)
assert t3.python_interface.output_tuple_name == "SingleNT"
assert t3.interface.outputs["t1_int_output"] is not None
def test_wf1_run():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str, b: str) -> str:
return b + a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a)
d = t2(a=y, b=b)
return x, d
x = my_wf(a=5, b="hello ")
assert x == (7, "hello world")
@workflow
def my_wf2(a: int, b: str) -> (int, str):
tup = t1(a=a)
d = t2(a=tup.c, b=b)
return tup.t1_int_output, d
x = my_wf2(a=5, b="hello ")
assert x == (7, "hello world")
def test_wf1_with_overrides():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str, b: str) -> str:
return b + a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a).with_overrides(name="x")
d = t2(a=y, b=b).with_overrides()
return x, d
x = my_wf(a=5, b="hello ")
assert x == (7, "hello world")
def test_wf1_with_list_of_inputs():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: typing.List[str]) -> str:
return " ".join(a)
@workflow
def my_wf(a: int, b: str) -> (int, str):
xx, yy = t1(a=a)
d = t2(a=[b, yy])
return xx, d
x = my_wf(a=5, b="hello")
assert x == (7, "hello world")
@workflow
def my_wf2(a: int, b: str) -> int:
x, y = t1(a=a)
t2(a=[b, y])
return x
x = my_wf2(a=5, b="hello")
assert x == 7
def test_wf_output_mismatch():
with pytest.raises(AssertionError):
@workflow
def my_wf(a: int, b: str) -> (int, str):
return a
with pytest.raises(AssertionError):
@workflow
def my_wf2(a: int, b: str) -> int:
return a, b
@workflow
def my_wf3(a: int, b: str) -> int:
return (a,)
my_wf3(a=10, b="hello")
def test_promise_return():
"""
Testing that when a workflow is local executed but a local wf execution context already exists, Promise objects
are returned wrapping Flyte literals instead of the unpacked dict.
"""
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
a = a + 2
return a, "world-" + str(a)
@workflow
def mimic_sub_wf(a: int) -> (str, str):
x, y = t1(a=a)
u, v = t1(a=x)
return y, v
ctx = context_manager.FlyteContext.current_context()
with ctx.new_execution_context(mode=ExecutionState.Mode.LOCAL_WORKFLOW_EXECUTION) as ctx:
a, b = mimic_sub_wf(a=3)
assert isinstance(a, promise.Promise)
assert isinstance(b, promise.Promise)
assert a.val.scalar.value.string_value == "world-5"
assert b.val.scalar.value.string_value == "world-7"
def test_wf1_with_sql():
sql = SQLTask(
"my-query",
query_template="SELECT * FROM hive.city.fact_airport_sessions WHERE ds = '{{ .Inputs.ds }}' LIMIT 10",
inputs=kwtypes(ds=datetime.datetime),
outputs=kwtypes(results=FlyteSchema),
metadata=TaskMetadata(retries=2),
)
@task
def t1() -> datetime.datetime:
return datetime.datetime.now()
@workflow
def my_wf() -> FlyteSchema:
dt = t1()
return sql(ds=dt)
with task_mock(sql) as mock:
mock.return_value = pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]})
assert (my_wf().open().all() == pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]})).all().all()
def test_wf1_with_sql_with_patch():
sql = SQLTask(
"my-query",
query_template="SELECT * FROM hive.city.fact_airport_sessions WHERE ds = '{{ .Inputs.ds }}' LIMIT 10",
inputs=kwtypes(ds=datetime.datetime),
outputs=kwtypes(results=FlyteSchema),
metadata=TaskMetadata(retries=2),
)
@task
def t1() -> datetime.datetime:
return datetime.datetime.now()
@workflow
def my_wf() -> FlyteSchema:
dt = t1()
return sql(ds=dt)
@patch(sql)
def test_user_demo_test(mock_sql):
mock_sql.return_value = pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]})
assert (my_wf().open().all() == pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]})).all().all()
# Have to call because tests inside tests don't run
test_user_demo_test()
def test_wf1_with_map():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
a = a + 2
return a, "world-" + str(a)
@task
def t2(a: typing.List[int], b: typing.List[str]) -> (int, str):
ra = 0
for x in a:
ra += x
rb = ""
for x in b:
rb += x
return ra, rb
@workflow
def my_wf(a: typing.List[int]) -> (int, str):
x, y = maptask(t1, metadata=TaskMetadata(retries=1))(a=a)
return t2(a=x, b=y)
x = my_wf(a=[5, 6])
assert x == (15, "world-7world-8")
def test_wf1_compile_time_constant_vars():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str, b: str) -> str:
return b + a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a)
d = t2(a="This is my way", b=b)
return x, d
x = my_wf(a=5, b="hello ")
assert x == (7, "hello This is my way")
def test_wf1_with_constant_return():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str, b: str) -> str:
return b + a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a)
t2(a="This is my way", b=b)
return x, "A constant output"
x = my_wf(a=5, b="hello ")
assert x == (7, "A constant output")
@workflow
def my_wf2(a: int, b: str) -> int:
t1(a=a)
t2(a="This is my way", b=b)
return 10
assert my_wf2(a=5, b="hello ") == 10
def test_wf1_with_dynamic():
@task
def t1(a: int) -> str:
a = a + 2
return "world-" + str(a)
@task
def t2(a: str, b: str) -> str:
return b + a
@dynamic
def my_subwf(a: int) -> typing.List[str]:
s = []
for i in range(a):
s.append(t1(a=i))
return s
@workflow
def my_wf(a: int, b: str) -> (str, typing.List[str]):
x = t2(a=b, b=b)
v = my_subwf(a=a)
return x, v
v = 5
x = my_wf(a=v, b="hello ")
assert x == ("hello hello ", ["world-" + str(i) for i in range(2, v + 2)])
with context_manager.FlyteContext.current_context().new_serialization_settings(
serialization_settings=context_manager.SerializationSettings(
project="test_proj",
domain="test_domain",
version="abc",
image_config=ImageConfig(Image(name="name", fqn="image", tag="name")),
env={},
)
) as ctx:
with ctx.new_execution_context(mode=ExecutionState.Mode.TASK_EXECUTION) as ctx:
dynamic_job_spec = my_subwf.compile_into_workflow(ctx, my_subwf._task_function, a=5)
assert len(dynamic_job_spec._nodes) == 5
def test_list_output():
@task
def t1(a: int) -> str:
a = a + 2
return "world-" + str(a)
@workflow
def lister() -> typing.List[str]:
s = []
# FYI: For users who happen to look at this, keep in mind this is only run once at compile time.
for i in range(10):
s.append(t1(a=i))
return s
assert len(lister.interface.outputs) == 1
binding_data = lister._output_bindings[0].binding # the property should be named binding_data
assert binding_data.collection is not None
assert len(binding_data.collection.bindings) == 10
def test_comparison_refs():
    """Comparison operators on node-output Promises should build comparison
    expression objects rather than evaluating eagerly."""

    def make_node(node_id) -> Node:
        # Minimal stand-in node wrapping a trivial SQLTask.
        node = Node(
            node_id,
            metadata=None,
            bindings=[],
            upstream_nodes=[],
            flyte_entity=SQLTask(name="x", query_template="x", inputs={}),
        )
        node._id = node_id
        return node

    px = Promise("x", NodeOutput(var="x", node=make_node("n1")))
    py = Promise("y", NodeOutput(var="y", node=make_node("n2")))

    def show(expr):
        print(f"{expr} is type {type(expr)}")

    show(px == py)
    show(px < py)
    show((px == py) & (px < py))
    show(((px == py) & (px < py)) | (px > py))
    show(px < 5)
    show(px >= 5)
def test_comparison_lits():
    """Comparison expressions over literal-backed promises evaluate eagerly via eval()."""
    five = Promise("x", TypeEngine.to_literal(None, 5, int, None))
    eight = Promise("y", TypeEngine.to_literal(None, 8, int, None))

    def eval_expr(expr, expected: bool):
        print(f"{expr} evals to {expr.eval()}")
        assert expected == expr.eval()

    eval_expr(five == eight, False)
    eval_expr(five < eight, True)
    eval_expr((five == eight) & (five < eight), False)
    eval_expr(((five == eight) & (five < eight)) | (five > eight), False)
    # Promises also compare against plain Python literals.
    eval_expr(five < 5, False)
    eval_expr(five >= 5, True)
    eval_expr(eight >= 5, True)
def test_wf1_branches():
    """Locally execute a workflow with if_/elif_/else_ conditionals.

    The first conditional branches on a task output; the second conditional
    branches on the result of the first conditional.
    """
    @task
    def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
        return a + 2, "world"
    @task
    def t2(a: str) -> str:
        return a
    @workflow
    def my_wf(a: int, b: str) -> (int, str):
        x, y = t1(a=a)
        # else_().fail(...) makes any unmatched input an execution failure.
        d = (
            conditional("test1")
            .if_(x == 4)
            .then(t2(a=b))
            .elif_(x >= 5)
            .then(t2(a=y))
            .else_()
            .fail("Unable to choose branch")
        )
        # A conditional may consume the output of a previous conditional.
        f = conditional("test2").if_(d == "hello ").then(t2(a="It is hello")).else_().then(t2(a="Not Hello!"))
        return x, f
    # a=5 -> x=7 -> elif arm -> d="world" -> second conditional's else arm.
    x = my_wf(a=5, b="hello ")
    assert x == (7, "Not Hello!")
    # a=2 -> x=4 -> if arm -> d="hello " -> second conditional's if arm.
    x = my_wf(a=2, b="hello ")
    assert x == (4, "It is hello")
def test_wf1_branches_no_else():
    """A conditional chain without a terminal else_ is rejected at workflow definition time."""
    with pytest.raises(NotImplementedError):
        def foo():
            @task
            def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
                return a + 2, "world"
            @task
            def t2(a: str) -> str:
                return a
            @workflow
            def my_wf(a: int, b: str) -> (int, str):
                x, y = t1(a=a)
                # Missing else_() on this chain is the error under test.
                d = conditional("test1").if_(x == 4).then(t2(a=b)).elif_(x >= 5).then(t2(a=y))
                conditional("test2").if_(x == 4).then(t2(a=b)).elif_(x >= 5).then(t2(a=y)).else_().fail("blah")
                return x, d
        foo()
def test_wf1_branches_failing():
    """Hitting the else_().fail(...) arm of a conditional raises ValueError in local execution."""
    @task
    def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
        return a + 2, "world"
    @task
    def t2(a: str) -> str:
        return a
    @workflow
    def my_wf(a: int, b: str) -> (int, str):
        x, y = t1(a=a)
        d = (
            conditional("test1")
            .if_(x == 4)
            .then(t2(a=b))
            .elif_(x >= 5)
            .then(t2(a=y))
            .else_()
            .fail("All Branches failed")
        )
        return x, d
    # a=1 -> x=3: neither condition matches, so the fail() arm is taken.
    with pytest.raises(ValueError):
        my_wf(a=1, b="hello ")
def test_cant_use_normal_tuples():
    """A plain ``tuple`` return annotation is rejected by the type engine."""
    with pytest.raises(RestrictedTypeError):

        @task
        def tuple_returner(a: str) -> tuple:
            # Never reached: the decorator raises while building the interface.
            return a, 3
def test_wf1_df():
    """End-to-end check that pandas DataFrames flow between tasks and out of a workflow."""

    @task
    def t1(a: int) -> pandas.DataFrame:
        return pandas.DataFrame(data={"col1": [a, 2], "col2": [a, 4]})

    @task
    def t2(df: pandas.DataFrame) -> pandas.DataFrame:
        # DataFrame.append was deprecated and removed in pandas 2.0; concat is
        # the supported equivalent and yields the same frame on older versions.
        return pandas.concat([df, pandas.DataFrame(data={"col1": [5, 10], "col2": [5, 10]})])

    @workflow
    def my_wf(a: int) -> pandas.DataFrame:
        df = t1(a=a)
        return t2(df=df)

    x = my_wf(a=20)
    assert isinstance(x, pandas.DataFrame)
    # Compare element-wise after discarding the (duplicated) concat indices.
    result_df = x.reset_index(drop=True) == pandas.DataFrame(
        data={"col1": [20, 2, 5, 10], "col2": [20, 4, 5, 10]}
    ).reset_index(drop=True)
    assert result_df.all().all()
def test_lp_serialize():
    """Serialize launch plans with and without default inputs and verify the parameter sets."""
    @task
    def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
        a = a + 2
        return a, "world-" + str(a)
    @task
    def t2(a: str, b: str) -> str:
        return b + a
    @workflow
    def my_subwf(a: int) -> (str, str):
        x, y = t1(a=a)
        u, v = t1(a=x)
        return y, v
    lp = launch_plan.LaunchPlan.create("serialize_test1", my_subwf)
    lp_with_defaults = launch_plan.LaunchPlan.create("serialize_test2", my_subwf, default_inputs={"a": 3})
    serialization_settings = context_manager.SerializationSettings(
        project="proj",
        domain="dom",
        version="123",
        image_config=ImageConfig(Image(name="name", fqn="asdf/fdsa", tag="123")),
        env={},
    )
    # Without defaults, both the default and fixed input sets are empty.
    sdk_lp = get_serializable(serialization_settings, lp)
    assert len(sdk_lp.default_inputs.parameters) == 0
    assert len(sdk_lp.fixed_inputs.literals) == 0
    # A default input shows up as a default parameter, not as a fixed literal.
    sdk_lp = get_serializable(serialization_settings, lp_with_defaults)
    assert len(sdk_lp.default_inputs.parameters) == 1
    assert len(sdk_lp.fixed_inputs.literals) == 0
    # Adding a check to make sure oneof is respected. Tricky with booleans... if a default is specified, the
    # required field needs to be None, not False.
    parameter_a = sdk_lp.default_inputs.parameters["a"]
    parameter_a = Parameter.from_flyte_idl(parameter_a.to_flyte_idl())
    assert parameter_a.default is not None
def test_wf_container_task():
    """A raw ContainerTask cannot run locally, so it is exercised through task_mock."""
    @task
    def t1(a: int) -> (int, str):
        return a + 2, str(a) + "-HELLO"
    t2 = ContainerTask(
        "raw",
        image="alpine",
        inputs=kwtypes(a=int, b=str),
        input_data_dir="/tmp",
        output_data_dir="/tmp",
        command=["cat"],
        arguments=["/tmp/a"],
    )
    def wf(a: int):
        x, y = t1(a=a)
        t2(a=x, b=y)
    with task_mock(t2) as mock:
        # The container task declares no outputs, so the mock returns None.
        mock.side_effect = lambda a, b: None
        assert t2(a=10, b="hello") is None
        wf(a=10)
def test_wf_container_task_multiple():
    """Compose two raw ContainerTasks in a workflow and execute them via task mocks."""
    square = ContainerTask(
        name="square",
        input_data_dir="/var/inputs",
        output_data_dir="/var/outputs",
        inputs=kwtypes(val=int),
        outputs=kwtypes(out=int),
        image="alpine",
        command=["sh", "-c", "echo $(( {{.Inputs.val}} * {{.Inputs.val}} )) | tee /var/outputs/out"],
    )
    # Bound locally as sum_task so the builtin sum() is not shadowed; the
    # registered task name stays "sum".
    sum_task = ContainerTask(
        name="sum",
        input_data_dir="/var/flyte/inputs",
        output_data_dir="/var/flyte/outputs",
        inputs=kwtypes(x=int, y=int),
        outputs=kwtypes(out=int),
        image="alpine",
        command=["sh", "-c", "echo $(( {{.Inputs.x}} + {{.Inputs.y}} )) | tee /var/flyte/outputs/out"],
    )

    @workflow
    def raw_container_wf(val1: int, val2: int) -> int:
        return sum_task(x=square(val=val1), y=square(val=val2))

    with task_mock(square) as square_mock, task_mock(sum_task) as sum_mock:
        square_mock.side_effect = lambda val: val * val
        assert square(val=10) == 100
        sum_mock.side_effect = lambda x, y: x + y
        assert sum_task(x=10, y=10) == 20
        assert raw_container_wf(val1=10, val2=10) == 200
def test_wf_tuple_fails():
    """A plain ``tuple`` input annotation is rejected, mirroring the output restriction."""
    with pytest.raises(RestrictedTypeError):

        @task
        def tuple_input_task(a: tuple) -> (int, str):
            # Never reached: the decorator raises while building the interface.
            return a[0] + 2, str(a) + "-HELLO"
def test_wf_typed_schema():
    """Typed FlyteSchema outputs: write, read back, project columns, and run via a workflow."""
    schema1 = FlyteSchema[kwtypes(x=int, y=str)]
    @task
    def t1() -> schema1:
        s = schema1()
        s.open().write(pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]}))
        return s
    @task
    def t2(s: FlyteSchema[kwtypes(x=int, y=str)]) -> FlyteSchema[kwtypes(x=int)]:
        # Drop the last column (y) to match the narrower output schema.
        df = s.open().all()
        return df[s.column_names()[:-1]]
    @workflow
    def wf() -> FlyteSchema[kwtypes(x=int)]:
        return t2(s=t1())
    w = t1()
    assert w is not None
    # Re-reading a schema written locally requires an explicit READ open mode.
    df = w.open(override_mode=SchemaOpenMode.READ).all()
    result_df = df.reset_index(drop=True) == pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]}).reset_index(
        drop=True
    )
    assert result_df.all().all()
    df = t2(s=w.as_readonly())
    assert df is not None
    result_df = df.reset_index(drop=True) == pandas.DataFrame(data={"x": [1, 2]}).reset_index(drop=True)
    assert result_df.all().all()
    # Full workflow run: only the projected x column survives.
    x = wf()
    df = x.open().all()
    result_df = df.reset_index(drop=True) == pandas.DataFrame(data={"x": [1, 2]}).reset_index(drop=True)
    assert result_df.all().all()
def test_wf_schema_to_df():
    """A FlyteSchema task output converts automatically into a pandas DataFrame input."""
    schema1 = FlyteSchema[kwtypes(x=int, y=str)]

    @task
    def t1() -> schema1:
        writer = schema1()
        writer.open().write(pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]}))
        return writer

    @task
    def t2(df: pandas.DataFrame) -> int:
        return len(df.columns.values)

    @workflow
    def wf() -> int:
        return t2(df=t1())

    # Both schema columns survive the schema -> DataFrame conversion.
    assert wf() == 2
def test_dict_wf_with_constants():
    """A dict literal mixing a workflow input and a task output binds correctly."""

    @task
    def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
        return a + 2, "world"

    @task
    def t2(a: typing.Dict[str, str]) -> str:
        return " ".join(a.values())

    @workflow
    def my_wf(a: int, b: str) -> (int, str):
        x, y = t1(a=a)
        d = t2(a={"key1": b, "key2": y})
        return x, d

    assert my_wf(a=5, b="hello") == (7, "hello world")
def test_dict_wf_with_conversion():
    """Passing a typed Dict[str, str] output into an untyped ``dict`` input fails locally."""

    @task
    def t1(a: int) -> typing.Dict[str, str]:
        return {"a": str(a)}

    @task
    def t2(a: dict) -> str:
        print(f"HAHAH {a}")
        return " ".join(a.values())

    @workflow
    def my_wf(a: int) -> str:
        intermediate = t1(a=a)
        return t2(a=intermediate)

    with pytest.raises(TypeError):
        my_wf(a=5)
def test_wf_with_empty_dict():
    """An empty, untyped dict round-trips between tasks unchanged."""

    @task
    def t1() -> typing.Dict:
        return {}

    @task
    def t2(d: typing.Dict):
        assert d == {}

    @workflow
    def wf():
        t2(d=t1())

    wf()
def test_wf_with_catching_no_return():
    """Using the (nonexistent) result of a no-output task as an input fails at compile time."""
    @task
    def t1() -> typing.Dict:
        return {}
    @task
    def t2(d: typing.Dict):
        assert d == {}
    @task
    def t3(s: str):
        pass
    with pytest.raises(AssertionError):
        @workflow
        def wf():
            d = t1()
            # The following statement is wrong, this should not be allowed to pass to another task
            x = t2(d=d)
            # Passing x is wrong in this case
            t3(s=x)
        wf()
def test_wf_custom_types_missing_dataclass_json():
    """A dataclass lacking @dataclass_json cannot be used as a task output type."""
    with pytest.raises(AssertionError):

        @dataclass
        class Unserializable(object):
            pass

        @task
        def make_value(a: int) -> Unserializable:
            return Unserializable()
def test_wf_custom_types():
    """dataclass_json-decorated dataclasses flow through tasks and workflows intact."""

    @dataclass_json
    @dataclass
    class MyCustomType(object):
        x: int
        y: str

    @task
    def t1(a: int) -> MyCustomType:
        return MyCustomType(x=a, y="t1")

    @task
    def t2(a: MyCustomType, b: str) -> (MyCustomType, int):
        return MyCustomType(x=a.x, y=f"{a.y} {b}"), 5

    @workflow
    def my_wf(a: int, b: str) -> (MyCustomType, int):
        return t2(a=t1(a=a), b=b)

    custom, count = my_wf(a=10, b="hello")
    assert count == 5
    assert custom.x == 10
    assert custom.y == "t1 hello"
def test_arbit_class():
    """An arbitrary class with no registered type transformer is rejected as a return type."""

    class Foo(object):
        pass

    with pytest.raises(ValueError):

        @task
        def returns_foo(a: int) -> Foo:
            return Foo()
def test_dataclass_more():
    """Nested containers inside a dataclass (Dict[int, str]) survive task boundaries."""

    @dataclass_json
    @dataclass
    class Datum(object):
        x: int
        y: str
        z: typing.Dict[int, str]

    @task
    def stringify(x: int) -> Datum:
        return Datum(x=x, y=str(x), z={x: str(x)})

    @task
    def add(x: Datum, y: Datum) -> Datum:
        x.z.update(y.z)
        return Datum(x=x.x + y.x, y=x.y + y.y, z=x.z)

    @workflow
    def wf(x: int, y: int) -> Datum:
        left = stringify(x=x)
        right = stringify(x=y)
        return add(x=left, y=right)

    wf(x=10, y=20)
def test_environment():
    """Task-level environment variables are merged over serialization-level ones."""
    @task(environment={"FOO": "foofoo", "BAZ": "baz"})
    def t1(a: int) -> str:
        a = a + 2
        return "now it's " + str(a)
    @workflow
    def my_wf(a: int) -> str:
        x = t1(a=a)
        return x
    serialization_settings = context_manager.SerializationSettings(
        project="test_proj",
        domain="test_domain",
        version="abc",
        image_config=ImageConfig(Image(name="name", fqn="image", tag="name")),
        env={"FOO": "foo", "BAR": "bar"},
    )
    with context_manager.FlyteContext.current_context().new_compilation_context():
        # FOO is overridden by the task, BAR comes from settings, BAZ from the task.
        sdk_task = get_serializable(serialization_settings, t1)
        assert sdk_task.container.env == {"FOO": "foofoo", "BAR": "bar", "BAZ": "baz"}
def test_resources():
    """Requests/limits declared on @task serialize into the container's resource entries."""
    @task(requests=Resources(cpu="1"), limits=Resources(cpu="2", mem="400M"))
    def t1(a: int) -> str:
        a = a + 2
        return "now it's " + str(a)
    @task(requests=Resources(cpu="3"))
    def t2(a: int) -> str:
        a = a + 200
        return "now it's " + str(a)
    @workflow
    def my_wf(a: int) -> str:
        x = t1(a=a)
        return x
    serialization_settings = context_manager.SerializationSettings(
        project="test_proj",
        domain="test_domain",
        version="abc",
        image_config=ImageConfig(Image(name="name", fqn="image", tag="name")),
        env={},
    )
    with context_manager.FlyteContext.current_context().new_compilation_context():
        sdk_task = get_serializable(serialization_settings, t1)
        assert sdk_task.container.resources.requests == [
            _resource_models.ResourceEntry(_resource_models.ResourceName.CPU, "1")
        ]
        assert sdk_task.container.resources.limits == [
            _resource_models.ResourceEntry(_resource_models.ResourceName.CPU, "2"),
            _resource_models.ResourceEntry(_resource_models.ResourceName.MEMORY, "400M"),
        ]
        # Unspecified limits serialize to an empty list, not a copy of requests.
        sdk_task2 = get_serializable(serialization_settings, t2)
        assert sdk_task2.container.resources.requests == [
            _resource_models.ResourceEntry(_resource_models.ResourceName.CPU, "3")
        ]
        assert sdk_task2.container.resources.limits == []
def test_wf_explicitly_returning_empty_task():
    """Returning a no-output task call from a workflow yields None in local execution."""
    @task
    def t1():
        ...
    @workflow
    def my_subwf():
        return t1()  # This forces the wf _local_execute to handle VoidPromises
    assert my_subwf() is None
| 27.333333
| 117
| 0.5814
|
import datetime
import os
import typing
from dataclasses import dataclass
import pandas
import pytest
from dataclasses_json import dataclass_json
import flytekit
from flytekit import ContainerTask, SQLTask, dynamic, kwtypes, maptask
from flytekit.common.translator import get_serializable
from flytekit.core import context_manager, launch_plan, promise
from flytekit.core.condition import conditional
from flytekit.core.context_manager import ExecutionState, Image, ImageConfig
from flytekit.core.node import Node
from flytekit.core.promise import NodeOutput, Promise, VoidPromise
from flytekit.core.resources import Resources
from flytekit.core.task import TaskMetadata, task
from flytekit.core.testing import patch, task_mock
from flytekit.core.type_engine import RestrictedTypeError, TypeEngine
from flytekit.core.workflow import workflow
from flytekit.interfaces.data.data_proxy import FileAccessProvider
from flytekit.models.core import types as _core_types
from flytekit.models.interface import Parameter
from flytekit.models.task import Resources as _resource_models
from flytekit.models.types import LiteralType
from flytekit.types.schema import FlyteSchema, SchemaOpenMode
def test_default_wf_params_works():
@task
def my_task(a: int):
wf_params = flytekit.current_context()
assert wf_params.execution_id == "ex:local:local:local"
my_task(a=3)
def test_simple_input_output():
@task
def my_task(a: int) -> typing.NamedTuple("OutputsBC", b=int, c=str):
ctx = flytekit.current_context()
assert ctx.execution_id == "ex:local:local:local"
return a + 2, "hello world"
assert my_task(a=3) == (5, "hello world")
def test_simple_input_no_output():
@task
def my_task(a: int):
pass
assert my_task(a=3) is None
ctx = context_manager.FlyteContext.current_context()
with ctx.new_compilation_context() as ctx:
outputs = my_task(a=3)
assert isinstance(outputs, VoidPromise)
def test_single_output():
@task
def my_task() -> str:
return "Hello world"
assert my_task() == "Hello world"
ctx = context_manager.FlyteContext.current_context()
with ctx.new_compilation_context() as ctx:
outputs = my_task()
assert ctx.compilation_state is not None
nodes = ctx.compilation_state.nodes
assert len(nodes) == 1
assert outputs.is_ready is False
assert outputs.ref.node is nodes[0]
def test_engine_file_output():
basic_blob_type = _core_types.BlobType(format="", dimensionality=_core_types.BlobType.BlobDimensionality.SINGLE,)
fs = FileAccessProvider(local_sandbox_dir="/tmp/flytetesting")
with context_manager.FlyteContext.current_context().new_file_access_context(file_access_provider=fs) as ctx:
test_file_location = "/tmp/sample.txt"
with open(test_file_location, "w") as fh:
fh.write("Hello World\n")
lit = TypeEngine.to_literal(ctx, test_file_location, os.PathLike, LiteralType(blob=basic_blob_type))
with open(lit.scalar.blob.uri, "r") as fh:
assert fh.readline() == "Hello World\n"
# We should also be able to turn the thing back into regular python native thing.
redownloaded_local_file_location = TypeEngine.to_python_value(ctx, lit, os.PathLike)
with open(redownloaded_local_file_location, "r") as fh:
assert fh.readline() == "Hello World\n"
def test_wf1():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str, b: str) -> str:
return b + a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a)
d = t2(a=y, b=b)
return x, d
assert len(my_wf._nodes) == 2
assert my_wf._nodes[0].id == "n0"
assert my_wf._nodes[1]._upstream_nodes[0] is my_wf._nodes[0]
assert len(my_wf._output_bindings) == 2
assert my_wf._output_bindings[0].var == "o0"
assert my_wf._output_bindings[0].binding.promise.var == "t1_int_output"
nt = typing.NamedTuple("SingleNT", t1_int_output=float)
@task
def t3(a: int) -> nt:
return (a + 2,)
assert t3.python_interface.output_tuple_name == "SingleNT"
assert t3.interface.outputs["t1_int_output"] is not None
def test_wf1_run():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str, b: str) -> str:
return b + a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a)
d = t2(a=y, b=b)
return x, d
x = my_wf(a=5, b="hello ")
assert x == (7, "hello world")
@workflow
def my_wf2(a: int, b: str) -> (int, str):
tup = t1(a=a)
d = t2(a=tup.c, b=b)
return tup.t1_int_output, d
x = my_wf2(a=5, b="hello ")
assert x == (7, "hello world")
def test_wf1_with_overrides():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str, b: str) -> str:
return b + a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a).with_overrides(name="x")
d = t2(a=y, b=b).with_overrides()
return x, d
x = my_wf(a=5, b="hello ")
assert x == (7, "hello world")
def test_wf1_with_list_of_inputs():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: typing.List[str]) -> str:
return " ".join(a)
@workflow
def my_wf(a: int, b: str) -> (int, str):
xx, yy = t1(a=a)
d = t2(a=[b, yy])
return xx, d
x = my_wf(a=5, b="hello")
assert x == (7, "hello world")
@workflow
def my_wf2(a: int, b: str) -> int:
x, y = t1(a=a)
t2(a=[b, y])
return x
x = my_wf2(a=5, b="hello")
assert x == 7
def test_wf_output_mismatch():
with pytest.raises(AssertionError):
@workflow
def my_wf(a: int, b: str) -> (int, str):
return a
with pytest.raises(AssertionError):
@workflow
def my_wf2(a: int, b: str) -> int:
return a, b
@workflow
def my_wf3(a: int, b: str) -> int:
return (a,)
my_wf3(a=10, b="hello")
def test_promise_return():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
a = a + 2
return a, "world-" + str(a)
@workflow
def mimic_sub_wf(a: int) -> (str, str):
x, y = t1(a=a)
u, v = t1(a=x)
return y, v
ctx = context_manager.FlyteContext.current_context()
with ctx.new_execution_context(mode=ExecutionState.Mode.LOCAL_WORKFLOW_EXECUTION) as ctx:
a, b = mimic_sub_wf(a=3)
assert isinstance(a, promise.Promise)
assert isinstance(b, promise.Promise)
assert a.val.scalar.value.string_value == "world-5"
assert b.val.scalar.value.string_value == "world-7"
def test_wf1_with_sql():
sql = SQLTask(
"my-query",
query_template="SELECT * FROM hive.city.fact_airport_sessions WHERE ds = '{{ .Inputs.ds }}' LIMIT 10",
inputs=kwtypes(ds=datetime.datetime),
outputs=kwtypes(results=FlyteSchema),
metadata=TaskMetadata(retries=2),
)
@task
def t1() -> datetime.datetime:
return datetime.datetime.now()
@workflow
def my_wf() -> FlyteSchema:
dt = t1()
return sql(ds=dt)
with task_mock(sql) as mock:
mock.return_value = pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]})
assert (my_wf().open().all() == pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]})).all().all()
def test_wf1_with_sql_with_patch():
sql = SQLTask(
"my-query",
query_template="SELECT * FROM hive.city.fact_airport_sessions WHERE ds = '{{ .Inputs.ds }}' LIMIT 10",
inputs=kwtypes(ds=datetime.datetime),
outputs=kwtypes(results=FlyteSchema),
metadata=TaskMetadata(retries=2),
)
@task
def t1() -> datetime.datetime:
return datetime.datetime.now()
@workflow
def my_wf() -> FlyteSchema:
dt = t1()
return sql(ds=dt)
@patch(sql)
def test_user_demo_test(mock_sql):
mock_sql.return_value = pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]})
assert (my_wf().open().all() == pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]})).all().all()
# Have to call because tests inside tests don't run
test_user_demo_test()
def test_wf1_with_map():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
a = a + 2
return a, "world-" + str(a)
@task
def t2(a: typing.List[int], b: typing.List[str]) -> (int, str):
ra = 0
for x in a:
ra += x
rb = ""
for x in b:
rb += x
return ra, rb
@workflow
def my_wf(a: typing.List[int]) -> (int, str):
x, y = maptask(t1, metadata=TaskMetadata(retries=1))(a=a)
return t2(a=x, b=y)
x = my_wf(a=[5, 6])
assert x == (15, "world-7world-8")
def test_wf1_compile_time_constant_vars():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str, b: str) -> str:
return b + a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a)
d = t2(a="This is my way", b=b)
return x, d
x = my_wf(a=5, b="hello ")
assert x == (7, "hello This is my way")
def test_wf1_with_constant_return():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str, b: str) -> str:
return b + a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a)
t2(a="This is my way", b=b)
return x, "A constant output"
x = my_wf(a=5, b="hello ")
assert x == (7, "A constant output")
@workflow
def my_wf2(a: int, b: str) -> int:
t1(a=a)
t2(a="This is my way", b=b)
return 10
assert my_wf2(a=5, b="hello ") == 10
def test_wf1_with_dynamic():
@task
def t1(a: int) -> str:
a = a + 2
return "world-" + str(a)
@task
def t2(a: str, b: str) -> str:
return b + a
@dynamic
def my_subwf(a: int) -> typing.List[str]:
s = []
for i in range(a):
s.append(t1(a=i))
return s
@workflow
def my_wf(a: int, b: str) -> (str, typing.List[str]):
x = t2(a=b, b=b)
v = my_subwf(a=a)
return x, v
v = 5
x = my_wf(a=v, b="hello ")
assert x == ("hello hello ", ["world-" + str(i) for i in range(2, v + 2)])
with context_manager.FlyteContext.current_context().new_serialization_settings(
serialization_settings=context_manager.SerializationSettings(
project="test_proj",
domain="test_domain",
version="abc",
image_config=ImageConfig(Image(name="name", fqn="image", tag="name")),
env={},
)
) as ctx:
with ctx.new_execution_context(mode=ExecutionState.Mode.TASK_EXECUTION) as ctx:
dynamic_job_spec = my_subwf.compile_into_workflow(ctx, my_subwf._task_function, a=5)
assert len(dynamic_job_spec._nodes) == 5
def test_list_output():
@task
def t1(a: int) -> str:
a = a + 2
return "world-" + str(a)
@workflow
def lister() -> typing.List[str]:
s = []
for i in range(10):
s.append(t1(a=i))
return s
assert len(lister.interface.outputs) == 1
binding_data = lister._output_bindings[0].binding
assert binding_data.collection is not None
assert len(binding_data.collection.bindings) == 10
def test_comparison_refs():
def dummy_node(node_id) -> Node:
n = Node(
node_id,
metadata=None,
bindings=[],
upstream_nodes=[],
flyte_entity=SQLTask(name="x", query_template="x", inputs={}),
)
n._id = node_id
return n
px = Promise("x", NodeOutput(var="x", node=dummy_node("n1")))
py = Promise("y", NodeOutput(var="y", node=dummy_node("n2")))
def print_expr(expr):
print(f"{expr} is type {type(expr)}")
print_expr(px == py)
print_expr(px < py)
print_expr((px == py) & (px < py))
print_expr(((px == py) & (px < py)) | (px > py))
print_expr(px < 5)
print_expr(px >= 5)
def test_comparison_lits():
px = Promise("x", TypeEngine.to_literal(None, 5, int, None))
py = Promise("y", TypeEngine.to_literal(None, 8, int, None))
def eval_expr(expr, expected: bool):
print(f"{expr} evals to {expr.eval()}")
assert expected == expr.eval()
eval_expr(px == py, False)
eval_expr(px < py, True)
eval_expr((px == py) & (px < py), False)
eval_expr(((px == py) & (px < py)) | (px > py), False)
eval_expr(px < 5, False)
eval_expr(px >= 5, True)
eval_expr(py >= 5, True)
def test_wf1_branches():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str) -> str:
return a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a)
d = (
conditional("test1")
.if_(x == 4)
.then(t2(a=b))
.elif_(x >= 5)
.then(t2(a=y))
.else_()
.fail("Unable to choose branch")
)
f = conditional("test2").if_(d == "hello ").then(t2(a="It is hello")).else_().then(t2(a="Not Hello!"))
return x, f
x = my_wf(a=5, b="hello ")
assert x == (7, "Not Hello!")
x = my_wf(a=2, b="hello ")
assert x == (4, "It is hello")
def test_wf1_branches_no_else():
with pytest.raises(NotImplementedError):
def foo():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str) -> str:
return a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a)
d = conditional("test1").if_(x == 4).then(t2(a=b)).elif_(x >= 5).then(t2(a=y))
conditional("test2").if_(x == 4).then(t2(a=b)).elif_(x >= 5).then(t2(a=y)).else_().fail("blah")
return x, d
foo()
def test_wf1_branches_failing():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: str) -> str:
return a
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a)
d = (
conditional("test1")
.if_(x == 4)
.then(t2(a=b))
.elif_(x >= 5)
.then(t2(a=y))
.else_()
.fail("All Branches failed")
)
return x, d
with pytest.raises(ValueError):
my_wf(a=1, b="hello ")
def test_cant_use_normal_tuples():
with pytest.raises(RestrictedTypeError):
@task
def t1(a: str) -> tuple:
return (a, 3)
def test_wf1_df():
@task
def t1(a: int) -> pandas.DataFrame:
return pandas.DataFrame(data={"col1": [a, 2], "col2": [a, 4]})
@task
def t2(df: pandas.DataFrame) -> pandas.DataFrame:
return df.append(pandas.DataFrame(data={"col1": [5, 10], "col2": [5, 10]}))
@workflow
def my_wf(a: int) -> pandas.DataFrame:
df = t1(a=a)
return t2(df=df)
x = my_wf(a=20)
assert isinstance(x, pandas.DataFrame)
result_df = x.reset_index(drop=True) == pandas.DataFrame(
data={"col1": [20, 2, 5, 10], "col2": [20, 4, 5, 10]}
).reset_index(drop=True)
assert result_df.all().all()
def test_lp_serialize():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
a = a + 2
return a, "world-" + str(a)
@task
def t2(a: str, b: str) -> str:
return b + a
@workflow
def my_subwf(a: int) -> (str, str):
x, y = t1(a=a)
u, v = t1(a=x)
return y, v
lp = launch_plan.LaunchPlan.create("serialize_test1", my_subwf)
lp_with_defaults = launch_plan.LaunchPlan.create("serialize_test2", my_subwf, default_inputs={"a": 3})
serialization_settings = context_manager.SerializationSettings(
project="proj",
domain="dom",
version="123",
image_config=ImageConfig(Image(name="name", fqn="asdf/fdsa", tag="123")),
env={},
)
sdk_lp = get_serializable(serialization_settings, lp)
assert len(sdk_lp.default_inputs.parameters) == 0
assert len(sdk_lp.fixed_inputs.literals) == 0
sdk_lp = get_serializable(serialization_settings, lp_with_defaults)
assert len(sdk_lp.default_inputs.parameters) == 1
assert len(sdk_lp.fixed_inputs.literals) == 0
parameter_a = sdk_lp.default_inputs.parameters["a"]
parameter_a = Parameter.from_flyte_idl(parameter_a.to_flyte_idl())
assert parameter_a.default is not None
def test_wf_container_task():
@task
def t1(a: int) -> (int, str):
return a + 2, str(a) + "-HELLO"
t2 = ContainerTask(
"raw",
image="alpine",
inputs=kwtypes(a=int, b=str),
input_data_dir="/tmp",
output_data_dir="/tmp",
command=["cat"],
arguments=["/tmp/a"],
)
def wf(a: int):
x, y = t1(a=a)
t2(a=x, b=y)
with task_mock(t2) as mock:
mock.side_effect = lambda a, b: None
assert t2(a=10, b="hello") is None
wf(a=10)
def test_wf_container_task_multiple():
square = ContainerTask(
name="square",
input_data_dir="/var/inputs",
output_data_dir="/var/outputs",
inputs=kwtypes(val=int),
outputs=kwtypes(out=int),
image="alpine",
command=["sh", "-c", "echo $(( {{.Inputs.val}} * {{.Inputs.val}} )) | tee /var/outputs/out"],
)
sum = ContainerTask(
name="sum",
input_data_dir="/var/flyte/inputs",
output_data_dir="/var/flyte/outputs",
inputs=kwtypes(x=int, y=int),
outputs=kwtypes(out=int),
image="alpine",
command=["sh", "-c", "echo $(( {{.Inputs.x}} + {{.Inputs.y}} )) | tee /var/flyte/outputs/out"],
)
@workflow
def raw_container_wf(val1: int, val2: int) -> int:
return sum(x=square(val=val1), y=square(val=val2))
with task_mock(square) as square_mock, task_mock(sum) as sum_mock:
square_mock.side_effect = lambda val: val * val
assert square(val=10) == 100
sum_mock.side_effect = lambda x, y: x + y
assert sum(x=10, y=10) == 20
assert raw_container_wf(val1=10, val2=10) == 200
def test_wf_tuple_fails():
with pytest.raises(RestrictedTypeError):
@task
def t1(a: tuple) -> (int, str):
return a[0] + 2, str(a) + "-HELLO"
def test_wf_typed_schema():
schema1 = FlyteSchema[kwtypes(x=int, y=str)]
@task
def t1() -> schema1:
s = schema1()
s.open().write(pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]}))
return s
@task
def t2(s: FlyteSchema[kwtypes(x=int, y=str)]) -> FlyteSchema[kwtypes(x=int)]:
df = s.open().all()
return df[s.column_names()[:-1]]
@workflow
def wf() -> FlyteSchema[kwtypes(x=int)]:
return t2(s=t1())
w = t1()
assert w is not None
df = w.open(override_mode=SchemaOpenMode.READ).all()
result_df = df.reset_index(drop=True) == pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]}).reset_index(
drop=True
)
assert result_df.all().all()
df = t2(s=w.as_readonly())
assert df is not None
result_df = df.reset_index(drop=True) == pandas.DataFrame(data={"x": [1, 2]}).reset_index(drop=True)
assert result_df.all().all()
x = wf()
df = x.open().all()
result_df = df.reset_index(drop=True) == pandas.DataFrame(data={"x": [1, 2]}).reset_index(drop=True)
assert result_df.all().all()
def test_wf_schema_to_df():
schema1 = FlyteSchema[kwtypes(x=int, y=str)]
@task
def t1() -> schema1:
s = schema1()
s.open().write(pandas.DataFrame(data={"x": [1, 2], "y": ["3", "4"]}))
return s
@task
def t2(df: pandas.DataFrame) -> int:
return len(df.columns.values)
@workflow
def wf() -> int:
return t2(df=t1())
x = wf()
assert x == 2
def test_dict_wf_with_constants():
@task
def t1(a: int) -> typing.NamedTuple("OutputsBC", t1_int_output=int, c=str):
return a + 2, "world"
@task
def t2(a: typing.Dict[str, str]) -> str:
return " ".join([v for k, v in a.items()])
@workflow
def my_wf(a: int, b: str) -> (int, str):
x, y = t1(a=a)
d = t2(a={"key1": b, "key2": y})
return x, d
x = my_wf(a=5, b="hello")
assert x == (7, "hello world")
def test_dict_wf_with_conversion():
@task
def t1(a: int) -> typing.Dict[str, str]:
return {"a": str(a)}
@task
def t2(a: dict) -> str:
print(f"HAHAH {a}")
return " ".join([v for k, v in a.items()])
@workflow
def my_wf(a: int) -> str:
return t2(a=t1(a=a))
with pytest.raises(TypeError):
my_wf(a=5)
def test_wf_with_empty_dict():
@task
def t1() -> typing.Dict:
return {}
@task
def t2(d: typing.Dict):
assert d == {}
@workflow
def wf():
d = t1()
t2(d=d)
wf()
def test_wf_with_catching_no_return():
@task
def t1() -> typing.Dict:
return {}
@task
def t2(d: typing.Dict):
assert d == {}
@task
def t3(s: str):
pass
with pytest.raises(AssertionError):
@workflow
def wf():
d = t1()
x = t2(d=d)
t3(s=x)
wf()
def test_wf_custom_types_missing_dataclass_json():
with pytest.raises(AssertionError):
@dataclass
class MyCustomType(object):
pass
@task
def t1(a: int) -> MyCustomType:
return MyCustomType()
def test_wf_custom_types():
@dataclass_json
@dataclass
class MyCustomType(object):
x: int
y: str
@task
def t1(a: int) -> MyCustomType:
return MyCustomType(x=a, y="t1")
@task
def t2(a: MyCustomType, b: str) -> (MyCustomType, int):
return MyCustomType(x=a.x, y=f"{a.y} {b}"), 5
@workflow
def my_wf(a: int, b: str) -> (MyCustomType, int):
return t2(a=t1(a=a), b=b)
c, v = my_wf(a=10, b="hello")
assert v == 5
assert c.x == 10
assert c.y == "t1 hello"
def test_arbit_class():
class Foo(object):
pass
with pytest.raises(ValueError):
@task
def t1(a: int) -> Foo:
return Foo()
def test_dataclass_more():
@dataclass_json
@dataclass
class Datum(object):
x: int
y: str
z: typing.Dict[int, str]
@task
def stringify(x: int) -> Datum:
return Datum(x=x, y=str(x), z={x: str(x)})
@task
def add(x: Datum, y: Datum) -> Datum:
x.z.update(y.z)
return Datum(x=x.x + y.x, y=x.y + y.y, z=x.z)
@workflow
def wf(x: int, y: int) -> Datum:
return add(x=stringify(x=x), y=stringify(x=y))
wf(x=10, y=20)
def test_environment():
@task(environment={"FOO": "foofoo", "BAZ": "baz"})
def t1(a: int) -> str:
a = a + 2
return "now it's " + str(a)
@workflow
def my_wf(a: int) -> str:
x = t1(a=a)
return x
serialization_settings = context_manager.SerializationSettings(
project="test_proj",
domain="test_domain",
version="abc",
image_config=ImageConfig(Image(name="name", fqn="image", tag="name")),
env={"FOO": "foo", "BAR": "bar"},
)
with context_manager.FlyteContext.current_context().new_compilation_context():
sdk_task = get_serializable(serialization_settings, t1)
assert sdk_task.container.env == {"FOO": "foofoo", "BAR": "bar", "BAZ": "baz"}
def test_resources():
    # Serialized tasks must carry their declared resource requests/limits;
    # a task declared without limits serializes an empty limits list.
    @task(requests=Resources(cpu="1"), limits=Resources(cpu="2", mem="400M"))
    def t1(a: int) -> str:
        a = a + 2
        return "now it's " + str(a)

    @task(requests=Resources(cpu="3"))
    def t2(a: int) -> str:
        a = a + 200
        return "now it's " + str(a)

    @workflow
    def my_wf(a: int) -> str:
        x = t1(a=a)
        return x

    serialization_settings = context_manager.SerializationSettings(
        project="test_proj",
        domain="test_domain",
        version="abc",
        image_config=ImageConfig(Image(name="name", fqn="image", tag="name")),
        env={},
    )
    with context_manager.FlyteContext.current_context().new_compilation_context():
        sdk_task = get_serializable(serialization_settings, t1)
        assert sdk_task.container.resources.requests == [
            _resource_models.ResourceEntry(_resource_models.ResourceName.CPU, "1")
        ]
        assert sdk_task.container.resources.limits == [
            _resource_models.ResourceEntry(_resource_models.ResourceName.CPU, "2"),
            _resource_models.ResourceEntry(_resource_models.ResourceName.MEMORY, "400M"),
        ]
        sdk_task2 = get_serializable(serialization_settings, t2)
        assert sdk_task2.container.resources.requests == [
            _resource_models.ResourceEntry(_resource_models.ResourceName.CPU, "3")
        ]
        assert sdk_task2.container.resources.limits == []
def test_wf_explicitly_returning_empty_task():
    # A workflow that returns the call of a void task should locally
    # evaluate to None rather than leaking a promise object.
    @task
    def t1():
        ...

    @workflow
    def my_subwf():
        return t1()  # This forces the wf _local_execute to handle VoidPromises

    assert my_subwf() is None
| true
| true
|
790a285d87c8d61d8b263589f416094d3fc693dc
| 2,487
|
py
|
Python
|
utils/utils.py
|
tperrier/mwachx
|
94616659dc29843e661b2ecc9a2e7f1d4e81b5a4
|
[
"Apache-2.0"
] | 3
|
2015-05-27T14:35:49.000Z
|
2016-02-26T21:04:32.000Z
|
utils/utils.py
|
tperrier/mwachx
|
94616659dc29843e661b2ecc9a2e7f1d4e81b5a4
|
[
"Apache-2.0"
] | 375
|
2015-01-31T10:08:34.000Z
|
2021-06-10T19:44:21.000Z
|
utils/utils.py
|
tperrier/mwachx
|
94616659dc29843e661b2ecc9a2e7f1d4e81b5a4
|
[
"Apache-2.0"
] | 6
|
2016-01-10T19:52:41.000Z
|
2020-06-15T22:07:24.000Z
|
import datetime
from constance import config
from django.conf import settings
from django.utils import dateparse , timezone
import django.db.models as db
def today(today=None):
    """Return the effective "current" date.

    An explicit *today* argument wins (ISO 'YYYY-MM-DD' strings are
    parsed, date objects pass through).  Otherwise the real calendar
    date is used unless settings.FAKE_DATE is enabled, in which case the
    date comes from constance's CURRENT_DATE, which may be stored either
    as a date object or as a 'YYYY-MM-DD' string.
    """
    if today is not None:
        if isinstance(today, basestring):
            return dateparse.parse_date(today)
        return today
    if not getattr(settings, 'FAKE_DATE', True):
        return datetime.date.today()
    current = config.CURRENT_DATE
    if isinstance(current, datetime.date):
        return current
    year, month, day = (int(part) for part in current.split('-'))
    return datetime.date(year, month, day)
def parse_date(datestr):
    """Parse a 'DD-MM-YYYY' string into a datetime.date."""
    parsed = datetime.datetime.strptime(datestr, '%d-%m-%Y')
    return parsed.date()
def make_date(date, month=0, day=0):
    """Build a timezone-aware datetime at midnight.

    *date* may be a date object, or a year (int) combined with *month*
    and *day* when combining with a time fails.
    """
    try:
        naive = datetime.datetime.combine(date, datetime.time())
    except TypeError:
        # *date* was not a date object: treat it as a year.
        naive = datetime.datetime(date, month, day)
    return timezone.make_aware(naive)
def angular_datepicker(datestr):
    """Normalize an angular datepicker value to a date.

    Accepts None, an existing date/datetime (anything exposing
    isoformat), or an ISO timestamp string such as
    '2015-10-18T05:54:53.529Z' (only the leading date part is used).
    """
    if datestr is None:
        return None
    if hasattr(datestr, 'isoformat'):
        # Already a date-like object; pass it straight through.
        return datestr
    return datetime.datetime.strptime(datestr[:10], '%Y-%m-%d').date()
def null_boolean_display(bool_value):
    """Map a nullable boolean to its display string.

    True -> 'Yes', False -> 'No', None -> 'Unknown'.

    Fixes the user-facing typo 'Unkown' -> 'Unknown' in the None label.
    """
    return {True: 'Yes',
            False: 'No',
            None: 'Unknown'}.get(bool_value)
def null_boolean_form_value(bool_value):
    """Return the NullBooleanSelect widget value for *bool_value*.

    Django's NullBooleanSelect encodes None/True/False as '1'/'2'/'3'.
    """
    encoding = {None: '1', True: '2', False: '3'}
    return encoding.get(bool_value)
def null_boolean_from_form(form_value):
    """Decode a NullBooleanSelect form value back to None/True/False."""
    decoding = {'1': None, '2': True, '3': False}
    return decoding.get(form_value)
def days_as_str(days):
    """Render a day count compactly: plain days within one week, whole
    (rounded) weeks beyond that."""
    if -7 <= days <= 7:
        return '{:d}d'.format(days)
    weeks = int(round(days / 7.0))
    return '{:d}w'.format(weeks)
class SQLiteDate(db.Func):
    # Wraps a date expression in SQLite's JULIANDAY(), which yields a
    # fractional Julian day number usable for date arithmetic in SQL.
    function = 'JULIANDAY'
def sqlite_date_diff(start_date, end_date, days=False):
    """Return a Django ORM expression for end_date - start_date.

    JULIANDAY differences are in days; by default the result is scaled
    to seconds, unless *days* is True.
    """
    # Note: identity comparison preserved from the original contract.
    multiplier = 86400 if days is False else 1
    delta = SQLiteDate(end_date) - SQLiteDate(start_date)
    return db.ExpressionWrapper(delta * multiplier, db.IntegerField())
def sql_count_when(*qargs, **kwargs):
    """Build a Count aggregate over rows matching the given conditions.

    qargs: models.Q objects, AND-ed together.
    kwargs: field lookups, also AND-ed into the condition.
    """
    condition = db.Q(**kwargs)
    for extra_q in qargs:
        condition &= extra_q
    matched = db.When(condition, then=1)
    return db.Count(db.Case(matched, output_field=db.IntegerField()))
| 33.608108
| 110
| 0.686771
|
import datetime
from constance import config
from django.conf import settings
from django.utils import dateparse , timezone
import django.db.models as db
def today(today=None):
if today is not None:
return dateparse.parse_date(today) if isinstance(today,basestring) else today
elif not getattr(settings,'FAKE_DATE',True):
return datetime.date.today()
elif isinstance(config.CURRENT_DATE,datetime.date):
return config.CURRENT_DATE
return datetime.date(*[int(i) for i in config.CURRENT_DATE.split('-')])
def parse_date(datestr):
return datetime.datetime.strptime(datestr,'%d-%m-%Y').date()
def make_date(date,month=0,day=0):
try:
new_date = datetime.datetime.combine(date,datetime.time())
except TypeError as e:
new_date = datetime.datetime(date,month,day)
return timezone.make_aware(new_date)
def angular_datepicker(datestr):
if datestr is None or hasattr(datestr,'isoformat'):
return datestr
return datetime.datetime.strptime(datestr[:10],'%Y-%m-%d').date()
def null_boolean_display(bool_value):
return {True:'Yes',
False:'No',
None:'Unkown'}.get(bool_value)
def null_boolean_form_value(bool_value):
return {True:'2',False:'3',None:'1'}.get(bool_value)
def null_boolean_from_form(form_value):
return {'1':None,'2':True,'3':False}.get(form_value)
def days_as_str(days):
if -7 <= days <= 7:
return '{:d}d'.format(days)
return '{:d}w'.format(int(round(days/7.0)))
class SQLiteDate(db.Func):
function = 'JULIANDAY'
def sqlite_date_diff(start_date,end_date,days=False):
scale = 86400 if days is False else 1
return db.ExpressionWrapper( (SQLiteDate(end_date) - SQLiteDate(start_date)) * scale , db.IntegerField() )
def sql_count_when(*qargs,**kwargs):
condition = db.Q(**kwargs)
for q in qargs:
condition &= q
return db.Count( db.Case(
db.When(condition,then=1),output_field=db.IntegerField(),
))
| true
| true
|
790a292b8c7580b1fc8ea666e1f9ccc712fd9c7d
| 1,664
|
py
|
Python
|
src/sagemaker_training/_entry_point_type.py
|
bstriner/sagemaker-training-toolkit
|
81a4323761a5327baaf0d24157b9428919b5cc67
|
[
"Apache-2.0"
] | 248
|
2020-04-21T09:25:03.000Z
|
2022-03-24T22:24:26.000Z
|
src/sagemaker_training/_entry_point_type.py
|
bstriner/sagemaker-training-toolkit
|
81a4323761a5327baaf0d24157b9428919b5cc67
|
[
"Apache-2.0"
] | 68
|
2020-04-22T09:31:18.000Z
|
2022-03-19T06:44:36.000Z
|
src/sagemaker_training/_entry_point_type.py
|
bstriner/sagemaker-training-toolkit
|
81a4323761a5327baaf0d24157b9428919b5cc67
|
[
"Apache-2.0"
] | 60
|
2020-06-02T20:52:24.000Z
|
2022-03-16T18:20:41.000Z
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License'). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the 'license' file accompanying this file. This file is
# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""This module contains an enumerated type and helper functions related
to different types of training entry points (Python package, Python
script, bash script, etc.)
"""
import enum
import os
class _EntryPointType(enum.Enum):
    """Enumerated type consisting of valid types of training entry points."""

    PYTHON_PACKAGE = "PYTHON_PACKAGE"
    PYTHON_PROGRAM = "PYTHON_PROGRAM"
    COMMAND = "COMMAND"


# Public aliases so callers can reference members without the private class.
PYTHON_PACKAGE = _EntryPointType.PYTHON_PACKAGE
PYTHON_PROGRAM = _EntryPointType.PYTHON_PROGRAM
COMMAND = _EntryPointType.COMMAND


def get(path, name):  # type: (str, str) -> _EntryPointType
    """Classify the training entry point.

    Args:
        path (string): Directory where the entry point is located.
        name (string): Name of the entry point file.

    Returns:
        (_EntryPointType): The type of the entry point.
    """
    # Shell scripts always run as commands, regardless of directory contents.
    if name.endswith(".sh"):
        return _EntryPointType.COMMAND
    # A setup.py anywhere in the directory marks an installable package.
    if "setup.py" in os.listdir(path):
        return _EntryPointType.PYTHON_PACKAGE
    if name.endswith(".py"):
        return _EntryPointType.PYTHON_PROGRAM
    return _EntryPointType.COMMAND
| 32.627451
| 77
| 0.722957
|
import enum
import os
class _EntryPointType(enum.Enum):
PYTHON_PACKAGE = "PYTHON_PACKAGE"
PYTHON_PROGRAM = "PYTHON_PROGRAM"
COMMAND = "COMMAND"
PYTHON_PACKAGE = _EntryPointType.PYTHON_PACKAGE
PYTHON_PROGRAM = _EntryPointType.PYTHON_PROGRAM
COMMAND = _EntryPointType.COMMAND
def get(path, name):
if name.endswith(".sh"):
return _EntryPointType.COMMAND
elif "setup.py" in os.listdir(path):
return _EntryPointType.PYTHON_PACKAGE
elif name.endswith(".py"):
return _EntryPointType.PYTHON_PROGRAM
else:
return _EntryPointType.COMMAND
| true
| true
|
790a29d4059727dd8a425c767125df4baaa22f15
| 459
|
py
|
Python
|
simple_notes/notes/migrations/0004_auto_20201008_0513.py
|
Namnetsy/simple-notes-django-app
|
385fa829c43162e1c1a4682acc4668623e6e47b3
|
[
"MIT"
] | null | null | null |
simple_notes/notes/migrations/0004_auto_20201008_0513.py
|
Namnetsy/simple-notes-django-app
|
385fa829c43162e1c1a4682acc4668623e6e47b3
|
[
"MIT"
] | 9
|
2021-04-08T20:20:53.000Z
|
2022-03-12T00:54:21.000Z
|
simple_notes/notes/migrations/0004_auto_20201008_0513.py
|
Namnetsy/simple-notes-django-app
|
385fa829c43162e1c1a4682acc4668623e6e47b3
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.2 on 2020-10-08 05:13
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
    """Enforce that a user can publicly share a given note only once
    (unique_together on (user, note) for PublicSharedNote)."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('notes', '0003_auto_20201006_0607'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='publicsharednote',
            unique_together={('user', 'note')},
        ),
    ]
| 22.95
| 66
| 0.649237
|
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('notes', '0003_auto_20201006_0607'),
]
operations = [
migrations.AlterUniqueTogether(
name='publicsharednote',
unique_together={('user', 'note')},
),
]
| true
| true
|
790a2a7d0397802902407a821f1b2a9907c9836c
| 17,528
|
py
|
Python
|
instrumentation/opentelemetry-instrumentation-requests/tests/test_requests_integration.py
|
sanketmehta28/opentelemetry-python-contrib
|
f7fd1e069313dce6c4939146ade173a727cc4104
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 3
|
2019-11-26T14:31:09.000Z
|
2020-01-09T23:04:49.000Z
|
instrumentation/opentelemetry-instrumentation-requests/tests/test_requests_integration.py
|
sanketmehta28/opentelemetry-python-contrib
|
f7fd1e069313dce6c4939146ade173a727cc4104
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 16
|
2020-02-07T10:01:02.000Z
|
2020-04-06T22:03:31.000Z
|
instrumentation/opentelemetry-instrumentation-requests/tests/test_requests_integration.py
|
sanketmehta28/opentelemetry-python-contrib
|
f7fd1e069313dce6c4939146ade173a727cc4104
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 5
|
2020-02-05T14:59:12.000Z
|
2020-04-03T15:34:16.000Z
|
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
from unittest import mock
import httpretty
import requests
from requests.adapters import BaseAdapter
from requests.models import Response
import opentelemetry.instrumentation.requests
from opentelemetry import context, trace
# FIXME: fix the importing of this private attribute when the location of the _SUPPRESS_HTTP_INSTRUMENTATION_KEY is defined.
from opentelemetry.context import _SUPPRESS_HTTP_INSTRUMENTATION_KEY
from opentelemetry.instrumentation.requests import RequestsInstrumentor
from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY
from opentelemetry.propagate import get_global_textmap, set_global_textmap
from opentelemetry.sdk import resources
from opentelemetry.semconv.trace import SpanAttributes
from opentelemetry.test.mock_textmap import MockTextMapPropagator
from opentelemetry.test.test_base import TestBase
from opentelemetry.trace import StatusCode
from opentelemetry.util.http import get_excluded_urls
class TransportMock:
    """Minimal stand-in for a response transport; read() yields nothing."""

    def read(self, *args, **kwargs):
        pass
class MyAdapter(BaseAdapter):
    """Transport adapter that always replays the canned response it was given."""

    def __init__(self, response):
        super().__init__()
        self._response = response

    def send(self, *args, **kwargs):  # pylint:disable=signature-differs
        # Ignore the outgoing request entirely and return the fixed response.
        return self._response

    def close(self):
        pass
class InvalidResponseObjectException(Exception):
    """Exception whose ``response`` attribute is not a requests.Response.

    Used to verify the instrumentation tolerates exceptions carrying a
    malformed response object.
    """

    def __init__(self):
        super().__init__()
        self.response = {}
class RequestsIntegrationTestBase(abc.ABC):
    """Shared test suite for the requests instrumentation.

    Concrete subclasses supply ``perform_request`` so the same assertions
    run against both the top-level requests API and the prepared-request
    flow.
    """

    # pylint: disable=no-member
    # pylint: disable=too-many-public-methods
    URL = "http://httpbin.org/status/200"

    # pylint: disable=invalid-name
    def setUp(self):
        super().setUp()
        # Also exercise the env-var based URL exclusion configuration path.
        self.env_patch = mock.patch.dict(
            "os.environ",
            {
                "OTEL_PYTHON_REQUESTS_EXCLUDED_URLS": "http://localhost/env_excluded_arg/123,env_excluded_noarg"
            },
        )
        self.env_patch.start()
        self.exclude_patch = mock.patch(
            "opentelemetry.instrumentation.requests._excluded_urls_from_env",
            get_excluded_urls("REQUESTS"),
        )
        self.exclude_patch.start()
        RequestsInstrumentor().instrument()
        httpretty.enable()
        httpretty.register_uri(httpretty.GET, self.URL, body="Hello!")

    # pylint: disable=invalid-name
    def tearDown(self):
        super().tearDown()
        self.env_patch.stop()
        RequestsInstrumentor().uninstrument()
        httpretty.disable()

    def assert_span(self, exporter=None, num_spans=1):
        """Assert the exporter holds exactly *num_spans* finished spans.

        Returns None, the single span, or the span list depending on count.
        """
        if exporter is None:
            exporter = self.memory_exporter
        span_list = exporter.get_finished_spans()
        self.assertEqual(num_spans, len(span_list))
        if num_spans == 0:
            return None
        if num_spans == 1:
            return span_list[0]
        return span_list

    @staticmethod
    @abc.abstractmethod
    def perform_request(url: str, session: requests.Session = None):
        # Subclasses issue a GET to *url*, optionally through *session*.
        pass

    def test_basic(self):
        result = self.perform_request(self.URL)
        self.assertEqual(result.text, "Hello!")
        span = self.assert_span()
        self.assertIs(span.kind, trace.SpanKind.CLIENT)
        self.assertEqual(span.name, "HTTP GET")
        self.assertEqual(
            span.attributes,
            {
                SpanAttributes.HTTP_METHOD: "GET",
                SpanAttributes.HTTP_URL: self.URL,
                SpanAttributes.HTTP_STATUS_CODE: 200,
            },
        )
        self.assertIs(span.status.status_code, trace.StatusCode.UNSET)
        self.assertEqualSpanInstrumentationInfo(
            span, opentelemetry.instrumentation.requests
        )

    def test_name_callback(self):
        # A str-returning name_callback replaces the default span name.
        def name_callback(method, url):
            return "GET" + url

        RequestsInstrumentor().uninstrument()
        RequestsInstrumentor().instrument(name_callback=name_callback)
        result = self.perform_request(self.URL)
        self.assertEqual(result.text, "Hello!")
        span = self.assert_span()
        self.assertEqual(span.name, "GET" + self.URL)

    def test_excluded_urls_explicit(self):
        url_404 = "http://httpbin.org/status/404"
        httpretty.register_uri(
            httpretty.GET,
            url_404,
            status=404,
        )
        RequestsInstrumentor().uninstrument()
        RequestsInstrumentor().instrument(excluded_urls=".*/404")
        self.perform_request(self.URL)
        self.perform_request(url_404)
        # Only the non-excluded request produces a span.
        self.assert_span(num_spans=1)

    def test_excluded_urls_from_env(self):
        url = "http://localhost/env_excluded_arg/123"
        httpretty.register_uri(
            httpretty.GET,
            url,
            status=200,
        )
        RequestsInstrumentor().uninstrument()
        RequestsInstrumentor().instrument()
        self.perform_request(self.URL)
        self.perform_request(url)
        # The URL excluded via OTEL_PYTHON_REQUESTS_EXCLUDED_URLS is skipped.
        self.assert_span(num_spans=1)

    def test_name_callback_default(self):
        # A non-str name_callback result falls back to the default name.
        def name_callback(method, url):
            return 123

        RequestsInstrumentor().uninstrument()
        RequestsInstrumentor().instrument(name_callback=name_callback)
        result = self.perform_request(self.URL)
        self.assertEqual(result.text, "Hello!")
        span = self.assert_span()
        self.assertEqual(span.name, "HTTP GET")

    def test_not_foundbasic(self):
        url_404 = "http://httpbin.org/status/404"
        httpretty.register_uri(
            httpretty.GET,
            url_404,
            status=404,
        )
        result = self.perform_request(url_404)
        self.assertEqual(result.status_code, 404)
        span = self.assert_span()
        self.assertEqual(
            span.attributes.get(SpanAttributes.HTTP_STATUS_CODE), 404
        )
        self.assertIs(
            span.status.status_code,
            trace.StatusCode.ERROR,
        )

    def test_uninstrument(self):
        RequestsInstrumentor().uninstrument()
        result = self.perform_request(self.URL)
        self.assertEqual(result.text, "Hello!")
        self.assert_span(num_spans=0)
        # instrument again to avoid annoying warning message
        RequestsInstrumentor().instrument()

    def test_uninstrument_session(self):
        session1 = requests.Session()
        RequestsInstrumentor().uninstrument_session(session1)
        result = self.perform_request(self.URL, session1)
        self.assertEqual(result.text, "Hello!")
        self.assert_span(num_spans=0)
        # Test that other sessions as well as global requests is still
        # instrumented
        session2 = requests.Session()
        result = self.perform_request(self.URL, session2)
        self.assertEqual(result.text, "Hello!")
        self.assert_span()
        self.memory_exporter.clear()
        result = self.perform_request(self.URL)
        self.assertEqual(result.text, "Hello!")
        self.assert_span()

    def test_suppress_instrumentation(self):
        token = context.attach(
            context.set_value(_SUPPRESS_INSTRUMENTATION_KEY, True)
        )
        try:
            result = self.perform_request(self.URL)
            self.assertEqual(result.text, "Hello!")
        finally:
            context.detach(token)
        self.assert_span(num_spans=0)

    def test_suppress_http_instrumentation(self):
        token = context.attach(
            context.set_value(_SUPPRESS_HTTP_INSTRUMENTATION_KEY, True)
        )
        try:
            result = self.perform_request(self.URL)
            self.assertEqual(result.text, "Hello!")
        finally:
            context.detach(token)
        self.assert_span(num_spans=0)

    def test_not_recording(self):
        with mock.patch("opentelemetry.trace.INVALID_SPAN") as mock_span:
            RequestsInstrumentor().uninstrument()
            RequestsInstrumentor().instrument(
                tracer_provider=trace.NoOpTracerProvider()
            )
            mock_span.is_recording.return_value = False
            result = self.perform_request(self.URL)
            self.assertEqual(result.text, "Hello!")
            self.assert_span(None, 0)
            self.assertFalse(mock_span.is_recording())
            self.assertTrue(mock_span.is_recording.called)
            self.assertFalse(mock_span.set_attribute.called)
            self.assertFalse(mock_span.set_status.called)

    def test_distributed_context(self):
        # The configured propagator must inject trace/span ids as headers.
        previous_propagator = get_global_textmap()
        try:
            set_global_textmap(MockTextMapPropagator())
            result = self.perform_request(self.URL)
            self.assertEqual(result.text, "Hello!")
            span = self.assert_span()
            headers = dict(httpretty.last_request().headers)
            self.assertIn(MockTextMapPropagator.TRACE_ID_KEY, headers)
            self.assertEqual(
                str(span.get_span_context().trace_id),
                headers[MockTextMapPropagator.TRACE_ID_KEY],
            )
            self.assertIn(MockTextMapPropagator.SPAN_ID_KEY, headers)
            self.assertEqual(
                str(span.get_span_context().span_id),
                headers[MockTextMapPropagator.SPAN_ID_KEY],
            )
        finally:
            set_global_textmap(previous_propagator)

    def test_span_callback(self):
        RequestsInstrumentor().uninstrument()

        def span_callback(span, result: requests.Response):
            span.set_attribute(
                "http.response.body", result.content.decode("utf-8")
            )

        RequestsInstrumentor().instrument(
            tracer_provider=self.tracer_provider,
            span_callback=span_callback,
        )
        result = self.perform_request(self.URL)
        self.assertEqual(result.text, "Hello!")
        span = self.assert_span()
        self.assertEqual(
            span.attributes,
            {
                SpanAttributes.HTTP_METHOD: "GET",
                SpanAttributes.HTTP_URL: self.URL,
                SpanAttributes.HTTP_STATUS_CODE: 200,
                "http.response.body": "Hello!",
            },
        )

    def test_custom_tracer_provider(self):
        resource = resources.Resource.create({})
        result = self.create_tracer_provider(resource=resource)
        tracer_provider, exporter = result
        RequestsInstrumentor().uninstrument()
        RequestsInstrumentor().instrument(tracer_provider=tracer_provider)
        result = self.perform_request(self.URL)
        self.assertEqual(result.text, "Hello!")
        span = self.assert_span(exporter=exporter)
        self.assertIs(span.resource, resource)

    @mock.patch(
        "requests.adapters.HTTPAdapter.send",
        side_effect=requests.RequestException,
    )
    def test_requests_exception_without_response(self, *_, **__):
        with self.assertRaises(requests.RequestException):
            self.perform_request(self.URL)
        span = self.assert_span()
        self.assertEqual(
            span.attributes,
            {
                SpanAttributes.HTTP_METHOD: "GET",
                SpanAttributes.HTTP_URL: self.URL,
            },
        )
        self.assertEqual(span.status.status_code, StatusCode.ERROR)

    # NOTE(review): this first mocked_response binding is shadowed by the
    # identical one further down before any decorator reads it; it is dead.
    mocked_response = requests.Response()
    mocked_response.status_code = 500
    mocked_response.reason = "Internal Server Error"

    @mock.patch(
        "requests.adapters.HTTPAdapter.send",
        side_effect=InvalidResponseObjectException,
    )
    def test_requests_exception_without_proper_response_type(self, *_, **__):
        with self.assertRaises(InvalidResponseObjectException):
            self.perform_request(self.URL)
        span = self.assert_span()
        self.assertEqual(
            span.attributes,
            {
                SpanAttributes.HTTP_METHOD: "GET",
                SpanAttributes.HTTP_URL: self.URL,
            },
        )
        self.assertEqual(span.status.status_code, StatusCode.ERROR)

    # Class-level fixture read by the decorator below at class-body
    # execution time.
    mocked_response = requests.Response()
    mocked_response.status_code = 500
    mocked_response.reason = "Internal Server Error"

    @mock.patch(
        "requests.adapters.HTTPAdapter.send",
        side_effect=requests.RequestException(response=mocked_response),
    )
    def test_requests_exception_with_response(self, *_, **__):
        with self.assertRaises(requests.RequestException):
            self.perform_request(self.URL)
        span = self.assert_span()
        self.assertEqual(
            span.attributes,
            {
                SpanAttributes.HTTP_METHOD: "GET",
                SpanAttributes.HTTP_URL: self.URL,
                SpanAttributes.HTTP_STATUS_CODE: 500,
            },
        )
        self.assertEqual(span.status.status_code, StatusCode.ERROR)

    @mock.patch("requests.adapters.HTTPAdapter.send", side_effect=Exception)
    def test_requests_basic_exception(self, *_, **__):
        with self.assertRaises(Exception):
            self.perform_request(self.URL)
        span = self.assert_span()
        self.assertEqual(span.status.status_code, StatusCode.ERROR)

    @mock.patch(
        "requests.adapters.HTTPAdapter.send", side_effect=requests.Timeout
    )
    def test_requests_timeout_exception(self, *_, **__):
        with self.assertRaises(Exception):
            self.perform_request(self.URL)
        span = self.assert_span()
        self.assertEqual(span.status.status_code, StatusCode.ERROR)

    def test_adapter_with_custom_response(self):
        # A custom adapter's hand-built Response must still be instrumented.
        response = Response()
        response.status_code = 210
        response.reason = "hello adapter"
        response.raw = TransportMock()
        session = requests.Session()
        session.mount(self.URL, MyAdapter(response))
        self.perform_request(self.URL, session)
        span = self.assert_span()
        self.assertEqual(
            span.attributes,
            {
                "http.method": "GET",
                "http.url": self.URL,
                "http.status_code": 210,
            },
        )
class TestRequestsIntegration(RequestsIntegrationTestBase, TestBase):
    """Runs the shared suite through the top-level requests API."""

    @staticmethod
    def perform_request(url: str, session: requests.Session = None):
        if session is None:
            return requests.get(url)
        return session.get(url)

    def test_invalid_url(self):
        # A malformed URL raises before anything is sent, but an error span
        # recording the attempt is still produced.
        url = "http://[::1/nope"
        with self.assertRaises(ValueError):
            requests.post(url)
        span = self.assert_span()
        self.assertEqual(span.name, "HTTP POST")
        self.assertEqual(
            span.attributes,
            {SpanAttributes.HTTP_METHOD: "POST", SpanAttributes.HTTP_URL: url},
        )
        self.assertEqual(span.status.status_code, StatusCode.ERROR)

    def test_credential_removal(self):
        # userinfo must be stripped from the recorded http.url attribute.
        new_url = "http://username:password@httpbin.org/status/200"
        self.perform_request(new_url)
        span = self.assert_span()
        self.assertEqual(span.attributes[SpanAttributes.HTTP_URL], self.URL)

    def test_if_headers_equals_none(self):
        result = requests.get(self.URL, headers=None)
        self.assertEqual(result.text, "Hello!")
        self.assert_span()
class TestRequestsIntegrationPreparedRequest(
    RequestsIntegrationTestBase, TestBase
):
    """Runs the shared suite through Session.prepare_request()/send()."""

    @staticmethod
    def perform_request(url: str, session: requests.Session = None):
        if session is None:
            session = requests.Session()
        request = requests.Request("GET", url)
        prepared_request = session.prepare_request(request)
        return session.send(prepared_request)
class TestRequestsIntergrationMetric(TestBase):
    """Verifies the metric data points emitted for instrumented requests."""

    # NOTE(review): "Intergration" is misspelled, but renaming would change
    # the public test class name.
    URL = "http://examplehost:8000/status/200"

    def setUp(self):
        super().setUp()
        RequestsInstrumentor().instrument(meter_provider=self.meter_provider)
        httpretty.enable()
        httpretty.register_uri(httpretty.GET, self.URL, body="Hello!")

    def tearDown(self):
        super().tearDown()
        RequestsInstrumentor().uninstrument()
        httpretty.disable()

    @staticmethod
    def perform_request(url: str) -> requests.Response:
        return requests.get(url)

    def test_basic_metric_success(self):
        self.perform_request(self.URL)
        # Every data point on every emitted metric must carry the HTTP
        # semantic-convention attributes below and a count of one.
        expected_attributes = {
            "http.status_code": 200,
            "http.host": "examplehost",
            "net.peer.port": 8000,
            "net.peer.name": "examplehost",
            "http.method": "GET",
            "http.flavor": "1.1",
            "http.scheme": "http",
        }
        for (
            resource_metrics
        ) in self.memory_metrics_reader.get_metrics_data().resource_metrics:
            for scope_metrics in resource_metrics.scope_metrics:
                for metric in scope_metrics.metrics:
                    for data_point in metric.data.data_points:
                        self.assertDictEqual(
                            expected_attributes, dict(data_point.attributes)
                        )
                        self.assertEqual(data_point.count, 1)
| 32.82397
| 124
| 0.641545
|
import abc
from unittest import mock
import httpretty
import requests
from requests.adapters import BaseAdapter
from requests.models import Response
import opentelemetry.instrumentation.requests
from opentelemetry import context, trace
from opentelemetry.context import _SUPPRESS_HTTP_INSTRUMENTATION_KEY
from opentelemetry.instrumentation.requests import RequestsInstrumentor
from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY
from opentelemetry.propagate import get_global_textmap, set_global_textmap
from opentelemetry.sdk import resources
from opentelemetry.semconv.trace import SpanAttributes
from opentelemetry.test.mock_textmap import MockTextMapPropagator
from opentelemetry.test.test_base import TestBase
from opentelemetry.trace import StatusCode
from opentelemetry.util.http import get_excluded_urls
class TransportMock:
def read(self, *args, **kwargs):
pass
class MyAdapter(BaseAdapter):
def __init__(self, response):
super().__init__()
self._response = response
def send(self, *args, **kwargs):
return self._response
def close(self):
pass
class InvalidResponseObjectException(Exception):
def __init__(self):
super().__init__()
self.response = {}
class RequestsIntegrationTestBase(abc.ABC):
URL = "http://httpbin.org/status/200"
def setUp(self):
super().setUp()
self.env_patch = mock.patch.dict(
"os.environ",
{
"OTEL_PYTHON_REQUESTS_EXCLUDED_URLS": "http://localhost/env_excluded_arg/123,env_excluded_noarg"
},
)
self.env_patch.start()
self.exclude_patch = mock.patch(
"opentelemetry.instrumentation.requests._excluded_urls_from_env",
get_excluded_urls("REQUESTS"),
)
self.exclude_patch.start()
RequestsInstrumentor().instrument()
httpretty.enable()
httpretty.register_uri(httpretty.GET, self.URL, body="Hello!")
def tearDown(self):
super().tearDown()
self.env_patch.stop()
RequestsInstrumentor().uninstrument()
httpretty.disable()
def assert_span(self, exporter=None, num_spans=1):
if exporter is None:
exporter = self.memory_exporter
span_list = exporter.get_finished_spans()
self.assertEqual(num_spans, len(span_list))
if num_spans == 0:
return None
if num_spans == 1:
return span_list[0]
return span_list
@staticmethod
@abc.abstractmethod
def perform_request(url: str, session: requests.Session = None):
pass
def test_basic(self):
result = self.perform_request(self.URL)
self.assertEqual(result.text, "Hello!")
span = self.assert_span()
self.assertIs(span.kind, trace.SpanKind.CLIENT)
self.assertEqual(span.name, "HTTP GET")
self.assertEqual(
span.attributes,
{
SpanAttributes.HTTP_METHOD: "GET",
SpanAttributes.HTTP_URL: self.URL,
SpanAttributes.HTTP_STATUS_CODE: 200,
},
)
self.assertIs(span.status.status_code, trace.StatusCode.UNSET)
self.assertEqualSpanInstrumentationInfo(
span, opentelemetry.instrumentation.requests
)
def test_name_callback(self):
def name_callback(method, url):
return "GET" + url
RequestsInstrumentor().uninstrument()
RequestsInstrumentor().instrument(name_callback=name_callback)
result = self.perform_request(self.URL)
self.assertEqual(result.text, "Hello!")
span = self.assert_span()
self.assertEqual(span.name, "GET" + self.URL)
def test_excluded_urls_explicit(self):
url_404 = "http://httpbin.org/status/404"
httpretty.register_uri(
httpretty.GET,
url_404,
status=404,
)
RequestsInstrumentor().uninstrument()
RequestsInstrumentor().instrument(excluded_urls=".*/404")
self.perform_request(self.URL)
self.perform_request(url_404)
self.assert_span(num_spans=1)
def test_excluded_urls_from_env(self):
url = "http://localhost/env_excluded_arg/123"
httpretty.register_uri(
httpretty.GET,
url,
status=200,
)
RequestsInstrumentor().uninstrument()
RequestsInstrumentor().instrument()
self.perform_request(self.URL)
self.perform_request(url)
self.assert_span(num_spans=1)
def test_name_callback_default(self):
def name_callback(method, url):
return 123
RequestsInstrumentor().uninstrument()
RequestsInstrumentor().instrument(name_callback=name_callback)
result = self.perform_request(self.URL)
self.assertEqual(result.text, "Hello!")
span = self.assert_span()
self.assertEqual(span.name, "HTTP GET")
def test_not_foundbasic(self):
url_404 = "http://httpbin.org/status/404"
httpretty.register_uri(
httpretty.GET,
url_404,
status=404,
)
result = self.perform_request(url_404)
self.assertEqual(result.status_code, 404)
span = self.assert_span()
self.assertEqual(
span.attributes.get(SpanAttributes.HTTP_STATUS_CODE), 404
)
self.assertIs(
span.status.status_code,
trace.StatusCode.ERROR,
)
def test_uninstrument(self):
RequestsInstrumentor().uninstrument()
result = self.perform_request(self.URL)
self.assertEqual(result.text, "Hello!")
self.assert_span(num_spans=0)
RequestsInstrumentor().instrument()
def test_uninstrument_session(self):
session1 = requests.Session()
RequestsInstrumentor().uninstrument_session(session1)
result = self.perform_request(self.URL, session1)
self.assertEqual(result.text, "Hello!")
self.assert_span(num_spans=0)
session2 = requests.Session()
result = self.perform_request(self.URL, session2)
self.assertEqual(result.text, "Hello!")
self.assert_span()
self.memory_exporter.clear()
result = self.perform_request(self.URL)
self.assertEqual(result.text, "Hello!")
self.assert_span()
def test_suppress_instrumentation(self):
token = context.attach(
context.set_value(_SUPPRESS_INSTRUMENTATION_KEY, True)
)
try:
result = self.perform_request(self.URL)
self.assertEqual(result.text, "Hello!")
finally:
context.detach(token)
self.assert_span(num_spans=0)
def test_suppress_http_instrumentation(self):
token = context.attach(
context.set_value(_SUPPRESS_HTTP_INSTRUMENTATION_KEY, True)
)
try:
result = self.perform_request(self.URL)
self.assertEqual(result.text, "Hello!")
finally:
context.detach(token)
self.assert_span(num_spans=0)
def test_not_recording(self):
with mock.patch("opentelemetry.trace.INVALID_SPAN") as mock_span:
RequestsInstrumentor().uninstrument()
RequestsInstrumentor().instrument(
tracer_provider=trace.NoOpTracerProvider()
)
mock_span.is_recording.return_value = False
result = self.perform_request(self.URL)
self.assertEqual(result.text, "Hello!")
self.assert_span(None, 0)
self.assertFalse(mock_span.is_recording())
self.assertTrue(mock_span.is_recording.called)
self.assertFalse(mock_span.set_attribute.called)
self.assertFalse(mock_span.set_status.called)
def test_distributed_context(self):
previous_propagator = get_global_textmap()
try:
set_global_textmap(MockTextMapPropagator())
result = self.perform_request(self.URL)
self.assertEqual(result.text, "Hello!")
span = self.assert_span()
headers = dict(httpretty.last_request().headers)
self.assertIn(MockTextMapPropagator.TRACE_ID_KEY, headers)
self.assertEqual(
str(span.get_span_context().trace_id),
headers[MockTextMapPropagator.TRACE_ID_KEY],
)
self.assertIn(MockTextMapPropagator.SPAN_ID_KEY, headers)
self.assertEqual(
str(span.get_span_context().span_id),
headers[MockTextMapPropagator.SPAN_ID_KEY],
)
finally:
set_global_textmap(previous_propagator)
    def test_span_callback(self):
        """A user-supplied span_callback can attach extra attributes to the span."""
        RequestsInstrumentor().uninstrument()
        def span_callback(span, result: requests.Response):
            # Record the response body as a custom span attribute.
            span.set_attribute(
                "http.response.body", result.content.decode("utf-8")
            )
        RequestsInstrumentor().instrument(
            tracer_provider=self.tracer_provider,
            span_callback=span_callback,
        )
        result = self.perform_request(self.URL)
        self.assertEqual(result.text, "Hello!")
        span = self.assert_span()
        # Standard HTTP attributes plus the callback's custom one.
        self.assertEqual(
            span.attributes,
            {
                SpanAttributes.HTTP_METHOD: "GET",
                SpanAttributes.HTTP_URL: self.URL,
                SpanAttributes.HTTP_STATUS_CODE: 200,
                "http.response.body": "Hello!",
            },
        )
def test_custom_tracer_provider(self):
resource = resources.Resource.create({})
result = self.create_tracer_provider(resource=resource)
tracer_provider, exporter = result
RequestsInstrumentor().uninstrument()
RequestsInstrumentor().instrument(tracer_provider=tracer_provider)
result = self.perform_request(self.URL)
self.assertEqual(result.text, "Hello!")
span = self.assert_span(exporter=exporter)
self.assertIs(span.resource, resource)
    @mock.patch(
        "requests.adapters.HTTPAdapter.send",
        side_effect=requests.RequestException,
    )
    def test_requests_exception_without_response(self, *_, **__):
        """A RequestException with no response yields an ERROR span that
        carries only the method and URL (no status code)."""
        with self.assertRaises(requests.RequestException):
            self.perform_request(self.URL)
        span = self.assert_span()
        self.assertEqual(
            span.attributes,
            {
                SpanAttributes.HTTP_METHOD: "GET",
                SpanAttributes.HTTP_URL: self.URL,
            },
        )
        self.assertEqual(span.status.status_code, StatusCode.ERROR)
    # Class-body fixture, evaluated while the class body executes so it can be
    # referenced from @mock.patch decorator arguments.
    # NOTE(review): an identical re-assignment appears before the decorator
    # that actually consumes it, so this first copy looks redundant — confirm.
    mocked_response = requests.Response()
    mocked_response.status_code = 500
    mocked_response.reason = "Internal Server Error"
    @mock.patch(
        "requests.adapters.HTTPAdapter.send",
        side_effect=InvalidResponseObjectException,
    )
    def test_requests_exception_without_proper_response_type(self, *_, **__):
        """An exception whose ``response`` is not a requests.Response still
        records an ERROR span without a status code."""
        with self.assertRaises(InvalidResponseObjectException):
            self.perform_request(self.URL)
        span = self.assert_span()
        self.assertEqual(
            span.attributes,
            {
                SpanAttributes.HTTP_METHOD: "GET",
                SpanAttributes.HTTP_URL: self.URL,
            },
        )
        self.assertEqual(span.status.status_code, StatusCode.ERROR)
    # Class-body fixture consumed by the @mock.patch decorator of the next
    # test method (decorator arguments are evaluated at class-body time).
    mocked_response = requests.Response()
    mocked_response.status_code = 500
    mocked_response.reason = "Internal Server Error"
    @mock.patch(
        "requests.adapters.HTTPAdapter.send",
        side_effect=requests.RequestException(response=mocked_response),
    )
    def test_requests_exception_with_response(self, *_, **__):
        """When the raised exception carries a response object, its status
        code (500 here) is recorded on the ERROR span."""
        with self.assertRaises(requests.RequestException):
            self.perform_request(self.URL)
        span = self.assert_span()
        self.assertEqual(
            span.attributes,
            {
                SpanAttributes.HTTP_METHOD: "GET",
                SpanAttributes.HTTP_URL: self.URL,
                SpanAttributes.HTTP_STATUS_CODE: 500,
            },
        )
        self.assertEqual(span.status.status_code, StatusCode.ERROR)
    @mock.patch("requests.adapters.HTTPAdapter.send", side_effect=Exception)
    def test_requests_basic_exception(self, *_, **__):
        """Any exception raised by the transport marks the span as ERROR."""
        with self.assertRaises(Exception):
            self.perform_request(self.URL)
        span = self.assert_span()
        self.assertEqual(span.status.status_code, StatusCode.ERROR)
    @mock.patch(
        "requests.adapters.HTTPAdapter.send", side_effect=requests.Timeout
    )
    def test_requests_timeout_exception(self, *_, **__):
        """Timeouts are likewise recorded as ERROR spans."""
        with self.assertRaises(Exception):
            self.perform_request(self.URL)
        span = self.assert_span()
        self.assertEqual(span.status.status_code, StatusCode.ERROR)
def test_adapter_with_custom_response(self):
response = Response()
response.status_code = 210
response.reason = "hello adapter"
response.raw = TransportMock()
session = requests.Session()
session.mount(self.URL, MyAdapter(response))
self.perform_request(self.URL, session)
span = self.assert_span()
self.assertEqual(
span.attributes,
{
"http.method": "GET",
"http.url": self.URL,
"http.status_code": 210,
},
)
class TestRequestsIntegration(RequestsIntegrationTestBase, TestBase):
    """Run the shared integration tests through the requests functional API."""
    @staticmethod
    def perform_request(url: str, session: requests.Session = None):
        """GET *url*, through *session* when one is supplied."""
        if session is None:
            return requests.get(url)
        return session.get(url)
    def test_invalid_url(self):
        """A malformed URL raises, yet still produces an ERROR span with
        the method and the raw URL recorded."""
        url = "http://[::1/nope"
        with self.assertRaises(ValueError):
            requests.post(url)
        span = self.assert_span()
        self.assertEqual(span.name, "HTTP POST")
        self.assertEqual(
            span.attributes,
            {SpanAttributes.HTTP_METHOD: "POST", SpanAttributes.HTTP_URL: url},
        )
        self.assertEqual(span.status.status_code, StatusCode.ERROR)
    def test_credential_removal(self):
        """Embedded basic-auth credentials must be stripped from the
        recorded URL (the span URL equals self.URL, which has none)."""
        new_url = "http://username:password@httpbin.org/status/200"
        self.perform_request(new_url)
        span = self.assert_span()
        self.assertEqual(span.attributes[SpanAttributes.HTTP_URL], self.URL)
    def test_if_headers_equals_none(self):
        """Passing headers=None must not break the instrumentation."""
        result = requests.get(self.URL, headers=None)
        self.assertEqual(result.text, "Hello!")
        self.assert_span()
class TestRequestsIntegrationPreparedRequest(
    RequestsIntegrationTestBase, TestBase
):
    """Run the shared integration tests via the PreparedRequest code path."""
    @staticmethod
    def perform_request(url: str, session: requests.Session = None):
        """Send a GET for *url* using an explicitly prepared request."""
        active_session = session if session is not None else requests.Session()
        prepared = active_session.prepare_request(requests.Request("GET", url))
        return active_session.send(prepared)
class TestRequestsIntergrationMetric(TestBase):
    """Metric-side checks for the requests instrumentation.

    NOTE(review): the class name misspells "Integration"; renaming would
    change test identifiers, so it is documented rather than fixed here.
    """
    URL = "http://examplehost:8000/status/200"
    def setUp(self):
        """Instrument requests with the test meter provider and stub the URL."""
        super().setUp()
        RequestsInstrumentor().instrument(meter_provider=self.meter_provider)
        httpretty.enable()
        httpretty.register_uri(httpretty.GET, self.URL, body="Hello!")
    def tearDown(self):
        """Undo instrumentation and HTTP stubbing after each test."""
        super().tearDown()
        RequestsInstrumentor().uninstrument()
        httpretty.disable()
    @staticmethod
    def perform_request(url: str) -> requests.Response:
        return requests.get(url)
    def test_basic_metric_success(self):
        """One successful GET must yield exactly one data point whose
        attribute set matches the request."""
        self.perform_request(self.URL)
        expected_attributes = {
            "http.status_code": 200,
            "http.host": "examplehost",
            "net.peer.port": 8000,
            "net.peer.name": "examplehost",
            "http.method": "GET",
            "http.flavor": "1.1",
            "http.scheme": "http",
        }
        # Walk the full exported metrics tree; every data point must match.
        for (
            resource_metrics
        ) in self.memory_metrics_reader.get_metrics_data().resource_metrics:
            for scope_metrics in resource_metrics.scope_metrics:
                for metric in scope_metrics.metrics:
                    for data_point in metric.data.data_points:
                        self.assertDictEqual(
                            expected_attributes, dict(data_point.attributes)
                        )
                        self.assertEqual(data_point.count, 1)
| true
| true
|
790a2aa0390a9b66315c4b5f2fa235aaf4e751e1
| 2,530
|
py
|
Python
|
copasi/bindings/python/unittests/Test_CMoiety.py
|
bmoreau/COPASI
|
d0bbec8947b1266ffd2b0ecf2566da7cf2c3e5ba
|
[
"Artistic-2.0"
] | null | null | null |
copasi/bindings/python/unittests/Test_CMoiety.py
|
bmoreau/COPASI
|
d0bbec8947b1266ffd2b0ecf2566da7cf2c3e5ba
|
[
"Artistic-2.0"
] | null | null | null |
copasi/bindings/python/unittests/Test_CMoiety.py
|
bmoreau/COPASI
|
d0bbec8947b1266ffd2b0ecf2566da7cf2c3e5ba
|
[
"Artistic-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Begin CVS Header
# $Source: /Volumes/Home/Users/shoops/cvs/copasi_dev/copasi/bindings/python/unittests/Test_CMoiety.py,v $
# $Revision: 1.11 $
# $Name: $
# $Author: shoops $
# $Date: 2010/07/16 18:55:59 $
# End CVS Header
# Copyright (C) 2010 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., University of Heidelberg, and The University
# of Manchester.
# All rights reserved.
# Copyright (C) 2008 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., EML Research, gGmbH, University of Heidelberg,
# and The University of Manchester.
# All rights reserved.
import COPASI
import unittest
from types import *
class Test_CMoiety(unittest.TestCase):
    """Smoke tests for the SWIG-generated CMoiety Python bindings.

    NOTE(review): written for Python 2 (``types.StringType``/``FloatType``
    and ``TestCase.assert_``); it will not run unmodified on Python 3.
    """
    def setUp(self):
        # Build a small model (two compartments, seven metabolites, three
        # reactions) so that compilation yields at least one moiety.
        self.datamodel=COPASI.CCopasiRootContainer.addDatamodel()
        self.model=self.datamodel.getModel()
        self.model.createCompartment("comp1",1.0)
        self.model.createCompartment("comp2",2.0)
        m1=self.model.createMetabolite("A","comp1")
        m2=self.model.createMetabolite("B","comp1")
        m3=self.model.createMetabolite("C","comp1")
        m4=self.model.createMetabolite("D","comp1")
        m5=self.model.createMetabolite("E","comp2")
        m6=self.model.createMetabolite("F","comp2")
        m7=self.model.createMetabolite("G","comp2")
        r=self.model.createReaction("react1")
        r.addSubstrate(m1.getKey())
        r.addProduct(m2.getKey())
        r=self.model.createReaction("react2")
        r.addSubstrate(m3.getKey())
        r.addProduct(m5.getKey())
        r=self.model.createReaction("react3")
        r.addSubstrate(m6.getKey())
        r.addProduct(m2.getKey())
        self.model.compileIfNecessary()
        # The moiety under test is whichever the compiled model lists first.
        self.moiety=self.model.getMoiety(0)
    def test_getDescription(self):
        # Description should come back as a (Python 2) string.
        desc=self.moiety.getDescription(self.model)
        self.assert_(type(desc)==StringType)
    def test_dependentNumber(self):
        v=self.moiety.dependentNumber()
        self.assert_(type(v)==FloatType)
    def test_getNumber(self):
        v=self.moiety.getNumber()
        self.assert_(type(v)==FloatType)
    def test_getKey(self):
        # COPASI object keys are strings.
        key=self.moiety.getKey()
        self.assert_(type(key)==StringType)
    def test_getDependentNumber(self):
        v=self.moiety.getDependentNumber()
        self.assert_(type(v)==FloatType)
def suite():
    """Assemble the Test_CMoiety suite.

    Bug fix: the original list named 'test_getDependentNumber' twice and
    never included 'test_dependentNumber', so that test was silently
    skipped while its sibling ran twice.
    """
    tests = [
        'test_getDescription',
        'test_dependentNumber',
        'test_getNumber',
        'test_getKey',
        'test_getDependentNumber',
    ]
    return unittest.TestSuite(map(Test_CMoiety, tests))
# Idiom fix: drop the redundant parentheses around the condition.
if __name__ == '__main__':
    # Run the explicit suite (not automatic discovery) when executed directly.
    unittest.TextTestRunner(verbosity=2).run(suite())
| 30.119048
| 108
| 0.696838
|
import COPASI
import unittest
from types import *
class Test_CMoiety(unittest.TestCase):
def setUp(self):
self.datamodel=COPASI.CCopasiRootContainer.addDatamodel()
self.model=self.datamodel.getModel()
self.model.createCompartment("comp1",1.0)
self.model.createCompartment("comp2",2.0)
m1=self.model.createMetabolite("A","comp1")
m2=self.model.createMetabolite("B","comp1")
m3=self.model.createMetabolite("C","comp1")
m4=self.model.createMetabolite("D","comp1")
m5=self.model.createMetabolite("E","comp2")
m6=self.model.createMetabolite("F","comp2")
m7=self.model.createMetabolite("G","comp2")
r=self.model.createReaction("react1")
r.addSubstrate(m1.getKey())
r.addProduct(m2.getKey())
r=self.model.createReaction("react2")
r.addSubstrate(m3.getKey())
r.addProduct(m5.getKey())
r=self.model.createReaction("react3")
r.addSubstrate(m6.getKey())
r.addProduct(m2.getKey())
self.model.compileIfNecessary()
self.moiety=self.model.getMoiety(0)
def test_getDescription(self):
desc=self.moiety.getDescription(self.model)
self.assert_(type(desc)==StringType)
def test_dependentNumber(self):
v=self.moiety.dependentNumber()
self.assert_(type(v)==FloatType)
def test_getNumber(self):
v=self.moiety.getNumber()
self.assert_(type(v)==FloatType)
def test_getKey(self):
key=self.moiety.getKey()
self.assert_(type(key)==StringType)
def test_getDependentNumber(self):
v=self.moiety.getDependentNumber()
self.assert_(type(v)==FloatType)
def suite():
tests=[
'test_getDescription'
,'test_getDependentNumber'
,'test_getNumber'
,'test_getKey'
,'test_getDependentNumber'
]
return unittest.TestSuite(map(Test_CMoiety,tests))
if(__name__ == '__main__'):
unittest.TextTestRunner(verbosity=2).run(suite())
| true
| true
|
790a2c7cd415c6e41a96218f164884bbd0d995f7
| 697
|
py
|
Python
|
ironic_inventory/tests/test_functional.py
|
softlayer/ironic-inventory-integrator
|
1c7650bb2479939da6af910a80689103d32c75e3
|
[
"Apache-2.0"
] | 1
|
2016-04-28T19:13:58.000Z
|
2016-04-28T19:13:58.000Z
|
ironic_inventory/tests/test_functional.py
|
softlayer/ironic-inventory-integrator
|
1c7650bb2479939da6af910a80689103d32c75e3
|
[
"Apache-2.0"
] | null | null | null |
ironic_inventory/tests/test_functional.py
|
softlayer/ironic-inventory-integrator
|
1c7650bb2479939da6af910a80689103d32c75e3
|
[
"Apache-2.0"
] | 1
|
2016-03-13T07:40:36.000Z
|
2016-03-13T07:40:36.000Z
|
from unittest import TestCase
from webtest import TestApp
from ironic_inventory.tests import FunctionalTest
class TestRootController(FunctionalTest):
    """Functional checks for the service's root controller."""

    def test_get(self):
        """GET / succeeds."""
        response = self.app.get('/')
        assert response.status_int == 200

    def test_search(self):
        """POST / with a query redirects to the pecan documentation search."""
        response = self.app.post('/', params={'q': 'RestController'})
        assert response.status_int == 302
        expected_location = (
            'http://pecan.readthedocs.org/en/latest/search.html'
            '?q=RestController'
        )
        assert response.headers['Location'] == expected_location

    def test_get_not_found(self):
        """An unknown path yields a 404."""
        response = self.app.get('/a/bogus/url', expect_errors=True)
        assert response.status_int == 404
| 30.304348
| 69
| 0.657102
|
from unittest import TestCase
from webtest import TestApp
from ironic_inventory.tests import FunctionalTest
class TestRootController(FunctionalTest):
def test_get(self):
response = self.app.get('/')
assert response.status_int == 200
def test_search(self):
response = self.app.post('/', params={'q': 'RestController'})
assert response.status_int == 302
assert response.headers['Location'] == (
'http://pecan.readthedocs.org/en/latest/search.html'
'?q=RestController'
)
def test_get_not_found(self):
response = self.app.get('/a/bogus/url', expect_errors=True)
assert response.status_int == 404
| true
| true
|
790a2cb7f6339a5c9eb59efad748387b5e044992
| 12,797
|
py
|
Python
|
keystone/models/revoke_model.py
|
ISCAS-VDI/keystone
|
11af181c06d78026c89a873f62931558e80f3192
|
[
"Apache-2.0"
] | null | null | null |
keystone/models/revoke_model.py
|
ISCAS-VDI/keystone
|
11af181c06d78026c89a873f62931558e80f3192
|
[
"Apache-2.0"
] | null | null | null |
keystone/models/revoke_model.py
|
ISCAS-VDI/keystone
|
11af181c06d78026c89a873f62931558e80f3192
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import msgpackutils
from oslo_utils import timeutils
from six.moves import map
from keystone.common import cache
from keystone.common import utils
# The set of attributes common between the RevokeEvent
# and the dictionaries created from the token Data.
_NAMES = ['trust_id',
'consumer_id',
'access_token_id',
'audit_id',
'audit_chain_id',
'expires_at',
'domain_id',
'project_id',
'user_id',
'role_id']
# Additional arguments for creating a RevokeEvent
_EVENT_ARGS = ['issued_before', 'revoked_at']
# Names of attributes in the RevocationEvent, including "virtual" attributes.
# Virtual attributes are those added based on other values.
_EVENT_NAMES = _NAMES + ['domain_scope_id']
# Values that will be in the token data but not in the event.
# These will compared with event values that have different names.
# For example: both trustor_id and trustee_id are compared against user_id
_TOKEN_KEYS = ['identity_domain_id',
'assignment_domain_id',
'issued_at',
'trustor_id',
'trustee_id']
# Alternative names to be checked in token for every field in
# revoke tree.
ALTERNATIVES = {
'user_id': ['user_id', 'trustor_id', 'trustee_id'],
'domain_id': ['identity_domain_id', 'assignment_domain_id'],
# For a domain-scoped token, the domain is in assignment_domain_id.
'domain_scope_id': ['assignment_domain_id', ],
}
REVOKE_KEYS = _NAMES + _EVENT_ARGS
def blank_token_data(issued_at):
    """Return a token-data dict with every known field blanked to None.

    Only ``issued_at`` — required by revocation matching — is populated.
    """
    token_data = {name: None for name in _NAMES}
    token_data.update({name: None for name in _TOKEN_KEYS})
    # required field
    token_data['issued_at'] = issued_at
    return token_data
class RevokeEvent(object):
    """A single token-revocation event.

    Attributes are populated from REVOKE_KEYS; any key absent from
    ``kwargs`` is stored as None (None acts as a wildcard when matching).
    """
    def __init__(self, **kwargs):
        for k in REVOKE_KEYS:
            v = kwargs.get(k)
            setattr(self, k, v)
        if self.domain_id and self.expires_at:
            # This is revoking a domain-scoped token.
            self.domain_scope_id = self.domain_id
            self.domain_id = None
        else:
            # This is revoking all tokens for a domain.
            self.domain_scope_id = None
        if self.expires_at is not None:
            # Trim off the expiration time because MySQL timestamps are only
            # accurate to the second.
            self.expires_at = self.expires_at.replace(microsecond=0)
        if self.revoked_at is None:
            self.revoked_at = timeutils.utcnow()
        if self.issued_before is None:
            # Default: only tokens issued before the event was recorded are
            # affected.
            self.issued_before = self.revoked_at
    def to_dict(self):
        """Serialize the event, omitting unset (None) fields."""
        keys = ['user_id',
                'role_id',
                'domain_id',
                'domain_scope_id',
                'project_id',
                'audit_id',
                'audit_chain_id',
                ]
        event = {key: self.__dict__[key] for key in keys
                 if self.__dict__[key] is not None}
        if self.trust_id is not None:
            event['OS-TRUST:trust_id'] = self.trust_id
        if self.consumer_id is not None:
            event['OS-OAUTH1:consumer_id'] = self.consumer_id
        # NOTE(review): this guard tests consumer_id, not access_token_id —
        # presumably intentional (both are set for OAuth tokens); confirm.
        if self.consumer_id is not None:
            event['OS-OAUTH1:access_token_id'] = self.access_token_id
        if self.expires_at is not None:
            event['expires_at'] = utils.isotime(self.expires_at)
        if self.issued_before is not None:
            event['issued_before'] = utils.isotime(self.issued_before,
                                                   subsecond=True)
        return event
    def key_for_name(self, name):
        # '*' is the wildcard marker for an unset attribute.
        return "%s=%s" % (name, getattr(self, name) or '*')
def attr_keys(event):
    """Return the "name=value" cache keys for every event attribute."""
    return [event.key_for_name(name) for name in _EVENT_NAMES]
def is_revoked(events, token_data):
    """Check if a token matches a revocation event.

    Compare a token against every revocation event. If the token matches an
    event in the `events` list, the token is revoked. If the token is compared
    against every item in the list without a match, it is not considered
    revoked from the `revoke_api`.

    :param events: a list of RevokeEvent instances
    :param token_data: map based on a flattened view of the token. The required
                       fields are `expires_at`,`user_id`, `project_id`,
                       `identity_domain_id`, `assignment_domain_id`,
                       `trust_id`, `trustor_id`, `trustee_id` `consumer_id` and
                       `access_token_id`
    :returns: True if the token matches an existing revocation event, meaning
              the token is revoked. False is returned if the token does not
              match any revocation events, meaning the token is considered
              valid by the revocation API.
    """
    # Fix: use a generator instead of a list so any() short-circuits on the
    # first matching event rather than evaluating every comparison up front.
    return any(matches(e, token_data) for e in events)
def matches(event, token_values):
    """See if the token matches the revocation event.

    A brute force approach to checking.
    Compare each attribute from the event with the corresponding
    value from the token. If the event does not have a value for
    the attribute, a match is still possible. If the event has a
    value for the attribute, and it does not match the token, no match
    is possible, so skip the remaining checks.

    :param event: a RevokeEvent instance
    :param token_values: dictionary with set of values taken from the
                         token
    :returns: True if the token matches the revocation event, indicating the
              token has been revoked
    """
    # Each early "return False" means the token survived that check's
    # mismatch test and is still considered valid.
    # The event's user_id may match any of the token's three identities.
    if event.user_id is not None:
        user_aliases = ('user_id', 'trustor_id', 'trustee_id')
        if not any(event.user_id == token_values[name]
                   for name in user_aliases):
            return False
    # The event's domain_id may match either of two token domains.
    if event.domain_id is not None:
        domain_aliases = ('identity_domain_id', 'assignment_domain_id')
        if not any(event.domain_id == token_values[name]
                   for name in domain_aliases):
            return False
    if event.domain_scope_id is not None:
        if event.domain_scope_id != token_values['assignment_domain_id']:
            return False
    # Remaining attributes compare one-to-one under the same name.
    for name in ('project_id', 'expires_at', 'trust_id', 'consumer_id',
                 'access_token_id', 'audit_id', 'audit_chain_id'):
        event_value = getattr(event, name)
        if event_value is not None and event_value != token_values[name]:
            return False
    if event.role_id is not None:
        if event.role_id not in token_values['roles']:
            return False
    # Tokens issued after the event's cutoff are unaffected by it.
    if token_values['issued_at'] > event.issued_before:
        return False
    return True
def build_token_values_v2(access, default_domain_id):
    """Flatten a v2 token payload into the dict shape compared by matches().

    :param access: v2 token payload (dict containing the 'token' key)
    :param default_domain_id: applied to both identity and assignment
        domains, since v2 tokens carry no domain information
    :returns: dict of values keyed to RevokeEvent attribute names
    """
    token_data = access['token']
    token_expires_at = timeutils.parse_isotime(token_data['expires'])
    # Trim off the microseconds because the revocation event only has
    # expirations accurate to the second.
    token_expires_at = token_expires_at.replace(microsecond=0)
    token_values = {
        'expires_at': timeutils.normalize_time(token_expires_at),
        'issued_at': timeutils.normalize_time(
            timeutils.parse_isotime(token_data['issued_at'])),
        'audit_id': token_data.get('audit_ids', [None])[0],
        'audit_chain_id': token_data.get('audit_ids', [None])[-1],
    }
    token_values['user_id'] = access.get('user', {}).get('id')
    project = token_data.get('tenant')
    if project is not None:
        token_values['project_id'] = project['id']
    else:
        token_values['project_id'] = None
    token_values['identity_domain_id'] = default_domain_id
    token_values['assignment_domain_id'] = default_domain_id
    trust = token_data.get('trust')
    if trust is None:
        token_values['trust_id'] = None
        token_values['trustor_id'] = None
        token_values['trustee_id'] = None
    else:
        token_values['trust_id'] = trust['id']
        token_values['trustor_id'] = trust['trustor_id']
        token_values['trustee_id'] = trust['trustee_id']
    # v2 tokens carry no OAuth1 data.
    token_values['consumer_id'] = None
    token_values['access_token_id'] = None
    role_list = []
    # Roles are by ID in metadata and by name in the user section
    roles = access.get('metadata', {}).get('roles', [])
    for role in roles:
        role_list.append(role)
    token_values['roles'] = role_list
    return token_values
def build_token_values(token_data):
    """Flatten a v3 token payload into the dict shape compared by matches().

    Missing sections (user, project, domain, trust, OAuth1) are recorded as
    None so that matches() can treat absence uniformly.

    :param token_data: v3 token body
    :returns: dict of values keyed to RevokeEvent attribute names
    """
    token_expires_at = timeutils.parse_isotime(token_data['expires_at'])
    # Trim off the microseconds because the revocation event only has
    # expirations accurate to the second.
    token_expires_at = token_expires_at.replace(microsecond=0)
    token_values = {
        'expires_at': timeutils.normalize_time(token_expires_at),
        'issued_at': timeutils.normalize_time(
            timeutils.parse_isotime(token_data['issued_at'])),
        'audit_id': token_data.get('audit_ids', [None])[0],
        'audit_chain_id': token_data.get('audit_ids', [None])[-1],
    }
    user = token_data.get('user')
    if user is not None:
        token_values['user_id'] = user['id']
        # Federated users do not have a domain, be defensive and get the user
        # domain set to None in the federated user case.
        token_values['identity_domain_id'] = user.get('domain', {}).get('id')
    else:
        token_values['user_id'] = None
        token_values['identity_domain_id'] = None
    project = token_data.get('project', token_data.get('tenant'))
    if project is not None:
        token_values['project_id'] = project['id']
        # The domain_id of projects acting as domains is None
        token_values['assignment_domain_id'] = (
            project['domain']['id'] if project['domain'] else None)
    else:
        token_values['project_id'] = None
        domain = token_data.get('domain')
        if domain is not None:
            token_values['assignment_domain_id'] = domain['id']
        else:
            token_values['assignment_domain_id'] = None
    role_list = []
    roles = token_data.get('roles')
    if roles is not None:
        for role in roles:
            role_list.append(role['id'])
    token_values['roles'] = role_list
    trust = token_data.get('OS-TRUST:trust')
    if trust is None:
        token_values['trust_id'] = None
        token_values['trustor_id'] = None
        token_values['trustee_id'] = None
    else:
        token_values['trust_id'] = trust['id']
        token_values['trustor_id'] = trust['trustor_user']['id']
        token_values['trustee_id'] = trust['trustee_user']['id']
    oauth1 = token_data.get('OS-OAUTH1')
    if oauth1 is None:
        token_values['consumer_id'] = None
        token_values['access_token_id'] = None
    else:
        token_values['consumer_id'] = oauth1['consumer_id']
        token_values['access_token_id'] = oauth1['access_token_id']
    return token_values
class _RevokeEventHandler(object):
    """msgpack (de)serialization handler for caching RevokeEvent objects."""
    # NOTE(morganfainberg): There needs to be reserved "registry" entries set
    # in oslo_serialization for application-specific handlers. We picked 127
    # here since it's waaaaaay far out before oslo_serialization will use it.
    identity = 127
    handles = (RevokeEvent,)
    def __init__(self, registry):
        self._registry = registry
    def serialize(self, obj):
        # Serialize via __dict__ so all attributes set in RevokeEvent.__init__
        # round-trip through the cache.
        return msgpackutils.dumps(obj.__dict__, registry=self._registry)
    def deserialize(self, data):
        # Rebuilding via the constructor re-derives computed fields
        # (e.g. domain_scope_id) from the stored values.
        revoke_event_data = msgpackutils.loads(data, registry=self._registry)
        revoke_event = RevokeEvent(**revoke_event_data)
        return revoke_event
cache.register_model_handler(_RevokeEventHandler)
| 36.562857
| 79
| 0.650231
|
from oslo_serialization import msgpackutils
from oslo_utils import timeutils
from six.moves import map
from keystone.common import cache
from keystone.common import utils
_NAMES = ['trust_id',
'consumer_id',
'access_token_id',
'audit_id',
'audit_chain_id',
'expires_at',
'domain_id',
'project_id',
'user_id',
'role_id']
_EVENT_ARGS = ['issued_before', 'revoked_at']
_EVENT_NAMES = _NAMES + ['domain_scope_id']
_TOKEN_KEYS = ['identity_domain_id',
'assignment_domain_id',
'issued_at',
'trustor_id',
'trustee_id']
ALTERNATIVES = {
'user_id': ['user_id', 'trustor_id', 'trustee_id'],
'domain_id': ['identity_domain_id', 'assignment_domain_id'],
'domain_scope_id': ['assignment_domain_id', ],
}
REVOKE_KEYS = _NAMES + _EVENT_ARGS
def blank_token_data(issued_at):
token_data = dict()
for name in _NAMES:
token_data[name] = None
for name in _TOKEN_KEYS:
token_data[name] = None
token_data['issued_at'] = issued_at
return token_data
class RevokeEvent(object):
def __init__(self, **kwargs):
for k in REVOKE_KEYS:
v = kwargs.get(k)
setattr(self, k, v)
if self.domain_id and self.expires_at:
self.domain_scope_id = self.domain_id
self.domain_id = None
else:
self.domain_scope_id = None
if self.expires_at is not None:
self.expires_at = self.expires_at.replace(microsecond=0)
if self.revoked_at is None:
self.revoked_at = timeutils.utcnow()
if self.issued_before is None:
self.issued_before = self.revoked_at
def to_dict(self):
keys = ['user_id',
'role_id',
'domain_id',
'domain_scope_id',
'project_id',
'audit_id',
'audit_chain_id',
]
event = {key: self.__dict__[key] for key in keys
if self.__dict__[key] is not None}
if self.trust_id is not None:
event['OS-TRUST:trust_id'] = self.trust_id
if self.consumer_id is not None:
event['OS-OAUTH1:consumer_id'] = self.consumer_id
if self.consumer_id is not None:
event['OS-OAUTH1:access_token_id'] = self.access_token_id
if self.expires_at is not None:
event['expires_at'] = utils.isotime(self.expires_at)
if self.issued_before is not None:
event['issued_before'] = utils.isotime(self.issued_before,
subsecond=True)
return event
def key_for_name(self, name):
return "%s=%s" % (name, getattr(self, name) or '*')
def attr_keys(event):
return list(map(event.key_for_name, _EVENT_NAMES))
def is_revoked(events, token_data):
return any([matches(e, token_data) for e in events])
def matches(event, token_values):
if event.user_id is not None:
if all(event.user_id != token_values[attribute_name]
for attribute_name in ['user_id', 'trustor_id', 'trustee_id']):
return False
if event.domain_id is not None:
if all(event.domain_id != token_values[attribute_name]
for attribute_name in ['identity_domain_id',
'assignment_domain_id']):
return False
if event.domain_scope_id is not None:
if event.domain_scope_id != token_values['assignment_domain_id']:
return False
attribute_names = ['project_id',
'expires_at', 'trust_id', 'consumer_id',
'access_token_id', 'audit_id', 'audit_chain_id']
for attribute_name in attribute_names:
if getattr(event, attribute_name) is not None:
if (getattr(event, attribute_name) !=
token_values[attribute_name]):
return False
if event.role_id is not None:
roles = token_values['roles']
if all(event.role_id != role for role in roles):
return False
if token_values['issued_at'] > event.issued_before:
return False
return True
def build_token_values_v2(access, default_domain_id):
token_data = access['token']
token_expires_at = timeutils.parse_isotime(token_data['expires'])
token_expires_at = token_expires_at.replace(microsecond=0)
token_values = {
'expires_at': timeutils.normalize_time(token_expires_at),
'issued_at': timeutils.normalize_time(
timeutils.parse_isotime(token_data['issued_at'])),
'audit_id': token_data.get('audit_ids', [None])[0],
'audit_chain_id': token_data.get('audit_ids', [None])[-1],
}
token_values['user_id'] = access.get('user', {}).get('id')
project = token_data.get('tenant')
if project is not None:
token_values['project_id'] = project['id']
else:
token_values['project_id'] = None
token_values['identity_domain_id'] = default_domain_id
token_values['assignment_domain_id'] = default_domain_id
trust = token_data.get('trust')
if trust is None:
token_values['trust_id'] = None
token_values['trustor_id'] = None
token_values['trustee_id'] = None
else:
token_values['trust_id'] = trust['id']
token_values['trustor_id'] = trust['trustor_id']
token_values['trustee_id'] = trust['trustee_id']
token_values['consumer_id'] = None
token_values['access_token_id'] = None
role_list = []
roles = access.get('metadata', {}).get('roles', [])
for role in roles:
role_list.append(role)
token_values['roles'] = role_list
return token_values
def build_token_values(token_data):
token_expires_at = timeutils.parse_isotime(token_data['expires_at'])
token_expires_at = token_expires_at.replace(microsecond=0)
token_values = {
'expires_at': timeutils.normalize_time(token_expires_at),
'issued_at': timeutils.normalize_time(
timeutils.parse_isotime(token_data['issued_at'])),
'audit_id': token_data.get('audit_ids', [None])[0],
'audit_chain_id': token_data.get('audit_ids', [None])[-1],
}
user = token_data.get('user')
if user is not None:
token_values['user_id'] = user['id']
token_values['identity_domain_id'] = user.get('domain', {}).get('id')
else:
token_values['user_id'] = None
token_values['identity_domain_id'] = None
project = token_data.get('project', token_data.get('tenant'))
if project is not None:
token_values['project_id'] = project['id']
token_values['assignment_domain_id'] = (
project['domain']['id'] if project['domain'] else None)
else:
token_values['project_id'] = None
domain = token_data.get('domain')
if domain is not None:
token_values['assignment_domain_id'] = domain['id']
else:
token_values['assignment_domain_id'] = None
role_list = []
roles = token_data.get('roles')
if roles is not None:
for role in roles:
role_list.append(role['id'])
token_values['roles'] = role_list
trust = token_data.get('OS-TRUST:trust')
if trust is None:
token_values['trust_id'] = None
token_values['trustor_id'] = None
token_values['trustee_id'] = None
else:
token_values['trust_id'] = trust['id']
token_values['trustor_id'] = trust['trustor_user']['id']
token_values['trustee_id'] = trust['trustee_user']['id']
oauth1 = token_data.get('OS-OAUTH1')
if oauth1 is None:
token_values['consumer_id'] = None
token_values['access_token_id'] = None
else:
token_values['consumer_id'] = oauth1['consumer_id']
token_values['access_token_id'] = oauth1['access_token_id']
return token_values
class _RevokeEventHandler(object):
identity = 127
handles = (RevokeEvent,)
def __init__(self, registry):
self._registry = registry
def serialize(self, obj):
return msgpackutils.dumps(obj.__dict__, registry=self._registry)
def deserialize(self, data):
revoke_event_data = msgpackutils.loads(data, registry=self._registry)
revoke_event = RevokeEvent(**revoke_event_data)
return revoke_event
cache.register_model_handler(_RevokeEventHandler)
| true
| true
|
790a2fa8698fdd638a81ff20f5940ba01d810cb6
| 195
|
py
|
Python
|
Code.py
|
Awesome12-arch/Python-Reminder-Application
|
974d63e945676d8d88db987e100012f6414c9528
|
[
"MIT"
] | null | null | null |
Code.py
|
Awesome12-arch/Python-Reminder-Application
|
974d63e945676d8d88db987e100012f6414c9528
|
[
"MIT"
] | null | null | null |
Code.py
|
Awesome12-arch/Python-Reminder-Application
|
974d63e945676d8d88db987e100012f6414c9528
|
[
"MIT"
] | null | null | null |
"""Console reminder: ask for a message and a delay in minutes, then wait
that long and print the message."""
import time

print("What shall I remind you about?")
# Fix: input() already returns str, so the redundant str() wrapper is gone.
text = input()
print("In how many minutes ?")
local_time = float(input())
# Convert minutes to the seconds that time.sleep expects.
local_time = local_time * 60
time.sleep(local_time)
print(text)
| 21.666667
| 39
| 0.728205
|
import time
print("What shall I remind you about?")
text = str(input())
print("In how many minutes ?")
local_time = float(input())
local_time = local_time * 60
time.sleep(local_time)
print(text)
| true
| true
|
790a2fc209c2a8bb8a66d339807c4d97014f4534
| 73,893
|
py
|
Python
|
discord/message.py
|
NQN-Discord/discord.py
|
2fd948acfa818cfde7feb71c8bc20778c87ac39b
|
[
"MIT"
] | null | null | null |
discord/message.py
|
NQN-Discord/discord.py
|
2fd948acfa818cfde7feb71c8bc20778c87ac39b
|
[
"MIT"
] | null | null | null |
discord/message.py
|
NQN-Discord/discord.py
|
2fd948acfa818cfde7feb71c8bc20778c87ac39b
|
[
"MIT"
] | null | null | null |
"""
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
import asyncio
import datetime
import re
import io
from os import PathLike
from typing import (
Dict,
TYPE_CHECKING,
Sequence,
Union,
List,
Optional,
Any,
Callable,
Tuple,
ClassVar,
Type,
overload,
)
from . import utils
from .reaction import Reaction
from .emoji import Emoji
from .partial_emoji import PartialEmoji
from .enums import InteractionType, MessageType, ChannelType, try_enum
from .errors import HTTPException
from .components import _component_factory
from .embeds import Embed
from .member import Member
from .flags import MessageFlags
from .file import File
from .utils import escape_mentions, MISSING
from .http import handle_message_parameters
from .guild import Guild
from .mixins import Hashable
from .sticker import StickerItem
from .threads import Thread
from .user import User
from .channel import PartialMessageable
if TYPE_CHECKING:
from typing_extensions import Self
from .types.message import (
Message as MessagePayload,
Attachment as AttachmentPayload,
MessageReference as MessageReferencePayload,
MessageApplication as MessageApplicationPayload,
MessageActivity as MessageActivityPayload,
)
from .types.interactions import MessageInteraction as MessageInteractionPayload
from .types.components import Component as ComponentPayload
from .types.threads import ThreadArchiveDuration
from .types.member import (
Member as MemberPayload,
UserWithMember as UserWithMemberPayload,
)
from .types.user import User as UserPayload
from .types.embed import Embed as EmbedPayload
from .types.gateway import MessageReactionRemoveEvent, MessageUpdateEvent
from .abc import Snowflake
from .abc import GuildChannel, MessageableChannel
from .components import Component
from .state import ConnectionState
from .channel import TextChannel
from .mentions import AllowedMentions
from .user import User
from .role import Role
from .ui.view import View
# Anything a caller may pass where an emoji is expected when reacting.
EmojiInputType = Union[Emoji, PartialEmoji, str]

# Names exported as the public API of this module.
__all__ = (
    'Attachment',
    'Message',
    'PartialMessage',
    'MessageInteraction',
    'MessageReference',
    'DeletedReferencedMessage',
)
def convert_emoji_reaction(emoji: Union[EmojiInputType, Reaction]) -> str:
    """Normalise any accepted emoji input into the reaction string form
    expected by the HTTP API, raising :exc:`TypeError` for anything else.
    """
    if isinstance(emoji, Reaction):
        # A Reaction wraps the emoji we actually want to send.
        emoji = emoji.emoji
    if isinstance(emoji, Emoji):
        return '{0.name}:{0.id}'.format(emoji)
    if isinstance(emoji, PartialEmoji):
        return emoji._as_reaction()
    if isinstance(emoji, str):
        # Reactions can be in :name:id format, but not <:name:id>.
        # No existing emojis have <> in them, so this should be okay.
        return emoji.strip('<>')
    raise TypeError(f'emoji argument must be str, Emoji, or Reaction not {emoji.__class__.__name__}.')
class Attachment(Hashable):
    """Represents an attachment from Discord.

    .. container:: operations

        .. describe:: str(x)

            Returns the URL of the attachment.

        .. describe:: x == y

            Checks if the attachment is equal to another attachment.

        .. describe:: x != y

            Checks if the attachment is not equal to another attachment.

        .. describe:: hash(x)

            Returns the hash of the attachment.

    .. versionchanged:: 1.7
        Attachment can now be casted to :class:`str` and is hashable.

    Attributes
    ------------
    id: :class:`int`
        The attachment ID.
    size: :class:`int`
        The attachment size in bytes.
    height: Optional[:class:`int`]
        The attachment's height, in pixels. Only applicable to images and videos.
    width: Optional[:class:`int`]
        The attachment's width, in pixels. Only applicable to images and videos.
    filename: :class:`str`
        The attachment's filename.
    url: :class:`str`
        The attachment URL. If the message this attachment was attached
        to is deleted, then this will 404.
    proxy_url: :class:`str`
        The proxy URL. This is a cached version of the :attr:`~Attachment.url` in the
        case of images. When the message is deleted, this URL might be valid for a few
        minutes or not valid at all.
    content_type: Optional[:class:`str`]
        The attachment's `media type <https://en.wikipedia.org/wiki/Media_type>`_

        .. versionadded:: 1.7
    description: Optional[:class:`str`]
        The attachment's description. Only applicable to images.

        .. versionadded:: 2.0
    ephemeral: :class:`bool`
        Whether the attachment is ephemeral.

        .. versionadded:: 2.0
    """

    __slots__ = (
        'id',
        'size',
        'height',
        'width',
        'filename',
        'url',
        'proxy_url',
        '_http',
        'content_type',
        'description',
        'ephemeral',
    )

    def __init__(self, *, data: AttachmentPayload, state: ConnectionState):
        self.id: int = int(data['id'])
        self.size: int = data['size']
        self.height: Optional[int] = data.get('height')
        self.width: Optional[int] = data.get('width')
        self.filename: str = data['filename']
        self.url: str = data['url']
        self.proxy_url: str = data['proxy_url']
        # Keep a handle on the HTTP client so read()/save() can hit the CDN later.
        self._http = state.http
        self.content_type: Optional[str] = data.get('content_type')
        self.description: Optional[str] = data.get('description')
        self.ephemeral: bool = data.get('ephemeral', False)

    def is_spoiler(self) -> bool:
        """:class:`bool`: Whether this attachment contains a spoiler."""
        # Spoilered uploads are identified purely by their filename prefix.
        return self.filename.startswith('SPOILER_')

    def __repr__(self) -> str:
        return f'<Attachment id={self.id} filename={self.filename!r} url={self.url!r}>'

    def __str__(self) -> str:
        return self.url or ''

    async def save(
        self,
        fp: Union[io.BufferedIOBase, PathLike[Any]],
        *,
        seek_begin: bool = True,
        use_cached: bool = False,
    ) -> int:
        """|coro|

        Saves this attachment into a file-like object.

        Parameters
        -----------
        fp: Union[:class:`io.BufferedIOBase`, :class:`os.PathLike`]
            The file-like object to save this attachment to or the filename
            to use. If a filename is passed then a file is created with that
            filename and used instead.
        seek_begin: :class:`bool`
            Whether to seek to the beginning of the file after saving is
            successfully done.
        use_cached: :class:`bool`
            Whether to use :attr:`proxy_url` rather than :attr:`url` when downloading
            the attachment. This will allow attachments to be saved after deletion
            more often, compared to the regular URL which is generally deleted right
            after the message is deleted. Note that this can still fail to download
            deleted attachments if too much time has passed and it does not work
            on some types of attachments.

        Raises
        --------
        HTTPException
            Saving the attachment failed.
        NotFound
            The attachment was deleted.

        Returns
        --------
        :class:`int`
            The number of bytes written.
        """
        data = await self.read(use_cached=use_cached)
        if isinstance(fp, io.BufferedIOBase):
            # Caller supplied an open buffer: write into it in place.
            written = fp.write(data)
            if seek_begin:
                fp.seek(0)
            return written
        else:
            # Otherwise treat fp as a path and create the file ourselves.
            with open(fp, 'wb') as f:
                return f.write(data)

    async def read(self, *, use_cached: bool = False) -> bytes:
        """|coro|

        Retrieves the content of this attachment as a :class:`bytes` object.

        .. versionadded:: 1.1

        Parameters
        -----------
        use_cached: :class:`bool`
            Whether to use :attr:`proxy_url` rather than :attr:`url` when downloading
            the attachment. This will allow attachments to be saved after deletion
            more often, compared to the regular URL which is generally deleted right
            after the message is deleted. Note that this can still fail to download
            deleted attachments if too much time has passed and it does not work
            on some types of attachments.

        Raises
        ------
        HTTPException
            Downloading the attachment failed.
        Forbidden
            You do not have permissions to access this attachment
        NotFound
            The attachment was deleted.

        Returns
        -------
        :class:`bytes`
            The contents of the attachment.
        """
        url = self.proxy_url if use_cached else self.url
        data = await self._http.get_from_cdn(url)
        return data

    async def to_file(self, *, use_cached: bool = False, spoiler: bool = False) -> File:
        """|coro|

        Converts the attachment into a :class:`File` suitable for sending via
        :meth:`abc.Messageable.send`.

        .. versionadded:: 1.3

        Parameters
        -----------
        use_cached: :class:`bool`
            Whether to use :attr:`proxy_url` rather than :attr:`url` when downloading
            the attachment. This will allow attachments to be saved after deletion
            more often, compared to the regular URL which is generally deleted right
            after the message is deleted. Note that this can still fail to download
            deleted attachments if too much time has passed and it does not work
            on some types of attachments.

            .. versionadded:: 1.4
        spoiler: :class:`bool`
            Whether the file is a spoiler.

            .. versionadded:: 1.4

        Raises
        ------
        HTTPException
            Downloading the attachment failed.
        Forbidden
            You do not have permissions to access this attachment
        NotFound
            The attachment was deleted.

        Returns
        -------
        :class:`File`
            The attachment as a file suitable for sending.
        """
        data = await self.read(use_cached=use_cached)
        return File(io.BytesIO(data), filename=self.filename, description=self.description, spoiler=spoiler)

    def to_dict(self) -> AttachmentPayload:
        # Serialise back into the payload shape; optional fields are only
        # included when they carry a value.
        result: AttachmentPayload = {
            'filename': self.filename,
            'id': self.id,
            'proxy_url': self.proxy_url,
            'size': self.size,
            'url': self.url,
            'spoiler': self.is_spoiler(),
        }
        if self.height:
            result['height'] = self.height
        if self.width:
            result['width'] = self.width
        if self.content_type:
            result['content_type'] = self.content_type
        if self.description is not None:
            result['description'] = self.description
        return result
class DeletedReferencedMessage:
    """A sentinel standing in for a resolved message reference whose target
    has since been deleted.

    This lets callers distinguish "the referenced message was fetched once
    but is now gone" from "the referenced message could never be fetched".

    .. versionadded:: 1.6
    """

    __slots__ = ('_parent',)

    def __init__(self, parent: MessageReference):
        self._parent: MessageReference = parent

    def __repr__(self) -> str:
        return f"<DeletedReferencedMessage id={self.id} channel_id={self.channel_id} guild_id={self.guild_id!r}>"

    @property
    def id(self) -> int:
        """:class:`int`: The message ID of the deleted referenced message."""
        # A reference that resolved (and later deleted) always has a message id.
        return self._parent.message_id  # type: ignore

    @property
    def channel_id(self) -> int:
        """:class:`int`: The channel ID of the deleted referenced message."""
        return self._parent.channel_id

    @property
    def guild_id(self) -> Optional[int]:
        """Optional[:class:`int`]: The guild ID of the deleted referenced message."""
        return self._parent.guild_id
class MessageReference:
    """Represents a reference to a :class:`~discord.Message`.

    .. versionadded:: 1.5

    .. versionchanged:: 1.6
        This class can now be constructed by users.

    Attributes
    -----------
    message_id: Optional[:class:`int`]
        The id of the message referenced.
    channel_id: :class:`int`
        The channel id of the message referenced.
    guild_id: Optional[:class:`int`]
        The guild id of the message referenced.
    fail_if_not_exists: :class:`bool`
        Whether replying to the referenced message should raise :class:`HTTPException`
        if the message no longer exists or Discord could not fetch the message.

        .. versionadded:: 1.7
    resolved: Optional[Union[:class:`Message`, :class:`DeletedReferencedMessage`]]
        The message this reference resolved to, if any. ``None`` means the
        message was never fetched (either Discord did not attempt to resolve
        it or it was unavailable at creation time). A reference that resolved
        but whose target was later deleted becomes a
        :class:`DeletedReferencedMessage`. In practice this is mainly the
        replied-to message of a reply.

        .. versionadded:: 1.6
    """

    __slots__ = ('message_id', 'channel_id', 'guild_id', 'fail_if_not_exists', 'resolved', '_state')

    def __init__(self, *, message_id: int, channel_id: int, guild_id: Optional[int] = None, fail_if_not_exists: bool = True):
        # User-constructed references have no connection state or resolution.
        self._state: Optional[ConnectionState] = None
        self.resolved: Optional[Union[Message, DeletedReferencedMessage]] = None
        self.message_id: Optional[int] = message_id
        self.channel_id: int = channel_id
        self.guild_id: Optional[int] = guild_id
        self.fail_if_not_exists: bool = fail_if_not_exists

    @classmethod
    def with_state(cls, state: ConnectionState, data: MessageReferencePayload) -> Self:
        """Build a reference directly from a gateway payload, bypassing __init__."""
        obj = cls.__new__(cls)
        # channel_id is popped on purpose so the remaining payload keys are
        # the optional ones.
        obj.channel_id = int(data.pop('channel_id'))
        obj.message_id = utils._get_as_snowflake(data, 'message_id')
        obj.guild_id = utils._get_as_snowflake(data, 'guild_id')
        obj.fail_if_not_exists = data.get('fail_if_not_exists', True)
        obj.resolved = None
        obj._state = state
        return obj

    @classmethod
    def from_message(cls, message: PartialMessage, *, fail_if_not_exists: bool = True) -> Self:
        """Creates a :class:`MessageReference` from an existing :class:`~discord.Message`.

        .. versionadded:: 1.6

        Parameters
        ----------
        message: :class:`~discord.Message`
            The message to be converted into a reference.
        fail_if_not_exists: :class:`bool`
            Whether replying to the referenced message should raise :class:`HTTPException`
            if the message no longer exists or Discord could not fetch the message.

            .. versionadded:: 1.7

        Returns
        -------
        :class:`MessageReference`
            A reference to the message.
        """
        obj = cls(
            message_id=message.id,
            channel_id=message.channel.id,
            guild_id=getattr(message.guild, 'id', None),
            fail_if_not_exists=fail_if_not_exists,
        )
        obj._state = message._state
        return obj

    @property
    def cached_message(self) -> Optional[Message]:
        """Optional[:class:`~discord.Message`]: The cached message, if found in the internal message cache."""
        # Short-circuits to None when no state is attached.
        return self._state and self._state._get_message(self.message_id)

    @property
    def jump_url(self) -> str:
        """:class:`str`: Returns a URL that allows the client to jump to the referenced message.

        .. versionadded:: 1.7
        """
        # DM links use the literal '@me' in place of a guild id.
        location = '@me' if self.guild_id is None else self.guild_id
        return f'https://discord.com/channels/{location}/{self.channel_id}/{self.message_id}'

    def __repr__(self) -> str:
        return f'<MessageReference message_id={self.message_id!r} channel_id={self.channel_id!r} guild_id={self.guild_id!r}>'

    def to_dict(self) -> MessageReferencePayload:
        """Serialise this reference into the payload shape the API expects."""
        payload: Dict[str, Any] = {}
        if self.message_id is not None:
            payload['message_id'] = self.message_id
        payload['channel_id'] = self.channel_id
        if self.guild_id is not None:
            payload['guild_id'] = self.guild_id
        if self.fail_if_not_exists is not None:
            payload['fail_if_not_exists'] = self.fail_if_not_exists
        return payload  # type: ignore # Type checker doesn't understand these are the same.

    to_message_reference_dict = to_dict
class MessageInteraction(Hashable):
    """Represents the interaction that a :class:`Message` is a response to.

    .. versionadded:: 2.0

    .. container:: operations

        .. describe:: x == y

            Checks if two message interactions are equal.

        .. describe:: x != y

            Checks if two message interactions are not equal.

        .. describe:: hash(x)

            Returns the message interaction's hash.

    Attributes
    -----------
    id: :class:`int`
        The interaction ID.
    type: :class:`InteractionType`
        The interaction type.
    name: :class:`str`
        The name of the interaction.
    user: Union[:class:`User`, :class:`Member`]
        The user or member that invoked the interaction.
    """

    __slots__: Tuple[str, ...] = ('id', 'type', 'name', 'user')

    def __init__(self, *, state: ConnectionState, guild: Optional[Guild], data: MessageInteractionPayload) -> None:
        self.id: int = int(data['id'])
        self.type: InteractionType = try_enum(InteractionType, data['type'])
        self.name: str = data['name']
        self.user: Union[User, Member] = MISSING
        if 'member' in data and guild is not None:
            member_payload = data['member']
            member_payload['user'] = data['user']
            self.user = Member(data=member_payload, guild=guild, state=state)  # type: ignore
        else:
            # Either a DM-context interaction or (rarely) the member payload
            # exists without a guild; fall back to a plain User rather than
            # fabricating member data.
            self.user = state.create_user(data['user'])

    def __repr__(self) -> str:
        return f'<MessageInteraction id={self.id} name={self.name!r} type={self.type!r} user={self.user!r}>'

    @property
    def created_at(self) -> datetime.datetime:
        """:class:`datetime.datetime`: The interaction's creation time in UTC."""
        return utils.snowflake_time(self.id)
def flatten_handlers(cls: Type[Message]) -> Type[Message]:
    """Class decorator that precomputes the update-handler dispatch table.

    Collects every ``_handle_<field>`` method defined directly on *cls* into
    ``cls._HANDLERS`` as ``(field_name, function)`` pairs, and records the
    cached-property slots (``_cs_*``) in ``cls._CACHED_SLOTS``.
    """
    strip = len('_handle_')
    table = []
    for attr_name, func in cls.__dict__.items():
        if attr_name.startswith('_handle_') and attr_name != '_handle_member':
            table.append((attr_name[strip:], func))
    # store _handle_member last
    table.append(('member', cls._handle_member))
    cls._HANDLERS = table
    cls._CACHED_SLOTS = [slot for slot in cls.__slots__ if slot.startswith('_cs_')]
    return cls
class PartialMessage(Hashable):
    """Represents a partial message to aid with working messages when only
    a message and channel ID are present.

    There are two ways to construct this class. The first one is through
    the constructor itself, and the second is via the following:

    - :meth:`TextChannel.get_partial_message`
    - :meth:`VoiceChannel.get_partial_message`
    - :meth:`Thread.get_partial_message`
    - :meth:`DMChannel.get_partial_message`

    Note that this class is trimmed down and has no rich attributes.

    .. versionadded:: 1.6

    .. container:: operations

        .. describe:: x == y

            Checks if two partial messages are equal.

        .. describe:: x != y

            Checks if two partial messages are not equal.

        .. describe:: hash(x)

            Returns the partial message's hash.

    Attributes
    -----------
    channel: Union[:class:`PartialMessageable`, :class:`TextChannel`, :class:`VoiceChannel`, :class:`Thread`, :class:`DMChannel`]
        The channel associated with this partial message.
    id: :class:`int`
        The message ID.
    guild: Optional[:class:`Guild`]
        The guild that the partial message belongs to, if applicable.
    """

    __slots__ = ('channel', 'id', '_cs_guild', '_state', 'guild')

    def __init__(self, *, channel: MessageableChannel, id: int) -> None:
        # Only channel types that can actually contain messages are accepted.
        if not isinstance(channel, PartialMessageable) and channel.type not in (
            ChannelType.text,
            ChannelType.voice,
            ChannelType.news,
            ChannelType.private,
            ChannelType.news_thread,
            ChannelType.public_thread,
            ChannelType.private_thread,
        ):
            raise TypeError(
                f'expected PartialMessageable, TextChannel, VoiceChannel, DMChannel or Thread not {type(channel)!r}'
            )
        self.channel: MessageableChannel = channel
        self._state: ConnectionState = channel._state
        self.id: int = id
        self.guild: Optional[Guild] = getattr(channel, 'guild', None)

    def _update(self, data: MessageUpdateEvent) -> None:
        # This is used for duck typing purposes.
        # Just do nothing with the data.
        pass

    # Also needed for duck typing purposes
    # n.b. not exposed
    pinned: Any = property(None, lambda x, y: None)

    def __repr__(self) -> str:
        return f'<PartialMessage id={self.id} channel={self.channel!r}>'

    @property
    def created_at(self) -> datetime.datetime:
        """:class:`datetime.datetime`: The partial message's creation time in UTC."""
        return utils.snowflake_time(self.id)

    @property
    def jump_url(self) -> str:
        """:class:`str`: Returns a URL that allows the client to jump to this message."""
        # DM messages have no guild; the URL uses '@me' in that case.
        guild_id = getattr(self.guild, 'id', '@me')
        return f'https://discord.com/channels/{guild_id}/{self.channel.id}/{self.id}'

    async def fetch(self) -> Message:
        """|coro|

        Fetches the partial message to a full :class:`Message`.

        Raises
        --------
        NotFound
            The message was not found.
        Forbidden
            You do not have the permissions required to get a message.
        HTTPException
            Retrieving the message failed.

        Returns
        --------
        :class:`Message`
            The full message.
        """
        data = await self._state.http.get_message(self.channel.id, self.id)
        return self._state.create_message(channel=self.channel, data=data)

    async def delete(self, *, delay: Optional[float] = None) -> None:
        """|coro|

        Deletes the message.

        Your own messages could be deleted without any proper permissions. However to
        delete other people's messages, you need the :attr:`~Permissions.manage_messages`
        permission.

        .. versionchanged:: 1.1
            Added the new ``delay`` keyword-only parameter.

        Parameters
        -----------
        delay: Optional[:class:`float`]
            If provided, the number of seconds to wait in the background
            before deleting the message. If the deletion fails then it is silently ignored.

        Raises
        ------
        Forbidden
            You do not have proper permissions to delete the message.
        NotFound
            The message was deleted already
        HTTPException
            Deleting the message failed.
        """
        if delay is not None:
            # Fire-and-forget: the deletion happens in a background task and
            # HTTP failures are deliberately swallowed.
            async def delete(delay: float):
                await asyncio.sleep(delay)
                try:
                    await self._state.http.delete_message(self.channel.id, self.id)
                except HTTPException:
                    pass

            asyncio.create_task(delete(delay))
        else:
            await self._state.http.delete_message(self.channel.id, self.id)

    @overload
    async def edit(
        self,
        *,
        content: Optional[str] = ...,
        embed: Optional[Embed] = ...,
        attachments: Sequence[Union[Attachment, File]] = ...,
        delete_after: Optional[float] = ...,
        allowed_mentions: Optional[AllowedMentions] = ...,
        view: Optional[View] = ...,
    ) -> Message:
        ...

    @overload
    async def edit(
        self,
        *,
        content: Optional[str] = ...,
        embeds: Sequence[Embed] = ...,
        attachments: Sequence[Union[Attachment, File]] = ...,
        delete_after: Optional[float] = ...,
        allowed_mentions: Optional[AllowedMentions] = ...,
        view: Optional[View] = ...,
    ) -> Message:
        ...

    async def edit(
        self,
        content: Optional[str] = MISSING,
        embed: Optional[Embed] = MISSING,
        embeds: Sequence[Embed] = MISSING,
        attachments: Sequence[Union[Attachment, File]] = MISSING,
        delete_after: Optional[float] = None,
        allowed_mentions: Optional[AllowedMentions] = MISSING,
        view: Optional[View] = MISSING,
    ) -> Message:
        """|coro|

        Edits the message.

        The content must be able to be transformed into a string via ``str(content)``.

        .. versionchanged:: 2.0
            Edits are no longer in-place, the newly edited message is returned instead.

        .. versionchanged:: 2.0
            This function will now raise :exc:`TypeError` instead of
            ``InvalidArgument``.

        Parameters
        -----------
        content: Optional[:class:`str`]
            The new content to replace the message with.
            Could be ``None`` to remove the content.
        embed: Optional[:class:`Embed`]
            The new embed to replace the original with.
            Could be ``None`` to remove the embed.
        embeds: List[:class:`Embed`]
            The new embeds to replace the original with. Must be a maximum of 10.
            To remove all embeds ``[]`` should be passed.

            .. versionadded:: 2.0
        attachments: List[Union[:class:`Attachment`, :class:`File`]]
            A list of attachments to keep in the message as well as new files to upload. If ``[]`` is passed
            then all attachments are removed.

            .. note::

                New files will always appear after current attachments.

            .. versionadded:: 2.0
        delete_after: Optional[:class:`float`]
            If provided, the number of seconds to wait in the background
            before deleting the message we just edited. If the deletion fails,
            then it is silently ignored.
        allowed_mentions: Optional[:class:`~discord.AllowedMentions`]
            Controls the mentions being processed in this message. If this is
            passed, then the object is merged with :attr:`~discord.Client.allowed_mentions`.
            The merging behaviour only overrides attributes that have been explicitly passed
            to the object, otherwise it uses the attributes set in :attr:`~discord.Client.allowed_mentions`.
            If no object is passed at all then the defaults given by :attr:`~discord.Client.allowed_mentions`
            are used instead.

            .. versionadded:: 1.4
        view: Optional[:class:`~discord.ui.View`]
            The updated view to update this message with. If ``None`` is passed then
            the view is removed.

        Raises
        -------
        HTTPException
            Editing the message failed.
        Forbidden
            Tried to suppress a message without permissions or
            edited a message's content or embed that isn't yours.
        TypeError
            You specified both ``embed`` and ``embeds``

        Returns
        --------
        :class:`Message`
            The newly edited message.
        """
        # Client-level allowed_mentions only matter when content is edited.
        if content is not MISSING:
            previous_allowed_mentions = self._state.allowed_mentions
        else:
            previous_allowed_mentions = None

        # Replacing the view: stop dispatching updates to the old one first.
        if view is not MISSING:
            self._state.prevent_view_updates_for(self.id)

        params = handle_message_parameters(
            content=content,
            embed=embed,
            embeds=embeds,
            attachments=attachments,
            view=view,
            allowed_mentions=allowed_mentions,
            previous_allowed_mentions=previous_allowed_mentions,
        )
        data = await self._state.http.edit_message(self.channel.id, self.id, params=params)
        message = Message(state=self._state, channel=self.channel, data=data)

        if view and not view.is_finished():
            self._state.store_view(view, self.id)

        if delete_after is not None:
            await self.delete(delay=delete_after)

        return message

    async def publish(self) -> None:
        """|coro|

        Publishes this message to your announcement channel.

        You must have the :attr:`~Permissions.send_messages` permission to do this.

        If the message is not your own then the :attr:`~Permissions.manage_messages`
        permission is also needed.

        Raises
        -------
        Forbidden
            You do not have the proper permissions to publish this message.
        HTTPException
            Publishing the message failed.
        """
        await self._state.http.publish_message(self.channel.id, self.id)

    async def pin(self, *, reason: Optional[str] = None) -> None:
        """|coro|

        Pins the message.

        You must have the :attr:`~Permissions.manage_messages` permission to do
        this in a non-private channel context.

        Parameters
        -----------
        reason: Optional[:class:`str`]
            The reason for pinning the message. Shows up on the audit log.

            .. versionadded:: 1.4

        Raises
        -------
        Forbidden
            You do not have permissions to pin the message.
        NotFound
            The message or channel was not found or deleted.
        HTTPException
            Pinning the message failed, probably due to the channel
            having more than 50 pinned messages.
        """
        await self._state.http.pin_message(self.channel.id, self.id, reason=reason)
        # pinned exists on PartialMessage for duck typing purposes
        self.pinned = True

    async def unpin(self, *, reason: Optional[str] = None) -> None:
        """|coro|

        Unpins the message.

        You must have the :attr:`~Permissions.manage_messages` permission to do
        this in a non-private channel context.

        Parameters
        -----------
        reason: Optional[:class:`str`]
            The reason for unpinning the message. Shows up on the audit log.

            .. versionadded:: 1.4

        Raises
        -------
        Forbidden
            You do not have permissions to unpin the message.
        NotFound
            The message or channel was not found or deleted.
        HTTPException
            Unpinning the message failed.
        """
        await self._state.http.unpin_message(self.channel.id, self.id, reason=reason)
        # pinned exists on PartialMessage for duck typing purposes
        self.pinned = False

    async def add_reaction(self, emoji: EmojiInputType, /) -> None:
        """|coro|

        Adds a reaction to the message.

        The emoji may be a unicode emoji or a custom guild :class:`Emoji`.

        You must have the :attr:`~Permissions.read_message_history` permission
        to use this. If nobody else has reacted to the message using this
        emoji, the :attr:`~Permissions.add_reactions` permission is required.

        .. versionchanged:: 2.0

            ``emoji`` parameter is now positional-only.

        .. versionchanged:: 2.0
            This function will now raise :exc:`TypeError` instead of
            ``InvalidArgument``.

        Parameters
        ------------
        emoji: Union[:class:`Emoji`, :class:`Reaction`, :class:`PartialEmoji`, :class:`str`]
            The emoji to react with.

        Raises
        --------
        HTTPException
            Adding the reaction failed.
        Forbidden
            You do not have the proper permissions to react to the message.
        NotFound
            The emoji you specified was not found.
        TypeError
            The emoji parameter is invalid.
        """
        emoji = convert_emoji_reaction(emoji)
        await self._state.http.add_reaction(self.channel.id, self.id, emoji)

    async def remove_reaction(self, emoji: Union[EmojiInputType, Reaction], member: Snowflake) -> None:
        """|coro|

        Remove a reaction by the member from the message.

        The emoji may be a unicode emoji or a custom guild :class:`Emoji`.

        If the reaction is not your own (i.e. ``member`` parameter is not you) then
        the :attr:`~Permissions.manage_messages` permission is needed.

        The ``member`` parameter must represent a member and meet
        the :class:`abc.Snowflake` abc.

        .. versionchanged:: 2.0
            This function will now raise :exc:`TypeError` instead of
            ``InvalidArgument``.

        Parameters
        ------------
        emoji: Union[:class:`Emoji`, :class:`Reaction`, :class:`PartialEmoji`, :class:`str`]
            The emoji to remove.
        member: :class:`abc.Snowflake`
            The member for which to remove the reaction.

        Raises
        --------
        HTTPException
            Removing the reaction failed.
        Forbidden
            You do not have the proper permissions to remove the reaction.
        NotFound
            The member or emoji you specified was not found.
        TypeError
            The emoji parameter is invalid.
        """
        emoji = convert_emoji_reaction(emoji)

        # Removing one's own reaction uses a distinct, less-privileged endpoint.
        if member.id == self._state.self_id:
            await self._state.http.remove_own_reaction(self.channel.id, self.id, emoji)
        else:
            await self._state.http.remove_reaction(self.channel.id, self.id, emoji, member.id)

    async def clear_reaction(self, emoji: Union[EmojiInputType, Reaction]) -> None:
        """|coro|

        Clears a specific reaction from the message.

        The emoji may be a unicode emoji or a custom guild :class:`Emoji`.

        You need the :attr:`~Permissions.manage_messages` permission to use this.

        .. versionadded:: 1.3

        .. versionchanged:: 2.0
            This function will now raise :exc:`TypeError` instead of
            ``InvalidArgument``.

        Parameters
        -----------
        emoji: Union[:class:`Emoji`, :class:`Reaction`, :class:`PartialEmoji`, :class:`str`]
            The emoji to clear.

        Raises
        --------
        HTTPException
            Clearing the reaction failed.
        Forbidden
            You do not have the proper permissions to clear the reaction.
        NotFound
            The emoji you specified was not found.
        TypeError
            The emoji parameter is invalid.
        """
        emoji = convert_emoji_reaction(emoji)
        await self._state.http.clear_single_reaction(self.channel.id, self.id, emoji)

    async def clear_reactions(self) -> None:
        """|coro|

        Removes all the reactions from the message.

        You need the :attr:`~Permissions.manage_messages` permission to use this.

        Raises
        --------
        HTTPException
            Removing the reactions failed.
        Forbidden
            You do not have the proper permissions to remove all the reactions.
        """
        await self._state.http.clear_reactions(self.channel.id, self.id)

    async def create_thread(
        self,
        *,
        name: str,
        auto_archive_duration: ThreadArchiveDuration = MISSING,
        slowmode_delay: Optional[int] = None,
        reason: Optional[str] = None,
    ) -> Thread:
        """|coro|

        Creates a public thread from this message.

        You must have :attr:`~discord.Permissions.create_public_threads` in order to
        create a public thread from a message.

        The channel this message belongs in must be a :class:`TextChannel`.

        .. versionadded:: 2.0

        Parameters
        -----------
        name: :class:`str`
            The name of the thread.
        auto_archive_duration: :class:`int`
            The duration in minutes before a thread is automatically archived for inactivity.
            If not provided, the channel's default auto archive duration is used.
        slowmode_delay: Optional[:class:`int`]
            Specifies the slowmode rate limit for user in this channel, in seconds.
            The maximum value possible is `21600`. By default no slowmode rate limit
            if this is ``None``.
        reason: Optional[:class:`str`]
            The reason for creating a new thread. Shows up on the audit log.

        Raises
        -------
        Forbidden
            You do not have permissions to create a thread.
        HTTPException
            Creating the thread failed.
        ValueError
            This message does not have guild info attached.

        Returns
        --------
        :class:`.Thread`
            The created thread.
        """
        if self.guild is None:
            raise ValueError('This message does not have guild info attached.')

        # Fall back to the channel's default (or 1440 minutes) when the
        # caller did not supply a duration.
        default_auto_archive_duration: ThreadArchiveDuration = getattr(self.channel, 'default_auto_archive_duration', 1440)
        data = await self._state.http.start_thread_with_message(
            self.channel.id,
            self.id,
            name=name,
            auto_archive_duration=auto_archive_duration or default_auto_archive_duration,
            rate_limit_per_user=slowmode_delay,
            reason=reason,
        )
        return Thread(guild=self.guild, state=self._state, data=data)

    async def reply(self, content: Optional[str] = None, **kwargs: Any) -> Message:
        """|coro|

        A shortcut method to :meth:`.abc.Messageable.send` to reply to the
        :class:`.Message`.

        .. versionadded:: 1.6

        .. versionchanged:: 2.0
            This function will now raise :exc:`TypeError` or
            :exc:`ValueError` instead of ``InvalidArgument``.

        Raises
        --------
        ~discord.HTTPException
            Sending the message failed.
        ~discord.Forbidden
            You do not have the proper permissions to send the message.
        ValueError
            The ``files`` list is not of the appropriate size
        TypeError
            You specified both ``file`` and ``files``.

        Returns
        ---------
        :class:`.Message`
            The message that was sent.
        """
        return await self.channel.send(content, reference=self, **kwargs)

    def to_reference(self, *, fail_if_not_exists: bool = True) -> MessageReference:
        """Creates a :class:`~discord.MessageReference` from the current message.

        .. versionadded:: 1.6

        Parameters
        ----------
        fail_if_not_exists: :class:`bool`
            Whether replying using the message reference should raise :class:`HTTPException`
            if the message no longer exists or Discord could not fetch the message.

            .. versionadded:: 1.7

        Returns
        ---------
        :class:`~discord.MessageReference`
            The reference to this message.
        """
        return MessageReference.from_message(self, fail_if_not_exists=fail_if_not_exists)

    def to_message_reference_dict(self) -> MessageReferencePayload:
        # Minimal reference payload; guild_id only when known.
        data: MessageReferencePayload = {
            'message_id': self.id,
            'channel_id': self.channel.id,
        }
        if self.guild is not None:
            data['guild_id'] = self.guild.id
        return data
@flatten_handlers
class Message(PartialMessage, Hashable):
r"""Represents a message from Discord.
.. container:: operations
.. describe:: x == y
Checks if two messages are equal.
.. describe:: x != y
Checks if two messages are not equal.
.. describe:: hash(x)
Returns the message's hash.
Attributes
-----------
tts: :class:`bool`
Specifies if the message was done with text-to-speech.
This can only be accurately received in :func:`on_message` due to
a discord limitation.
type: :class:`MessageType`
The type of message. In most cases this should not be checked, but it is helpful
in cases where it might be a system message for :attr:`system_content`.
author: Union[:class:`Member`, :class:`abc.User`]
A :class:`Member` that sent the message. If :attr:`channel` is a
        private channel or the user has left the guild, then it is a :class:`User` instead.
content: :class:`str`
The actual contents of the message.
nonce: Optional[Union[:class:`str`, :class:`int`]]
The value used by the discord guild and the client to verify that the message is successfully sent.
This is not stored long term within Discord's servers and is only used ephemerally.
embeds: List[:class:`Embed`]
A list of embeds the message has.
channel: Union[:class:`TextChannel`, :class:`VoiceChannel`, :class:`Thread`, :class:`DMChannel`, :class:`GroupChannel`, :class:`PartialMessageable`]
The :class:`TextChannel` or :class:`Thread` that the message was sent from.
Could be a :class:`DMChannel` or :class:`GroupChannel` if it's a private message.
reference: Optional[:class:`~discord.MessageReference`]
The message that this message references. This is only applicable to messages of
type :attr:`MessageType.pins_add`, crossposted messages created by a
followed channel integration, or message replies.
.. versionadded:: 1.5
mention_everyone: :class:`bool`
Specifies if the message mentions everyone.
.. note::
This does not check if the ``@everyone`` or the ``@here`` text is in the message itself.
Rather this boolean indicates if either the ``@everyone`` or the ``@here`` text is in the message
**and** it did end up mentioning.
mentions: List[:class:`abc.User`]
A list of :class:`Member` that were mentioned. If the message is in a private message
then the list will be of :class:`User` instead. For messages that are not of type
:attr:`MessageType.default`\, this array can be used to aid in system messages.
For more information, see :attr:`system_content`.
.. warning::
The order of the mentions list is not in any particular order so you should
not rely on it. This is a Discord limitation, not one with the library.
channel_mentions: List[Union[:class:`abc.GuildChannel`, :class:`Thread`]]
A list of :class:`abc.GuildChannel` or :class:`Thread` that were mentioned. If the message is
in a private message then the list is always empty.
role_mentions: List[:class:`Role`]
A list of :class:`Role` that were mentioned. If the message is in a private message
then the list is always empty.
id: :class:`int`
The message ID.
webhook_id: Optional[:class:`int`]
If this message was sent by a webhook, then this is the webhook ID's that sent this
message.
attachments: List[:class:`Attachment`]
A list of attachments given to a message.
pinned: :class:`bool`
Specifies if the message is currently pinned.
flags: :class:`MessageFlags`
Extra features of the message.
.. versionadded:: 1.3
reactions : List[:class:`Reaction`]
Reactions to a message. Reactions can be either custom emoji or standard unicode emoji.
activity: Optional[:class:`dict`]
The activity associated with this message. Sent with Rich-Presence related messages that for
example, request joining, spectating, or listening to or with another member.
It is a dictionary with the following optional keys:
- ``type``: An integer denoting the type of message activity being requested.
- ``party_id``: The party ID associated with the party.
application: Optional[:class:`dict`]
The rich presence enabled application associated with this message.
It is a dictionary with the following keys:
- ``id``: A string representing the application's ID.
- ``name``: A string representing the application's name.
- ``description``: A string representing the application's description.
- ``icon``: A string representing the icon ID of the application.
- ``cover_image``: A string representing the embed's image asset ID.
stickers: List[:class:`StickerItem`]
A list of sticker items given to the message.
.. versionadded:: 1.6
components: List[:class:`Component`]
A list of components in the message.
.. versionadded:: 2.0
interaction: Optional[:class:`MessageInteraction`]
The interaction that this message is a response to.
.. versionadded:: 2.0
guild: Optional[:class:`Guild`]
The guild that the message belongs to, if applicable.
"""
    # Slots keep per-instance memory down for the potentially large number of
    # cached messages. The ``_cs_*`` entries back the utils.cached_slot_property
    # caches below and are cleared field-by-field in _update().
    __slots__ = (
        '_state',
        '_edited_timestamp',
        '_cs_channel_mentions',
        '_cs_raw_mentions',
        '_cs_clean_content',
        '_cs_raw_channel_mentions',
        '_cs_raw_role_mentions',
        '_cs_system_content',
        'tts',
        'content',
        'channel',
        'webhook_id',
        'mention_everyone',
        'embeds',
        'mentions',
        'author',
        'attachments',
        'nonce',
        'pinned',
        'role_mentions',
        'type',
        'flags',
        'reactions',
        'reference',
        'application',
        'activity',
        'stickers',
        'components',
        'interaction',
    )
    if TYPE_CHECKING:
        # Presumably populated by the @flatten_handlers class decorator from
        # the _handle_* methods and _cs_* slots — TODO confirm in utils.
        _HANDLERS: ClassVar[List[Tuple[str, Callable[..., None]]]]
        _CACHED_SLOTS: ClassVar[List[str]]
        # guild: Optional[Guild]
        reference: Optional[MessageReference]
        mentions: List[Union[User, Member]]
        author: Union[User, Member]
        role_mentions: List[Role]
    def __init__(
        self,
        *,
        state: ConnectionState,
        channel: MessageableChannel,
        data: MessagePayload,
    ) -> None:
        """Builds a full ``Message`` from a raw gateway/REST message payload."""
        self.channel: MessageableChannel = channel
        self.id: int = int(data['id'])
        self._state: ConnectionState = state
        self.webhook_id: Optional[int] = utils._get_as_snowflake(data, 'webhook_id')
        self.reactions: List[Reaction] = [Reaction(message=self, data=d) for d in data.get('reactions', [])]
        self.attachments: List[Attachment] = [Attachment(data=a, state=self._state) for a in data['attachments']]
        self.embeds: List[Embed] = [Embed.from_dict(a) for a in data['embeds']]
        self.application: Optional[MessageApplicationPayload] = data.get('application')
        self.activity: Optional[MessageActivityPayload] = data.get('activity')
        # NOTE(review): duplicate of the self.channel assignment above; harmless.
        self.channel: MessageableChannel = channel
        self._edited_timestamp: Optional[datetime.datetime] = utils.parse_time(data['edited_timestamp'])
        self.type: MessageType = try_enum(MessageType, data['type'])
        self.pinned: bool = data['pinned']
        self.flags: MessageFlags = MessageFlags._from_value(data.get('flags', 0))
        self.mention_everyone: bool = data['mention_everyone']
        self.tts: bool = data['tts']
        self.content: str = data['content']
        self.nonce: Optional[Union[int, str]] = data.get('nonce')
        self.stickers: List[StickerItem] = [StickerItem(data=d, state=state) for d in data.get('sticker_items', [])]
        self.components: List[Component] = [_component_factory(d) for d in data.get('components', [])]
        try:
            # if the channel doesn't have a guild attribute, we handle that
            self.guild = channel.guild  # type: ignore
        except AttributeError:
            self.guild = state._get_guild(utils._get_as_snowflake(data, 'guild_id'))
        self.interaction: Optional[MessageInteraction] = None
        try:
            interaction = data['interaction']
        except KeyError:
            pass
        else:
            self.interaction = MessageInteraction(state=state, guild=self.guild, data=interaction)
        try:
            ref = data['message_reference']
        except KeyError:
            self.reference = None
        else:
            self.reference = ref = MessageReference.with_state(state, ref)
            try:
                resolved = data['referenced_message']
            except KeyError:
                pass
            else:
                if resolved is None:
                    # The referenced message existed at some point but has since been deleted.
                    ref.resolved = DeletedReferencedMessage(ref)
                else:
                    # Right now the channel IDs match but maybe in the future they won't.
                    if ref.channel_id == channel.id:
                        chan = channel
                    elif isinstance(channel, Thread) and channel.parent_id == ref.channel_id:
                        chan = channel
                    else:
                        chan, _ = state._get_guild_channel(resolved, ref.guild_id)
                    # the channel will be the correct type here
                    ref.resolved = self.__class__(channel=chan, data=resolved, state=state)  # type: ignore
        # 'author' must run before 'member': _handle_member reads self.author.
        for handler in ('author', 'member', 'mentions', 'mention_roles'):
            try:
                getattr(self, f'_handle_{handler}')(data[handler])
            except KeyError:
                continue
def __repr__(self) -> str:
name = self.__class__.__name__
return (
f'<{name} id={self.id} channel={self.channel!r} type={self.type!r} author={self.author!r} flags={self.flags!r}>'
)
def _try_patch(self, data, key, transform=None) -> None:
try:
value = data[key]
except KeyError:
pass
else:
if transform is None:
setattr(self, key, value)
else:
setattr(self, key, transform(value))
def _add_reaction(self, data, emoji, user_id) -> Reaction:
reaction = utils.find(lambda r: r.emoji == emoji, self.reactions)
is_me = data['me'] = user_id == self._state.self_id
if reaction is None:
reaction = Reaction(message=self, data=data, emoji=emoji)
self.reactions.append(reaction)
else:
reaction.count += 1
if is_me:
reaction.me = is_me
return reaction
    def _remove_reaction(self, data: MessageReactionRemoveEvent, emoji: EmojiInputType, user_id: int) -> Reaction:
        # Decrement (and possibly drop) the tracked reaction matching ``emoji``.
        reaction = utils.find(lambda r: r.emoji == emoji, self.reactions)
        if reaction is None:
            # already removed?
            raise ValueError('Emoji already removed?')
        # if reaction isn't in the list, we crash. This means discord
        # sent bad data, or we stored improperly
        reaction.count -= 1
        if user_id == self._state.self_id:
            reaction.me = False
        if reaction.count == 0:
            # this raises ValueError if something went wrong as well.
            self.reactions.remove(reaction)
        return reaction
def _clear_emoji(self, emoji: PartialEmoji) -> Optional[Reaction]:
to_check = str(emoji)
for index, reaction in enumerate(self.reactions):
if str(reaction.emoji) == to_check:
break
else:
# didn't find anything so just return
return
del self.reactions[index]
return reaction
    def _update(self, data: MessageUpdateEvent) -> None:
        # Applies a partial MESSAGE_UPDATE payload onto this cached message.
        # In an update scheme, 'author' key has to be handled before 'member'
        # otherwise they overwrite each other which is undesirable.
        # Since there's no good way to do this we have to iterate over every
        # handler rather than iterating over the keys which is a little slower
        for key, handler in self._HANDLERS:
            try:
                value = data[key]
            except KeyError:
                continue
            else:
                handler(self, value)
        # clear the cached properties
        for attr in self._CACHED_SLOTS:
            try:
                delattr(self, attr)
            except AttributeError:
                pass
    # The _handle_* methods below map one payload field each onto this
    # instance; _update() dispatches to them via _HANDLERS.
    def _handle_edited_timestamp(self, value: str) -> None:
        self._edited_timestamp = utils.parse_time(value)
    def _handle_pinned(self, value: bool) -> None:
        self.pinned = value
    def _handle_flags(self, value: int) -> None:
        self.flags = MessageFlags._from_value(value)
    def _handle_application(self, value: MessageApplicationPayload) -> None:
        self.application = value
    def _handle_activity(self, value: MessageActivityPayload) -> None:
        self.activity = value
    def _handle_mention_everyone(self, value: bool) -> None:
        self.mention_everyone = value
    def _handle_tts(self, value: bool) -> None:
        self.tts = value
    def _handle_type(self, value: int) -> None:
        self.type = try_enum(MessageType, value)
    def _handle_content(self, value: str) -> None:
        self.content = value
    def _handle_attachments(self, value: List[AttachmentPayload]) -> None:
        self.attachments = [Attachment(data=a, state=self._state) for a in value]
    def _handle_embeds(self, value: List[EmbedPayload]) -> None:
        self.embeds = [Embed.from_dict(data) for data in value]
    def _handle_nonce(self, value: Union[str, int]) -> None:
        self.nonce = value
    def _handle_author(self, author: UserPayload) -> None:
        self.author = User(state=self._state, data=author)
    def _handle_member(self, member: MemberPayload) -> None:
        # Must run after _handle_author: the member payload is completed with
        # the already-parsed user data before upgrading author to a Member.
        member["user"] = self.author._to_minimal_user_json()
        self.author = Member(data=member, guild=self.guild, state=self._state)
    def _handle_mentions(self, mentions: List[UserWithMemberPayload]) -> None:
        self.mentions = r = []
        guild = self.guild
        state = self._state
        if not isinstance(guild, Guild):
            # No guild cache to resolve members against; store plain users.
            self.mentions = [state.store_user(m) for m in mentions]
            return
        for mention in filter(None, mentions):
            id_search = int(mention['id'])
            member = guild.get_member(id_search)
            if member is not None:
                r.append(member)
            else:
                r.append(Member._try_upgrade(data=mention, guild=guild, state=state))
    def _handle_mention_roles(self, role_mentions: List[int]) -> None:
        # Only roles still present in the guild cache are kept.
        self.role_mentions = []
        if isinstance(self.guild, Guild):
            for role_id in map(int, role_mentions):
                role = self.guild.get_role(role_id)
                if role is not None:
                    self.role_mentions.append(role)
    def _handle_components(self, components: List[ComponentPayload]):
        # Intentional no-op in this class; components are not re-parsed here.
        pass
    def _handle_interaction(self, data: MessageInteractionPayload):
        self.interaction = MessageInteraction(state=self._state, guild=self.guild, data=data)
    def _rebind_cached_references(self, new_guild: Guild, new_channel: Union[TextChannel, Thread]) -> None:
        # Re-point guild/channel references, e.g. after cache replacement.
        self.guild = new_guild
        self.channel = new_channel
@utils.cached_slot_property('_cs_raw_mentions')
def raw_mentions(self) -> List[int]:
"""List[:class:`int`]: A property that returns an array of user IDs matched with
the syntax of ``<@user_id>`` in the message content.
This allows you to receive the user IDs of mentioned users
even in a private message context.
"""
return [int(x) for x in re.findall(r'<@!?([0-9]{15,20})>', self.content)]
@utils.cached_slot_property('_cs_raw_channel_mentions')
def raw_channel_mentions(self) -> List[int]:
"""List[:class:`int`]: A property that returns an array of channel IDs matched with
the syntax of ``<#channel_id>`` in the message content.
"""
return [int(x) for x in re.findall(r'<#([0-9]{15,20})>', self.content)]
@utils.cached_slot_property('_cs_raw_role_mentions')
def raw_role_mentions(self) -> List[int]:
"""List[:class:`int`]: A property that returns an array of role IDs matched with
the syntax of ``<@&role_id>`` in the message content.
"""
return [int(x) for x in re.findall(r'<@&([0-9]{15,20})>', self.content)]
@utils.cached_slot_property('_cs_channel_mentions')
def channel_mentions(self) -> List[Union[GuildChannel, Thread]]:
if self.guild is None:
return []
it = filter(None, map(self.guild._resolve_channel, self.raw_channel_mentions))
return utils._unique(it)
    @utils.cached_slot_property('_cs_clean_content')
    def clean_content(self) -> str:
        """:class:`str`: A property that returns the content in a "cleaned up"
        manner. This basically means that mentions are transformed
        into the way the client shows it. e.g. ``<#id>`` will transform
        into ``#name``.

        This will also transform @everyone and @here mentions into
        non-mentions.

        .. note::

            This *does not* affect markdown. If you want to escape
            or remove markdown then use :func:`utils.escape_markdown` or :func:`utils.remove_markdown`
            respectively, along with this function.
        """
        # Two sets of resolvers: with a guild we can look up members, roles and
        # channels from the cache; without one we can only use self.mentions.
        if self.guild:
            def resolve_member(id: int) -> str:
                m = self.guild.get_member(id) or utils.get(self.mentions, id=id)  # type: ignore
                return f'@{m.display_name}' if m else '@deleted-user'
            def resolve_role(id: int) -> str:
                r = self.guild.get_role(id) or utils.get(self.role_mentions, id=id)  # type: ignore
                return f'@{r.name}' if r else '@deleted-role'
            def resolve_channel(id: int) -> str:
                c = self.guild._resolve_channel(id)  # type: ignore
                return f'#{c.name}' if c else '#deleted-channel'
        else:
            def resolve_member(id: int) -> str:
                m = utils.get(self.mentions, id=id)
                return f'@{m.display_name}' if m else '@deleted-user'
            def resolve_role(id: int) -> str:
                return '@deleted-role'
            def resolve_channel(id: int) -> str:
                return f'#deleted-channel'
        # Maps the mention sigil captured by the regex to its resolver.
        transforms = {
            '@': resolve_member,
            '@!': resolve_member,
            '#': resolve_channel,
            '@&': resolve_role,
        }
        def repl(match: re.Match) -> str:
            type = match[1]
            id = int(match[2])
            transformed = transforms[type](id)
            return transformed
        result = re.sub(r'<(@[!&]?|#)([0-9]{15,20})>', repl, self.content)
        # Finally neutralise any remaining @everyone/@here style mentions.
        return escape_mentions(result)
    @property
    def created_at(self) -> datetime.datetime:
        """:class:`datetime.datetime`: The message's creation time in UTC."""
        # Derived from the snowflake ID itself; no extra API data needed.
        return utils.snowflake_time(self.id)
    @property
    def edited_at(self) -> Optional[datetime.datetime]:
        """Optional[:class:`datetime.datetime`]: An aware UTC datetime object containing the edited time of the message."""
        # Set from utils.parse_time in __init__/_handle_edited_timestamp;
        # presumably None when the message was never edited — see __init__.
        return self._edited_timestamp
def is_system(self) -> bool:
""":class:`bool`: Whether the message is a system message.
A system message is a message that is constructed entirely by the Discord API
in response to something.
.. versionadded:: 1.3
"""
return self.type not in (
MessageType.default,
MessageType.reply,
MessageType.chat_input_command,
MessageType.context_menu_command,
MessageType.thread_starter_message,
)
    @utils.cached_slot_property('_cs_system_content')
    def system_content(self) -> Optional[str]:
        r""":class:`str`: A property that returns the content that is rendered
        regardless of the :attr:`Message.type`.

        In the case of :attr:`MessageType.default` and :attr:`MessageType.reply`\,
        this just returns the regular :attr:`Message.content`. Otherwise this
        returns an English message denoting the contents of the system message.
        """
        if self.type is MessageType.default:
            return self.content
        if self.type is MessageType.recipient_add:
            if self.channel.type is ChannelType.group:
                return f'{self.author.name} added {self.mentions[0].name} to the group.'
            else:
                return f'{self.author.name} added {self.mentions[0].name} to the thread.'
        if self.type is MessageType.recipient_remove:
            if self.channel.type is ChannelType.group:
                return f'{self.author.name} removed {self.mentions[0].name} from the group.'
            else:
                return f'{self.author.name} removed {self.mentions[0].name} from the thread.'
        if self.type is MessageType.channel_name_change:
            return f'{self.author.name} changed the channel name: **{self.content}**'
        if self.type is MessageType.channel_icon_change:
            return f'{self.author.name} changed the channel icon.'
        if self.type is MessageType.pins_add:
            return f'{self.author.name} pinned a message to this channel.'
        if self.type is MessageType.new_member:
            formats = [
                "{0} joined the party.",
                "{0} is here.",
                "Welcome, {0}. We hope you brought pizza.",
                "A wild {0} appeared.",
                "{0} just landed.",
                "{0} just slid into the server.",
                "{0} just showed up!",
                "Welcome {0}. Say hi!",
                "{0} hopped into the server.",
                "Everyone welcome {0}!",
                "Glad you're here, {0}.",
                "Good to see you, {0}.",
                "Yay you made it, {0}!",
            ]
            # The variant is picked deterministically from the creation
            # timestamp, mirroring how the official client renders it.
            created_at_ms = int(self.created_at.timestamp() * 1000)
            return formats[created_at_ms % len(formats)].format(self.author.name)
        if self.type is MessageType.premium_guild_subscription:
            if not self.content:
                return f'{self.author.name} just boosted the server!'
            else:
                return f'{self.author.name} just boosted the server **{self.content}** times!'
        if self.type is MessageType.premium_guild_tier_1:
            if not self.content:
                return f'{self.author.name} just boosted the server! {self.guild} has achieved **Level 1!**'
            else:
                return f'{self.author.name} just boosted the server **{self.content}** times! {self.guild} has achieved **Level 1!**'
        if self.type is MessageType.premium_guild_tier_2:
            if not self.content:
                return f'{self.author.name} just boosted the server! {self.guild} has achieved **Level 2!**'
            else:
                return f'{self.author.name} just boosted the server **{self.content}** times! {self.guild} has achieved **Level 2!**'
        if self.type is MessageType.premium_guild_tier_3:
            if not self.content:
                return f'{self.author.name} just boosted the server! {self.guild} has achieved **Level 3!**'
            else:
                return f'{self.author.name} just boosted the server **{self.content}** times! {self.guild} has achieved **Level 3!**'
        if self.type is MessageType.channel_follow_add:
            return (
                f'{self.author.name} has added {self.content} to this channel. Its most important updates will show up here.'
            )
        if self.type is MessageType.guild_stream:
            # the author will be a Member
            return f'{self.author.name} is live! Now streaming {self.author.activity.name}'  # type: ignore
        if self.type is MessageType.guild_discovery_disqualified:
            return 'This server has been removed from Server Discovery because it no longer passes all the requirements. Check Server Settings for more details.'
        if self.type is MessageType.guild_discovery_requalified:
            return 'This server is eligible for Server Discovery again and has been automatically relisted!'
        if self.type is MessageType.guild_discovery_grace_period_initial_warning:
            return 'This server has failed Discovery activity requirements for 1 week. If this server fails for 4 weeks in a row, it will be automatically removed from Discovery.'
        if self.type is MessageType.guild_discovery_grace_period_final_warning:
            return 'This server has failed Discovery activity requirements for 3 weeks in a row. If this server fails for 1 more week, it will be removed from Discovery.'
        if self.type is MessageType.thread_created:
            return f'{self.author.name} started a thread: **{self.content}**. See all **threads**.'
        if self.type is MessageType.reply:
            return self.content
        if self.type is MessageType.thread_starter_message:
            if self.reference is None or self.reference.resolved is None:
                return 'Sorry, we couldn\'t load the first message in this thread'
            # the resolved message for the reference will be a Message
            return self.reference.resolved.content  # type: ignore
        if self.type is MessageType.guild_invite_reminder:
            return 'Wondering who to invite?\nStart by inviting anyone who can help you build the server!'
        # Any message type not handled above implicitly returns None.
    # These @overload stubs let type-checkers enforce that ``embed`` and
    # ``embeds`` are mutually exclusive; runtime validation happens elsewhere.
    @overload
    async def edit(
        self,
        *,
        content: Optional[str] = ...,
        embed: Optional[Embed] = ...,
        attachments: Sequence[Union[Attachment, File]] = ...,
        suppress: bool = ...,
        delete_after: Optional[float] = ...,
        allowed_mentions: Optional[AllowedMentions] = ...,
        view: Optional[View] = ...,
    ) -> Message:
        ...
    @overload
    async def edit(
        self,
        *,
        content: Optional[str] = ...,
        embeds: Sequence[Embed] = ...,
        attachments: Sequence[Union[Attachment, File]] = ...,
        suppress: bool = ...,
        delete_after: Optional[float] = ...,
        allowed_mentions: Optional[AllowedMentions] = ...,
        view: Optional[View] = ...,
    ) -> Message:
        ...
async def edit(
self,
content: Optional[str] = MISSING,
embed: Optional[Embed] = MISSING,
embeds: Sequence[Embed] = MISSING,
attachments: Sequence[Union[Attachment, File]] = MISSING,
suppress: bool = False,
delete_after: Optional[float] = None,
allowed_mentions: Optional[AllowedMentions] = MISSING,
view: Optional[View] = MISSING,
) -> Message:
"""|coro|
Edits the message.
The content must be able to be transformed into a string via ``str(content)``.
.. versionchanged:: 1.3
The ``suppress`` keyword-only parameter was added.
.. versionchanged:: 2.0
Edits are no longer in-place, the newly edited message is returned instead.
.. versionchanged:: 2.0
This function will now raise :exc:`TypeError` instead of
``InvalidArgument``.
Parameters
-----------
content: Optional[:class:`str`]
The new content to replace the message with.
Could be ``None`` to remove the content.
embed: Optional[:class:`Embed`]
The new embed to replace the original with.
Could be ``None`` to remove the embed.
embeds: List[:class:`Embed`]
The new embeds to replace the original with. Must be a maximum of 10.
To remove all embeds ``[]`` should be passed.
.. versionadded:: 2.0
attachments: List[Union[:class:`Attachment`, :class:`File`]]
A list of attachments to keep in the message as well as new files to upload. If ``[]`` is passed
then all attachments are removed.
.. note::
New files will always appear after current attachments.
.. versionadded:: 2.0
suppress: :class:`bool`
Whether to suppress embeds for the message. This removes
all the embeds if set to ``True``. If set to ``False``
this brings the embeds back if they were suppressed.
Using this parameter requires :attr:`~.Permissions.manage_messages`.
delete_after: Optional[:class:`float`]
If provided, the number of seconds to wait in the background
before deleting the message we just edited. If the deletion fails,
then it is silently ignored.
allowed_mentions: Optional[:class:`~discord.AllowedMentions`]
Controls the mentions being processed in this message. If this is
passed, then the object is merged with :attr:`~discord.Client.allowed_mentions`.
The merging behaviour only overrides attributes that have been explicitly passed
to the object, otherwise it uses the attributes set in :attr:`~discord.Client.allowed_mentions`.
If no object is passed at all then the defaults given by :attr:`~discord.Client.allowed_mentions`
are used instead.
.. versionadded:: 1.4
view: Optional[:class:`~discord.ui.View`]
The updated view to update this message with. If ``None`` is passed then
the view is removed.
Raises
-------
HTTPException
Editing the message failed.
Forbidden
Tried to suppress a message without permissions or
edited a message's content or embed that isn't yours.
TypeError
You specified both ``embed`` and ``embeds``
Returns
--------
:class:`Message`
The newly edited message.
"""
if content is not MISSING:
previous_allowed_mentions = self._state.allowed_mentions
else:
previous_allowed_mentions = None
if suppress is not MISSING:
flags = MessageFlags._from_value(self.flags.value)
flags.suppress_embeds = suppress
else:
flags = MISSING
if view is not MISSING:
self._state.prevent_view_updates_for(self.id)
params = handle_message_parameters(
content=content,
flags=flags,
embed=embed,
embeds=embeds,
attachments=attachments,
view=view,
allowed_mentions=allowed_mentions,
previous_allowed_mentions=previous_allowed_mentions,
)
data = await self._state.http.edit_message(self.channel.id, self.id, params=params)
message = Message(state=self._state, channel=self.channel, data=data)
if view and not view.is_finished():
self._state.store_view(view, self.id)
if delete_after is not None:
await self.delete(delay=delete_after)
return message
async def add_files(self, *files: File) -> Message:
r"""|coro|
Adds new files to the end of the message attachments.
.. versionadded:: 2.0
Parameters
-----------
\*files: :class:`File`
New files to add to the message.
Raises
-------
HTTPException
Editing the message failed.
Forbidden
Tried to edit a message that isn't yours.
Returns
--------
:class:`Message`
The newly edited message.
"""
return await self.edit(attachments=[*self.attachments, *files])
async def remove_attachments(self, *attachments: Attachment) -> Message:
r"""|coro|
Removes attachments from the message.
.. versionadded:: 2.0
Parameters
-----------
\*attachments: :class:`Attachment`
Attachments to remove from the message.
Raises
-------
HTTPException
Editing the message failed.
Forbidden
Tried to edit a message that isn't yours.
Returns
--------
:class:`Message`
The newly edited message.
"""
return await self.edit(attachments=[a for a in self.attachments if a not in attachments])
| 36.239823
| 179
| 0.613306
|
from __future__ import annotations
import asyncio
import datetime
import re
import io
from os import PathLike
from typing import (
Dict,
TYPE_CHECKING,
Sequence,
Union,
List,
Optional,
Any,
Callable,
Tuple,
ClassVar,
Type,
overload,
)
from . import utils
from .reaction import Reaction
from .emoji import Emoji
from .partial_emoji import PartialEmoji
from .enums import InteractionType, MessageType, ChannelType, try_enum
from .errors import HTTPException
from .components import _component_factory
from .embeds import Embed
from .member import Member
from .flags import MessageFlags
from .file import File
from .utils import escape_mentions, MISSING
from .http import handle_message_parameters
from .guild import Guild
from .mixins import Hashable
from .sticker import StickerItem
from .threads import Thread
from .user import User
from .channel import PartialMessageable
if TYPE_CHECKING:
from typing_extensions import Self
from .types.message import (
Message as MessagePayload,
Attachment as AttachmentPayload,
MessageReference as MessageReferencePayload,
MessageApplication as MessageApplicationPayload,
MessageActivity as MessageActivityPayload,
)
from .types.interactions import MessageInteraction as MessageInteractionPayload
from .types.components import Component as ComponentPayload
from .types.threads import ThreadArchiveDuration
from .types.member import (
Member as MemberPayload,
UserWithMember as UserWithMemberPayload,
)
from .types.user import User as UserPayload
from .types.embed import Embed as EmbedPayload
from .types.gateway import MessageReactionRemoveEvent, MessageUpdateEvent
from .abc import Snowflake
from .abc import GuildChannel, MessageableChannel
from .components import Component
from .state import ConnectionState
from .channel import TextChannel
from .mentions import AllowedMentions
from .user import User
from .role import Role
from .ui.view import View
EmojiInputType = Union[Emoji, PartialEmoji, str]
__all__ = (
'Attachment',
'Message',
'PartialMessage',
'MessageInteraction',
'MessageReference',
'DeletedReferencedMessage',
)
def convert_emoji_reaction(emoji: Union[EmojiInputType, Reaction]) -> str:
    # Normalises any emoji-like input into the string form used by the
    # reactions HTTP endpoints (``name:id`` for custom emoji, or the raw
    # unicode emoji). The isinstance checks are ordered: a Reaction is
    # unwrapped first, then the concrete emoji types, then plain strings.
    if isinstance(emoji, Reaction):
        emoji = emoji.emoji
    if isinstance(emoji, Emoji):
        return f'{emoji.name}:{emoji.id}'
    if isinstance(emoji, PartialEmoji):
        return emoji._as_reaction()
    if isinstance(emoji, str):
        # Assume the string is either a unicode emoji or a Discord emoji
        # string like ``<:name:id>``; the angle brackets must be stripped.
        return emoji.strip('<>')
    raise TypeError(f'emoji argument must be str, Emoji, or Reaction not {emoji.__class__.__name__}.')
class Attachment(Hashable):
    """Represents an attachment from Discord.

    Attributes
    -----------
    id: :class:`int`
        The attachment ID.
    size: :class:`int`
        The attachment size in bytes.
    height: Optional[:class:`int`]
        The attachment's height, if provided by the API.
    width: Optional[:class:`int`]
        The attachment's width, if provided by the API.
    filename: :class:`str`
        The attachment's filename.
    url: :class:`str`
        The attachment URL.
    proxy_url: :class:`str`
        The proxied URL of the attachment.
    content_type: Optional[:class:`str`]
        The attachment's media type, if provided by the API.
    description: Optional[:class:`str`]
        The attachment's description, if any.
    ephemeral: :class:`bool`
        Whether the attachment is ephemeral. Defaults to ``False``
        when the API omits the field.
    """
    __slots__ = (
        'id',
        'size',
        'height',
        'width',
        'filename',
        'url',
        'proxy_url',
        '_http',
        'content_type',
        'description',
        'ephemeral',
    )
    def __init__(self, *, data: AttachmentPayload, state: ConnectionState):
        self.id: int = int(data['id'])
        self.size: int = data['size']
        self.height: Optional[int] = data.get('height')
        self.width: Optional[int] = data.get('width')
        self.filename: str = data['filename']
        self.url: str = data['url']
        self.proxy_url: str = data['proxy_url']
        # Only the HTTP client is kept off the state; used by read()/save().
        self._http = state.http
        self.content_type: Optional[str] = data.get('content_type')
        self.description: Optional[str] = data.get('description')
        self.ephemeral: bool = data.get('ephemeral', False)
    def is_spoiler(self) -> bool:
        """:class:`bool`: Whether this attachment contains a spoiler."""
        # Discord marks spoilered uploads by prefixing the filename.
        return self.filename.startswith('SPOILER_')
    def __repr__(self) -> str:
        return f'<Attachment id={self.id} filename={self.filename!r} url={self.url!r}>'
    def __str__(self) -> str:
        return self.url or ''
    async def save(
        self,
        fp: Union[io.BufferedIOBase, PathLike[Any]],
        *,
        seek_begin: bool = True,
        use_cached: bool = False,
    ) -> int:
        """|coro|

        Saves this attachment into a file-like object or a path.

        Parameters
        -----------
        fp: Union[:class:`io.BufferedIOBase`, :class:`os.PathLike`]
            The file-like object to save this attachment to, or a filename
            to create and write to instead.
        seek_begin: :class:`bool`
            Whether to seek to the beginning of the file after writing.
            Only applies to file-like objects.
        use_cached: :class:`bool`
            Whether to download from :attr:`proxy_url` rather than :attr:`url`.

        Returns
        --------
        :class:`int`
            The number of bytes written.
        """
        data = await self.read(use_cached=use_cached)
        if isinstance(fp, io.BufferedIOBase):
            written = fp.write(data)
            if seek_begin:
                fp.seek(0)
            return written
        else:
            with open(fp, 'wb') as f:
                return f.write(data)
    async def read(self, *, use_cached: bool = False) -> bytes:
        """|coro|

        Retrieves the content of this attachment as :class:`bytes`.

        Parameters
        -----------
        use_cached: :class:`bool`
            Whether to download from :attr:`proxy_url` rather than :attr:`url`.

        Returns
        -------
        :class:`bytes`
            The contents of the attachment.
        """
        url = self.proxy_url if use_cached else self.url
        data = await self._http.get_from_cdn(url)
        return data
    async def to_file(self, *, use_cached: bool = False, spoiler: bool = False) -> File:
        """|coro|

        Downloads the attachment and wraps it in a :class:`File`, keeping the
        original filename and description.

        Parameters
        -----------
        use_cached: :class:`bool`
            Whether to download from :attr:`proxy_url` rather than :attr:`url`.
        spoiler: :class:`bool`
            Whether the resulting file should be flagged as a spoiler.

        Returns
        -------
        :class:`File`
            The attachment as a file.
        """
        data = await self.read(use_cached=use_cached)
        return File(io.BytesIO(data), filename=self.filename, description=self.description, spoiler=spoiler)
    def to_dict(self) -> AttachmentPayload:
        # Serialise back into the wire format; optional keys are only
        # included when they carry a value.
        result: AttachmentPayload = {
            'filename': self.filename,
            'id': self.id,
            'proxy_url': self.proxy_url,
            'size': self.size,
            'url': self.url,
            'spoiler': self.is_spoiler(),
        }
        if self.height:
            result['height'] = self.height
        if self.width:
            result['width'] = self.width
        if self.content_type:
            result['content_type'] = self.content_type
        if self.description is not None:
            result['description'] = self.description
        return result
class DeletedReferencedMessage:
    """A sentinel type placed in :attr:`MessageReference.resolved` when the
    referenced message was resolved at some point but has since been deleted.

    It proxies the IDs of the deleted message through the parent reference.
    """
    __slots__ = ('_parent',)
    def __init__(self, parent: MessageReference):
        # Only the reference is kept; every property delegates to it.
        self._parent: MessageReference = parent
    def __repr__(self) -> str:
        return f"<DeletedReferencedMessage id={self.id} channel_id={self.channel_id} guild_id={self.guild_id!r}>"
    @property
    def id(self) -> int:
        """:class:`int`: The message ID of the deleted referenced message."""
        return self._parent.message_id
    @property
    def channel_id(self) -> int:
        """:class:`int`: The channel ID of the deleted referenced message."""
        return self._parent.channel_id
    @property
    def guild_id(self) -> Optional[int]:
        """Optional[:class:`int`]: The guild ID of the deleted referenced message."""
        return self._parent.guild_id
class MessageReference:
    """Represents a reference to a :class:`~discord.Message`.

    Attributes
    -----------
    message_id: Optional[:class:`int`]
        The ID of the message referenced.
    channel_id: :class:`int`
        The channel ID of the message referenced.
    guild_id: Optional[:class:`int`]
        The guild ID of the message referenced, if any.
    fail_if_not_exists: :class:`bool`
        Whether replying using this reference should raise an HTTP error
        if the referenced message no longer exists.
    resolved: Optional[Union[:class:`Message`, :class:`DeletedReferencedMessage`]]
        The message this reference resolved to, ``None`` if it was never
        resolved, or :class:`DeletedReferencedMessage` if it was resolved
        earlier but deleted since.
    """

    __slots__ = ('message_id', 'channel_id', 'guild_id', 'fail_if_not_exists', 'resolved', '_state')

    def __init__(self, *, message_id: int, channel_id: int, guild_id: Optional[int] = None, fail_if_not_exists: bool = True):
        self._state: Optional[ConnectionState] = None
        self.resolved: Optional[Union[Message, DeletedReferencedMessage]] = None
        self.message_id: Optional[int] = message_id
        self.channel_id: int = channel_id
        self.guild_id: Optional[int] = guild_id
        self.fail_if_not_exists: bool = fail_if_not_exists

    @classmethod
    def with_state(cls, state: ConnectionState, data: MessageReferencePayload) -> Self:
        # Builds from an API payload; bypasses __init__ because the payload
        # may omit message_id/guild_id entirely.
        self = cls.__new__(cls)
        self.message_id = utils._get_as_snowflake(data, 'message_id')
        self.channel_id = int(data.pop('channel_id'))
        self.guild_id = utils._get_as_snowflake(data, 'guild_id')
        self.fail_if_not_exists = data.get('fail_if_not_exists', True)
        self._state = state
        self.resolved = None
        return self

    @classmethod
    def from_message(cls, message: PartialMessage, *, fail_if_not_exists: bool = True) -> Self:
        """Creates a :class:`MessageReference` from an existing message."""
        self = cls(
            message_id=message.id,
            channel_id=message.channel.id,
            guild_id=getattr(message.guild, 'id', None),
            fail_if_not_exists=fail_if_not_exists,
        )
        self._state = message._state
        return self

    @property
    def cached_message(self) -> Optional[Message]:
        """Optional[:class:`Message`]: The referenced message, if present in the internal cache."""
        return self._state and self._state._get_message(self.message_id)

    @property
    def jump_url(self) -> str:
        """:class:`str`: A URL that allows the client to jump to the referenced message."""
        guild_id = self.guild_id if self.guild_id is not None else '@me'
        return f'https://discord.com/channels/{guild_id}/{self.channel_id}/{self.message_id}'

    def __repr__(self) -> str:
        return f'<MessageReference message_id={self.message_id!r} channel_id={self.channel_id!r} guild_id={self.guild_id!r}>'

    def to_dict(self) -> MessageReferencePayload:
        # Omit message_id entirely when absent rather than sending null.
        result: Dict[str, Any] = {'message_id': self.message_id} if self.message_id is not None else {}
        result['channel_id'] = self.channel_id
        if self.guild_id is not None:
            result['guild_id'] = self.guild_id
        if self.fail_if_not_exists is not None:
            result['fail_if_not_exists'] = self.fail_if_not_exists
        # BUGFIX: the source line here was corrupted ("return result sageInteraction(Hashable):",
        # a fusion of this return with the next class header) — restored to a plain return.
        return result
__slots__: Tuple[str, ...] = ('id', 'type', 'name', 'user')
def __init__(self, *, state: ConnectionState, guild: Optional[Guild], data: MessageInteractionPayload) -> None:
self.id: int = int(data['id'])
self.type: InteractionType = try_enum(InteractionType, data['type'])
self.name: str = data['name']
self.user: Union[User, Member] = MISSING
try:
payload = data['member']
except KeyError:
self.user = state.create_user(data['user'])
else:
if guild is None:
# This is an unfortunate data loss, but it's better than giving bad data
self.user = state.create_user(data['user'])
else:
payload['user'] = data['user']
self.user = Member(data=payload, guild=guild, state=state)
def __repr__(self) -> str:
return f'<MessageInteraction id={self.id} name={self.name!r} type={self.type!r} user={self.user!r}>'
@property
def created_at(self) -> datetime.datetime:
return utils.snowflake_time(self.id)
def flatten_handlers(cls: Type[Message]) -> Type[Message]:
prefix = len('_handle_')
handlers = [
(key[prefix:], value)
for key, value in cls.__dict__.items()
if key.startswith('_handle_') and key != '_handle_member'
]
handlers.append(('member', cls._handle_member))
cls._HANDLERS = handlers
cls._CACHED_SLOTS = [attr for attr in cls.__slots__ if attr.startswith('_cs_')]
return cls
class PartialMessage(Hashable):
__slots__ = ('channel', 'id', '_cs_guild', '_state', 'guild')
def __init__(self, *, channel: MessageableChannel, id: int) -> None:
if not isinstance(channel, PartialMessageable) and channel.type not in (
ChannelType.text,
ChannelType.voice,
ChannelType.news,
ChannelType.private,
ChannelType.news_thread,
ChannelType.public_thread,
ChannelType.private_thread,
):
raise TypeError(
f'expected PartialMessageable, TextChannel, VoiceChannel, DMChannel or Thread not {type(channel)!r}'
)
self.channel: MessageableChannel = channel
self._state: ConnectionState = channel._state
self.id: int = id
self.guild: Optional[Guild] = getattr(channel, 'guild', None)
def _update(self, data: MessageUpdateEvent) -> None:
pass
pinned: Any = property(None, lambda x, y: None)
def __repr__(self) -> str:
return f'<PartialMessage id={self.id} channel={self.channel!r}>'
@property
def created_at(self) -> datetime.datetime:
return utils.snowflake_time(self.id)
@property
def jump_url(self) -> str:
guild_id = getattr(self.guild, 'id', '@me')
return f'https://discord.com/channels/{guild_id}/{self.channel.id}/{self.id}'
async def fetch(self) -> Message:
data = await self._state.http.get_message(self.channel.id, self.id)
return self._state.create_message(channel=self.channel, data=data)
async def delete(self, *, delay: Optional[float] = None) -> None:
if delay is not None:
async def delete(delay: float):
await asyncio.sleep(delay)
try:
await self._state.http.delete_message(self.channel.id, self.id)
except HTTPException:
pass
asyncio.create_task(delete(delay))
else:
await self._state.http.delete_message(self.channel.id, self.id)
@overload
async def edit(
self,
*,
content: Optional[str] = ...,
embed: Optional[Embed] = ...,
attachments: Sequence[Union[Attachment, File]] = ...,
delete_after: Optional[float] = ...,
allowed_mentions: Optional[AllowedMentions] = ...,
view: Optional[View] = ...,
) -> Message:
...
@overload
async def edit(
self,
*,
content: Optional[str] = ...,
embeds: Sequence[Embed] = ...,
attachments: Sequence[Union[Attachment, File]] = ...,
delete_after: Optional[float] = ...,
allowed_mentions: Optional[AllowedMentions] = ...,
view: Optional[View] = ...,
) -> Message:
...
async def edit(
self,
content: Optional[str] = MISSING,
embed: Optional[Embed] = MISSING,
embeds: Sequence[Embed] = MISSING,
attachments: Sequence[Union[Attachment, File]] = MISSING,
delete_after: Optional[float] = None,
allowed_mentions: Optional[AllowedMentions] = MISSING,
view: Optional[View] = MISSING,
) -> Message:
if content is not MISSING:
previous_allowed_mentions = self._state.allowed_mentions
else:
previous_allowed_mentions = None
if view is not MISSING:
self._state.prevent_view_updates_for(self.id)
params = handle_message_parameters(
content=content,
embed=embed,
embeds=embeds,
attachments=attachments,
view=view,
allowed_mentions=allowed_mentions,
previous_allowed_mentions=previous_allowed_mentions,
)
data = await self._state.http.edit_message(self.channel.id, self.id, params=params)
message = Message(state=self._state, channel=self.channel, data=data)
if view and not view.is_finished():
self._state.store_view(view, self.id)
if delete_after is not None:
await self.delete(delay=delete_after)
return message
async def publish(self) -> None:
await self._state.http.publish_message(self.channel.id, self.id)
async def pin(self, *, reason: Optional[str] = None) -> None:
await self._state.http.pin_message(self.channel.id, self.id, reason=reason)
self.pinned = True
async def unpin(self, *, reason: Optional[str] = None) -> None:
await self._state.http.unpin_message(self.channel.id, self.id, reason=reason)
self.pinned = False
async def add_reaction(self, emoji: EmojiInputType, /) -> None:
emoji = convert_emoji_reaction(emoji)
await self._state.http.add_reaction(self.channel.id, self.id, emoji)
async def remove_reaction(self, emoji: Union[EmojiInputType, Reaction], member: Snowflake) -> None:
emoji = convert_emoji_reaction(emoji)
if member.id == self._state.self_id:
await self._state.http.remove_own_reaction(self.channel.id, self.id, emoji)
else:
await self._state.http.remove_reaction(self.channel.id, self.id, emoji, member.id)
async def clear_reaction(self, emoji: Union[EmojiInputType, Reaction]) -> None:
emoji = convert_emoji_reaction(emoji)
await self._state.http.clear_single_reaction(self.channel.id, self.id, emoji)
async def clear_reactions(self) -> None:
await self._state.http.clear_reactions(self.channel.id, self.id)
async def create_thread(
self,
*,
name: str,
auto_archive_duration: ThreadArchiveDuration = MISSING,
slowmode_delay: Optional[int] = None,
reason: Optional[str] = None,
) -> Thread:
if self.guild is None:
raise ValueError('This message does not have guild info attached.')
default_auto_archive_duration: ThreadArchiveDuration = getattr(self.channel, 'default_auto_archive_duration', 1440)
data = await self._state.http.start_thread_with_message(
self.channel.id,
self.id,
name=name,
auto_archive_duration=auto_archive_duration or default_auto_archive_duration,
rate_limit_per_user=slowmode_delay,
reason=reason,
)
return Thread(guild=self.guild, state=self._state, data=data)
async def reply(self, content: Optional[str] = None, **kwargs: Any) -> Message:
return await self.channel.send(content, reference=self, **kwargs)
def to_reference(self, *, fail_if_not_exists: bool = True) -> MessageReference:
return MessageReference.from_message(self, fail_if_not_exists=fail_if_not_exists)
def to_message_reference_dict(self) -> MessageReferencePayload:
data: MessageReferencePayload = {
'message_id': self.id,
'channel_id': self.channel.id,
}
if self.guild is not None:
data['guild_id'] = self.guild.id
return data
@flatten_handlers
class Message(PartialMessage, Hashable):
__slots__ = (
'_state',
'_edited_timestamp',
'_cs_channel_mentions',
'_cs_raw_mentions',
'_cs_clean_content',
'_cs_raw_channel_mentions',
'_cs_raw_role_mentions',
'_cs_system_content',
'tts',
'content',
'channel',
'webhook_id',
'mention_everyone',
'embeds',
'mentions',
'author',
'attachments',
'nonce',
'pinned',
'role_mentions',
'type',
'flags',
'reactions',
'reference',
'application',
'activity',
'stickers',
'components',
'interaction',
)
if TYPE_CHECKING:
_HANDLERS: ClassVar[List[Tuple[str, Callable[..., None]]]]
_CACHED_SLOTS: ClassVar[List[str]]
reference: Optional[MessageReference]
mentions: List[Union[User, Member]]
author: Union[User, Member]
role_mentions: List[Role]
def __init__(
self,
*,
state: ConnectionState,
channel: MessageableChannel,
data: MessagePayload,
) -> None:
self.channel: MessageableChannel = channel
self.id: int = int(data['id'])
self._state: ConnectionState = state
self.webhook_id: Optional[int] = utils._get_as_snowflake(data, 'webhook_id')
self.reactions: List[Reaction] = [Reaction(message=self, data=d) for d in data.get('reactions', [])]
self.attachments: List[Attachment] = [Attachment(data=a, state=self._state) for a in data['attachments']]
self.embeds: List[Embed] = [Embed.from_dict(a) for a in data['embeds']]
self.application: Optional[MessageApplicationPayload] = data.get('application')
self.activity: Optional[MessageActivityPayload] = data.get('activity')
self.channel: MessageableChannel = channel
self._edited_timestamp: Optional[datetime.datetime] = utils.parse_time(data['edited_timestamp'])
self.type: MessageType = try_enum(MessageType, data['type'])
self.pinned: bool = data['pinned']
self.flags: MessageFlags = MessageFlags._from_value(data.get('flags', 0))
self.mention_everyone: bool = data['mention_everyone']
self.tts: bool = data['tts']
self.content: str = data['content']
self.nonce: Optional[Union[int, str]] = data.get('nonce')
self.stickers: List[StickerItem] = [StickerItem(data=d, state=state) for d in data.get('sticker_items', [])]
self.components: List[Component] = [_component_factory(d) for d in data.get('components', [])]
try:
self.guild = channel.guild # type: ignore
except AttributeError:
self.guild = state._get_guild(utils._get_as_snowflake(data, 'guild_id'))
self.interaction: Optional[MessageInteraction] = None
try:
interaction = data['interaction']
except KeyError:
pass
else:
self.interaction = MessageInteraction(state=state, guild=self.guild, data=interaction)
try:
ref = data['message_reference']
except KeyError:
self.reference = None
else:
self.reference = ref = MessageReference.with_state(state, ref)
try:
resolved = data['referenced_message']
except KeyError:
pass
else:
if resolved is None:
ref.resolved = DeletedReferencedMessage(ref)
else:
# Right now the channel IDs match but maybe in the future they won't.
if ref.channel_id == channel.id:
chan = channel
elif isinstance(channel, Thread) and channel.parent_id == ref.channel_id:
chan = channel
else:
chan, _ = state._get_guild_channel(resolved, ref.guild_id)
ref.resolved = self.__class__(channel=chan, data=resolved, state=state)
for handler in ('author', 'member', 'mentions', 'mention_roles'):
try:
getattr(self, f'_handle_{handler}')(data[handler])
except KeyError:
continue
def __repr__(self) -> str:
name = self.__class__.__name__
return (
f'<{name} id={self.id} channel={self.channel!r} type={self.type!r} author={self.author!r} flags={self.flags!r}>'
)
def _try_patch(self, data, key, transform=None) -> None:
try:
value = data[key]
except KeyError:
pass
else:
if transform is None:
setattr(self, key, value)
else:
setattr(self, key, transform(value))
def _add_reaction(self, data, emoji, user_id) -> Reaction:
reaction = utils.find(lambda r: r.emoji == emoji, self.reactions)
is_me = data['me'] = user_id == self._state.self_id
if reaction is None:
reaction = Reaction(message=self, data=data, emoji=emoji)
self.reactions.append(reaction)
else:
reaction.count += 1
if is_me:
reaction.me = is_me
return reaction
def _remove_reaction(self, data: MessageReactionRemoveEvent, emoji: EmojiInputType, user_id: int) -> Reaction:
reaction = utils.find(lambda r: r.emoji == emoji, self.reactions)
if reaction is None:
raise ValueError('Emoji already removed?')
# sent bad data, or we stored improperly
reaction.count -= 1
if user_id == self._state.self_id:
reaction.me = False
if reaction.count == 0:
# this raises ValueError if something went wrong as well.
self.reactions.remove(reaction)
return reaction
def _clear_emoji(self, emoji: PartialEmoji) -> Optional[Reaction]:
to_check = str(emoji)
for index, reaction in enumerate(self.reactions):
if str(reaction.emoji) == to_check:
break
else:
# didn't find anything so just return
return
del self.reactions[index]
return reaction
def _update(self, data: MessageUpdateEvent) -> None:
# handler rather than iterating over the keys which is a little slower
for key, handler in self._HANDLERS:
try:
value = data[key]
except KeyError:
continue
else:
handler(self, value)
# clear the cached properties
for attr in self._CACHED_SLOTS:
try:
delattr(self, attr)
except AttributeError:
pass
def _handle_edited_timestamp(self, value: str) -> None:
self._edited_timestamp = utils.parse_time(value)
def _handle_pinned(self, value: bool) -> None:
self.pinned = value
def _handle_flags(self, value: int) -> None:
self.flags = MessageFlags._from_value(value)
def _handle_application(self, value: MessageApplicationPayload) -> None:
self.application = value
def _handle_activity(self, value: MessageActivityPayload) -> None:
self.activity = value
def _handle_mention_everyone(self, value: bool) -> None:
self.mention_everyone = value
def _handle_tts(self, value: bool) -> None:
self.tts = value
def _handle_type(self, value: int) -> None:
self.type = try_enum(MessageType, value)
def _handle_content(self, value: str) -> None:
self.content = value
def _handle_attachments(self, value: List[AttachmentPayload]) -> None:
self.attachments = [Attachment(data=a, state=self._state) for a in value]
def _handle_embeds(self, value: List[EmbedPayload]) -> None:
self.embeds = [Embed.from_dict(data) for data in value]
def _handle_nonce(self, value: Union[str, int]) -> None:
self.nonce = value
def _handle_author(self, author: UserPayload) -> None:
self.author = User(state=self._state, data=author)
def _handle_member(self, member: MemberPayload) -> None:
member["user"] = self.author._to_minimal_user_json()
self.author = Member(data=member, guild=self.guild, state=self._state)
def _handle_mentions(self, mentions: List[UserWithMemberPayload]) -> None:
self.mentions = r = []
guild = self.guild
state = self._state
if not isinstance(guild, Guild):
self.mentions = [state.store_user(m) for m in mentions]
return
for mention in filter(None, mentions):
id_search = int(mention['id'])
member = guild.get_member(id_search)
if member is not None:
r.append(member)
else:
r.append(Member._try_upgrade(data=mention, guild=guild, state=state))
def _handle_mention_roles(self, role_mentions: List[int]) -> None:
self.role_mentions = []
if isinstance(self.guild, Guild):
for role_id in map(int, role_mentions):
role = self.guild.get_role(role_id)
if role is not None:
self.role_mentions.append(role)
def _handle_components(self, components: List[ComponentPayload]):
pass
def _handle_interaction(self, data: MessageInteractionPayload):
self.interaction = MessageInteraction(state=self._state, guild=self.guild, data=data)
def _rebind_cached_references(self, new_guild: Guild, new_channel: Union[TextChannel, Thread]) -> None:
self.guild = new_guild
self.channel = new_channel
@utils.cached_slot_property('_cs_raw_mentions')
def raw_mentions(self) -> List[int]:
return [int(x) for x in re.findall(r'<@!?([0-9]{15,20})>', self.content)]
@utils.cached_slot_property('_cs_raw_channel_mentions')
def raw_channel_mentions(self) -> List[int]:
return [int(x) for x in re.findall(r'<
@utils.cached_slot_property('_cs_raw_role_mentions')
def raw_role_mentions(self) -> List[int]:
return [int(x) for x in re.findall(r'<@&([0-9]{15,20})>', self.content)]
@utils.cached_slot_property('_cs_channel_mentions')
def channel_mentions(self) -> List[Union[GuildChannel, Thread]]:
if self.guild is None:
return []
it = filter(None, map(self.guild._resolve_channel, self.raw_channel_mentions))
return utils._unique(it)
@utils.cached_slot_property('_cs_clean_content')
def clean_content(self) -> str:
if self.guild:
def resolve_member(id: int) -> str:
m = self.guild.get_member(id) or utils.get(self.mentions, id=id) # type: ignore
return f'@{m.display_name}' if m else '@deleted-user'
def resolve_role(id: int) -> str:
r = self.guild.get_role(id) or utils.get(self.role_mentions, id=id) # type: ignore
return f'@{r.name}' if r else '@deleted-role'
def resolve_channel(id: int) -> str:
c = self.guild._resolve_channel(id) # type: ignore
return f' def resolve_member(id: int) -> str:
m = utils.get(self.mentions, id=id)
return f'@{m.display_name}' if m else '@deleted-user'
def resolve_role(id: int) -> str:
return '@deleted-role'
def resolve_channel(id: int) -> str:
return f'
transforms = {
'@': resolve_member,
'@!': resolve_member,
'
'@&': resolve_role,
}
def repl(match: re.Match) -> str:
type = match[1]
id = int(match[2])
transformed = transforms[type](id)
return transformed
result = re.sub(r'<(@[!&]?|
return escape_mentions(result)
@property
def created_at(self) -> datetime.datetime:
return utils.snowflake_time(self.id)
@property
def edited_at(self) -> Optional[datetime.datetime]:
return self._edited_timestamp
def is_system(self) -> bool:
return self.type not in (
MessageType.default,
MessageType.reply,
MessageType.chat_input_command,
MessageType.context_menu_command,
MessageType.thread_starter_message,
)
@utils.cached_slot_property('_cs_system_content')
def system_content(self) -> Optional[str]:
if self.type is MessageType.default:
return self.content
if self.type is MessageType.recipient_add:
if self.channel.type is ChannelType.group:
return f'{self.author.name} added {self.mentions[0].name} to the group.'
else:
return f'{self.author.name} added {self.mentions[0].name} to the thread.'
if self.type is MessageType.recipient_remove:
if self.channel.type is ChannelType.group:
return f'{self.author.name} removed {self.mentions[0].name} from the group.'
else:
return f'{self.author.name} removed {self.mentions[0].name} from the thread.'
if self.type is MessageType.channel_name_change:
return f'{self.author.name} changed the channel name: **{self.content}**'
if self.type is MessageType.channel_icon_change:
return f'{self.author.name} changed the channel icon.'
if self.type is MessageType.pins_add:
return f'{self.author.name} pinned a message to this channel.'
if self.type is MessageType.new_member:
formats = [
"{0} joined the party.",
"{0} is here.",
"Welcome, {0}. We hope you brought pizza.",
"A wild {0} appeared.",
"{0} just landed.",
"{0} just slid into the server.",
"{0} just showed up!",
"Welcome {0}. Say hi!",
"{0} hopped into the server.",
"Everyone welcome {0}!",
"Glad you're here, {0}.",
"Good to see you, {0}.",
"Yay you made it, {0}!",
]
created_at_ms = int(self.created_at.timestamp() * 1000)
return formats[created_at_ms % len(formats)].format(self.author.name)
if self.type is MessageType.premium_guild_subscription:
if not self.content:
return f'{self.author.name} just boosted the server!'
else:
return f'{self.author.name} just boosted the server **{self.content}** times!'
if self.type is MessageType.premium_guild_tier_1:
if not self.content:
return f'{self.author.name} just boosted the server! {self.guild} has achieved **Level 1!**'
else:
return f'{self.author.name} just boosted the server **{self.content}** times! {self.guild} has achieved **Level 1!**'
if self.type is MessageType.premium_guild_tier_2:
if not self.content:
return f'{self.author.name} just boosted the server! {self.guild} has achieved **Level 2!**'
else:
return f'{self.author.name} just boosted the server **{self.content}** times! {self.guild} has achieved **Level 2!**'
if self.type is MessageType.premium_guild_tier_3:
if not self.content:
return f'{self.author.name} just boosted the server! {self.guild} has achieved **Level 3!**'
else:
return f'{self.author.name} just boosted the server **{self.content}** times! {self.guild} has achieved **Level 3!**'
if self.type is MessageType.channel_follow_add:
return (
f'{self.author.name} has added {self.content} to this channel. Its most important updates will show up here.'
)
if self.type is MessageType.guild_stream:
return f'{self.author.name} is live! Now streaming {self.author.activity.name}'
if self.type is MessageType.guild_discovery_disqualified:
return 'This server has been removed from Server Discovery because it no longer passes all the requirements. Check Server Settings for more details.'
if self.type is MessageType.guild_discovery_requalified:
return 'This server is eligible for Server Discovery again and has been automatically relisted!'
if self.type is MessageType.guild_discovery_grace_period_initial_warning:
return 'This server has failed Discovery activity requirements for 1 week. If this server fails for 4 weeks in a row, it will be automatically removed from Discovery.'
if self.type is MessageType.guild_discovery_grace_period_final_warning:
return 'This server has failed Discovery activity requirements for 3 weeks in a row. If this server fails for 1 more week, it will be removed from Discovery.'
if self.type is MessageType.thread_created:
return f'{self.author.name} started a thread: **{self.content}**. See all **threads**.'
if self.type is MessageType.reply:
return self.content
if self.type is MessageType.thread_starter_message:
if self.reference is None or self.reference.resolved is None:
return 'Sorry, we couldn\'t load the first message in this thread'
# the resolved message for the reference will be a Message
return self.reference.resolved.content # type: ignore
if self.type is MessageType.guild_invite_reminder:
return 'Wondering who to invite?\nStart by inviting anyone who can help you build the server!'
@overload
async def edit(
self,
*,
content: Optional[str] = ...,
embed: Optional[Embed] = ...,
attachments: Sequence[Union[Attachment, File]] = ...,
suppress: bool = ...,
delete_after: Optional[float] = ...,
allowed_mentions: Optional[AllowedMentions] = ...,
view: Optional[View] = ...,
) -> Message:
...
@overload
async def edit(
self,
*,
content: Optional[str] = ...,
embeds: Sequence[Embed] = ...,
attachments: Sequence[Union[Attachment, File]] = ...,
suppress: bool = ...,
delete_after: Optional[float] = ...,
allowed_mentions: Optional[AllowedMentions] = ...,
view: Optional[View] = ...,
) -> Message:
...
async def edit(
self,
content: Optional[str] = MISSING,
embed: Optional[Embed] = MISSING,
embeds: Sequence[Embed] = MISSING,
attachments: Sequence[Union[Attachment, File]] = MISSING,
suppress: bool = False,
delete_after: Optional[float] = None,
allowed_mentions: Optional[AllowedMentions] = MISSING,
view: Optional[View] = MISSING,
) -> Message:
if content is not MISSING:
previous_allowed_mentions = self._state.allowed_mentions
else:
previous_allowed_mentions = None
if suppress is not MISSING:
flags = MessageFlags._from_value(self.flags.value)
flags.suppress_embeds = suppress
else:
flags = MISSING
if view is not MISSING:
self._state.prevent_view_updates_for(self.id)
params = handle_message_parameters(
content=content,
flags=flags,
embed=embed,
embeds=embeds,
attachments=attachments,
view=view,
allowed_mentions=allowed_mentions,
previous_allowed_mentions=previous_allowed_mentions,
)
data = await self._state.http.edit_message(self.channel.id, self.id, params=params)
message = Message(state=self._state, channel=self.channel, data=data)
if view and not view.is_finished():
self._state.store_view(view, self.id)
if delete_after is not None:
await self.delete(delay=delete_after)
return message
async def add_files(self, *files: File) -> Message:
return await self.edit(attachments=[*self.attachments, *files])
async def remove_attachments(self, *attachments: Attachment) -> Message:
return await self.edit(attachments=[a for a in self.attachments if a not in attachments])
| true
| true
|
790a300e9c50914e34d93bb7a7af54da5ee62d4d
| 3,035
|
py
|
Python
|
lib/pubtal/DateContext.py
|
owlfish/pubtal
|
fb20a0acf2769b2c06012b65bd462f02da12bd1c
|
[
"BSD-3-Clause"
] | null | null | null |
lib/pubtal/DateContext.py
|
owlfish/pubtal
|
fb20a0acf2769b2c06012b65bd462f02da12bd1c
|
[
"BSD-3-Clause"
] | null | null | null |
lib/pubtal/DateContext.py
|
owlfish/pubtal
|
fb20a0acf2769b2c06012b65bd462f02da12bd1c
|
[
"BSD-3-Clause"
] | null | null | null |
""" A class that can provide a date/time in any timeformat.format() format and both
local and UTC timezones within a ContextVariable.
Copyright (c) 2004 Colin Stewart (http://www.owlfish.com/)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
If you make any bug fixes or feature enhancements please let me know!
"""
import re, time, math, string
import timeformat
from simpletal import simpleTALES
PATHREGEX = re.compile ('^((?:local)|(?:utc))/?(.*)$')
class Date (simpleTALES.ContextVariable):
""" Wraps a DateTime and provides context paths local and utc.
These paths in turn can take TimeFormat formats, for example:
utc/%d-%m-%Y
"""
def __init__ (self, value = None, defaultFormat = '%a[SHORT], %d %b[SHORT] %Y %H:%M:%S %Z'):
""" The value should be in the LOCAL timezone.
"""
self.ourValue = value
self.defaultFormat = defaultFormat
def value (self, currentPath=None):
# Default to local timezone and RFC822 format
utcTime = 0
strFrmt = self.defaultFormat
if (currentPath is not None):
index, paths = currentPath
currentPath = '/'.join (paths[index:])
match = PATHREGEX.match (currentPath)
if (match is not None):
type = match.group(1)
if (type == 'local'):
utcTime = 0
else:
utcTime = 1
strFrmt = match.group(2)
if (strFrmt == ""):
strFrmt = self.defaultFormat
if (self.ourValue is None):
# Default to the current time!
timeValue = time.localtime()
else:
timeValue = self.ourValue
if (utcTime):
# Convert to UTC (GMT)
timeValue = time.gmtime (time.mktime (timeValue))
value = timeformat.format (strFrmt, timeValue, utctime=utcTime)
raise simpleTALES.ContextVariable (value)
| 38.417722
| 93
| 0.730478
|
import re, time, math, string
import timeformat
from simpletal import simpleTALES
PATHREGEX = re.compile ('^((?:local)|(?:utc))/?(.*)$')
class Date (simpleTALES.ContextVariable):
def __init__ (self, value = None, defaultFormat = '%a[SHORT], %d %b[SHORT] %Y %H:%M:%S %Z'):
self.ourValue = value
self.defaultFormat = defaultFormat
def value (self, currentPath=None):
utcTime = 0
strFrmt = self.defaultFormat
if (currentPath is not None):
index, paths = currentPath
currentPath = '/'.join (paths[index:])
match = PATHREGEX.match (currentPath)
if (match is not None):
type = match.group(1)
if (type == 'local'):
utcTime = 0
else:
utcTime = 1
strFrmt = match.group(2)
if (strFrmt == ""):
strFrmt = self.defaultFormat
if (self.ourValue is None):
timeValue = time.localtime()
else:
timeValue = self.ourValue
if (utcTime):
timeValue = time.gmtime (time.mktime (timeValue))
value = timeformat.format (strFrmt, timeValue, utctime=utcTime)
raise simpleTALES.ContextVariable (value)
| true
| true
|
790a309ea229283fa6080c94713b6f66529013fa
| 461
|
py
|
Python
|
codigo/Live176/exemplos_dos_slides/exemplo_05.py
|
BrunoPontesLira/live-de-python
|
da6e463a89ed90d9efaa1c34088ab6460e949de1
|
[
"MIT"
] | 572
|
2018-04-03T03:17:08.000Z
|
2022-03-31T19:05:32.000Z
|
codigo/Live176/exemplos_dos_slides/exemplo_05.py
|
BrunoPontesLira/live-de-python
|
da6e463a89ed90d9efaa1c34088ab6460e949de1
|
[
"MIT"
] | 176
|
2018-05-18T15:56:16.000Z
|
2022-03-28T20:39:07.000Z
|
codigo/Live176/exemplos_dos_slides/exemplo_05.py
|
BrunoPontesLira/live-de-python
|
da6e463a89ed90d9efaa1c34088ab6460e949de1
|
[
"MIT"
] | 140
|
2018-04-18T13:59:11.000Z
|
2022-03-29T00:43:49.000Z
|
"""Exemplo de como virar[flip] imagems."""
from PIL import Image
# Abre a imagem
im = Image.open('beijo_menor.jpg')
# Flip
im.transpose(Image.FLIP_LEFT_RIGHT).show() # Invete na horizontal
im.transpose(Image.FLIP_TOP_BOTTOM).show() # Invete na vertical
# Transposição
im.transpose(Image.ROTATE_90).show()
im.transpose(Image.ROTATE_180).show()
im.transpose(Image.ROTATE_270).show()
im.transpose(Image.TRANSPOSE).show()
im.transpose(Image.TRANSVERSE).show()
| 27.117647
| 66
| 0.761388
|
from PIL import Image
im = Image.open('beijo_menor.jpg')
im.transpose(Image.FLIP_LEFT_RIGHT).show()
im.transpose(Image.FLIP_TOP_BOTTOM).show()
im.transpose(Image.ROTATE_90).show()
im.transpose(Image.ROTATE_180).show()
im.transpose(Image.ROTATE_270).show()
im.transpose(Image.TRANSPOSE).show()
im.transpose(Image.TRANSVERSE).show()
| true
| true
|
790a31602a2e6231958a1ed23fbe61a5ef5fd6fa
| 23
|
py
|
Python
|
examples/ndfd/ndfd.py
|
eLBati/pyxb
|
14737c23a125fd12c954823ad64fc4497816fae3
|
[
"Apache-2.0"
] | 123
|
2015-01-12T06:43:22.000Z
|
2022-03-20T18:06:46.000Z
|
examples/ndfd/ndfd.py
|
eLBati/pyxb
|
14737c23a125fd12c954823ad64fc4497816fae3
|
[
"Apache-2.0"
] | 103
|
2015-01-08T18:35:57.000Z
|
2022-01-18T01:44:14.000Z
|
examples/ndfd/ndfd.py
|
eLBati/pyxb
|
14737c23a125fd12c954823ad64fc4497816fae3
|
[
"Apache-2.0"
] | 54
|
2015-02-15T17:12:00.000Z
|
2022-03-07T23:02:32.000Z
|
from raw.ndfd import *
| 11.5
| 22
| 0.73913
|
from raw.ndfd import *
| true
| true
|
790a31901607e0e43d54b9c3e5faabe14c2fa874
| 1,017
|
py
|
Python
|
meerkat/views/simple.py
|
by46/meerkat
|
41376dc1636b5975a50020bad5632b4edbf5b16d
|
[
"MIT"
] | null | null | null |
meerkat/views/simple.py
|
by46/meerkat
|
41376dc1636b5975a50020bad5632b4edbf5b16d
|
[
"MIT"
] | null | null | null |
meerkat/views/simple.py
|
by46/meerkat
|
41376dc1636b5975a50020bad5632b4edbf5b16d
|
[
"MIT"
] | null | null | null |
import string
from flask import Blueprint
from flask import abort
from flask import redirect
from flask import render_template
from meerkat import utils
from meerkat.db import DataAccess
page = Blueprint('simple', __name__)
@page.route('/simple/')
def simple_index():
links = DataAccess.get_libs()
links = sorted(links, key=string.lower)
return render_template('simple.html', links=links)
@page.route('/simple/<prefix>/')
def simple(prefix=''):
normalized, prefix = utils.normalize_pkg_name(prefix)
if normalized:
return redirect('/simple/{0}/'.format(prefix))
if not DataAccess.has_lib(prefix):
abort(404)
links = []
for package in DataAccess.get_packages_by_lib(prefix):
info = DataAccess.get_package(package)
href = '/packages/{0}#md5={1}'.format(package, info.get('md5'))
links.append(dict(file=package, href=href))
return render_template('simple_detail.html', links=links, prefix=prefix)
| 26.763158
| 77
| 0.679449
|
import string
from flask import Blueprint
from flask import abort
from flask import redirect
from flask import render_template
from meerkat import utils
from meerkat.db import DataAccess
page = Blueprint('simple', __name__)
@page.route('/simple/')
def simple_index():
links = DataAccess.get_libs()
links = sorted(links, key=string.lower)
return render_template('simple.html', links=links)
@page.route('/simple/<prefix>/')
def simple(prefix=''):
normalized, prefix = utils.normalize_pkg_name(prefix)
if normalized:
return redirect('/simple/{0}/'.format(prefix))
if not DataAccess.has_lib(prefix):
abort(404)
links = []
for package in DataAccess.get_packages_by_lib(prefix):
info = DataAccess.get_package(package)
href = '/packages/{0}#md5={1}'.format(package, info.get('md5'))
links.append(dict(file=package, href=href))
return render_template('simple_detail.html', links=links, prefix=prefix)
| true
| true
|
790a351ab08745d337dc403b70ab76fc5f969c80
| 1,407
|
py
|
Python
|
quantum/plugins/cisco/nexus/cisco_nexus_configuration.py
|
r-mibu/neutron
|
7aebe2468bdcc1befef7d09136fdedafcb0049ec
|
[
"Apache-2.0"
] | null | null | null |
quantum/plugins/cisco/nexus/cisco_nexus_configuration.py
|
r-mibu/neutron
|
7aebe2468bdcc1befef7d09136fdedafcb0049ec
|
[
"Apache-2.0"
] | null | null | null |
quantum/plugins/cisco/nexus/cisco_nexus_configuration.py
|
r-mibu/neutron
|
7aebe2468bdcc1befef7d09136fdedafcb0049ec
|
[
"Apache-2.0"
] | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2011 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Sumit Naiksatam, Cisco Systems, Inc.
# @author: Edgar Magana, Cisco Systems, Inc.
#
"""
Configuration consolidation for the Nexus Driver
This module will export the configuration parameters
from the nexus.ini file
"""
from quantum.common.utils import find_config_file
from quantum.plugins.cisco.common import cisco_configparser as confp
CP = confp.CiscoConfigParser(find_config_file({'plugin': 'cisco'},
"nexus.ini"))
SECTION = CP['SWITCH']
NEXUS_IP_ADDRESS = SECTION['nexus_ip_address']
NEXUS_FIRST_PORT = SECTION['nexus_first_port']
NEXUS_SECOND_PORT = SECTION['nexus_second_port']
NEXUS_SSH_PORT = SECTION['nexus_ssh_port']
SECTION = CP['DRIVER']
NEXUS_DRIVER = SECTION['name']
| 34.317073
| 78
| 0.739161
|
from quantum.common.utils import find_config_file
from quantum.plugins.cisco.common import cisco_configparser as confp
CP = confp.CiscoConfigParser(find_config_file({'plugin': 'cisco'},
"nexus.ini"))
SECTION = CP['SWITCH']
NEXUS_IP_ADDRESS = SECTION['nexus_ip_address']
NEXUS_FIRST_PORT = SECTION['nexus_first_port']
NEXUS_SECOND_PORT = SECTION['nexus_second_port']
NEXUS_SSH_PORT = SECTION['nexus_ssh_port']
SECTION = CP['DRIVER']
NEXUS_DRIVER = SECTION['name']
| true
| true
|
790a382f05c75e9ec22248774e92d49b634a4a6b
| 8,937
|
py
|
Python
|
pysnmp/smi/mibs/SNMPv2-TM.py
|
RKinsey/pysnmp
|
96b5cf31e2f5d19f34d0dd1075014c488f6a5789
|
[
"BSD-2-Clause"
] | 492
|
2016-03-13T11:03:13.000Z
|
2022-03-21T02:52:57.000Z
|
pysnmp/smi/mibs/SNMPv2-TM.py
|
bartomo/pysnmp
|
becd15c79c9a6b5696928ecd50bf5cca8b1770a1
|
[
"BSD-2-Clause"
] | 372
|
2016-03-29T22:42:05.000Z
|
2022-03-26T10:28:25.000Z
|
pysnmp/smi/mibs/SNMPv2-TM.py
|
bartomo/pysnmp
|
becd15c79c9a6b5696928ecd50bf5cca8b1770a1
|
[
"BSD-2-Clause"
] | 197
|
2016-03-13T11:01:54.000Z
|
2022-03-07T19:52:15.000Z
|
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
# ASN.1 source http://mibs.snmplabs.com:80/asn1/SNMPv2-TM
# Produced by pysmi-0.4.0 at Sun Feb 17 08:56:38 2019
#
# Parts of otherwise autogenerated MIB has been updated manually.
#
try:
from socket import inet_ntop, inet_pton, AF_INET
except ImportError:
from socket import inet_ntoa, inet_aton, AF_INET
inet_ntop = lambda x, y: inet_ntoa(y)
inet_pton = lambda x, y: inet_aton(y)
from pyasn1.compat.octets import int2oct
from pyasn1.compat.octets import oct2int
if 'mibBuilder' not in globals():
import sys
sys.stderr.write(__doc__)
sys.exit(1)
(Integer,
OctetString,
ObjectIdentifier) = mibBuilder.importSymbols(
"ASN1",
"Integer",
"OctetString",
"ObjectIdentifier")
(NamedValues,) = mibBuilder.importSymbols(
"ASN1-ENUMERATION",
"NamedValues")
(ConstraintsIntersection,
SingleValueConstraint,
ValueRangeConstraint,
ValueSizeConstraint,
ConstraintsUnion) = mibBuilder.importSymbols(
"ASN1-REFINEMENT",
"ConstraintsIntersection",
"SingleValueConstraint",
"ValueRangeConstraint",
"ValueSizeConstraint",
"ConstraintsUnion")
(Counter64,
iso,
NotificationType,
ObjectIdentity,
Bits,
ModuleIdentity,
TimeTicks,
Counter32,
IpAddress,
snmpProxys,
MibScalar,
MibTable,
MibTableRow,
MibTableColumn,
Gauge32,
Unsigned32,
snmpDomains,
Integer32,
MibIdentifier,
snmpModules) = mibBuilder.importSymbols(
"SNMPv2-SMI",
"Counter64",
"iso",
"NotificationType",
"ObjectIdentity",
"Bits",
"ModuleIdentity",
"TimeTicks",
"Counter32",
"IpAddress",
"snmpProxys",
"MibScalar",
"MibTable",
"MibTableRow",
"MibTableColumn",
"Gauge32",
"Unsigned32",
"snmpDomains",
"Integer32",
"MibIdentifier",
"snmpModules")
(TextualConvention,) = mibBuilder.importSymbols(
"SNMPv2-TC",
"TextualConvention")
snmpv2tm = ModuleIdentity(
(1, 3, 6, 1, 6, 3, 19)
)
snmpv2tm.setRevisions(
("2002-10-16 00:00",
"1996-01-01 00:00",
"1993-04-01 00:00")
)
snmpv2tm.setLastUpdated("200210160000Z")
if mibBuilder.loadTexts:
snmpv2tm.setOrganization("""\
IETF SNMPv3 Working Group
""")
snmpv2tm.setContactInfo("""\
WG-EMail: snmpv3@lists.tislabs.com Subscribe: snmpv3-request@lists.tislabs.com
Co-Chair: Russ Mundy Network Associates Laboratories postal: 15204 Omega Drive,
Suite 300 Rockville, MD 20850-4601 USA EMail: mundy@tislabs.com phone: +1 301
947-7107 Co-Chair: David Harrington Enterasys Networks postal: 35 Industrial
Way P. O. Box 5005 Rochester, NH 03866-5005 USA EMail: dbh@enterasys.com phone:
+1 603 337-2614 Editor: Randy Presuhn BMC Software, Inc. postal: 2141 North
First Street San Jose, CA 95131 USA EMail: randy_presuhn@bmc.com phone: +1 408
546-1006
""")
if mibBuilder.loadTexts:
snmpv2tm.setDescription("""\
The MIB module for SNMP transport mappings. Copyright (C) The Internet Society
(2002). This version of this MIB module is part of RFC 3417; see the RFC itself
for full legal notices.
""")
class SnmpUDPAddress(TextualConvention, OctetString):
status = "current"
displayHint = "1d.1d.1d.1d/2d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(6, 6),
)
if mibBuilder.loadTexts:
description = """\
Represents a UDP over IPv4 address: octets contents encoding 1-4 IP-address
network-byte order 5-6 UDP-port network-byte order
"""
fixedLength = 6
def prettyIn(self, value):
if isinstance(value, tuple):
# Wild hack -- need to implement TextualConvention.prettyIn
value = inet_pton(AF_INET, value[0]) + int2oct((value[1] >> 8) & 0xff) + int2oct(value[1] & 0xff)
return OctetString.prettyIn(self, value)
# Socket address syntax coercion
def __asSocketAddress(self):
if not hasattr(self, '__tuple_value'):
v = self.asOctets()
self.__tuple_value = (
inet_ntop(AF_INET, v[:4]),
oct2int(v[4]) << 8 | oct2int(v[5])
)
return self.__tuple_value
def __iter__(self):
return iter(self.__asSocketAddress())
def __getitem__(self, item):
return self.__asSocketAddress()[item]
class SnmpOSIAddress(TextualConvention, OctetString):
status = "current"
displayHint = "*1x:/1x:"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(1, 1),
ValueSizeConstraint(4, 85),
)
if mibBuilder.loadTexts:
description = """\
Represents an OSI transport-address: octets contents encoding 1 length of NSAP
'n' as an unsigned-integer (either 0 or from 3 to 20) 2..(n+1) NSAP concrete
binary representation (n+2)..m TSEL string of (up to 64) octets
"""
class SnmpNBPAddress(TextualConvention, OctetString):
status = "current"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(3, 99),
)
if mibBuilder.loadTexts:
description = """\
Represents an NBP name: octets contents encoding 1 length of object 'n' as an
unsigned integer 2..(n+1) object string of (up to 32) octets n+2 length of type
'p' as an unsigned integer (n+3)..(n+2+p) type string of (up to 32) octets
n+3+p length of zone 'q' as an unsigned integer (n+4+p)..(n+3+p+q) zone string
of (up to 32) octets For comparison purposes, strings are case-insensitive. All
strings may contain any octet other than 255 (hex ff).
"""
class SnmpIPXAddress(TextualConvention, OctetString):
status = "current"
displayHint = "4x.1x:1x:1x:1x:1x:1x.2d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(12, 12),
)
if mibBuilder.loadTexts:
description = """\
Represents an IPX address: octets contents encoding 1-4 network-number network-
byte order 5-10 physical-address network-byte order 11-12 socket-number
network-byte order
"""
fixedLength = 12
_SnmpUDPDomain_ObjectIdentity = ObjectIdentity
snmpUDPDomain = _SnmpUDPDomain_ObjectIdentity(
(1, 3, 6, 1, 6, 1, 1)
)
if mibBuilder.loadTexts:
snmpUDPDomain.setStatus("current")
if mibBuilder.loadTexts:
snmpUDPDomain.setDescription("""\
The SNMP over UDP over IPv4 transport domain. The corresponding transport
address is of type SnmpUDPAddress.
""")
_SnmpCLNSDomain_ObjectIdentity = ObjectIdentity
snmpCLNSDomain = _SnmpCLNSDomain_ObjectIdentity(
(1, 3, 6, 1, 6, 1, 2)
)
if mibBuilder.loadTexts:
snmpCLNSDomain.setStatus("current")
if mibBuilder.loadTexts:
snmpCLNSDomain.setDescription("""\
The SNMP over CLNS transport domain. The corresponding transport address is of
type SnmpOSIAddress.
""")
_SnmpCONSDomain_ObjectIdentity = ObjectIdentity
snmpCONSDomain = _SnmpCONSDomain_ObjectIdentity(
(1, 3, 6, 1, 6, 1, 3)
)
if mibBuilder.loadTexts:
snmpCONSDomain.setStatus("current")
if mibBuilder.loadTexts:
snmpCONSDomain.setDescription("""\
The SNMP over CONS transport domain. The corresponding transport address is of
type SnmpOSIAddress.
""")
_SnmpDDPDomain_ObjectIdentity = ObjectIdentity
snmpDDPDomain = _SnmpDDPDomain_ObjectIdentity(
(1, 3, 6, 1, 6, 1, 4)
)
if mibBuilder.loadTexts:
snmpDDPDomain.setStatus("current")
if mibBuilder.loadTexts:
snmpDDPDomain.setDescription("""\
The SNMP over DDP transport domain. The corresponding transport address is of
type SnmpNBPAddress.
""")
_SnmpIPXDomain_ObjectIdentity = ObjectIdentity
snmpIPXDomain = _SnmpIPXDomain_ObjectIdentity(
(1, 3, 6, 1, 6, 1, 5)
)
if mibBuilder.loadTexts:
snmpIPXDomain.setStatus("current")
if mibBuilder.loadTexts:
snmpIPXDomain.setDescription("""\
The SNMP over IPX transport domain. The corresponding transport address is of
type SnmpIPXAddress.
""")
_Rfc1157Proxy_ObjectIdentity = ObjectIdentity
rfc1157Proxy = _Rfc1157Proxy_ObjectIdentity(
(1, 3, 6, 1, 6, 2, 1)
)
_Rfc1157Domain_ObjectIdentity = ObjectIdentity
rfc1157Domain = _Rfc1157Domain_ObjectIdentity(
(1, 3, 6, 1, 6, 2, 1, 1)
)
if mibBuilder.loadTexts:
rfc1157Domain.setStatus("deprecated")
if mibBuilder.loadTexts:
rfc1157Domain.setDescription("""\
The transport domain for SNMPv1 over UDP over IPv4. The corresponding transport
address is of type SnmpUDPAddress.
""")
mibBuilder.exportSymbols(
"SNMPv2-TM",
**{"SnmpUDPAddress": SnmpUDPAddress,
"SnmpOSIAddress": SnmpOSIAddress,
"SnmpNBPAddress": SnmpNBPAddress,
"SnmpIPXAddress": SnmpIPXAddress,
"snmpUDPDomain": snmpUDPDomain,
"snmpCLNSDomain": snmpCLNSDomain,
"snmpCONSDomain": snmpCONSDomain,
"snmpDDPDomain": snmpDDPDomain,
"snmpIPXDomain": snmpIPXDomain,
"rfc1157Proxy": rfc1157Proxy,
"rfc1157Domain": rfc1157Domain,
"snmpv2tm": snmpv2tm}
)
| 28.92233
| 109
| 0.71176
|
try:
from socket import inet_ntop, inet_pton, AF_INET
except ImportError:
from socket import inet_ntoa, inet_aton, AF_INET
inet_ntop = lambda x, y: inet_ntoa(y)
inet_pton = lambda x, y: inet_aton(y)
from pyasn1.compat.octets import int2oct
from pyasn1.compat.octets import oct2int
if 'mibBuilder' not in globals():
import sys
sys.stderr.write(__doc__)
sys.exit(1)
(Integer,
OctetString,
ObjectIdentifier) = mibBuilder.importSymbols(
"ASN1",
"Integer",
"OctetString",
"ObjectIdentifier")
(NamedValues,) = mibBuilder.importSymbols(
"ASN1-ENUMERATION",
"NamedValues")
(ConstraintsIntersection,
SingleValueConstraint,
ValueRangeConstraint,
ValueSizeConstraint,
ConstraintsUnion) = mibBuilder.importSymbols(
"ASN1-REFINEMENT",
"ConstraintsIntersection",
"SingleValueConstraint",
"ValueRangeConstraint",
"ValueSizeConstraint",
"ConstraintsUnion")
(Counter64,
iso,
NotificationType,
ObjectIdentity,
Bits,
ModuleIdentity,
TimeTicks,
Counter32,
IpAddress,
snmpProxys,
MibScalar,
MibTable,
MibTableRow,
MibTableColumn,
Gauge32,
Unsigned32,
snmpDomains,
Integer32,
MibIdentifier,
snmpModules) = mibBuilder.importSymbols(
"SNMPv2-SMI",
"Counter64",
"iso",
"NotificationType",
"ObjectIdentity",
"Bits",
"ModuleIdentity",
"TimeTicks",
"Counter32",
"IpAddress",
"snmpProxys",
"MibScalar",
"MibTable",
"MibTableRow",
"MibTableColumn",
"Gauge32",
"Unsigned32",
"snmpDomains",
"Integer32",
"MibIdentifier",
"snmpModules")
(TextualConvention,) = mibBuilder.importSymbols(
"SNMPv2-TC",
"TextualConvention")
snmpv2tm = ModuleIdentity(
(1, 3, 6, 1, 6, 3, 19)
)
snmpv2tm.setRevisions(
("2002-10-16 00:00",
"1996-01-01 00:00",
"1993-04-01 00:00")
)
snmpv2tm.setLastUpdated("200210160000Z")
if mibBuilder.loadTexts:
snmpv2tm.setOrganization("""\
IETF SNMPv3 Working Group
""")
snmpv2tm.setContactInfo("""\
WG-EMail: snmpv3@lists.tislabs.com Subscribe: snmpv3-request@lists.tislabs.com
Co-Chair: Russ Mundy Network Associates Laboratories postal: 15204 Omega Drive,
Suite 300 Rockville, MD 20850-4601 USA EMail: mundy@tislabs.com phone: +1 301
947-7107 Co-Chair: David Harrington Enterasys Networks postal: 35 Industrial
Way P. O. Box 5005 Rochester, NH 03866-5005 USA EMail: dbh@enterasys.com phone:
+1 603 337-2614 Editor: Randy Presuhn BMC Software, Inc. postal: 2141 North
First Street San Jose, CA 95131 USA EMail: randy_presuhn@bmc.com phone: +1 408
546-1006
""")
if mibBuilder.loadTexts:
snmpv2tm.setDescription("""\
The MIB module for SNMP transport mappings. Copyright (C) The Internet Society
(2002). This version of this MIB module is part of RFC 3417; see the RFC itself
for full legal notices.
""")
class SnmpUDPAddress(TextualConvention, OctetString):
status = "current"
displayHint = "1d.1d.1d.1d/2d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(6, 6),
)
if mibBuilder.loadTexts:
description = """\
Represents a UDP over IPv4 address: octets contents encoding 1-4 IP-address
network-byte order 5-6 UDP-port network-byte order
"""
fixedLength = 6
def prettyIn(self, value):
if isinstance(value, tuple):
value = inet_pton(AF_INET, value[0]) + int2oct((value[1] >> 8) & 0xff) + int2oct(value[1] & 0xff)
return OctetString.prettyIn(self, value)
def __asSocketAddress(self):
if not hasattr(self, '__tuple_value'):
v = self.asOctets()
self.__tuple_value = (
inet_ntop(AF_INET, v[:4]),
oct2int(v[4]) << 8 | oct2int(v[5])
)
return self.__tuple_value
def __iter__(self):
return iter(self.__asSocketAddress())
def __getitem__(self, item):
return self.__asSocketAddress()[item]
class SnmpOSIAddress(TextualConvention, OctetString):
status = "current"
displayHint = "*1x:/1x:"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(1, 1),
ValueSizeConstraint(4, 85),
)
if mibBuilder.loadTexts:
description = """\
Represents an OSI transport-address: octets contents encoding 1 length of NSAP
'n' as an unsigned-integer (either 0 or from 3 to 20) 2..(n+1) NSAP concrete
binary representation (n+2)..m TSEL string of (up to 64) octets
"""
class SnmpNBPAddress(TextualConvention, OctetString):
status = "current"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(3, 99),
)
if mibBuilder.loadTexts:
description = """\
Represents an NBP name: octets contents encoding 1 length of object 'n' as an
unsigned integer 2..(n+1) object string of (up to 32) octets n+2 length of type
'p' as an unsigned integer (n+3)..(n+2+p) type string of (up to 32) octets
n+3+p length of zone 'q' as an unsigned integer (n+4+p)..(n+3+p+q) zone string
of (up to 32) octets For comparison purposes, strings are case-insensitive. All
strings may contain any octet other than 255 (hex ff).
"""
class SnmpIPXAddress(TextualConvention, OctetString):
status = "current"
displayHint = "4x.1x:1x:1x:1x:1x:1x.2d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(12, 12),
)
if mibBuilder.loadTexts:
description = """\
Represents an IPX address: octets contents encoding 1-4 network-number network-
byte order 5-10 physical-address network-byte order 11-12 socket-number
network-byte order
"""
fixedLength = 12
_SnmpUDPDomain_ObjectIdentity = ObjectIdentity
snmpUDPDomain = _SnmpUDPDomain_ObjectIdentity(
(1, 3, 6, 1, 6, 1, 1)
)
if mibBuilder.loadTexts:
snmpUDPDomain.setStatus("current")
if mibBuilder.loadTexts:
snmpUDPDomain.setDescription("""\
The SNMP over UDP over IPv4 transport domain. The corresponding transport
address is of type SnmpUDPAddress.
""")
_SnmpCLNSDomain_ObjectIdentity = ObjectIdentity
snmpCLNSDomain = _SnmpCLNSDomain_ObjectIdentity(
(1, 3, 6, 1, 6, 1, 2)
)
if mibBuilder.loadTexts:
snmpCLNSDomain.setStatus("current")
if mibBuilder.loadTexts:
snmpCLNSDomain.setDescription("""\
The SNMP over CLNS transport domain. The corresponding transport address is of
type SnmpOSIAddress.
""")
_SnmpCONSDomain_ObjectIdentity = ObjectIdentity
snmpCONSDomain = _SnmpCONSDomain_ObjectIdentity(
(1, 3, 6, 1, 6, 1, 3)
)
if mibBuilder.loadTexts:
snmpCONSDomain.setStatus("current")
if mibBuilder.loadTexts:
snmpCONSDomain.setDescription("""\
The SNMP over CONS transport domain. The corresponding transport address is of
type SnmpOSIAddress.
""")
_SnmpDDPDomain_ObjectIdentity = ObjectIdentity
snmpDDPDomain = _SnmpDDPDomain_ObjectIdentity(
(1, 3, 6, 1, 6, 1, 4)
)
if mibBuilder.loadTexts:
snmpDDPDomain.setStatus("current")
if mibBuilder.loadTexts:
snmpDDPDomain.setDescription("""\
The SNMP over DDP transport domain. The corresponding transport address is of
type SnmpNBPAddress.
""")
_SnmpIPXDomain_ObjectIdentity = ObjectIdentity
snmpIPXDomain = _SnmpIPXDomain_ObjectIdentity(
(1, 3, 6, 1, 6, 1, 5)
)
if mibBuilder.loadTexts:
snmpIPXDomain.setStatus("current")
if mibBuilder.loadTexts:
snmpIPXDomain.setDescription("""\
The SNMP over IPX transport domain. The corresponding transport address is of
type SnmpIPXAddress.
""")
_Rfc1157Proxy_ObjectIdentity = ObjectIdentity
rfc1157Proxy = _Rfc1157Proxy_ObjectIdentity(
(1, 3, 6, 1, 6, 2, 1)
)
_Rfc1157Domain_ObjectIdentity = ObjectIdentity
rfc1157Domain = _Rfc1157Domain_ObjectIdentity(
(1, 3, 6, 1, 6, 2, 1, 1)
)
if mibBuilder.loadTexts:
rfc1157Domain.setStatus("deprecated")
if mibBuilder.loadTexts:
rfc1157Domain.setDescription("""\
The transport domain for SNMPv1 over UDP over IPv4. The corresponding transport
address is of type SnmpUDPAddress.
""")
mibBuilder.exportSymbols(
"SNMPv2-TM",
**{"SnmpUDPAddress": SnmpUDPAddress,
"SnmpOSIAddress": SnmpOSIAddress,
"SnmpNBPAddress": SnmpNBPAddress,
"SnmpIPXAddress": SnmpIPXAddress,
"snmpUDPDomain": snmpUDPDomain,
"snmpCLNSDomain": snmpCLNSDomain,
"snmpCONSDomain": snmpCONSDomain,
"snmpDDPDomain": snmpDDPDomain,
"snmpIPXDomain": snmpIPXDomain,
"rfc1157Proxy": rfc1157Proxy,
"rfc1157Domain": rfc1157Domain,
"snmpv2tm": snmpv2tm}
)
| true
| true
|
790a387be1dd9111b47fa150bd553d8f2d43cd43
| 5,002
|
py
|
Python
|
kubernetes/client/models/io_xk8s_cluster_v1alpha4_machine_spec_bootstrap.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/client/models/io_xk8s_cluster_v1alpha4_machine_spec_bootstrap.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/client/models/io_xk8s_cluster_v1alpha4_machine_spec_bootstrap.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.20.7
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class IoXK8sClusterV1alpha4MachineSpecBootstrap(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'config_ref': 'IoXK8sClusterV1alpha4MachineSpecBootstrapConfigRef',
'data_secret_name': 'str'
}
attribute_map = {
'config_ref': 'configRef',
'data_secret_name': 'dataSecretName'
}
def __init__(self, config_ref=None, data_secret_name=None, local_vars_configuration=None): # noqa: E501
"""IoXK8sClusterV1alpha4MachineSpecBootstrap - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._config_ref = None
self._data_secret_name = None
self.discriminator = None
if config_ref is not None:
self.config_ref = config_ref
if data_secret_name is not None:
self.data_secret_name = data_secret_name
@property
def config_ref(self):
"""Gets the config_ref of this IoXK8sClusterV1alpha4MachineSpecBootstrap. # noqa: E501
:return: The config_ref of this IoXK8sClusterV1alpha4MachineSpecBootstrap. # noqa: E501
:rtype: IoXK8sClusterV1alpha4MachineSpecBootstrapConfigRef
"""
return self._config_ref
@config_ref.setter
def config_ref(self, config_ref):
"""Sets the config_ref of this IoXK8sClusterV1alpha4MachineSpecBootstrap.
:param config_ref: The config_ref of this IoXK8sClusterV1alpha4MachineSpecBootstrap. # noqa: E501
:type: IoXK8sClusterV1alpha4MachineSpecBootstrapConfigRef
"""
self._config_ref = config_ref
@property
def data_secret_name(self):
"""Gets the data_secret_name of this IoXK8sClusterV1alpha4MachineSpecBootstrap. # noqa: E501
DataSecretName is the name of the secret that stores the bootstrap data script. If nil, the Machine should remain in the Pending state. # noqa: E501
:return: The data_secret_name of this IoXK8sClusterV1alpha4MachineSpecBootstrap. # noqa: E501
:rtype: str
"""
return self._data_secret_name
@data_secret_name.setter
def data_secret_name(self, data_secret_name):
"""Sets the data_secret_name of this IoXK8sClusterV1alpha4MachineSpecBootstrap.
DataSecretName is the name of the secret that stores the bootstrap data script. If nil, the Machine should remain in the Pending state. # noqa: E501
:param data_secret_name: The data_secret_name of this IoXK8sClusterV1alpha4MachineSpecBootstrap. # noqa: E501
:type: str
"""
self._data_secret_name = data_secret_name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IoXK8sClusterV1alpha4MachineSpecBootstrap):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, IoXK8sClusterV1alpha4MachineSpecBootstrap):
return True
return self.to_dict() != other.to_dict()
| 33.57047
| 157
| 0.645942
|
import pprint
import re
import six
from kubernetes.client.configuration import Configuration
class IoXK8sClusterV1alpha4MachineSpecBootstrap(object):
openapi_types = {
'config_ref': 'IoXK8sClusterV1alpha4MachineSpecBootstrapConfigRef',
'data_secret_name': 'str'
}
attribute_map = {
'config_ref': 'configRef',
'data_secret_name': 'dataSecretName'
}
def __init__(self, config_ref=None, data_secret_name=None, local_vars_configuration=None):
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._config_ref = None
self._data_secret_name = None
self.discriminator = None
if config_ref is not None:
self.config_ref = config_ref
if data_secret_name is not None:
self.data_secret_name = data_secret_name
@property
def config_ref(self):
return self._config_ref
@config_ref.setter
def config_ref(self, config_ref):
self._config_ref = config_ref
@property
def data_secret_name(self):
return self._data_secret_name
@data_secret_name.setter
def data_secret_name(self, data_secret_name):
self._data_secret_name = data_secret_name
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, IoXK8sClusterV1alpha4MachineSpecBootstrap):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
if not isinstance(other, IoXK8sClusterV1alpha4MachineSpecBootstrap):
return True
return self.to_dict() != other.to_dict()
| true
| true
|
790a3995c4a227a6dddb3783247357c4a32e4718
| 7,721
|
py
|
Python
|
sympy/codegen/rewriting.py
|
MartinThoma/sympy
|
009d0031bec7222ffa472e52148a2b4e441cd3a5
|
[
"BSD-3-Clause"
] | 2
|
2019-05-18T22:36:49.000Z
|
2019-05-24T05:56:16.000Z
|
sympy/codegen/rewriting.py
|
mmelotti/sympy
|
bea29026d27cc50c2e6a5501b6a70a9629ed3e18
|
[
"BSD-3-Clause"
] | 1
|
2020-04-22T12:45:26.000Z
|
2020-04-22T12:45:26.000Z
|
sympy/codegen/rewriting.py
|
mmelotti/sympy
|
bea29026d27cc50c2e6a5501b6a70a9629ed3e18
|
[
"BSD-3-Clause"
] | 3
|
2019-05-18T21:32:31.000Z
|
2019-07-26T11:05:46.000Z
|
"""
Classes and functions useful for rewriting expressions for optimized code
generation. Some languages (or standards thereof), e.g. C99, offer specialized
math functions for better performance and/or precision.
Using the ``optimize`` function in this module, together with a collection of
rules (represented as instances of ``Optimization``), one can rewrite the
expressions for this purpose::
>>> from sympy import Symbol, exp, log
>>> from sympy.codegen.rewriting import optimize, optims_c99
>>> x = Symbol('x')
>>> optimize(3*exp(2*x) - 3, optims_c99)
3*expm1(2*x)
>>> optimize(exp(2*x) - 3, optims_c99)
exp(2*x) - 3
>>> optimize(log(3*x + 3), optims_c99)
log1p(x) + log(3)
>>> optimize(log(2*x + 3), optims_c99)
log(2*x + 3)
The ``optims_c99`` imported above is tuple containing the following instances
(which may be imported from ``sympy.codegen.rewriting``):
- ``expm1_opt``
- ``log1p_opt``
- ``exp2_opt``
- ``log2_opt``
- ``log2const_opt``
"""
from itertools import chain
from sympy import log, exp, Max, Min, Wild, expand_log, Dummy
from sympy.assumptions import Q, ask
from sympy.codegen.cfunctions import log1p, log2, exp2, expm1
from sympy.codegen.matrix_nodes import MatrixSolve
from sympy.core.expr import UnevaluatedExpr
from sympy.core.mul import Mul
from sympy.matrices.expressions.matexpr import MatrixSymbol
from sympy.utilities.iterables import sift
class Optimization:
""" Abstract base class for rewriting optimization.
Subclasses should implement ``__call__`` taking an expression
as argument.
Parameters
==========
cost_function : callable returning number
priority : number
"""
def __init__(self, cost_function=None, priority=1):
self.cost_function = cost_function
self.priority=priority
class ReplaceOptim(Optimization):
""" Rewriting optimization calling replace on expressions.
The instance can be used as a function on expressions for which
it will apply the ``replace`` method (see
:meth:`sympy.core.basic.Basic.replace`).
Parameters
==========
query : first argument passed to replace
value : second argument passed to replace
Examples
========
>>> from sympy import Symbol, Pow
>>> from sympy.codegen.rewriting import ReplaceOptim
>>> from sympy.codegen.cfunctions import exp2
>>> x = Symbol('x')
>>> exp2_opt = ReplaceOptim(lambda p: p.is_Pow and p.base == 2,
... lambda p: exp2(p.exp))
>>> exp2_opt(2**x)
exp2(x)
"""
def __init__(self, query, value, **kwargs):
super().__init__(**kwargs)
self.query = query
self.value = value
def __call__(self, expr):
return expr.replace(self.query, self.value)
def optimize(expr, optimizations):
""" Apply optimizations to an expression.
Parameters
==========
expr : expression
optimizations : iterable of ``Optimization`` instances
The optimizations will be sorted with respect to ``priority`` (highest first).
Examples
========
>>> from sympy import log, Symbol
>>> from sympy.codegen.rewriting import optims_c99, optimize
>>> x = Symbol('x')
>>> optimize(log(x+3)/log(2) + log(x**2 + 1), optims_c99)
log1p(x**2) + log2(x + 3)
"""
for optim in sorted(optimizations, key=lambda opt: opt.priority, reverse=True):
new_expr = optim(expr)
if optim.cost_function is None:
expr = new_expr
else:
before, after = map(lambda x: optim.cost_function(x), (expr, new_expr))
if before > after:
expr = new_expr
return expr
exp2_opt = ReplaceOptim(
lambda p: p.is_Pow and p.base == 2,
lambda p: exp2(p.exp)
)
_d = Wild('d', properties=[lambda x: x.is_Dummy])
_u = Wild('u', properties=[lambda x: not x.is_number and not x.is_Add])
_v = Wild('v')
_w = Wild('w')
log2_opt = ReplaceOptim(_v*log(_w)/log(2), _v*log2(_w), cost_function=lambda expr: expr.count(
lambda e: ( # division & eval of transcendentals are expensive floating point operations...
e.is_Pow and e.exp.is_negative # division
or (isinstance(e, (log, log2)) and not e.args[0].is_number)) # transcendental
)
)
log2const_opt = ReplaceOptim(log(2)*log2(_w), log(_w))
logsumexp_2terms_opt = ReplaceOptim(
lambda l: (isinstance(l, log)
and l.args[0].is_Add
and len(l.args[0].args) == 2
and all(isinstance(t, exp) for t in l.args[0].args)),
lambda l: (
Max(*[e.args[0] for e in l.args[0].args]) +
log1p(exp(Min(*[e.args[0] for e in l.args[0].args])))
)
)
def _try_expm1(expr):
    # Attempt to rewrite ``exp(x) - 1`` sub-expressions of *expr* as expm1(x).
    # Each exp(...) is first hidden behind a fresh Dummy so that factor()
    # cannot rearrange it; then any ``d - 1`` match is turned into expm1,
    # and remaining Dummies are substituted back.
    protected, old_new = expr.replace(exp, lambda arg: Dummy(), map=True)
    factored = protected.factor()
    new_old = {v: k for k, v in old_new.items()}  # Dummy -> original exp(...)
    return factored.replace(_d - 1, lambda d: expm1(new_old[d].args[0])).xreplace(new_old)
def _expm1_value(e):
    # Rewrite an Add so that its numeric part combined with one exp-containing
    # term becomes expm1(...), improving floating point accuracy near zero.
    numbers, non_num = sift(e.args, lambda arg: arg.is_number, binary=True)
    non_num_exp, non_num_other = sift(non_num, lambda arg: arg.has(exp),
                                      binary=True)
    numsum = sum(numbers)
    new_exp_terms, done = [], False
    for exp_term in non_num_exp:
        if done:
            new_exp_terms.append(exp_term)
        else:
            looking_at = exp_term + numsum
            attempt = _try_expm1(looking_at)
            if looking_at == attempt:
                # No rewrite happened; keep the term and keep trying.
                new_exp_terms.append(exp_term)
            else:
                # The numeric sum was absorbed into expm1 -- stop trying.
                done = True
                new_exp_terms.append(attempt)
    if not done:
        # numsum was never absorbed, so it must be added back explicitly.
        new_exp_terms.append(numsum)
    return e.func(*chain(new_exp_terms, non_num_other))
# Rewrite Add expressions so exp(x) - 1 becomes expm1(x).
expm1_opt = ReplaceOptim(lambda e: e.is_Add, _expm1_value)
# Rewrite log(u + 1) as log1p(u) (after factoring and expanding the log).
log1p_opt = ReplaceOptim(
    lambda e: isinstance(e, log),
    lambda l: expand_log(l.replace(
        log, lambda arg: log(arg.factor())
    )).replace(log(_u+1), log1p(_u))
)
def create_expand_pow_optimization(limit):
    """ Creates an instance of :class:`ReplaceOptim` for expanding ``Pow``.

    The requirements for expansions are that the base needs to be a symbol
    and the exponent needs to be an Integer (and be less than or equal to
    ``limit``).

    Parameters
    ==========

    limit : int
         The highest power which is expanded into multiplication.

    Examples
    ========

    >>> from sympy import Symbol, sin
    >>> from sympy.codegen.rewriting import create_expand_pow_optimization
    >>> x = Symbol('x')
    >>> expand_opt = create_expand_pow_optimization(3)
    >>> expand_opt(x**5 + x**3)
    x**5 + x*x*x
    >>> expand_opt(x**5 + x**3 + sin(x)**3)
    x**5 + sin(x)**3 + x*x*x

    """
    def _is_small_symbol_pow(e):
        # Only symbol bases raised to integer exponents within the limit.
        return (e.is_Pow and e.base.is_symbol and e.exp.is_Integer
                and abs(e.exp) <= limit)

    def _as_repeated_mul(p):
        # Unevaluated product of |exp| copies of the base; reciprocal for
        # negative exponents.
        product = UnevaluatedExpr(Mul(*([p.base]*abs(p.exp)), evaluate=False))
        return product if p.exp > 0 else 1/product

    return ReplaceOptim(_is_small_symbol_pow, _as_repeated_mul)
# Optimization procedures for turning A**(-1) * x into MatrixSolve(A, x)
def _matinv_predicate(expr):
# TODO: We should be able to support more than 2 elements
if expr.is_MatMul and len(expr.args) == 2:
left, right = expr.args
if left.is_Inverse and right.shape[1] == 1:
inv_arg = left.arg
if isinstance(inv_arg, MatrixSymbol):
return bool(ask(Q.fullrank(left.arg)))
return False
def _matinv_transform(expr):
    """Rewrite ``Inverse(M) * b`` as ``MatrixSolve(M, b)``."""
    inverse_factor, rhs = expr.args
    return MatrixSolve(inverse_factor.arg, rhs)
# Rewrite explicit matrix inversion as a linear solve.
matinv_opt = ReplaceOptim(_matinv_predicate, _matinv_transform)
# Collections of optimizations:
optims_c99 = (expm1_opt, log1p_opt, exp2_opt, log2_opt, log2const_opt)
| 30.278431
| 96
| 0.643051
|
from itertools import chain
from sympy import log, exp, Max, Min, Wild, expand_log, Dummy
from sympy.assumptions import Q, ask
from sympy.codegen.cfunctions import log1p, log2, exp2, expm1
from sympy.codegen.matrix_nodes import MatrixSolve
from sympy.core.expr import UnevaluatedExpr
from sympy.core.mul import Mul
from sympy.matrices.expressions.matexpr import MatrixSymbol
from sympy.utilities.iterables import sift
class Optimization:
    """Abstract base class for rewriting optimizations.

    Subclasses implement ``__call__`` taking an expression and returning a
    rewritten expression.
    """
    def __init__(self, cost_function=None, priority=1):
        # cost_function: optional callable scoring an expression; ``optimize``
        #   keeps a rewrite only when the score strictly decreases.
        # priority: higher values are applied earlier by ``optimize``.
        self.cost_function = cost_function
        self.priority=priority
class ReplaceOptim(Optimization):
    """Rewriting optimization that calls ``replace`` on expressions.

    The instance is used as a function on expressions, applying
    ``expr.replace(query, value)``.
    """
    def __init__(self, query, value, **kwargs):
        # query/value are forwarded verbatim to Expr.replace.
        super().__init__(**kwargs)
        self.query = query
        self.value = value
    def __call__(self, expr):
        return expr.replace(self.query, self.value)
def optimize(expr, optimizations):
    """Apply *optimizations* to *expr*, highest ``priority`` first.

    An optimization with a ``cost_function`` is only kept when it strictly
    lowers the reported cost; otherwise its rewrite is accepted
    unconditionally.
    """
    for optim in sorted(optimizations, key=lambda opt: opt.priority, reverse=True):
        new_expr = optim(expr)
        if optim.cost_function is None:
            expr = new_expr
        else:
            # Pass the cost function directly to map; no lambda wrapper needed.
            before, after = map(optim.cost_function, (expr, new_expr))
            if before > after:
                expr = new_expr
    return expr
# Rewrite 2**x as exp2(x).
exp2_opt = ReplaceOptim(
    lambda p: p.is_Pow and p.base == 2,
    lambda p: exp2(p.exp)
)
# Wild patterns used by the rewrites below.
_d = Wild('d', properties=[lambda x: x.is_Dummy])  # matches only Dummy symbols
_u = Wild('u', properties=[lambda x: not x.is_number and not x.is_Add])
_v = Wild('v')
_w = Wild('w')
# Rewrite v*log(w)/log(2) as v*log2(w); only kept when the cost function
# reports fewer expensive (division / transcendental) operations afterwards.
log2_opt = ReplaceOptim(_v*log(_w)/log(2), _v*log2(_w), cost_function=lambda expr: expr.count(
    lambda e: (  # count the expensive floating point operations
        e.is_Pow and e.exp.is_negative  # division
        or (isinstance(e, (log, log2)) and not e.args[0].is_number))  # transcendental
    )
)
# Fold log(2)*log2(w) back into log(w).
log2const_opt = ReplaceOptim(log(2)*log2(_w), log(_w))
# Numerically stable two-term log-sum-exp:
# log(exp(a) + exp(b)) -> Max(a, b) + log1p(exp(Min(a, b)))
logsumexp_2terms_opt = ReplaceOptim(
    lambda l: (isinstance(l, log)
               and l.args[0].is_Add
               and len(l.args[0].args) == 2
               and all(isinstance(t, exp) for t in l.args[0].args)),
    lambda l: (
        Max(*[e.args[0] for e in l.args[0].args]) +
        log1p(exp(Min(*[e.args[0] for e in l.args[0].args])))
    )
)
def _try_expm1(expr):
    # Attempt to rewrite ``exp(x) - 1`` sub-expressions of *expr* as expm1(x).
    # Each exp(...) is first hidden behind a fresh Dummy so that factor()
    # cannot rearrange it; then ``d - 1`` matches become expm1, and remaining
    # Dummies are substituted back.
    protected, old_new = expr.replace(exp, lambda arg: Dummy(), map=True)
    factored = protected.factor()
    new_old = {v: k for k, v in old_new.items()}  # Dummy -> original exp(...)
    return factored.replace(_d - 1, lambda d: expm1(new_old[d].args[0])).xreplace(new_old)
def _expm1_value(e):
    # Rewrite an Add so that its numeric part combined with one exp-containing
    # term becomes expm1(...), improving floating point accuracy near zero.
    numbers, non_num = sift(e.args, lambda arg: arg.is_number, binary=True)
    non_num_exp, non_num_other = sift(non_num, lambda arg: arg.has(exp),
                                      binary=True)
    numsum = sum(numbers)
    new_exp_terms, done = [], False
    for exp_term in non_num_exp:
        if done:
            new_exp_terms.append(exp_term)
        else:
            looking_at = exp_term + numsum
            attempt = _try_expm1(looking_at)
            if looking_at == attempt:
                # No rewrite happened; keep the term and keep trying.
                new_exp_terms.append(exp_term)
            else:
                # The numeric sum was absorbed into expm1 -- stop trying.
                done = True
                new_exp_terms.append(attempt)
    if not done:
        # numsum was never absorbed, so it must be added back explicitly.
        new_exp_terms.append(numsum)
    return e.func(*chain(new_exp_terms, non_num_other))
# Rewrite Add expressions so exp(x) - 1 becomes expm1(x).
expm1_opt = ReplaceOptim(lambda e: e.is_Add, _expm1_value)
# Rewrite log(u + 1) as log1p(u) (after factoring and expanding the log).
log1p_opt = ReplaceOptim(
    lambda e: isinstance(e, log),
    lambda l: expand_log(l.replace(
        log, lambda arg: log(arg.factor())
    )).replace(log(_u+1), log1p(_u))
)
def create_expand_pow_optimization(limit):
    """Create a :class:`ReplaceOptim` expanding small integer powers.

    ``x**n`` with a symbol base and integer ``abs(n) <= limit`` is rewritten
    as an unevaluated repeated multiplication (reciprocal for negative ``n``),
    e.g. ``x**3 -> x*x*x``.
    """
    return ReplaceOptim(
        lambda e: e.is_Pow and e.base.is_symbol and e.exp.is_Integer and abs(e.exp) <= limit,
        lambda p: (
            UnevaluatedExpr(Mul(*([p.base]*+p.exp), evaluate=False)) if p.exp > 0 else
            1/UnevaluatedExpr(Mul(*([p.base]*-p.exp), evaluate=False))
        ))
def _matinv_predicate(expr):
    # Match products of exactly two factors: Inverse(M) * column-vector,
    # where M is a full-rank MatrixSymbol.
    if expr.is_MatMul and len(expr.args) == 2:
        left, right = expr.args
        if left.is_Inverse and right.shape[1] == 1:
            inv_arg = left.arg
            if isinstance(inv_arg, MatrixSymbol):
                return bool(ask(Q.fullrank(left.arg)))
    return False
def _matinv_transform(expr):
    # Rewrite Inverse(M) * b as MatrixSolve(M, b).
    left, right = expr.args
    inv_arg = left.arg
    return MatrixSolve(inv_arg, right)
# Rewrite explicit matrix inversion as a linear solve.
matinv_opt = ReplaceOptim(_matinv_predicate, _matinv_transform)
# Optimizations that map onto C99 math functions.
optims_c99 = (expm1_opt, log1p_opt, exp2_opt, log2_opt, log2const_opt)
| true
| true
|
790a3a0571330e3c1221e6178016abcb51459297
| 1,105
|
py
|
Python
|
infertrade/algos/community/__init__.py
|
holderfolyf/infertrade
|
db46d35244c01d595ae0fc5ad896101ec2a3fb57
|
[
"Apache-2.0"
] | null | null | null |
infertrade/algos/community/__init__.py
|
holderfolyf/infertrade
|
db46d35244c01d595ae0fc5ad896101ec2a3fb57
|
[
"Apache-2.0"
] | null | null | null |
infertrade/algos/community/__init__.py
|
holderfolyf/infertrade
|
db46d35244c01d595ae0fc5ad896101ec2a3fb57
|
[
"Apache-2.0"
] | null | null | null |
"""
Functions for signals and positions created within this package.
Copyright 2021 InferStat Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from infertrade.PandasEnum import PandasEnum
from infertrade.algos.community.allocations import scikit_allocation_factory, infertrade_export_allocations
from infertrade.algos.community.signals import normalised_close, scikit_signal_factory, infertrade_export_signals
# A dictionary providing the list of community signals and trading strategies.
# Maps the export category ("signal", and the allocation column name from
# PandasEnum) to the corresponding registry imported above.
infertrade_export = {
    "signal": infertrade_export_signals,
    PandasEnum.ALLOCATION.value: infertrade_export_allocations,
}
| 39.464286
| 113
| 0.819005
|
from infertrade.PandasEnum import PandasEnum
from infertrade.algos.community.allocations import scikit_allocation_factory, infertrade_export_allocations
from infertrade.algos.community.signals import normalised_close, scikit_signal_factory, infertrade_export_signals
# Registry of community exports: signals and allocation strategies, keyed by
# category name.
infertrade_export = {
    "signal": infertrade_export_signals,
    PandasEnum.ALLOCATION.value: infertrade_export_allocations,
}
| true
| true
|
790a3cbe4a9fb22354647695cb3667d39b29f240
| 37,137
|
py
|
Python
|
vendor/k8s.io/kubernetes/cluster/juju/layers/kubernetes-master/reactive/kubernetes_master.py
|
sharang-d/kops
|
35cf4a352e2e115714f3d28fbda81feec2ae8a4e
|
[
"Apache-2.0"
] | 76
|
2017-12-18T21:56:50.000Z
|
2021-11-19T13:18:29.000Z
|
vendor/k8s.io/kubernetes/cluster/juju/layers/kubernetes-master/reactive/kubernetes_master.py
|
sharang-d/kops
|
35cf4a352e2e115714f3d28fbda81feec2ae8a4e
|
[
"Apache-2.0"
] | 23
|
2017-12-18T18:23:20.000Z
|
2018-02-28T08:38:50.000Z
|
vendor/k8s.io/kubernetes/cluster/juju/layers/kubernetes-master/reactive/kubernetes_master.py
|
sharang-d/kops
|
35cf4a352e2e115714f3d28fbda81feec2ae8a4e
|
[
"Apache-2.0"
] | 17
|
2017-12-06T15:21:21.000Z
|
2022-02-18T04:50:28.000Z
|
#!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import os
import re
import random
import shutil
import socket
import string
import json
import ipaddress
import charms.leadership
from shlex import split
from subprocess import check_call
from subprocess import check_output
from subprocess import CalledProcessError
from charms import layer
from charms.layer import snap
from charms.reactive import hook
from charms.reactive import remove_state
from charms.reactive import set_state
from charms.reactive import is_state
from charms.reactive import when, when_any, when_not
from charms.reactive.helpers import data_changed, any_file_changed
from charms.kubernetes.common import get_version
from charms.kubernetes.common import retry
from charms.kubernetes.flagmanager import FlagManager
from charmhelpers.core import hookenv
from charmhelpers.core import host
from charmhelpers.core import unitdata
from charmhelpers.core.host import service_stop
from charmhelpers.core.templating import render
from charmhelpers.fetch import apt_install
from charmhelpers.contrib.charmsupport import nrpe
# Override the default nagios shortname regex to allow periods, which we
# need because our bin names contain them (e.g. 'snap.foo.daemon'). The
# default regex in charmhelpers doesn't allow periods, but nagios itself does.
# NOTE: raw string -- '\.' in a normal string literal is an invalid escape
# sequence (DeprecationWarning in Python 3.6+, slated to become an error).
nrpe.Check.shortname_re = r'[\.A-Za-z0-9-_]+$'
# Make snap-installed binaries (kubectl etc.) resolvable from hook context.
os.environ['PATH'] += os.pathsep + os.path.join(os.sep, 'snap', 'bin')
def service_cidr():
    ''' Return the charm's service-cidr config '''
    # Prefer the frozen value (recorded once the apiserver started) over
    # whatever the config currently says.
    frozen_cidr = unitdata.kv().get('kubernetes-master.service-cidr')
    return frozen_cidr or hookenv.config('service-cidr')
def freeze_service_cidr():
    ''' Freeze the service CIDR. Once the apiserver has started, we can no
    longer safely change this value. '''
    unitdata.kv().set('kubernetes-master.service-cidr', service_cidr())
@hook('upgrade-charm')
def reset_states_for_delivery():
    '''An upgrade charm event was triggered by Juju, react to that here.'''
    migrate_from_pre_snaps()
    install_snaps()
    # Force the auth files to be re-rendered on the next reactive pass.
    set_state('reconfigure.authentication.setup')
    remove_state('authentication.setup')
def rename_file_idempotent(source, destination):
    """Move *source* to *destination*, silently doing nothing when the
    source file does not exist."""
    if not os.path.isfile(source):
        return
    os.rename(source, destination)
def migrate_from_pre_snaps():
    '''Clean up reactive states, systemd services and files left behind by
    pre-snap (deb-based) installs of the master components.'''
    # remove old states
    remove_state('kubernetes.components.installed')
    remove_state('kubernetes.dashboard.available')
    remove_state('kube-dns.available')
    remove_state('kubernetes-master.app_version.set')
    # disable old services
    services = ['kube-apiserver',
                'kube-controller-manager',
                'kube-scheduler']
    for service in services:
        hookenv.log('Stopping {0} service.'.format(service))
        host.service_stop(service)
    # rename auth files (preserve existing credentials in the new location)
    os.makedirs('/root/cdk', exist_ok=True)
    rename_file_idempotent('/etc/kubernetes/serviceaccount.key',
                           '/root/cdk/serviceaccount.key')
    rename_file_idempotent('/srv/kubernetes/basic_auth.csv',
                           '/root/cdk/basic_auth.csv')
    rename_file_idempotent('/srv/kubernetes/known_tokens.csv',
                           '/root/cdk/known_tokens.csv')
    # cleanup old files
    files = [
        "/lib/systemd/system/kube-apiserver.service",
        "/lib/systemd/system/kube-controller-manager.service",
        "/lib/systemd/system/kube-scheduler.service",
        "/etc/default/kube-defaults",
        "/etc/default/kube-apiserver.defaults",
        "/etc/default/kube-controller-manager.defaults",
        "/etc/default/kube-scheduler.defaults",
        "/srv/kubernetes",
        "/home/ubuntu/kubectl",
        "/usr/local/bin/kubectl",
        "/usr/local/bin/kube-apiserver",
        "/usr/local/bin/kube-controller-manager",
        "/usr/local/bin/kube-scheduler",
        "/etc/kubernetes"
    ]
    for file in files:
        if os.path.isdir(file):
            hookenv.log("Removing directory: " + file)
            shutil.rmtree(file)
        elif os.path.isfile(file):
            hookenv.log("Removing file: " + file)
            os.remove(file)
    # clear the flag managers
    FlagManager('kube-apiserver').destroy_all()
    FlagManager('kube-controller-manager').destroy_all()
    FlagManager('kube-scheduler').destroy_all()
def install_snaps():
    '''Install (or refresh) the master component snaps from the configured
    snap channel.'''
    channel = hookenv.config('channel')
    hookenv.status_set('maintenance', 'Installing kubectl snap')
    snap.install('kubectl', channel=channel, classic=True)
    hookenv.status_set('maintenance', 'Installing kube-apiserver snap')
    snap.install('kube-apiserver', channel=channel)
    hookenv.status_set('maintenance',
                       'Installing kube-controller-manager snap')
    snap.install('kube-controller-manager', channel=channel)
    hookenv.status_set('maintenance', 'Installing kube-scheduler snap')
    snap.install('kube-scheduler', channel=channel)
    hookenv.status_set('maintenance', 'Installing cdk-addons snap')
    snap.install('cdk-addons', channel=channel)
    set_state('kubernetes-master.snaps.installed')
    # Components must be (re)started against the freshly installed snaps.
    remove_state('kubernetes-master.components.started')
@when('config.changed.channel')
def channel_changed():
    # Reinstall the snaps whenever the snap channel config changes.
    install_snaps()
@when('config.changed.client_password', 'leadership.is_leader')
def password_changed():
    """Handle password change via the charms config."""
    password = hookenv.config('client_password')
    if password == "" and is_state('client.password.initialised'):
        # password_changed is called during an upgrade. Nothing to do.
        return
    elif password == "":
        # Password not initialised
        password = token_generator()
    setup_basic_auth(password, "admin", "admin")
    # Trigger re-rendering of the auth files with the new password.
    set_state('reconfigure.authentication.setup')
    remove_state('authentication.setup')
    set_state('client.password.initialised')
@when('cni.connected')
@when_not('cni.configured')
def configure_cni(cni):
    ''' Set master configuration on the CNI relation. This lets the CNI
    subordinate know that we're the master so it can respond accordingly. '''
    # NOTE(review): kubeconfig_path is intentionally left empty on the master.
    cni.set_config(is_master=True, kubeconfig_path='')
@when('leadership.is_leader')
@when_not('authentication.setup')
def setup_leader_authentication():
    '''Setup basic authentication and token access for the cluster.

    The leader either reuses previously broadcast credentials or generates
    fresh ones, then broadcasts the file contents for non-leader units.'''
    api_opts = FlagManager('kube-apiserver')
    controller_opts = FlagManager('kube-controller-manager')
    service_key = '/root/cdk/serviceaccount.key'
    basic_auth = '/root/cdk/basic_auth.csv'
    known_tokens = '/root/cdk/known_tokens.csv'
    api_opts.add('basic-auth-file', basic_auth)
    api_opts.add('token-auth-file', known_tokens)
    hookenv.status_set('maintenance', 'Rendering authentication templates.')
    keys = [service_key, basic_auth, known_tokens]
    # Try first to fetch data from an old leadership broadcast.
    if not get_keys_from_leader(keys) \
            or is_state('reconfigure.authentication.setup'):
        last_pass = get_password('basic_auth.csv', 'admin')
        setup_basic_auth(last_pass, 'admin', 'admin')
        if not os.path.isfile(known_tokens):
            setup_tokens(None, 'admin', 'admin')
            setup_tokens(None, 'kubelet', 'kubelet')
            setup_tokens(None, 'kube_proxy', 'kube_proxy')
        # Generate the default service account token key
        os.makedirs('/root/cdk', exist_ok=True)
        if not os.path.isfile(service_key):
            cmd = ['openssl', 'genrsa', '-out', service_key,
                   '2048']
            check_call(cmd)
        remove_state('reconfigure.authentication.setup')
    api_opts.add('service-account-key-file', service_key)
    controller_opts.add('service-account-private-key-file', service_key)
    # read service account key for syndication
    leader_data = {}
    for f in [known_tokens, basic_auth, service_key]:
        with open(f, 'r') as fp:
            leader_data[f] = fp.read()
    # this is slightly opaque, but we are sending file contents under its file
    # path as a key.
    # eg:
    # {'/root/cdk/serviceaccount.key': 'RSA:2471731...'}
    charms.leadership.leader_set(leader_data)
    remove_state('kubernetes-master.components.started')
    set_state('authentication.setup')
@when_not('leadership.is_leader')
def setup_non_leader_authentication():
    '''Mirror the leader's credential files locally and point the master
    services at them.'''
    service_key = '/root/cdk/serviceaccount.key'
    basic_auth = '/root/cdk/basic_auth.csv'
    known_tokens = '/root/cdk/known_tokens.csv'
    keys = [service_key, basic_auth, known_tokens]
    # The source of truth for non-leaders is the leader.
    # Therefore we overwrite_local with whatever the leader has.
    if not get_keys_from_leader(keys, overwrite_local=True):
        # the keys were not retrieved. Non-leaders have to retry.
        return
    if not any_file_changed(keys) and is_state('authentication.setup'):
        # No change detected and we have already setup the authentication
        return
    hookenv.status_set('maintenance', 'Rendering authentication templates.')
    api_opts = FlagManager('kube-apiserver')
    api_opts.add('basic-auth-file', basic_auth)
    api_opts.add('token-auth-file', known_tokens)
    api_opts.add('service-account-key-file', service_key)
    controller_opts = FlagManager('kube-controller-manager')
    controller_opts.add('service-account-private-key-file', service_key)
    # Restart the master components so they pick up the new credentials.
    remove_state('kubernetes-master.components.started')
    set_state('authentication.setup')
def get_keys_from_leader(keys, overwrite_local=False):
    """
    Gets the broadcasted keys from the leader and stores them in
    the corresponding files.

    Args:
        keys: list of keys. Keys are actually files on the FS.
        overwrite_local: when True, replace existing local files with the
            leader's copies.

    Returns: True if all key were fetched, False if not.

    """
    # This races with other codepaths, and seems to require being created first
    # This block may be extracted later, but for now seems to work as intended
    os.makedirs('/root/cdk', exist_ok=True)
    for k in keys:
        # If the path does not exist, assume we need it
        if not os.path.exists(k) or overwrite_local:
            # Fetch data from leadership broadcast
            contents = charms.leadership.leader_get(k)
            # Default to logging the warning and wait for leader data to be set
            if contents is None:
                msg = "Waiting on leaders crypto keys."
                hookenv.status_set('waiting', msg)
                hookenv.log('Missing content for file {}'.format(k))
                return False
            # Write out the file and move on to the next item
            with open(k, 'w+') as fp:
                fp.write(contents)
    return True
@when('kubernetes-master.snaps.installed')
def set_app_version():
    ''' Declare the application version to juju '''
    version = check_output(['kube-apiserver', '--version'])
    # Keep only the part after the last ' v' (the bare version number).
    hookenv.application_version_set(version.split(b' v')[-1].rstrip())
@when('cdk-addons.configured', 'kube-api-endpoint.available',
      'kube-control.connected')
def idle_status(kube_api, kube_control):
    ''' Signal at the end of the run that we are running. '''
    if not all_kube_system_pods_running():
        hookenv.status_set('waiting', 'Waiting for kube-system pods to start')
    elif hookenv.config('service-cidr') != service_cidr():
        # The frozen CIDR wins; surface the mismatch to the operator.
        msg = 'WARN: cannot change service-cidr, still using ' + service_cidr()
        hookenv.status_set('active', msg)
    else:
        # All services should be up and running at this point. Double-check...
        failing_services = master_services_down()
        if len(failing_services) == 0:
            hookenv.status_set('active', 'Kubernetes master running.')
        else:
            msg = 'Stopped services: {}'.format(','.join(failing_services))
            hookenv.status_set('blocked', msg)
def master_services_down():
    """Ensure master services are up and running.

    Return: list of failing services"""
    services = ('kube-apiserver',
                'kube-controller-manager',
                'kube-scheduler')
    return [service for service in services
            if not host.service_running('snap.{}.daemon'.format(service))]
@when('etcd.available', 'tls_client.server.certificate.saved',
      'authentication.setup')
@when_not('kubernetes-master.components.started')
def start_master(etcd):
    '''Run the Kubernetes master components.'''
    hookenv.status_set('maintenance',
                       'Configuring the Kubernetes master services.')
    freeze_service_cidr()
    if not etcd.get_connection_string():
        # etcd is not returning a connection string. This hapens when
        # the master unit disconnects from etcd and is ready to terminate.
        # No point in trying to start master services and fail. Just return.
        return
    handle_etcd_relation(etcd)
    configure_master_services()
    hookenv.status_set('maintenance',
                       'Starting the Kubernetes master services.')
    services = ['kube-apiserver',
                'kube-controller-manager',
                'kube-scheduler']
    for service in services:
        host.service_restart('snap.%s.daemon' % service)
    # 6443 is the secure apiserver port configured by push_service_data.
    hookenv.open_port(6443)
    set_state('kubernetes-master.components.started')
@when('etcd.available')
def etcd_data_change(etcd):
    ''' Etcd scale events block master reconfiguration due to the
    kubernetes-master.components.started state. We need a way to
    handle these events consistenly only when the number of etcd
    units has actually changed '''
    # key off of the connection string
    connection_string = etcd.get_connection_string()
    # If the connection string changes, remove the started state to trigger
    # handling of the master components
    if data_changed('etcd-connect', connection_string):
        remove_state('kubernetes-master.components.started')
@when('kube-control.connected')
@when('cdk-addons.configured')
def send_cluster_dns_detail(kube_control):
    ''' Send cluster DNS info '''
    # Note that the DNS server doesn't necessarily exist at this point. We know
    # where we're going to put it, though, so let's send the info anyway.
    dns_ip = get_dns_ip()
    # Port 53 is the standard DNS port.
    kube_control.set_dns(53, hookenv.config('dns_domain'), dns_ip)
@when('kube-control.auth.requested')
@when('authentication.setup')
@when('leadership.is_leader')
def send_tokens(kube_control):
    """Send the tokens to the workers."""
    kubelet_token = get_token('kubelet')
    proxy_token = get_token('kube_proxy')
    admin_token = get_token('admin')
    # Send the data
    requests = kube_control.auth_user()
    for request in requests:
        kube_control.sign_auth_request(request[0], kubelet_token,
                                       proxy_token, admin_token)
@when_not('kube-control.connected')
def missing_kube_control():
    """Inform the operator master is waiting for a relation to workers.

    If deploying via bundle this won't happen, but if operator is upgrading a
    a charm in a deployment that pre-dates the kube-control relation, it'll be
    missing.

    """
    hookenv.status_set('blocked', 'Waiting for workers.')
@when('kube-api-endpoint.available')
def push_service_data(kube_api):
    ''' Send configuration to the load balancer, and close access to the
    public interface '''
    # Advertise the secure apiserver port to the endpoint relation.
    kube_api.configure(port=6443)
@when('certificates.available')
def send_data(tls):
    '''Send the data that is required to create a server certificate for
    this server.'''
    # Use the public ip of this unit as the Common Name for the certificate.
    common_name = hookenv.unit_public_ip()
    # Get the SDN gateway based on the cidr address.
    kubernetes_service_ip = get_kubernetes_service_ip()
    domain = hookenv.config('dns_domain')
    # Create SANs that the tls layer will add to the server cert.
    sans = [
        hookenv.unit_public_ip(),
        hookenv.unit_private_ip(),
        socket.gethostname(),
        kubernetes_service_ip,
        'kubernetes',
        'kubernetes.{0}'.format(domain),
        'kubernetes.default',
        'kubernetes.default.svc',
        'kubernetes.default.svc.{0}'.format(domain)
    ]
    # Create a path safe name by removing path characters from the unit name.
    certificate_name = hookenv.local_unit().replace('/', '_')
    # Request a server cert with this information.
    tls.request_server_cert(common_name, sans, certificate_name)
@when('kubernetes-master.components.started')
def configure_cdk_addons():
    ''' Configure CDK addons '''
    remove_state('cdk-addons.configured')
    dbEnabled = str(hookenv.config('enable-dashboard-addons')).lower()
    # Snap configuration consumed by the cdk-addons snap.
    args = [
        'arch=' + arch(),
        'dns-ip=' + get_dns_ip(),
        'dns-domain=' + hookenv.config('dns_domain'),
        'enable-dashboard=' + dbEnabled
    ]
    check_call(['snap', 'set', 'cdk-addons'] + args)
    if not addons_ready():
        # Leave the state unset so the reactive framework retries later.
        hookenv.status_set('waiting', 'Waiting to retry addon deployment')
        remove_state('cdk-addons.configured')
        return
    set_state('cdk-addons.configured')
@retry(times=3, delay_secs=20)
def addons_ready():
    """
    Test if the add ons got installed

    Returns: True is the addons got applied

    """
    try:
        check_call(['cdk-addons.apply'])
        return True
    except CalledProcessError:
        hookenv.log("Addons are not ready yet.")
        return False
@when('loadbalancer.available', 'certificates.ca.available',
      'certificates.client.cert.available', 'authentication.setup')
def loadbalancer_kubeconfig(loadbalancer, ca, client):
    '''Build a kubeconfig that points clients at the load balancer.'''
    # Get the potential list of loadbalancers from the relation object.
    hosts = loadbalancer.get_addresses_ports()
    # Get the public address of loadbalancers so users can access the cluster.
    address = hosts[0].get('public-address')
    # Get the port of the loadbalancer so users can access the cluster.
    port = hosts[0].get('port')
    server = 'https://{0}:{1}'.format(address, port)
    build_kubeconfig(server)
@when('certificates.ca.available', 'certificates.client.cert.available',
      'authentication.setup')
@when_not('loadbalancer.available')
def create_self_config(ca, client):
    '''Create a kubernetes configuration for the master unit.'''
    # No load balancer: clients talk to this unit's apiserver directly.
    server = 'https://{0}:{1}'.format(hookenv.unit_get('public-address'), 6443)
    build_kubeconfig(server)
@when('ceph-storage.available')
def ceph_state_control(ceph_admin):
    ''' Determine if we should remove the state that controls the re-render
    and execution of the ceph-relation-changed event because there
    are changes in the relationship data, and we should re-render any
    configs, keys, and/or service pre-reqs '''
    ceph_relation_data = {
        'mon_hosts': ceph_admin.mon_hosts(),
        'fsid': ceph_admin.fsid(),
        'auth_supported': ceph_admin.auth(),
        'hostname': socket.gethostname(),
        'key': ceph_admin.key()
    }
    # Re-execute the rendering if the data has changed.
    if data_changed('ceph-config', ceph_relation_data):
        remove_state('ceph-storage.configured')
@when('ceph-storage.available')
@when_not('ceph-storage.configured')
def ceph_storage(ceph_admin):
    '''Ceph on kubernetes will require a few things - namely a ceph
    configuration, and the ceph secret key file used for authentication.
    This method will install the client package, and render the requisit files
    in order to consume the ceph-storage relation.'''
    ceph_context = {
        'mon_hosts': ceph_admin.mon_hosts(),
        'fsid': ceph_admin.fsid(),
        'auth_supported': ceph_admin.auth(),
        'use_syslog': "true",
        'ceph_public_network': '',
        'ceph_cluster_network': '',
        'loglevel': 1,
        'hostname': socket.gethostname(),
    }
    # Install the ceph common utilities.
    apt_install(['ceph-common'], fatal=True)
    etc_ceph_directory = '/etc/ceph'
    if not os.path.isdir(etc_ceph_directory):
        os.makedirs(etc_ceph_directory)
    charm_ceph_conf = os.path.join(etc_ceph_directory, 'ceph.conf')
    # Render the ceph configuration from the ceph conf template
    render('ceph.conf', charm_ceph_conf, ceph_context)
    # The key can rotate independently of other ceph config, so validate it
    admin_key = os.path.join(etc_ceph_directory,
                             'ceph.client.admin.keyring')
    try:
        with open(admin_key, 'w') as key_file:
            key_file.write("[client.admin]\n\tkey = {}\n".format(
                ceph_admin.key()))
    except IOError as err:
        hookenv.log("IOError writing admin.keyring: {}".format(err))
    # Enlist the ceph-admin key as a kubernetes secret
    if ceph_admin.key():
        encoded_key = base64.b64encode(ceph_admin.key().encode('utf-8'))
    else:
        # We didn't have a key, and cannot proceed. Do not set state and
        # allow this method to re-execute
        return
    context = {'secret': encoded_key.decode('ascii')}
    render('ceph-secret.yaml', '/tmp/ceph-secret.yaml', context)
    try:
        # At first glance this is deceptive. The apply stanza will create if
        # it doesn't exist, otherwise it will update the entry, ensuring our
        # ceph-secret is always reflective of what we have in /etc/ceph
        # assuming we have invoked this anytime that file would change.
        cmd = ['kubectl', 'apply', '-f', '/tmp/ceph-secret.yaml']
        check_call(cmd)
        os.remove('/tmp/ceph-secret.yaml')
    except CalledProcessError:
        # kubectl apply failed (was a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit); return and prepare for re-exec.
        return
    # when complete, set a state relating to configuration of the storage
    # backend that will allow other modules to hook into this and verify we
    # have performed the necessary pre-req steps to interface with a ceph
    # deployment.
    set_state('ceph-storage.configured')
@when('nrpe-external-master.available')
@when_not('nrpe-external-master.initial-config')
def initial_nrpe_config(nagios=None):
    # First-time Nagios/NRPE setup; later config changes are handled by
    # update_nrpe_config.
    set_state('nrpe-external-master.initial-config')
    update_nrpe_config(nagios)
@when('kubernetes-master.components.started')
@when('nrpe-external-master.available')
@when_any('config.changed.nagios_context',
          'config.changed.nagios_servicegroups')
def update_nrpe_config(unused=None):
    # (Re)write the Nagios service checks for the master daemons.
    services = (
        'snap.kube-apiserver.daemon',
        'snap.kube-controller-manager.daemon',
        'snap.kube-scheduler.daemon'
    )
    hostname = nrpe.get_nagios_hostname()
    current_unit = nrpe.get_nagios_unit_name()
    nrpe_setup = nrpe.NRPE(hostname=hostname)
    nrpe.add_init_service_checks(nrpe_setup, services, current_unit)
    nrpe_setup.write()
@when_not('nrpe-external-master.available')
@when('nrpe-external-master.initial-config')
def remove_nrpe_config(nagios=None):
    # The nagios relation went away; drop our checks.
    remove_state('nrpe-external-master.initial-config')
    # List of systemd services for which the checks will be removed
    services = (
        'snap.kube-apiserver.daemon',
        'snap.kube-controller-manager.daemon',
        'snap.kube-scheduler.daemon'
    )
    # The current nrpe-external-master interface doesn't handle a lot of logic,
    # use the charm-helpers code for now.
    hostname = nrpe.get_nagios_hostname()
    nrpe_setup = nrpe.NRPE(hostname=hostname)
    for service in services:
        nrpe_setup.remove_check(shortname=service)
def is_privileged():
    """Return boolean indicating whether or not to set allow-privileged=true.

    A config value of 'auto' defers to whether GPU workers are present.
    """
    privileged = hookenv.config('allow-privileged')
    if privileged == 'auto':
        return is_state('kubernetes-master.gpu.enabled')
    return privileged == 'true'
@when('config.changed.allow-privileged')
@when('kubernetes-master.components.started')
def on_config_allow_privileged_change():
    """React to changed 'allow-privileged' config value.

    """
    # Restart the master components so the new flag takes effect.
    remove_state('kubernetes-master.components.started')
    remove_state('config.changed.allow-privileged')
@when('kube-control.gpu.available')
@when('kubernetes-master.components.started')
@when_not('kubernetes-master.gpu.enabled')
def on_gpu_available(kube_control):
    """The remote side (kubernetes-worker) is gpu-enabled.

    We need to run in privileged mode.

    """
    config = hookenv.config()
    if config['allow-privileged'] == "false":
        # Operator explicitly disabled privileged mode; just advertise it.
        hookenv.status_set(
            'active',
            'GPUs available. Set allow-privileged="auto" to enable.'
        )
        return
    remove_state('kubernetes-master.components.started')
    set_state('kubernetes-master.gpu.enabled')
@when('kubernetes-master.gpu.enabled')
@when_not('kubernetes-master.privileged')
def disable_gpu_mode():
    """We were in gpu mode, but the operator has set allow-privileged="false",
    so we can't run in gpu mode anymore.

    """
    remove_state('kubernetes-master.gpu.enabled')
@hook('stop')
def shutdown():
    """ Stop the kubernetes master services
    """
    for daemon in ('snap.kube-apiserver.daemon',
                   'snap.kube-controller-manager.daemon',
                   'snap.kube-scheduler.daemon'):
        service_stop(daemon)
def arch():
    '''Return the package architecture as a string. Raise an exception if the
    architecture is not supported by kubernetes.'''
    # dpkg prints e.g. b'amd64\n'; strip the newline and decode to str.
    raw = check_output(['dpkg', '--print-architecture'])
    return raw.rstrip().decode('utf-8')
def build_kubeconfig(server):
    '''Gather the relevant data for Kubernetes configuration objects and create
    a config object with that information.

    :param server: URL of the apiserver the kubeconfig should point at.
    '''
    # TLS material paths come from the tls-client layer.
    layer_options = layer.options('tls-client')
    ca = layer_options.get('ca_certificate_path')
    client_pass = get_password('basic_auth.csv', 'admin')
    if not (ca and os.path.isfile(ca) and client_pass):
        # Not everything we need is available yet; a later hook will retry.
        return
    # Place the kubeconfig where the ubuntu user expects it.
    kubeconfig_path = os.path.join(os.sep, 'home', 'ubuntu', 'config')
    create_kubeconfig(kubeconfig_path, server, ca,
                      user='admin', password=client_pass)
    # chown so `juju scp` as the ubuntu user can read the file.
    check_call(['chown', 'ubuntu:ubuntu', kubeconfig_path])
def create_kubeconfig(kubeconfig, server, ca, key=None, certificate=None,
                      user='ubuntu', context='juju-context',
                      cluster='juju-cluster', password=None, token=None):
    '''Create a configuration for Kubernetes based on path using the supplied
    arguments for values of the Kubernetes server, CA, key, certificate, user
    context and cluster.

    At least one authentication mechanism must be supplied: key+certificate,
    password, or token. Token and password are mutually exclusive.

    :param kubeconfig: path of the kubeconfig file to write.
    :param server: apiserver URL recorded in the cluster stanza.
    :param ca: path to the CA certificate (embedded into the file).
    :param key: optional path to the client key.
    :param certificate: optional path to the client certificate.
    :param user: credential/user name recorded in the file.
    :param context: name of the context to create and select.
    :param cluster: name of the cluster stanza.
    :param password: optional basic-auth password for *user*.
    :param token: optional bearer token for *user*.
    :raises ValueError: when no auth mechanism is given, or when both
        token and password are given.
    '''
    if not key and not certificate and not password and not token:
        raise ValueError('Missing authentication mechanism.')

    # token and password are mutually exclusive. Error early if both are
    # present. The developer has requested an impossible situation.
    # see: kubectl config set-credentials --help
    if token and password:
        raise ValueError('Token and Password are mutually exclusive.')
    # Create the config file with the address of the master server.
    cmd = 'kubectl config --kubeconfig={0} set-cluster {1} ' \
          '--server={2} --certificate-authority={3} --embed-certs=true'
    check_call(split(cmd.format(kubeconfig, cluster, server, ca)))
    # Delete old users
    cmd = 'kubectl config --kubeconfig={0} unset users'
    check_call(split(cmd.format(kubeconfig)))
    # Create the credentials using the client flags.
    cmd = 'kubectl config --kubeconfig={0} ' \
          'set-credentials {1} '.format(kubeconfig, user)

    if key and certificate:
        cmd = '{0} --client-key={1} --client-certificate={2} '\
              '--embed-certs=true'.format(cmd, key, certificate)
    if password:
        cmd = "{0} --username={1} --password={2}".format(cmd, user, password)
    # This is mutually exclusive from password. They will not work together.
    if token:
        cmd = "{0} --token={1}".format(cmd, token)
    check_call(split(cmd))
    # Create a default context with the cluster.
    cmd = 'kubectl config --kubeconfig={0} set-context {1} ' \
          '--cluster={2} --user={3}'
    check_call(split(cmd.format(kubeconfig, context, cluster, user)))
    # Make the config use this new context.
    cmd = 'kubectl config --kubeconfig={0} use-context {1}'
    check_call(split(cmd.format(kubeconfig, context)))
def get_dns_ip():
    '''Get an IP address for the DNS server on the provided cidr.'''
    # Convention: the cluster DNS service lives at .10 in the service network.
    network = ipaddress.IPv4Interface(service_cidr()).network
    return (network.network_address + 10).exploded
def get_kubernetes_service_ip():
    '''Get the IP address for the kubernetes service based on the cidr.'''
    # Convention: the kubernetes service is the first host in the network.
    network = ipaddress.IPv4Interface(service_cidr()).network
    return (network.network_address + 1).exploded
def handle_etcd_relation(reldata):
    ''' Save the client credentials and set appropriate daemon flags when
    etcd declares itself as available.

    :param reldata: the etcd relation object; provides the connection
        string and the client TLS credentials.
    '''
    connection_string = reldata.get_connection_string()
    # Define where the etcd tls files will be kept.
    etcd_dir = '/root/cdk/etcd'
    # Create paths to the etcd client ca, key, and cert file locations.
    ca = os.path.join(etcd_dir, 'client-ca.pem')
    key = os.path.join(etcd_dir, 'client-key.pem')
    cert = os.path.join(etcd_dir, 'client-cert.pem')
    # Save the client credentials (in relation data) to the paths provided.
    reldata.save_client_credentials(key, cert, ca)

    api_opts = FlagManager('kube-apiserver')

    # Never use stale data, always prefer whats coming in during context
    # building. if its stale, its because whats in unitdata is stale
    data = api_opts.data
    if data.get('etcd-servers-strict') or data.get('etcd-servers'):
        api_opts.destroy('etcd-cafile')
        api_opts.destroy('etcd-keyfile')
        api_opts.destroy('etcd-certfile')
        api_opts.destroy('etcd-servers', strict=True)
        api_opts.destroy('etcd-servers')

    # Set the apiserver flags in the options manager
    api_opts.add('etcd-cafile', ca)
    api_opts.add('etcd-keyfile', key)
    api_opts.add('etcd-certfile', cert)
    api_opts.add('etcd-servers', connection_string, strict=True)
def configure_master_services():
    ''' Add remaining flags for the master services and configure snaps to use
    them.

    Collects TLS paths from the tls-client layer, decides the privileged
    setting, assembles apiserver/controller-manager/scheduler flags in the
    FlagManagers, and pushes them into the snaps via `snap set`.
    '''
    api_opts = FlagManager('kube-apiserver')
    controller_opts = FlagManager('kube-controller-manager')
    scheduler_opts = FlagManager('kube-scheduler')
    # NOTE(review): 'v' is also added for the scheduler again further down;
    # presumably FlagManager.add is idempotent -- confirm.
    scheduler_opts.add('v', '2')

    # Get the tls paths from the layer data.
    layer_options = layer.options('tls-client')
    ca_cert_path = layer_options.get('ca_certificate_path')
    client_cert_path = layer_options.get('client_certificate_path')
    client_key_path = layer_options.get('client_key_path')
    server_cert_path = layer_options.get('server_certificate_path')
    server_key_path = layer_options.get('server_key_path')

    if is_privileged():
        api_opts.add('allow-privileged', 'true', strict=True)
        set_state('kubernetes-master.privileged')
    else:
        api_opts.add('allow-privileged', 'false', strict=True)
        remove_state('kubernetes-master.privileged')

    # Handle static options for now
    api_opts.add('service-cluster-ip-range', service_cidr())
    api_opts.add('min-request-timeout', '300')
    api_opts.add('v', '4')
    api_opts.add('tls-cert-file', server_cert_path)
    api_opts.add('tls-private-key-file', server_key_path)
    api_opts.add('kubelet-certificate-authority', ca_cert_path)
    api_opts.add('kubelet-client-certificate', client_cert_path)
    api_opts.add('kubelet-client-key', client_key_path)
    api_opts.add('logtostderr', 'true')
    api_opts.add('insecure-bind-address', '127.0.0.1')
    api_opts.add('insecure-port', '8080')
    api_opts.add('storage-backend', 'etcd2')  # FIXME: add etcd3 support

    admission_control = [
        'Initializers',
        'NamespaceLifecycle',
        'LimitRanger',
        'ServiceAccount',
        'ResourceQuota',
        'DefaultTolerationSeconds'
    ]

    # Trim plugins that the installed apiserver does not know about yet.
    if get_version('kube-apiserver') < (1, 6):
        hookenv.log('Removing DefaultTolerationSeconds from admission-control')
        admission_control.remove('DefaultTolerationSeconds')
    if get_version('kube-apiserver') < (1, 7):
        hookenv.log('Removing Initializers from admission-control')
        admission_control.remove('Initializers')
    api_opts.add('admission-control', ','.join(admission_control), strict=True)

    # Default to 3 minute resync. TODO: Make this configureable?
    controller_opts.add('min-resync-period', '3m')
    controller_opts.add('v', '2')
    controller_opts.add('root-ca-file', ca_cert_path)
    controller_opts.add('logtostderr', 'true')
    controller_opts.add('master', 'http://127.0.0.1:8080')

    scheduler_opts.add('v', '2')
    scheduler_opts.add('logtostderr', 'true')
    scheduler_opts.add('master', 'http://127.0.0.1:8080')

    cmd = ['snap', 'set', 'kube-apiserver'] + api_opts.to_s().split(' ')
    check_call(cmd)
    cmd = (
        ['snap', 'set', 'kube-controller-manager'] +
        controller_opts.to_s().split(' ')
    )
    check_call(cmd)
    cmd = ['snap', 'set', 'kube-scheduler'] + scheduler_opts.to_s().split(' ')
    check_call(cmd)
def setup_basic_auth(password=None, username='admin', uid='admin'):
    '''Create the basic_auth.csv file used by the apiserver.

    Generates a random password when none is supplied.
    '''
    auth_dir = '/root/cdk'
    if not os.path.isdir(auth_dir):
        os.makedirs(auth_dir)
    if not password:
        password = token_generator()
    htaccess = os.path.join(auth_dir, 'basic_auth.csv')
    with open(htaccess, 'w') as stream:
        stream.write('{0},{1},{2}'.format(password, username, uid))
def setup_tokens(token, username, user):
    '''Append a token line for kubernetes authentication to known_tokens.csv.

    Generates a random token when none is supplied.
    '''
    token_dir = '/root/cdk'
    if not os.path.isdir(token_dir):
        os.makedirs(token_dir)
    if not token:
        token = token_generator()
    known_tokens = os.path.join(token_dir, 'known_tokens.csv')
    with open(known_tokens, 'a') as stream:
        stream.write('{0},{1},{2}\n'.format(token, username, user))
def get_password(csv_fname, user):
    '''Get the password of user within the csv file provided.

    :param csv_fname: csv file name under /root/cdk; each row is
        "<password>,<user>[,...]".
    :param user: user name to look up (second field).
    :return: the first field of the first matching row, or None when the
        file or the user is missing.
    '''
    root_cdk = '/root/cdk'
    tokens_fname = os.path.join(root_cdk, csv_fname)
    if not os.path.isfile(tokens_fname):
        return None
    with open(tokens_fname, 'r') as stream:
        for line in stream:
            record = line.split(',')
            # Skip blank or malformed rows; previously a trailing empty
            # line raised IndexError (record[1]) and aborted the hook.
            if len(record) > 1 and record[1] == user:
                return record[0]
    return None
def get_token(username):
    """Return the stored auth token for *username*, or None when absent."""
    return get_password('known_tokens.csv', username)
def set_token(password, save_salt):
    ''' Store a token so it can be recalled later by token_generator.

    :param password: the value to store.
    :param save_salt: the unitdata key under which to store it.
    :return: the value read back from unitdata.
    '''
    kv = unitdata.kv()
    kv.set(save_salt, password)
    return kv.get(save_salt)
def token_generator(length=32):
    ''' Generate a random alphanumeric token.

    :param length: number of characters in the token (default 32).
    :return: a string drawn from [A-Za-z0-9] using a CSPRNG.
    '''
    rng = random.SystemRandom()
    alphabet = string.ascii_letters + string.digits
    return ''.join(rng.choice(alphabet) for _ in range(length))
@retry(times=3, delay_secs=10)
def all_kube_system_pods_running():
    ''' Check pod status in the kube-system namespace. Returns True if all
    pods are in the 'Running' phase, False otherwise (including when the
    kubectl query itself fails). '''
    cmd = ['kubectl', 'get', 'po', '-n', 'kube-system', '-o', 'json']
    try:
        raw = check_output(cmd)
    except CalledProcessError:
        hookenv.log('failed to get kube-system pod status')
        return False
    pods = json.loads(raw.decode('utf-8'))
    return all(pod['status']['phase'] == 'Running'
               for pod in pods['items'])
def apiserverVersion():
    """Return the local kube-apiserver version as a tuple of three ints."""
    cmd = 'kube-apiserver --version'.split()
    version_text = check_output(cmd).decode('utf-8')
    # First three integer groups, e.g. 'v1.7.4' -> (1, 7, 4).
    return tuple(int(q) for q in re.findall("[0-9]+", version_text)[:3])
| 36.878848
| 79
| 0.683819
|
import base64
import os
import re
import random
import shutil
import socket
import string
import json
import ipaddress
import charms.leadership
from shlex import split
from subprocess import check_call
from subprocess import check_output
from subprocess import CalledProcessError
from charms import layer
from charms.layer import snap
from charms.reactive import hook
from charms.reactive import remove_state
from charms.reactive import set_state
from charms.reactive import is_state
from charms.reactive import when, when_any, when_not
from charms.reactive.helpers import data_changed, any_file_changed
from charms.kubernetes.common import get_version
from charms.kubernetes.common import retry
from charms.kubernetes.flagmanager import FlagManager
from charmhelpers.core import hookenv
from charmhelpers.core import host
from charmhelpers.core import unitdata
from charmhelpers.core.host import service_stop
from charmhelpers.core.templating import render
from charmhelpers.fetch import apt_install
from charmhelpers.contrib.charmsupport import nrpe
# Override NRPE's shortname pattern so dotted names like
# 'snap.kube-apiserver.daemon' are accepted. Use a raw string: '\.' in a
# normal string literal is an invalid escape sequence and raises a
# DeprecationWarning on modern Python (the runtime value is unchanged).
nrpe.Check.shortname_re = r'[\.A-Za-z0-9-_]+$'
# Snap-installed binaries (kubectl, kube-apiserver, ...) live in /snap/bin.
os.environ['PATH'] += os.pathsep + os.path.join(os.sep, 'snap', 'bin')
def service_cidr():
    ''' Return the service-cidr in use: the value frozen in unitdata when
    the master first started, falling back to the charm config. '''
    frozen = unitdata.kv().get('kubernetes-master.service-cidr')
    return frozen or hookenv.config('service-cidr')
def freeze_service_cidr():
    ''' Pin the current service-cidr into unitdata so later config changes
    cannot alter the network the cluster was built with. '''
    unitdata.kv().set('kubernetes-master.service-cidr', service_cidr())
@hook('upgrade-charm')
def reset_states_for_delivery():
    '''upgrade-charm hook: migrate from the pre-snap layout, reinstall the
    snaps, and force authentication setup to run again.'''
    migrate_from_pre_snaps()
    install_snaps()
    set_state('reconfigure.authentication.setup')
    remove_state('authentication.setup')
def rename_file_idempotent(source, destination):
    """Rename *source* to *destination*; silently do nothing when the
    source file does not exist, so repeated calls are safe."""
    if not os.path.isfile(source):
        return
    os.rename(source, destination)
def migrate_from_pre_snaps():
    '''One-time migration from the pre-snap charm layout: reset old states,
    stop the old systemd services, move auth files under /root/cdk, delete
    obsolete binaries/config, and clear the flag managers.'''
    # remove old states
    remove_state('kubernetes.components.installed')
    remove_state('kubernetes.dashboard.available')
    remove_state('kube-dns.available')
    remove_state('kubernetes-master.app_version.set')

    # disable old services
    services = ['kube-apiserver',
                'kube-controller-manager',
                'kube-scheduler']
    for service in services:
        hookenv.log('Stopping {0} service.'.format(service))
        host.service_stop(service)

    # rename auth files
    os.makedirs('/root/cdk', exist_ok=True)
    rename_file_idempotent('/etc/kubernetes/serviceaccount.key',
                           '/root/cdk/serviceaccount.key')
    rename_file_idempotent('/srv/kubernetes/basic_auth.csv',
                           '/root/cdk/basic_auth.csv')
    rename_file_idempotent('/srv/kubernetes/known_tokens.csv',
                           '/root/cdk/known_tokens.csv')

    # cleanup old files
    files = [
        "/lib/systemd/system/kube-apiserver.service",
        "/lib/systemd/system/kube-controller-manager.service",
        "/lib/systemd/system/kube-scheduler.service",
        "/etc/default/kube-defaults",
        "/etc/default/kube-apiserver.defaults",
        "/etc/default/kube-controller-manager.defaults",
        "/etc/default/kube-scheduler.defaults",
        "/srv/kubernetes",
        "/home/ubuntu/kubectl",
        "/usr/local/bin/kubectl",
        "/usr/local/bin/kube-apiserver",
        "/usr/local/bin/kube-controller-manager",
        "/usr/local/bin/kube-scheduler",
        "/etc/kubernetes"
    ]
    for file in files:
        if os.path.isdir(file):
            hookenv.log("Removing directory: " + file)
            shutil.rmtree(file)
        elif os.path.isfile(file):
            hookenv.log("Removing file: " + file)
            os.remove(file)

    # clear the flag managers
    FlagManager('kube-apiserver').destroy_all()
    FlagManager('kube-controller-manager').destroy_all()
    FlagManager('kube-scheduler').destroy_all()
def install_snaps():
    '''Install (or refresh) the master snaps at the configured channel and
    flag the master components for (re)start.'''
    channel = hookenv.config('channel')
    hookenv.status_set('maintenance', 'Installing kubectl snap')
    # kubectl is classic-confined so it can read the kubeconfig on disk.
    snap.install('kubectl', channel=channel, classic=True)
    hookenv.status_set('maintenance', 'Installing kube-apiserver snap')
    snap.install('kube-apiserver', channel=channel)
    hookenv.status_set('maintenance',
                       'Installing kube-controller-manager snap')
    snap.install('kube-controller-manager', channel=channel)
    hookenv.status_set('maintenance', 'Installing kube-scheduler snap')
    snap.install('kube-scheduler', channel=channel)
    hookenv.status_set('maintenance', 'Installing cdk-addons snap')
    snap.install('cdk-addons', channel=channel)
    set_state('kubernetes-master.snaps.installed')
    remove_state('kubernetes-master.components.started')
@when('config.changed.channel')
def channel_changed():
    '''Reinstall the snaps when the snap channel config changes.'''
    install_snaps()
@when('config.changed.client_password', 'leadership.is_leader')
def password_changed():
    '''React to a changed client_password config on the leader: rewrite
    basic auth and force authentication setup to run again.'''
    password = hookenv.config('client_password')
    if password == "" and is_state('client.password.initialised'):
        # password_changed is called during an upgrade. Nothing to do.
        return
    elif password == "":
        # Password not initialised
        password = token_generator()
    setup_basic_auth(password, "admin", "admin")
    set_state('reconfigure.authentication.setup')
    remove_state('authentication.setup')
    set_state('client.password.initialised')
@when('cni.connected')
@when_not('cni.configured')
def configure_cni(cni):
    '''Set the master flag on the CNI relation; the master sends an empty
    kubeconfig path over the relation.'''
    cni.set_config(is_master=True, kubeconfig_path='')
@when('leadership.is_leader')
@when_not('authentication.setup')
def setup_leader_authentication():
    '''On the leader, ensure the basic-auth, token, and service-account key
    files exist (creating them unless a previous leadership broadcast
    provides them), point the apiserver/controller-manager flags at them,
    and broadcast the file contents via leadership data.'''
    api_opts = FlagManager('kube-apiserver')
    controller_opts = FlagManager('kube-controller-manager')

    service_key = '/root/cdk/serviceaccount.key'
    basic_auth = '/root/cdk/basic_auth.csv'
    known_tokens = '/root/cdk/known_tokens.csv'
    api_opts.add('basic-auth-file', basic_auth)
    api_opts.add('token-auth-file', known_tokens)
    hookenv.status_set('maintenance', 'Rendering authentication templates.')

    keys = [service_key, basic_auth, known_tokens]
    # Try first to fetch data from an old leadership broadcast.
    if not get_keys_from_leader(keys) \
            or is_state('reconfigure.authentication.setup'):
        last_pass = get_password('basic_auth.csv', 'admin')
        setup_basic_auth(last_pass, 'admin', 'admin')

        if not os.path.isfile(known_tokens):
            setup_tokens(None, 'admin', 'admin')
            setup_tokens(None, 'kubelet', 'kubelet')
            setup_tokens(None, 'kube_proxy', 'kube_proxy')

        # Generate the default service account token key
        os.makedirs('/root/cdk', exist_ok=True)
        if not os.path.isfile(service_key):
            cmd = ['openssl', 'genrsa', '-out', service_key,
                   '2048']
            check_call(cmd)
        remove_state('reconfigure.authentication.setup')

    api_opts.add('service-account-key-file', service_key)
    controller_opts.add('service-account-private-key-file', service_key)

    # read service account key for syndication
    leader_data = {}
    for f in [known_tokens, basic_auth, service_key]:
        with open(f, 'r') as fp:
            leader_data[f] = fp.read()

    # this is slightly opaque, but we are sending file contents under its file
    # path as a key.
    # eg:
    # {'/root/cdk/serviceaccount.key': 'RSA:2471731...'}
    charms.leadership.leader_set(leader_data)

    remove_state('kubernetes-master.components.started')
    set_state('authentication.setup')
@when_not('leadership.is_leader')
def setup_non_leader_authentication():
    '''On non-leader units, sync the auth files from leadership data and
    point the service flags at them; keeps retrying until the leader has
    broadcast all three files.'''
    service_key = '/root/cdk/serviceaccount.key'
    basic_auth = '/root/cdk/basic_auth.csv'
    known_tokens = '/root/cdk/known_tokens.csv'

    keys = [service_key, basic_auth, known_tokens]
    # The source of truth for non-leaders is the leader.
    # Therefore we overwrite_local with whatever the leader has.
    if not get_keys_from_leader(keys, overwrite_local=True):
        # the keys were not retrieved. Non-leaders have to retry.
        return

    if not any_file_changed(keys) and is_state('authentication.setup'):
        # No change detected and we have already setup the authentication
        return

    hookenv.status_set('maintenance', 'Rendering authentication templates.')
    api_opts = FlagManager('kube-apiserver')
    api_opts.add('basic-auth-file', basic_auth)
    api_opts.add('token-auth-file', known_tokens)
    api_opts.add('service-account-key-file', service_key)

    controller_opts = FlagManager('kube-controller-manager')
    controller_opts.add('service-account-private-key-file', service_key)

    remove_state('kubernetes-master.components.started')
    set_state('authentication.setup')
def get_keys_from_leader(keys, overwrite_local=False):
    '''Fetch the named auth files from leadership data and write them to
    local disk.

    :param keys: list of file paths (also the leadership-data keys).
    :param overwrite_local: when True, rewrite files that already exist.
    :return: False while any key is missing from leadership data (callers
        should retry later); True once all requested files are on disk.
    '''
    # This races with other codepaths, and seems to require being created first
    # This block may be extracted later, but for now seems to work as intended
    os.makedirs('/root/cdk', exist_ok=True)

    for k in keys:
        # If the path does not exist, assume we need it
        if not os.path.exists(k) or overwrite_local:
            # Fetch data from leadership broadcast
            contents = charms.leadership.leader_get(k)
            # Default to logging the warning and wait for leader data to be set
            if contents is None:
                msg = "Waiting on leaders crypto keys."
                hookenv.status_set('waiting', msg)
                hookenv.log('Missing content for file {}'.format(k))
                return False
            # Write out the file and move on to the next item
            with open(k, 'w+') as fp:
                fp.write(contents)

    return True
@when('kubernetes-master.snaps.installed')
def set_app_version():
    '''Surface the installed kube-apiserver version as the juju
    application version.'''
    version = check_output(['kube-apiserver', '--version'])
    # Output looks like b'Kubernetes v1.7.4'; keep what follows ' v'.
    hookenv.application_version_set(version.split(b' v')[-1].rstrip())
@when('cdk-addons.configured', 'kube-api-endpoint.available',
      'kube-control.connected')
def idle_status(kube_api, kube_control):
    '''Set the unit status once everything is configured: waiting while
    kube-system pods start, a warning when service-cidr diverges from the
    frozen value, blocked when a master service is down, active otherwise.'''
    if not all_kube_system_pods_running():
        hookenv.status_set('waiting', 'Waiting for kube-system pods to start')
    elif hookenv.config('service-cidr') != service_cidr():
        # The cidr is frozen at first start; config changes are ignored.
        msg = 'WARN: cannot change service-cidr, still using ' + service_cidr()
        hookenv.status_set('active', msg)
    else:
        # All services should be up and running at this point. Double-check...
        failing_services = master_services_down()
        if len(failing_services) == 0:
            hookenv.status_set('active', 'Kubernetes master running.')
        else:
            msg = 'Stopped services: {}'.format(','.join(failing_services))
            hookenv.status_set('blocked', msg)
def master_services_down():
    """Return the list of master services whose snap daemons are not
    running; an empty list means everything is up."""
    return [svc
            for svc in ('kube-apiserver',
                        'kube-controller-manager',
                        'kube-scheduler')
            if not host.service_running('snap.{}.daemon'.format(svc))]
@when('etcd.available', 'tls_client.server.certificate.saved',
      'authentication.setup')
@when_not('kubernetes-master.components.started')
def start_master(etcd):
    '''Configure and (re)start the Kubernetes master services once etcd,
    TLS, and authentication are all in place.'''
    hookenv.status_set('maintenance',
                       'Configuring the Kubernetes master services.')
    freeze_service_cidr()
    if not etcd.get_connection_string():
        # etcd is not returning a connection string. This hapens when
        # the master unit disconnects from etcd and is ready to terminate.
        # No point in trying to start master services and fail. Just return.
        return
    handle_etcd_relation(etcd)
    configure_master_services()
    hookenv.status_set('maintenance',
                       'Starting the Kubernetes master services.')

    services = ['kube-apiserver',
                'kube-controller-manager',
                'kube-scheduler']
    for service in services:
        host.service_restart('snap.%s.daemon' % service)

    hookenv.open_port(6443)
    set_state('kubernetes-master.components.started')
@when('etcd.available')
def etcd_data_change(etcd):
    '''Re-trigger master component configuration whenever the etcd
    connection string changes.'''
    # key off of the connection string
    connection_string = etcd.get_connection_string()

    # If the connection string changes, remove the started state to trigger
    # handling of the master components
    if data_changed('etcd-connect', connection_string):
        remove_state('kubernetes-master.components.started')
@when('kube-control.connected')
@when('cdk-addons.configured')
def send_cluster_dns_detail(kube_control):
    '''Send cluster DNS details (port, domain, server IP) to the workers
    over the kube-control relation.'''
    # Note that the DNS server doesn't necessarily exist at this point. We
    # know where it will be placed within the service cidr, though, so we
    # can send the address ahead of time.
    dns_ip = get_dns_ip()
    kube_control.set_dns(53, hookenv.config('dns_domain'), dns_ip)
@when('kube-control.auth.requested')
@when('authentication.setup')
@when('leadership.is_leader')
def send_tokens(kube_control):
    '''Answer worker auth requests with the kubelet, proxy, and admin
    tokens from known_tokens.csv.'''
    kubelet_token = get_token('kubelet')
    proxy_token = get_token('kube_proxy')
    admin_token = get_token('admin')

    # Send the data
    requests = kube_control.auth_user()
    for request in requests:
        kube_control.sign_auth_request(request[0], kubelet_token,
                                       proxy_token, admin_token)
@when_not('kube-control.connected')
def missing_kube_control():
    '''Set a blocked status until at least one worker is related.'''
    hookenv.status_set('blocked', 'Waiting for workers.')
@when('kube-api-endpoint.available')
def push_service_data(kube_api):
    '''Advertise the apiserver port over the kube-api-endpoint relation.'''
    kube_api.configure(port=6443)
@when('certificates.available')
def send_data(tls):
    """Request a server certificate covering every name and address that
    clients may use to reach the apiserver."""
    # Use the public ip of this unit as the Common Name for the certificate.
    common_name = hookenv.unit_public_ip()
    cluster_ip = get_kubernetes_service_ip()
    domain = hookenv.config('dns_domain')
    # Subject Alternate Names: both unit addresses, the hostname, and the
    # in-cluster kubernetes service names.
    sans = [
        common_name,
        hookenv.unit_private_ip(),
        socket.gethostname(),
        cluster_ip,
        'kubernetes',
        'kubernetes.{0}'.format(domain),
        'kubernetes.default',
        'kubernetes.default.svc',
        'kubernetes.default.svc.{0}'.format(domain)
    ]
    # The certificate request is keyed on the sanitized unit name.
    certificate_name = hookenv.local_unit().replace('/', '_')
    tls.request_server_cert(common_name, sans, certificate_name)
@when('kubernetes-master.components.started')
def configure_cdk_addons():
    '''Reconfigure the cdk-addons snap with the current arch, DNS, and
    dashboard settings and apply them; leaves the configured state unset
    (so this retries) while the addons are not ready.'''
    remove_state('cdk-addons.configured')
    dbEnabled = str(hookenv.config('enable-dashboard-addons')).lower()
    args = [
        'arch=' + arch(),
        'dns-ip=' + get_dns_ip(),
        'dns-domain=' + hookenv.config('dns_domain'),
        'enable-dashboard=' + dbEnabled
    ]
    check_call(['snap', 'set', 'cdk-addons'] + args)
    if not addons_ready():
        hookenv.status_set('waiting', 'Waiting to retry addon deployment')
        remove_state('cdk-addons.configured')
        return

    set_state('cdk-addons.configured')
@retry(times=3, delay_secs=20)
def addons_ready():
    """Test whether the cdk-addons have deployed cleanly.

    Returns True when `cdk-addons.apply` exits successfully, False when
    it fails (the @retry decorator re-invokes this a few times first).
    """
    try:
        check_call(['cdk-addons.apply'])
    except CalledProcessError:
        hookenv.log("Addons are not ready yet.")
        return False
    return True
@when('loadbalancer.available', 'certificates.ca.available',
      'certificates.client.cert.available', 'authentication.setup')
def loadbalancer_kubeconfig(loadbalancer, ca, client):
    '''Build the admin kubeconfig against the first load balancer
    address/port from the loadbalancer relation.'''
    # Get the potential list of loadbalancers from the relation object.
    hosts = loadbalancer.get_addresses_ports()
    # NOTE(review): assumes the relation always provides at least one
    # host; an empty list would raise IndexError here -- confirm.
    address = hosts[0].get('public-address')
    port = hosts[0].get('port')
    server = 'https://{0}:{1}'.format(address, port)
    build_kubeconfig(server)
@when('certificates.ca.available', 'certificates.client.cert.available',
      'authentication.setup')
@when_not('loadbalancer.available')
def create_self_config(ca, client):
    '''No load balancer is related: point the kubeconfig at this unit's
    own apiserver.'''
    server = 'https://{0}:{1}'.format(hookenv.unit_get('public-address'), 6443)
    build_kubeconfig(server)
@when('ceph-storage.available')
def ceph_state_control(ceph_admin):
    '''Clear the ceph-storage.configured state whenever the relation data
    changes, so ceph_storage() re-renders the configuration.'''
    ceph_relation_data = {
        'mon_hosts': ceph_admin.mon_hosts(),
        'fsid': ceph_admin.fsid(),
        'auth_supported': ceph_admin.auth(),
        'hostname': socket.gethostname(),
        'key': ceph_admin.key()
    }

    # Re-execute the rendering if the data has changed.
    if data_changed('ceph-config', ceph_relation_data):
        remove_state('ceph-storage.configured')
@when('ceph-storage.available')
@when_not('ceph-storage.configured')
def ceph_storage(ceph_admin):
    '''Render the ceph configuration and admin keyring from the
    ceph-storage relation and register the ceph secret with kubernetes.

    Leaves 'ceph-storage.configured' unset (so this re-runs on the next
    hook) until the secret has been applied successfully.
    '''
    ceph_context = {
        'mon_hosts': ceph_admin.mon_hosts(),
        'fsid': ceph_admin.fsid(),
        'auth_supported': ceph_admin.auth(),
        'use_syslog': "true",
        'ceph_public_network': '',
        'ceph_cluster_network': '',
        'loglevel': 1,
        'hostname': socket.gethostname(),
    }
    # The client package is needed so the kubelet can mount rbd volumes.
    apt_install(['ceph-common'], fatal=True)

    etc_ceph_directory = '/etc/ceph'
    if not os.path.isdir(etc_ceph_directory):
        os.makedirs(etc_ceph_directory)
    charm_ceph_conf = os.path.join(etc_ceph_directory, 'ceph.conf')
    render('ceph.conf', charm_ceph_conf, ceph_context)

    # Render the admin keyring next to the config.
    admin_key = os.path.join(etc_ceph_directory,
                             'ceph.client.admin.keyring')
    try:
        with open(admin_key, 'w') as key_file:
            key_file.write("[client.admin]\n\tkey = {}\n".format(
                ceph_admin.key()))
    except IOError as err:
        hookenv.log("IOError writing admin.keyring: {}".format(err))

    if ceph_admin.key():
        encoded_key = base64.b64encode(ceph_admin.key().encode('utf-8'))
    else:
        # allow this method to re-execute
        return

    context = {'secret': encoded_key.decode('ascii')}
    render('ceph-secret.yaml', '/tmp/ceph-secret.yaml', context)
    try:
        # 'kubectl apply' creates the secret when absent and updates it
        # otherwise, so re-running is safe.
        cmd = ['kubectl', 'apply', '-f', '/tmp/ceph-secret.yaml']
        check_call(cmd)
        os.remove('/tmp/ceph-secret.yaml')
    except (CalledProcessError, OSError) as e:
        # This used to be a bare 'except:', which also swallowed
        # SystemExit/KeyboardInterrupt and hid the failure reason. Catch
        # only the expected failures (kubectl exiting non-zero, kubectl
        # missing, unremovable temp file), log them, and retry next hook.
        hookenv.log('Failed to apply ceph secret: {}'.format(e))
        return

    set_state('ceph-storage.configured')
@when('nrpe-external-master.available')
@when_not('nrpe-external-master.initial-config')
def initial_nrpe_config(nagios=None):
    '''Do the initial NRPE configuration once the relation is available.'''
    set_state('nrpe-external-master.initial-config')
    update_nrpe_config(nagios)
@when('kubernetes-master.components.started')
@when('nrpe-external-master.available')
@when_any('config.changed.nagios_context',
          'config.changed.nagios_servicegroups')
def update_nrpe_config(unused=None):
    '''Write NRPE service checks for the master snap daemons.'''
    # List of systemd services that will be checked
    services = (
        'snap.kube-apiserver.daemon',
        'snap.kube-controller-manager.daemon',
        'snap.kube-scheduler.daemon'
    )
    hostname = nrpe.get_nagios_hostname()
    current_unit = nrpe.get_nagios_unit_name()
    nrpe_setup = nrpe.NRPE(hostname=hostname)
    nrpe.add_init_service_checks(nrpe_setup, services, current_unit)
    nrpe_setup.write()
@when_not('nrpe-external-master.available')
@when('nrpe-external-master.initial-config')
def remove_nrpe_config(nagios=None):
    '''Remove the NRPE checks when the nrpe relation goes away.'''
    remove_state('nrpe-external-master.initial-config')

    # List of systemd services for which the checks will be removed
    services = (
        'snap.kube-apiserver.daemon',
        'snap.kube-controller-manager.daemon',
        'snap.kube-scheduler.daemon'
    )

    # use the charm-helpers code for now.
    hostname = nrpe.get_nagios_hostname()
    nrpe_setup = nrpe.NRPE(hostname=hostname)

    for service in services:
        nrpe_setup.remove_check(shortname=service)
def is_privileged():
    """Return boolean indicating whether or not to set allow-privileged=true.

    'auto' defers to the gpu-enabled state; any other config value is
    compared against the literal string 'true'.
    """
    privileged = hookenv.config('allow-privileged')
    if privileged == 'auto':
        return is_state('kubernetes-master.gpu.enabled')
    else:
        return privileged == 'true'
@when('config.changed.allow-privileged')
@when('kubernetes-master.components.started')
def on_config_allow_privileged_change():
    """React to changed 'allow-privileged' config value by forcing the
    master components to restart with the new setting."""
    remove_state('kubernetes-master.components.started')
    remove_state('config.changed.allow-privileged')
@when('kube-control.gpu.available')
@when('kubernetes-master.components.started')
@when_not('kubernetes-master.gpu.enabled')
def on_gpu_available(kube_control):
    """The remote side (kubernetes-worker) is gpu-enabled.

    We need to run in privileged mode; prompt the operator when the
    config forbids it, otherwise restart the components in gpu mode.
    """
    config = hookenv.config()
    if config['allow-privileged'] == "false":
        hookenv.status_set(
            'active',
            'GPUs available. Set allow-privileged="auto" to enable.'
        )
        return

    remove_state('kubernetes-master.components.started')
    set_state('kubernetes-master.gpu.enabled')
@when('kubernetes-master.gpu.enabled')
@when_not('kubernetes-master.privileged')
def disable_gpu_mode():
    """We were in gpu mode, but the operator has set allow-privileged="false",
    so we can't run in gpu mode anymore.
    """
    remove_state('kubernetes-master.gpu.enabled')
@hook('stop')
def shutdown():
    """ Stop the kubernetes master services on charm shutdown. """
    service_stop('snap.kube-apiserver.daemon')
    service_stop('snap.kube-controller-manager.daemon')
    service_stop('snap.kube-scheduler.daemon')
def arch():
    '''Return the dpkg-reported package architecture as a string.'''
    # Get the package architecture for this system.
    architecture = check_output(['dpkg', '--print-architecture']).rstrip()
    # Convert the binary result into a string.
    architecture = architecture.decode('utf-8')
    return architecture
def build_kubeconfig(server):
    '''Gather the relevant data for Kubernetes configuration objects and
    create a config object with that information.

    :param server: URL of the apiserver the kubeconfig should point at.
    Silently does nothing while the CA file or admin password is missing.
    '''
    # Get the options from the tls-client layer.
    layer_options = layer.options('tls-client')
    # Get all the paths to the tls information required for kubeconfig.
    ca = layer_options.get('ca_certificate_path')
    ca_exists = ca and os.path.isfile(ca)
    client_pass = get_password('basic_auth.csv', 'admin')
    # Do we have everything we need?
    if ca_exists and client_pass:
        # Create an absolute path for the kubeconfig file.
        kubeconfig_path = os.path.join(os.sep, 'home', 'ubuntu', 'config')
        # Create the kubeconfig on this system so users can access the cluster.
        create_kubeconfig(kubeconfig_path, server, ca,
                          user='admin', password=client_pass)
        # Make the config file readable by the ubuntu users so juju scp works.
        cmd = ['chown', 'ubuntu:ubuntu', kubeconfig_path]
        check_call(cmd)
def create_kubeconfig(kubeconfig, server, ca, key=None, certificate=None,
                      user='ubuntu', context='juju-context',
                      cluster='juju-cluster', password=None, token=None):
    '''Create a kubeconfig at *kubeconfig* for *server* using a sequence of
    `kubectl config` invocations.

    At least one authentication mechanism must be supplied: key+certificate,
    password, or token. Token and password are mutually exclusive.

    :raises ValueError: when no auth mechanism is given, or when both
        token and password are given.
    '''
    if not key and not certificate and not password and not token:
        raise ValueError('Missing authentication mechanism.')

    # token and password are mutually exclusive. Error early if both are
    # present. The developer has requested an impossible situation.
    # see: kubectl config set-credentials --help
    if token and password:
        raise ValueError('Token and Password are mutually exclusive.')
    # Create the config file with the address of the master server.
    cmd = 'kubectl config --kubeconfig={0} set-cluster {1} ' \
          '--server={2} --certificate-authority={3} --embed-certs=true'
    check_call(split(cmd.format(kubeconfig, cluster, server, ca)))
    # Delete old users
    cmd = 'kubectl config --kubeconfig={0} unset users'
    check_call(split(cmd.format(kubeconfig)))
    # Create the credentials using the client flags.
    cmd = 'kubectl config --kubeconfig={0} ' \
          'set-credentials {1} '.format(kubeconfig, user)

    if key and certificate:
        cmd = '{0} --client-key={1} --client-certificate={2} '\
              '--embed-certs=true'.format(cmd, key, certificate)
    if password:
        cmd = "{0} --username={1} --password={2}".format(cmd, user, password)
    # This is mutually exclusive from password. They will not work together.
    if token:
        cmd = "{0} --token={1}".format(cmd, token)
    check_call(split(cmd))
    # Create a default context with the cluster.
    cmd = 'kubectl config --kubeconfig={0} set-context {1} ' \
          '--cluster={2} --user={3}'
    check_call(split(cmd.format(kubeconfig, context, cluster, user)))
    # Make the config use this new context.
    cmd = 'kubectl config --kubeconfig={0} use-context {1}'
    check_call(split(cmd.format(kubeconfig, context)))
def get_dns_ip():
    '''Get an IP address for the DNS server on the service cidr.'''
    interface = ipaddress.IPv4Interface(service_cidr())
    # Add .10 at the end of the network
    ip = interface.network.network_address + 10
    return ip.exploded
def get_kubernetes_service_ip():
    '''Get the IP address for the kubernetes service based on the cidr.'''
    interface = ipaddress.IPv4Interface(service_cidr())
    # Add .1 at the end of the network
    ip = interface.network.network_address + 1
    return ip.exploded
def handle_etcd_relation(reldata):
    ''' Save the client credentials and set appropriate daemon flags when
    etcd declares itself as available.

    :param reldata: the etcd relation object; provides the connection
        string and the client TLS credentials.
    '''
    connection_string = reldata.get_connection_string()
    # Define where the etcd tls files will be kept.
    etcd_dir = '/root/cdk/etcd'
    # Create paths to the etcd client ca, key, and cert file locations.
    ca = os.path.join(etcd_dir, 'client-ca.pem')
    key = os.path.join(etcd_dir, 'client-key.pem')
    cert = os.path.join(etcd_dir, 'client-cert.pem')
    # Save the client credentials (in relation data) to the paths provided.
    reldata.save_client_credentials(key, cert, ca)

    api_opts = FlagManager('kube-apiserver')

    # Never use stale data, always prefer whats coming in during context
    # building. if its stale, its because whats in unitdata is stale
    data = api_opts.data
    if data.get('etcd-servers-strict') or data.get('etcd-servers'):
        api_opts.destroy('etcd-cafile')
        api_opts.destroy('etcd-keyfile')
        api_opts.destroy('etcd-certfile')
        api_opts.destroy('etcd-servers', strict=True)
        api_opts.destroy('etcd-servers')

    # Set the apiserver flags in the options manager
    api_opts.add('etcd-cafile', ca)
    api_opts.add('etcd-keyfile', key)
    api_opts.add('etcd-certfile', cert)
    api_opts.add('etcd-servers', connection_string, strict=True)
def configure_master_services():
    """Assemble and apply the CLI flags for the three master services.

    Builds flag sets for kube-apiserver, kube-controller-manager and
    kube-scheduler via FlagManager (a charm helper that accumulates
    flags and renders them with to_s()), then pushes each set into the
    corresponding snap with ``snap set``.
    """
    api_opts = FlagManager('kube-apiserver')
    controller_opts = FlagManager('kube-controller-manager')
    scheduler_opts = FlagManager('kube-scheduler')
    scheduler_opts.add('v', '2')

    # Get the tls paths from the layer data.
    layer_options = layer.options('tls-client')
    ca_cert_path = layer_options.get('ca_certificate_path')
    client_cert_path = layer_options.get('client_certificate_path')
    client_key_path = layer_options.get('client_key_path')
    server_cert_path = layer_options.get('server_certificate_path')
    server_key_path = layer_options.get('server_key_path')

    # Mirror the privileged-mode decision into both the apiserver flag and
    # a reactive charm state.
    if is_privileged():
        api_opts.add('allow-privileged', 'true', strict=True)
        set_state('kubernetes-master.privileged')
    else:
        api_opts.add('allow-privileged', 'false', strict=True)
        remove_state('kubernetes-master.privileged')

    # Handle static options for now
    api_opts.add('service-cluster-ip-range', service_cidr())
    api_opts.add('min-request-timeout', '300')
    api_opts.add('v', '4')
    api_opts.add('tls-cert-file', server_cert_path)
    api_opts.add('tls-private-key-file', server_key_path)
    api_opts.add('kubelet-certificate-authority', ca_cert_path)
    api_opts.add('kubelet-client-certificate', client_cert_path)
    api_opts.add('kubelet-client-key', client_key_path)
    api_opts.add('logtostderr', 'true')
    api_opts.add('insecure-bind-address', '127.0.0.1')
    api_opts.add('insecure-port', '8080')
    api_opts.add('storage-backend', 'etcd2')  # FIXME: add etcd3 support

    admission_control = [
        'Initializers',
        'NamespaceLifecycle',
        'LimitRanger',
        'ServiceAccount',
        'ResourceQuota',
        'DefaultTolerationSeconds'
    ]

    # Trim admission plugins that older apiservers do not know about.
    if get_version('kube-apiserver') < (1, 6):
        hookenv.log('Removing DefaultTolerationSeconds from admission-control')
        admission_control.remove('DefaultTolerationSeconds')
    if get_version('kube-apiserver') < (1, 7):
        hookenv.log('Removing Initializers from admission-control')
        admission_control.remove('Initializers')
    api_opts.add('admission-control', ','.join(admission_control), strict=True)

    # Default to 3 minute resync. TODO: Make this configureable?
    controller_opts.add('min-resync-period', '3m')
    controller_opts.add('v', '2')
    controller_opts.add('root-ca-file', ca_cert_path)
    controller_opts.add('logtostderr', 'true')
    controller_opts.add('master', 'http://127.0.0.1:8080')

    scheduler_opts.add('v', '2')
    scheduler_opts.add('logtostderr', 'true')
    scheduler_opts.add('master', 'http://127.0.0.1:8080')

    # Render each flag set and hand it to the matching snap.
    cmd = ['snap', 'set', 'kube-apiserver'] + api_opts.to_s().split(' ')
    check_call(cmd)

    cmd = (
        ['snap', 'set', 'kube-controller-manager'] +
        controller_opts.to_s().split(' ')
    )
    check_call(cmd)
    cmd = ['snap', 'set', 'kube-scheduler'] + scheduler_opts.to_s().split(' ')
    check_call(cmd)
def setup_basic_auth(password=None, username='admin', uid='admin'):
    """Write the apiserver basic-auth CSV (/root/cdk/basic_auth.csv).

    A random password is generated when none is supplied; the file is
    overwritten with a single ``password,username,uid`` record.
    """
    cdk_dir = '/root/cdk'
    if not os.path.isdir(cdk_dir):
        os.makedirs(cdk_dir)
    auth_file = os.path.join(cdk_dir, 'basic_auth.csv')
    if not password:
        password = token_generator()
    with open(auth_file, 'w') as stream:
        stream.write('{0},{1},{2}'.format(password, username, uid))
def setup_tokens(token, username, user):
    """Append a ``token,username,user`` record to /root/cdk/known_tokens.csv.

    Generates a random token when a falsy one is passed, and creates the
    /root/cdk directory on first use.
    """
    cdk_dir = '/root/cdk'
    if not os.path.isdir(cdk_dir):
        os.makedirs(cdk_dir)
    tokens_file = os.path.join(cdk_dir, 'known_tokens.csv')
    if not token:
        token = token_generator()
    with open(tokens_file, 'a') as stream:
        stream.write('{0},{1},{2}\n'.format(token, username, user))
def get_password(csv_fname, user):
    """Look up the secret for *user* in /root/cdk/<csv_fname>.

    The file format is ``secret,user,...`` per line; returns the first
    matching secret, or None when the file or the user is absent.
    """
    tokens_path = os.path.join('/root/cdk', csv_fname)
    if not os.path.isfile(tokens_path):
        return None
    with open(tokens_path, 'r') as stream:
        for line in stream:
            fields = line.split(',')
            if fields[1] == user:
                return fields[0]
    return None
def get_token(username):
    """Return the known token for *username*, or None if not registered."""
    return get_password('known_tokens.csv', username)
def set_token(password, save_salt):
    """Store *password* in the charm's unitdata kv store under *save_salt*.

    Returns the value read back from the store. NOTE(review): despite the
    parameter name, no hashing or salting happens here — the raw value is
    stored as-is.
    """
    db = unitdata.kv()
    db.set(save_salt, password)
    return db.get(save_salt)
def token_generator(length=32):
    """Return a random alphanumeric token of *length* characters.

    Uses the OS CSPRNG (random.SystemRandom), so the result is suitable
    for authentication tokens.
    """
    rng = random.SystemRandom()
    charset = string.ascii_letters + string.digits
    return ''.join(rng.choice(charset) for _ in range(length))
@retry(times=3, delay_secs=10)
def all_kube_system_pods_running():
    """Return True when every pod in kube-system reports phase 'Running'.

    Shells out to kubectl for the pod list; returns False if kubectl
    fails or any pod is in a non-Running phase. Wrapped in @retry
    (project decorator — presumably re-invokes on failure up to 3 times,
    10s apart; confirm against its definition).
    """
    cmd = ['kubectl', 'get', 'po', '-n', 'kube-system', '-o', 'json']
    try:
        output = check_output(cmd).decode('utf-8')
    except CalledProcessError:
        hookenv.log('failed to get kube-system pod status')
        return False
    result = json.loads(output)
    for pod in result['items']:
        status = pod['status']['phase']
        if status != 'Running':
            return False
    return True
def apiserverVersion():
    """Return the local kube-apiserver version as an (x, y, z) int tuple.

    Parses the first three numeric groups out of
    ``kube-apiserver --version`` output.
    """
    version_output = check_output('kube-apiserver --version'.split()).decode('utf-8')
    numbers = re.findall("[0-9]+", version_output)[:3]
    return tuple(int(piece) for piece in numbers)
| true
| true
|
790a3d59aa4af9a835a878db0704ae15286d09b4
| 1,875
|
py
|
Python
|
whole_foods_delivery_slot_firefox.py
|
jtravisnorton/Whole-Foods-Delivery-Slot
|
abf81267d25afe7238135ee5841b9155d4f1071b
|
[
"Apache-2.0"
] | 519
|
2020-03-30T05:01:59.000Z
|
2022-01-28T15:10:28.000Z
|
whole_foods_delivery_slot_firefox.py
|
jtravisnorton/Whole-Foods-Delivery-Slot
|
abf81267d25afe7238135ee5841b9155d4f1071b
|
[
"Apache-2.0"
] | 64
|
2020-03-30T19:09:21.000Z
|
2021-03-03T17:59:31.000Z
|
whole_foods_delivery_slot_firefox.py
|
jtravisnorton/Whole-Foods-Delivery-Slot
|
abf81267d25afe7238135ee5841b9155d4f1071b
|
[
"Apache-2.0"
] | 189
|
2020-03-30T21:01:11.000Z
|
2022-03-31T08:16:02.000Z
|
import bs4
from selenium import webdriver
import sys
import time
import os
def getWFSlot(productUrl):
    """Poll a Whole Foods/Amazon slot-selection page until a slot opens.

    Opens *productUrl* in Firefox (selenium), waits 60s so the user can
    log in, then refreshes every few seconds and scrapes the page for
    three "slot available" signals. On success it announces via the
    macOS ``say`` command and stops polling.

    :param productUrl: URL of the Amazon ship-option-select page.
    """
    driver = webdriver.Firefox()
    driver.get(productUrl)
    html = driver.page_source
    soup = bs4.BeautifulSoup(html)
    # Give the user a minute to log in before polling starts.
    time.sleep(60)
    no_open_slots = True

    while no_open_slots:
        driver.refresh()
        print("refreshed")
        html = driver.page_source
        soup = bs4.BeautifulSoup(html)
        time.sleep(4)

        slot_patterns = ['Next available', '1-hour delivery windows', '2-hour delivery windows']
        try:
            next_slot_text = soup.find('h4', class_='ufss-slotgroup-heading-text a-text-normal').text
            # BUG FIX: the original tested `next_slot_text in slot_pattern`,
            # which is inverted — the heading text *contains* a pattern, so
            # the check could never succeed for longer headings.
            if any(slot_pattern in next_slot_text for slot_pattern in slot_patterns):
                print('SLOTS OPEN!')
                os.system('say "Slots for delivery opened!"')
                no_open_slots = False
                time.sleep(1400)
        except AttributeError:
            # Heading absent on this page layout; fall through to next probe.
            pass

        try:
            slot_opened_text = "Not available"
            all_dates = soup.findAll("div", {"class": "ufss-date-select-toggle-text-availability"})
            # Any date toggle that does not read "Not available" means a slot.
            for each_date in all_dates:
                if slot_opened_text not in each_date.text:
                    print('SLOTS OPEN!')
                    os.system('say "Slots for delivery opened!"')
                    no_open_slots = False
                    time.sleep(1400)
        except AttributeError:
            pass

        try:
            no_slot_pattern = 'No delivery windows available. New windows are released throughout the day.'
            if no_slot_pattern == soup.find('h4', class_='a-alert-heading').text:
                print("NO SLOTS!")
        except AttributeError:
            # The "no windows" banner is missing entirely — treat as open.
            print('SLOTS OPEN!')
            os.system('say "Slots for delivery opened!"')
            no_open_slots = False
# Script entry point: start polling the ship-option-select page immediately.
getWFSlot('https://www.amazon.com/gp/buy/shipoptionselect/handlers/display.html?hasWorkingJavascript=1')
| 30.737705
| 104
| 0.6256
|
import bs4
from selenium import webdriver
import sys
import time
import os
def getWFSlot(productUrl):
driver = webdriver.Firefox()
driver.get(productUrl)
html = driver.page_source
soup = bs4.BeautifulSoup(html)
time.sleep(60)
no_open_slots = True
while no_open_slots:
driver.refresh()
print("refreshed")
html = driver.page_source
soup = bs4.BeautifulSoup(html)
time.sleep(4)
slot_patterns = ['Next available', '1-hour delivery windows', '2-hour delivery windows']
try:
next_slot_text = soup.find('h4', class_ ='ufss-slotgroup-heading-text a-text-normal').text
if any(next_slot_text in slot_pattern for slot_pattern in slot_patterns):
print('SLOTS OPEN!')
os.system('say "Slots for delivery opened!"')
no_open_slots = False
time.sleep(1400)
except AttributeError:
pass
try:
slot_opened_text = "Not available"
all_dates = soup.findAll("div", {"class": "ufss-date-select-toggle-text-availability"})
for each_date in all_dates:
if slot_opened_text not in each_date.text:
print('SLOTS OPEN!')
os.system('say "Slots for delivery opened!"')
no_open_slots = False
time.sleep(1400)
except AttributeError:
pass
try:
no_slot_pattern = 'No delivery windows available. New windows are released throughout the day.'
if no_slot_pattern == soup.find('h4', class_ ='a-alert-heading').text:
print("NO SLOTS!")
except AttributeError:
print('SLOTS OPEN!')
os.system('say "Slots for delivery opened!"')
no_open_slots = False
getWFSlot('https://www.amazon.com/gp/buy/shipoptionselect/handlers/display.html?hasWorkingJavascript=1')
| true
| true
|
790a3e9e46b7dca599ff65138be11f6962e19dcc
| 16,655
|
py
|
Python
|
pymatgen/io/vasp/tests/test_sets.py
|
rousseab/pymatgen
|
ecfba4a576a21f31c222be8fd20ce2ddaa77495a
|
[
"MIT"
] | 1
|
2015-05-18T14:31:20.000Z
|
2015-05-18T14:31:20.000Z
|
pymatgen/io/vasp/tests/test_sets.py
|
rousseab/pymatgen
|
ecfba4a576a21f31c222be8fd20ce2ddaa77495a
|
[
"MIT"
] | null | null | null |
pymatgen/io/vasp/tests/test_sets.py
|
rousseab/pymatgen
|
ecfba4a576a21f31c222be8fd20ce2ddaa77495a
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import unicode_literals
import unittest
import os
import shutil
import numpy as np
from monty.json import MontyDecoder
from pymatgen.io.vasp.sets import MITVaspInputSet, MITHSEVaspInputSet, \
MPVaspInputSet, MITGGAVaspInputSet, MITNEBVaspInputSet,\
MPStaticVaspInputSet, MPNonSCFVaspInputSet, MITMDVaspInputSet,\
MPHSEVaspInputSet, MPBSHSEVaspInputSet, MPStaticDielectricDFPTVaspInputSet,\
MPOpticsNonSCFVaspInputSet
from pymatgen.io.vasp.inputs import Poscar, Incar
from pymatgen import Specie, Lattice, Structure
# Path to pymatgen's shared test_files directory, four levels above this module.
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
                        'test_files')

# Decoder used to round-trip input sets through their as_dict() representations.
dec = MontyDecoder()
class MITMPVaspInputSetTest(unittest.TestCase):
    """Tests for the MIT / Materials Project (MP) VASP input-set classes.

    Exercises POSCAR/POTCAR/INCAR/KPOINTS generation across the relaxation,
    static, non-SCF, HSE, band-structure and dielectric parameter sets, plus
    dict round-tripping through MontyDecoder.
    """

    def setUp(self):
        # POTCARs are looked up via VASP_PSP_DIR; fall back to the test dir.
        if "VASP_PSP_DIR" not in os.environ:
            os.environ["VASP_PSP_DIR"] = test_dir
        filepath = os.path.join(test_dir, 'POSCAR')
        poscar = Poscar.from_file(filepath)
        self.struct = poscar.structure

        # One instance of each input-set flavor under test.
        self.mitparamset = MITVaspInputSet()
        self.mitparamset_unsorted = MITVaspInputSet(sort_structure=False)
        self.mithseparamset = MITHSEVaspInputSet()
        self.paramset = MPVaspInputSet()
        self.userparamset = MPVaspInputSet(
            user_incar_settings={'MAGMOM': {"Fe": 10, "S": -5, "Mn3+": 100}}
        )
        self.mitggaparam = MITGGAVaspInputSet()
        self.mpstaticparamset = MPStaticVaspInputSet()
        self.mpnscfparamsetu = MPNonSCFVaspInputSet(
            {"NBANDS": 50}, mode="Uniform")
        self.mpnscfparamsetl = MPNonSCFVaspInputSet(
            {"NBANDS": 60}, mode="Line")
        self.mphseparamset = MPHSEVaspInputSet()
        self.mpbshseparamsetl = MPBSHSEVaspInputSet(mode="Line")
        self.mpbshseparamsetu = MPBSHSEVaspInputSet(
            mode="Uniform", added_kpoints=[[0.5, 0.5, 0.0]])
        self.mpdielparamset = MPStaticDielectricDFPTVaspInputSet()

    def test_get_poscar(self):
        """sort_structure should control element ordering in the POSCAR."""
        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.75, 0.5, 0.75])
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Fe", "Mn"], coords)

        s_unsorted = self.mitparamset_unsorted.get_poscar(struct).structure
        s_sorted = self.mitparamset.get_poscar(struct).structure

        self.assertEqual(s_unsorted[0].specie.symbol, 'Fe')
        self.assertEqual(s_sorted[0].specie.symbol, 'Mn')

    def test_get_potcar_symbols(self):
        """POTCAR symbols follow structure (re)ordering and MP overrides."""
        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.75, 0.5, 0.75])
        coords.append([0.75, 0.25, 0.75])
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["P", "Fe", "O"], coords)

        syms = self.paramset.get_potcar_symbols(struct)
        self.assertEqual(syms, ['Fe_pv', 'P', 'O'])
        syms = MPVaspInputSet(sort_structure=False).get_potcar_symbols(struct)
        self.assertEqual(syms, ['P', 'Fe_pv', 'O'])

    def test_false_potcar_hash(self):
        """A symbol/hash mismatch must raise when check_hash is requested."""
        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.75, 0.5, 0.75])
        coords.append([0.75, 0.25, 0.75])
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["P", "Fe", "O"], coords)

        # Deliberately corrupt the symbol mapping, then restore it.
        self.mitparamset.potcar_settings['Fe']['symbol'] = 'Fe_pv'
        self.assertRaises(ValueError, self.mitparamset.get_potcar, struct, check_hash=True)
        self.mitparamset.potcar_settings['Fe']['symbol'] = 'Fe'

    def test_lda_potcar(self):
        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.75, 0.5, 0.75])
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["P", "Fe"], coords)
        p = MITVaspInputSet(potcar_functional="LDA").get_potcar(struct)
        self.assertEqual(p.functional, 'LDA')

    def test_get_nelect(self):
        coords = [[0]*3, [0.5]*3, [0.75]*3]
        lattice = Lattice.cubic(4)
        s = Structure(lattice, ['Si', 'Si', 'Fe'], coords)
        self.assertAlmostEqual(MITVaspInputSet().get_nelect(s), 16)

    def test_get_incar(self):
        """INCAR generation: LDAU/MAGMOM rules across structure chemistries."""
        incar = self.paramset.get_incar(self.struct)
        self.assertEqual(incar['LDAUU'], [5.3, 0, 0])
        self.assertAlmostEqual(incar['EDIFF'], 0.0012)

        incar = self.mitparamset.get_incar(self.struct)
        self.assertEqual(incar['LDAUU'], [4.0, 0, 0])
        self.assertAlmostEqual(incar['EDIFF'], 0.0012)

        incar_gga = self.mitggaparam.get_incar(self.struct)
        self.assertNotIn("LDAU", incar_gga)

        incar_static = self.mpstaticparamset.get_incar(self.struct)
        self.assertEqual(incar_static["NSW"], 0)

        incar_nscfl = self.mpnscfparamsetl.get_incar(self.struct)
        self.assertEqual(incar_nscfl["NBANDS"], 60)

        incar_nscfu = self.mpnscfparamsetu.get_incar(self.struct)
        self.assertEqual(incar_nscfu["ISYM"], 0)

        incar_hse = self.mphseparamset.get_incar(self.struct)
        self.assertEqual(incar_hse['LHFCALC'], True)
        self.assertEqual(incar_hse['HFSCREEN'], 0.2)

        incar_hse_bsl = self.mpbshseparamsetl.get_incar(self.struct)
        self.assertEqual(incar_hse_bsl['LHFCALC'], True)
        self.assertEqual(incar_hse_bsl['HFSCREEN'], 0.2)
        self.assertEqual(incar_hse_bsl['NSW'], 0)

        incar_hse_bsu = self.mpbshseparamsetu.get_incar(self.struct)
        self.assertEqual(incar_hse_bsu['LHFCALC'], True)
        self.assertEqual(incar_hse_bsu['HFSCREEN'], 0.2)
        self.assertEqual(incar_hse_bsu['NSW'], 0)

        incar_diel = self.mpdielparamset.get_incar(self.struct)
        self.assertEqual(incar_diel['IBRION'], 8)
        self.assertEqual(incar_diel['LEPSILON'], True)

        si = 14
        coords = list()
        coords.append(np.array([0, 0, 0]))
        coords.append(np.array([0.75, 0.5, 0.75]))

        # Silicon structure for testing.
        latt = Lattice(np.array([[3.8401979337, 0.00, 0.00],
                                 [1.9200989668, 3.3257101909, 0.00],
                                 [0.00, -2.2171384943, 3.1355090603]]))
        struct = Structure(latt, [si, si], coords)
        incar = self.paramset.get_incar(struct)
        self.assertNotIn("LDAU", incar)

        incar = self.mithseparamset.get_incar(self.struct)
        self.assertTrue(incar['LHFCALC'])

        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.75, 0.5, 0.75])
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Fe", "Mn"], coords)

        incar = self.paramset.get_incar(struct)
        self.assertNotIn('LDAU', incar)

        # Check fluorides.
        struct = Structure(lattice, ["Fe", "F"], coords)
        incar = self.paramset.get_incar(struct)
        self.assertEqual(incar['LDAUU'], [5.3, 0])
        self.assertEqual(incar['MAGMOM'], [5, 0.6])

        struct = Structure(lattice, ["Fe", "F"], coords)
        incar = self.mitparamset.get_incar(struct)
        self.assertEqual(incar['LDAUU'], [4.0, 0])

        # Make sure this works with species.
        struct = Structure(lattice, ["Fe2+", "O2-"], coords)
        incar = self.paramset.get_incar(struct)
        self.assertEqual(incar['LDAUU'], [5.3, 0])

        # site_properties magmoms take precedence over element defaults.
        struct = Structure(lattice, ["Fe", "Mn"], coords,
                           site_properties={'magmom': (5.2, -4.5)})
        incar = self.paramset.get_incar(struct)
        self.assertEqual(incar['MAGMOM'], [-4.5, 5.2])
        incar = self.mpstaticparamset.get_incar(struct)
        self.assertEqual(incar['MAGMOM'], [-4.5, 5.2])
        incar = self.mitparamset_unsorted.get_incar(struct)
        self.assertEqual(incar['MAGMOM'], [5.2, -4.5])

        struct = Structure(lattice, [Specie("Fe", 2, {'spin': 4.1}), "Mn"],
                           coords)
        incar = self.paramset.get_incar(struct)
        self.assertEqual(incar['MAGMOM'], [5, 4.1])
        incar = self.mpnscfparamsetl.get_incar(struct)
        self.assertEqual(incar.get('MAGMOM', None), None)

        struct = Structure(lattice, ["Mn3+", "Mn4+"], coords)
        incar = self.mitparamset.get_incar(struct)
        self.assertEqual(incar['MAGMOM'], [4, 3])
        incar = self.mpnscfparamsetu.get_incar(struct)
        self.assertEqual(incar.get('MAGMOM', None), None)

        self.assertEqual(self.userparamset.get_incar(struct)['MAGMOM'],
                         [100, 0.6])

        # Sulfide vs sulfate test.
        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.75, 0.5, 0.75])
        coords.append([0.25, 0.5, 0])

        struct = Structure(lattice, ["Fe", "Fe", "S"], coords)
        incar = self.mitparamset.get_incar(struct)
        self.assertEqual(incar['LDAUU'], [1.9, 0])

        # Make sure Matproject sulfides are ok.
        self.assertNotIn('LDAUU', self.paramset.get_incar(struct))
        self.assertNotIn('LDAUU', self.mpstaticparamset.get_incar(struct))

        struct = Structure(lattice, ["Fe", "S", "O"], coords)
        incar = self.mitparamset.get_incar(struct)
        self.assertEqual(incar['LDAUU'], [4.0, 0, 0])

        # Make sure Matproject sulfates are ok.
        self.assertEqual(self.paramset.get_incar(struct)['LDAUU'], [5.3, 0, 0])
        self.assertEqual(self.mpnscfparamsetl.get_incar(struct)['LDAUU'],
                         [5.3, 0, 0])

        self.assertEqual(self.userparamset.get_incar(struct)['MAGMOM'],
                         [10, -5, 0.6])

    def test_optics(self):
        """Optics run setup from a previous static run (writes to disk)."""
        self.mpopticsparamset = MPOpticsNonSCFVaspInputSet.from_previous_vasp_run(
            '{}/static_silicon'.format(test_dir), output_dir='optics_test_dir',
            nedos=1145)
        self.assertTrue(os.path.exists('optics_test_dir/CHGCAR'))
        incar = Incar.from_file('optics_test_dir/INCAR')
        self.assertTrue(incar['LOPTICS'])
        self.assertEqual(incar['NEDOS'], 1145)

        # Remove the directory in which the inputs have been created.
        shutil.rmtree('optics_test_dir')

    def test_get_kpoints(self):
        kpoints = self.paramset.get_kpoints(self.struct)
        self.assertEqual(kpoints.kpts, [[2, 4, 6]])
        self.assertEqual(kpoints.style, 'Monkhorst')

        kpoints = self.mitparamset.get_kpoints(self.struct)
        self.assertEqual(kpoints.kpts, [[2, 4, 6]])
        self.assertEqual(kpoints.style, 'Monkhorst')

        kpoints = self.mpstaticparamset.get_kpoints(self.struct)
        self.assertEqual(kpoints.kpts, [[6, 6, 4]])
        self.assertEqual(kpoints.style, 'Monkhorst')

        kpoints = self.mpnscfparamsetl.get_kpoints(self.struct)
        self.assertEqual(kpoints.num_kpts, 140)
        self.assertEqual(kpoints.style, 'Reciprocal')

        kpoints = self.mpnscfparamsetu.get_kpoints(self.struct)
        self.assertEqual(kpoints.num_kpts, 168)

        kpoints = self.mpbshseparamsetl.get_kpoints(self.struct)
        self.assertAlmostEqual(kpoints.num_kpts, 164)
        self.assertAlmostEqual(kpoints.kpts[10][0], 0.0)
        self.assertAlmostEqual(kpoints.kpts[10][1], 0.5)
        self.assertAlmostEqual(kpoints.kpts[10][2], 0.16666667)
        self.assertAlmostEqual(kpoints.kpts[26][0], 0.0714285714286)
        self.assertAlmostEqual(kpoints.kpts[26][1], 0.0)
        self.assertAlmostEqual(kpoints.kpts[26][2], 0.0)
        self.assertAlmostEqual(kpoints.kpts[-1][0], 0.5)
        self.assertAlmostEqual(kpoints.kpts[-1][1], 0.5)
        self.assertAlmostEqual(kpoints.kpts[-1][2], 0.5)

        kpoints = self.mpbshseparamsetu.get_kpoints(self.struct)
        self.assertAlmostEqual(kpoints.num_kpts, 25)
        self.assertAlmostEqual(kpoints.kpts[10][0], 0.0)
        self.assertAlmostEqual(kpoints.kpts[10][1], 0.5)
        self.assertAlmostEqual(kpoints.kpts[10][2], 0.16666667)
        self.assertAlmostEqual(kpoints.kpts[-1][0], 0.5)
        self.assertAlmostEqual(kpoints.kpts[-1][1], 0.5)
        self.assertAlmostEqual(kpoints.kpts[-1][2], 0.0)

    def test_get_all_vasp_input(self):
        """ISMEAR should switch from -5 to 0 for large supercells."""
        d = self.mitparamset.get_all_vasp_input(self.struct)
        self.assertEqual(d["INCAR"]["ISMEAR"], -5)
        self.struct.make_supercell(4)
        d = self.mitparamset.get_all_vasp_input(self.struct)
        self.assertEqual(d["INCAR"]["ISMEAR"], 0)

    def test_to_from_dict(self):
        """Round-trip each input set through as_dict()/MontyDecoder."""
        self.mitparamset = MITVaspInputSet()
        self.mithseparamset = MITHSEVaspInputSet()
        self.paramset = MPVaspInputSet()
        self.userparamset = MPVaspInputSet(
            user_incar_settings={'MAGMOM': {"Fe": 10, "S": -5, "Mn3+": 100}}
        )

        d = self.mitparamset.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(v.incar_settings["LDAUU"]["O"]["Fe"], 4)

        d = self.mitggaparam.as_dict()
        v = dec.process_decoded(d)
        self.assertNotIn("LDAUU", v.incar_settings)

        d = self.mithseparamset.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(v.incar_settings["LHFCALC"], True)

        d = self.mphseparamset.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(v.incar_settings["LHFCALC"], True)

        d = self.paramset.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(v.incar_settings["LDAUU"]["O"]["Fe"], 5.3)

        d = self.userparamset.as_dict()
        v = dec.process_decoded(d)
        #self.assertEqual(type(v), MPVaspInputSet)
        self.assertEqual(v.incar_settings["MAGMOM"],
                         {"Fe": 10, "S": -5, "Mn3+": 100})
class MITMDVaspInputSetTest(unittest.TestCase):
    """Tests for MITMDVaspInputSet (molecular-dynamics input generation)."""

    def setUp(self):
        filepath = os.path.join(test_dir, 'POSCAR')
        poscar = Poscar.from_file(filepath)
        self.struct = poscar.structure
        # Args are start temperature, end temperature, and number of steps.
        self.mitmdparam = MITMDVaspInputSet(300, 1200, 10000)

    def test_get_potcar_symbols(self):
        syms = self.mitmdparam.get_potcar_symbols(self.struct)
        self.assertEqual(syms, ['Fe', 'P', 'O'])

    def test_get_incar(self):
        # MD runs disable LDAU and loosen the electronic convergence.
        incar = self.mitmdparam.get_incar(self.struct)
        self.assertNotIn("LDAUU", incar)
        self.assertAlmostEqual(incar['EDIFF'], 2.4e-5)

    def test_get_kpoints(self):
        # MD uses a single Gamma-centered k-point.
        kpoints = self.mitmdparam.get_kpoints(self.struct)
        self.assertEqual(kpoints.kpts, [(1, 1, 1)])
        self.assertEqual(kpoints.style, 'Gamma')

    def test_to_from_dict(self):
        d = self.mitmdparam.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(type(v), MITMDVaspInputSet)
        self.assertEqual(v.incar_settings["TEBEG"], 300)
class MITNEBVaspInputSetTest(unittest.TestCase):
    """Tests for MITNEBVaspInputSet (nudged-elastic-band input generation)."""

    def setUp(self):
        filepath = os.path.join(test_dir, 'POSCAR')
        poscar = Poscar.from_file(filepath)
        self.struct = poscar.structure
        self.vis = MITNEBVaspInputSet(nimages=10, hubbard_off=True)

    def test_get_potcar_symbols(self):
        syms = self.vis.get_potcar_symbols(self.struct)
        self.assertEqual(syms, ['Fe', 'P', 'O'])

    def test_get_incar(self):
        # hubbard_off=True should drop LDAU entirely.
        incar = self.vis.get_incar(self.struct)
        self.assertNotIn("LDAUU", incar)
        self.assertAlmostEqual(incar['EDIFF'], 0.00005)

    def test_get_kpoints(self):
        kpoints = self.vis.get_kpoints(self.struct)
        self.assertEqual(kpoints.kpts, [[2, 4, 6]])
        self.assertEqual(kpoints.style, 'Monkhorst')

    def test_to_from_dict(self):
        d = self.vis.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(v.incar_settings["IMAGES"], 10)

    def test_write_inputs(self):
        # Interpolated endpoint images should be unwrapped consistently
        # (frac coords may exceed 1 to keep the path continuous).
        c1 = [[0.5] * 3, [0.9] * 3]
        c2 = [[0.5] * 3, [0.9, 0.1, 0.1]]
        s1 = Structure(Lattice.cubic(5), ['Si', 'Si'], c1)
        s2 = Structure(Lattice.cubic(5), ['Si', 'Si'], c2)
        structs = []
        for s in s1.interpolate(s2, 3, pbc=True):
            structs.append(Structure.from_sites(s.sites,
                                                to_unit_cell=True))

        fc = self.vis._process_structures(structs)[2].frac_coords
        self.assertTrue(np.allclose(fc, [[0.5]*3, [0.9, 1.033333, 1.0333333]]))
# Allow running this test module directly with `python test_sets.py`.
if __name__ == '__main__':
    unittest.main()
| 39.940048
| 91
| 0.616331
|
from __future__ import unicode_literals
import unittest
import os
import shutil
import numpy as np
from monty.json import MontyDecoder
from pymatgen.io.vasp.sets import MITVaspInputSet, MITHSEVaspInputSet, \
MPVaspInputSet, MITGGAVaspInputSet, MITNEBVaspInputSet,\
MPStaticVaspInputSet, MPNonSCFVaspInputSet, MITMDVaspInputSet,\
MPHSEVaspInputSet, MPBSHSEVaspInputSet, MPStaticDielectricDFPTVaspInputSet,\
MPOpticsNonSCFVaspInputSet
from pymatgen.io.vasp.inputs import Poscar, Incar
from pymatgen import Specie, Lattice, Structure
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
'test_files')
dec = MontyDecoder()
class MITMPVaspInputSetTest(unittest.TestCase):
def setUp(self):
if "VASP_PSP_DIR" not in os.environ:
os.environ["VASP_PSP_DIR"] = test_dir
filepath = os.path.join(test_dir, 'POSCAR')
poscar = Poscar.from_file(filepath)
self.struct = poscar.structure
self.mitparamset = MITVaspInputSet()
self.mitparamset_unsorted = MITVaspInputSet(sort_structure=False)
self.mithseparamset = MITHSEVaspInputSet()
self.paramset = MPVaspInputSet()
self.userparamset = MPVaspInputSet(
user_incar_settings={'MAGMOM': {"Fe": 10, "S": -5, "Mn3+": 100}}
)
self.mitggaparam = MITGGAVaspInputSet()
self.mpstaticparamset = MPStaticVaspInputSet()
self.mpnscfparamsetu = MPNonSCFVaspInputSet(
{"NBANDS": 50}, mode="Uniform")
self.mpnscfparamsetl = MPNonSCFVaspInputSet(
{"NBANDS": 60}, mode="Line")
self.mphseparamset = MPHSEVaspInputSet()
self.mpbshseparamsetl = MPBSHSEVaspInputSet(mode="Line")
self.mpbshseparamsetu = MPBSHSEVaspInputSet(
mode="Uniform", added_kpoints=[[0.5, 0.5, 0.0]])
self.mpdielparamset = MPStaticDielectricDFPTVaspInputSet()
def test_get_poscar(self):
coords = list()
coords.append([0, 0, 0])
coords.append([0.75, 0.5, 0.75])
lattice = Lattice([[3.8401979337, 0.00, 0.00],
[1.9200989668, 3.3257101909, 0.00],
[0.00, -2.2171384943, 3.1355090603]])
struct = Structure(lattice, ["Fe", "Mn"], coords)
s_unsorted = self.mitparamset_unsorted.get_poscar(struct).structure
s_sorted = self.mitparamset.get_poscar(struct).structure
self.assertEqual(s_unsorted[0].specie.symbol, 'Fe')
self.assertEqual(s_sorted[0].specie.symbol, 'Mn')
def test_get_potcar_symbols(self):
coords = list()
coords.append([0, 0, 0])
coords.append([0.75, 0.5, 0.75])
coords.append([0.75, 0.25, 0.75])
lattice = Lattice([[3.8401979337, 0.00, 0.00],
[1.9200989668, 3.3257101909, 0.00],
[0.00, -2.2171384943, 3.1355090603]])
struct = Structure(lattice, ["P", "Fe", "O"], coords)
syms = self.paramset.get_potcar_symbols(struct)
self.assertEqual(syms, ['Fe_pv', 'P', 'O'])
syms = MPVaspInputSet(sort_structure=False).get_potcar_symbols(struct)
self.assertEqual(syms, ['P', 'Fe_pv', 'O'])
def test_false_potcar_hash(self):
coords = list()
coords.append([0, 0, 0])
coords.append([0.75, 0.5, 0.75])
coords.append([0.75, 0.25, 0.75])
lattice = Lattice([[3.8401979337, 0.00, 0.00],
[1.9200989668, 3.3257101909, 0.00],
[0.00, -2.2171384943, 3.1355090603]])
struct = Structure(lattice, ["P", "Fe", "O"], coords)
self.mitparamset.potcar_settings['Fe']['symbol'] = 'Fe_pv'
self.assertRaises(ValueError, self.mitparamset.get_potcar, struct, check_hash=True)
self.mitparamset.potcar_settings['Fe']['symbol'] = 'Fe'
def test_lda_potcar(self):
coords = list()
coords.append([0, 0, 0])
coords.append([0.75, 0.5, 0.75])
lattice = Lattice([[3.8401979337, 0.00, 0.00],
[1.9200989668, 3.3257101909, 0.00],
[0.00, -2.2171384943, 3.1355090603]])
struct = Structure(lattice, ["P", "Fe"], coords)
p = MITVaspInputSet(potcar_functional="LDA").get_potcar(struct)
self.assertEqual(p.functional, 'LDA')
def test_get_nelect(self):
coords = [[0]*3, [0.5]*3, [0.75]*3]
lattice = Lattice.cubic(4)
s = Structure(lattice, ['Si', 'Si', 'Fe'], coords)
self.assertAlmostEqual(MITVaspInputSet().get_nelect(s), 16)
def test_get_incar(self):
incar = self.paramset.get_incar(self.struct)
self.assertEqual(incar['LDAUU'], [5.3, 0, 0])
self.assertAlmostEqual(incar['EDIFF'], 0.0012)
incar = self.mitparamset.get_incar(self.struct)
self.assertEqual(incar['LDAUU'], [4.0, 0, 0])
self.assertAlmostEqual(incar['EDIFF'], 0.0012)
incar_gga = self.mitggaparam.get_incar(self.struct)
self.assertNotIn("LDAU", incar_gga)
incar_static = self.mpstaticparamset.get_incar(self.struct)
self.assertEqual(incar_static["NSW"], 0)
incar_nscfl = self.mpnscfparamsetl.get_incar(self.struct)
self.assertEqual(incar_nscfl["NBANDS"], 60)
incar_nscfu = self.mpnscfparamsetu.get_incar(self.struct)
self.assertEqual(incar_nscfu["ISYM"], 0)
incar_hse = self.mphseparamset.get_incar(self.struct)
self.assertEqual(incar_hse['LHFCALC'], True)
self.assertEqual(incar_hse['HFSCREEN'], 0.2)
incar_hse_bsl = self.mpbshseparamsetl.get_incar(self.struct)
self.assertEqual(incar_hse_bsl['LHFCALC'], True)
self.assertEqual(incar_hse_bsl['HFSCREEN'], 0.2)
self.assertEqual(incar_hse_bsl['NSW'], 0)
incar_hse_bsu = self.mpbshseparamsetu.get_incar(self.struct)
self.assertEqual(incar_hse_bsu['LHFCALC'], True)
self.assertEqual(incar_hse_bsu['HFSCREEN'], 0.2)
self.assertEqual(incar_hse_bsu['NSW'], 0)
incar_diel = self.mpdielparamset.get_incar(self.struct)
self.assertEqual(incar_diel['IBRION'], 8)
self.assertEqual(incar_diel['LEPSILON'], True)
si = 14
coords = list()
coords.append(np.array([0, 0, 0]))
coords.append(np.array([0.75, 0.5, 0.75]))
latt = Lattice(np.array([[3.8401979337, 0.00, 0.00],
[1.9200989668, 3.3257101909, 0.00],
[0.00, -2.2171384943, 3.1355090603]]))
struct = Structure(latt, [si, si], coords)
incar = self.paramset.get_incar(struct)
self.assertNotIn("LDAU", incar)
incar = self.mithseparamset.get_incar(self.struct)
self.assertTrue(incar['LHFCALC'])
coords = list()
coords.append([0, 0, 0])
coords.append([0.75, 0.5, 0.75])
lattice = Lattice([[3.8401979337, 0.00, 0.00],
[1.9200989668, 3.3257101909, 0.00],
[0.00, -2.2171384943, 3.1355090603]])
struct = Structure(lattice, ["Fe", "Mn"], coords)
incar = self.paramset.get_incar(struct)
self.assertNotIn('LDAU', incar)
struct = Structure(lattice, ["Fe", "F"], coords)
incar = self.paramset.get_incar(struct)
self.assertEqual(incar['LDAUU'], [5.3, 0])
self.assertEqual(incar['MAGMOM'], [5, 0.6])
struct = Structure(lattice, ["Fe", "F"], coords)
incar = self.mitparamset.get_incar(struct)
self.assertEqual(incar['LDAUU'], [4.0, 0])
struct = Structure(lattice, ["Fe2+", "O2-"], coords)
incar = self.paramset.get_incar(struct)
self.assertEqual(incar['LDAUU'], [5.3, 0])
struct = Structure(lattice, ["Fe", "Mn"], coords,
site_properties={'magmom': (5.2, -4.5)})
incar = self.paramset.get_incar(struct)
self.assertEqual(incar['MAGMOM'], [-4.5, 5.2])
incar = self.mpstaticparamset.get_incar(struct)
self.assertEqual(incar['MAGMOM'], [-4.5, 5.2])
incar = self.mitparamset_unsorted.get_incar(struct)
self.assertEqual(incar['MAGMOM'], [5.2, -4.5])
struct = Structure(lattice, [Specie("Fe", 2, {'spin': 4.1}), "Mn"],
coords)
incar = self.paramset.get_incar(struct)
self.assertEqual(incar['MAGMOM'], [5, 4.1])
incar = self.mpnscfparamsetl.get_incar(struct)
self.assertEqual(incar.get('MAGMOM', None), None)
struct = Structure(lattice, ["Mn3+", "Mn4+"], coords)
incar = self.mitparamset.get_incar(struct)
self.assertEqual(incar['MAGMOM'], [4, 3])
incar = self.mpnscfparamsetu.get_incar(struct)
self.assertEqual(incar.get('MAGMOM', None), None)
self.assertEqual(self.userparamset.get_incar(struct)['MAGMOM'],
[100, 0.6])
coords = list()
coords.append([0, 0, 0])
coords.append([0.75, 0.5, 0.75])
coords.append([0.25, 0.5, 0])
struct = Structure(lattice, ["Fe", "Fe", "S"], coords)
incar = self.mitparamset.get_incar(struct)
self.assertEqual(incar['LDAUU'], [1.9, 0])
self.assertNotIn('LDAUU', self.paramset.get_incar(struct))
self.assertNotIn('LDAUU', self.mpstaticparamset.get_incar(struct))
struct = Structure(lattice, ["Fe", "S", "O"], coords)
incar = self.mitparamset.get_incar(struct)
self.assertEqual(incar['LDAUU'], [4.0, 0, 0])
self.assertEqual(self.paramset.get_incar(struct)['LDAUU'], [5.3, 0, 0])
self.assertEqual(self.mpnscfparamsetl.get_incar(struct)['LDAUU'],
[5.3, 0, 0])
self.assertEqual(self.userparamset.get_incar(struct)['MAGMOM'],
[10, -5, 0.6])
def test_optics(self):
self.mpopticsparamset = MPOpticsNonSCFVaspInputSet.from_previous_vasp_run(
'{}/static_silicon'.format(test_dir), output_dir='optics_test_dir',
nedos=1145)
self.assertTrue(os.path.exists('optics_test_dir/CHGCAR'))
incar = Incar.from_file('optics_test_dir/INCAR')
self.assertTrue(incar['LOPTICS'])
self.assertEqual(incar['NEDOS'], 1145)
shutil.rmtree('optics_test_dir')
def test_get_kpoints(self):
kpoints = self.paramset.get_kpoints(self.struct)
self.assertEqual(kpoints.kpts, [[2, 4, 6]])
self.assertEqual(kpoints.style, 'Monkhorst')
kpoints = self.mitparamset.get_kpoints(self.struct)
self.assertEqual(kpoints.kpts, [[2, 4, 6]])
self.assertEqual(kpoints.style, 'Monkhorst')
kpoints = self.mpstaticparamset.get_kpoints(self.struct)
self.assertEqual(kpoints.kpts, [[6, 6, 4]])
self.assertEqual(kpoints.style, 'Monkhorst')
kpoints = self.mpnscfparamsetl.get_kpoints(self.struct)
self.assertEqual(kpoints.num_kpts, 140)
self.assertEqual(kpoints.style, 'Reciprocal')
kpoints = self.mpnscfparamsetu.get_kpoints(self.struct)
self.assertEqual(kpoints.num_kpts, 168)
kpoints = self.mpbshseparamsetl.get_kpoints(self.struct)
self.assertAlmostEqual(kpoints.num_kpts, 164)
self.assertAlmostEqual(kpoints.kpts[10][0], 0.0)
self.assertAlmostEqual(kpoints.kpts[10][1], 0.5)
self.assertAlmostEqual(kpoints.kpts[10][2], 0.16666667)
self.assertAlmostEqual(kpoints.kpts[26][0], 0.0714285714286)
self.assertAlmostEqual(kpoints.kpts[26][1], 0.0)
self.assertAlmostEqual(kpoints.kpts[26][2], 0.0)
self.assertAlmostEqual(kpoints.kpts[-1][0], 0.5)
self.assertAlmostEqual(kpoints.kpts[-1][1], 0.5)
self.assertAlmostEqual(kpoints.kpts[-1][2], 0.5)
kpoints = self.mpbshseparamsetu.get_kpoints(self.struct)
self.assertAlmostEqual(kpoints.num_kpts, 25)
self.assertAlmostEqual(kpoints.kpts[10][0], 0.0)
self.assertAlmostEqual(kpoints.kpts[10][1], 0.5)
self.assertAlmostEqual(kpoints.kpts[10][2], 0.16666667)
self.assertAlmostEqual(kpoints.kpts[-1][0], 0.5)
self.assertAlmostEqual(kpoints.kpts[-1][1], 0.5)
self.assertAlmostEqual(kpoints.kpts[-1][2], 0.0)
    def test_get_all_vasp_input(self):
        """get_all_vasp_input adapts ISMEAR to the structure's size."""
        d = self.mitparamset.get_all_vasp_input(self.struct)
        # Small cell: ISMEAR = -5 expected.
        self.assertEqual(d["INCAR"]["ISMEAR"], -5)
        # NOTE: make_supercell mutates self.struct for the rest of this test.
        self.struct.make_supercell(4)
        d = self.mitparamset.get_all_vasp_input(self.struct)
        # Large supercell: ISMEAR switches to 0.
        self.assertEqual(d["INCAR"]["ISMEAR"], 0)
    def test_to_from_dict(self):
        """Round-trip each input set through as_dict()/process_decoded()."""
        # Rebuild several fixtures locally (shadowing the setUp versions)
        # so the round trip starts from freshly-constructed defaults.
        self.mitparamset = MITVaspInputSet()
        self.mithseparamset = MITHSEVaspInputSet()
        self.paramset = MPVaspInputSet()
        self.userparamset = MPVaspInputSet(
            user_incar_settings={'MAGMOM': {"Fe": 10, "S": -5, "Mn3+": 100}}
        )
        # MIT set keeps its +U value for Fe in oxides.
        d = self.mitparamset.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(v.incar_settings["LDAUU"]["O"]["Fe"], 4)
        # GGA set carries no +U parameters at all.
        d = self.mitggaparam.as_dict()
        v = dec.process_decoded(d)
        self.assertNotIn("LDAUU", v.incar_settings)
        # Both HSE sets must preserve the LHFCALC flag.
        d = self.mithseparamset.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(v.incar_settings["LHFCALC"], True)
        d = self.mphseparamset.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(v.incar_settings["LHFCALC"], True)
        # MP set uses a different Fe +U value than the MIT set.
        d = self.paramset.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(v.incar_settings["LDAUU"]["O"]["Fe"], 5.3)
        # User-supplied MAGMOM overrides must survive the round trip.
        d = self.userparamset.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(v.incar_settings["MAGMOM"],
                         {"Fe": 10, "S": -5, "Mn3+": 100})
class MITMDVaspInputSetTest(unittest.TestCase):
    """Exercises the MIT molecular-dynamics input set on the test POSCAR."""

    def setUp(self):
        # 300 K start, 1200 K end, 10000 steps -- same fixture values as before.
        parsed = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
        self.struct = parsed.structure
        self.mitmdparam = MITMDVaspInputSet(300, 1200, 10000)

    def test_get_potcar_symbols(self):
        symbols = self.mitmdparam.get_potcar_symbols(self.struct)
        self.assertEqual(symbols, ['Fe', 'P', 'O'])

    def test_get_incar(self):
        incar = self.mitmdparam.get_incar(self.struct)
        # MD runs drop the +U block and use a looser EDIFF.
        self.assertNotIn("LDAUU", incar)
        self.assertAlmostEqual(incar['EDIFF'], 2.4e-5)

    def test_get_kpoints(self):
        kpts = self.mitmdparam.get_kpoints(self.struct)
        # Gamma-only sampling for MD.
        self.assertEqual(kpts.kpts, [(1, 1, 1)])
        self.assertEqual(kpts.style, 'Gamma')

    def test_to_from_dict(self):
        decoded = dec.process_decoded(self.mitmdparam.as_dict())
        self.assertEqual(type(decoded), MITMDVaspInputSet)
        # The start temperature must survive the round trip.
        self.assertEqual(decoded.incar_settings["TEBEG"], 300)
class MITNEBVaspInputSetTest(unittest.TestCase):
    """Tests for the MIT NEB (nudged elastic band) input set."""
    def setUp(self):
        filepath = os.path.join(test_dir, 'POSCAR')
        poscar = Poscar.from_file(filepath)
        self.struct = poscar.structure
        # 10 NEB images with Hubbard U corrections disabled.
        self.vis = MITNEBVaspInputSet(nimages=10, hubbard_off=True)
    def test_get_potcar_symbols(self):
        syms = self.vis.get_potcar_symbols(self.struct)
        self.assertEqual(syms, ['Fe', 'P', 'O'])
    def test_get_incar(self):
        incar = self.vis.get_incar(self.struct)
        # hubbard_off=True must strip the LDAU parameters.
        self.assertNotIn("LDAUU", incar)
        self.assertAlmostEqual(incar['EDIFF'], 0.00005)
    def test_get_kpoints(self):
        kpoints = self.vis.get_kpoints(self.struct)
        self.assertEqual(kpoints.kpts, [[2, 4, 6]])
        self.assertEqual(kpoints.style, 'Monkhorst')
    def test_to_from_dict(self):
        d = self.vis.as_dict()
        v = dec.process_decoded(d)
        # nimages=10 is serialized as the IMAGES INCAR setting.
        self.assertEqual(v.incar_settings["IMAGES"], 10)
    def test_write_inputs(self):
        """_process_structures keeps the interpolated path continuous."""
        c1 = [[0.5] * 3, [0.9] * 3]
        c2 = [[0.5] * 3, [0.9, 0.1, 0.1]]
        s1 = Structure(Lattice.cubic(5), ['Si', 'Si'], c1)
        s2 = Structure(Lattice.cubic(5), ['Si', 'Si'], c2)
        structs = []
        for s in s1.interpolate(s2, 3, pbc=True):
            structs.append(Structure.from_sites(s.sites,
                                                to_unit_cell=True))
        # Expected fractional coords exceed 1, i.e. the image is unwrapped
        # across the periodic boundary instead of folded back into the cell.
        fc = self.vis._process_structures(structs)[2].frac_coords
        self.assertTrue(np.allclose(fc, [[0.5]*3,[0.9, 1.033333, 1.0333333]]))
if __name__ == '__main__':
    unittest.main()  # run this test module directly
| true
| true
|
790a3eb6d4ca4e63976f0726a55f649a8308636b
| 1,302
|
py
|
Python
|
Lecture_3_Agents/Exercise1/Exercises/reflex_vacuum_agent.py
|
aleksander-GD/AI-F20
|
c5d086e317f657c1b7a2d2179eafcef0267755ed
|
[
"MIT"
] | null | null | null |
Lecture_3_Agents/Exercise1/Exercises/reflex_vacuum_agent.py
|
aleksander-GD/AI-F20
|
c5d086e317f657c1b7a2d2179eafcef0267755ed
|
[
"MIT"
] | null | null | null |
Lecture_3_Agents/Exercise1/Exercises/reflex_vacuum_agent.py
|
aleksander-GD/AI-F20
|
c5d086e317f657c1b7a2d2179eafcef0267755ed
|
[
"MIT"
] | null | null | null |
# The two-square vacuum world: locations A and B.
A = 'A'
B = 'B'
# Mutable world state: per-location dirt status plus the agent's position
# under the 'Current' key.  Shared by Sensors()/Actuators() below.
Environment = {
    A: 'Dirty',
    B: 'Dirty',
    'Current': A
}
def REFLEX_VACUUM_AGENT(loc_st):
    """Reflex rule: suck if the square is dirty, else move to the other one.

    *loc_st* is the percept tuple (location, status).  Returns 'Suck',
    'Right', 'Left', or None for an unknown location.
    """
    if loc_st[1] == 'Dirty':
        return 'Suck'
    # Clean square: head toward the other location (None if unrecognized).
    return {A: 'Right', B: 'Left'}.get(loc_st[0])
def Sensors():  # Sense Environment
    """Return the percept tuple (current location, that location's status)."""
    location = Environment['Current']
    return (location, Environment[location])
def Actuators(action):
    """Apply *action* to the shared Environment, mutating it in place."""
    here = Environment['Current']
    if action == 'Suck':
        # Cleaning only changes the status of the occupied square.
        Environment[here] = 'Clean'
        return
    # Only the two legal moves relocate the agent; anything else is a no-op.
    transitions = {('Right', A): B, ('Left', B): A}
    if (action, here) in transitions:
        Environment['Current'] = transitions[(action, here)]
def run(n, make_agent):
    """Run the agent for n steps, printing a percept/action trace.

    Fix: the original iterated over range(1, n), executing only n - 1
    steps despite being documented as running "through n steps".
    """
    print('    Current                        New')
    print('location status  action  location status')
    for _step in range(n):  # exactly n steps, not n - 1
        (location, status) = Sensors()  # sense Environment before acting
        print("{:12s}{:8s}".format(location, status), end='')
        action = make_agent(Sensors())
        Actuators(action)
        (location, status) = Sensors()  # sense Environment after acting
        print("{:8s}{:12s}{:8s}".format(action, location, status))
if __name__ == '__main__':
    run(10, REFLEX_VACUUM_AGENT)  # demo run with the reflex agent
| 26.571429
| 73
| 0.592934
|
A = 'A'
B = 'B'
Environment = {
A: 'Dirty',
B: 'Dirty',
'Current': A
}
def REFLEX_VACUUM_AGENT(loc_st):
if loc_st[1] == 'Dirty':
return 'Suck'
if loc_st[0] == A:
return 'Right'
if loc_st[0] == B:
return 'Left'
def Sensors():
location = Environment['Current']
return (location, Environment[location])
def Actuators(action):
location = Environment['Current']
if action == 'Suck':
Environment[location] = 'Clean'
elif action == 'Right' and location == A:
Environment['Current'] = B
elif action == 'Left' and location == B:
Environment['Current'] = A
def run(n, make_agent):
print(' Current New')
print('location status action location status')
for i in range(1, n):
(location, status) = Sensors()
print("{:12s}{:8s}".format(location, status), end='')
action = make_agent(Sensors())
Actuators(action)
(location, status) = Sensors()
print("{:8s}{:12s}{:8s}".format(action, location, status))
if __name__ == '__main__':
run(10, REFLEX_VACUUM_AGENT)
| true
| true
|
790a3f0b2709166a3728a66ccd30986d5f1f1278
| 905
|
py
|
Python
|
genmenu/gen_wiki_sitemap.py
|
RustFisher/python-playground
|
3851eedd3db58d0e7de149da35f44356c7caa3f6
|
[
"MIT"
] | 2
|
2021-07-22T01:58:16.000Z
|
2021-07-22T06:24:13.000Z
|
genmenu/gen_wiki_sitemap.py
|
RustFisher/python-playground
|
3851eedd3db58d0e7de149da35f44356c7caa3f6
|
[
"MIT"
] | null | null | null |
genmenu/gen_wiki_sitemap.py
|
RustFisher/python-playground
|
3851eedd3db58d0e7de149da35f44356c7caa3f6
|
[
"MIT"
] | null | null | null |
import os
import re
def gen_sitemap(main_site, md_file):
    """Build a list of page URLs from an mkdocs-style config file.

    Scans *md_file* line by line for the first ``: <path>.md`` entry on
    each line and returns ``main_site + <path>`` for every hit.  Entries
    whose path is exactly ``'/'`` (root placeholder) are skipped.

    :param main_site: site root URL, expected to end with ``/``.
    :param md_file: path of the mkdocs.yml (or similar) file to scan.
    :return: list of absolute URL strings.
    """
    # Bug fix: the dot in ".md" was unescaped (r': (.*?).md'), so any
    # character before "md" matched -- e.g. ": foo_md" wrongly yielded "foo".
    pattern = re.compile(r': (.*?)\.md')
    res = []
    with open(md_file) as md:
        for line in md:  # iterate lazily instead of readlines()
            cur_urls = pattern.findall(line)
            if not cur_urls:
                continue
            if cur_urls[0] == '/':
                continue  # root placeholder, not a real page
            res.append(main_site + cur_urls[0])
    return res
if __name__ == '__main__':
    # Runtime message (Chinese): "generating the wiki site's sitemap".
    print("生成wiki站的sitemap")
    # NOTE(review): absolute developer-machine path -- adjust before reuse.
    site_map = gen_sitemap('https://www.an.rustfisher.com/',
                           '/Users/rustfisher/Desktop/ws/wiki-ws/mk-android-wiki-proj/mk-an-wiki/mkdocs.yml')
    print(len(site_map))  # quick sanity count of collected URLs
    sitemap_file = 'a-sp.txt'
    # 'w' mode truncates anyway; the explicit remove is kept unchanged.
    if os.path.exists(sitemap_file):
        os.remove(sitemap_file)
    with open(sitemap_file, 'w') as s:
        for url in site_map:
            s.write(url)
            s.write('\n')
| 27.424242
| 109
| 0.550276
|
import os
import re
def gen_sitemap(main_site, md_file):
pattern = re.compile(r': (.*?).md', re.S)
res = []
with open(md_file) as md:
for line in md.readlines():
line = str(line)
cur_urls = re.findall(pattern, line)
if len(cur_urls) > 0:
if cur_urls[0] == '/':
continue
res.append(main_site + cur_urls[0])
return res
if __name__ == '__main__':
print("生成wiki站的sitemap")
site_map = gen_sitemap('https://www.an.rustfisher.com/',
'/Users/rustfisher/Desktop/ws/wiki-ws/mk-android-wiki-proj/mk-an-wiki/mkdocs.yml')
print(len(site_map))
sitemap_file = 'a-sp.txt'
if os.path.exists(sitemap_file):
os.remove(sitemap_file)
with open(sitemap_file, 'w') as s:
for url in site_map:
s.write(url)
s.write('\n')
| true
| true
|
790a40393b486f96b1b116c61ba7abb9cc2df2e4
| 6,307
|
py
|
Python
|
docs/source/conf.py
|
jsmits/github-cli
|
0e5a4fb7a5b6b5ea0fb81ca66c1e1df0ef7eadbd
|
[
"BSD-3-Clause"
] | 81
|
2015-02-12T01:04:07.000Z
|
2022-02-13T13:44:20.000Z
|
docs/source/conf.py
|
jsmits/github-cli
|
0e5a4fb7a5b6b5ea0fb81ca66c1e1df0ef7eadbd
|
[
"BSD-3-Clause"
] | 2
|
2016-01-09T07:37:06.000Z
|
2018-06-04T14:15:19.000Z
|
docs/source/conf.py
|
jsmits/github-cli
|
0e5a4fb7a5b6b5ea0fb81ca66c1e1df0ef7eadbd
|
[
"BSD-3-Clause"
] | 7
|
2015-05-08T15:51:58.000Z
|
2020-11-03T16:28:56.000Z
|
# -*- coding: utf-8 -*-
#
# github-cli documentation build configuration file, created by
# sphinx-quickstart on Tue May 5 17:40:34 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'github-cli'
copyright = u'2009-2012, Sander Smits'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'sphinxdoc'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'github-clidoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'github-cli.tex', u'github-cli Documentation',
   u'Sander Smits', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
| 32.34359
| 80
| 0.722531
|
import sys, os
extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'github-cli'
copyright = u'2009-2012, Sander Smits'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'sphinxdoc'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'github-clidoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'github-cli.tex', u'github-cli Documentation',
u'Sander Smits', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
| true
| true
|
790a417f405c24318040a99ff6cb8d6e62da8eea
| 320
|
py
|
Python
|
shoppinglist/serializers.py
|
christiankuhl/foodplanner
|
20c4a577849bf0ba9304f82f43c307552e846bf2
|
[
"MIT"
] | null | null | null |
shoppinglist/serializers.py
|
christiankuhl/foodplanner
|
20c4a577849bf0ba9304f82f43c307552e846bf2
|
[
"MIT"
] | null | null | null |
shoppinglist/serializers.py
|
christiankuhl/foodplanner
|
20c4a577849bf0ba9304f82f43c307552e846bf2
|
[
"MIT"
] | null | null | null |
from shoppinglist.models import Ingredient
from rest_framework import serializers
class IngredientSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Ingredient rows for the shopping-list REST API."""
    class Meta:
        model = Ingredient
        # Explicit field whitelist; hyperlinked relations come from the base class.
        fields = ('account', 'member', 'ref_date', 'ref_meal',
                  'ingredient', 'created', 'ingredient_there')
| 35.555556
| 67
| 0.709375
|
from shoppinglist.models import Ingredient
from rest_framework import serializers
class IngredientSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Ingredient
fields = ('account', 'member', 'ref_date', 'ref_meal',
'ingredient', 'created', 'ingredient_there')
| true
| true
|
790a420515c20d0bda082401a9fdd5c0dc97bd05
| 909
|
py
|
Python
|
src/bjointsp/main.py
|
5GCity/5GCity-resource-placement
|
2704bcbf37bcc708d6b21b466853a83ad9d2636c
|
[
"Apache-2.0"
] | null | null | null |
src/bjointsp/main.py
|
5GCity/5GCity-resource-placement
|
2704bcbf37bcc708d6b21b466853a83ad9d2636c
|
[
"Apache-2.0"
] | null | null | null |
src/bjointsp/main.py
|
5GCity/5GCity-resource-placement
|
2704bcbf37bcc708d6b21b466853a83ad9d2636c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (C) 2019 - Virtual Open Systems SAS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# author = Teodora Sechkova
# author_email = teodora@virtualopensystems.com
import bjointsp.api.placement as placement
# Start the placement REST API server.
def main():
    """Run the placement Flask app on localhost:3800 (blocking call)."""
    # NOTE(review): debug=True enables the development debugger/reloader;
    # disable it for any non-development deployment.
    placement.api.app.run(host='localhost', port=3800, debug=True)
if __name__ == '__main__':  # script entry point
    main()
| 31.344828
| 74
| 0.731573
|
import bjointsp.api.placement as placement
def main():
placement.api.app.run(host='localhost', port=3800, debug=True)
if __name__ == '__main__':
main()
| true
| true
|
790a42ab58d8a54aad07ae8b2a0b22075c955a08
| 748
|
py
|
Python
|
comment/models.py
|
jackyfzh/j_django_blog
|
c0d5ea0919ad3244dfbf8358695ed22eac5bdf37
|
[
"MIT"
] | 5
|
2020-06-13T05:16:52.000Z
|
2021-05-22T10:57:45.000Z
|
comment/models.py
|
jackyfzh/j_django_blog
|
c0d5ea0919ad3244dfbf8358695ed22eac5bdf37
|
[
"MIT"
] | 9
|
2020-06-06T01:52:52.000Z
|
2022-03-12T00:33:13.000Z
|
comment/models.py
|
jackyfzh/j_django_blog
|
c0d5ea0919ad3244dfbf8358695ed22eac5bdf37
|
[
"MIT"
] | 1
|
2020-06-13T05:17:25.000Z
|
2020-06-13T05:17:25.000Z
|
from django.db import models
import uuid
from django.contrib.auth.models import User
from article.models import ArticlePost
from ckeditor.fields import RichTextField
from mptt.models import MPTTModel, TreeForeignKey
# Create your models here.
class Comment(models.Model):  # comment on a blog article (was: 博文评论)
    # Deleting the article or the author cascades to its comments;
    # both relations are reachable as .comments on the other side.
    article = models.ForeignKey(
        ArticlePost,
        on_delete=models.CASCADE,
        related_name='comments'
    )
    user = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        related_name='comments'
    )
    body = RichTextField()  # rich-text (CKEditor) comment body
    created = models.DateTimeField(auto_now_add=True)  # set once on insert
    class Meta:
        ordering = ('-created',)  # newest comments first
        verbose_name_plural = '评论'  # admin label ("comments"); runtime string
    def __str__(self):
        return self.body[:20]  # first 20 chars; may include HTML markup
| 27.703704
| 53
| 0.679144
|
from django.db import models
import uuid
from django.contrib.auth.models import User
from article.models import ArticlePost
from ckeditor.fields import RichTextField
from mptt.models import MPTTModel, TreeForeignKey
class Comment(models.Model):
article = models.ForeignKey(
ArticlePost,
on_delete=models.CASCADE,
related_name='comments'
)
user = models.ForeignKey(
User,
on_delete=models.CASCADE,
related_name='comments'
)
body = RichTextField()
created = models.DateTimeField(auto_now_add=True)
class Meta:
ordering = ('-created',)
verbose_name_plural = '评论'
def __str__(self):
return self.body[:20]
| true
| true
|
790a42de2c372488a969ad7dfc9c9229356f7c69
| 9,840
|
py
|
Python
|
alembic/versions/3741581c7fc4_initial.py
|
ShacharOch/anyway
|
dd62eeec19d478aca78bf9eb151110a26690495d
|
[
"BSD-3-Clause"
] | null | null | null |
alembic/versions/3741581c7fc4_initial.py
|
ShacharOch/anyway
|
dd62eeec19d478aca78bf9eb151110a26690495d
|
[
"BSD-3-Clause"
] | null | null | null |
alembic/versions/3741581c7fc4_initial.py
|
ShacharOch/anyway
|
dd62eeec19d478aca78bf9eb151110a26690495d
|
[
"BSD-3-Clause"
] | null | null | null |
"""initial
Revision ID: 3741581c7fc4
Revises:
Create Date: 2017-10-02 09:13:51.619334
"""
# revision identifiers, used by Alembic.
revision = '3741581c7fc4'  # this migration's id
down_revision = None       # None: first migration in the chain
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the initial schema: discussions, highlight_markers, markers,
    roles/users with their preference tables, and the marker-linked
    involved/vehicles tables (FK parents are created before children)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('discussions',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('latitude', sa.Float(), nullable=True),
    sa.Column('longitude', sa.Float(), nullable=True),
    sa.Column('type', sa.Integer(), nullable=True),
    sa.Column('title', sa.String(length=100), nullable=True),
    sa.Column('created', sa.DateTime(), nullable=True),
    sa.Column('identifier', sa.String(length=50), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('identifier')
    )
    # Composite lat/long index supports bounding-box queries.
    op.create_index('disc_long_lat_idx', 'discussions', ['latitude', 'longitude'], unique=False)
    op.create_index(op.f('ix_discussions_created'), 'discussions', ['created'], unique=False)
    op.create_table('highlight_markers',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('latitude', sa.Float(), nullable=True),
    sa.Column('longitude', sa.Float(), nullable=True),
    sa.Column('created', sa.DateTime(), nullable=True),
    sa.Column('type', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('highlight_long_lat_idx', 'highlight_markers', ['latitude', 'longitude'], unique=False)
    # markers: composite PK (id, provider_code) -- referenced below by
    # involved and vehicles via a composite FK.
    op.create_table('markers',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('latitude', sa.Float(), nullable=True),
    sa.Column('longitude', sa.Float(), nullable=True),
    sa.Column('type', sa.Integer(), nullable=True),
    sa.Column('title', sa.String(length=100), nullable=True),
    sa.Column('created', sa.DateTime(), nullable=True),
    sa.Column('provider_code', sa.Integer(), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('subtype', sa.Integer(), nullable=True),
    sa.Column('severity', sa.Integer(), nullable=True),
    sa.Column('address', sa.Text(), nullable=True),
    sa.Column('locationAccuracy', sa.Integer(), nullable=True),
    sa.Column('roadType', sa.Integer(), nullable=True),
    sa.Column('roadShape', sa.Integer(), nullable=True),
    sa.Column('dayType', sa.Integer(), nullable=True),
    sa.Column('unit', sa.Integer(), nullable=True),
    sa.Column('mainStreet', sa.Text(), nullable=True),
    sa.Column('secondaryStreet', sa.Text(), nullable=True),
    sa.Column('junction', sa.Text(), nullable=True),
    sa.Column('one_lane', sa.Integer(), nullable=True),
    sa.Column('multi_lane', sa.Integer(), nullable=True),
    sa.Column('speed_limit', sa.Integer(), nullable=True),
    sa.Column('intactness', sa.Integer(), nullable=True),
    sa.Column('road_width', sa.Integer(), nullable=True),
    sa.Column('road_sign', sa.Integer(), nullable=True),
    sa.Column('road_light', sa.Integer(), nullable=True),
    sa.Column('road_control', sa.Integer(), nullable=True),
    sa.Column('weather', sa.Integer(), nullable=True),
    sa.Column('road_surface', sa.Integer(), nullable=True),
    sa.Column('road_object', sa.Integer(), nullable=True),
    sa.Column('object_distance', sa.Integer(), nullable=True),
    sa.Column('didnt_cross', sa.Integer(), nullable=True),
    sa.Column('cross_mode', sa.Integer(), nullable=True),
    sa.Column('cross_location', sa.Integer(), nullable=True),
    sa.Column('cross_direction', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id', 'provider_code')
    )
    op.create_index('acc_long_lat_idx', 'markers', ['latitude', 'longitude'], unique=False)
    op.create_index(op.f('ix_markers_created'), 'markers', ['created'], unique=False)
    op.create_table('roles',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=80), nullable=True),
    sa.Column('description', sa.String(length=255), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('name')
    )
    op.create_table('users',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('email', sa.String(length=120), nullable=True),
    sa.Column('first_name', sa.String(length=50), nullable=True),
    sa.Column('last_name', sa.String(length=50), nullable=True),
    sa.Column('access_token', sa.String(length=100), nullable=True),
    sa.Column('username', sa.String(length=50), nullable=True),
    sa.Column('facebook_id', sa.String(length=50), nullable=True),
    sa.Column('facebook_url', sa.String(length=100), nullable=True),
    sa.Column('is_admin', sa.Boolean(), nullable=True),
    sa.Column('new_features_subscription', sa.Boolean(), nullable=True),
    sa.Column('password', sa.String(length=256), nullable=True),
    sa.Column('active', sa.Boolean(), nullable=True),
    sa.Column('confirmed_at', sa.DateTime(), nullable=True),
    sa.Column('social_id', sa.String(length=64), nullable=True),
    sa.Column('nickname', sa.String(length=64), nullable=True),
    sa.Column('provider', sa.String(length=64), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('email'),
    sa.UniqueConstraint('social_id'),
    sa.UniqueConstraint('username')
    )
    op.create_table('general_preferences',
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('minimum_displayed_severity', sa.Integer(), nullable=True),
    sa.Column('resource_type', sa.String(length=64), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('user_id')
    )
    op.create_table('involved',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('provider_code', sa.Integer(), nullable=True),
    sa.Column('accident_id', sa.Integer(), nullable=True),
    sa.Column('involved_type', sa.Integer(), nullable=True),
    sa.Column('license_acquiring_date', sa.Integer(), nullable=True),
    sa.Column('age_group', sa.Integer(), nullable=True),
    sa.Column('sex', sa.Integer(), nullable=True),
    sa.Column('car_type', sa.Integer(), nullable=True),
    sa.Column('safety_measures', sa.Integer(), nullable=True),
    sa.Column('home_city', sa.Integer(), nullable=True),
    sa.Column('injury_severity', sa.Integer(), nullable=True),
    sa.Column('injured_type', sa.Integer(), nullable=True),
    sa.Column('injured_position', sa.Integer(), nullable=True),
    sa.Column('population_type', sa.Integer(), nullable=True),
    sa.Column('home_district', sa.Integer(), nullable=True),
    sa.Column('home_nafa', sa.Integer(), nullable=True),
    sa.Column('home_area', sa.Integer(), nullable=True),
    sa.Column('home_municipal_status', sa.Integer(), nullable=True),
    sa.Column('home_residence_type', sa.Integer(), nullable=True),
    sa.Column('hospital_time', sa.Integer(), nullable=True),
    sa.Column('medical_type', sa.Integer(), nullable=True),
    sa.Column('release_dest', sa.Integer(), nullable=True),
    sa.Column('safety_measures_use', sa.Integer(), nullable=True),
    sa.Column('late_deceased', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['accident_id', 'provider_code'], [u'markers.id', u'markers.provider_code'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('report_preferences',
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('line_number', sa.Integer(), nullable=False),
    sa.Column('historical_report', sa.Boolean(), nullable=True),
    sa.Column('how_many_months_back', sa.Integer(), nullable=True),
    sa.Column('latitude', sa.Float(), nullable=True),
    sa.Column('longitude', sa.Float(), nullable=True),
    sa.Column('radius', sa.Float(), nullable=True),
    sa.Column('minimum_severity', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('user_id', 'line_number')
    )
    # Association table between users and roles (no surrogate PK).
    op.create_table('roles_users',
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('role_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], )
    )
    op.create_table('vehicles',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('provider_code', sa.Integer(), nullable=True),
    sa.Column('accident_id', sa.Integer(), nullable=True),
    sa.Column('engine_volume', sa.Integer(), nullable=True),
    sa.Column('manufacturing_year', sa.Integer(), nullable=True),
    sa.Column('driving_directions', sa.Integer(), nullable=True),
    sa.Column('vehicle_status', sa.Integer(), nullable=True),
    sa.Column('vehicle_attribution', sa.Integer(), nullable=True),
    sa.Column('vehicle_type', sa.Integer(), nullable=True),
    sa.Column('seats', sa.Integer(), nullable=True),
    sa.Column('total_weight', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['accident_id', 'provider_code'], [u'markers.id', u'markers.provider_code'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade():
    """Revert the initial schema: drop every table and index created by upgrade().

    Tables are dropped child-first (vehicles/involved before markers,
    the preference and role link tables before users/roles) so the
    foreign-key constraints declared in upgrade() are never violated
    during teardown.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('vehicles')
    op.drop_table('roles_users')
    op.drop_table('report_preferences')
    op.drop_table('involved')
    op.drop_table('general_preferences')
    op.drop_table('users')
    op.drop_table('roles')
    # Indexes on a table are removed before the table itself.
    op.drop_index(op.f('ix_markers_created'), table_name='markers')
    op.drop_index('acc_long_lat_idx', table_name='markers')
    op.drop_table('markers')
    op.drop_index('highlight_long_lat_idx', table_name='highlight_markers')
    op.drop_table('highlight_markers')
    op.drop_index(op.f('ix_discussions_created'), table_name='discussions')
    op.drop_index('disc_long_lat_idx', table_name='discussions')
    op.drop_table('discussions')
    ### end Alembic commands ###
| 48.955224
| 125
| 0.683232
|
revision = '3741581c7fc4'
down_revision = None
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
(), nullable=True),
sa.Column('longitude', sa.Float(), nullable=True),
sa.Column('type', sa.Integer(), nullable=True),
sa.Column('title', sa.String(length=100), nullable=True),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('identifier', sa.String(length=50), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('identifier')
)
op.create_index('disc_long_lat_idx', 'discussions', ['latitude', 'longitude'], unique=False)
op.create_index(op.f('ix_discussions_created'), 'discussions', ['created'], unique=False)
op.create_table('highlight_markers',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('latitude', sa.Float(), nullable=True),
sa.Column('longitude', sa.Float(), nullable=True),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('type', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index('highlight_long_lat_idx', 'highlight_markers', ['latitude', 'longitude'], unique=False)
op.create_table('markers',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('latitude', sa.Float(), nullable=True),
sa.Column('longitude', sa.Float(), nullable=True),
sa.Column('type', sa.Integer(), nullable=True),
sa.Column('title', sa.String(length=100), nullable=True),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('provider_code', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('subtype', sa.Integer(), nullable=True),
sa.Column('severity', sa.Integer(), nullable=True),
sa.Column('address', sa.Text(), nullable=True),
sa.Column('locationAccuracy', sa.Integer(), nullable=True),
sa.Column('roadType', sa.Integer(), nullable=True),
sa.Column('roadShape', sa.Integer(), nullable=True),
sa.Column('dayType', sa.Integer(), nullable=True),
sa.Column('unit', sa.Integer(), nullable=True),
sa.Column('mainStreet', sa.Text(), nullable=True),
sa.Column('secondaryStreet', sa.Text(), nullable=True),
sa.Column('junction', sa.Text(), nullable=True),
sa.Column('one_lane', sa.Integer(), nullable=True),
sa.Column('multi_lane', sa.Integer(), nullable=True),
sa.Column('speed_limit', sa.Integer(), nullable=True),
sa.Column('intactness', sa.Integer(), nullable=True),
sa.Column('road_width', sa.Integer(), nullable=True),
sa.Column('road_sign', sa.Integer(), nullable=True),
sa.Column('road_light', sa.Integer(), nullable=True),
sa.Column('road_control', sa.Integer(), nullable=True),
sa.Column('weather', sa.Integer(), nullable=True),
sa.Column('road_surface', sa.Integer(), nullable=True),
sa.Column('road_object', sa.Integer(), nullable=True),
sa.Column('object_distance', sa.Integer(), nullable=True),
sa.Column('didnt_cross', sa.Integer(), nullable=True),
sa.Column('cross_mode', sa.Integer(), nullable=True),
sa.Column('cross_location', sa.Integer(), nullable=True),
sa.Column('cross_direction', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', 'provider_code')
)
op.create_index('acc_long_lat_idx', 'markers', ['latitude', 'longitude'], unique=False)
op.create_index(op.f('ix_markers_created'), 'markers', ['created'], unique=False)
op.create_table('roles',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=80), nullable=True),
sa.Column('description', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('users',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('email', sa.String(length=120), nullable=True),
sa.Column('first_name', sa.String(length=50), nullable=True),
sa.Column('last_name', sa.String(length=50), nullable=True),
sa.Column('access_token', sa.String(length=100), nullable=True),
sa.Column('username', sa.String(length=50), nullable=True),
sa.Column('facebook_id', sa.String(length=50), nullable=True),
sa.Column('facebook_url', sa.String(length=100), nullable=True),
sa.Column('is_admin', sa.Boolean(), nullable=True),
sa.Column('new_features_subscription', sa.Boolean(), nullable=True),
sa.Column('password', sa.String(length=256), nullable=True),
sa.Column('active', sa.Boolean(), nullable=True),
sa.Column('confirmed_at', sa.DateTime(), nullable=True),
sa.Column('social_id', sa.String(length=64), nullable=True),
sa.Column('nickname', sa.String(length=64), nullable=True),
sa.Column('provider', sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('social_id'),
sa.UniqueConstraint('username')
)
op.create_table('general_preferences',
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('minimum_displayed_severity', sa.Integer(), nullable=True),
sa.Column('resource_type', sa.String(length=64), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('user_id')
)
op.create_table('involved',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('provider_code', sa.Integer(), nullable=True),
sa.Column('accident_id', sa.Integer(), nullable=True),
sa.Column('involved_type', sa.Integer(), nullable=True),
sa.Column('license_acquiring_date', sa.Integer(), nullable=True),
sa.Column('age_group', sa.Integer(), nullable=True),
sa.Column('sex', sa.Integer(), nullable=True),
sa.Column('car_type', sa.Integer(), nullable=True),
sa.Column('safety_measures', sa.Integer(), nullable=True),
sa.Column('home_city', sa.Integer(), nullable=True),
sa.Column('injury_severity', sa.Integer(), nullable=True),
sa.Column('injured_type', sa.Integer(), nullable=True),
sa.Column('injured_position', sa.Integer(), nullable=True),
sa.Column('population_type', sa.Integer(), nullable=True),
sa.Column('home_district', sa.Integer(), nullable=True),
sa.Column('home_nafa', sa.Integer(), nullable=True),
sa.Column('home_area', sa.Integer(), nullable=True),
sa.Column('home_municipal_status', sa.Integer(), nullable=True),
sa.Column('home_residence_type', sa.Integer(), nullable=True),
sa.Column('hospital_time', sa.Integer(), nullable=True),
sa.Column('medical_type', sa.Integer(), nullable=True),
sa.Column('release_dest', sa.Integer(), nullable=True),
sa.Column('safety_measures_use', sa.Integer(), nullable=True),
sa.Column('late_deceased', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['accident_id', 'provider_code'], [u'markers.id', u'markers.provider_code'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_table('report_preferences',
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('line_number', sa.Integer(), nullable=False),
sa.Column('historical_report', sa.Boolean(), nullable=True),
sa.Column('how_many_months_back', sa.Integer(), nullable=True),
sa.Column('latitude', sa.Float(), nullable=True),
sa.Column('longitude', sa.Float(), nullable=True),
sa.Column('radius', sa.Float(), nullable=True),
sa.Column('minimum_severity', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('user_id', 'line_number')
)
op.create_table('roles_users',
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], )
)
op.create_table('vehicles',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('provider_code', sa.Integer(), nullable=True),
sa.Column('accident_id', sa.Integer(), nullable=True),
sa.Column('engine_volume', sa.Integer(), nullable=True),
sa.Column('manufacturing_year', sa.Integer(), nullable=True),
sa.Column('driving_directions', sa.Integer(), nullable=True),
sa.Column('vehicle_status', sa.Integer(), nullable=True),
sa.Column('vehicle_attribution', sa.Integer(), nullable=True),
sa.Column('vehicle_type', sa.Integer(), nullable=True),
sa.Column('seats', sa.Integer(), nullable=True),
sa.Column('total_weight', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['accident_id', 'provider_code'], [u'markers.id', u'markers.provider_code'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
eral_preferences')
op.drop_table('users')
op.drop_table('roles')
op.drop_index(op.f('ix_markers_created'), table_name='markers')
op.drop_index('acc_long_lat_idx', table_name='markers')
op.drop_table('markers')
op.drop_index('highlight_long_lat_idx', table_name='highlight_markers')
op.drop_table('highlight_markers')
op.drop_index(op.f('ix_discussions_created'), table_name='discussions')
op.drop_index('disc_long_lat_idx', table_name='discussions')
op.drop_table('discussions')
| true
| true
|
790a43fc7b2d314ae42fcab39668731376907167
| 1,010
|
py
|
Python
|
example/demo/common.py
|
ideascf/data-packer
|
9a325af3164a1e83e1bd9a517de253a1ce622db8
|
[
"MIT"
] | 2
|
2016-10-13T12:34:09.000Z
|
2018-05-31T06:29:28.000Z
|
example/demo/common.py
|
ideascf/data-packer
|
9a325af3164a1e83e1bd9a517de253a1ce622db8
|
[
"MIT"
] | null | null | null |
example/demo/common.py
|
ideascf/data-packer
|
9a325af3164a1e83e1bd9a517de253a1ce622db8
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from __future__ import print_function
import json
from data_packer import err, DataPacker, container
# Shared sample source data for the demos: covers scalar, string, list,
# nested-dict and mixed-nesting values so every field shape the demos
# exercise has a matching entry.
g_src = {
    'a': 1,
    'b': 'hello',
    'c': ['a', 'b', 'c'],
    'd': {
        '1': 1,
        '2': 2,
    },
    'e': {
        '1': ['a', 'b'],
        '2': {
            'a': 'a',
            'b': 'b'
        }
    },
    'f': '0x123',
    'g': 'longlonglonglonglong',
    'h': 2,
}
def valid_container(c):
    """Coerce *c* into a ``container.DictContainer``.

    A plain ``dict`` is wrapped in a new ``DictContainer``; an object that
    already is a ``DictContainer`` is returned unchanged.  Anything else
    raises ``TypeError``.

    Bug fix: the original raised ``TypeError`` even for a ``DictContainer``,
    although the error message has always promised that type is accepted.
    """
    if isinstance(c, dict):
        return container.DictContainer(c)
    if isinstance(c, container.DictContainer):
        return c
    raise TypeError('dst Must be dict or DictContainer')
def demo_run(fields, msg, dst=None, src=None):
    """Run a DataPacker built from *fields* over src/dst and print the outcome.

    *src* defaults to the module-level ``g_src`` sample data and *dst* to a
    fresh dict; both are coerced via ``valid_container``.  Any
    ``DataPackerError`` raised by the run is caught and printed rather than
    propagated, then the (possibly partial) destination is dumped as JSON
    and returned.
    """
    print('')
    print(msg)
    source = g_src if src is None else src
    target = {} if dst is None else dst
    source = valid_container(source)
    target = valid_container(target)
    packer = DataPacker(fields)
    try:
        packer.run(source, target)
    except err.DataPackerError as e:
        print('抛出了异常: ', type(e), e)
    print(json.dumps(target.raw_data(), indent=4))
    return target
| 17.118644
| 60
| 0.50297
|
from __future__ import print_function
import json
from data_packer import err, DataPacker, container
g_src = {
'a': 1,
'b': 'hello',
'c': ['a', 'b', 'c'],
'd': {
'1': 1,
'2': 2,
},
'e': {
'1': ['a', 'b'],
'2': {
'a': 'a',
'b': 'b'
}
},
'f': '0x123',
'g': 'longlonglonglonglong',
'h': 2,
}
def valid_container(c):
if isinstance(c, dict):
c = container.DictContainer(c)
else:
raise TypeError('dst Must be dict or DictContainer')
return c
def demo_run(fields, msg, dst=None, src=None):
print('')
print(msg)
if src is None:
src = g_src
if dst is None:
dst = {}
src = valid_container(src)
dst = valid_container(dst)
dp = DataPacker(fields)
try:
dp.run(src, dst)
except err.DataPackerError as e:
print('抛出了异常: ', type(e), e)
print(json.dumps(dst.raw_data(), indent=4))
return dst
| true
| true
|
790a4402c4f10eb8abc4ab589e2e485c10012e6b
| 645
|
py
|
Python
|
uwsgiconf/contrib/django/uwsgify/admin/models.py
|
graceshaw/uwsgiconf
|
205289bb279dfbcc1d9bfd599dd8ca5d6c527077
|
[
"BSD-3-Clause"
] | null | null | null |
uwsgiconf/contrib/django/uwsgify/admin/models.py
|
graceshaw/uwsgiconf
|
205289bb279dfbcc1d9bfd599dd8ca5d6c527077
|
[
"BSD-3-Clause"
] | null | null | null |
uwsgiconf/contrib/django/uwsgify/admin/models.py
|
graceshaw/uwsgiconf
|
205289bb279dfbcc1d9bfd599dd8ca5d6c527077
|
[
"BSD-3-Clause"
] | null | null | null |
from django.db import models
from django.utils.translation import gettext_lazy as _
class Summary(models.Model):
    """Field-less placeholder model.

    ``managed = False`` means Django never creates or migrates a table for
    it.  NOTE(review): presumably exists only to register a named entry in
    the admin -- confirm against the admin registration code.
    """

    class Meta:
        app_label = 'uwsgify'
        managed = False  # no database table behind this model
        verbose_name = _('Summary')
        verbose_name_plural = _('Summary')  # same singular/plural label on purpose
class Configuration(models.Model):
    """Field-less placeholder model.

    ``managed = False`` means Django never creates or migrates a table for
    it.  NOTE(review): presumably exists only to register a named entry in
    the admin -- confirm against the admin registration code.
    """

    class Meta:
        app_label = 'uwsgify'
        managed = False  # no database table behind this model
        verbose_name = _('Configuration')
        verbose_name_plural = _('Configuration')  # same singular/plural label on purpose
class Workers(models.Model):
    """Field-less placeholder model.

    ``managed = False`` means Django never creates or migrates a table for
    it.  NOTE(review): presumably exists only to register a named entry in
    the admin -- confirm against the admin registration code.
    """

    class Meta:
        app_label = 'uwsgify'
        managed = False  # no database table behind this model
        verbose_name = _('Workers')
        verbose_name_plural = _('Workers')  # same singular/plural label on purpose
| 21.5
| 54
| 0.635659
|
from django.db import models
from django.utils.translation import gettext_lazy as _
class Summary(models.Model):
class Meta:
app_label = 'uwsgify'
managed = False
verbose_name = _('Summary')
verbose_name_plural = _('Summary')
class Configuration(models.Model):
class Meta:
app_label = 'uwsgify'
managed = False
verbose_name = _('Configuration')
verbose_name_plural = _('Configuration')
class Workers(models.Model):
class Meta:
app_label = 'uwsgify'
managed = False
verbose_name = _('Workers')
verbose_name_plural = _('Workers')
| true
| true
|
790a4507d31e38902dfe47fef274d952cd116e52
| 2,079
|
py
|
Python
|
Bio/PDB/Structure.py
|
uci-ics-32/biopython
|
ff7d3703d442192a1f6d84c52e028d566d44ff1c
|
[
"BSD-3-Clause"
] | 6
|
2015-04-22T17:18:46.000Z
|
2020-06-02T14:16:20.000Z
|
Bio/PDB/Structure.py
|
uci-ics-32/biopython
|
ff7d3703d442192a1f6d84c52e028d566d44ff1c
|
[
"BSD-3-Clause"
] | 14
|
2021-03-26T20:54:22.000Z
|
2021-04-06T17:18:53.000Z
|
Bio/PDB/Structure.py
|
uci-ics-32/biopython
|
ff7d3703d442192a1f6d84c52e028d566d44ff1c
|
[
"BSD-3-Clause"
] | 8
|
2016-02-20T22:53:21.000Z
|
2022-02-04T06:10:23.000Z
|
# Copyright (C) 2002, Thomas Hamelryck (thamelry@binf.ku.dk)
#
# This file is part of the Biopython distribution and governed by your
# choice of the "Biopython License Agreement" or the "BSD 3-Clause License".
# Please see the LICENSE file that should have been included as part of this
# package.
"""The structure class, representing a macromolecular structure."""
from Bio.PDB.Entity import Entity
from Bio.PDB.internal_coords import IC_Chain
class Structure(Entity):
    """The Structure class contains a collection of Model instances."""

    def __init__(self, id):
        """Initialize the structure with identifier *id*."""
        self.level = "S"  # hierarchy-level tag; set before Entity init
        Entity.__init__(self, id)

    def __repr__(self):
        """Return the structure identifier."""
        return "<Structure id=%s>" % self.get_id()

    def get_models(self):
        """Iterate over the models contained in this structure."""
        for model in self:
            yield model

    def get_chains(self):
        """Iterate over every chain of every model."""
        for model in self.get_models():
            for chain in model:
                yield chain

    def get_residues(self):
        """Iterate over every residue of every chain."""
        for chain in self.get_chains():
            for residue in chain:
                yield residue

    def get_atoms(self):
        """Iterate over every atom of every residue."""
        for residue in self.get_residues():
            for atom in residue:
                yield atom

    def atom_to_internal_coordinates(self, verbose: bool = False) -> None:
        """Create/update internal coordinates from Atom X,Y,Z coordinates.

        Internal coordinates are bond length, angle and dihedral angles;
        the per-chain work is delegated to each chain.

        :param verbose bool: default False
            describe runtime problems
        """
        for chain in self.get_chains():
            chain.atom_to_internal_coordinates(verbose)

    def internal_to_atom_coordinates(self, verbose: bool = False) -> None:
        """Create/update atom coordinates from internal coordinates.

        :param verbose bool: default False
            describe runtime problems
        :raises Exception: if any chain does not have .pic attribute
        """
        for chain in self.get_chains():
            chain.internal_to_atom_coordinates(verbose)
| 31.029851
| 76
| 0.647908
|
from Bio.PDB.Entity import Entity
from Bio.PDB.internal_coords import IC_Chain
class Structure(Entity):
def __init__(self, id):
self.level = "S"
Entity.__init__(self, id)
def __repr__(self):
return "<Structure id=%s>" % self.get_id()
def get_models(self):
yield from self
def get_chains(self):
for m in self.get_models():
yield from m
def get_residues(self):
for c in self.get_chains():
yield from c
def get_atoms(self):
for r in self.get_residues():
yield from r
def atom_to_internal_coordinates(self, verbose: bool = False) -> None:
for chn in self.get_chains():
chn.atom_to_internal_coordinates(verbose)
def internal_to_atom_coordinates(self, verbose: bool = False) -> None:
for chn in self.get_chains():
chn.internal_to_atom_coordinates(verbose)
| true
| true
|
790a45fbd60d4b4c0b21668daf1344c954c47f0b
| 423
|
py
|
Python
|
fizzbuzz.py
|
anzpia/FizzBuzz
|
21312296defcfa87f4febd1bec23d8402d130f29
|
[
"MIT"
] | null | null | null |
fizzbuzz.py
|
anzpia/FizzBuzz
|
21312296defcfa87f4febd1bec23d8402d130f29
|
[
"MIT"
] | null | null | null |
fizzbuzz.py
|
anzpia/FizzBuzz
|
21312296defcfa87f4febd1bec23d8402d130f29
|
[
"MIT"
] | null | null | null |
def fizzbuzz_lines(limit):
    """Return the FizzBuzz sequence for 1..limit as a list of strings.

    Multiples of both 3 and 5 map to 'fizzbuzz', other multiples of 3 to
    'fizz', other multiples of 5 to 'buzz'; any other number is rendered
    as its decimal string.  Returns an empty list for limit < 1.
    """
    lines = []
    for x in range(1, limit + 1):
        if x % 15 == 0:  # divisible by both 3 and 5
            lines.append('fizzbuzz')
        elif x % 3 == 0:
            lines.append('fizz')
        elif x % 5 == 0:
            lines.append('buzz')
        else:
            lines.append(str(x))
    return lines


def main():
    """Prompt for an upper bound and print FizzBuzz up to it.

    Out-of-range input (outside 1..100) is silently ignored, matching the
    original script's behaviour.
    """
    stevilo = int(input("Select a number between 1 and 100:"))
    if 1 <= stevilo <= 100:
        for line in fizzbuzz_lines(stevilo):
            print(line)


# Guarding the entry point keeps the module importable (e.g. for tests)
# without triggering the interactive prompt.
if __name__ == "__main__":
    main()
| 24.882353
| 59
| 0.434988
|
stevilo = int(input("Select a number between 1 and 100:"))
x = 1
if stevilo >= 1 and stevilo <= 100:
while x <= stevilo:
ostanek_1 = x % 3
ostanek_2 = x % 5
if x % 3 == 0 and x % 5 == 0:
print('fizzbuzz')
elif ostanek_1 == 0:
print ("fizz")
elif ostanek_2 == 0:
print("buzz")
else:
print(x)
x += 1
| true
| true
|
790a467648c34c6d5587f6677448eece4263ea12
| 321
|
py
|
Python
|
scripts/extract_key.py
|
425629/esp32-weather-google-sheets
|
c4ee5f853e71ec9c25986150d7c7ed940e564953
|
[
"MIT"
] | 50
|
2019-07-06T09:18:18.000Z
|
2022-03-03T17:06:40.000Z
|
scripts/extract_key.py
|
425629/esp32-weather-google-sheets
|
c4ee5f853e71ec9c25986150d7c7ed940e564953
|
[
"MIT"
] | 10
|
2019-10-06T16:45:00.000Z
|
2021-07-16T15:31:17.000Z
|
scripts/extract_key.py
|
425629/esp32-weather-google-sheets
|
c4ee5f853e71ec9c25986150d7c7ed940e564953
|
[
"MIT"
] | 15
|
2019-06-24T14:45:49.000Z
|
2021-11-08T17:37:42.000Z
|
import json
import sys

from rsa import PrivateKey

# Read a PKCS#1 PEM private key (path in argv[1]) ...
with open(sys.argv[1], 'rb') as key_file:
    key = PrivateKey.load_pkcs1(key_file.read())

# ... and dump its integer components as JSON (path in argv[2]).
components = {
    'n': key.n,
    'e': key.e,
    'd': key.d,
    'p': key.p,
    'q': key.q,
}

with open(sys.argv[2], 'w') as json_file:
    json_file.write(json.dumps(components))
| 21.4
| 45
| 0.548287
|
import json
import sys
from rsa import PrivateKey
with open(sys.argv[1], 'rb') as input:
key = PrivateKey.load_pkcs1(input.read())
d = {}
d['n'] = key.n
d['e'] = key.e
d['d'] = key.d
d['p'] = key.p
d['q'] = key.q
with open(sys.argv[2], 'w') as output:
output.write(json.dumps(d))
| true
| true
|
790a481bfed4f88fab5a1d37c43fb0db08719bd8
| 33,626
|
py
|
Python
|
electrum/tests/test_lnmsg.py
|
Jesusown/electrum
|
0df05dd914c823acae1828cad3b20bdeb13150e9
|
[
"MIT"
] | 5,905
|
2015-01-02T17:05:36.000Z
|
2022-03-29T07:28:29.000Z
|
electrum/tests/test_lnmsg.py
|
Jesusown/electrum
|
0df05dd914c823acae1828cad3b20bdeb13150e9
|
[
"MIT"
] | 6,097
|
2015-01-01T21:20:25.000Z
|
2022-03-31T23:55:01.000Z
|
electrum/tests/test_lnmsg.py
|
Jesusown/electrum
|
0df05dd914c823acae1828cad3b20bdeb13150e9
|
[
"MIT"
] | 2,202
|
2015-01-02T18:31:25.000Z
|
2022-03-28T15:35:03.000Z
|
import io
from electrum.lnmsg import (read_bigsize_int, write_bigsize_int, FieldEncodingNotMinimal,
UnexpectedEndOfStream, LNSerializer, UnknownMandatoryTLVRecordType,
MalformedMsg, MsgTrailingGarbage, MsgInvalidFieldOrder, encode_msg,
decode_msg, UnexpectedFieldSizeForEncoder, OnionWireSerializer,
UnknownMsgType)
from electrum.lnonion import OnionRoutingFailure
from electrum.util import bfh
from electrum.lnutil import ShortChannelID, LnFeatures
from electrum import constants
from . import TestCaseForTestnet
class TestLNMsg(TestCaseForTestnet):
    def test_write_bigsize_int(self):
        """Encode BigSize integers at every width boundary (BOLT #1 vectors)."""
        # Values <= 0xfc are a single byte; larger values take a
        # 0xfd/0xfe/0xff discriminator plus 2/4/8 big-endian bytes.
        self.assertEqual(bfh("00"), write_bigsize_int(0))
        self.assertEqual(bfh("fc"), write_bigsize_int(252))
        self.assertEqual(bfh("fd00fd"), write_bigsize_int(253))
        self.assertEqual(bfh("fdffff"), write_bigsize_int(65535))
        self.assertEqual(bfh("fe00010000"), write_bigsize_int(65536))
        self.assertEqual(bfh("feffffffff"), write_bigsize_int(4294967295))
        self.assertEqual(bfh("ff0000000100000000"), write_bigsize_int(4294967296))
        self.assertEqual(bfh("ffffffffffffffffff"), write_bigsize_int(18446744073709551615))
    def test_read_bigsize_int(self):
        """Decode BigSize integers and reject non-minimal or truncated input."""
        # Round-trip of the canonical encodings at each width boundary.
        self.assertEqual(0, read_bigsize_int(io.BytesIO(bfh("00"))))
        self.assertEqual(252, read_bigsize_int(io.BytesIO(bfh("fc"))))
        self.assertEqual(253, read_bigsize_int(io.BytesIO(bfh("fd00fd"))))
        self.assertEqual(65535, read_bigsize_int(io.BytesIO(bfh("fdffff"))))
        self.assertEqual(65536, read_bigsize_int(io.BytesIO(bfh("fe00010000"))))
        self.assertEqual(4294967295, read_bigsize_int(io.BytesIO(bfh("feffffffff"))))
        self.assertEqual(4294967296, read_bigsize_int(io.BytesIO(bfh("ff0000000100000000"))))
        self.assertEqual(18446744073709551615, read_bigsize_int(io.BytesIO(bfh("ffffffffffffffffff"))))
        # Values that fit in a smaller width must not use a larger one.
        with self.assertRaises(FieldEncodingNotMinimal):
            read_bigsize_int(io.BytesIO(bfh("fd00fc")))
        with self.assertRaises(FieldEncodingNotMinimal):
            read_bigsize_int(io.BytesIO(bfh("fe0000ffff")))
        with self.assertRaises(FieldEncodingNotMinimal):
            read_bigsize_int(io.BytesIO(bfh("ff00000000ffffffff")))
        # Payload shorter than the discriminator promises.
        with self.assertRaises(UnexpectedEndOfStream):
            read_bigsize_int(io.BytesIO(bfh("fd00")))
        with self.assertRaises(UnexpectedEndOfStream):
            read_bigsize_int(io.BytesIO(bfh("feffff")))
        with self.assertRaises(UnexpectedEndOfStream):
            read_bigsize_int(io.BytesIO(bfh("ffffffffff")))
        # Completely empty stream yields None rather than raising.
        self.assertEqual(None, read_bigsize_int(io.BytesIO(bfh(""))))
        # Bare discriminator byte with no payload at all.
        with self.assertRaises(UnexpectedEndOfStream):
            read_bigsize_int(io.BytesIO(bfh("fd")))
        with self.assertRaises(UnexpectedEndOfStream):
            read_bigsize_int(io.BytesIO(bfh("fe")))
        with self.assertRaises(UnexpectedEndOfStream):
            read_bigsize_int(io.BytesIO(bfh("ff")))
def test_read_tlv_stream_tests1(self):
# from https://github.com/lightningnetwork/lightning-rfc/blob/452a0eb916fedf4c954137b4fd0b61b5002b34ad/01-messaging.md#tlv-decoding-failures
lnser = LNSerializer()
for tlv_stream_name in ("n1", "n2"):
with self.subTest(tlv_stream_name=tlv_stream_name):
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd01")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd000100")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd0101")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd26")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd2602")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd000100")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd0201000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")), tlv_stream_name="n1")
with self.assertRaises(UnknownMandatoryTLVRecordType):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("1200")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnknownMandatoryTLVRecordType):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd010200")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnknownMandatoryTLVRecordType):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fe0100000200")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnknownMandatoryTLVRecordType):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("ff010000000000000200")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(MsgTrailingGarbage):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0109ffffffffffffffffff")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("010100")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("01020001")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0103000100")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("010400010000")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("01050001000000")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0106000100000000")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("010700010000000000")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("01080001000000000000")), tlv_stream_name="n1")
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("020701010101010101")), tlv_stream_name="n1")
with self.assertRaises(MsgTrailingGarbage):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0209010101010101010101")), tlv_stream_name="n1")
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0321023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb")), tlv_stream_name="n1")
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0329023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001")), tlv_stream_name="n1")
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0330023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb000000000000000100000000000001")), tlv_stream_name="n1")
# check if ECC point is valid?... skip for now.
#with self.assertRaises(Exception):
# lnser.read_tlv_stream(fd=io.BytesIO(bfh("0331043da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002")), tlv_stream_name="n1")
with self.assertRaises(MsgTrailingGarbage):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0332023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001000000000000000001")), tlv_stream_name="n1")
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe00")), tlv_stream_name="n1")
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe0101")), tlv_stream_name="n1")
with self.assertRaises(MsgTrailingGarbage):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe03010101")), tlv_stream_name="n1")
with self.assertRaises(UnknownMandatoryTLVRecordType):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0000")), tlv_stream_name="n1")
def test_read_tlv_stream_tests2(self):
# from https://github.com/lightningnetwork/lightning-rfc/blob/452a0eb916fedf4c954137b4fd0b61b5002b34ad/01-messaging.md#tlv-decoding-successes
lnser = LNSerializer()
for tlv_stream_name in ("n1", "n2"):
with self.subTest(tlv_stream_name=tlv_stream_name):
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("")), tlv_stream_name=tlv_stream_name))
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("2100")), tlv_stream_name=tlv_stream_name))
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd020100")), tlv_stream_name=tlv_stream_name))
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fd00")), tlv_stream_name=tlv_stream_name))
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00ff00")), tlv_stream_name=tlv_stream_name))
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fe0200000100")), tlv_stream_name=tlv_stream_name))
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("ff020000000000000100")), tlv_stream_name=tlv_stream_name))
self.assertEqual({"tlv1": {"amount_msat": 0}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0100")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 1}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("010101")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 256}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("01020100")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 65536}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0103010000")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 16777216}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("010401000000")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 4294967296}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("01050100000000")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 1099511627776}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0106010000000000")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 281474976710656}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("010701000000000000")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 72057594037927936}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("01080100000000000000")), tlv_stream_name="n1"))
self.assertEqual({"tlv2": {"scid": ShortChannelID.from_components(0, 0, 550)}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("02080000000000000226")), tlv_stream_name="n1"))
self.assertEqual({"tlv3": {"node_id": bfh("023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb"),
"amount_msat_1": 1,
"amount_msat_2": 2}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0331023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002")), tlv_stream_name="n1"))
self.assertEqual({"tlv4": {"cltv_delta": 550}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe020226")), tlv_stream_name="n1"))
    def test_read_tlv_stream_tests3(self):
        """TLV record types must be strictly ascending; violations raise MsgInvalidFieldOrder."""
        # from https://github.com/lightningnetwork/lightning-rfc/blob/452a0eb916fedf4c954137b4fd0b61b5002b34ad/01-messaging.md#tlv-stream-decoding-failure
        lnser = LNSerializer()
        # type 1 appears after type 2: not ascending.
        with self.assertRaises(MsgInvalidFieldOrder):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("0208000000000000022601012a")), tlv_stream_name="n1")
        # type 2 appears twice: duplicates are forbidden.
        with self.assertRaises(MsgInvalidFieldOrder):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("0208000000000000023102080000000000000451")), tlv_stream_name="n1")
        # type 0x0f after type 0x1f: not ascending.
        with self.assertRaises(MsgInvalidFieldOrder):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("1f000f012a")), tlv_stream_name="n1")
        # type 0x1f repeated: duplicates are forbidden.
        with self.assertRaises(MsgInvalidFieldOrder):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("1f001f012a")), tlv_stream_name="n1")
        # ordering is enforced even for the widest (8-byte) type numbers.
        with self.assertRaises(MsgInvalidFieldOrder):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("ffffffffffffffffff000000")), tlv_stream_name="n2")
def test_encode_decode_msg__missing_mandatory_field_gets_set_to_zeroes(self):
# "channel_update": "signature" missing -> gets set to zeroes
self.assertEqual(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00"),
encode_msg(
"channel_update",
short_channel_id=ShortChannelID.from_components(54321, 111, 2),
channel_flags=b'\x00',
message_flags=b'\x01',
cltv_expiry_delta=144,
htlc_minimum_msat=200,
htlc_maximum_msat=1_000_000_000,
fee_base_msat=500,
fee_proportional_millionths=35,
chain_hash=constants.net.rev_genesis_bytes(),
timestamp=1584320643,
))
self.assertEqual(('channel_update',
{'chain_hash': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00',
'channel_flags': b'\x00',
'cltv_expiry_delta': 144,
'fee_base_msat': 500,
'fee_proportional_millionths': 35,
'htlc_maximum_msat': 1000000000,
'htlc_minimum_msat': 200,
'message_flags': b'\x01',
'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02',
'signature': bytes(64),
'timestamp': 1584320643}
),
decode_msg(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00")))
def test_encode_decode_msg__missing_optional_field_will_not_appear_in_decoded_dict(self):
# "channel_update": optional field "htlc_maximum_msat" missing -> does not get put into dict
self.assertEqual(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023"),
encode_msg(
"channel_update",
short_channel_id=ShortChannelID.from_components(54321, 111, 2),
channel_flags=b'\x00',
message_flags=b'\x01',
cltv_expiry_delta=144,
htlc_minimum_msat=200,
fee_base_msat=500,
fee_proportional_millionths=35,
chain_hash=constants.net.rev_genesis_bytes(),
timestamp=1584320643,
))
self.assertEqual(('channel_update',
{'chain_hash': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00',
'channel_flags': b'\x00',
'cltv_expiry_delta': 144,
'fee_base_msat': 500,
'fee_proportional_millionths': 35,
'htlc_minimum_msat': 200,
'message_flags': b'\x01',
'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02',
'signature': bytes(64),
'timestamp': 1584320643}
),
decode_msg(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023")))
def test_encode_decode_msg__ints_can_be_passed_as_bytes(self):
self.assertEqual(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00"),
encode_msg(
"channel_update",
short_channel_id=ShortChannelID.from_components(54321, 111, 2),
channel_flags=b'\x00',
message_flags=b'\x01',
cltv_expiry_delta=int.to_bytes(144, length=2, byteorder="big", signed=False),
htlc_minimum_msat=int.to_bytes(200, length=8, byteorder="big", signed=False),
htlc_maximum_msat=int.to_bytes(1_000_000_000, length=8, byteorder="big", signed=False),
fee_base_msat=int.to_bytes(500, length=4, byteorder="big", signed=False),
fee_proportional_millionths=int.to_bytes(35, length=4, byteorder="big", signed=False),
chain_hash=constants.net.rev_genesis_bytes(),
timestamp=int.to_bytes(1584320643, length=4, byteorder="big", signed=False),
))
self.assertEqual(('channel_update',
{'chain_hash': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00',
'channel_flags': b'\x00',
'cltv_expiry_delta': 144,
'fee_base_msat': 500,
'fee_proportional_millionths': 35,
'htlc_maximum_msat': 1000000000,
'htlc_minimum_msat': 200,
'message_flags': b'\x01',
'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02',
'signature': bytes(64),
'timestamp': 1584320643}
),
decode_msg(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00")))
# "htlc_minimum_msat" is passed as bytes but with incorrect length
with self.assertRaises(UnexpectedFieldSizeForEncoder):
encode_msg(
"channel_update",
short_channel_id=ShortChannelID.from_components(54321, 111, 2),
channel_flags=b'\x00',
message_flags=b'\x01',
cltv_expiry_delta=int.to_bytes(144, length=2, byteorder="big", signed=False),
htlc_minimum_msat=int.to_bytes(200, length=4, byteorder="big", signed=False),
htlc_maximum_msat=int.to_bytes(1_000_000_000, length=8, byteorder="big", signed=False),
fee_base_msat=int.to_bytes(500, length=4, byteorder="big", signed=False),
fee_proportional_millionths=int.to_bytes(35, length=4, byteorder="big", signed=False),
chain_hash=constants.net.rev_genesis_bytes(),
timestamp=int.to_bytes(1584320643, length=4, byteorder="big", signed=False),
)
def test_encode_decode_msg__commitment_signed(self):
# "commitment_signed" is interesting because of the "htlc_signature" field,
# which is a concatenation of multiple ("num_htlcs") signatures.
# 5 htlcs
self.assertEqual(bfh("0084010101010101010101010101010101010101010101010101010101010101010106112951d0a6d7fc1dbca3bd1cdbda9acfee7f668b3c0a36bd944f7e2f305b274ba46a61279e15163b2d376c664bb3481d7c5e107a5b268301e39aebbda27d2d00056548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542"),
encode_msg(
"commitment_signed",
channel_id=b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
signature=b"\x06\x11)Q\xd0\xa6\xd7\xfc\x1d\xbc\xa3\xbd\x1c\xdb\xda\x9a\xcf\xee\x7ff\x8b<\n6\xbd\x94O~/0['K\xa4ja'\x9e\x15\x16;-7lfK\xb3H\x1d|^\x10z[&\x83\x01\xe3\x9a\xeb\xbd\xa2}-",
num_htlcs=5,
htlc_signature=bfh("6548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542"),
))
self.assertEqual(('commitment_signed',
{'channel_id': b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
'signature': b"\x06\x11)Q\xd0\xa6\xd7\xfc\x1d\xbc\xa3\xbd\x1c\xdb\xda\x9a\xcf\xee\x7ff\x8b<\n6\xbd\x94O~/0['K\xa4ja'\x9e\x15\x16;-7lfK\xb3H\x1d|^\x10z[&\x83\x01\xe3\x9a\xeb\xbd\xa2}-",
'num_htlcs': 5,
'htlc_signature': bfh("6548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542")}
),
decode_msg(bfh("0084010101010101010101010101010101010101010101010101010101010101010106112951d0a6d7fc1dbca3bd1cdbda9acfee7f668b3c0a36bd944f7e2f305b274ba46a61279e15163b2d376c664bb3481d7c5e107a5b268301e39aebbda27d2d00056548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542")))
# single htlc
self.assertEqual(bfh("008401010101010101010101010101010101010101010101010101010101010101013b14af0c549dfb1fb287ff57c012371b3932996db5929eda5f251704751fb49d0dc2dcb88e5021575cb572fb71693758543f97d89e9165f913bfb7488d7cc26500012d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a"),
encode_msg(
"commitment_signed",
channel_id=b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
signature=b';\x14\xaf\x0cT\x9d\xfb\x1f\xb2\x87\xffW\xc0\x127\x1b92\x99m\xb5\x92\x9e\xda_%\x17\x04u\x1f\xb4\x9d\r\xc2\xdc\xb8\x8eP!W\\\xb5r\xfbqi7XT?\x97\xd8\x9e\x91e\xf9\x13\xbf\xb7H\x8d|\xc2e',
num_htlcs=1,
htlc_signature=bfh("2d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a"),
))
self.assertEqual(('commitment_signed',
{'channel_id': b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
'signature': b';\x14\xaf\x0cT\x9d\xfb\x1f\xb2\x87\xffW\xc0\x127\x1b92\x99m\xb5\x92\x9e\xda_%\x17\x04u\x1f\xb4\x9d\r\xc2\xdc\xb8\x8eP!W\\\xb5r\xfbqi7XT?\x97\xd8\x9e\x91e\xf9\x13\xbf\xb7H\x8d|\xc2e',
'num_htlcs': 1,
'htlc_signature': bfh("2d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a")}
),
decode_msg(bfh("008401010101010101010101010101010101010101010101010101010101010101013b14af0c549dfb1fb287ff57c012371b3932996db5929eda5f251704751fb49d0dc2dcb88e5021575cb572fb71693758543f97d89e9165f913bfb7488d7cc26500012d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a")))
# zero htlcs
self.assertEqual(bfh("008401010101010101010101010101010101010101010101010101010101010101014e206ecf904d9237b1c5b4e08513555e9a5932c45b5f68be8764ce998df635ae04f6ce7bbcd3b4fd08e2daab7f9059b287ecab4155367b834682633497173f450000"),
encode_msg(
"commitment_signed",
channel_id=b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
signature=b'N n\xcf\x90M\x927\xb1\xc5\xb4\xe0\x85\x13U^\x9aY2\xc4[_h\xbe\x87d\xce\x99\x8d\xf65\xae\x04\xf6\xce{\xbc\xd3\xb4\xfd\x08\xe2\xda\xab\x7f\x90Y\xb2\x87\xec\xabAU6{\x83F\x82c4\x97\x17?E',
num_htlcs=0,
htlc_signature=bfh(""),
))
self.assertEqual(('commitment_signed',
{'channel_id': b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
'signature': b'N n\xcf\x90M\x927\xb1\xc5\xb4\xe0\x85\x13U^\x9aY2\xc4[_h\xbe\x87d\xce\x99\x8d\xf65\xae\x04\xf6\xce{\xbc\xd3\xb4\xfd\x08\xe2\xda\xab\x7f\x90Y\xb2\x87\xec\xabAU6{\x83F\x82c4\x97\x17?E',
'num_htlcs': 0,
'htlc_signature': bfh("")}
),
decode_msg(bfh("008401010101010101010101010101010101010101010101010101010101010101014e206ecf904d9237b1c5b4e08513555e9a5932c45b5f68be8764ce998df635ae04f6ce7bbcd3b4fd08e2daab7f9059b287ecab4155367b834682633497173f450000")))
def test_encode_decode_msg__init(self):
# "init" is interesting because it has TLVs optionally
self.assertEqual(bfh("00100000000220c2"),
encode_msg(
"init",
gflen=0,
flen=2,
features=(LnFeatures.OPTION_STATIC_REMOTEKEY_OPT |
LnFeatures.GOSSIP_QUERIES_OPT |
LnFeatures.GOSSIP_QUERIES_REQ |
LnFeatures.OPTION_DATA_LOSS_PROTECT_OPT),
))
self.assertEqual(bfh("00100000000220c2"),
encode_msg("init", gflen=0, flen=2, features=bfh("20c2")))
self.assertEqual(bfh("00100000000220c2012043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000"),
encode_msg(
"init",
gflen=0,
flen=2,
features=(LnFeatures.OPTION_STATIC_REMOTEKEY_OPT |
LnFeatures.GOSSIP_QUERIES_OPT |
LnFeatures.GOSSIP_QUERIES_REQ |
LnFeatures.OPTION_DATA_LOSS_PROTECT_OPT),
init_tlvs={
'networks':
{'chains': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00'}
}
))
self.assertEqual(('init',
{'gflen': 2,
'globalfeatures': b'"\x00',
'flen': 3,
'features': b'\x02\xa2\xa1',
'init_tlvs': {}}
),
decode_msg(bfh("001000022200000302a2a1")))
self.assertEqual(('init',
{'gflen': 2,
'globalfeatures': b'"\x00',
'flen': 3,
'features': b'\x02\xaa\xa2',
'init_tlvs': {
'networks':
{'chains': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00'}
}}),
decode_msg(bfh("001000022200000302aaa2012043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000")))
def test_decode_onion_error(self):
orf = OnionRoutingFailure.from_bytes(bfh("400f0000000017d2d8b0001d9458"))
self.assertEqual(('incorrect_or_unknown_payment_details', {'htlc_msat': 399694000, 'height': 1938520}),
OnionWireSerializer.decode_msg(orf.to_bytes()))
self.assertEqual({'htlc_msat': 399694000, 'height': 1938520},
orf.decode_data())
orf2 = OnionRoutingFailure(26399, bytes.fromhex("0000000017d2d8b0001d9458"))
with self.assertRaises(UnknownMsgType):
OnionWireSerializer.decode_msg(orf2.to_bytes())
self.assertEqual(None, orf2.decode_data())
| 83.855362
| 885
| 0.67195
|
import io
from electrum.lnmsg import (read_bigsize_int, write_bigsize_int, FieldEncodingNotMinimal,
UnexpectedEndOfStream, LNSerializer, UnknownMandatoryTLVRecordType,
MalformedMsg, MsgTrailingGarbage, MsgInvalidFieldOrder, encode_msg,
decode_msg, UnexpectedFieldSizeForEncoder, OnionWireSerializer,
UnknownMsgType)
from electrum.lnonion import OnionRoutingFailure
from electrum.util import bfh
from electrum.lnutil import ShortChannelID, LnFeatures
from electrum import constants
from . import TestCaseForTestnet
class TestLNMsg(TestCaseForTestnet):
def test_write_bigsize_int(self):
self.assertEqual(bfh("00"), write_bigsize_int(0))
self.assertEqual(bfh("fc"), write_bigsize_int(252))
self.assertEqual(bfh("fd00fd"), write_bigsize_int(253))
self.assertEqual(bfh("fdffff"), write_bigsize_int(65535))
self.assertEqual(bfh("fe00010000"), write_bigsize_int(65536))
self.assertEqual(bfh("feffffffff"), write_bigsize_int(4294967295))
self.assertEqual(bfh("ff0000000100000000"), write_bigsize_int(4294967296))
self.assertEqual(bfh("ffffffffffffffffff"), write_bigsize_int(18446744073709551615))
def test_read_bigsize_int(self):
self.assertEqual(0, read_bigsize_int(io.BytesIO(bfh("00"))))
self.assertEqual(252, read_bigsize_int(io.BytesIO(bfh("fc"))))
self.assertEqual(253, read_bigsize_int(io.BytesIO(bfh("fd00fd"))))
self.assertEqual(65535, read_bigsize_int(io.BytesIO(bfh("fdffff"))))
self.assertEqual(65536, read_bigsize_int(io.BytesIO(bfh("fe00010000"))))
self.assertEqual(4294967295, read_bigsize_int(io.BytesIO(bfh("feffffffff"))))
self.assertEqual(4294967296, read_bigsize_int(io.BytesIO(bfh("ff0000000100000000"))))
self.assertEqual(18446744073709551615, read_bigsize_int(io.BytesIO(bfh("ffffffffffffffffff"))))
with self.assertRaises(FieldEncodingNotMinimal):
read_bigsize_int(io.BytesIO(bfh("fd00fc")))
with self.assertRaises(FieldEncodingNotMinimal):
read_bigsize_int(io.BytesIO(bfh("fe0000ffff")))
with self.assertRaises(FieldEncodingNotMinimal):
read_bigsize_int(io.BytesIO(bfh("ff00000000ffffffff")))
with self.assertRaises(UnexpectedEndOfStream):
read_bigsize_int(io.BytesIO(bfh("fd00")))
with self.assertRaises(UnexpectedEndOfStream):
read_bigsize_int(io.BytesIO(bfh("feffff")))
with self.assertRaises(UnexpectedEndOfStream):
read_bigsize_int(io.BytesIO(bfh("ffffffffff")))
self.assertEqual(None, read_bigsize_int(io.BytesIO(bfh(""))))
with self.assertRaises(UnexpectedEndOfStream):
read_bigsize_int(io.BytesIO(bfh("fd")))
with self.assertRaises(UnexpectedEndOfStream):
read_bigsize_int(io.BytesIO(bfh("fe")))
with self.assertRaises(UnexpectedEndOfStream):
read_bigsize_int(io.BytesIO(bfh("ff")))
def test_read_tlv_stream_tests1(self):
ializer()
for tlv_stream_name in ("n1", "n2"):
with self.subTest(tlv_stream_name=tlv_stream_name):
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd01")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd000100")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd0101")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd26")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd2602")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd000100")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd0201000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")), tlv_stream_name="n1")
with self.assertRaises(UnknownMandatoryTLVRecordType):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("1200")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnknownMandatoryTLVRecordType):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd010200")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnknownMandatoryTLVRecordType):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fe0100000200")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(UnknownMandatoryTLVRecordType):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("ff010000000000000200")), tlv_stream_name=tlv_stream_name)
with self.assertRaises(MsgTrailingGarbage):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0109ffffffffffffffffff")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("010100")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("01020001")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0103000100")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("010400010000")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("01050001000000")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0106000100000000")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("010700010000000000")), tlv_stream_name="n1")
with self.assertRaises(FieldEncodingNotMinimal):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("01080001000000000000")), tlv_stream_name="n1")
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("020701010101010101")), tlv_stream_name="n1")
with self.assertRaises(MsgTrailingGarbage):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0209010101010101010101")), tlv_stream_name="n1")
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0321023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb")), tlv_stream_name="n1")
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0329023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001")), tlv_stream_name="n1")
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0330023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb000000000000000100000000000001")), tlv_stream_name="n1")
with self.assertRaises(MsgTrailingGarbage):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0332023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001000000000000000001")), tlv_stream_name="n1")
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe00")), tlv_stream_name="n1")
with self.assertRaises(UnexpectedEndOfStream):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe0101")), tlv_stream_name="n1")
with self.assertRaises(MsgTrailingGarbage):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe03010101")), tlv_stream_name="n1")
with self.assertRaises(UnknownMandatoryTLVRecordType):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0000")), tlv_stream_name="n1")
def test_read_tlv_stream_tests2(self):
alizer()
for tlv_stream_name in ("n1", "n2"):
with self.subTest(tlv_stream_name=tlv_stream_name):
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("")), tlv_stream_name=tlv_stream_name))
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("2100")), tlv_stream_name=tlv_stream_name))
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd020100")), tlv_stream_name=tlv_stream_name))
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fd00")), tlv_stream_name=tlv_stream_name))
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00ff00")), tlv_stream_name=tlv_stream_name))
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fe0200000100")), tlv_stream_name=tlv_stream_name))
self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("ff020000000000000100")), tlv_stream_name=tlv_stream_name))
self.assertEqual({"tlv1": {"amount_msat": 0}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0100")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 1}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("010101")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 256}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("01020100")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 65536}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0103010000")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 16777216}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("010401000000")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 4294967296}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("01050100000000")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 1099511627776}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0106010000000000")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 281474976710656}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("010701000000000000")), tlv_stream_name="n1"))
self.assertEqual({"tlv1": {"amount_msat": 72057594037927936}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("01080100000000000000")), tlv_stream_name="n1"))
self.assertEqual({"tlv2": {"scid": ShortChannelID.from_components(0, 0, 550)}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("02080000000000000226")), tlv_stream_name="n1"))
self.assertEqual({"tlv3": {"node_id": bfh("023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb"),
"amount_msat_1": 1,
"amount_msat_2": 2}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0331023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002")), tlv_stream_name="n1"))
self.assertEqual({"tlv4": {"cltv_delta": 550}},
lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe020226")), tlv_stream_name="n1"))
def test_read_tlv_stream_tests3(self):
r()
with self.assertRaises(MsgInvalidFieldOrder):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0208000000000000022601012a")), tlv_stream_name="n1")
with self.assertRaises(MsgInvalidFieldOrder):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0208000000000000023102080000000000000451")), tlv_stream_name="n1")
with self.assertRaises(MsgInvalidFieldOrder):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("1f000f012a")), tlv_stream_name="n1")
with self.assertRaises(MsgInvalidFieldOrder):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("1f001f012a")), tlv_stream_name="n1")
with self.assertRaises(MsgInvalidFieldOrder):
lnser.read_tlv_stream(fd=io.BytesIO(bfh("ffffffffffffffffff000000")), tlv_stream_name="n2")
def test_encode_decode_msg__missing_mandatory_field_gets_set_to_zeroes(self):
self.assertEqual(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00"),
encode_msg(
"channel_update",
short_channel_id=ShortChannelID.from_components(54321, 111, 2),
channel_flags=b'\x00',
message_flags=b'\x01',
cltv_expiry_delta=144,
htlc_minimum_msat=200,
htlc_maximum_msat=1_000_000_000,
fee_base_msat=500,
fee_proportional_millionths=35,
chain_hash=constants.net.rev_genesis_bytes(),
timestamp=1584320643,
))
self.assertEqual(('channel_update',
{'chain_hash': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00',
'channel_flags': b'\x00',
'cltv_expiry_delta': 144,
'fee_base_msat': 500,
'fee_proportional_millionths': 35,
'htlc_maximum_msat': 1000000000,
'htlc_minimum_msat': 200,
'message_flags': b'\x01',
'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02',
'signature': bytes(64),
'timestamp': 1584320643}
),
decode_msg(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00")))
def test_encode_decode_msg__missing_optional_field_will_not_appear_in_decoded_dict(self):
self.assertEqual(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023"),
encode_msg(
"channel_update",
short_channel_id=ShortChannelID.from_components(54321, 111, 2),
channel_flags=b'\x00',
message_flags=b'\x01',
cltv_expiry_delta=144,
htlc_minimum_msat=200,
fee_base_msat=500,
fee_proportional_millionths=35,
chain_hash=constants.net.rev_genesis_bytes(),
timestamp=1584320643,
))
self.assertEqual(('channel_update',
{'chain_hash': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00',
'channel_flags': b'\x00',
'cltv_expiry_delta': 144,
'fee_base_msat': 500,
'fee_proportional_millionths': 35,
'htlc_minimum_msat': 200,
'message_flags': b'\x01',
'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02',
'signature': bytes(64),
'timestamp': 1584320643}
),
decode_msg(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023")))
def test_encode_decode_msg__ints_can_be_passed_as_bytes(self):
self.assertEqual(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00"),
encode_msg(
"channel_update",
short_channel_id=ShortChannelID.from_components(54321, 111, 2),
channel_flags=b'\x00',
message_flags=b'\x01',
cltv_expiry_delta=int.to_bytes(144, length=2, byteorder="big", signed=False),
htlc_minimum_msat=int.to_bytes(200, length=8, byteorder="big", signed=False),
htlc_maximum_msat=int.to_bytes(1_000_000_000, length=8, byteorder="big", signed=False),
fee_base_msat=int.to_bytes(500, length=4, byteorder="big", signed=False),
fee_proportional_millionths=int.to_bytes(35, length=4, byteorder="big", signed=False),
chain_hash=constants.net.rev_genesis_bytes(),
timestamp=int.to_bytes(1584320643, length=4, byteorder="big", signed=False),
))
self.assertEqual(('channel_update',
{'chain_hash': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00',
'channel_flags': b'\x00',
'cltv_expiry_delta': 144,
'fee_base_msat': 500,
'fee_proportional_millionths': 35,
'htlc_maximum_msat': 1000000000,
'htlc_minimum_msat': 200,
'message_flags': b'\x01',
'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02',
'signature': bytes(64),
'timestamp': 1584320643}
),
decode_msg(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00")))
with self.assertRaises(UnexpectedFieldSizeForEncoder):
encode_msg(
"channel_update",
short_channel_id=ShortChannelID.from_components(54321, 111, 2),
channel_flags=b'\x00',
message_flags=b'\x01',
cltv_expiry_delta=int.to_bytes(144, length=2, byteorder="big", signed=False),
htlc_minimum_msat=int.to_bytes(200, length=4, byteorder="big", signed=False),
htlc_maximum_msat=int.to_bytes(1_000_000_000, length=8, byteorder="big", signed=False),
fee_base_msat=int.to_bytes(500, length=4, byteorder="big", signed=False),
fee_proportional_millionths=int.to_bytes(35, length=4, byteorder="big", signed=False),
chain_hash=constants.net.rev_genesis_bytes(),
timestamp=int.to_bytes(1584320643, length=4, byteorder="big", signed=False),
)
    def test_encode_decode_msg__commitment_signed(self):
        """Round-trip encode/decode of BOLT-2 ``commitment_signed`` messages.

        Exercises three cases that differ only in the number of HTLC
        signatures carried: five, one, and zero.  The expected wire bytes are
        fixed test vectors; ``num_htlcs`` must match the length of
        ``htlc_signature`` (64 bytes per HTLC).
        """
        # Case 1: five HTLC signatures appended after the commitment signature.
        self.assertEqual(bfh("0084010101010101010101010101010101010101010101010101010101010101010106112951d0a6d7fc1dbca3bd1cdbda9acfee7f668b3c0a36bd944f7e2f305b274ba46a61279e15163b2d376c664bb3481d7c5e107a5b268301e39aebbda27d2d00056548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542"),
                         encode_msg(
                             "commitment_signed",
                             channel_id=b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
                             signature=b"\x06\x11)Q\xd0\xa6\xd7\xfc\x1d\xbc\xa3\xbd\x1c\xdb\xda\x9a\xcf\xee\x7ff\x8b<\n6\xbd\x94O~/0['K\xa4ja'\x9e\x15\x16;-7lfK\xb3H\x1d|^\x10z[&\x83\x01\xe3\x9a\xeb\xbd\xa2}-",
                             num_htlcs=5,
                             htlc_signature=bfh("6548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542"),
                         ))
        # Decoding the same bytes must reproduce the field dict exactly.
        self.assertEqual(('commitment_signed',
                          {'channel_id': b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
                           'signature': b"\x06\x11)Q\xd0\xa6\xd7\xfc\x1d\xbc\xa3\xbd\x1c\xdb\xda\x9a\xcf\xee\x7ff\x8b<\n6\xbd\x94O~/0['K\xa4ja'\x9e\x15\x16;-7lfK\xb3H\x1d|^\x10z[&\x83\x01\xe3\x9a\xeb\xbd\xa2}-",
                           'num_htlcs': 5,
                           'htlc_signature': bfh("6548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542")}
                          ),
                         decode_msg(bfh("0084010101010101010101010101010101010101010101010101010101010101010106112951d0a6d7fc1dbca3bd1cdbda9acfee7f668b3c0a36bd944f7e2f305b274ba46a61279e15163b2d376c664bb3481d7c5e107a5b268301e39aebbda27d2d00056548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542")))
        # Case 2: a single HTLC signature.
        self.assertEqual(bfh("008401010101010101010101010101010101010101010101010101010101010101013b14af0c549dfb1fb287ff57c012371b3932996db5929eda5f251704751fb49d0dc2dcb88e5021575cb572fb71693758543f97d89e9165f913bfb7488d7cc26500012d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a"),
                         encode_msg(
                             "commitment_signed",
                             channel_id=b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
                             signature=b';\x14\xaf\x0cT\x9d\xfb\x1f\xb2\x87\xffW\xc0\x127\x1b92\x99m\xb5\x92\x9e\xda_%\x17\x04u\x1f\xb4\x9d\r\xc2\xdc\xb8\x8eP!W\\\xb5r\xfbqi7XT?\x97\xd8\x9e\x91e\xf9\x13\xbf\xb7H\x8d|\xc2e',
                             num_htlcs=1,
                             htlc_signature=bfh("2d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a"),
                         ))
        self.assertEqual(('commitment_signed',
                          {'channel_id': b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
                           'signature': b';\x14\xaf\x0cT\x9d\xfb\x1f\xb2\x87\xffW\xc0\x127\x1b92\x99m\xb5\x92\x9e\xda_%\x17\x04u\x1f\xb4\x9d\r\xc2\xdc\xb8\x8eP!W\\\xb5r\xfbqi7XT?\x97\xd8\x9e\x91e\xf9\x13\xbf\xb7H\x8d|\xc2e',
                           'num_htlcs': 1,
                           'htlc_signature': bfh("2d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a")}
                          ),
                         decode_msg(bfh("008401010101010101010101010101010101010101010101010101010101010101013b14af0c549dfb1fb287ff57c012371b3932996db5929eda5f251704751fb49d0dc2dcb88e5021575cb572fb71693758543f97d89e9165f913bfb7488d7cc26500012d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a")))
        # Case 3: zero HTLC signatures — htlc_signature is the empty byte string.
        self.assertEqual(bfh("008401010101010101010101010101010101010101010101010101010101010101014e206ecf904d9237b1c5b4e08513555e9a5932c45b5f68be8764ce998df635ae04f6ce7bbcd3b4fd08e2daab7f9059b287ecab4155367b834682633497173f450000"),
                         encode_msg(
                             "commitment_signed",
                             channel_id=b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
                             signature=b'N n\xcf\x90M\x927\xb1\xc5\xb4\xe0\x85\x13U^\x9aY2\xc4[_h\xbe\x87d\xce\x99\x8d\xf65\xae\x04\xf6\xce{\xbc\xd3\xb4\xfd\x08\xe2\xda\xab\x7f\x90Y\xb2\x87\xec\xabAU6{\x83F\x82c4\x97\x17?E',
                             num_htlcs=0,
                             htlc_signature=bfh(""),
                         ))
        self.assertEqual(('commitment_signed',
                          {'channel_id': b'\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01',
                           'signature': b'N n\xcf\x90M\x927\xb1\xc5\xb4\xe0\x85\x13U^\x9aY2\xc4[_h\xbe\x87d\xce\x99\x8d\xf65\xae\x04\xf6\xce{\xbc\xd3\xb4\xfd\x08\xe2\xda\xab\x7f\x90Y\xb2\x87\xec\xabAU6{\x83F\x82c4\x97\x17?E',
                           'num_htlcs': 0,
                           'htlc_signature': bfh("")}
                          ),
                         decode_msg(bfh("008401010101010101010101010101010101010101010101010101010101010101014e206ecf904d9237b1c5b4e08513555e9a5932c45b5f68be8764ce998df635ae04f6ce7bbcd3b4fd08e2daab7f9059b287ecab4155367b834682633497173f450000")))
    def test_encode_decode_msg__init(self):
        """Round-trip encode/decode of BOLT-1 ``init`` messages.

        Checks that ``features`` may be given either as an LnFeatures flag
        combination or as raw bytes (both must serialize identically), and
        that the optional ``networks`` TLV record round-trips.
        """
        # Features given as an LnFeatures bitmask...
        self.assertEqual(bfh("00100000000220c2"),
                         encode_msg(
                             "init",
                             gflen=0,
                             flen=2,
                             features=(LnFeatures.OPTION_STATIC_REMOTEKEY_OPT |
                                       LnFeatures.GOSSIP_QUERIES_OPT |
                                       LnFeatures.GOSSIP_QUERIES_REQ |
                                       LnFeatures.OPTION_DATA_LOSS_PROTECT_OPT),
                         ))
        # ...and as the equivalent raw feature bytes must encode the same.
        self.assertEqual(bfh("00100000000220c2"),
                         encode_msg("init", gflen=0, flen=2, features=bfh("20c2")))
        # With the 'networks' TLV appended (type 0x01, containing a chain hash).
        self.assertEqual(bfh("00100000000220c2012043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000"),
                         encode_msg(
                             "init",
                             gflen=0,
                             flen=2,
                             features=(LnFeatures.OPTION_STATIC_REMOTEKEY_OPT |
                                       LnFeatures.GOSSIP_QUERIES_OPT |
                                       LnFeatures.GOSSIP_QUERIES_REQ |
                                       LnFeatures.OPTION_DATA_LOSS_PROTECT_OPT),
                             init_tlvs={
                                 'networks':
                                     {'chains': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00'}
                             }
                         ))
        # Decoding without TLVs yields an empty init_tlvs dict.
        self.assertEqual(('init',
                          {'gflen': 2,
                           'globalfeatures': b'"\x00',
                           'flen': 3,
                           'features': b'\x02\xa2\xa1',
                           'init_tlvs': {}}
                          ),
                         decode_msg(bfh("001000022200000302a2a1")))
        # Decoding with a trailing 'networks' TLV populates init_tlvs.
        self.assertEqual(('init',
                          {'gflen': 2,
                           'globalfeatures': b'"\x00',
                           'flen': 3,
                           'features': b'\x02\xaa\xa2',
                           'init_tlvs': {
                               'networks':
                                   {'chains': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00'}
                           }}),
                         decode_msg(bfh("001000022200000302aaa2012043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000")))
def test_decode_onion_error(self):
orf = OnionRoutingFailure.from_bytes(bfh("400f0000000017d2d8b0001d9458"))
self.assertEqual(('incorrect_or_unknown_payment_details', {'htlc_msat': 399694000, 'height': 1938520}),
OnionWireSerializer.decode_msg(orf.to_bytes()))
self.assertEqual({'htlc_msat': 399694000, 'height': 1938520},
orf.decode_data())
orf2 = OnionRoutingFailure(26399, bytes.fromhex("0000000017d2d8b0001d9458"))
with self.assertRaises(UnknownMsgType):
OnionWireSerializer.decode_msg(orf2.to_bytes())
self.assertEqual(None, orf2.decode_data())
| true
| true
|
790a484b4e43905b871c0f0d6ed2d132d3772adb
| 1,181
|
py
|
Python
|
tests/test_xshg_calendar.py
|
schild/exchange_calendars
|
7401ef1182af2af8a8f62acf03a48ae8091a8c88
|
[
"Apache-2.0"
] | 6
|
2021-01-15T22:44:09.000Z
|
2021-01-25T09:33:39.000Z
|
tests/test_xshg_calendar.py
|
gerrymanoim/trading_calendars
|
7401ef1182af2af8a8f62acf03a48ae8091a8c88
|
[
"Apache-2.0"
] | 3
|
2021-01-14T04:25:55.000Z
|
2021-01-28T04:22:42.000Z
|
tests/test_xshg_calendar.py
|
schild/exchange_calendars
|
7401ef1182af2af8a8f62acf03a48ae8091a8c88
|
[
"Apache-2.0"
] | 1
|
2021-01-20T18:37:07.000Z
|
2021-01-20T18:37:07.000Z
|
import pytest
from exchange_calendars.exchange_calendar_xshg import XSHGExchangeCalendar
from .test_exchange_calendar import ExchangeCalendarTestBase
from .test_utils import T
class TestXSHGCalendar(ExchangeCalendarTestBase):
    """Calendar test suite for XSHG (Shanghai Stock Exchange).

    Supplies the fixtures consumed by ``ExchangeCalendarTestBase``.
    """
    @pytest.fixture(scope="class")
    def calendar_cls(self):
        yield XSHGExchangeCalendar

    @pytest.fixture
    def max_session_hours(self):
        # The Shanghai Stock Exchange is open from 9:30 am to 3 pm.
        yield 5.5

    @pytest.fixture
    def start_bound(self):
        yield T("1999-01-01")

    @pytest.fixture
    def end_bound(self):
        yield T("2025-12-31")

    @pytest.fixture
    def regular_holidays_sample(self):
        # Spot-check dates (2017 plus one 2020 session) that the calendar
        # must treat as holidays.
        sample = [
            "2017-01-02",
            "2017-01-27",
            "2017-01-30",
            "2017-01-31",
            "2017-02-01",
            "2017-02-02",
            "2017-04-03",
            "2017-04-04",
            "2017-05-01",
            "2017-05-29",
            "2017-05-30",
            "2017-10-02",
            "2017-10-03",
            "2017-10-04",
            "2017-10-05",
            "2017-10-06",
        ]
        sample.append("2020-01-31")
        yield sample
| 23.62
| 74
| 0.538527
|
import pytest
from exchange_calendars.exchange_calendar_xshg import XSHGExchangeCalendar
from .test_exchange_calendar import ExchangeCalendarTestBase
from .test_utils import T
class TestXSHGCalendar(ExchangeCalendarTestBase):
    """Calendar test suite for XSHG (Shanghai Stock Exchange)."""
    @pytest.fixture(scope="class")
    def calendar_cls(self):
        # Calendar implementation under test.
        yield XSHGExchangeCalendar
    @pytest.fixture
    def max_session_hours(self):
        # Shanghai Stock Exchange is open from 9:30 am to 3 pm -> 5.5 hours.
        yield 5.5
    @pytest.fixture
    def start_bound(self):
        yield T("1999-01-01")
    @pytest.fixture
    def end_bound(self):
        yield T("2025-12-31")
    @pytest.fixture
    def regular_holidays_sample(self):
        # Spot-check dates (2017, plus one 2020 date) expected to be holidays.
        yield [
            "2017-01-02",
            "2017-01-27",
            "2017-01-30",
            "2017-01-31",
            "2017-02-01",
            "2017-02-02",
            "2017-04-03",
            "2017-04-04",
            "2017-05-01",
            "2017-05-29",
            "2017-05-30",
            "2017-10-02",
            "2017-10-03",
            "2017-10-04",
            "2017-10-05",
            "2017-10-06",
            "2020-01-31"
        ]
| true
| true
|
790a48b1e544425c8bf354a0e74a112e8801c522
| 636
|
py
|
Python
|
manage.py
|
justindjeumenet/secure_auth_rest
|
63eab9f41c386b2998b0268f3bc5b07f45a1d8ee
|
[
"MIT"
] | null | null | null |
manage.py
|
justindjeumenet/secure_auth_rest
|
63eab9f41c386b2998b0268f3bc5b07f45a1d8ee
|
[
"MIT"
] | null | null | null |
manage.py
|
justindjeumenet/secure_auth_rest
|
63eab9f41c386b2998b0268f3bc5b07f45a1d8ee
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Entry point: point Django at the project settings and dispatch the
    command line to Django's management machinery."""
    # An explicit DJANGO_SETTINGS_MODULE in the environment takes precedence.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'secure_auth_rest.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| 28.909091
| 80
| 0.687107
|
import os
import sys
def main():
    """Run administrative tasks via Django's command-line interface."""
    # Default the settings module; an explicit environment value wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'secure_auth_rest.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Chain the original error so the real cause stays visible.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
# Entry point when executed as a script.
if __name__ == '__main__':
    main()
| true
| true
|
790a492a5ee328e07d44e3b32d8ed26d28ddde38
| 14,476
|
py
|
Python
|
sphinx/util/cfamily.py
|
jeromekelleher/sphinx
|
2e506c5ab457cba743bb47eb5b8c8eb9dd51d23d
|
[
"BSD-2-Clause"
] | null | null | null |
sphinx/util/cfamily.py
|
jeromekelleher/sphinx
|
2e506c5ab457cba743bb47eb5b8c8eb9dd51d23d
|
[
"BSD-2-Clause"
] | null | null | null |
sphinx/util/cfamily.py
|
jeromekelleher/sphinx
|
2e506c5ab457cba743bb47eb5b8c8eb9dd51d23d
|
[
"BSD-2-Clause"
] | null | null | null |
"""
sphinx.util.cfamily
~~~~~~~~~~~~~~~~~~~
Utility functions common to the C and C++ domains.
:copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import warnings
from copy import deepcopy
from typing import (
Any, Callable, List, Match, Pattern, Tuple, Union
)
from docutils import nodes
from docutils.nodes import TextElement
from sphinx.config import Config
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.util import logging
logger = logging.getLogger(__name__)
StringifyTransform = Callable[[Any], str]
_whitespace_re = re.compile(r'(?u)\s+')
anon_identifier_re = re.compile(r'(@[a-zA-Z0-9_])[a-zA-Z0-9_]*\b')
identifier_re = re.compile(r'''(?x)
( # This 'extends' _anon_identifier_re with the ordinary identifiers,
# make sure they are in sync.
(~?\b[a-zA-Z_]) # ordinary identifiers
| (@[a-zA-Z0-9_]) # our extension for names of anonymous entities
)
[a-zA-Z0-9_]*\b
''')
integer_literal_re = re.compile(r'[1-9][0-9]*')
octal_literal_re = re.compile(r'0[0-7]*')
hex_literal_re = re.compile(r'0[xX][0-9a-fA-F][0-9a-fA-F]*')
binary_literal_re = re.compile(r'0[bB][01][01]*')
float_literal_re = re.compile(r'''(?x)
[+-]?(
# decimal
([0-9]+[eE][+-]?[0-9]+)
| ([0-9]*\.[0-9]+([eE][+-]?[0-9]+)?)
| ([0-9]+\.([eE][+-]?[0-9]+)?)
# hex
| (0[xX][0-9a-fA-F]+[pP][+-]?[0-9a-fA-F]+)
| (0[xX][0-9a-fA-F]*\.[0-9a-fA-F]+([pP][+-]?[0-9a-fA-F]+)?)
| (0[xX][0-9a-fA-F]+\.([pP][+-]?[0-9a-fA-F]+)?)
)
''')
char_literal_re = re.compile(r'''(?x)
((?:u8)|u|U|L)?
'(
(?:[^\\'])
| (\\(
(?:['"?\\abfnrtv])
| (?:[0-7]{1,3})
| (?:x[0-9a-fA-F]{2})
| (?:u[0-9a-fA-F]{4})
| (?:U[0-9a-fA-F]{8})
))
)'
''')
def verify_description_mode(mode: str) -> None:
    """Raise if *mode* is not one of the supported description modes."""
    valid_modes = ('lastIsName', 'noneIsName', 'markType', 'markName', 'param')
    if mode not in valid_modes:
        raise Exception("Description mode '%s' is invalid." % mode)
class NoOldIdError(Exception):
    """Raised to signal that no "old" id scheme applies to a construct."""
    # Used to avoid implementing unneeded id generation for old id schemes.
    @property
    def description(self) -> str:
        # Deprecated accessor kept for backwards compatibility;
        # str(exception) is the supported spelling.
        warnings.warn('%s.description is deprecated. '
                      'Coerce the instance to a string instead.' % self.__class__.__name__,
                      RemovedInSphinx40Warning, stacklevel=2)
        return str(self)
class ASTBaseBase:
    """Root class for AST nodes: structural equality, cloning, and the
    string-conversion protocol built on ``_stringify``."""
    def __eq__(self, other: Any) -> bool:
        # Equal only to a node of the exact same type whose attributes all
        # compare equal to ours.
        if type(self) is not type(other):
            return False
        try:
            return all(value == getattr(other, key)
                       for key, value in self.__dict__.items())
        except AttributeError:
            # *other* is missing one of our attributes.
            return False
    __hash__ = None  # type: Callable[[], int]
    def clone(self) -> Any:
        """Clone a definition expression node."""
        return deepcopy(self)
    def _stringify(self, transform: Callable[[Any], str]) -> str:
        # Subclasses must implement rendering through *transform*.
        raise NotImplementedError(repr(self))
    def __str__(self) -> str:
        return self._stringify(str)
    def get_display_string(self) -> str:
        return self._stringify(lambda node: node.get_display_string())
    def __repr__(self) -> str:
        return '<' + self.__class__.__name__ + '>'
################################################################################
# Attributes
################################################################################
class ASTAttribute(ASTBaseBase):
    """Base class for attribute AST nodes."""
    def describe_signature(self, signode: TextElement) -> None:
        """Render this attribute into *signode*; must be overridden."""
        raise NotImplementedError(repr(self))
class ASTCPPAttribute(ASTAttribute):
    """A C++11-style attribute: ``[[<arg>]]``."""
    def __init__(self, arg: str) -> None:
        # The balanced token sequence between the double brackets.
        self.arg = arg
    def _stringify(self, transform: StringifyTransform) -> str:
        return '[[%s]]' % self.arg
    def describe_signature(self, signode: TextElement) -> None:
        rendered = str(self)
        signode.append(nodes.Text(rendered, rendered))
class ASTGnuAttribute(ASTBaseBase):
    """A single attribute inside ``__attribute__((...))``: a name with
    optional arguments."""
    def __init__(self, name: str, args: Any) -> None:
        self.name = name
        # Falsy args (e.g. None) mean the attribute has no parameter list.
        self.args = args
    def _stringify(self, transform: StringifyTransform) -> str:
        if not self.args:
            return self.name
        return self.name + '(' + transform(self.args) + ')'
class ASTGnuAttributeList(ASTAttribute):
    """A full GNU-style attribute specifier: ``__attribute__((a, b, ...))``."""
    def __init__(self, attrs: List[ASTGnuAttribute]) -> None:
        self.attrs = attrs
    def _stringify(self, transform: StringifyTransform) -> str:
        inner = ', '.join(transform(attr) for attr in self.attrs)
        return '__attribute__((' + inner + '))'
    def describe_signature(self, signode: TextElement) -> None:
        rendered = str(self)
        signode.append(nodes.Text(rendered, rendered))
class ASTIdAttribute(ASTAttribute):
    """For simple, bare-identifier attributes defined by the user."""
    def __init__(self, id: str) -> None:
        self.id = id
    def _stringify(self, transform: StringifyTransform) -> str:
        return self.id
    def describe_signature(self, signode: TextElement) -> None:
        """Render the identifier as plain text into *signode*."""
        signode.append(nodes.Text(self.id, self.id))
class ASTParenAttribute(ASTAttribute):
    """A user-defined attribute of the form ``id(arg)``."""
    def __init__(self, id: str, arg: str) -> None:
        self.id = id
        self.arg = arg
    def _stringify(self, transform: StringifyTransform) -> str:
        return '%s(%s)' % (self.id, self.arg)
    def describe_signature(self, signode: TextElement) -> None:
        rendered = str(self)
        signode.append(nodes.Text(rendered, rendered))
################################################################################
class UnsupportedMultiCharacterCharLiteral(Exception):
    """Raised when a char literal holds more than one character (unsupported)."""
    @property
    def decoded(self) -> str:
        # Deprecated accessor; str(exception) is the supported spelling.
        warnings.warn('%s.decoded is deprecated. '
                      'Coerce the instance to a string instead.' % self.__class__.__name__,
                      RemovedInSphinx40Warning, stacklevel=2)
        return str(self)
class DefinitionError(Exception):
    """Raised when a C/C++ declaration cannot be parsed."""
    @property
    def description(self) -> str:
        # Deprecated accessor; str(exception) is the supported spelling.
        warnings.warn('%s.description is deprecated. '
                      'Coerce the instance to a string instead.' % self.__class__.__name__,
                      RemovedInSphinx40Warning, stacklevel=2)
        return str(self)
class BaseParser:
    """Common parsing machinery for the C and C++ domain parsers.

    Maintains a cursor (``self.pos``) into ``self.definition``; the
    ``match``/``skip_*`` helpers advance the cursor on success and report
    success as a bool, while ``fail`` aborts with a DefinitionError that
    points at the current cursor position.
    """
    def __init__(self, definition: str, *,
                 location: Union[nodes.Node, Tuple[str, int]],
                 config: "Config") -> None:
        self.definition = definition.strip()
        self.location = location  # for warnings
        self.config = config
        # Cursor state: pos is the current offset, end the total length.
        self.pos = 0
        self.end = len(self.definition)
        self.last_match = None  # type: Match
        self._previous_state = (0, None)  # type: Tuple[int, Match]
        # Errors from abandoned alternative parses, reported alongside the
        # main error by fail().
        self.otherErrors = []  # type: List[DefinitionError]
        # in our tests the following is set to False to capture bad parsing
        self.allowFallbackExpressionParsing = True
    def _make_multi_error(self, errors: List[Any], header: str) -> DefinitionError:
        """Combine (exception, description) pairs into one DefinitionError."""
        if len(errors) == 1:
            if len(header) > 0:
                return DefinitionError(header + '\n' + str(errors[0][0]))
            else:
                return DefinitionError(str(errors[0][0]))
        result = [header, '\n']
        for e in errors:
            if len(e[1]) > 0:
                indent = ' '
                result.append(e[1])
                result.append(':\n')
                for line in str(e[0]).split('\n'):
                    # skip blank lines in the nested error text
                    if len(line) == 0:
                        continue
                    result.append(indent)
                    result.append(line)
                    result.append('\n')
            else:
                result.append(str(e[0]))
        return DefinitionError(''.join(result))
    @property
    def language(self) -> str:
        """Human-readable language name; implemented by subclasses."""
        raise NotImplementedError
    def status(self, msg: str) -> None:
        # for debugging
        indicator = '-' * self.pos + '^'
        print("%s\n%s\n%s" % (msg, self.definition, indicator))
    def fail(self, msg: str) -> None:
        """Abort parsing: raise a DefinitionError pointing at ``self.pos``,
        including any accumulated ``otherErrors``."""
        errors = []
        indicator = '-' * self.pos + '^'
        exMain = DefinitionError(
            'Invalid %s declaration: %s [error at %d]\n %s\n %s' %
            (self.language, msg, self.pos, self.definition, indicator))
        errors.append((exMain, "Main error"))
        for err in self.otherErrors:
            errors.append((err, "Potential other error"))
        self.otherErrors = []
        raise self._make_multi_error(errors, '')
    def warn(self, msg: str) -> None:
        """Emit a warning attached to the declaration's source location."""
        logger.warning(msg, location=self.location)
    def match(self, regex: Pattern) -> bool:
        """Try *regex* at the cursor; on success advance past the match and
        remember it in ``last_match``."""
        match = regex.match(self.definition, self.pos)
        if match is not None:
            self._previous_state = (self.pos, self.last_match)
            self.pos = match.end()
            self.last_match = match
            return True
        return False
    def skip_string(self, string: str) -> bool:
        """Consume *string* literally if it is next at the cursor."""
        strlen = len(string)
        if self.definition[self.pos:self.pos + strlen] == string:
            self.pos += strlen
            return True
        return False
    def skip_word(self, word: str) -> bool:
        # \b ensures we do not consume a prefix of a longer identifier.
        return self.match(re.compile(r'\b%s\b' % re.escape(word)))
    def skip_ws(self) -> bool:
        """Consume any whitespace at the cursor."""
        return self.match(_whitespace_re)
    def skip_word_and_ws(self, word: str) -> bool:
        if self.skip_word(word):
            self.skip_ws()
            return True
        return False
    def skip_string_and_ws(self, string: str) -> bool:
        if self.skip_string(string):
            self.skip_ws()
            return True
        return False
    @property
    def eof(self) -> bool:
        """True when the cursor is at (or past) the end of the definition."""
        return self.pos >= self.end
    @property
    def current_char(self) -> str:
        """The character at the cursor, or the string 'EOF' past the end."""
        try:
            return self.definition[self.pos]
        except IndexError:
            return 'EOF'
    @property
    def matched_text(self) -> str:
        """Text of the most recent successful match(), or None."""
        if self.last_match is not None:
            return self.last_match.group()
        else:
            return None
    def read_rest(self) -> str:
        """Consume and return everything from the cursor to the end."""
        rv = self.definition[self.pos:]
        self.pos = self.end
        return rv
    def assert_end(self, *, allowSemicolon: bool = False) -> None:
        """Fail unless only whitespace (optionally a sole ';') remains."""
        self.skip_ws()
        if allowSemicolon:
            if not self.eof and self.definition[self.pos:] != ';':
                self.fail('Expected end of definition or ;.')
        else:
            if not self.eof:
                self.fail('Expected end of definition.')
    ################################################################################
    @property
    def id_attributes(self):
        # User-configured bare-identifier attributes; provided by subclasses.
        raise NotImplementedError
    @property
    def paren_attributes(self):
        # User-configured name(args) attributes; provided by subclasses.
        raise NotImplementedError
    def _parse_balanced_token_seq(self, end: List[str]) -> str:
        """Consume tokens with balanced (), [], {} until an *end* character
        appears at nesting depth zero; return the consumed text."""
        # TODO: add handling of string literals and similar
        brackets = {'(': ')', '[': ']', '{': '}'}
        startPos = self.pos
        # Stack of the closing brackets we still expect.
        symbols = []  # type: List[str]
        while not self.eof:
            if len(symbols) == 0 and self.current_char in end:
                break
            if self.current_char in brackets.keys():
                symbols.append(brackets[self.current_char])
            elif len(symbols) > 0 and self.current_char == symbols[-1]:
                symbols.pop()
            elif self.current_char in ")]}":
                self.fail("Unexpected '%s' in balanced-token-seq." % self.current_char)
            self.pos += 1
        if self.eof:
            self.fail("Could not find end of balanced-token-seq starting at %d."
                      % startPos)
        return self.definition[startPos:self.pos]
    def _parse_attribute(self) -> ASTAttribute:
        """Parse one attribute (C++11 ``[[...]]``, GNU ``__attribute__``, or a
        user-configured one); return None if no attribute is present."""
        self.skip_ws()
        # try C++11 style
        startPos = self.pos
        if self.skip_string_and_ws('['):
            if not self.skip_string('['):
                # Lone '[' — not an attribute; rewind.
                self.pos = startPos
            else:
                # TODO: actually implement the correct grammar
                arg = self._parse_balanced_token_seq(end=[']'])
                if not self.skip_string_and_ws(']'):
                    self.fail("Expected ']' in end of attribute.")
                if not self.skip_string_and_ws(']'):
                    self.fail("Expected ']' in end of attribute after [[...]")
                return ASTCPPAttribute(arg)
        # try GNU style
        if self.skip_word_and_ws('__attribute__'):
            if not self.skip_string_and_ws('('):
                self.fail("Expected '(' after '__attribute__'.")
            if not self.skip_string_and_ws('('):
                self.fail("Expected '(' after '__attribute__('.")
            attrs = []
            while 1:
                if self.match(identifier_re):
                    name = self.matched_text
                    self.skip_ws()
                    if self.skip_string_and_ws('('):
                        self.fail('Parameterized GNU style attribute not yet supported.')
                    attrs.append(ASTGnuAttribute(name, None))
                    # TODO: parse arguments for the attribute
                if self.skip_string_and_ws(','):
                    continue
                elif self.skip_string_and_ws(')'):
                    break
                else:
                    self.fail("Expected identifier, ')', or ',' in __attribute__.")
            if not self.skip_string_and_ws(')'):
                self.fail("Expected ')' after '__attribute__((...)'")
            return ASTGnuAttributeList(attrs)
        # try the simple id attributes defined by the user
        for id in self.id_attributes:
            if self.skip_word_and_ws(id):
                return ASTIdAttribute(id)
        # try the paren attributes defined by the user
        for id in self.paren_attributes:
            if not self.skip_string_and_ws(id):
                continue
            if not self.skip_string('('):
                self.fail("Expected '(' after user-defined paren-attribute.")
            arg = self._parse_balanced_token_seq(end=[')'])
            if not self.skip_string(')'):
                self.fail("Expected ')' to end user-defined paren-attribute.")
            return ASTParenAttribute(id, arg)
        return None
| 33.050228
| 91
| 0.550014
|
import re
import warnings
from copy import deepcopy
from typing import (
Any, Callable, List, Match, Pattern, Tuple, Union
)
from docutils import nodes
from docutils.nodes import TextElement
from sphinx.config import Config
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.util import logging
logger = logging.getLogger(__name__)
StringifyTransform = Callable[[Any], str]
_whitespace_re = re.compile(r'(?u)\s+')
anon_identifier_re = re.compile(r'(@[a-zA-Z0-9_])[a-zA-Z0-9_]*\b')
identifier_re = re.compile(r'''(?x)
( # This 'extends' _anon_identifier_re with the ordinary identifiers,
# make sure they are in sync.
(~?\b[a-zA-Z_]) # ordinary identifiers
| (@[a-zA-Z0-9_]) # our extension for names of anonymous entities
)
[a-zA-Z0-9_]*\b
''')
integer_literal_re = re.compile(r'[1-9][0-9]*')
octal_literal_re = re.compile(r'0[0-7]*')
hex_literal_re = re.compile(r'0[xX][0-9a-fA-F][0-9a-fA-F]*')
binary_literal_re = re.compile(r'0[bB][01][01]*')
float_literal_re = re.compile(r'''(?x)
[+-]?(
# decimal
([0-9]+[eE][+-]?[0-9]+)
| ([0-9]*\.[0-9]+([eE][+-]?[0-9]+)?)
| ([0-9]+\.([eE][+-]?[0-9]+)?)
# hex
| (0[xX][0-9a-fA-F]+[pP][+-]?[0-9a-fA-F]+)
| (0[xX][0-9a-fA-F]*\.[0-9a-fA-F]+([pP][+-]?[0-9a-fA-F]+)?)
| (0[xX][0-9a-fA-F]+\.([pP][+-]?[0-9a-fA-F]+)?)
)
''')
char_literal_re = re.compile(r'''(?x)
((?:u8)|u|U|L)?
'(
(?:[^\\'])
| (\\(
(?:['"?\\abfnrtv])
| (?:[0-7]{1,3})
| (?:x[0-9a-fA-F]{2})
| (?:u[0-9a-fA-F]{4})
| (?:U[0-9a-fA-F]{8})
))
)'
''')
def verify_description_mode(mode: str) -> None:
if mode not in ('lastIsName', 'noneIsName', 'markType', 'markName', 'param'):
raise Exception("Description mode '%s' is invalid." % mode)
class NoOldIdError(Exception):
# Used to avoid implementing unneeded id generation for old id schemes.
@property
def description(self) -> str:
warnings.warn('%s.description is deprecated. '
'Coerce the instance to a string instead.' % self.__class__.__name__,
RemovedInSphinx40Warning, stacklevel=2)
return str(self)
class ASTBaseBase:
def __eq__(self, other: Any) -> bool:
if type(self) is not type(other):
return False
try:
for key, value in self.__dict__.items():
if value != getattr(other, key):
return False
except AttributeError:
return False
return True
__hash__ = None # type: Callable[[], int]
def clone(self) -> Any:
return deepcopy(self)
def _stringify(self, transform: StringifyTransform) -> str:
raise NotImplementedError(repr(self))
def __str__(self) -> str:
return self._stringify(lambda ast: str(ast))
def get_display_string(self) -> str:
return self._stringify(lambda ast: ast.get_display_string())
def __repr__(self) -> str:
return '<%s>' % self.__class__.__name__
################################################################################
# Attributes
################################################################################
class ASTAttribute(ASTBaseBase):
def describe_signature(self, signode: TextElement) -> None:
raise NotImplementedError(repr(self))
class ASTCPPAttribute(ASTAttribute):
def __init__(self, arg: str) -> None:
self.arg = arg
def _stringify(self, transform: StringifyTransform) -> str:
return "[[" + self.arg + "]]"
def describe_signature(self, signode: TextElement) -> None:
txt = str(self)
signode.append(nodes.Text(txt, txt))
class ASTGnuAttribute(ASTBaseBase):
def __init__(self, name: str, args: Any) -> None:
self.name = name
self.args = args
def _stringify(self, transform: StringifyTransform) -> str:
res = [self.name]
if self.args:
res.append('(')
res.append(transform(self.args))
res.append(')')
return ''.join(res)
class ASTGnuAttributeList(ASTAttribute):
def __init__(self, attrs: List[ASTGnuAttribute]) -> None:
self.attrs = attrs
def _stringify(self, transform: StringifyTransform) -> str:
res = ['__attribute__((']
first = True
for attr in self.attrs:
if not first:
res.append(', ')
first = False
res.append(transform(attr))
res.append('))')
return ''.join(res)
def describe_signature(self, signode: TextElement) -> None:
txt = str(self)
signode.append(nodes.Text(txt, txt))
class ASTIdAttribute(ASTAttribute):
def __init__(self, id: str) -> None:
self.id = id
def _stringify(self, transform: StringifyTransform) -> str:
return self.id
def describe_signature(self, signode: TextElement) -> None:
signode.append(nodes.Text(self.id, self.id))
class ASTParenAttribute(ASTAttribute):
def __init__(self, id: str, arg: str) -> None:
self.id = id
self.arg = arg
def _stringify(self, transform: StringifyTransform) -> str:
return self.id + '(' + self.arg + ')'
def describe_signature(self, signode: TextElement) -> None:
txt = str(self)
signode.append(nodes.Text(txt, txt))
################################################################################
class UnsupportedMultiCharacterCharLiteral(Exception):
@property
def decoded(self) -> str:
warnings.warn('%s.decoded is deprecated. '
'Coerce the instance to a string instead.' % self.__class__.__name__,
RemovedInSphinx40Warning, stacklevel=2)
return str(self)
class DefinitionError(Exception):
@property
def description(self) -> str:
warnings.warn('%s.description is deprecated. '
'Coerce the instance to a string instead.' % self.__class__.__name__,
RemovedInSphinx40Warning, stacklevel=2)
return str(self)
class BaseParser:
def __init__(self, definition: str, *,
location: Union[nodes.Node, Tuple[str, int]],
config: "Config") -> None:
self.definition = definition.strip()
self.location = location # for warnings
self.config = config
self.pos = 0
self.end = len(self.definition)
self.last_match = None # type: Match
self._previous_state = (0, None) # type: Tuple[int, Match]
self.otherErrors = [] # type: List[DefinitionError]
# in our tests the following is set to False to capture bad parsing
self.allowFallbackExpressionParsing = True
def _make_multi_error(self, errors: List[Any], header: str) -> DefinitionError:
if len(errors) == 1:
if len(header) > 0:
return DefinitionError(header + '\n' + str(errors[0][0]))
else:
return DefinitionError(str(errors[0][0]))
result = [header, '\n']
for e in errors:
if len(e[1]) > 0:
indent = ' '
result.append(e[1])
result.append(':\n')
for line in str(e[0]).split('\n'):
if len(line) == 0:
continue
result.append(indent)
result.append(line)
result.append('\n')
else:
result.append(str(e[0]))
return DefinitionError(''.join(result))
@property
def language(self) -> str:
raise NotImplementedError
def status(self, msg: str) -> None:
# for debugging
indicator = '-' * self.pos + '^'
print("%s\n%s\n%s" % (msg, self.definition, indicator))
def fail(self, msg: str) -> None:
errors = []
indicator = '-' * self.pos + '^'
exMain = DefinitionError(
'Invalid %s declaration: %s [error at %d]\n %s\n %s' %
(self.language, msg, self.pos, self.definition, indicator))
errors.append((exMain, "Main error"))
for err in self.otherErrors:
errors.append((err, "Potential other error"))
self.otherErrors = []
raise self._make_multi_error(errors, '')
def warn(self, msg: str) -> None:
logger.warning(msg, location=self.location)
def match(self, regex: Pattern) -> bool:
match = regex.match(self.definition, self.pos)
if match is not None:
self._previous_state = (self.pos, self.last_match)
self.pos = match.end()
self.last_match = match
return True
return False
def skip_string(self, string: str) -> bool:
strlen = len(string)
if self.definition[self.pos:self.pos + strlen] == string:
self.pos += strlen
return True
return False
def skip_word(self, word: str) -> bool:
return self.match(re.compile(r'\b%s\b' % re.escape(word)))
    def skip_ws(self) -> bool:
        """Consume whitespace at the current position (via the module's ``_whitespace_re``)."""
        return self.match(_whitespace_re)
def skip_word_and_ws(self, word: str) -> bool:
if self.skip_word(word):
self.skip_ws()
return True
return False
def skip_string_and_ws(self, string: str) -> bool:
if self.skip_string(string):
self.skip_ws()
return True
return False
    @property
    def eof(self) -> bool:
        """True once the parse position has reached the end of the definition."""
        return self.pos >= self.end
    @property
    def current_char(self) -> str:
        """The character at the parse position, or the sentinel ``'EOF'`` past the end."""
        try:
            return self.definition[self.pos]
        except IndexError:
            return 'EOF'
@property
def matched_text(self) -> str:
if self.last_match is not None:
return self.last_match.group()
else:
return None
def read_rest(self) -> str:
rv = self.definition[self.pos:]
self.pos = self.end
return rv
def assert_end(self, *, allowSemicolon: bool = False) -> None:
self.skip_ws()
if allowSemicolon:
if not self.eof and self.definition[self.pos:] != ';':
self.fail('Expected end of definition or ;.')
else:
if not self.eof:
self.fail('Expected end of definition.')
################################################################################
    @property
    def id_attributes(self):
        """User-configured identifier-only attribute names; subclasses override."""
        raise NotImplementedError
    @property
    def paren_attributes(self):
        """User-configured parenthesised attribute names; subclasses override."""
        raise NotImplementedError
    def _parse_balanced_token_seq(self, end: List[str]) -> str:
        """Consume a balanced-token-seq until one of the *end* characters
        appears at bracket-nesting depth 0, and return the consumed text.

        ``()``, ``[]`` and ``{}`` must nest properly; a mismatched closer or
        running off the end of the input is a parse failure.
        """
        # TODO: add handling of string literals and similar
        brackets = {'(': ')', '[': ']', '{': '}'}
        startPos = self.pos
        # Stack of the closing characters we still expect; empty == depth 0.
        symbols = [] # type: List[str]
        while not self.eof:
            if len(symbols) == 0 and self.current_char in end:
                break
            if self.current_char in brackets.keys():
                symbols.append(brackets[self.current_char])
            elif len(symbols) > 0 and self.current_char == symbols[-1]:
                symbols.pop()
            elif self.current_char in ")]}":
                # A closer that matches nothing on the stack.
                self.fail("Unexpected '%s' in balanced-token-seq." % self.current_char)
            self.pos += 1
        if self.eof:
            self.fail("Could not find end of balanced-token-seq starting at %d."
                      % startPos)
        return self.definition[startPos:self.pos]
    def _parse_attribute(self) -> ASTAttribute:
        """Parse one attribute, trying in order: C++11 ``[[...]]``, GNU
        ``__attribute__((...))``, then the user-configured id attributes and
        paren attributes.  Returns None when nothing matches.
        """
        self.skip_ws()
        # try C++11 style
        startPos = self.pos
        if self.skip_string_and_ws('['):
            if not self.skip_string('['):
                # Only a single '[': not an attribute; backtrack.
                self.pos = startPos
            else:
                # TODO: actually implement the correct grammar
                arg = self._parse_balanced_token_seq(end=[']'])
                if not self.skip_string_and_ws(']'):
                    self.fail("Expected ']' in end of attribute.")
                if not self.skip_string_and_ws(']'):
                    self.fail("Expected ']' in end of attribute after [[...]")
                return ASTCPPAttribute(arg)
        # try GNU style
        if self.skip_word_and_ws('__attribute__'):
            if not self.skip_string_and_ws('('):
                self.fail("Expected '(' after '__attribute__'.")
            if not self.skip_string_and_ws('('):
                self.fail("Expected '(' after '__attribute__('.")
            attrs = []
            while 1:
                if self.match(identifier_re):
                    name = self.matched_text
                    self.skip_ws()
                    if self.skip_string_and_ws('('):
                        self.fail('Parameterized GNU style attribute not yet supported.')
                    attrs.append(ASTGnuAttribute(name, None))
                    # TODO: parse arguments for the attribute
                if self.skip_string_and_ws(','):
                    continue
                elif self.skip_string_and_ws(')'):
                    break
                else:
                    self.fail("Expected identifier, ')', or ',' in __attribute__.")
            if not self.skip_string_and_ws(')'):
                self.fail("Expected ')' after '__attribute__((...)'")
            return ASTGnuAttributeList(attrs)
        # try the simple id attributes defined by the user
        for id in self.id_attributes:
            if self.skip_word_and_ws(id):
                return ASTIdAttribute(id)
        # try the paren attributes defined by the user
        for id in self.paren_attributes:
            if not self.skip_string_and_ws(id):
                continue
            if not self.skip_string('('):
                self.fail("Expected '(' after user-defined paren-attribute.")
            arg = self._parse_balanced_token_seq(end=[')'])
            if not self.skip_string(')'):
                self.fail("Expected ')' to end user-defined paren-attribute.")
            return ASTParenAttribute(id, arg)
        return None
| true
| true
|
790a49391565ebcb6e6f24d926317fa3cb9775ac
| 6,774
|
py
|
Python
|
ansible/lib/ansible/modules/extras/packaging/language/cpanm.py
|
kiv-box/kafka
|
debec1c4bc8c43776070ee447a53b55fef42bd52
|
[
"Apache-2.0"
] | null | null | null |
ansible/lib/ansible/modules/extras/packaging/language/cpanm.py
|
kiv-box/kafka
|
debec1c4bc8c43776070ee447a53b55fef42bd52
|
[
"Apache-2.0"
] | null | null | null |
ansible/lib/ansible/modules/extras/packaging/language/cpanm.py
|
kiv-box/kafka
|
debec1c4bc8c43776070ee447a53b55fef42bd52
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Franck Cuny <franck@lumberjaph.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: cpanm
short_description: Manages Perl library dependencies.
description:
- Manage Perl library dependencies.
version_added: "1.6"
options:
name:
description:
- The name of the Perl library to install. You may use the "full distribution path", e.g. MIYAGAWA/Plack-0.99_05.tar.gz
required: false
default: null
aliases: ["pkg"]
from_path:
description:
- The local directory from where to install
required: false
default: null
notest:
description:
- Do not run unit tests
required: false
default: false
locallib:
description:
- Specify the install base to install modules
required: false
default: false
mirror:
description:
- Specifies the base URL for the CPAN mirror to use
required: false
default: false
mirror_only:
description:
- Use the mirror's index file instead of the CPAN Meta DB
required: false
default: false
installdeps:
description:
- Only install dependencies
required: false
default: false
version_added: "2.0"
version:
description:
- minimum version of perl module to consider acceptable
required: false
default: false
version_added: "2.1"
system_lib:
description:
- Use this if you want to install modules to the system perl include path. You must be root or have "passwordless" sudo for this to work.
- This uses the cpanm commandline option '--sudo', which has nothing to do with ansible privilege escalation.
required: false
default: false
version_added: "2.0"
aliases: ['use_sudo']
executable:
description:
- Override the path to the cpanm executable
required: false
default: null
version_added: "2.1"
notes:
- Please note that U(http://search.cpan.org/dist/App-cpanminus/bin/cpanm, cpanm) must be installed on the remote host.
author: "Franck Cuny (@franckcuny)"
'''
EXAMPLES = '''
# install Dancer perl package
- cpanm: name=Dancer
# install version 0.99_05 of the Plack perl package
- cpanm: name=MIYAGAWA/Plack-0.99_05.tar.gz
# install Dancer into the specified locallib
- cpanm: name=Dancer locallib=/srv/webapps/my_app/extlib
# install perl dependencies from local directory
- cpanm: from_path=/srv/webapps/my_app/src/
# install Dancer perl package without running the unit tests in indicated locallib
- cpanm: name=Dancer notest=True locallib=/srv/webapps/my_app/extlib
# install Dancer perl package from a specific mirror
- cpanm: name=Dancer mirror=http://cpan.cpantesters.org/
# install Dancer perl package into the system root path
- cpanm: name=Dancer system_lib=yes
# install Dancer if it's not already installed
# OR the installed version is older than version 1.0
- cpanm: name=Dancer version=1.0
'''
def _is_package_installed(module, name, locallib, cpanm, version):
cmd = ""
if locallib:
os.environ["PERL5LIB"] = "%s/lib/perl5" % locallib
cmd = "%s perl -e ' use %s" % (cmd, name)
if version:
cmd = "%s %s;'" % (cmd, version)
else:
cmd = "%s;'" % cmd
res, stdout, stderr = module.run_command(cmd, check_rc=False)
if res == 0:
return True
else:
return False
def _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo):
# this code should use "%s" like everything else and just return early but not fixing all of it now.
# don't copy stuff like this
if from_path:
cmd = cpanm + " " + from_path
else:
cmd = cpanm + " " + name
if notest is True:
cmd = cmd + " -n"
if locallib is not None:
cmd = cmd + " -l " + locallib
if mirror is not None:
cmd = cmd + " --mirror " + mirror
if mirror_only is True:
cmd = cmd + " --mirror-only"
if installdeps is True:
cmd = cmd + " --installdeps"
if use_sudo is True:
cmd = cmd + " --sudo"
return cmd
def _get_cpanm_path(module):
if module.params['executable']:
return module.params['executable']
else:
return module.get_bin_path('cpanm', True)
def main():
    """Ansible module entry point.

    Checks whether the requested Perl library is already importable (at the
    requested minimum version, if any) and, when it is not, installs it with
    cpanm.  Exits via module.exit_json / module.fail_json.
    """
    arg_spec = dict(
        name=dict(default=None, required=False, aliases=['pkg']),
        from_path=dict(default=None, required=False, type='path'),
        notest=dict(default=False, type='bool'),
        locallib=dict(default=None, required=False, type='path'),
        mirror=dict(default=None, required=False),
        mirror_only=dict(default=False, type='bool'),
        installdeps=dict(default=False, type='bool'),
        system_lib=dict(default=False, type='bool', aliases=['use_sudo']),
        version=dict(default=None, required=False),
        executable=dict(required=False, type='path'),
    )
    module = AnsibleModule(
        argument_spec=arg_spec,
        required_one_of=[['name', 'from_path']],
    )
    cpanm = _get_cpanm_path(module)
    name = module.params['name']
    from_path = module.params['from_path']
    notest = module.boolean(module.params.get('notest', False))
    locallib = module.params['locallib']
    mirror = module.params['mirror']
    mirror_only = module.params['mirror_only']
    installdeps = module.params['installdeps']
    use_sudo = module.params['system_lib']
    version = module.params['version']
    changed = False
    installed = _is_package_installed(module, name, locallib, cpanm, version)
    if not installed:
        cmd = _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo)
        rc_cpanm, out_cpanm, err_cpanm = module.run_command(cmd, check_rc=False)
        if rc_cpanm != 0:
            module.fail_json(msg=err_cpanm, cmd=cmd)
        # cpanm prints "is up to date" when nothing actually got installed,
        # in which case the task is not a change.
        if (err_cpanm.find('is up to date') == -1 and out_cpanm.find('is up to date') == -1):
            changed = True
    module.exit_json(changed=changed, binary=cpanm, name=name)
# import module snippets
from ansible.module_utils.basic import *
main()
| 30.651584
| 143
| 0.669767
|
DOCUMENTATION = '''
---
module: cpanm
short_description: Manages Perl library dependencies.
description:
- Manage Perl library dependencies.
version_added: "1.6"
options:
name:
description:
- The name of the Perl library to install. You may use the "full distribution path", e.g. MIYAGAWA/Plack-0.99_05.tar.gz
required: false
default: null
aliases: ["pkg"]
from_path:
description:
- The local directory from where to install
required: false
default: null
notest:
description:
- Do not run unit tests
required: false
default: false
locallib:
description:
- Specify the install base to install modules
required: false
default: false
mirror:
description:
- Specifies the base URL for the CPAN mirror to use
required: false
default: false
mirror_only:
description:
- Use the mirror's index file instead of the CPAN Meta DB
required: false
default: false
installdeps:
description:
- Only install dependencies
required: false
default: false
version_added: "2.0"
version:
description:
- minimum version of perl module to consider acceptable
required: false
default: false
version_added: "2.1"
system_lib:
description:
- Use this if you want to install modules to the system perl include path. You must be root or have "passwordless" sudo for this to work.
- This uses the cpanm commandline option '--sudo', which has nothing to do with ansible privilege escalation.
required: false
default: false
version_added: "2.0"
aliases: ['use_sudo']
executable:
description:
- Override the path to the cpanm executable
required: false
default: null
version_added: "2.1"
notes:
- Please note that U(http://search.cpan.org/dist/App-cpanminus/bin/cpanm, cpanm) must be installed on the remote host.
author: "Franck Cuny (@franckcuny)"
'''
EXAMPLES = '''
# install Dancer perl package
- cpanm: name=Dancer
# install version 0.99_05 of the Plack perl package
- cpanm: name=MIYAGAWA/Plack-0.99_05.tar.gz
# install Dancer into the specified locallib
- cpanm: name=Dancer locallib=/srv/webapps/my_app/extlib
# install perl dependencies from local directory
- cpanm: from_path=/srv/webapps/my_app/src/
# install Dancer perl package without running the unit tests in indicated locallib
- cpanm: name=Dancer notest=True locallib=/srv/webapps/my_app/extlib
# install Dancer perl package from a specific mirror
- cpanm: name=Dancer mirror=http://cpan.cpantesters.org/
# install Dancer perl package into the system root path
- cpanm: name=Dancer system_lib=yes
# install Dancer if it's not already installed
# OR the installed version is older than version 1.0
- cpanm: name=Dancer version=1.0
'''
def _is_package_installed(module, name, locallib, cpanm, version):
cmd = ""
if locallib:
os.environ["PERL5LIB"] = "%s/lib/perl5" % locallib
cmd = "%s perl -e ' use %s" % (cmd, name)
if version:
cmd = "%s %s;'" % (cmd, version)
else:
cmd = "%s;'" % cmd
res, stdout, stderr = module.run_command(cmd, check_rc=False)
if res == 0:
return True
else:
return False
def _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo):
# this code should use "%s" like everything else and just return early but not fixing all of it now.
# don't copy stuff like this
if from_path:
cmd = cpanm + " " + from_path
else:
cmd = cpanm + " " + name
if notest is True:
cmd = cmd + " -n"
if locallib is not None:
cmd = cmd + " -l " + locallib
if mirror is not None:
cmd = cmd + " --mirror " + mirror
if mirror_only is True:
cmd = cmd + " --mirror-only"
if installdeps is True:
cmd = cmd + " --installdeps"
if use_sudo is True:
cmd = cmd + " --sudo"
return cmd
def _get_cpanm_path(module):
if module.params['executable']:
return module.params['executable']
else:
return module.get_bin_path('cpanm', True)
def main():
arg_spec = dict(
name=dict(default=None, required=False, aliases=['pkg']),
from_path=dict(default=None, required=False, type='path'),
notest=dict(default=False, type='bool'),
locallib=dict(default=None, required=False, type='path'),
mirror=dict(default=None, required=False),
mirror_only=dict(default=False, type='bool'),
installdeps=dict(default=False, type='bool'),
system_lib=dict(default=False, type='bool', aliases=['use_sudo']),
version=dict(default=None, required=False),
executable=dict(required=False, type='path'),
)
module = AnsibleModule(
argument_spec=arg_spec,
required_one_of=[['name', 'from_path']],
)
cpanm = _get_cpanm_path(module)
name = module.params['name']
from_path = module.params['from_path']
notest = module.boolean(module.params.get('notest', False))
locallib = module.params['locallib']
mirror = module.params['mirror']
mirror_only = module.params['mirror_only']
installdeps = module.params['installdeps']
use_sudo = module.params['system_lib']
version = module.params['version']
changed = False
installed = _is_package_installed(module, name, locallib, cpanm, version)
if not installed:
cmd = _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo)
rc_cpanm, out_cpanm, err_cpanm = module.run_command(cmd, check_rc=False)
if rc_cpanm != 0:
module.fail_json(msg=err_cpanm, cmd=cmd)
if (err_cpanm.find('is up to date') == -1 and out_cpanm.find('is up to date') == -1):
changed = True
module.exit_json(changed=changed, binary=cpanm, name=name)
from ansible.module_utils.basic import *
main()
| true
| true
|
790a49eb2657bc4712d7f7c7decdd0ce992622ea
| 11,494
|
py
|
Python
|
django_ses/__init__.py
|
mlissner/django-ses
|
ede4756a82682d9b1181947affb0f51e08ad47ab
|
[
"MIT"
] | null | null | null |
django_ses/__init__.py
|
mlissner/django-ses
|
ede4756a82682d9b1181947affb0f51e08ad47ab
|
[
"MIT"
] | null | null | null |
django_ses/__init__.py
|
mlissner/django-ses
|
ede4756a82682d9b1181947affb0f51e08ad47ab
|
[
"MIT"
] | null | null | null |
import logging
import boto3
from botocore.vendored.requests.packages.urllib3.exceptions import ResponseError
from django.core.mail.backends.base import BaseEmailBackend
from django_ses import settings
from datetime import datetime, timedelta
from time import sleep
try:
import importlib.metadata as importlib_metadata
except ModuleNotFoundError:
# Shim for Python 3.7. Remove when support is dropped.
import importlib_metadata
__version__ = importlib_metadata.version(__name__)
__all__ = ('SESBackend',)
# These would be nice to make class-level variables, but the backend is
# re-created for each outgoing email/batch.
# recent_send_times also is not going to work quite right if there are multiple
# email backends with different rate limits returned by SES, but that seems
# like it would be rare.
cached_rate_limits = {}
recent_send_times = []
logger = logging.getLogger('django_ses')
def dkim_sign(message, dkim_domain=None, dkim_key=None, dkim_selector=None, dkim_headers=None):
    """Return *message* with a DKIM signature prepended when both the ``dkim``
    package and the domain/key settings are available; otherwise return the
    message unchanged."""
    try:
        import dkim
    except ImportError:
        return message
    if not (dkim_domain and dkim_key):
        return message
    signature = dkim.sign(message,
                          dkim_selector,
                          dkim_domain,
                          dkim_key,
                          include_headers=dkim_headers)
    return signature + message
def cast_nonzero_to_float(val):
    """Cast a truthy number to float; map falsy values (0, None, '') to None."""
    return float(val) if val else None
class SESBackend(BaseEmailBackend):
    """A Django Email backend that uses Amazon's Simple Email Service.
    """

    def __init__(self, fail_silently=False, aws_access_key=None,
                 aws_secret_key=None, aws_region_name=None,
                 aws_region_endpoint=None, aws_auto_throttle=None, aws_config=None,
                 dkim_domain=None, dkim_key=None, dkim_selector=None, dkim_headers=None,
                 ses_source_arn=None, ses_from_arn=None, ses_return_path_arn=None,
                 **kwargs):
        """Collect connection, DKIM and ARN settings; each argument falls back
        to the corresponding django_ses setting when not supplied."""
        super(SESBackend, self).__init__(fail_silently=fail_silently, **kwargs)
        self._access_key_id = aws_access_key or settings.ACCESS_KEY
        self._access_key = aws_secret_key or settings.SECRET_KEY
        self._region_name = aws_region_name if aws_region_name else settings.AWS_SES_REGION_NAME
        self._endpoint_url = aws_region_endpoint if aws_region_endpoint else settings.AWS_SES_REGION_ENDPOINT_URL
        # 0 or None disables automatic throttling entirely.
        self._throttle = cast_nonzero_to_float(aws_auto_throttle or settings.AWS_SES_AUTO_THROTTLE)
        self._config = aws_config or settings.AWS_SES_CONFIG
        self.dkim_domain = dkim_domain or settings.DKIM_DOMAIN
        self.dkim_key = dkim_key or settings.DKIM_PRIVATE_KEY
        self.dkim_selector = dkim_selector or settings.DKIM_SELECTOR
        self.dkim_headers = dkim_headers or settings.DKIM_HEADERS
        self.ses_source_arn = ses_source_arn or settings.AWS_SES_SOURCE_ARN
        self.ses_from_arn = ses_from_arn or settings.AWS_SES_FROM_ARN
        self.ses_return_path_arn = ses_return_path_arn or settings.AWS_SES_RETURN_PATH_ARN
        self.connection = None

    def open(self):
        """Create a connection to the AWS API server. This can be reused for
        sending multiple emails.

        Returns False when a connection already exists (i.e. nothing new was
        created); otherwise leaves the boto3 client on ``self.connection``.
        Creation errors are swallowed when ``fail_silently`` is set.
        """
        if self.connection:
            return False
        try:
            self.connection = boto3.client(
                'ses',
                aws_access_key_id=self._access_key_id,
                aws_secret_access_key=self._access_key,
                region_name=self._region_name,
                endpoint_url=self._endpoint_url,
                config=self._config
            )
        except Exception:
            if not self.fail_silently:
                raise

    def close(self):
        """Close any open HTTP connections to the API server.
        """
        # boto3 clients hold no explicit close handle; dropping the reference
        # is sufficient.
        self.connection = None

    def send_messages(self, email_messages):
        """Sends one or more EmailMessage objects and returns the number of
        email messages sent.

        Returns None (not 0) when *email_messages* is empty or when opening
        the connection failed silently.
        """
        if not email_messages:
            return

        new_conn_created = self.open()
        if not self.connection:
            # Failed silently
            return

        num_sent = 0
        source = settings.AWS_SES_RETURN_PATH
        for message in email_messages:
            # SES Configuration sets. If the AWS_SES_CONFIGURATION_SET setting
            # is not None, append the appropriate header to the message so that
            # SES knows which configuration set it belongs to.
            #
            # If settings.AWS_SES_CONFIGURATION_SET is a callable, pass it the
            # message object and dkim settings and expect it to return a string
            # containing the SES Configuration Set name.
            if (settings.AWS_SES_CONFIGURATION_SET
                    and 'X-SES-CONFIGURATION-SET' not in message.extra_headers):
                if callable(settings.AWS_SES_CONFIGURATION_SET):
                    message.extra_headers[
                        'X-SES-CONFIGURATION-SET'] = settings.AWS_SES_CONFIGURATION_SET(
                            message,
                            dkim_domain=self.dkim_domain,
                            dkim_key=self.dkim_key,
                            dkim_selector=self.dkim_selector,
                            dkim_headers=self.dkim_headers
                        )
                else:
                    message.extra_headers[
                        'X-SES-CONFIGURATION-SET'] = settings.AWS_SES_CONFIGURATION_SET

            # Automatic throttling. Assumes that this is the only SES client
            # currently operating. The AWS_SES_AUTO_THROTTLE setting is a
            # factor to apply to the rate limit, with a default of 0.5 to stay
            # well below the actual SES throttle.
            # Set the setting to 0 or None to disable throttling.
            if self._throttle:
                global recent_send_times

                now = datetime.now()

                # Get and cache the current SES max-per-second rate limit
                # returned by the SES API.
                rate_limit = self.get_rate_limit()
                logger.debug("send_messages.throttle rate_limit='{}'".format(rate_limit))

                # Prune from recent_send_times anything more than a few seconds
                # ago. Even though SES reports a maximum per-second, the way
                # they enforce the limit may not be on a one-second window.
                # To be safe, we use a two-second window (but allow 2 times the
                # rate limit) and then also have a default rate limit factor of
                # 0.5 so that we really limit the one-second amount in two
                # seconds.
                window = 2.0  # seconds
                window_start = now - timedelta(seconds=window)
                new_send_times = []
                for time in recent_send_times:
                    if time > window_start:
                        new_send_times.append(time)
                recent_send_times = new_send_times

                # If the number of recent send times in the last 1/_throttle
                # seconds exceeds the rate limit, add a delay.
                # Since I'm not sure how Amazon determines at exactly what
                # point to throttle, better be safe than sorry and let in, say,
                # half of the allowed rate.
                if len(new_send_times) > rate_limit * window * self._throttle:
                    # Sleep the remainder of the window period.
                    delta = now - new_send_times[0]
                    total_seconds = (delta.microseconds + (delta.seconds +
                                     delta.days * 24 * 3600) * 10**6) / 10**6
                    delay = window - total_seconds
                    if delay > 0:
                        sleep(delay)

                recent_send_times.append(now)
                # end of throttling

            kwargs = dict(
                Source=source or message.from_email,
                Destinations=message.recipients(),
                # todo attachments?
                RawMessage={'Data': dkim_sign(message.message().as_string(),
                                              dkim_key=self.dkim_key,
                                              dkim_domain=self.dkim_domain,
                                              dkim_selector=self.dkim_selector,
                                              dkim_headers=self.dkim_headers)}
            )
            if self.ses_source_arn:
                kwargs['SourceArn'] = self.ses_source_arn
            if self.ses_from_arn:
                kwargs['FromArn'] = self.ses_from_arn
            if self.ses_return_path_arn:
                kwargs['ReturnPathArn'] = self.ses_return_path_arn
            try:
                response = self.connection.send_raw_email(**kwargs)
                message.extra_headers['status'] = 200
                message.extra_headers['message_id'] = response['MessageId']
                message.extra_headers['request_id'] = response['ResponseMetadata']['RequestId']
                num_sent += 1
                if 'X-SES-CONFIGURATION-SET' in message.extra_headers:
                    logger.debug(
                        "send_messages.sent from='{}' recipients='{}' message_id='{}' request_id='{}' "
                        "ses-configuration-set='{}'".format(
                            message.from_email,
                            ", ".join(message.recipients()),
                            message.extra_headers['message_id'],
                            message.extra_headers['request_id'],
                            message.extra_headers['X-SES-CONFIGURATION-SET']
                        ))
                else:
                    logger.debug("send_messages.sent from='{}' recipients='{}' message_id='{}' request_id='{}'".format(
                        message.from_email,
                        ", ".join(message.recipients()),
                        message.extra_headers['message_id'],
                        message.extra_headers['request_id']
                    ))
            except ResponseError as err:
                # Store failure information so to post process it if required
                error_keys = ['status', 'reason', 'body', 'request_id',
                              'error_code', 'error_message']
                for key in error_keys:
                    message.extra_headers[key] = getattr(err, key, None)
                if not self.fail_silently:
                    raise

        if new_conn_created:
            self.close()

        return num_sent

    def get_rate_limit(self):
        """Return the SES max-send-rate quota for this access key.

        The value is cached per access key in the module-level
        ``cached_rate_limits`` dict so subsequent calls avoid the API round
        trip; a temporary connection is opened (and closed) if necessary.
        """
        if self._access_key_id in cached_rate_limits:
            return cached_rate_limits[self._access_key_id]

        new_conn_created = self.open()
        if not self.connection:
            raise Exception(
                "No connection is available to check current SES rate limit.")
        try:
            quota_dict = self.connection.get_send_quota()
            max_per_second = quota_dict['MaxSendRate']
            ret = float(max_per_second)
            cached_rate_limits[self._access_key_id] = ret
            return ret
        finally:
            if new_conn_created:
                self.close()
| 42.88806
| 119
| 0.583522
|
import logging
import boto3
from botocore.vendored.requests.packages.urllib3.exceptions import ResponseError
from django.core.mail.backends.base import BaseEmailBackend
from django_ses import settings
from datetime import datetime, timedelta
from time import sleep
try:
import importlib.metadata as importlib_metadata
except ModuleNotFoundError:
import importlib_metadata
__version__ = importlib_metadata.version(__name__)
__all__ = ('SESBackend',)
cached_rate_limits = {}
recent_send_times = []
logger = logging.getLogger('django_ses')
def dkim_sign(message, dkim_domain=None, dkim_key=None, dkim_selector=None, dkim_headers=None):
try:
import dkim
except ImportError:
pass
else:
if dkim_domain and dkim_key:
sig = dkim.sign(message,
dkim_selector,
dkim_domain,
dkim_key,
include_headers=dkim_headers)
message = sig + message
return message
def cast_nonzero_to_float(val):
if not val:
return None
return float(val)
class SESBackend(BaseEmailBackend):
def __init__(self, fail_silently=False, aws_access_key=None,
aws_secret_key=None, aws_region_name=None,
aws_region_endpoint=None, aws_auto_throttle=None, aws_config=None,
dkim_domain=None, dkim_key=None, dkim_selector=None, dkim_headers=None,
ses_source_arn=None, ses_from_arn=None, ses_return_path_arn=None,
**kwargs):
super(SESBackend, self).__init__(fail_silently=fail_silently, **kwargs)
self._access_key_id = aws_access_key or settings.ACCESS_KEY
self._access_key = aws_secret_key or settings.SECRET_KEY
self._region_name = aws_region_name if aws_region_name else settings.AWS_SES_REGION_NAME
self._endpoint_url = aws_region_endpoint if aws_region_endpoint else settings.AWS_SES_REGION_ENDPOINT_URL
self._throttle = cast_nonzero_to_float(aws_auto_throttle or settings.AWS_SES_AUTO_THROTTLE)
self._config = aws_config or settings.AWS_SES_CONFIG
self.dkim_domain = dkim_domain or settings.DKIM_DOMAIN
self.dkim_key = dkim_key or settings.DKIM_PRIVATE_KEY
self.dkim_selector = dkim_selector or settings.DKIM_SELECTOR
self.dkim_headers = dkim_headers or settings.DKIM_HEADERS
self.ses_source_arn = ses_source_arn or settings.AWS_SES_SOURCE_ARN
self.ses_from_arn = ses_from_arn or settings.AWS_SES_FROM_ARN
self.ses_return_path_arn = ses_return_path_arn or settings.AWS_SES_RETURN_PATH_ARN
self.connection = None
def open(self):
if self.connection:
return False
try:
self.connection = boto3.client(
'ses',
aws_access_key_id=self._access_key_id,
aws_secret_access_key=self._access_key,
region_name=self._region_name,
endpoint_url=self._endpoint_url,
config=self._config
)
except Exception:
if not self.fail_silently:
raise
def close(self):
self.connection = None
def send_messages(self, email_messages):
if not email_messages:
return
new_conn_created = self.open()
if not self.connection:
return
num_sent = 0
source = settings.AWS_SES_RETURN_PATH
for message in email_messages:
if (settings.AWS_SES_CONFIGURATION_SET
and 'X-SES-CONFIGURATION-SET' not in message.extra_headers):
if callable(settings.AWS_SES_CONFIGURATION_SET):
message.extra_headers[
'X-SES-CONFIGURATION-SET'] = settings.AWS_SES_CONFIGURATION_SET(
message,
dkim_domain=self.dkim_domain,
dkim_key=self.dkim_key,
dkim_selector=self.dkim_selector,
dkim_headers=self.dkim_headers
)
else:
message.extra_headers[
'X-SES-CONFIGURATION-SET'] = settings.AWS_SES_CONFIGURATION_SET
if self._throttle:
global recent_send_times
now = datetime.now()
rate_limit = self.get_rate_limit()
logger.debug("send_messages.throttle rate_limit='{}'".format(rate_limit))
window = 2.0
window_start = now - timedelta(seconds=window)
new_send_times = []
for time in recent_send_times:
if time > window_start:
new_send_times.append(time)
recent_send_times = new_send_times
# point to throttle, better be safe than sorry and let in, say,
# half of the allowed rate.
if len(new_send_times) > rate_limit * window * self._throttle:
# Sleep the remainder of the window period.
delta = now - new_send_times[0]
total_seconds = (delta.microseconds + (delta.seconds +
delta.days * 24 * 3600) * 10**6) / 10**6
delay = window - total_seconds
if delay > 0:
sleep(delay)
recent_send_times.append(now)
# end of throttling
kwargs = dict(
Source=source or message.from_email,
Destinations=message.recipients(),
# todo attachments?
RawMessage={'Data': dkim_sign(message.message().as_string(),
dkim_key=self.dkim_key,
dkim_domain=self.dkim_domain,
dkim_selector=self.dkim_selector,
dkim_headers=self.dkim_headers)}
)
if self.ses_source_arn:
kwargs['SourceArn'] = self.ses_source_arn
if self.ses_from_arn:
kwargs['FromArn'] = self.ses_from_arn
if self.ses_return_path_arn:
kwargs['ReturnPathArn'] = self.ses_return_path_arn
try:
response = self.connection.send_raw_email(**kwargs)
message.extra_headers['status'] = 200
message.extra_headers['message_id'] = response['MessageId']
message.extra_headers['request_id'] = response['ResponseMetadata']['RequestId']
num_sent += 1
if 'X-SES-CONFIGURATION-SET' in message.extra_headers:
logger.debug(
"send_messages.sent from='{}' recipients='{}' message_id='{}' request_id='{}' "
"ses-configuration-set='{}'".format(
message.from_email,
", ".join(message.recipients()),
message.extra_headers['message_id'],
message.extra_headers['request_id'],
message.extra_headers['X-SES-CONFIGURATION-SET']
))
else:
logger.debug("send_messages.sent from='{}' recipients='{}' message_id='{}' request_id='{}'".format(
message.from_email,
", ".join(message.recipients()),
message.extra_headers['message_id'],
message.extra_headers['request_id']
))
except ResponseError as err:
# Store failure information so to post process it if required
error_keys = ['status', 'reason', 'body', 'request_id',
'error_code', 'error_message']
for key in error_keys:
message.extra_headers[key] = getattr(err, key, None)
if not self.fail_silently:
raise
if new_conn_created:
self.close()
return num_sent
def get_rate_limit(self):
if self._access_key_id in cached_rate_limits:
return cached_rate_limits[self._access_key_id]
new_conn_created = self.open()
if not self.connection:
raise Exception(
"No connection is available to check current SES rate limit.")
try:
quota_dict = self.connection.get_send_quota()
max_per_second = quota_dict['MaxSendRate']
ret = float(max_per_second)
cached_rate_limits[self._access_key_id] = ret
return ret
finally:
if new_conn_created:
self.close()
| true
| true
|
790a4ad9639aa28891c9f7f66e581362653139cc
| 1,968
|
py
|
Python
|
Photo.py
|
Rougnt/ArkNightAutoClick
|
693ba25227bdfbf228a3d5b3a04d86ea8135b78b
|
[
"MIT"
] | null | null | null |
Photo.py
|
Rougnt/ArkNightAutoClick
|
693ba25227bdfbf228a3d5b3a04d86ea8135b78b
|
[
"MIT"
] | null | null | null |
Photo.py
|
Rougnt/ArkNightAutoClick
|
693ba25227bdfbf228a3d5b3a04d86ea8135b78b
|
[
"MIT"
] | null | null | null |
import cv2
import os,shutil
import numpy as np
from Adb import Adb
import time
class Photo():
    """Load an image so its pixel data can be inspected and compared."""

    def __init__(self, img_path) -> None:
        """Read the image at *img_path* into ``self.img`` via OpenCV."""
        self.img = cv2.imread(img_path)
class sourceData():
    """Helpers for collecting test screenshots and computing similarity
    metrics over a fixed screen region."""

    def __init__(self) -> None:
        pass

    @staticmethod
    def getScreenPhoto():
        """Capture 100 device screenshots over adb, roughly one every 3 s,
        saving each as a timestamped PNG under ./photo."""
        adb = Adb(device='d5c42b2a')
        for _ in range(100):
            adb.screenCap()
            target = os.path.join(
                '.', 'photo',
                time.strftime("%Y-%m-%d_%H-%M-%S.png", time.localtime()))
            adb.pullBackScreenCap(target)
            print("截图", time.asctime(time.localtime()))
            time.sleep(3)

    @staticmethod
    def calcOujilide(img):
        """Mean pixel value of the fixed region rows 938:1035, cols 1935:2247."""
        region = img[938:1035, 1935:2247]
        return np.sum(region) / (region.shape[0] * region.shape[1] * region.shape[2])

    @staticmethod
    def calcFangcha(img):
        """Similarity score for the fixed region rows 938:1013, cols 1935:2247:
        the variance of its per-column means (averaged over rows and channels)."""
        region = img[938:1013, 1935:2247]
        column_means = np.mean(region, axis=(0, 2))
        return np.var(column_means)
# BUG FIX: the original used "if __name__ is '__main__':" — identity
# comparison with a string literal is implementation-dependent and raises
# SyntaxWarning on Python 3.8+; equality is the correct test.
if __name__ == '__main__':
    # Fingerprint of the reference screenshot to compare candidates against.
    static_num = sourceData.calcFangcha(cv2.imread(os.path.join("adb","screen.png")))
    for img_name in os.listdir(os.path.join("photo")):
        img = cv2.imread(os.path.join("photo",img_name))
        img_num = sourceData.calcFangcha(img)
        # Absolute fingerprint difference: small value => similar screenshots.
        chazhi = abs(static_num-img_num)
        print(img_name,"的差值为",chazhi)
        if chazhi<20:
            print("Copy this file: ",img_name)
            shutil.copyfile(os.path.join("photo",img_name),os.path.join("photo2",img_name))
            print("Write this file: ",img_name)
            cv2.imwrite(os.path.join("photo3",img_name),img[938:1013,1935:2247])
    # To collect screenshots for ~300s instead, run:
    # sourceData.getScreenPhoto()
| 28.114286
| 118
| 0.579776
|
import cv2
import os,shutil
import numpy as np
from Adb import Adb
import time
class Photo():
def __init__(self,img_path) -> None:
self.img = cv2.imread(img_path)
class sourceData():
def __init__(self) -> None:
pass
@staticmethod
def getScreenPhoto():
adb = Adb(device='d5c42b2a')
for x in range(100):
adb.screenCap()
adb.pullBackScreenCap(os.path.join('.','photo',time.strftime("%Y-%m-%d_%H-%M-%S.png", time.localtime()) ))
print("截图",time.asctime(time.localtime()))
time.sleep(3)
@staticmethod
def calcOujilide(img):
img_new = img[938:1035,1935:2247]
img_new_num = np.sum(img_new)/(img_new.shape[0]*img_new.shape[1]*img_new.shape[2])
return img_new_num
@staticmethod
def calcFangcha(img):
img_new = img[938:1013,1935:2247]
img_avg = np.mean(img_new,axis=(0,2))
return np.var(img_avg)
if __name__ is '__main__':
static_num = sourceData.calcFangcha(cv2.imread(os.path.join("adb","screen.png")))
for img_name in os.listdir(os.path.join("photo")):
img = cv2.imread(os.path.join("photo",img_name))
img_num = sourceData.calcFangcha(img)
chazhi = abs(static_num-img_num)
print(img_name,"的差值为",chazhi)
if chazhi<20:
print("Copy this file: ",img_name)
shutil.copyfile(os.path.join("photo",img_name),os.path.join("photo2",img_name))
print("Write this file: ",img_name)
cv2.imwrite(os.path.join("photo3",img_name),img[938:1013,1935:2247])
| true
| true
|
790a4badc8ef66aa7ccea61012c5deeadf1e9938
| 208
|
py
|
Python
|
UNIVESPalgortimo_1/s4aula2.py
|
joaorobsonR/algoritmo1
|
9e6ef6ee8967b771d20d7ebf96478412b0a7940f
|
[
"MIT"
] | null | null | null |
UNIVESPalgortimo_1/s4aula2.py
|
joaorobsonR/algoritmo1
|
9e6ef6ee8967b771d20d7ebf96478412b0a7940f
|
[
"MIT"
] | null | null | null |
UNIVESPalgortimo_1/s4aula2.py
|
joaorobsonR/algoritmo1
|
9e6ef6ee8967b771d20d7ebf96478412b0a7940f
|
[
"MIT"
] | null | null | null |
# Read date components from the user; input() always returns str.
ano = input('Digite o ano: ')
mes = input('Digite o mes: ')
dia = input('Digite o dia')
# Print the date as dd/mm/yyyy, first via str.format, then via print's sep.
print('{}/{}/{}'.format(dia, mes, ano))
print(dia, mes, ano, sep='/')
print(type(ano))
# SECURITY NOTE(review): eval() executes arbitrary user input as Python code.
# For untrusted input prefer int(ano) or ast.literal_eval(ano).
eval(ano)
print(type(eval(ano)))
| 23.111111
| 39
| 0.605769
|
ano = input('Digite o ano: ')
mes = input('Digite o mes: ')
dia = input('Digite o dia')
print('{}/{}/{}'.format(dia, mes, ano))
print(dia, mes, ano, sep='/')
print(type(ano))
eval(ano)
print(type(eval(ano)))
| true
| true
|
790a4bb2f1f953e5fc8f6fb8f4eee7e4ee3ac94e
| 9,286
|
py
|
Python
|
aiida/cmdline/params/types/plugin.py
|
borellim/aiida_core
|
eebef392c81e8b130834a92e1d7abf5e2e30b3ce
|
[
"BSD-2-Clause"
] | 1
|
2019-03-15T10:37:53.000Z
|
2019-03-15T10:37:53.000Z
|
aiida/cmdline/params/types/plugin.py
|
odarbelaeze/aiida_core
|
934b4ccdc73a993f2a6656caf516500470e3da08
|
[
"BSD-2-Clause"
] | null | null | null |
aiida/cmdline/params/types/plugin.py
|
odarbelaeze/aiida_core
|
934b4ccdc73a993f2a6656caf516500470e3da08
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
"""Click parameter type for AiiDA Plugins."""
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import six
import click
from aiida.cmdline.utils import decorators
from aiida.common import exceptions
from aiida.plugins.entry_point import ENTRY_POINT_STRING_SEPARATOR, ENTRY_POINT_GROUP_PREFIX, EntryPointFormat
from aiida.plugins.entry_point import format_entry_point_string, get_entry_point_string_format
from aiida.plugins.entry_point import get_entry_point, get_entry_points, get_entry_point_groups
class PluginParamType(click.ParamType):
    """
    AiiDA Plugin name parameter type.

    :param group: string or tuple of strings, where each is a valid entry point group. Adding the `aiida.`
        prefix is optional. If it is not detected it will be prepended internally.
    :param load: when set to True, convert will not return the entry point, but the loaded entry point

    Usage::

        click.option(... type=PluginParamType(group='aiida.calculations')

    or::

        click.option(... type=PluginParamType(group=('calculations', 'data'))

    """
    name = 'plugin'

    def __init__(self, group=None, load=False, *args, **kwargs):
        """
        Validate that group is either a string or a tuple of valid entry point groups, or if it
        is not specified use the tuple of all recognized entry point groups.
        """
        # pylint: disable=keyword-arg-before-vararg
        valid_entry_point_groups = get_entry_point_groups()

        if group is None:
            self._groups = tuple(valid_entry_point_groups)
        else:
            if isinstance(group, six.string_types):
                invalidated_groups = tuple([group])
            elif isinstance(group, tuple):
                invalidated_groups = group
            else:
                raise ValueError('invalid type for group')

            groups = []
            for grp in invalidated_groups:
                # Accept group names with or without the 'aiida.' prefix.
                if not grp.startswith(ENTRY_POINT_GROUP_PREFIX):
                    grp = ENTRY_POINT_GROUP_PREFIX + grp
                if grp not in valid_entry_point_groups:
                    raise ValueError('entry point group {} is not recognized'.format(grp))
                groups.append(grp)
            self._groups = tuple(groups)

        self._init_entry_points()
        self.load = load

        super(PluginParamType, self).__init__(*args, **kwargs)

    def _init_entry_points(self):
        """
        Populate entry point information that will be used later on. This should only be called
        once in the constructor after setting self.groups because the groups should not be changed
        after instantiation
        """
        self._entry_points = [(group, entry_point) for group in self.groups for entry_point in get_entry_points(group)]
        self._entry_point_names = [entry_point.name for group in self.groups for entry_point in get_entry_points(group)]

    @property
    def groups(self):
        return self._groups

    @property
    def has_potential_ambiguity(self):
        """
        Returns whether the set of supported entry point groups can lead to ambiguity when only an entry point name
        is specified. This will happen if one or more groups share an entry point with a common name
        """
        return len(self._entry_point_names) != len(set(self._entry_point_names))

    def get_valid_arguments(self):
        """
        Return a list of all available plugins for the groups configured for this PluginParamType instance.
        If the entry point names are not unique, because there are multiple groups that contain an entry
        point that has an identical name, we need to prefix the names with the full group name

        :returns: list of valid entry point strings
        """
        if self.has_potential_ambiguity:
            fmt = EntryPointFormat.FULL
            return sorted([format_entry_point_string(group, ep.name, fmt=fmt) for group, ep in self._entry_points])

        return sorted(self._entry_point_names)

    def get_possibilities(self, incomplete=''):
        """
        Return a list of plugins starting with incomplete
        """
        if incomplete == '':
            return self.get_valid_arguments()

        # If there is a chance of ambiguity we always return the entry point string in FULL format, otherwise
        # return the possibilities in the same format as the incomplete. Note that this may have some unexpected
        # effects. For example if incomplete equals `aiida.` or `calculations` it will be detected as the MINIMAL
        # format, even though they would also be the valid beginnings of a FULL or PARTIAL format, except that we
        # cannot know that for sure at this time
        if self.has_potential_ambiguity:
            possibilities = [eps for eps in self.get_valid_arguments() if eps.startswith(incomplete)]
        else:
            possibilities = []
            fmt = get_entry_point_string_format(incomplete)
            for group, entry_point in self._entry_points:
                entry_point_string = format_entry_point_string(group, entry_point.name, fmt=fmt)
                if entry_point_string.startswith(incomplete):
                    possibilities.append(entry_point_string)

        return possibilities

    def complete(self, ctx, incomplete):  # pylint: disable=unused-argument
        """
        Return possible completions based on an incomplete value

        :returns: list of tuples of valid entry points (matching incomplete) and a description
        """
        return [(p, '') for p in self.get_possibilities(incomplete=incomplete)]

    def get_missing_message(self, param):
        return 'Possible arguments are:\n\n' + '\n'.join(self.get_valid_arguments())

    def get_entry_point_from_string(self, entry_point_string):
        """
        Validate a given entry point string, which means that it should have a valid entry point string format
        and that the entry point unambiguously corresponds to an entry point in the groups configured for this
        instance of PluginParameterType.

        :returns: the entry point if valid
        :raises: ValueError if the entry point string is invalid
        """
        group = None
        name = None

        entry_point_format = get_entry_point_string_format(entry_point_string)

        if entry_point_format in (EntryPointFormat.FULL, EntryPointFormat.PARTIAL):
            group, name = entry_point_string.split(ENTRY_POINT_STRING_SEPARATOR)

            if entry_point_format == EntryPointFormat.PARTIAL:
                group = ENTRY_POINT_GROUP_PREFIX + group

            if group not in self.groups:
                # BUG FIX: the message previously lacked .format(group), so the
                # literal '{}' placeholder was shown to the user.
                raise ValueError('entry point group {} is not supported by this parameter'.format(group))

        elif entry_point_format == EntryPointFormat.MINIMAL:
            name = entry_point_string
            matching_groups = [group for group, entry_point in self._entry_points if entry_point.name == name]

            if len(matching_groups) > 1:
                raise ValueError("entry point '{}' matches more than one valid entry point group [{}], "
                                 "please specify an explicit group prefix".format(name, ' '.join(matching_groups)))
            elif not matching_groups:
                raise ValueError("entry point '{}' is not valid for any of the allowed "
                                 "entry point groups: {}".format(name, ' '.join(self.groups)))
            else:
                group = matching_groups[0]

        else:
            # BUG FIX: the original constructed this ValueError without raising
            # it, so an invalid-format string silently fell through to
            # get_entry_point(None, None) below.
            raise ValueError('invalid entry point string format: {}'.format(entry_point_string))

        try:
            entry_point = get_entry_point(group, name)
        except exceptions.EntryPointError as exception:
            raise ValueError(exception)

        return entry_point

    @decorators.with_dbenv()
    def convert(self, value, param, ctx):
        """
        Convert the string value to an entry point instance, if the value can be successfully parsed
        into an actual entry point. Will raise click.BadParameter if validation fails.
        """
        if not value:
            raise click.BadParameter('plugin name cannot be empty')

        try:
            entry_point = self.get_entry_point_from_string(value)
        except ValueError as exception:
            raise click.BadParameter(str(exception))

        if self.load:
            try:
                return entry_point.load()
            except exceptions.LoadingEntryPointError as exception:
                raise click.BadParameter(str(exception))
        else:
            return entry_point
| 42.0181
| 120
| 0.643011
|
xception))
else:
return entry_point
| true
| true
|
790a4c12b64b58048378a350ea475e30625123a6
| 2,635
|
py
|
Python
|
benchmark/opperf/nd_operations/unary_operators.py
|
paulk-asert/incubator-mxnet
|
6acf7e6a051e75d9f1cca0ec3c198c38c0f6a3fe
|
[
"Apache-2.0"
] | 4
|
2018-10-12T05:02:17.000Z
|
2019-06-27T04:44:11.000Z
|
benchmark/opperf/nd_operations/unary_operators.py
|
paulk-asert/incubator-mxnet
|
6acf7e6a051e75d9f1cca0ec3c198c38c0f6a3fe
|
[
"Apache-2.0"
] | 4
|
2018-03-21T00:00:03.000Z
|
2019-09-03T23:54:44.000Z
|
benchmark/opperf/nd_operations/unary_operators.py
|
ElaineBao/incubator-mxnet
|
bcb09fcda8e119dd5524bb4a99fc20248a8fbb24
|
[
"Apache-2.0"
] | 1
|
2020-04-09T09:26:31.000Z
|
2020-04-09T09:26:31.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Performance benchmark tests for MXNet NDArray Unary Operations.
1. Operators are automatically fetched from MXNet operator registry.
2. Default Inputs are generated. See rules/default_params.py. You can override the default values.
Below 54 unary Operators are covered:
['BlockGrad', 'Flatten', 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
'argmax_channel', 'cbrt', 'ceil', 'cos', 'cosh', 'degrees', 'erf', 'erfinv', 'exp', 'expm1', 'fix', 'flatten',
'floor', 'gamma', 'gammaln', 'identity', 'log', 'log10', 'log1p', 'log2', 'logical_not', 'make_loss', 'negative',
'ones_like', 'radians', 'rcbrt', 'reciprocal', 'relu', 'rint', 'round', 'rsqrt', 'shuffle', 'sigmoid', 'sign',
'sin', 'sinh', 'size_array', 'softsign', 'sqrt', 'square', 'stop_gradient', 'tan', 'tanh', 'trunc', 'zeros_like']
"""
import mxnet as mx
from benchmark.opperf.utils.op_registry_utils import get_all_unary_operators
from benchmark.opperf.utils.benchmark_utils import run_op_benchmarks
def run_mx_unary_operators_benchmarks(ctx=mx.cpu(), dtype='float32', warmup=25, runs=100):
    """Time every unary operator registered with MXNet.

    Parameters
    ----------
    ctx: mx.ctx
        Context to run benchmarks
    dtype: str, default 'float32'
        Precision to use for benchmarks
    warmup: int, default 25
        Number of times to run for warmup
    runs: int, default 100
        Number of runs to capture benchmark results

    Returns
    -------
    Dictionary of results. Key -> Name of the operator, Value -> Benchmark results.
    """
    # Discover the registered unary operators, then benchmark them in one pass.
    return run_op_benchmarks(get_all_unary_operators(), dtype, ctx, warmup, runs)
| 41.825397
| 113
| 0.718406
|
import mxnet as mx
from benchmark.opperf.utils.op_registry_utils import get_all_unary_operators
from benchmark.opperf.utils.benchmark_utils import run_op_benchmarks
def run_mx_unary_operators_benchmarks(ctx=mx.cpu(), dtype='float32', warmup=25, runs=100):
mx_unary_broadcast_ops = get_all_unary_operators()
mx_unary_op_results = run_op_benchmarks(mx_unary_broadcast_ops, dtype, ctx, warmup, runs)
return mx_unary_op_results
| true
| true
|
790a4cee5ffc44c339dbf2b5f3d567b15d1e4818
| 620
|
py
|
Python
|
problem2/views.py
|
byteknacker/eulerapps
|
5bebf00b4c77b84ceee8bbd73226db60e7fec03f
|
[
"BSD-3-Clause"
] | null | null | null |
problem2/views.py
|
byteknacker/eulerapps
|
5bebf00b4c77b84ceee8bbd73226db60e7fec03f
|
[
"BSD-3-Clause"
] | null | null | null |
problem2/views.py
|
byteknacker/eulerapps
|
5bebf00b4c77b84ceee8bbd73226db60e7fec03f
|
[
"BSD-3-Clause"
] | null | null | null |
"""Views of problem2 app."""
from django.shortcuts import render
from .forms import FiboForm
def display(request):
    """Render the Fibonacci form; on a valid POST, compute and show the even sum."""
    if request.method != 'POST':
        # Plain GET: show an empty form.
        return render(request, 'problem2/solution2.html', {'form': FiboForm()})

    form = FiboForm(request.POST)
    if form.is_valid():
        fibo = form.save(commit=False)
        evensum = fibo.evenFiboSum()
        fibo.save()
        return render(request, 'problem2/solution2.html',
                      {'evensum': evensum, 'form': form})

    # Invalid submission: re-render with the bound form so errors are shown.
    return render(request, 'problem2/solution2.html', {'form': form})
| 29.52381
| 69
| 0.593548
|
from django.shortcuts import render
from .forms import FiboForm
def display(request):
if request.method == 'POST':
form = FiboForm(request.POST)
if form.is_valid():
fibo = form.save(commit=False)
evensum = fibo.evenFiboSum()
fibo.save()
return render(request, 'problem2/solution2.html',
{'evensum': evensum, 'form': form})
else:
form = FiboForm()
return render(request, 'problem2/solution2.html', {'form': form})
| true
| true
|
790a4cfeef76cc205825bcf9a8937cb36166b47c
| 140
|
py
|
Python
|
pycronserver/__init__.py
|
pyscioffice/pycronserver
|
bdacdc99ccc000fe0da02a93f48f2af689c7cf16
|
[
"BSD-3-Clause"
] | null | null | null |
pycronserver/__init__.py
|
pyscioffice/pycronserver
|
bdacdc99ccc000fe0da02a93f48f2af689c7cf16
|
[
"BSD-3-Clause"
] | null | null | null |
pycronserver/__init__.py
|
pyscioffice/pycronserver
|
bdacdc99ccc000fe0da02a93f48f2af689c7cf16
|
[
"BSD-3-Clause"
] | null | null | null |
from pycronserver.server import get_pycronserver, execute_funct
from pycronserver.local import get_local_pycronserver, create_config_folder
| 46.666667
| 75
| 0.9
|
from pycronserver.server import get_pycronserver, execute_funct
from pycronserver.local import get_local_pycronserver, create_config_folder
| true
| true
|
790a4ef4e8d40496066bf195a1491a4ce6062cba
| 935
|
py
|
Python
|
ESEC.FSE.2017.Experimental.Replication/Figure2/F/Frelationrace/Table1CausalScore.py
|
austinatchley/Themis
|
67d5e639e9445f1612249ae7939b3625fea138db
|
[
"BSD-4-Clause-UC"
] | 88
|
2017-08-14T19:44:21.000Z
|
2021-11-20T00:48:01.000Z
|
ESEC.FSE.2017.Experimental.Replication/Figure2/F/Frelationrace/Table1CausalScore.py
|
kavithacd/Themis
|
67d5e639e9445f1612249ae7939b3625fea138db
|
[
"BSD-4-Clause-UC"
] | 25
|
2017-03-07T15:33:46.000Z
|
2020-06-18T01:39:26.000Z
|
ESEC.FSE.2017.Experimental.Replication/Figure2/F/Frelationrace/Table1CausalScore.py
|
kavithacd/Themis
|
67d5e639e9445f1612249ae7939b3625fea138db
|
[
"BSD-4-Clause-UC"
] | 19
|
2017-10-11T15:25:12.000Z
|
2021-08-16T01:47:43.000Z
|
'''
This script calculates the Causal discrimination score for the particular input file towards race or gender.
USAGE :
argv[1] : Input test suite
argv[2] : 0/1
0 for tace
1 for gender
'''
import sys

# Usage: argv[1] = input test suite (CSV), argv[2] = 0 for race, 1 for gender.
# Kept for CLI compatibility; the value is not used in the computation below
# (the original bound it to the builtin name 'type', shadowing it).
attr_type = int(sys.argv[2])

num = 0         # 30-row groups containing both a positive and a negative outcome
den = 0         # total number of complete 30-row groups
pos_found = 0   # flag: current group has an outcome > 0
neg_found = 0   # flag: current group has an outcome <= 0
row_num = 0     # rows seen in the current group

# BUG FIX: the original opened the file and never closed it; the unused
# pos/neg/lst/lines variables (and a Python-2 division relic) were removed.
with open(sys.argv[1], "r") as f:
    for line in f:
        fields = line.strip().split(',')
        # Last column is the classifier output: >0 positive, <=0 negative.
        if float(fields[-1]) > 0:
            pos_found = 1
        if float(fields[-1]) <= 0:
            neg_found = 1
        row_num += 1
        if row_num == 30:
            # A group of 30 perturbed inputs ends here.
            row_num = 0
            if pos_found == 1 and neg_found == 1:
                num += 1
            den += 1
            pos_found = 0
            neg_found = 0

# Causal discrimination score as a percentage, floored at 0.01.
val = num * 100.0 / den
if val < 0.01:
    val = 0.01
print("%.2f" % val)
| 17.641509
| 108
| 0.537968
|
import sys
f = open(sys.argv[1],"r")
type = int(sys.argv[2])
pos = 0
neg = 1
rowNum = 0
num=0
den=0
posFound = 0
negFound = 0
iter = 0
lst = []
lines =[]
for line in f:
line = line.strip()
l1 = line
lines.append(l1)
line =line.split(',')
if(float(line[-1])>0):
posFound=1
if(float(line[-1])<=0):
negFound=1
rowNum+=1
if(rowNum==30):
rowNum=0
if(posFound==1 and negFound==1):
num+=1
lst.append(iter/5*5)
den+=1
posFound = 0
negFound = 0
iter +=1
val = num*100.0/den
if(val < 0.01):
val=0.01
print("%.2f"%val)
| true
| true
|
790a4ef7640d16be62e9d15f580f76ce121919be
| 2,572
|
py
|
Python
|
safe_exploration/visualization/utils_visualization.py
|
Pathetiue/safe-exploration
|
9495484b19f4df4b4671e721962d77341ef022f2
|
[
"MIT"
] | 55
|
2019-05-13T07:17:08.000Z
|
2022-03-23T16:39:20.000Z
|
safe_exploration/visualization/utils_visualization.py
|
Pathetiue/safe-exploration
|
9495484b19f4df4b4671e721962d77341ef022f2
|
[
"MIT"
] | 2
|
2019-05-13T06:56:43.000Z
|
2019-12-12T22:19:28.000Z
|
safe_exploration/visualization/utils_visualization.py
|
befelix/safe-exploration
|
e6c0bc57b7b51fe3e3c97d51721893fe297b2b11
|
[
"MIT"
] | 18
|
2019-05-13T11:08:21.000Z
|
2021-12-23T12:51:36.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 21 15:49:49 2017
@author: tkoller
"""
import numpy as np
import numpy.linalg as nLa
from ..utils import unavailable
try:
import matplotlib.pyplot as plt
_has_matplotlib = True
except:
_has_matplotlib = False
@unavailable(not _has_matplotlib, "matplotlib")
def plot_ellipsoid_3D(p, q, ax, n_points=100):
    """ Plot an ellipsoid in 3D

    Based on
    https://stackoverflow.com/questions/7819498/plotting-ellipsoid-with-matplotlib

    Parameters
    ----------
    p: 3x1 array[float]
        Center of the ellipsoid
    q: 3x3 array[float]
        Shape matrix of the ellipsoid (symmetric positive definite)
    ax: matplotlib.Axes object
        Ax on which to plot the ellipsoid
    n_points: int
        Number of samples per angular coordinate

    Returns
    -------
    ax: matplotlib.Axes object
        The Ax containing the ellipsoid
    """
    assert np.shape(p) == (3, 1), "p needs to be a 3x1 vector"
    assert np.shape(q) == (3, 3), "q needs to be a spd 3x3 matrix"
    # BUG FIX: the message string used to be passed *into* np.allclose as the
    # rtol argument instead of being the assert message.
    assert np.allclose(q, 0.5 * (q + q.T)), "q needs to be spd"

    # transform to radius/center parametrization
    # BUG FIX: 'linalg' was an undefined name; numpy.linalg is imported as nLa.
    _, s, rotation = nLa.svd(q)
    assert np.all(s > 0), "q needs to be positive definite"
    radii = 1.0 / np.sqrt(s)
    # BUG FIX: 'center' was never defined; it is the flattened center p.
    center = np.asarray(p).reshape((3,))

    # get x,y,z of sphere and transform
    u = np.linspace(0.0, 2.0 * np.pi, n_points)
    v = np.linspace(0.0, np.pi, n_points)
    x = radii[0] * np.outer(np.cos(u), np.sin(v))
    y = radii[1] * np.outer(np.sin(u), np.sin(v))
    z = radii[2] * np.outer(np.ones_like(u), np.cos(v))
    for i in range(len(x)):
        for j in range(len(x)):
            [x[i, j], y[i, j], z[i, j]] = np.dot([x[i, j], y[i, j], z[i, j]],
                                                 rotation) + center

    # plot the result
    ax.plot_wireframe(x, y, z, rstride=4, cstride=4, color='b', alpha=0.2)

    return ax
@unavailable(not _has_matplotlib, "matplotlib")
def plot_ellipsoid_2D(p, q, ax, n_points=100, color="r"):
    """ Plot an ellipsoid in 2D

    Parameters
    ----------
    p: 2x1 array[float]
        Center of the ellipsoid
    q: 2x2 array[float]
        Shape matrix of the ellipsoid
    ax: matplotlib.Axes object
        Ax on which to plot the ellipsoid

    Returns
    -------
    ax: matplotlib.Axes object
        The Ax containing the ellipsoid
    handle: matplotlib line object
        Handle of the plotted ellipse boundary
    """
    plt.sca(ax)
    # The transposed Cholesky factor maps the unit circle onto the ellipsoid
    # boundary; nLa.cholesky raises if q is not symmetric positive definite.
    transform = nLa.cholesky(q).T
    angles = np.linspace(0, 2 * np.pi, n_points)
    unit_circle = [np.cos(angles), np.sin(angles)]
    boundary = np.dot(transform, unit_circle) + p
    handle, = ax.plot(boundary[0, :], boundary[1, :], color)
    return ax, handle
| 26.791667
| 82
| 0.591757
|
import numpy as np
import numpy.linalg as nLa
from ..utils import unavailable
try:
import matplotlib.pyplot as plt
_has_matplotlib = True
except:
_has_matplotlib = False
@unavailable(not _has_matplotlib, "matplotlib")
def plot_ellipsoid_3D(p, q, ax, n_points=100):
assert np.shape(p) == (3, 1), "p needs to be a 3x1 vector"
assert np.shape(q) == (3, 3), "q needs to be a spd 3x3 matrix"
assert np.allclose(q, 0.5 * (q + q.T), "q needs to be spd")
U, s, rotation = linalg.svd(q)
assert np.all(s > 0), "q needs to be positive definite"
radii = 1.0 / np.sqrt(s)
u = np.linspace(0.0, 2.0 * np.pi, n_points)
v = np.linspace(0.0, np.pi, n_points)
x = radii[0] * np.outer(np.cos(u), np.sin(v))
y = radii[1] * np.outer(np.sin(u), np.sin(v))
z = radii[2] * np.outer(np.ones_like(u), np.cos(v))
for i in range(len(x)):
for j in range(len(x)):
[x[i, j], y[i, j], z[i, j]] = np.dot([x[i, j], y[i, j], z[i, j]],
rotation) + center
ax.plot_wireframe(x, y, z, rstride=4, cstride=4, color='b', alpha=0.2)
return ax
@unavailable(not _has_matplotlib, "matplotlib")
def plot_ellipsoid_2D(p, q, ax, n_points=100, color="r"):
plt.sca(ax)
r = nLa.cholesky(q).T;
t = np.linspace(0, 2 * np.pi, n_points);
z = [np.cos(t), np.sin(t)];
ellipse = np.dot(r, z) + p;
handle, = ax.plot(ellipse[0, :], ellipse[1, :], color)
return ax, handle
| true
| true
|
790a4f9b1ca5315576470030e7218150601d0818
| 56
|
py
|
Python
|
pandoc_mustache/__init__.py
|
copart/pandoc-mustache
|
f6ace29cd0c8d6b4d8f182eedcf36ad38a2412fa
|
[
"CC0-1.0"
] | 43
|
2017-12-27T05:57:00.000Z
|
2022-03-18T10:07:28.000Z
|
pandoc_mustache/__init__.py
|
copart/pandoc-mustache
|
f6ace29cd0c8d6b4d8f182eedcf36ad38a2412fa
|
[
"CC0-1.0"
] | 10
|
2018-02-07T11:20:37.000Z
|
2021-04-22T21:44:19.000Z
|
pandoc_mustache/__init__.py
|
copart/pandoc-mustache
|
f6ace29cd0c8d6b4d8f182eedcf36ad38a2412fa
|
[
"CC0-1.0"
] | 8
|
2018-11-05T13:10:35.000Z
|
2021-08-30T18:14:02.000Z
|
from .version import __version__
import pandoc_mustache
| 18.666667
| 32
| 0.875
|
from .version import __version__
import pandoc_mustache
| true
| true
|
790a4fdab690a4aa3f7000ec30cef8d8feac2af3
| 1,110
|
py
|
Python
|
main.py
|
randomvi/opencv-color-detector
|
8106178434b15c116d8a93140a35f2c341480f11
|
[
"MIT"
] | null | null | null |
main.py
|
randomvi/opencv-color-detector
|
8106178434b15c116d8a93140a35f2c341480f11
|
[
"MIT"
] | null | null | null |
main.py
|
randomvi/opencv-color-detector
|
8106178434b15c116d8a93140a35f2c341480f11
|
[
"MIT"
] | null | null | null |
import numpy as np
import cv2

# To capture webcam live stream, change the next line to: cap = cv2.VideoCapture(0)
cap = cv2.VideoCapture('./assets/video.mp4')

while True:
    # Capture frame by frame; ret is False once the video is exhausted.
    ret, frame = cap.read()
    if not ret:
        # BUG FIX: the original ignored the return flag, so cvtColor(None)
        # crashed when the video file ended.
        break

    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)

    # HSV (hue, saturation, value) bounds for the color red.
    lower_color = np.array([150, 150, 50])
    upper_color = np.array([180, 255, 150])

    # mask keeps pixels whose HSV falls between lower_color and upper_color.
    mask = cv2.inRange(hsv, lower_color, upper_color)
    res = cv2.bitwise_and(frame, frame, mask=mask)

    contours, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
    cv2.drawContours(frame, contours, -1, (200, 255, 0), 4)

    if len(contours) > 0:
        # Typo fix in the overlay text: "Relavante" -> "Relevant".
        cv2.putText(mask, 'Relevant Object Detected', (100, 300), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv2.LINE_AA)

    cv2.imshow('frame', frame)
    cv2.imshow('mask', mask)
    cv2.imshow('res', res)

    k = cv2.waitKey(5) & 0xFF
    if k == 27:  # ESC quits
        break

cap.release()
cv2.destroyAllWindows()
| 30.833333
| 128
| 0.658559
|
import numpy as np
import cv2
cap = cv2.VideoCapture('./assets/video.mp4')
while (True):
_, frame = cap.read()
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
lower_color = np.array([150, 150, 50])
upper_color = np.array([180, 255, 150])
mask = cv2.inRange(hsv, lower_color, upper_color)
res = cv2.bitwise_and(frame, frame, mask = mask)
contours, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
cv2.drawContours(frame, contours, -1, (200, 255, 0), 4)
if len(contours) > 0:
cv2.putText(mask, 'Relavante Object Detected', (100, 300), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv2.LINE_AA)
cv2.imshow('frame', frame)
cv2.imshow('mask', mask)
cv2.imshow('res', res)
k = cv2.waitKey(5) & 0xFF
if k == 27:
break
cap.release()
cv2.destroyAllWindows()
| true
| true
|
790a4fef92fb47fe2079920afbb4d0daaeb27d2b
| 6,563
|
py
|
Python
|
nova/tests/utils.py
|
bopopescu/nova-35
|
c32c01e08dccad921b4af6fc03d971d6e74c990f
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/utils.py
|
bopopescu/nova-35
|
c32c01e08dccad921b4af6fc03d971d6e74c990f
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/utils.py
|
bopopescu/nova-35
|
c32c01e08dccad921b4af6fc03d971d6e74c990f
|
[
"Apache-2.0"
] | 1
|
2020-07-24T09:13:22.000Z
|
2020-07-24T09:13:22.000Z
|
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import errno
import platform
import socket
import sys
from oslo.config import cfg
from nova.compute import flavors
import nova.context
import nova.db
from nova import exception
from nova.image import glance
from nova.network import minidns
from nova.network import model as network_model
from nova.objects import instance as instance_obj
CONF = cfg.CONF
CONF.import_opt('use_ipv6', 'nova.netconf')
def get_test_admin_context():
    """Return an admin context (via nova.context.get_admin_context) for tests."""
    return nova.context.get_admin_context()
def get_test_image_info(context, instance_ref):
if not context:
context = get_test_admin_context()
image_ref = instance_ref['image_ref']
image_service, image_id = glance.get_remote_image_service(context,
image_ref)
return image_service.show(context, image_id)
def get_test_flavor(context=None, options=None):
options = options or {}
if not context:
context = get_test_admin_context()
test_flavor = {'name': 'kinda.big',
'flavorid': 'someid',
'memory_mb': 2048,
'vcpus': 4,
'root_gb': 40,
'ephemeral_gb': 80,
'swap': 1024}
test_flavor.update(options)
try:
flavor_ref = nova.db.flavor_create(context, test_flavor)
except (exception.FlavorExists, exception.FlavorIdExists):
flavor_ref = nova.db.flavor_get_by_name(context, 'kinda.big')
return flavor_ref
def get_test_instance(context=None, flavor=None, obj=False):
if not context:
context = get_test_admin_context()
if not flavor:
flavor = get_test_flavor(context)
metadata = {}
flavors.save_flavor_info(metadata, flavor, '')
test_instance = {'memory_kb': '2048000',
'basepath': '/some/path',
'bridge_name': 'br100',
'vcpus': 4,
'root_gb': 40,
'project_id': 'fake',
'bridge': 'br101',
'image_ref': 'cedef40a-ed67-4d10-800e-17455edce175',
'instance_type_id': '5',
'system_metadata': metadata,
'extra_specs': {}}
if obj:
instance = instance_obj.Instance(context, **test_instance)
instance.create()
else:
instance = nova.db.instance_create(context, test_instance)
return instance
def get_test_network_info(count=1):
ipv6 = CONF.use_ipv6
fake = 'fake'
fake_ip = '0.0.0.0'
fake_netmask = '255.255.255.255'
fake_vlan = 100
fake_bridge_interface = 'eth0'
def current():
subnet_4 = network_model.Subnet(cidr=fake_ip,
dns=[network_model.IP(fake_ip),
network_model.IP(fake_ip)],
gateway=network_model.IP(fake_ip),
ips=[network_model.IP(fake_ip),
network_model.IP(fake_ip)],
routes=None,
dhcp_server=fake_ip)
subnet_6 = network_model.Subnet(cidr=fake_ip,
gateway=network_model.IP(fake_ip),
ips=[network_model.IP(fake_ip),
network_model.IP(fake_ip),
network_model.IP(fake_ip)],
routes=None,
version=6)
subnets = [subnet_4]
if ipv6:
subnets.append(subnet_6)
network = network_model.Network(id=None,
bridge=fake,
label=None,
subnets=subnets,
vlan=fake_vlan,
bridge_interface=fake_bridge_interface,
injected=False)
vif = network_model.VIF(id='vif-xxx-yyy-zzz',
address=fake,
network=network,
type=network_model.VIF_TYPE_BRIDGE,
devname=None,
ovs_interfaceid=None)
return vif
return network_model.NetworkInfo([current() for x in xrange(0, count)])
def is_osx():
    """Return True when running on macOS (mac_ver reports a version there)."""
    version, _, _ = platform.mac_ver()
    return version != ''
# Registry of MiniDNS instances created by dns_manager(), so that
# cleanup_dns_managers() can delete their backing files afterwards.
test_dns_managers = []


def dns_manager():
    """Create a MiniDNS manager and register it for later cleanup."""
    global test_dns_managers
    manager = minidns.MiniDNS()
    test_dns_managers.append(manager)
    return manager


def cleanup_dns_managers():
    """Delete the DNS file of every registered manager and reset the registry."""
    global test_dns_managers
    for manager in test_dns_managers:
        manager.delete_dns_file()
    test_dns_managers = []
def killer_xml_body():
    # "Billion laughs"-style payload: entity d expands to 9999 references to
    # c, each of which expands further (c -> 10*b, b -> 10*a, a -> 'A'*10),
    # so a naive XML parser explodes this to hundreds of megabytes. Used to
    # test that parsers enforce entity-expansion limits.
    return (("""<!DOCTYPE x [
            <!ENTITY a "%(a)s">
            <!ENTITY b "%(b)s">
            <!ENTITY c "%(c)s">]>
        <foo>
            <bar>
                <v1>%(d)s</v1>
            </bar>
        </foo>""") % {
        'a': 'A' * 10,
        'b': '&a;' * 10,
        'c': '&b;' * 10,
        'd': '&c;' * 9999,
    }).strip()
def is_ipv6_supported():
    """Best-effort check that the host can actually open IPv6 sockets."""
    # socket.has_ipv6 only says Python was *built* with IPv6 support.
    has_ipv6_support = socket.has_ipv6
    try:
        # Creating a socket verifies the running kernel/libc also support it.
        s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
        s.close()
    except socket.error as e:
        if e.errno == errno.EAFNOSUPPORT:
            has_ipv6_support = False
        else:
            # Any other socket error is unexpected: propagate it.
            raise

    # check if there is at least one interface with ipv6
    if has_ipv6_support and sys.platform.startswith('linux'):
        try:
            # An empty /proc/net/if_inet6 means no interface has an IPv6 address.
            with open('/proc/net/if_inet6') as f:
                if not f.read():
                    has_ipv6_support = False
        except IOError:
            has_ipv6_support = False

    return has_ipv6_support
| 31.859223
| 79
| 0.534359
|
import errno
import platform
import socket
import sys
from oslo.config import cfg
from nova.compute import flavors
import nova.context
import nova.db
from nova import exception
from nova.image import glance
from nova.network import minidns
from nova.network import model as network_model
from nova.objects import instance as instance_obj
CONF = cfg.CONF
CONF.import_opt('use_ipv6', 'nova.netconf')
def get_test_admin_context():
return nova.context.get_admin_context()
def get_test_image_info(context, instance_ref):
if not context:
context = get_test_admin_context()
image_ref = instance_ref['image_ref']
image_service, image_id = glance.get_remote_image_service(context,
image_ref)
return image_service.show(context, image_id)
def get_test_flavor(context=None, options=None):
options = options or {}
if not context:
context = get_test_admin_context()
test_flavor = {'name': 'kinda.big',
'flavorid': 'someid',
'memory_mb': 2048,
'vcpus': 4,
'root_gb': 40,
'ephemeral_gb': 80,
'swap': 1024}
test_flavor.update(options)
try:
flavor_ref = nova.db.flavor_create(context, test_flavor)
except (exception.FlavorExists, exception.FlavorIdExists):
flavor_ref = nova.db.flavor_get_by_name(context, 'kinda.big')
return flavor_ref
def get_test_instance(context=None, flavor=None, obj=False):
if not context:
context = get_test_admin_context()
if not flavor:
flavor = get_test_flavor(context)
metadata = {}
flavors.save_flavor_info(metadata, flavor, '')
test_instance = {'memory_kb': '2048000',
'basepath': '/some/path',
'bridge_name': 'br100',
'vcpus': 4,
'root_gb': 40,
'project_id': 'fake',
'bridge': 'br101',
'image_ref': 'cedef40a-ed67-4d10-800e-17455edce175',
'instance_type_id': '5',
'system_metadata': metadata,
'extra_specs': {}}
if obj:
instance = instance_obj.Instance(context, **test_instance)
instance.create()
else:
instance = nova.db.instance_create(context, test_instance)
return instance
def get_test_network_info(count=1):
ipv6 = CONF.use_ipv6
fake = 'fake'
fake_ip = '0.0.0.0'
fake_netmask = '255.255.255.255'
fake_vlan = 100
fake_bridge_interface = 'eth0'
def current():
subnet_4 = network_model.Subnet(cidr=fake_ip,
dns=[network_model.IP(fake_ip),
network_model.IP(fake_ip)],
gateway=network_model.IP(fake_ip),
ips=[network_model.IP(fake_ip),
network_model.IP(fake_ip)],
routes=None,
dhcp_server=fake_ip)
subnet_6 = network_model.Subnet(cidr=fake_ip,
gateway=network_model.IP(fake_ip),
ips=[network_model.IP(fake_ip),
network_model.IP(fake_ip),
network_model.IP(fake_ip)],
routes=None,
version=6)
subnets = [subnet_4]
if ipv6:
subnets.append(subnet_6)
network = network_model.Network(id=None,
bridge=fake,
label=None,
subnets=subnets,
vlan=fake_vlan,
bridge_interface=fake_bridge_interface,
injected=False)
vif = network_model.VIF(id='vif-xxx-yyy-zzz',
address=fake,
network=network,
type=network_model.VIF_TYPE_BRIDGE,
devname=None,
ovs_interfaceid=None)
return vif
return network_model.NetworkInfo([current() for x in xrange(0, count)])
def is_osx():
return platform.mac_ver()[0] != ''
test_dns_managers = []
def dns_manager():
global test_dns_managers
manager = minidns.MiniDNS()
test_dns_managers.append(manager)
return manager
def cleanup_dns_managers():
global test_dns_managers
for manager in test_dns_managers:
manager.delete_dns_file()
test_dns_managers = []
def killer_xml_body():
return (("""<!DOCTYPE x [
<!ENTITY a "%(a)s">
<!ENTITY b "%(b)s">
<!ENTITY c "%(c)s">]>
<foo>
<bar>
<v1>%(d)s</v1>
</bar>
</foo>""") % {
'a': 'A' * 10,
'b': '&a;' * 10,
'c': '&b;' * 10,
'd': '&c;' * 9999,
}).strip()
def is_ipv6_supported():
has_ipv6_support = socket.has_ipv6
try:
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
s.close()
except socket.error as e:
if e.errno == errno.EAFNOSUPPORT:
has_ipv6_support = False
else:
raise
if has_ipv6_support and sys.platform.startswith('linux'):
try:
with open('/proc/net/if_inet6') as f:
if not f.read():
has_ipv6_support = False
except IOError:
has_ipv6_support = False
return has_ipv6_support
| true
| true
|
790a5079a589c041aece3672f3d4adbfb0bf86b8
| 9,955
|
py
|
Python
|
modules/__init__.py
|
martinhoang11/vietnamese-ocr-toolbox
|
524b4908bedceb0c87b2c7cd7b5e3f6e1126ace5
|
[
"Apache-2.0"
] | 14
|
2021-09-05T10:42:14.000Z
|
2022-03-10T16:27:26.000Z
|
modules/__init__.py
|
martinhoang11/vietnamese-ocr-toolbox
|
524b4908bedceb0c87b2c7cd7b5e3f6e1126ace5
|
[
"Apache-2.0"
] | 1
|
2021-06-16T11:35:24.000Z
|
2021-06-16T11:35:24.000Z
|
modules/__init__.py
|
martinhoang11/vietnamese-ocr-toolbox
|
524b4908bedceb0c87b2c7cd7b5e3f6e1126ace5
|
[
"Apache-2.0"
] | 5
|
2021-09-05T13:26:51.000Z
|
2022-03-09T07:49:45.000Z
|
import os
import cv2
import shutil
import argparse
import torch
import numpy as np
import pandas as pd
from PIL import Image
import matplotlib.pyplot as plt
import matplotlib
from .preprocess import DocScanner
import modules.detection as detection
import modules.ocr as ocr
import modules.retrieval as retrieval
import modules.correction as correction
from tool.config import Config
from tool.utils import download_pretrained_weights
CACHE_DIR = '.cache'
class Preprocess:
def __init__(
self,
find_best_rotation=True,
det_model=None,
ocr_model=None):
self.find_best_rotation = find_best_rotation
if self.find_best_rotation:
self.crop_path = os.path.join(CACHE_DIR, 'crops')
if os.path.exists(self.crop_path):
shutil.rmtree(self.crop_path)
os.mkdir(self.crop_path)
self.det_model = det_model if det_model is not None else Detection()
self.ocr_model = ocr_model if ocr_model is not None else OCR()
self.scanner = DocScanner()
def __call__(self, image, return_score=False):
output = self.scanner.scan(image)
if self.find_best_rotation:
_ = self.det_model(
output,
crop_region=True,
return_result=False,
output_path=CACHE_DIR)
orientation_scores = np.array([0.,0.,0.,0.])
num_crops = len(os.listdir(self.crop_path))
for i in range(num_crops):
single_crop_path = os.path.join(self.crop_path, f'{i}.jpg')
if not os.path.isfile(single_crop_path):
continue
img = cv2.imread(single_crop_path)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
orientation_scores += ocr.find_rotation_score(img, self.ocr_model)
best_orient = np.argmax(orientation_scores)
print(f"Rotate image by {best_orient*90} degrees")
# Rotate the original image
output = ocr.rotate_img(output, best_orient)
if return_score:
return output, orientation_scores
else:
return output
class Detection:
def __init__(self, config_path=None, weight_path=None, model_name=None):
if config_path is None:
config_path = 'tool/config/detection/configs.yaml'
config = Config(config_path)
self.model_name = model_name
if weight_path is None:
if self.model_name is None:
self.model_name = "pan_resnet18_default"
tmp_path = os.path.join(CACHE_DIR, f'{self.model_name}.pth')
download_pretrained_weights(self.model_name, cached=tmp_path)
weight_path = tmp_path
self.model = detection.PAN(config, model_path=weight_path)
def __call__(
self,
image,
crop_region=False,
return_result=False,
output_path=None):
"""
Input: path to image
Output: boxes (coordinates of 4 points)
"""
if output_path is None:
assert crop_region, "Please specify output_path"
else:
output_path = os.path.join(output_path, 'crops')
if os.path.exists(output_path):
shutil.rmtree(output_path)
os.mkdir(output_path)
# Detect and OCR for final result
_, boxes_list, _ = self.model.predict(
image,
output_path,
crop_region=crop_region)
if return_result:
img = detection.draw_bbox(image, boxes_list)
if return_result:
return boxes_list, img
else:
return boxes_list
class OCR:
def __init__(self, config_path=None, weight_path=None, model_name=None):
if config_path is None:
config_path = 'tool/config/ocr/configs.yaml'
config = Config(config_path)
ocr_config = ocr.Config.load_config_from_name(config.model_name)
ocr_config['cnn']['pretrained']=False
ocr_config['device'] = 'cuda:0'
ocr_config['predictor']['beamsearch']=False
self.model_name = model_name
if weight_path is None:
if self.model_name is None:
self.model_name = "transformerocr_default_vgg"
tmp_path = os.path.join(CACHE_DIR, f'{self.model_name}.pth')
download_pretrained_weights(self.model_name, cached=tmp_path)
weight_path = tmp_path
ocr_config['weights'] = weight_path
self.model = ocr.Predictor(ocr_config)
def __call__(self, img, return_prob=False):
if isinstance(img, np.ndarray):
img = Image.fromarray(img)
return self.model.predict(img, return_prob)
def predict_folder(self, img_paths, return_probs=False):
texts = []
if return_probs:
probs = []
for i, img_path in enumerate(img_paths):
img = Image.open(img_path)
if return_probs:
text, prob = self(img, True)
texts.append(text)
probs.append(prob)
else:
text = self(img, False)
texts.append(text)
if return_probs:
return texts, probs
else:
return texts
class Retrieval:
def __init__(self, class_mapping, dictionary=None, mode="all", bert_weight=None):
assert mode in ["all", "bert", "trie", "ed"], "Mode is not supported"
self.mode = mode
self.dictionary = dictionary
self.class_mapping = class_mapping
self.idx_mapping = {v:k for k,v in class_mapping.items()}
if self.mode == 'bert':
self.use_bert = True
if self.mode == 'trie':
self.use_trie = True
if self.mode == 'ed':
self.use_ed = True
if self.mode == 'all':
self.use_bert = True
self.use_trie = True
self.use_ed = True
if self.use_bert:
self.bert = retrieval.PhoBERT(self.idx_mapping, bert_weight)
if self.use_ed:
self.ed = retrieval.get_heuristic_retrieval('diff')
if self.use_trie:
self.trie = retrieval.get_heuristic_retrieval('trie')
if self.use_ed or self.use_trie:
if self.dictionary is None:
self.dictionary = {}
df = pd.read_csv('./modules/retrieval/heuristic/custom-dictionary.csv')
for id, row in df.iterrows():
self.dictionary[row.text.lower()] = row.lbl
def ensemble(self, df):
preds = []
probs = []
for id, row in df.iterrows():
if row["timestamp"] == 1:
preds.append("TIMESTAMP")
probs.append(5.0)
elif row["bert_labels"] == row["diff_labels"]:
preds.append(row["bert_labels"])
probs.append(row["bert_probs"] + row["diff_probs"])
elif row["bert_labels"] == row["trie_labels"]:
preds.append(row["bert_labels"])
probs.append(row["bert_probs"] + row["trie_probs"])
elif row["trie_labels"] == row["diff_labels"]:
preds.append(row["trie_labels"])
probs.append(row["trie_probs"] + row["diff_probs"])
else:
if row["diff_probs"] >= 0.4:
preds.append(row["diff_labels"])
probs.append(row["diff_probs"])
elif row["trie_probs"] >= 0.25:
preds.append(row["trie_labels"])
probs.append(row["trie_probs"])
else:
preds.append(row["bert_labels"])
probs.append(row["bert_probs"]/3)
return preds, probs
def __call__(self, query_texts):
df = pd.DataFrame()
if self.use_bert:
preds, probs = self.bert(query_texts)
df["bert_labels"] = preds
df["bert_probs"] = probs
if self.use_ed:
preds, probs = self.ed(query_texts, self.dictionary)
df["diff_labels"] = [self.idx_mapping[x] for x in preds]
df["diff_probs"] = probs
if self.use_trie:
preds, probs = self.trie(query_texts, self.dictionary)
df["trie_labels"] = [self.idx_mapping[x] for x in preds]
df["trie_probs"] = probs
timestamps = retrieval.regex_timestamp(query_texts)
df["timestamp"] = timestamps
preds, probs = self.ensemble(df)
return preds, probs
class Correction:
def __init__(self, dictionary=None, mode="ed"):
assert mode in ["trie", "ed"], "Mode is not supported"
self.mode = mode
self.dictionary = dictionary
self.use_trie = False
self.use_ed = False
if self.mode == 'trie':
self.use_trie = True
if self.mode == 'ed':
self.use_ed = True
if self.use_ed:
self.ed = correction.get_heuristic_correction('diff')
if self.use_trie:
self.trie = correction.get_heuristic_correction('trie')
if self.use_ed or self.use_trie:
if self.dictionary is None:
self.dictionary = {}
df = pd.read_csv('./modules/retrieval/heuristic/custom-dictionary.csv')
for id, row in df.iterrows():
self.dictionary[row.text.lower()] = row.lbl
def __call__(self, query_texts, return_score=False):
if self.use_ed:
preds, score = self.ed(query_texts, self.dictionary)
if self.use_trie:
preds, score = self.trie(query_texts, self.dictionary)
if return_score:
return preds, score
else:
return preds
| 34.686411
| 87
| 0.571974
|
import os
import cv2
import shutil
import argparse
import torch
import numpy as np
import pandas as pd
from PIL import Image
import matplotlib.pyplot as plt
import matplotlib
from .preprocess import DocScanner
import modules.detection as detection
import modules.ocr as ocr
import modules.retrieval as retrieval
import modules.correction as correction
from tool.config import Config
from tool.utils import download_pretrained_weights
CACHE_DIR = '.cache'
class Preprocess:
def __init__(
self,
find_best_rotation=True,
det_model=None,
ocr_model=None):
self.find_best_rotation = find_best_rotation
if self.find_best_rotation:
self.crop_path = os.path.join(CACHE_DIR, 'crops')
if os.path.exists(self.crop_path):
shutil.rmtree(self.crop_path)
os.mkdir(self.crop_path)
self.det_model = det_model if det_model is not None else Detection()
self.ocr_model = ocr_model if ocr_model is not None else OCR()
self.scanner = DocScanner()
def __call__(self, image, return_score=False):
output = self.scanner.scan(image)
if self.find_best_rotation:
_ = self.det_model(
output,
crop_region=True,
return_result=False,
output_path=CACHE_DIR)
orientation_scores = np.array([0.,0.,0.,0.])
num_crops = len(os.listdir(self.crop_path))
for i in range(num_crops):
single_crop_path = os.path.join(self.crop_path, f'{i}.jpg')
if not os.path.isfile(single_crop_path):
continue
img = cv2.imread(single_crop_path)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
orientation_scores += ocr.find_rotation_score(img, self.ocr_model)
best_orient = np.argmax(orientation_scores)
print(f"Rotate image by {best_orient*90} degrees")
output = ocr.rotate_img(output, best_orient)
if return_score:
return output, orientation_scores
else:
return output
class Detection:
def __init__(self, config_path=None, weight_path=None, model_name=None):
if config_path is None:
config_path = 'tool/config/detection/configs.yaml'
config = Config(config_path)
self.model_name = model_name
if weight_path is None:
if self.model_name is None:
self.model_name = "pan_resnet18_default"
tmp_path = os.path.join(CACHE_DIR, f'{self.model_name}.pth')
download_pretrained_weights(self.model_name, cached=tmp_path)
weight_path = tmp_path
self.model = detection.PAN(config, model_path=weight_path)
def __call__(
self,
image,
crop_region=False,
return_result=False,
output_path=None):
if output_path is None:
assert crop_region, "Please specify output_path"
else:
output_path = os.path.join(output_path, 'crops')
if os.path.exists(output_path):
shutil.rmtree(output_path)
os.mkdir(output_path)
_, boxes_list, _ = self.model.predict(
image,
output_path,
crop_region=crop_region)
if return_result:
img = detection.draw_bbox(image, boxes_list)
if return_result:
return boxes_list, img
else:
return boxes_list
class OCR:
def __init__(self, config_path=None, weight_path=None, model_name=None):
if config_path is None:
config_path = 'tool/config/ocr/configs.yaml'
config = Config(config_path)
ocr_config = ocr.Config.load_config_from_name(config.model_name)
ocr_config['cnn']['pretrained']=False
ocr_config['device'] = 'cuda:0'
ocr_config['predictor']['beamsearch']=False
self.model_name = model_name
if weight_path is None:
if self.model_name is None:
self.model_name = "transformerocr_default_vgg"
tmp_path = os.path.join(CACHE_DIR, f'{self.model_name}.pth')
download_pretrained_weights(self.model_name, cached=tmp_path)
weight_path = tmp_path
ocr_config['weights'] = weight_path
self.model = ocr.Predictor(ocr_config)
def __call__(self, img, return_prob=False):
if isinstance(img, np.ndarray):
img = Image.fromarray(img)
return self.model.predict(img, return_prob)
def predict_folder(self, img_paths, return_probs=False):
texts = []
if return_probs:
probs = []
for i, img_path in enumerate(img_paths):
img = Image.open(img_path)
if return_probs:
text, prob = self(img, True)
texts.append(text)
probs.append(prob)
else:
text = self(img, False)
texts.append(text)
if return_probs:
return texts, probs
else:
return texts
class Retrieval:
def __init__(self, class_mapping, dictionary=None, mode="all", bert_weight=None):
assert mode in ["all", "bert", "trie", "ed"], "Mode is not supported"
self.mode = mode
self.dictionary = dictionary
self.class_mapping = class_mapping
self.idx_mapping = {v:k for k,v in class_mapping.items()}
if self.mode == 'bert':
self.use_bert = True
if self.mode == 'trie':
self.use_trie = True
if self.mode == 'ed':
self.use_ed = True
if self.mode == 'all':
self.use_bert = True
self.use_trie = True
self.use_ed = True
if self.use_bert:
self.bert = retrieval.PhoBERT(self.idx_mapping, bert_weight)
if self.use_ed:
self.ed = retrieval.get_heuristic_retrieval('diff')
if self.use_trie:
self.trie = retrieval.get_heuristic_retrieval('trie')
if self.use_ed or self.use_trie:
if self.dictionary is None:
self.dictionary = {}
df = pd.read_csv('./modules/retrieval/heuristic/custom-dictionary.csv')
for id, row in df.iterrows():
self.dictionary[row.text.lower()] = row.lbl
def ensemble(self, df):
preds = []
probs = []
for id, row in df.iterrows():
if row["timestamp"] == 1:
preds.append("TIMESTAMP")
probs.append(5.0)
elif row["bert_labels"] == row["diff_labels"]:
preds.append(row["bert_labels"])
probs.append(row["bert_probs"] + row["diff_probs"])
elif row["bert_labels"] == row["trie_labels"]:
preds.append(row["bert_labels"])
probs.append(row["bert_probs"] + row["trie_probs"])
elif row["trie_labels"] == row["diff_labels"]:
preds.append(row["trie_labels"])
probs.append(row["trie_probs"] + row["diff_probs"])
else:
if row["diff_probs"] >= 0.4:
preds.append(row["diff_labels"])
probs.append(row["diff_probs"])
elif row["trie_probs"] >= 0.25:
preds.append(row["trie_labels"])
probs.append(row["trie_probs"])
else:
preds.append(row["bert_labels"])
probs.append(row["bert_probs"]/3)
return preds, probs
def __call__(self, query_texts):
df = pd.DataFrame()
if self.use_bert:
preds, probs = self.bert(query_texts)
df["bert_labels"] = preds
df["bert_probs"] = probs
if self.use_ed:
preds, probs = self.ed(query_texts, self.dictionary)
df["diff_labels"] = [self.idx_mapping[x] for x in preds]
df["diff_probs"] = probs
if self.use_trie:
preds, probs = self.trie(query_texts, self.dictionary)
df["trie_labels"] = [self.idx_mapping[x] for x in preds]
df["trie_probs"] = probs
timestamps = retrieval.regex_timestamp(query_texts)
df["timestamp"] = timestamps
preds, probs = self.ensemble(df)
return preds, probs
class Correction:
def __init__(self, dictionary=None, mode="ed"):
assert mode in ["trie", "ed"], "Mode is not supported"
self.mode = mode
self.dictionary = dictionary
self.use_trie = False
self.use_ed = False
if self.mode == 'trie':
self.use_trie = True
if self.mode == 'ed':
self.use_ed = True
if self.use_ed:
self.ed = correction.get_heuristic_correction('diff')
if self.use_trie:
self.trie = correction.get_heuristic_correction('trie')
if self.use_ed or self.use_trie:
if self.dictionary is None:
self.dictionary = {}
df = pd.read_csv('./modules/retrieval/heuristic/custom-dictionary.csv')
for id, row in df.iterrows():
self.dictionary[row.text.lower()] = row.lbl
def __call__(self, query_texts, return_score=False):
if self.use_ed:
preds, score = self.ed(query_texts, self.dictionary)
if self.use_trie:
preds, score = self.trie(query_texts, self.dictionary)
if return_score:
return preds, score
else:
return preds
| true
| true
|
790a50b15268507121d2f017db89e93e56561b9b
| 634
|
py
|
Python
|
venv/lib/python3.6/site-packages/kappa/__init__.py
|
mrsaicharan1/Alexa-BestBuy
|
a22a0157b90d29b946d0f020e5f76744f73a6bff
|
[
"Apache-2.0"
] | 6
|
2018-05-31T04:40:53.000Z
|
2022-02-18T18:59:27.000Z
|
venv/lib/python3.6/site-packages/kappa/__init__.py
|
mrsaicharan1/iiita-updates
|
a22a0157b90d29b946d0f020e5f76744f73a6bff
|
[
"Apache-2.0"
] | 6
|
2020-09-05T01:40:23.000Z
|
2022-03-12T00:40:58.000Z
|
venv/lib/python3.6/site-packages/kappa/__init__.py
|
mrsaicharan1/iiita-updates
|
a22a0157b90d29b946d0f020e5f76744f73a6bff
|
[
"Apache-2.0"
] | 2
|
2019-04-29T14:16:10.000Z
|
2020-07-23T12:04:17.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014, 2015 Mitch Garnaat
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.6.0'
| 37.294118
| 74
| 0.741325
|
__version__ = '0.6.0'
| true
| true
|
790a530bf4f20c9f3454fb0204cdee95b537d3f5
| 1,592
|
py
|
Python
|
checkov/kubernetes/checks/resource/k8s/KubeletCryptographicCiphers.py
|
vangundy-jason-pfg/checkov
|
2fb50908f62390c98dda665f1fa94fe24806b654
|
[
"Apache-2.0"
] | null | null | null |
checkov/kubernetes/checks/resource/k8s/KubeletCryptographicCiphers.py
|
vangundy-jason-pfg/checkov
|
2fb50908f62390c98dda665f1fa94fe24806b654
|
[
"Apache-2.0"
] | null | null | null |
checkov/kubernetes/checks/resource/k8s/KubeletCryptographicCiphers.py
|
vangundy-jason-pfg/checkov
|
2fb50908f62390c98dda665f1fa94fe24806b654
|
[
"Apache-2.0"
] | null | null | null |
from checkov.common.models.enums import CheckCategories, CheckResult
from checkov.kubernetes.checks.resource.base_spec_check import BaseK8Check
strongCiphers = ["TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256","TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256","TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305","TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384","TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305","TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384","TLS_RSA_WITH_AES_256_GCM_SHA384","TLS_RSA_WITH_AES_128_GCM_SHA256"]
class KubeletCryptographicCiphers(BaseK8Check):
def __init__(self):
# CIS-1.6 4.2.13
id = "CKV_K8S_151"
name = "Ensure that the Kubelet only makes use of Strong Cryptographic Ciphers"
categories = [CheckCategories.KUBERNETES]
supported_entities = ['containers']
super().__init__(name=name, id=id, categories=categories, supported_entities=supported_entities)
def get_resource_id(self, conf):
return f'{conf["parent"]} - {conf["name"]}' if conf.get('name') else conf["parent"]
def scan_spec_conf(self, conf):
if "command" in conf:
if "kubelet" in conf["command"]:
for command in conf["command"]:
if command.startswith("--tls-cipher-suites"):
value = command.split("=")[1]
ciphers = value.split(",")
for cipher in ciphers:
if cipher not in strongCiphers:
return CheckResult.FAILED
return CheckResult.PASSED
check = KubeletCryptographicCiphers()
| 49.75
| 329
| 0.672111
|
from checkov.common.models.enums import CheckCategories, CheckResult
from checkov.kubernetes.checks.resource.base_spec_check import BaseK8Check
strongCiphers = ["TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256","TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256","TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305","TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384","TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305","TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384","TLS_RSA_WITH_AES_256_GCM_SHA384","TLS_RSA_WITH_AES_128_GCM_SHA256"]
class KubeletCryptographicCiphers(BaseK8Check):
def __init__(self):
id = "CKV_K8S_151"
name = "Ensure that the Kubelet only makes use of Strong Cryptographic Ciphers"
categories = [CheckCategories.KUBERNETES]
supported_entities = ['containers']
super().__init__(name=name, id=id, categories=categories, supported_entities=supported_entities)
def get_resource_id(self, conf):
return f'{conf["parent"]} - {conf["name"]}' if conf.get('name') else conf["parent"]
def scan_spec_conf(self, conf):
if "command" in conf:
if "kubelet" in conf["command"]:
for command in conf["command"]:
if command.startswith("--tls-cipher-suites"):
value = command.split("=")[1]
ciphers = value.split(",")
for cipher in ciphers:
if cipher not in strongCiphers:
return CheckResult.FAILED
return CheckResult.PASSED
check = KubeletCryptographicCiphers()
| true
| true
|
790a531edb2049e0687fb2d5392df7cfd4396fec
| 5,404
|
py
|
Python
|
mapclientplugins/filechooserstep/configuredialog.py
|
mapclient-plugins/mapclientplugins.filechooserstep
|
6e46e4c832833e929b1b3f17b94877093e996c3b
|
[
"Apache-2.0"
] | null | null | null |
mapclientplugins/filechooserstep/configuredialog.py
|
mapclient-plugins/mapclientplugins.filechooserstep
|
6e46e4c832833e929b1b3f17b94877093e996c3b
|
[
"Apache-2.0"
] | null | null | null |
mapclientplugins/filechooserstep/configuredialog.py
|
mapclient-plugins/mapclientplugins.filechooserstep
|
6e46e4c832833e929b1b3f17b94877093e996c3b
|
[
"Apache-2.0"
] | 1
|
2021-11-05T01:04:22.000Z
|
2021-11-05T01:04:22.000Z
|
import os
from PySide2 import QtWidgets
from mapclientplugins.filechooserstep.ui_configuredialog import Ui_ConfigureDialog
INVALID_STYLE_SHEET = 'background-color: rgba(239, 0, 0, 50)'
DEFAULT_STYLE_SHEET = ''
class ConfigureDialog(QtWidgets.QDialog):
"""
Configure dialog to present the user with the options to configure this step.
"""
def __init__(self, parent=None):
QtWidgets.QDialog.__init__(self, parent)
self._ui = Ui_ConfigureDialog()
self._ui.setupUi(self)
self._workflow_location = None
# Keep track of the previous identifier so that we can track changes
# and know how many occurrences of the current identifier there should
# be.
self._previousIdentifier = ''
# Set a place holder for a callable that will get set from the step.
# We will use this method to decide whether the identifier is unique.
self.identifierOccursCount = None
self._previousLocation = ''
self._makeConnections()
def _makeConnections(self):
self._ui.lineEdit0.textChanged.connect(self.validate)
self._ui.lineEditFileLocation.textChanged.connect(self.validate)
self._ui.pushButtonFileChooser.clicked.connect(self._fileChooserClicked)
def _fileChooserClicked(self):
# Second parameter returned is the filter chosen
location, _ = QtWidgets.QFileDialog.getOpenFileName(self, 'Select File Location', self._previousLocation)
if location:
self._previousLocation = location
display_location = self._output_location(location)
self._ui.lineEditFileLocation.setText(display_location)
def _output_location(self, location=None):
if location is None:
display_path = self._ui.lineEditFileLocation.text()
else:
display_path = location
if self._workflow_location and os.path.isabs(display_path):
display_path = os.path.relpath(display_path, self._workflow_location)
return display_path
def setWorkflowLocation(self, location):
self._workflow_location = location
def accept(self):
"""
Override the accept method so that we can confirm saving an
invalid configuration.
"""
result = QtWidgets.QMessageBox.Yes
if not self.validate():
result = QtWidgets.QMessageBox.warning(self, 'Invalid Configuration',
'This configuration is invalid. '
' Unpredictable behaviour may result if you choose \'Yes\','
' are you sure you want to save this configuration?)',
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
QtWidgets.QMessageBox.No)
if result == QtWidgets.QMessageBox.Yes:
QtWidgets.QDialog.accept(self)
def validate(self):
"""
Validate the configuration dialog fields. For any field that is not valid
set the style sheet to the INVALID_STYLE_SHEET. Return the outcome of the
overall validity of the configuration.
"""
# Determine if the current identifier is unique throughout the workflow
# The identifierOccursCount method is part of the interface to the workflow framework.
value = self.identifierOccursCount(self._ui.lineEdit0.text())
valid = (value == 0) or (value == 1 and self._previousIdentifier == self._ui.lineEdit0.text())
self._ui.lineEdit0.setStyleSheet(DEFAULT_STYLE_SHEET if valid else INVALID_STYLE_SHEET)
non_empty = len(self._ui.lineEditFileLocation.text())
file_path = self._output_location()
if self._workflow_location:
file_path = os.path.join(self._workflow_location, file_path)
location_valid = non_empty and os.path.isfile(file_path)
self._ui.lineEditFileLocation.setStyleSheet(DEFAULT_STYLE_SHEET if location_valid else INVALID_STYLE_SHEET)
return valid and location_valid
def getConfig(self):
"""
Get the current value of the configuration from the dialog. Also
set the _previousIdentifier value so that we can check uniqueness of the
identifier over the whole of the workflow.
"""
self._previousIdentifier = self._ui.lineEdit0.text()
config = {'identifier': self._ui.lineEdit0.text(), 'File': self._output_location()}
if self._previousLocation:
config['previous_location'] = os.path.relpath(self._previousLocation, self._workflow_location)
else:
config['previous_location'] = ''
return config
def setConfig(self, config):
"""
Set the current value of the configuration for the dialog. Also
set the _previousIdentifier value so that we can check uniqueness of the
identifier over the whole of the workflow.
"""
self._previousIdentifier = config['identifier']
self._ui.lineEdit0.setText(config['identifier'])
self._ui.lineEditFileLocation.setText(config['File'])
if 'previous_location' in config:
self._previousLocation = os.path.join(self._workflow_location, config['previous_location'])
| 41.569231
| 115
| 0.65544
|
import os
from PySide2 import QtWidgets
from mapclientplugins.filechooserstep.ui_configuredialog import Ui_ConfigureDialog
INVALID_STYLE_SHEET = 'background-color: rgba(239, 0, 0, 50)'
DEFAULT_STYLE_SHEET = ''
class ConfigureDialog(QtWidgets.QDialog):
def __init__(self, parent=None):
QtWidgets.QDialog.__init__(self, parent)
self._ui = Ui_ConfigureDialog()
self._ui.setupUi(self)
self._workflow_location = None
self._previousIdentifier = ''
self.identifierOccursCount = None
self._previousLocation = ''
self._makeConnections()
def _makeConnections(self):
self._ui.lineEdit0.textChanged.connect(self.validate)
self._ui.lineEditFileLocation.textChanged.connect(self.validate)
self._ui.pushButtonFileChooser.clicked.connect(self._fileChooserClicked)
def _fileChooserClicked(self):
location, _ = QtWidgets.QFileDialog.getOpenFileName(self, 'Select File Location', self._previousLocation)
if location:
self._previousLocation = location
display_location = self._output_location(location)
self._ui.lineEditFileLocation.setText(display_location)
def _output_location(self, location=None):
if location is None:
display_path = self._ui.lineEditFileLocation.text()
else:
display_path = location
if self._workflow_location and os.path.isabs(display_path):
display_path = os.path.relpath(display_path, self._workflow_location)
return display_path
def setWorkflowLocation(self, location):
self._workflow_location = location
def accept(self):
result = QtWidgets.QMessageBox.Yes
if not self.validate():
result = QtWidgets.QMessageBox.warning(self, 'Invalid Configuration',
'This configuration is invalid. '
' Unpredictable behaviour may result if you choose \'Yes\','
' are you sure you want to save this configuration?)',
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
QtWidgets.QMessageBox.No)
if result == QtWidgets.QMessageBox.Yes:
QtWidgets.QDialog.accept(self)
def validate(self):
value = self.identifierOccursCount(self._ui.lineEdit0.text())
valid = (value == 0) or (value == 1 and self._previousIdentifier == self._ui.lineEdit0.text())
self._ui.lineEdit0.setStyleSheet(DEFAULT_STYLE_SHEET if valid else INVALID_STYLE_SHEET)
non_empty = len(self._ui.lineEditFileLocation.text())
file_path = self._output_location()
if self._workflow_location:
file_path = os.path.join(self._workflow_location, file_path)
location_valid = non_empty and os.path.isfile(file_path)
self._ui.lineEditFileLocation.setStyleSheet(DEFAULT_STYLE_SHEET if location_valid else INVALID_STYLE_SHEET)
return valid and location_valid
def getConfig(self):
self._previousIdentifier = self._ui.lineEdit0.text()
config = {'identifier': self._ui.lineEdit0.text(), 'File': self._output_location()}
if self._previousLocation:
config['previous_location'] = os.path.relpath(self._previousLocation, self._workflow_location)
else:
config['previous_location'] = ''
return config
def setConfig(self, config):
self._previousIdentifier = config['identifier']
self._ui.lineEdit0.setText(config['identifier'])
self._ui.lineEditFileLocation.setText(config['File'])
if 'previous_location' in config:
self._previousLocation = os.path.join(self._workflow_location, config['previous_location'])
| true
| true
|
790a54bf14fd4e3080ccf2119e2b6389f1e83c19
| 20,375
|
py
|
Python
|
run_demo.py
|
ybettan/AirSimTensorFlow
|
7c9a17d97a68e99da310f2e537bfb66433056066
|
[
"MIT"
] | null | null | null |
run_demo.py
|
ybettan/AirSimTensorFlow
|
7c9a17d97a68e99da310f2e537bfb66433056066
|
[
"MIT"
] | null | null | null |
run_demo.py
|
ybettan/AirSimTensorFlow
|
7c9a17d97a68e99da310f2e537bfb66433056066
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import pprint
import os
import time
import msgpackrpc
import math
import msgpackrpc #install as admin: pip install msgpack-rpc-python
import msgpack
import sys
import inspect
import types
import re
import shutil
import numpy as np #pip install numpy
#==============================================================================
# Classes
#==============================================================================
class MsgpackMixin:
def to_msgpack(self, *args, **kwargs):
return self.__dict__ #msgpack.dump(self.to_dict(*args, **kwargs))
@classmethod
def from_msgpack(cls, encoded):
obj = cls()
obj.__dict__ = {k.decode('utf-8'): v for k, v in encoded.items()}
return obj
class AirSimImageType:
Scene = 0
DepthPlanner = 1
DepthPerspective = 2
DepthVis = 3
DisparityNormalized = 4
Segmentation = 5
SurfaceNormals = 6
class DrivetrainType:
MaxDegreeOfFreedom = 0
ForwardOnly = 1
class LandedState:
Landed = 0
Flying = 1
class Vector3r(MsgpackMixin):
x_val = np.float32(0)
y_val = np.float32(0)
z_val = np.float32(0)
def __init__(self, x_val = np.float32(0), y_val = np.float32(0), z_val = np.float32(0)):
self.x_val = x_val
self.y_val = y_val
self.z_val = z_val
class Quaternionr(MsgpackMixin):
w_val = np.float32(0)
x_val = np.float32(0)
y_val = np.float32(0)
z_val = np.float32(0)
def __init__(self, x_val = np.float32(0), y_val = np.float32(0), z_val = np.float32(0), w_val = np.float32(1)):
self.x_val = x_val
self.y_val = y_val
self.z_val = z_val
self.w_val = w_val
class Pose(MsgpackMixin):
position = Vector3r()
orientation = Quaternionr()
def __init__(self, position_val, orientation_val):
self.position = position_val
self.orientation = orientation_val
class CollisionInfo(MsgpackMixin):
has_collided = False
normal = Vector3r()
impact_point = Vector3r()
position = Vector3r()
penetration_depth = np.float32(0)
time_stamp = np.float32(0)
object_name = ""
object_id = -1
class GeoPoint(MsgpackMixin):
latitude = 0.0
longitude = 0.0
altitude = 0.0
class YawMode(MsgpackMixin):
is_rate = True
yaw_or_rate = 0.0
def __init__(self, is_rate = True, yaw_or_rate = 0.0):
self.is_rate = is_rate
self.yaw_or_rate = yaw_or_rate
class ImageRequest(MsgpackMixin):
camera_id = np.uint8(0)
image_type = AirSimImageType.Scene
pixels_as_float = False
compress = False
def __init__(self, camera_id, image_type, pixels_as_float = False, compress = True):
self.camera_id = camera_id
self.image_type = image_type
self.pixels_as_float = pixels_as_float
self.compress = compress
class ImageResponse(MsgpackMixin):
image_data_uint8 = np.uint8(0)
image_data_float = np.float32(0)
camera_position = Vector3r()
camera_orientation = Quaternionr()
time_stamp = np.uint64(0)
message = ''
pixels_as_float = np.float32(0)
compress = True
width = 0
height = 0
image_type = AirSimImageType.Scene
class CarControls(MsgpackMixin):
throttle = np.float32(0)
steering = np.float32(0)
brake = np.float32(0)
handbrake = False
is_manual_gear = False
manual_gear = 0
gear_immediate = True
def set_throttle(self, throttle_val, forward):
if (forward):
is_manual_gear = False
manual_gear = 0
throttle = abs(throttle_val)
else:
is_manual_gear = False
manual_gear = -1
throttle = - abs(throttle_val)
class CarState(MsgpackMixin):
speed = np.float32(0)
gear = 0
position = Vector3r()
velocity = Vector3r()
orientation = Quaternionr()
class AirSimClientBase:
def __init__(self, ip, port):
self.client = msgpackrpc.Client(msgpackrpc.Address(ip, port), timeout = 3600)
def ping(self):
return self.client.call('ping')
def reset(self):
self.client.call('reset')
def confirmConnection(self):
print('Waiting for connection: ', end='')
home = self.getHomeGeoPoint()
while ((home.latitude == 0 and home.longitude == 0 and home.altitude == 0) or
math.isnan(home.latitude) or math.isnan(home.longitude) or math.isnan(home.altitude)):
time.sleep(1)
home = self.getHomeGeoPoint()
print('X', end='')
print('')
def getHomeGeoPoint(self):
return GeoPoint.from_msgpack(self.client.call('getHomeGeoPoint'))
# basic flight control
def enableApiControl(self, is_enabled):
return self.client.call('enableApiControl', is_enabled)
def isApiControlEnabled(self):
return self.client.call('isApiControlEnabled')
def simSetSegmentationObjectID(self, mesh_name, object_id, is_name_regex = False):
return self.client.call('simSetSegmentationObjectID', mesh_name, object_id, is_name_regex)
def simGetSegmentationObjectID(self, mesh_name):
return self.client.call('simGetSegmentationObjectID', mesh_name)
# camera control
# simGetImage returns compressed png in array of bytes
# image_type uses one of the AirSimImageType members
def simGetImage(self, camera_id, image_type):
# because this method returns std::vector<uint8>, msgpack decides to encode it as a string unfortunately.
result = self.client.call('simGetImage', camera_id, image_type)
if (result == "" or result == "\0"):
return None
return result
# camera control
# simGetImage returns compressed png in array of bytes
# image_type uses one of the AirSimImageType members
def simGetImages(self, requests):
responses_raw = self.client.call('simGetImages', requests)
return [ImageResponse.from_msgpack(response_raw) for response_raw in responses_raw]
def getCollisionInfo(self):
return CollisionInfo.from_msgpack(self.client.call('getCollisionInfo'))
@staticmethod
def stringToUint8Array(bstr):
return np.fromstring(bstr, np.uint8)
@staticmethod
def stringToFloatArray(bstr):
return np.fromstring(bstr, np.float32)
@staticmethod
def listTo2DFloatArray(flst, width, height):
return np.reshape(np.asarray(flst, np.float32), (height, width))
@staticmethod
def getPfmArray(response):
return AirSimClientBase.listTo2DFloatArray(response.image_data_float, response.width, response.height)
@staticmethod
def get_public_fields(obj):
return [attr for attr in dir(obj)
if not (attr.startswith("_")
or inspect.isbuiltin(attr)
or inspect.isfunction(attr)
or inspect.ismethod(attr))]
@staticmethod
def to_dict(obj):
return dict([attr, getattr(obj, attr)] for attr in AirSimClientBase.get_public_fields(obj))
@staticmethod
def to_str(obj):
return str(AirSimClientBase.to_dict(obj))
@staticmethod
def write_file(filename, bstr):
with open(filename, 'wb') as afile:
afile.write(bstr)
def simSetPose(self, pose, ignore_collison):
self.client.call('simSetPose', pose, ignore_collison)
def simGetPose(self):
return self.client.call('simGetPose')
# helper method for converting getOrientation to roll/pitch/yaw
# https:#en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles
@staticmethod
def toEulerianAngle(q):
z = q.z_val
y = q.y_val
x = q.x_val
w = q.w_val
ysqr = y * y
# roll (x-axis rotation)
t0 = +2.0 * (w*x + y*z)
t1 = +1.0 - 2.0*(x*x + ysqr)
roll = math.atan2(t0, t1)
# pitch (y-axis rotation)
t2 = +2.0 * (w*y - z*x)
if (t2 > 1.0):
t2 = 1
if (t2 < -1.0):
t2 = -1.0
pitch = math.asin(t2)
# yaw (z-axis rotation)
t3 = +2.0 * (w*z + x*y)
t4 = +1.0 - 2.0 * (ysqr + z*z)
yaw = math.atan2(t3, t4)
return (pitch, roll, yaw)
@staticmethod
def toQuaternion(pitch, roll, yaw):
t0 = math.cos(yaw * 0.5)
t1 = math.sin(yaw * 0.5)
t2 = math.cos(roll * 0.5)
t3 = math.sin(roll * 0.5)
t4 = math.cos(pitch * 0.5)
t5 = math.sin(pitch * 0.5)
q = Quaternionr()
q.w_val = t0 * t2 * t4 + t1 * t3 * t5 #w
q.x_val = t0 * t3 * t4 - t1 * t2 * t5 #x
q.y_val = t0 * t2 * t5 + t1 * t3 * t4 #y
q.z_val = t1 * t2 * t4 - t0 * t3 * t5 #z
return q
@staticmethod
def wait_key(message = ''):
''' Wait for a key press on the console and return it. '''
if message != '':
print (message)
result = None
if os.name == 'nt':
import msvcrt
result = msvcrt.getch()
else:
import termios
fd = sys.stdin.fileno()
oldterm = termios.tcgetattr(fd)
newattr = termios.tcgetattr(fd)
newattr[3] = newattr[3] & ~termios.ICANON & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSANOW, newattr)
try:
result = sys.stdin.read(1)
except IOError:
pass
finally:
termios.tcsetattr(fd, termios.TCSAFLUSH, oldterm)
return result
@staticmethod
def read_pfm(file):
""" Read a pfm file """
file = open(file, 'rb')
color = None
width = None
height = None
scale = None
endian = None
header = file.readline().rstrip()
header = str(bytes.decode(header, encoding='utf-8'))
if header == 'PF':
color = True
elif header == 'Pf':
color = False
else:
raise Exception('Not a PFM file.')
temp_str = str(bytes.decode(file.readline(), encoding='utf-8'))
dim_match = re.match(r'^(\d+)\s(\d+)\s$', temp_str)
if dim_match:
width, height = map(int, dim_match.groups())
else:
raise Exception('Malformed PFM header.')
scale = float(file.readline().rstrip())
if scale < 0: # little-endian
endian = '<'
scale = -scale
else:
endian = '>' # big-endian
data = np.fromfile(file, endian + 'f')
shape = (height, width, 3) if color else (height, width)
data = np.reshape(data, shape)
# DEY: I don't know why this was there.
#data = np.flipud(data)
file.close()
return data, scale
@staticmethod
def write_pfm(file, image, scale=1):
""" Write a pfm file """
file = open(file, 'wb')
color = None
if image.dtype.name != 'float32':
raise Exception('Image dtype must be float32.')
image = np.flipud(image)
if len(image.shape) == 3 and image.shape[2] == 3: # color image
color = True
elif len(image.shape) == 2 or len(image.shape) == 3 and image.shape[2] == 1: # greyscale
color = False
else:
raise Exception('Image must have H x W x 3, H x W x 1 or H x W dimensions.')
file.write('PF\n'.encode('utf-8') if color else 'Pf\n'.encode('utf-8'))
temp_str = '%d %d\n' % (image.shape[1], image.shape[0])
file.write(temp_str.encode('utf-8'))
endian = image.dtype.byteorder
if endian == '<' or endian == '=' and sys.byteorder == 'little':
scale = -scale
temp_str = '%f\n' % scale
file.write(temp_str.encode('utf-8'))
image.tofile(file)
@staticmethod
def write_png(filename, image):
""" image must be numpy array H X W X channels
"""
import zlib, struct
buf = image.flatten().tobytes()
width = image.shape[1]
height = image.shape[0]
# reverse the vertical line order and add null bytes at the start
width_byte_4 = width * 4
raw_data = b''.join(b'\x00' + buf[span:span + width_byte_4]
for span in range((height - 1) * width_byte_4, -1, - width_byte_4))
def png_pack(png_tag, data):
chunk_head = png_tag + data
return (struct.pack("!I", len(data)) +
chunk_head +
struct.pack("!I", 0xFFFFFFFF & zlib.crc32(chunk_head)))
png_bytes = b''.join([
b'\x89PNG\r\n\x1a\n',
png_pack(b'IHDR', struct.pack("!2I5B", width, height, 8, 6, 0, 0, 0)),
png_pack(b'IDAT', zlib.compress(raw_data, 9)),
png_pack(b'IEND', b'')])
AirSimClientBase.write_file(filename, png_bytes)
# ----------------------------------- Multirotor APIs ---------------------------------------------
class MultirotorClient(AirSimClientBase, object):
def __init__(self, ip = ""):
if (ip == ""):
ip = "127.0.0.1"
super(MultirotorClient, self).__init__(ip, 41451)
def armDisarm(self, arm):
return self.client.call('armDisarm', arm)
def takeoff(self, max_wait_seconds = 15):
return self.client.call('takeoff', max_wait_seconds)
def land(self, max_wait_seconds = 60):
return self.client.call('land', max_wait_seconds)
def goHome(self):
return self.client.call('goHome')
def hover(self):
return self.client.call('hover')
# query vehicle state
def getPosition(self):
return Vector3r.from_msgpack(self.client.call('getPosition'))
def getVelocity(self):
return Vector3r.from_msgpack(self.client.call('getVelocity'))
def getOrientation(self):
return Quaternionr.from_msgpack(self.client.call('getOrientation'))
def getLandedState(self):
return self.client.call('getLandedState')
def getGpsLocation(self):
return GeoPoint.from_msgpack(self.client.call('getGpsLocation'))
def getPitchRollYaw(self):
return self.toEulerianAngle(self.getOrientation())
#def getRCData(self):
# return self.client.call('getRCData')
def timestampNow(self):
return self.client.call('timestampNow')
def isApiControlEnabled(self):
return self.client.call('isApiControlEnabled')
def isSimulationMode(self):
return self.client.call('isSimulationMode')
def getServerDebugInfo(self):
return self.client.call('getServerDebugInfo')
# APIs for control
def moveByAngle(self, pitch, roll, z, yaw, duration):
return self.client.call('moveByAngle', pitch, roll, z, yaw, duration)
def moveByVelocity(self, vx, vy, vz, duration, drivetrain = DrivetrainType.MaxDegreeOfFreedom, yaw_mode = YawMode()):
return self.client.call('moveByVelocity', vx, vy, vz, duration, drivetrain, yaw_mode)
def moveByVelocityZ(self, vx, vy, z, duration, drivetrain = DrivetrainType.MaxDegreeOfFreedom, yaw_mode = YawMode()):
return self.client.call('moveByVelocityZ', vx, vy, z, duration, drivetrain, yaw_mode)
def moveOnPath(self, path, velocity, max_wait_seconds = 60, drivetrain = DrivetrainType.MaxDegreeOfFreedom, yaw_mode = YawMode(), lookahead = -1, adaptive_lookahead = 1):
return self.client.call('moveOnPath', path, velocity, max_wait_seconds, drivetrain, yaw_mode, lookahead, adaptive_lookahead)
def moveToZ(self, z, velocity, max_wait_seconds = 60, yaw_mode = YawMode(), lookahead = -1, adaptive_lookahead = 1):
return self.client.call('moveToZ', z, velocity, max_wait_seconds, yaw_mode, lookahead, adaptive_lookahead)
def moveToPosition(self, x, y, z, velocity, max_wait_seconds = 60, drivetrain = DrivetrainType.MaxDegreeOfFreedom, yaw_mode = YawMode(), lookahead = -1, adaptive_lookahead = 1):
return self.client.call('moveToPosition', x, y, z, velocity, max_wait_seconds, drivetrain, yaw_mode, lookahead, adaptive_lookahead)
def moveByManual(self, vx_max, vy_max, z_min, duration, drivetrain = DrivetrainType.MaxDegreeOfFreedom, yaw_mode = YawMode()):
return self.client.call('moveByManual', vx_max, vy_max, z_min, duration, drivetrain, yaw_mode)
def rotateToYaw(self, yaw, max_wait_seconds = 60, margin = 5):
return self.client.call('rotateToYaw', yaw, max_wait_seconds, margin)
def rotateByYawRate(self, yaw_rate, duration):
return self.client.call('rotateByYawRate', yaw_rate, duration)
# ----------------------------------- Car APIs ---------------------------------------------
class CarClient(AirSimClientBase, object):
def __init__(self, ip = ""):
if (ip == ""):
ip = "127.0.0.1"
super(CarClient, self).__init__(ip, 42451)
def setCarControls(self, controls):
self.client.call('setCarControls', controls)
def getCarState(self):
state_raw = self.client.call('getCarState')
return CarState.from_msgpack(state_raw)
#FIXME: keep it and remove all upper that already is in AirSimClient.py
#==============================================================================
# Functions
#==============================================================================
def drive(client, throttle, steering):
car_controls.throttle = throttle
car_controls.steering = steering
client.setCarControls(car_controls)
def drive_forward(client, car_controls):
drive(client, 1.0, 0)
def drive_right(client, car_controls):
drive(client, 1.0, 10)
def drive_left(client, car_controls):
drive(client, 1.0, -10)
def save_image(i):
# get a sinlgle image from the car's camera
responses = client.simGetImages([ImageRequest(1, AirSimImageType.Scene)])
single_image = responses[0].image_data_uint8
# save the image
AirSimClientBase.write_file(os.path.normpath(IMAGEDIR + \
'/image_{}.png'.format(i)), single_image)
#==============================================================================
# Main
#==============================================================================
# Constants
IMAGEDIR = "images"
# Create an empty image directory
try:
shutil.rmtree(IMAGEDIR, ignore_errors=True)
os.stat(IMAGEDIR)
except:
os.mkdir(IMAGEDIR)
# Connect to AirSim
client = CarClient()
client.confirmConnection()
client.enableApiControl(True)
client.reset()
print('Connected')
i = 0
car_controls = CarControls()
while True:
drive_forward(client, car_controls)
i += 1
save_image(i)
print("image {} has been saved".format(i))
time.sleep(0.1)
drive_right(client, car_controls)
i += 1
save_image(i)
print("image {} has been saved".format(i))
time.sleep(0.1)
drive_forward(client, car_controls)
i += 1
save_image(i)
print("image {} has been saved".format(i))
time.sleep(0.1)
drive_left(client, car_controls)
i += 1
save_image(i)
print("image {} has been saved".format(i))
time.sleep(0.1)
if i >= 40:
break
## get RGBA camera images from the car
#responses = client.simGetImages([ImageRequest(1, AirSimImageType.Scene)])
## add image to queue
#imagequeue.append(responses[0].image_data_uint8)
## dump queue when it gets full
#if len(imagequeue) == QUEUESIZE:
# for i in range(QUEUESIZE):
# AirSimClientBase.write_file(os.path.normpath(IMAGEDIR + \
# '/image%03d.png' % i ), imagequeue[i])
# imagequeue.pop(0)
#collision_info = client.getCollisionInfo()
#if collision_info.has_collided:
# print("Collision at pos %s, normal %s, impact pt %s, penetration %f, name %s, obj id %d" % (
# pprint.pformat(collision_info.position),
# pprint.pformat(collision_info.normal),
# pprint.pformat(collision_info.impact_point),
# collision_info.penetration_depth, collision_info.object_name, collision_info.object_id))
# break
#time.sleep(0.1)
client.enableApiControl(False)
| 31.491499
| 181
| 0.599362
|
from __future__ import print_function
import pprint
import os
import time
import msgpackrpc
import math
import msgpackrpc
import msgpack
import sys
import inspect
import types
import re
import shutil
import numpy as np
class MsgpackMixin:
def to_msgpack(self, *args, **kwargs):
return self.__dict__
@classmethod
def from_msgpack(cls, encoded):
obj = cls()
obj.__dict__ = {k.decode('utf-8'): v for k, v in encoded.items()}
return obj
class AirSimImageType:
Scene = 0
DepthPlanner = 1
DepthPerspective = 2
DepthVis = 3
DisparityNormalized = 4
Segmentation = 5
SurfaceNormals = 6
class DrivetrainType:
MaxDegreeOfFreedom = 0
ForwardOnly = 1
class LandedState:
Landed = 0
Flying = 1
class Vector3r(MsgpackMixin):
x_val = np.float32(0)
y_val = np.float32(0)
z_val = np.float32(0)
def __init__(self, x_val = np.float32(0), y_val = np.float32(0), z_val = np.float32(0)):
self.x_val = x_val
self.y_val = y_val
self.z_val = z_val
class Quaternionr(MsgpackMixin):
w_val = np.float32(0)
x_val = np.float32(0)
y_val = np.float32(0)
z_val = np.float32(0)
def __init__(self, x_val = np.float32(0), y_val = np.float32(0), z_val = np.float32(0), w_val = np.float32(1)):
self.x_val = x_val
self.y_val = y_val
self.z_val = z_val
self.w_val = w_val
class Pose(MsgpackMixin):
position = Vector3r()
orientation = Quaternionr()
def __init__(self, position_val, orientation_val):
self.position = position_val
self.orientation = orientation_val
class CollisionInfo(MsgpackMixin):
has_collided = False
normal = Vector3r()
impact_point = Vector3r()
position = Vector3r()
penetration_depth = np.float32(0)
time_stamp = np.float32(0)
object_name = ""
object_id = -1
class GeoPoint(MsgpackMixin):
latitude = 0.0
longitude = 0.0
altitude = 0.0
class YawMode(MsgpackMixin):
is_rate = True
yaw_or_rate = 0.0
def __init__(self, is_rate = True, yaw_or_rate = 0.0):
self.is_rate = is_rate
self.yaw_or_rate = yaw_or_rate
class ImageRequest(MsgpackMixin):
camera_id = np.uint8(0)
image_type = AirSimImageType.Scene
pixels_as_float = False
compress = False
def __init__(self, camera_id, image_type, pixels_as_float = False, compress = True):
self.camera_id = camera_id
self.image_type = image_type
self.pixels_as_float = pixels_as_float
self.compress = compress
class ImageResponse(MsgpackMixin):
image_data_uint8 = np.uint8(0)
image_data_float = np.float32(0)
camera_position = Vector3r()
camera_orientation = Quaternionr()
time_stamp = np.uint64(0)
message = ''
pixels_as_float = np.float32(0)
compress = True
width = 0
height = 0
image_type = AirSimImageType.Scene
class CarControls(MsgpackMixin):
throttle = np.float32(0)
steering = np.float32(0)
brake = np.float32(0)
handbrake = False
is_manual_gear = False
manual_gear = 0
gear_immediate = True
def set_throttle(self, throttle_val, forward):
if (forward):
is_manual_gear = False
manual_gear = 0
throttle = abs(throttle_val)
else:
is_manual_gear = False
manual_gear = -1
throttle = - abs(throttle_val)
class CarState(MsgpackMixin):
speed = np.float32(0)
gear = 0
position = Vector3r()
velocity = Vector3r()
orientation = Quaternionr()
class AirSimClientBase:
def __init__(self, ip, port):
self.client = msgpackrpc.Client(msgpackrpc.Address(ip, port), timeout = 3600)
def ping(self):
return self.client.call('ping')
def reset(self):
self.client.call('reset')
def confirmConnection(self):
print('Waiting for connection: ', end='')
home = self.getHomeGeoPoint()
while ((home.latitude == 0 and home.longitude == 0 and home.altitude == 0) or
math.isnan(home.latitude) or math.isnan(home.longitude) or math.isnan(home.altitude)):
time.sleep(1)
home = self.getHomeGeoPoint()
print('X', end='')
print('')
def getHomeGeoPoint(self):
return GeoPoint.from_msgpack(self.client.call('getHomeGeoPoint'))
def enableApiControl(self, is_enabled):
return self.client.call('enableApiControl', is_enabled)
def isApiControlEnabled(self):
return self.client.call('isApiControlEnabled')
def simSetSegmentationObjectID(self, mesh_name, object_id, is_name_regex = False):
return self.client.call('simSetSegmentationObjectID', mesh_name, object_id, is_name_regex)
def simGetSegmentationObjectID(self, mesh_name):
return self.client.call('simGetSegmentationObjectID', mesh_name)
def simGetImage(self, camera_id, image_type):
result = self.client.call('simGetImage', camera_id, image_type)
if (result == "" or result == "\0"):
return None
return result
def simGetImages(self, requests):
responses_raw = self.client.call('simGetImages', requests)
return [ImageResponse.from_msgpack(response_raw) for response_raw in responses_raw]
def getCollisionInfo(self):
return CollisionInfo.from_msgpack(self.client.call('getCollisionInfo'))
@staticmethod
def stringToUint8Array(bstr):
return np.fromstring(bstr, np.uint8)
@staticmethod
def stringToFloatArray(bstr):
return np.fromstring(bstr, np.float32)
@staticmethod
def listTo2DFloatArray(flst, width, height):
return np.reshape(np.asarray(flst, np.float32), (height, width))
@staticmethod
def getPfmArray(response):
return AirSimClientBase.listTo2DFloatArray(response.image_data_float, response.width, response.height)
@staticmethod
def get_public_fields(obj):
return [attr for attr in dir(obj)
if not (attr.startswith("_")
or inspect.isbuiltin(attr)
or inspect.isfunction(attr)
or inspect.ismethod(attr))]
@staticmethod
def to_dict(obj):
return dict([attr, getattr(obj, attr)] for attr in AirSimClientBase.get_public_fields(obj))
@staticmethod
def to_str(obj):
return str(AirSimClientBase.to_dict(obj))
@staticmethod
def write_file(filename, bstr):
with open(filename, 'wb') as afile:
afile.write(bstr)
def simSetPose(self, pose, ignore_collison):
self.client.call('simSetPose', pose, ignore_collison)
def simGetPose(self):
return self.client.call('simGetPose')
y = q.y_val
x = q.x_val
w = q.w_val
ysqr = y * y
t0 = +2.0 * (w*x + y*z)
t1 = +1.0 - 2.0*(x*x + ysqr)
roll = math.atan2(t0, t1)
t2 = +2.0 * (w*y - z*x)
if (t2 > 1.0):
t2 = 1
if (t2 < -1.0):
t2 = -1.0
pitch = math.asin(t2)
t3 = +2.0 * (w*z + x*y)
t4 = +1.0 - 2.0 * (ysqr + z*z)
yaw = math.atan2(t3, t4)
return (pitch, roll, yaw)
@staticmethod
def toQuaternion(pitch, roll, yaw):
t0 = math.cos(yaw * 0.5)
t1 = math.sin(yaw * 0.5)
t2 = math.cos(roll * 0.5)
t3 = math.sin(roll * 0.5)
t4 = math.cos(pitch * 0.5)
t5 = math.sin(pitch * 0.5)
q = Quaternionr()
q.w_val = t0 * t2 * t4 + t1 * t3 * t5
q.x_val = t0 * t3 * t4 - t1 * t2 * t5
q.y_val = t0 * t2 * t5 + t1 * t3 * t4
q.z_val = t1 * t2 * t4 - t0 * t3 * t5
return q
@staticmethod
def wait_key(message = ''):
if message != '':
print (message)
result = None
if os.name == 'nt':
import msvcrt
result = msvcrt.getch()
else:
import termios
fd = sys.stdin.fileno()
oldterm = termios.tcgetattr(fd)
newattr = termios.tcgetattr(fd)
newattr[3] = newattr[3] & ~termios.ICANON & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSANOW, newattr)
try:
result = sys.stdin.read(1)
except IOError:
pass
finally:
termios.tcsetattr(fd, termios.TCSAFLUSH, oldterm)
return result
@staticmethod
def read_pfm(file):
file = open(file, 'rb')
color = None
width = None
height = None
scale = None
endian = None
header = file.readline().rstrip()
header = str(bytes.decode(header, encoding='utf-8'))
if header == 'PF':
color = True
elif header == 'Pf':
color = False
else:
raise Exception('Not a PFM file.')
temp_str = str(bytes.decode(file.readline(), encoding='utf-8'))
dim_match = re.match(r'^(\d+)\s(\d+)\s$', temp_str)
if dim_match:
width, height = map(int, dim_match.groups())
else:
raise Exception('Malformed PFM header.')
scale = float(file.readline().rstrip())
if scale < 0:
endian = '<'
scale = -scale
else:
endian = '>'
data = np.fromfile(file, endian + 'f')
shape = (height, width, 3) if color else (height, width)
data = np.reshape(data, shape)
#data = np.flipud(data)
file.close()
return data, scale
@staticmethod
def write_pfm(file, image, scale=1):
file = open(file, 'wb')
color = None
if image.dtype.name != 'float32':
raise Exception('Image dtype must be float32.')
image = np.flipud(image)
if len(image.shape) == 3 and image.shape[2] == 3: # color image
color = True
elif len(image.shape) == 2 or len(image.shape) == 3 and image.shape[2] == 1: # greyscale
color = False
else:
raise Exception('Image must have H x W x 3, H x W x 1 or H x W dimensions.')
file.write('PF\n'.encode('utf-8') if color else 'Pf\n'.encode('utf-8'))
temp_str = '%d %d\n' % (image.shape[1], image.shape[0])
file.write(temp_str.encode('utf-8'))
endian = image.dtype.byteorder
if endian == '<' or endian == '=' and sys.byteorder == 'little':
scale = -scale
temp_str = '%f\n' % scale
file.write(temp_str.encode('utf-8'))
image.tofile(file)
@staticmethod
def write_png(filename, image):
import zlib, struct
buf = image.flatten().tobytes()
width = image.shape[1]
height = image.shape[0]
# reverse the vertical line order and add null bytes at the start
width_byte_4 = width * 4
raw_data = b''.join(b'\x00' + buf[span:span + width_byte_4]
for span in range((height - 1) * width_byte_4, -1, - width_byte_4))
def png_pack(png_tag, data):
chunk_head = png_tag + data
return (struct.pack("!I", len(data)) +
chunk_head +
struct.pack("!I", 0xFFFFFFFF & zlib.crc32(chunk_head)))
png_bytes = b''.join([
b'\x89PNG\r\n\x1a\n',
png_pack(b'IHDR', struct.pack("!2I5B", width, height, 8, 6, 0, 0, 0)),
png_pack(b'IDAT', zlib.compress(raw_data, 9)),
png_pack(b'IEND', b'')])
AirSimClientBase.write_file(filename, png_bytes)
# ----------------------------------- Multirotor APIs ---------------------------------------------
class MultirotorClient(AirSimClientBase, object):
def __init__(self, ip = ""):
if (ip == ""):
ip = "127.0.0.1"
super(MultirotorClient, self).__init__(ip, 41451)
def armDisarm(self, arm):
return self.client.call('armDisarm', arm)
def takeoff(self, max_wait_seconds = 15):
return self.client.call('takeoff', max_wait_seconds)
def land(self, max_wait_seconds = 60):
return self.client.call('land', max_wait_seconds)
def goHome(self):
return self.client.call('goHome')
def hover(self):
return self.client.call('hover')
# query vehicle state
def getPosition(self):
return Vector3r.from_msgpack(self.client.call('getPosition'))
def getVelocity(self):
return Vector3r.from_msgpack(self.client.call('getVelocity'))
def getOrientation(self):
return Quaternionr.from_msgpack(self.client.call('getOrientation'))
def getLandedState(self):
return self.client.call('getLandedState')
def getGpsLocation(self):
return GeoPoint.from_msgpack(self.client.call('getGpsLocation'))
def getPitchRollYaw(self):
return self.toEulerianAngle(self.getOrientation())
#def getRCData(self):
# return self.client.call('getRCData')
def timestampNow(self):
return self.client.call('timestampNow')
def isApiControlEnabled(self):
return self.client.call('isApiControlEnabled')
def isSimulationMode(self):
return self.client.call('isSimulationMode')
def getServerDebugInfo(self):
return self.client.call('getServerDebugInfo')
# APIs for control
def moveByAngle(self, pitch, roll, z, yaw, duration):
return self.client.call('moveByAngle', pitch, roll, z, yaw, duration)
def moveByVelocity(self, vx, vy, vz, duration, drivetrain = DrivetrainType.MaxDegreeOfFreedom, yaw_mode = YawMode()):
return self.client.call('moveByVelocity', vx, vy, vz, duration, drivetrain, yaw_mode)
def moveByVelocityZ(self, vx, vy, z, duration, drivetrain = DrivetrainType.MaxDegreeOfFreedom, yaw_mode = YawMode()):
return self.client.call('moveByVelocityZ', vx, vy, z, duration, drivetrain, yaw_mode)
def moveOnPath(self, path, velocity, max_wait_seconds = 60, drivetrain = DrivetrainType.MaxDegreeOfFreedom, yaw_mode = YawMode(), lookahead = -1, adaptive_lookahead = 1):
return self.client.call('moveOnPath', path, velocity, max_wait_seconds, drivetrain, yaw_mode, lookahead, adaptive_lookahead)
def moveToZ(self, z, velocity, max_wait_seconds = 60, yaw_mode = YawMode(), lookahead = -1, adaptive_lookahead = 1):
return self.client.call('moveToZ', z, velocity, max_wait_seconds, yaw_mode, lookahead, adaptive_lookahead)
def moveToPosition(self, x, y, z, velocity, max_wait_seconds = 60, drivetrain = DrivetrainType.MaxDegreeOfFreedom, yaw_mode = YawMode(), lookahead = -1, adaptive_lookahead = 1):
return self.client.call('moveToPosition', x, y, z, velocity, max_wait_seconds, drivetrain, yaw_mode, lookahead, adaptive_lookahead)
def moveByManual(self, vx_max, vy_max, z_min, duration, drivetrain = DrivetrainType.MaxDegreeOfFreedom, yaw_mode = YawMode()):
return self.client.call('moveByManual', vx_max, vy_max, z_min, duration, drivetrain, yaw_mode)
def rotateToYaw(self, yaw, max_wait_seconds = 60, margin = 5):
return self.client.call('rotateToYaw', yaw, max_wait_seconds, margin)
def rotateByYawRate(self, yaw_rate, duration):
return self.client.call('rotateByYawRate', yaw_rate, duration)
# ----------------------------------- Car APIs ---------------------------------------------
class CarClient(AirSimClientBase, object):
def __init__(self, ip = ""):
if (ip == ""):
ip = "127.0.0.1"
super(CarClient, self).__init__(ip, 42451)
def setCarControls(self, controls):
self.client.call('setCarControls', controls)
def getCarState(self):
state_raw = self.client.call('getCarState')
return CarState.from_msgpack(state_raw)
#FIXME: keep it and remove all upper that already is in AirSimClient.py
#==============================================================================
# Functions
#==============================================================================
def drive(client, throttle, steering):
car_controls.throttle = throttle
car_controls.steering = steering
client.setCarControls(car_controls)
def drive_forward(client, car_controls):
drive(client, 1.0, 0)
def drive_right(client, car_controls):
drive(client, 1.0, 10)
def drive_left(client, car_controls):
drive(client, 1.0, -10)
def save_image(i):
# get a sinlgle image from the car's camera
responses = client.simGetImages([ImageRequest(1, AirSimImageType.Scene)])
single_image = responses[0].image_data_uint8
AirSimClientBase.write_file(os.path.normpath(IMAGEDIR + \
'/image_{}.png'.format(i)), single_image)
IMAGEDIR = "images"
try:
shutil.rmtree(IMAGEDIR, ignore_errors=True)
os.stat(IMAGEDIR)
except:
os.mkdir(IMAGEDIR)
client = CarClient()
client.confirmConnection()
client.enableApiControl(True)
client.reset()
print('Connected')
i = 0
car_controls = CarControls()
while True:
drive_forward(client, car_controls)
i += 1
save_image(i)
print("image {} has been saved".format(i))
time.sleep(0.1)
drive_right(client, car_controls)
i += 1
save_image(i)
print("image {} has been saved".format(i))
time.sleep(0.1)
drive_forward(client, car_controls)
i += 1
save_image(i)
print("image {} has been saved".format(i))
time.sleep(0.1)
drive_left(client, car_controls)
i += 1
save_image(i)
print("image {} has been saved".format(i))
time.sleep(0.1)
if i >= 40:
break
client.enableApiControl(False)
| true
| true
|
790a54e339d91d6c45f6fb98a1d9e53e0ef37fda
| 8,261
|
py
|
Python
|
bamboo/lib/schema_builder.py
|
pld/bamboo
|
a0fc77aebd6ff6b1087ba46896b0ce705fbb25a3
|
[
"BSD-3-Clause"
] | 27
|
2015-01-14T15:57:54.000Z
|
2020-12-27T19:34:41.000Z
|
bamboo/lib/schema_builder.py
|
biswapanda/bamboo
|
72fc260822a27ce52cbe65de178f8fa1b60311f3
|
[
"BSD-3-Clause"
] | 2
|
2015-08-06T15:23:28.000Z
|
2016-01-28T00:05:25.000Z
|
bamboo/lib/schema_builder.py
|
biswapanda/bamboo
|
72fc260822a27ce52cbe65de178f8fa1b60311f3
|
[
"BSD-3-Clause"
] | 10
|
2015-08-07T01:50:39.000Z
|
2019-05-15T21:41:18.000Z
|
from datetime import datetime
import numpy as np
import re
from bamboo.core.frame import RESERVED_KEYS
from bamboo.core.parser import Parser
from bamboo.lib.exceptions import ArgumentError
from bamboo.lib.mongo import reserve_encoded
# Keys used inside each per-column schema dict.
CARDINALITY = 'cardinality'
OLAP_TYPE = 'olap_type'
SIMPLETYPE = 'simpletype'
LABEL = 'label'
# olap_types: dimensions are categorical/groupable, measures aggregatable.
DIMENSION = 'dimension'
MEASURE = 'measure'
# simpletypes: user-facing type names stored in the schema.
BOOLEAN = 'boolean'
DATETIME = 'datetime'
INTEGER = 'integer'
FLOAT = 'float'
STRING = 'string'
# map from numpy objects to olap_types
DTYPE_TO_OLAP_TYPE = {
    np.object_: DIMENSION,
    np.bool_: DIMENSION,
    np.float64: MEASURE,
    np.int64: MEASURE,
    datetime: MEASURE,
}
# map from numpy objects to simpletypes
DTYPE_TO_SIMPLETYPE = {
    np.bool_: BOOLEAN,
    np.float64: FLOAT,
    np.int64: INTEGER,
    np.object_: STRING,
    datetime: DATETIME,
}
# Only numeric simpletypes have a dtype to coerce to (see Schema.convert_type).
SIMPLETYPE_TO_DTYPE = {
    FLOAT: np.float64,
    INTEGER: np.int64,
}
# Derived map simpletype -> olap_type, composed from the two maps above.
SIMPLETYPE_TO_OLAP_TYPE = {
    v: DTYPE_TO_OLAP_TYPE[k] for (k, v) in DTYPE_TO_SIMPLETYPE.items()}
# Matches any non-word character; used to slugify column labels.
# NOTE: the `ur''` prefix makes this file Python 2 only.
RE_ENCODED_COLUMN = re.compile(ur'(?u)\W')
class Schema(dict):
    """Map from encoded column names (slugs) to per-column metadata dicts.

    Each value holds the LABEL, OLAP_TYPE, SIMPLETYPE and, for dimension
    columns, CARDINALITY of one dataset column.
    """

    @classmethod
    def safe_init(cls, arg):
        """Make schema with potential arg of None."""
        return cls() if arg is None else cls(arg)

    @property
    def labels_to_slugs(self):
        """Build dict from column labels to slugs."""
        return {
            column_attrs[LABEL]: reserve_encoded(column_name) for
            (column_name, column_attrs) in self.items()
        }

    @property
    def numerics(self):
        """Return the slugs of all integer and float columns."""
        return [slug for slug, col_schema in self.items()
                if col_schema[SIMPLETYPE] in [INTEGER, FLOAT]]

    @property
    def numerics_select(self):
        """Return a field-selection dict ({slug: 1}) for numeric columns."""
        return {col: 1 for col in self.numerics}

    def cardinality(self, column):
        """Return the cardinality of `column`, or None for non-dimensions."""
        if self.is_dimension(column):
            return self[column].get(CARDINALITY)

    def convert_type(self, slug, value):
        """Coerce `value` to the numpy dtype recorded for `slug`.

        Returns `value` unchanged when the column is unknown or its
        simpletype has no dtype mapping (e.g. string, datetime).
        """
        column_schema = self.get(slug)
        if column_schema:
            type_func = SIMPLETYPE_TO_DTYPE.get(column_schema[SIMPLETYPE])
            if type_func:
                value = type_func(value)
        return value

    def datetimes(self, intersect_with):
        """Return datetime column slugs that are also in `intersect_with`."""
        return [slug for slug, col in self.items()
                if col[SIMPLETYPE] == DATETIME and slug in intersect_with]

    def is_date_simpletype(self, column):
        """True if `column` has the datetime simpletype (KeyError if absent)."""
        return self[column][SIMPLETYPE] == DATETIME

    def is_dimension(self, column):
        """True if `column` exists and has the dimension olap_type."""
        col_schema = self.get(column)
        return col_schema and col_schema[OLAP_TYPE] == DIMENSION

    def rebuild(self, dframe, overwrite=False):
        """Rebuild a schema for a dframe.

        :param dframe: The DataFrame whose schema to merge with the current
            schema.
        :param overwrite: If true replace schema, otherwise update.
        """
        current_schema = self
        new_schema = schema_from_dframe(dframe, self)
        if current_schema and not overwrite:
            # merge new schema with existing schema
            current_schema.update(new_schema)
            new_schema = current_schema
        return new_schema

    def rename_map_for_dframe(self, dframe):
        """Return a map from dframe columns to slugs.

        :param dframe: The DataFrame to produce the map for.
        """
        labels_to_slugs = self.labels_to_slugs
        return {
            column: labels_to_slugs[column] for column in
            dframe.columns.tolist() if self._resluggable_column(
                column, labels_to_slugs, dframe)
        }

    def set_olap_type(self, column, olap_type):
        """Set the OLAP Type for this `column` of schema.

        Only columns with an original OLAP Type of 'measure' should be
        modified.  This includes columns with Simple Type integer, float,
        and datetime.

        NOTE(review): an earlier docstring claimed `ArgumentError` is raised
        for non-measure columns, but no such check is implemented here --
        confirm the intended contract before relying on it.

        :param column: The column to set the OLAP Type for.
        :param olap_type: The OLAP Type to set. Must be 'dimension' or
            'measure'.
        """
        self[column][OLAP_TYPE] = olap_type

    def _resluggable_column(self, column, labels_to_slugs, dframe):
        """Test if column should be slugged.

        A column should be slugged if:
            1. The `column` is a key in `labels_to_slugs` and
            2. The `column` is not a value in `labels_to_slugs` or
                1. The `column` label is not equal to the `column` slug and
                2. The slug is not in the `dframe`'s columns

        :param column: The column to reslug.
        :param labels_to_slugs: The labels to slugs map (only build once).
        :param dframe: The DataFrame that column is in.
        """
        return (column in labels_to_slugs.keys() and (
                not column in labels_to_slugs.values() or (
                    labels_to_slugs[column] != column and
                    labels_to_slugs[column] not in dframe.columns)))
def schema_from_dframe(dframe, schema=None):
    """Build schema from the DataFrame and a schema.

    :param dframe: The DataFrame to build a schema for.
    :param schema: Existing schema, optional.  When given, labels of columns
        already present in it are preserved.

    :returns: A `Schema` mapping encoded (slugified) column names to their
        label, olap_type, simpletype and, where computable, cardinality.
    """
    dtypes = dframe.dtypes.to_dict()
    column_names = list()
    names_to_labels = dict()
    # use existing labels for existing columns
    for name in dtypes.keys():
        if name not in RESERVED_KEYS:
            column_names.append(name)
            if schema:
                schema_for_name = schema.get(name)
                if schema_for_name:
                    names_to_labels[name] = schema_for_name[
                        LABEL]
    encoded_names = dict(zip(column_names, _slugify_columns(column_names)))
    schema = Schema()
    for (name, dtype) in dtypes.items():
        if name not in RESERVED_KEYS:
            column_schema = {
                LABEL: names_to_labels.get(name, name),
                OLAP_TYPE: _olap_type_for_data_and_dtype(
                    dframe[name], dtype),
                SIMPLETYPE: _simpletype_for_data_and_dtype(
                    dframe[name], dtype),
            }
            try:
                column_schema[CARDINALITY] = dframe[
                    name].nunique()
            except AttributeError:
                # Column does not support nunique; omit cardinality.
                pass
            except TypeError:
                # E.g. dates with and without offset can not be compared and
                # raise a type error.
                pass
            schema[encoded_names[name]] = column_schema
    return schema
def _slugify_columns(column_names):
    """Convert list of strings into unique slugs.

    Non-alphanumeric characters become underscores, the result is
    lower-cased, and collisions with earlier slugs or parser reserved words
    are resolved by appending underscores.

    :param column_names: A list of strings.

    :returns: A list of slugified names with a one-to-one mapping to
        `column_names`.
    """
    slugs = []
    for label in column_names:
        candidate = RE_ENCODED_COLUMN.sub('_', label).lower()
        slugs.append(make_unique(candidate, slugs + Parser.reserved_words))
    return slugs
def make_unique(name, reserved_names):
    """Return a variant of `name` that is not in `reserved_names`.

    Underscores are appended until the candidate name is free.

    :param name: The name to make unique.
    :param reserved_names: A list of names the column must not be included in.
    """
    candidate = name
    while candidate in reserved_names:
        candidate = candidate + '_'
    return candidate
def filter_schema(schema):
    """Remove not settable columns.

    Strips the computed CARDINALITY entry from every column so the result
    only contains user-settable fields.  Mutates and returns `schema`.
    NOTE: uses dict.iteritems(), so this function is Python 2 only.
    """
    for column, column_schema in schema.iteritems():
        if column_schema.get(CARDINALITY):
            del column_schema[CARDINALITY]
            # Reassignment is redundant (same dict object) but harmless.
            schema[column] = column_schema
    return schema
def _olap_type_for_data_and_dtype(column, dtype):
    """Return the olap_type (dimension/measure) for `column` and its dtype."""
    return _type_for_data_and_dtypes(
        DTYPE_TO_OLAP_TYPE, column, dtype.type)
def _simpletype_for_data_and_dtype(column, dtype):
    """Return the simpletype (e.g. 'integer') for `column` and its dtype."""
    return _type_for_data_and_dtypes(
        DTYPE_TO_SIMPLETYPE, column, dtype.type)
def _type_for_data_and_dtypes(type_map, column, dtype_type):
has_datetime = any([isinstance(field, datetime) for field in column])
return type_map[datetime if has_datetime else dtype_type]
| 30.149635
| 78
| 0.641932
|
from datetime import datetime
import numpy as np
import re
from bamboo.core.frame import RESERVED_KEYS
from bamboo.core.parser import Parser
from bamboo.lib.exceptions import ArgumentError
from bamboo.lib.mongo import reserve_encoded
CARDINALITY = 'cardinality'
OLAP_TYPE = 'olap_type'
SIMPLETYPE = 'simpletype'
LABEL = 'label'
DIMENSION = 'dimension'
MEASURE = 'measure'
BOOLEAN = 'boolean'
DATETIME = 'datetime'
INTEGER = 'integer'
FLOAT = 'float'
STRING = 'string'
DTYPE_TO_OLAP_TYPE = {
np.object_: DIMENSION,
np.bool_: DIMENSION,
np.float64: MEASURE,
np.int64: MEASURE,
datetime: MEASURE,
}
DTYPE_TO_SIMPLETYPE = {
np.bool_: BOOLEAN,
np.float64: FLOAT,
np.int64: INTEGER,
np.object_: STRING,
datetime: DATETIME,
}
SIMPLETYPE_TO_DTYPE = {
FLOAT: np.float64,
INTEGER: np.int64,
}
SIMPLETYPE_TO_OLAP_TYPE = {
v: DTYPE_TO_OLAP_TYPE[k] for (k, v) in DTYPE_TO_SIMPLETYPE.items()}
RE_ENCODED_COLUMN = re.compile(ur'(?u)\W')
class Schema(dict):
@classmethod
def safe_init(cls, arg):
"""Make schema with potential arg of None."""
return cls() if arg is None else cls(arg)
@property
def labels_to_slugs(self):
"""Build dict from column labels to slugs."""
return {
column_attrs[LABEL]: reserve_encoded(column_name) for
(column_name, column_attrs) in self.items()
}
@property
def numerics(self):
return [slug for slug, col_schema in self.items()
if col_schema[SIMPLETYPE] in [INTEGER, FLOAT]]
@property
def numerics_select(self):
return {col: 1 for col in self.numerics}
def cardinality(self, column):
if self.is_dimension(column):
return self[column].get(CARDINALITY)
def convert_type(self, slug, value):
column_schema = self.get(slug)
if column_schema:
type_func = SIMPLETYPE_TO_DTYPE.get(column_schema[SIMPLETYPE])
if type_func:
value = type_func(value)
return value
def datetimes(self, intersect_with):
return [slug for slug, col in self.items()
if col[SIMPLETYPE] == DATETIME and slug in intersect_with]
def is_date_simpletype(self, column):
return self[column][SIMPLETYPE] == DATETIME
def is_dimension(self, column):
col_schema = self.get(column)
return col_schema and col_schema[OLAP_TYPE] == DIMENSION
def rebuild(self, dframe, overwrite=False):
"""Rebuild a schema for a dframe.
:param dframe: The DataFrame whose schema to merge with the current
schema.
:param overwrite: If true replace schema, otherwise update.
"""
current_schema = self
new_schema = schema_from_dframe(dframe, self)
if current_schema and not overwrite:
current_schema.update(new_schema)
new_schema = current_schema
return new_schema
def rename_map_for_dframe(self, dframe):
"""Return a map from dframe columns to slugs.
:param dframe: The DataFrame to produce the map for.
"""
labels_to_slugs = self.labels_to_slugs
return {
column: labels_to_slugs[column] for column in
dframe.columns.tolist() if self._resluggable_column(
column, labels_to_slugs, dframe)
}
def set_olap_type(self, column, olap_type):
"""Set the OLAP Type for this `column` of schema.
Only columns with an original OLAP Type of 'measure' can be modified.
This includes columns with Simple Type integer, float, and datetime.
:param column: The column to set the OLAP Type for.
:param olap_type: The OLAP Type to set. Must be 'dimension' or
'measure'.
:raises: `ArgumentError` if trying to set the OLAP Type of an column
whose OLAP Type was not originally a 'measure'.
"""
self[column][OLAP_TYPE] = olap_type
def _resluggable_column(self, column, labels_to_slugs, dframe):
"""Test if column should be slugged.
A column should be slugged if:
1. The `column` is a key in `labels_to_slugs` and
2. The `column` is not a value in `labels_to_slugs` or
1. The `column` label is not equal to the `column` slug and
2. The slug is not in the `dframe`'s columns
:param column: The column to reslug.
:param labels_to_slugs: The labels to slugs map (only build once).
:param dframe: The DataFrame that column is in.
"""
return (column in labels_to_slugs.keys() and (
not column in labels_to_slugs.values() or (
labels_to_slugs[column] != column and
labels_to_slugs[column] not in dframe.columns)))
def schema_from_dframe(dframe, schema=None):
"""Build schema from the DataFrame and a schema.
:param dframe: The DataFrame to build a schema for.
:param schema: Existing schema, optional.
:returns: A dictionary schema.
"""
dtypes = dframe.dtypes.to_dict()
column_names = list()
names_to_labels = dict()
# use existing labels for existing columns
for name in dtypes.keys():
if name not in RESERVED_KEYS:
column_names.append(name)
if schema:
schema_for_name = schema.get(name)
if schema_for_name:
names_to_labels[name] = schema_for_name[
LABEL]
encoded_names = dict(zip(column_names, _slugify_columns(column_names)))
schema = Schema()
for (name, dtype) in dtypes.items():
if name not in RESERVED_KEYS:
column_schema = {
LABEL: names_to_labels.get(name, name),
OLAP_TYPE: _olap_type_for_data_and_dtype(
dframe[name], dtype),
SIMPLETYPE: _simpletype_for_data_and_dtype(
dframe[name], dtype),
}
try:
column_schema[CARDINALITY] = dframe[
name].nunique()
except AttributeError:
pass
except TypeError:
# E.g. dates with and without offset can not be compared and
# raise a type error.
pass
schema[encoded_names[name]] = column_schema
return schema
def _slugify_columns(column_names):
"""Convert list of strings into unique slugs.
Convert non-alphanumeric characters in column names into underscores and
ensure that all column names are unique.
:param column_names: A list of strings.
:returns: A list of slugified names with a one-to-one mapping to
`column_names`.
"""
encoded_names = []
for column_name in column_names:
slug = RE_ENCODED_COLUMN.sub('_', column_name).lower()
slug = make_unique(slug, encoded_names + Parser.reserved_words)
encoded_names.append(slug)
return encoded_names
def make_unique(name, reserved_names):
"""Return a slug ensuring name is not in `reserved_names`.
:param name: The name to make unique.
:param reserved_names: A list of names the column must not be included in.
"""
while name in reserved_names:
name += '_'
return name
def filter_schema(schema):
"""Remove not settable columns."""
for column, column_schema in schema.iteritems():
if column_schema.get(CARDINALITY):
del column_schema[CARDINALITY]
schema[column] = column_schema
return schema
def _olap_type_for_data_and_dtype(column, dtype):
return _type_for_data_and_dtypes(
DTYPE_TO_OLAP_TYPE, column, dtype.type)
def _simpletype_for_data_and_dtype(column, dtype):
return _type_for_data_and_dtypes(
DTYPE_TO_SIMPLETYPE, column, dtype.type)
def _type_for_data_and_dtypes(type_map, column, dtype_type):
has_datetime = any([isinstance(field, datetime) for field in column])
return type_map[datetime if has_datetime else dtype_type]
| false
| true
|
790a558bc791a93fad1eae31222a7c127297e94e
| 877
|
py
|
Python
|
krgram/tl/api/functions/help.py
|
krow89/krgram
|
52257d143be44b3c038c9458837f2df4b1640fa7
|
[
"MIT"
] | null | null | null |
krgram/tl/api/functions/help.py
|
krow89/krgram
|
52257d143be44b3c038c9458837f2df4b1640fa7
|
[
"MIT"
] | null | null | null |
krgram/tl/api/functions/help.py
|
krow89/krgram
|
52257d143be44b3c038c9458837f2df4b1640fa7
|
[
"MIT"
] | null | null | null |
from krgram.tl.core_types.native import TL_string
from krgram.tl.base import *
class getConfig(TLFunction):
    """TL function with constructor ID 0xc4f9186b; takes no arguments.
    Presumably Telegram's help.getConfig -- confirm against the TL schema."""
    ID = 0xc4f9186b
TLRegister.register(getConfig)
class getNearestDc(TLFunction):
    """TL function with constructor ID 0x1fb33026; takes no arguments.
    Presumably Telegram's help.getNearestDc -- confirm against the TL schema."""
    ID = 0x1fb33026
TLRegister.register(getNearestDc)
class getAppUpdate(TLFunction):
    """TL function with constructor ID 0xc812ac7e (help.getAppUpdate)."""
    ID = 0xc812ac7e
    def get_structure(self):
        # Ordered (name, serializer) pairs describing the wire arguments.
        return ("device_model", TL_string()), ("system_version", TL_string()), \
            ("app_version", TL_string()), ("lang_code", TL_string()),
TLRegister.register(getAppUpdate)
class saveAppLog(TLFunction):
    """TL function with constructor ID 0x6f02f748 (help.saveAppLog)."""
    ID = 0x6f02f748
    def get_structure(self):
        # Single vector argument of log events.
        return ("events", Vector()),
TLRegister.register(saveAppLog)
class getInviteText(TLFunction):
    """TL function with constructor ID 0xa4a95186 (help.getInviteText)."""
    ID = 0xa4a95186
    def get_structure(self):
        # Language code selects the localization of the invite text.
        return ("lang_code", TL_string()),
TLRegister.register(getInviteText)
class getSupport(TLFunction):
    """TL function with constructor ID 0x9cdf08cd; takes no arguments.
    Presumably Telegram's help.getSupport -- confirm against the TL schema."""
    ID = 0x9cdf08cd
TLRegister.register(getSupport)
| 15.945455
| 74
| 0.749145
|
from krgram.tl.core_types.native import TL_string
from krgram.tl.base import *
class getConfig(TLFunction):
ID = 0xc4f9186b
TLRegister.register(getConfig)
class getNearestDc(TLFunction):
ID = 0x1fb33026
TLRegister.register(getNearestDc)
class getAppUpdate(TLFunction):
ID = 0xc812ac7e
def get_structure(self):
return ("device_model", TL_string()), ("system_version", TL_string()), \
("app_version", TL_string()), ("lang_code", TL_string()),
TLRegister.register(getAppUpdate)
class saveAppLog(TLFunction):
ID = 0x6f02f748
def get_structure(self):
return ("events", Vector()),
TLRegister.register(saveAppLog)
class getInviteText(TLFunction):
ID = 0xa4a95186
def get_structure(self):
return ("lang_code", TL_string()),
TLRegister.register(getInviteText)
class getSupport(TLFunction):
ID = 0x9cdf08cd
TLRegister.register(getSupport)
| true
| true
|
790a5659a4d9c50dfba399c8f82c0dd9c1a28788
| 6,566
|
py
|
Python
|
model/cpn/ade.cpn.R50_v1c.v7/network.py
|
akinoriosamura/TorchSeg-mirror
|
34033fe85fc24015bcef7a92aad39d2a25a001a5
|
[
"MIT"
] | null | null | null |
model/cpn/ade.cpn.R50_v1c.v7/network.py
|
akinoriosamura/TorchSeg-mirror
|
34033fe85fc24015bcef7a92aad39d2a25a001a5
|
[
"MIT"
] | 1
|
2021-06-08T20:36:43.000Z
|
2021-06-08T20:36:43.000Z
|
model/cpn/ade.cpn.R50_v1c.v7/network.py
|
akinoriosamura/TorchSeg-mirror
|
34033fe85fc24015bcef7a92aad39d2a25a001a5
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
from functools import partial
from collections import OrderedDict
import torch
import torch.nn as nn
import torch.nn.functional as F
from config import config
from base_model import resnet50
from seg_opr.seg_oprs import ConvBnRelu
class CPNet(nn.Module):
    """Segmentation network: dilated ResNet-50 backbone + ObjectContext head.

    The main head consumes the backbone's last stage concatenated with two
    512-channel context maps (2048 + 512 + 512 = 2048 + 1024 channels) and
    an auxiliary head reads the penultimate stage; both are upsampled x8
    back to input resolution.
    """

    def __init__(self, out_planes, criterion, pretrained_model=None,
                 norm_layer=nn.BatchNorm2d):
        super(CPNet, self).__init__()
        self.backbone = resnet50(pretrained_model, norm_layer=norm_layer,
                                 bn_eps=config.bn_eps,
                                 bn_momentum=config.bn_momentum,
                                 deep_stem=True, stem_width=64)
        # Replace strides with dilation in stages 3/4 so the backbone keeps
        # an output stride of 8 instead of 32.
        self.backbone.layer3.apply(partial(self._nostride_dilate, dilate=2))
        self.backbone.layer4.apply(partial(self._nostride_dilate, dilate=4))

        self.business_layer = []
        self.context = ObjectContext(2048, 512, norm_layer)
        self.head_layer = nn.Sequential(
            ConvBnRelu(2048 + 1024, 512, 3, 1, 1,
                       has_bn=True,
                       has_relu=True, has_bias=False, norm_layer=norm_layer),
            nn.Dropout2d(0.1, inplace=False),
            nn.Conv2d(512, out_planes, kernel_size=1)
        )
        # Auxiliary classifier on the penultimate backbone stage (1024 ch).
        self.aux_layer = nn.Sequential(
            ConvBnRelu(1024, 512, 3, 1, 1,
                       has_bn=True,
                       has_relu=True, has_bias=False, norm_layer=norm_layer),
            nn.Dropout2d(0.1, inplace=False),
            nn.Conv2d(512, out_planes, kernel_size=1)
        )
        self.business_layer.append(self.context)
        self.business_layer.append(self.head_layer)
        self.business_layer.append(self.aux_layer)

        self.criterion = criterion
        # BCE supervises the intra-class similarity (affinity) map.
        self.bce_criterion = nn.BCELoss(reduction='mean')

    def forward(self, data, label=None, aux_label=None):
        # aux_label: assumed to be the ground-truth pixel-affinity map
        # matching intra_sim_map's (B, H*W, H*W) shape -- TODO confirm
        # against the dataloader.
        blocks = self.backbone(data)

        fm, intra_sim_map = self.context(blocks[-1])
        fm = self.head_layer(fm)
        fm = F.interpolate(fm, scale_factor=8, mode='bilinear',
                           align_corners=True)
        softmax_fm = F.log_softmax(fm, dim=1)

        aux_fm = self.aux_layer(blocks[-2])
        aux_fm = F.interpolate(aux_fm, scale_factor=8, mode='bilinear',
                               align_corners=True)

        if label is not None:
            main_loss = self.criterion(fm, label)
            aux_loss = self.criterion(aux_fm, label)
            intra_sim_loss = self.bce_criterion(intra_sim_map, aux_label)
            # Standard 0.4 weight on the auxiliary segmentation loss.
            loss = main_loss + 0.4 * aux_loss + intra_sim_loss
            return loss

        return softmax_fm

    # Applied to backbone modules: turn stride-2 convs into stride-1 and
    # dilate 3x3 kernels so spatial resolution is preserved.
    def _nostride_dilate(self, m, dilate):
        if isinstance(m, nn.Conv2d):
            if m.stride == (2, 2):
                m.stride = (1, 1)
                if m.kernel_size == (3, 3):
                    m.dilation = (dilate // 2, dilate // 2)
                    m.padding = (dilate // 2, dilate // 2)
            else:
                if m.kernel_size == (3, 3):
                    m.dilation = (dilate, dilate)
                    m.padding = (dilate, dilate)
class ObjectContext(nn.Module):
    """Context-prior module: aggregate intra- and inter-class context.

    Predicts a pixel-affinity map, uses it (and its complement) to pool
    features over same-class and different-class positions, and returns the
    input concatenated with both pooled context maps.
    """

    def __init__(self, in_channels, inner_channel, norm_layer=nn.BatchNorm2d):
        super(ObjectContext, self).__init__()
        self.in_channels = in_channels
        self.inner_channel = inner_channel
        self.reduce_conv = ConvBnRelu(self.in_channels, self.inner_channel,
                                      1, 1, 0,
                                      has_bn=True, has_relu=True,
                                      has_bias=False, norm_layer=norm_layer)
        # Predicts one affinity column per spatial position.  3600 is
        # presumably H*W of the backbone feature map (60x60) -- TODO confirm
        # against the training crop size / output stride in config.
        self.intra_similarity_branch = nn.Sequential(
            ConvBnRelu(self.inner_channel, self.inner_channel, 1, 1, 0,
                       has_bn=True, has_relu=True,
                       has_bias=False, norm_layer=norm_layer),
            ConvBnRelu(self.inner_channel, 3600, 1, 1, 0,
                       has_bn=True, has_relu=False,
                       has_bias=False, norm_layer=norm_layer),
        )
        self.intra_post_conv = ConvBnRelu(self.inner_channel,
                                          self.inner_channel,
                                          1, 1, 0, has_bn=True, has_relu=True,
                                          has_bias=False, norm_layer=norm_layer)
        self.inter_post_conv = ConvBnRelu(self.inner_channel,
                                          self.inner_channel,
                                          1, 1, 0, has_bn=True, has_relu=True,
                                          has_bias=False, norm_layer=norm_layer)

    def forward(self, x):
        b, h, w = x.size(0), x.size(2), x.size(3)
        value = self.reduce_conv(x)

        # (B, 3600, H, W) -> (B, H*W, H*W) sigmoid affinity matrix; the
        # complement serves as the inter-class (different-object) affinity.
        intra_similarity_map = self.intra_similarity_branch(value)
        intra_similarity_map = intra_similarity_map.view(b, h * w, -1)
        intra_similarity_map = intra_similarity_map.permute(0, 2, 1)
        intra_similarity_map = torch.sigmoid(intra_similarity_map)
        inter_similarity_map = 1 - intra_similarity_map

        value = value.view(b, self.inner_channel, -1)
        value = value.permute(0, 2, 1)

        # Intra-class context: affinity-weighted sum of features, averaged
        # over positions whose affinity >= 0.5 (count clamped to >= 1 to
        # avoid division by zero).
        intra_context = torch.bmm(intra_similarity_map, value)
        intra_mask = torch.ge(intra_similarity_map, 0.5).float()
        intra_mask_count = intra_mask.sum(dim=-1, keepdim=True)
        intra_mask_count = intra_mask_count.masked_fill_(intra_mask_count.eq(0),
                                                         1)
        intra_context = intra_context.div(intra_mask_count)
        intra_context = intra_context.permute(0, 2, 1).contiguous()
        intra_context = intra_context.view(b, self.inner_channel, *x.size()[2:])
        intra_context = self.intra_post_conv(intra_context)

        # Inter-class context: same pooling under the complementary map.
        inter_context = torch.bmm(inter_similarity_map, value)
        inter_mask = torch.ge(inter_similarity_map, 0.5).float()
        inter_mask_count = inter_mask.sum(dim=-1, keepdim=True)
        inter_mask_count = inter_mask_count.masked_fill_(inter_mask_count.eq(0),
                                                         1)
        inter_context = inter_context.div(inter_mask_count)
        inter_context = inter_context.permute(0, 2, 1).contiguous()
        inter_context = inter_context.view(b, self.inner_channel, *x.size()[2:])
        inter_context = self.inter_post_conv(inter_context)

        output = torch.cat([x, intra_context, inter_context], dim=1)
        return output, intra_similarity_map
if __name__ == "__main__":
    # Smoke test: instantiate the model defined in this file and print it.
    # (Was `PSPNet(150, None)`, a NameError -- no PSPNet exists here.)
    model = CPNet(150, None)
    print(model)
| 41.556962
| 80
| 0.581937
|
from functools import partial
from collections import OrderedDict
import torch
import torch.nn as nn
import torch.nn.functional as F
from config import config
from base_model import resnet50
from seg_opr.seg_oprs import ConvBnRelu
class CPNet(nn.Module):
def __init__(self, out_planes, criterion, pretrained_model=None,
norm_layer=nn.BatchNorm2d):
super(CPNet, self).__init__()
self.backbone = resnet50(pretrained_model, norm_layer=norm_layer,
bn_eps=config.bn_eps,
bn_momentum=config.bn_momentum,
deep_stem=True, stem_width=64)
self.backbone.layer3.apply(partial(self._nostride_dilate, dilate=2))
self.backbone.layer4.apply(partial(self._nostride_dilate, dilate=4))
self.business_layer = []
self.context = ObjectContext(2048, 512, norm_layer)
self.head_layer = nn.Sequential(
ConvBnRelu(2048 + 1024, 512, 3, 1, 1,
has_bn=True,
has_relu=True, has_bias=False, norm_layer=norm_layer),
nn.Dropout2d(0.1, inplace=False),
nn.Conv2d(512, out_planes, kernel_size=1)
)
self.aux_layer = nn.Sequential(
ConvBnRelu(1024, 512, 3, 1, 1,
has_bn=True,
has_relu=True, has_bias=False, norm_layer=norm_layer),
nn.Dropout2d(0.1, inplace=False),
nn.Conv2d(512, out_planes, kernel_size=1)
)
self.business_layer.append(self.context)
self.business_layer.append(self.head_layer)
self.business_layer.append(self.aux_layer)
self.criterion = criterion
self.bce_criterion = nn.BCELoss(reduction='mean')
def forward(self, data, label=None, aux_label=None):
blocks = self.backbone(data)
fm, intra_sim_map = self.context(blocks[-1])
fm = self.head_layer(fm)
fm = F.interpolate(fm, scale_factor=8, mode='bilinear',
align_corners=True)
softmax_fm = F.log_softmax(fm, dim=1)
aux_fm = self.aux_layer(blocks[-2])
aux_fm = F.interpolate(aux_fm, scale_factor=8, mode='bilinear',
align_corners=True)
if label is not None:
main_loss = self.criterion(fm, label)
aux_loss = self.criterion(aux_fm, label)
intra_sim_loss = self.bce_criterion(intra_sim_map, aux_label)
loss = main_loss + 0.4 * aux_loss + intra_sim_loss
return loss
return softmax_fm
def _nostride_dilate(self, m, dilate):
if isinstance(m, nn.Conv2d):
if m.stride == (2, 2):
m.stride = (1, 1)
if m.kernel_size == (3, 3):
m.dilation = (dilate // 2, dilate // 2)
m.padding = (dilate // 2, dilate // 2)
else:
if m.kernel_size == (3, 3):
m.dilation = (dilate, dilate)
m.padding = (dilate, dilate)
class ObjectContext(nn.Module):
def __init__(self, in_channels, inner_channel, norm_layer=nn.BatchNorm2d):
super(ObjectContext, self).__init__()
self.in_channels = in_channels
self.inner_channel = inner_channel
self.reduce_conv = ConvBnRelu(self.in_channels, self.inner_channel,
1, 1, 0,
has_bn=True, has_relu=True,
has_bias=False, norm_layer=norm_layer)
self.intra_similarity_branch = nn.Sequential(
ConvBnRelu(self.inner_channel, self.inner_channel, 1, 1, 0,
has_bn=True, has_relu=True,
has_bias=False, norm_layer=norm_layer),
ConvBnRelu(self.inner_channel, 3600, 1, 1, 0,
has_bn=True, has_relu=False,
has_bias=False, norm_layer=norm_layer),
)
self.intra_post_conv = ConvBnRelu(self.inner_channel,
self.inner_channel,
1, 1, 0, has_bn=True, has_relu=True,
has_bias=False, norm_layer=norm_layer)
self.inter_post_conv = ConvBnRelu(self.inner_channel,
self.inner_channel,
1, 1, 0, has_bn=True, has_relu=True,
has_bias=False, norm_layer=norm_layer)
def forward(self, x):
b, h, w = x.size(0), x.size(2), x.size(3)
value = self.reduce_conv(x)
intra_similarity_map = self.intra_similarity_branch(value)
intra_similarity_map = intra_similarity_map.view(b, h * w, -1)
intra_similarity_map = intra_similarity_map.permute(0, 2, 1)
intra_similarity_map = torch.sigmoid(intra_similarity_map)
inter_similarity_map = 1 - intra_similarity_map
value = value.view(b, self.inner_channel, -1)
value = value.permute(0, 2, 1)
intra_context = torch.bmm(intra_similarity_map, value)
intra_mask = torch.ge(intra_similarity_map, 0.5).float()
intra_mask_count = intra_mask.sum(dim=-1, keepdim=True)
intra_mask_count = intra_mask_count.masked_fill_(intra_mask_count.eq(0),
1)
intra_context = intra_context.div(intra_mask_count)
intra_context = intra_context.permute(0, 2, 1).contiguous()
intra_context = intra_context.view(b, self.inner_channel, *x.size()[2:])
intra_context = self.intra_post_conv(intra_context)
inter_context = torch.bmm(inter_similarity_map, value)
inter_mask = torch.ge(inter_similarity_map, 0.5).float()
inter_mask_count = inter_mask.sum(dim=-1, keepdim=True)
inter_mask_count = inter_mask_count.masked_fill_(inter_mask_count.eq(0),
1)
inter_context = inter_context.div(inter_mask_count)
inter_context = inter_context.permute(0, 2, 1).contiguous()
inter_context = inter_context.view(b, self.inner_channel, *x.size()[2:])
inter_context = self.inter_post_conv(inter_context)
output = torch.cat([x, intra_context, inter_context], dim=1)
return output, intra_similarity_map
if __name__ == "__main__":
model = PSPNet(150, None)
print(model)
| true
| true
|
790a57426c4f8796ad41518ae68262146cf82e95
| 665
|
py
|
Python
|
manage.py
|
nikhilchaudhary0126/goodshare
|
c1578796b6376f02970f61ea418e9eb87dc5bb64
|
[
"MIT"
] | 1
|
2022-02-26T09:25:39.000Z
|
2022-02-26T09:25:39.000Z
|
manage.py
|
nikhilchaudhary0126/goodshare
|
c1578796b6376f02970f61ea418e9eb87dc5bb64
|
[
"MIT"
] | null | null | null |
manage.py
|
nikhilchaudhary0126/goodshare
|
c1578796b6376f02970f61ea418e9eb87dc5bb64
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Django CLI entry point: select settings and dispatch the command."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'goodshare.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Import lazily so a missing Django yields this actionable message.
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| 28.913043
| 73
| 0.679699
|
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'goodshare.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| true
| true
|
790a576bef2092f8848a34e75ca0c306984a4d62
| 588
|
py
|
Python
|
eggs/Fabric-1.4.2-py2.7.egg/fabric/thread_handling.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | 2
|
2015-03-10T10:55:26.000Z
|
2020-12-29T06:05:43.000Z
|
eggs/Fabric-1.4.2-py2.7.egg/fabric/thread_handling.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | null | null | null |
eggs/Fabric-1.4.2-py2.7.egg/fabric/thread_handling.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | 1
|
2020-07-25T21:03:18.000Z
|
2020-07-25T21:03:18.000Z
|
import threading
import sys
class ThreadHandler(object):
    """Run a callable in a background daemon thread, capturing exceptions.

    The worker thread starts immediately.  Any exception raised by the
    callable is stored as a ``sys.exc_info()`` triple on ``self.exception``
    (``None`` on clean exit), so the caller can ``self.thread.join()`` and
    then re-raise with the original traceback.
    """

    def __init__(self, name, callable, *args, **kwargs):
        # Set up exception handling
        self.exception = None

        def wrapper(*args, **kwargs):
            try:
                callable(*args, **kwargs)
            except BaseException:
                # Keep the full exc_info triple for later re-raising.
                self.exception = sys.exc_info()

        # Kick off thread
        thread = threading.Thread(None, wrapper, name, args, kwargs)
        # Plain attribute assignment replaces the deprecated setDaemon().
        thread.daemon = True
        thread.start()

        # Make thread available to instantiator
        self.thread = thread
| 28
| 68
| 0.588435
|
import threading
import sys
class ThreadHandler(object):
def __init__(self, name, callable, *args, **kwargs):
self.exception = None
def wrapper(*args, **kwargs):
try:
callable(*args, **kwargs)
except BaseException:
self.exception = sys.exc_info()
thread = threading.Thread(None, wrapper, name, args, kwargs)
thread.setDaemon(True)
thread.start()
self.thread = thread
| true
| true
|
790a577ece84db27e4160fb76a791f94a4e04f44
| 1,247
|
py
|
Python
|
logAxes.py
|
Henrynaut/Py622
|
1ac33db96f82c562fe4a85ca5dc0b9b77c5fd088
|
[
"MIT"
] | null | null | null |
logAxes.py
|
Henrynaut/Py622
|
1ac33db96f82c562fe4a85ca5dc0b9b77c5fd088
|
[
"MIT"
] | null | null | null |
logAxes.py
|
Henrynaut/Py622
|
1ac33db96f82c562fe4a85ca5dc0b9b77c5fd088
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import NullFormatter # useful for `logit` scale
# Fixing random state for reproducibility
np.random.seed(19680801)

# make up some data in the interval ]0, 1[
y = np.random.normal(loc=0.5, scale=0.4, size=1000)
y = y[(y > 0) & (y < 1)]
y.sort()
x = np.arange(len(y))

# plot with various axes scales
plt.figure(1)

# linear
plt.subplot(221)
plt.plot(x, y)
plt.yscale('linear')
plt.title('linear')
plt.grid(True)

# log
plt.subplot(222)
plt.plot(x, y)
plt.yscale('log')
plt.title('log')
plt.grid(True)

# symmetric log
plt.subplot(223)
plt.plot(x, y - y.mean())
# `linthreshy` was renamed to `linthresh` (deprecated in Matplotlib 3.3,
# removed in 3.5); it sets the linear region around zero.
plt.yscale('symlog', linthresh=0.01)
plt.title('symlog')
plt.grid(True)

# logit
plt.subplot(224)
plt.plot(x, y)
plt.yscale('logit')
plt.title('logit')
plt.grid(True)
# Format the minor tick labels of the y-axis into empty strings with
# `NullFormatter`, to avoid cumbering the axis with too many labels.
plt.gca().yaxis.set_minor_formatter(NullFormatter())
# Adjust the subplot layout, because the logit one may take more space
# than usual, due to y-tick labels like "1 - 10^{-3}"
plt.subplots_adjust(top=0.92, bottom=0.08, left=0.10, right=0.95, hspace=0.25,
                    wspace=0.35)

plt.show()
| 22.672727
| 78
| 0.700882
|
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import NullFormatter
np.random.seed(19680801)
y = np.random.normal(loc=0.5, scale=0.4, size=1000)
y = y[(y > 0) & (y < 1)]
y.sort()
x = np.arange(len(y))
plt.figure(1)
plt.subplot(221)
plt.plot(x, y)
plt.yscale('linear')
plt.title('linear')
plt.grid(True)
plt.subplot(222)
plt.plot(x, y)
plt.yscale('log')
plt.title('log')
plt.grid(True)
plt.subplot(223)
plt.plot(x, y - y.mean())
plt.yscale('symlog', linthreshy=0.01)
plt.title('symlog')
plt.grid(True)
plt.subplot(224)
plt.plot(x, y)
plt.yscale('logit')
plt.title('logit')
plt.grid(True)
plt.gca().yaxis.set_minor_formatter(NullFormatter())
plt.subplots_adjust(top=0.92, bottom=0.08, left=0.10, right=0.95, hspace=0.25,
wspace=0.35)
plt.show()
| true
| true
|
790a58e79f1368e3c62b619aefa5153279d5551a
| 5,251
|
py
|
Python
|
tests/timestamp_tests.py
|
LeesahMasko/piwikibot
|
024af387ff48c21526ee206541178157d2653ddc
|
[
"MIT"
] | null | null | null |
tests/timestamp_tests.py
|
LeesahMasko/piwikibot
|
024af387ff48c21526ee206541178157d2653ddc
|
[
"MIT"
] | 6
|
2021-02-27T03:35:42.000Z
|
2021-03-07T22:17:40.000Z
|
tests/timestamp_tests.py
|
LeesahMasko/piwikibot
|
024af387ff48c21526ee206541178157d2653ddc
|
[
"MIT"
] | null | null | null |
"""Tests for the Timestamp class."""
#
# (C) Pywikibot team, 2014-2021
#
# Distributed under the terms of the MIT license.
#
import calendar
import datetime
import re
import unittest
from contextlib import suppress
from pywikibot import Timestamp
from tests.aspects import TestCase
class TestTimestamp(TestCase):
"""Test Timestamp class comparisons."""
net = False
def test_clone(self):
"""Test cloning a Timestamp instance."""
t1 = Timestamp.utcnow()
t2 = t1.clone()
self.assertEqual(t1, t2)
self.assertIsInstance(t2, Timestamp)
def test_instantiate_from_instance(self):
"""Test passing instance to factory methods works."""
t1 = Timestamp.utcnow()
self.assertIsNot(t1, Timestamp.fromISOformat(t1))
self.assertEqual(t1, Timestamp.fromISOformat(t1))
self.assertIsInstance(Timestamp.fromISOformat(t1), Timestamp)
self.assertIsNot(t1, Timestamp.fromtimestampformat(t1))
self.assertEqual(t1, Timestamp.fromtimestampformat(t1))
self.assertIsInstance(Timestamp.fromtimestampformat(t1), Timestamp)
def test_iso_format(self):
"""Test conversion from and to ISO format."""
sep = 'T'
t1 = Timestamp.utcnow()
if not t1.microsecond: # T199179: ensure microsecond is not 0
t1 = t1.replace(microsecond=1)
ts1 = t1.isoformat()
t2 = Timestamp.fromISOformat(ts1)
ts2 = t2.isoformat()
# MediaWiki ISO format doesn't include microseconds
self.assertNotEqual(t1, t2)
t1 = t1.replace(microsecond=0)
self.assertEqual(t1, t2)
self.assertEqual(ts1, ts2)
date, sep, time = ts1.partition(sep)
time = time.rstrip('Z')
self.assertEqual(date, str(t1.date()))
self.assertEqual(time, str(t1.time()))
def test_iso_format_with_sep(self):
"""Test conversion from and to ISO format with separator."""
sep = '*'
t1 = Timestamp.utcnow().replace(microsecond=0)
ts1 = t1.isoformat(sep=sep)
t2 = Timestamp.fromISOformat(ts1, sep=sep)
ts2 = t2.isoformat(sep=sep)
self.assertEqual(t1, t2)
self.assertEqual(t1, t2)
self.assertEqual(ts1, ts2)
date, sep, time = ts1.partition(sep)
time = time.rstrip('Z')
self.assertEqual(date, str(t1.date()))
self.assertEqual(time, str(t1.time()))
def test_iso_format_property(self):
"""Test iso format properties."""
self.assertEqual(Timestamp.ISO8601Format, Timestamp._ISO8601Format())
self.assertEqual(re.sub(r'[\-:TZ]', '', Timestamp.ISO8601Format),
Timestamp.mediawikiTSFormat)
def test_mediawiki_format(self):
"""Test conversion from and to Timestamp format."""
t1 = Timestamp.utcnow()
if not t1.microsecond: # T191827: ensure microsecond is not 0
t1 = t1.replace(microsecond=1000)
ts1 = t1.totimestampformat()
t2 = Timestamp.fromtimestampformat(ts1)
ts2 = t2.totimestampformat()
# MediaWiki timestamp format doesn't include microseconds
self.assertNotEqual(t1, t2)
t1 = t1.replace(microsecond=0)
self.assertEqual(t1, t2)
self.assertEqual(ts1, ts2)
def test_short_mediawiki_format(self):
"""Test short mw timestamp conversion from and to Timestamp format."""
t1 = Timestamp(2018, 12, 17)
t2 = Timestamp.fromtimestampformat('20181217') # short timestamp
ts1 = t1.totimestampformat()
ts2 = t2.totimestampformat()
self.assertEqual(t1, t2)
self.assertEqual(ts1, ts2)
def test_add_timedelta(self):
"""Test addin a timedelta to a Timestamp."""
t1 = Timestamp.utcnow()
t2 = t1 + datetime.timedelta(days=1)
if t1.month != t2.month:
self.assertEqual(1, t2.day)
else:
self.assertEqual(t1.day + 1, t2.day)
self.assertIsInstance(t2, Timestamp)
def test_add_timedate(self):
"""Test unsupported additions raise NotImplemented."""
t1 = datetime.datetime.utcnow()
t2 = t1 + datetime.timedelta(days=1)
t3 = t1.__add__(t2)
self.assertIs(t3, NotImplemented)
# Now check that the pywikibot sub-class behaves the same way
t1 = Timestamp.utcnow()
t2 = t1 + datetime.timedelta(days=1)
t3 = t1.__add__(t2)
self.assertIs(t3, NotImplemented)
def test_sub_timedelta(self):
"""Test subtracting a timedelta from a Timestamp."""
t1 = Timestamp.utcnow()
t2 = t1 - datetime.timedelta(days=1)
if t1.month != t2.month:
self.assertEqual(calendar.monthrange(t2.year, t2.month)[1], t2.day)
else:
self.assertEqual(t1.day - 1, t2.day)
self.assertIsInstance(t2, Timestamp)
def test_sub_timedate(self):
"""Test subtracting two timestamps."""
t1 = Timestamp.utcnow()
t2 = t1 - datetime.timedelta(days=1)
td = t1 - t2
self.assertIsInstance(td, datetime.timedelta)
self.assertEqual(t2 + td, t1)
if __name__ == '__main__': # pragma: no cover
with suppress(SystemExit):
unittest.main()
| 35.006667
| 79
| 0.629404
|
import calendar
import datetime
import re
import unittest
from contextlib import suppress
from pywikibot import Timestamp
from tests.aspects import TestCase
class TestTimestamp(TestCase):
net = False
def test_clone(self):
t1 = Timestamp.utcnow()
t2 = t1.clone()
self.assertEqual(t1, t2)
self.assertIsInstance(t2, Timestamp)
def test_instantiate_from_instance(self):
t1 = Timestamp.utcnow()
self.assertIsNot(t1, Timestamp.fromISOformat(t1))
self.assertEqual(t1, Timestamp.fromISOformat(t1))
self.assertIsInstance(Timestamp.fromISOformat(t1), Timestamp)
self.assertIsNot(t1, Timestamp.fromtimestampformat(t1))
self.assertEqual(t1, Timestamp.fromtimestampformat(t1))
self.assertIsInstance(Timestamp.fromtimestampformat(t1), Timestamp)
def test_iso_format(self):
sep = 'T'
t1 = Timestamp.utcnow()
if not t1.microsecond:
t1 = t1.replace(microsecond=1)
ts1 = t1.isoformat()
t2 = Timestamp.fromISOformat(ts1)
ts2 = t2.isoformat()
self.assertNotEqual(t1, t2)
t1 = t1.replace(microsecond=0)
self.assertEqual(t1, t2)
self.assertEqual(ts1, ts2)
date, sep, time = ts1.partition(sep)
time = time.rstrip('Z')
self.assertEqual(date, str(t1.date()))
self.assertEqual(time, str(t1.time()))
def test_iso_format_with_sep(self):
sep = '*'
t1 = Timestamp.utcnow().replace(microsecond=0)
ts1 = t1.isoformat(sep=sep)
t2 = Timestamp.fromISOformat(ts1, sep=sep)
ts2 = t2.isoformat(sep=sep)
self.assertEqual(t1, t2)
self.assertEqual(t1, t2)
self.assertEqual(ts1, ts2)
date, sep, time = ts1.partition(sep)
time = time.rstrip('Z')
self.assertEqual(date, str(t1.date()))
self.assertEqual(time, str(t1.time()))
def test_iso_format_property(self):
self.assertEqual(Timestamp.ISO8601Format, Timestamp._ISO8601Format())
self.assertEqual(re.sub(r'[\-:TZ]', '', Timestamp.ISO8601Format),
Timestamp.mediawikiTSFormat)
def test_mediawiki_format(self):
t1 = Timestamp.utcnow()
if not t1.microsecond: # T191827: ensure microsecond is not 0
t1 = t1.replace(microsecond=1000)
ts1 = t1.totimestampformat()
t2 = Timestamp.fromtimestampformat(ts1)
ts2 = t2.totimestampformat()
# MediaWiki timestamp format doesn't include microseconds
self.assertNotEqual(t1, t2)
t1 = t1.replace(microsecond=0)
self.assertEqual(t1, t2)
self.assertEqual(ts1, ts2)
def test_short_mediawiki_format(self):
t1 = Timestamp(2018, 12, 17)
t2 = Timestamp.fromtimestampformat('20181217')
ts1 = t1.totimestampformat()
ts2 = t2.totimestampformat()
self.assertEqual(t1, t2)
self.assertEqual(ts1, ts2)
def test_add_timedelta(self):
t1 = Timestamp.utcnow()
t2 = t1 + datetime.timedelta(days=1)
if t1.month != t2.month:
self.assertEqual(1, t2.day)
else:
self.assertEqual(t1.day + 1, t2.day)
self.assertIsInstance(t2, Timestamp)
def test_add_timedate(self):
t1 = datetime.datetime.utcnow()
t2 = t1 + datetime.timedelta(days=1)
t3 = t1.__add__(t2)
self.assertIs(t3, NotImplemented)
t1 = Timestamp.utcnow()
t2 = t1 + datetime.timedelta(days=1)
t3 = t1.__add__(t2)
self.assertIs(t3, NotImplemented)
def test_sub_timedelta(self):
t1 = Timestamp.utcnow()
t2 = t1 - datetime.timedelta(days=1)
if t1.month != t2.month:
self.assertEqual(calendar.monthrange(t2.year, t2.month)[1], t2.day)
else:
self.assertEqual(t1.day - 1, t2.day)
self.assertIsInstance(t2, Timestamp)
def test_sub_timedate(self):
t1 = Timestamp.utcnow()
t2 = t1 - datetime.timedelta(days=1)
td = t1 - t2
self.assertIsInstance(td, datetime.timedelta)
self.assertEqual(t2 + td, t1)
if __name__ == '__main__':
with suppress(SystemExit):
unittest.main()
| true
| true
|
790a59bd9f4ab48c7ffce8b4710fd684df18c5f8
| 3,858
|
py
|
Python
|
google/cloud/datacatalog_v1beta1/types/table_spec.py
|
steffnay/python-datacatalog
|
90a0be276e38e889a5086f8fd233d5b25e19965e
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/datacatalog_v1beta1/types/table_spec.py
|
steffnay/python-datacatalog
|
90a0be276e38e889a5086f8fd233d5b25e19965e
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/datacatalog_v1beta1/types/table_spec.py
|
steffnay/python-datacatalog
|
90a0be276e38e889a5086f8fd233d5b25e19965e
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.cloud.datacatalog.v1beta1",
manifest={
"TableSourceType",
"BigQueryTableSpec",
"ViewSpec",
"TableSpec",
"BigQueryDateShardedSpec",
},
)
class TableSourceType(proto.Enum):
r"""Table source type."""
TABLE_SOURCE_TYPE_UNSPECIFIED = 0
BIGQUERY_VIEW = 2
BIGQUERY_TABLE = 5
class BigQueryTableSpec(proto.Message):
r"""Describes a BigQuery table.
Attributes:
table_source_type (google.cloud.datacatalog_v1beta1.types.TableSourceType):
Output only. The table source type.
view_spec (google.cloud.datacatalog_v1beta1.types.ViewSpec):
Table view specification. This field should only be
populated if ``table_source_type`` is ``BIGQUERY_VIEW``.
table_spec (google.cloud.datacatalog_v1beta1.types.TableSpec):
Spec of a BigQuery table. This field should only be
populated if ``table_source_type`` is ``BIGQUERY_TABLE``.
"""
table_source_type = proto.Field(proto.ENUM, number=1, enum="TableSourceType",)
view_spec = proto.Field(
proto.MESSAGE, number=2, oneof="type_spec", message="ViewSpec",
)
table_spec = proto.Field(
proto.MESSAGE, number=3, oneof="type_spec", message="TableSpec",
)
class ViewSpec(proto.Message):
r"""Table view specification.
Attributes:
view_query (str):
Output only. The query that defines the table
view.
"""
view_query = proto.Field(proto.STRING, number=1,)
class TableSpec(proto.Message):
r"""Normal BigQuery table spec.
Attributes:
grouped_entry (str):
Output only. If the table is a dated shard, i.e., with name
pattern ``[prefix]YYYYMMDD``, ``grouped_entry`` is the Data
Catalog resource name of the date sharded grouped entry, for
example,
``projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}``.
Otherwise, ``grouped_entry`` is empty.
"""
grouped_entry = proto.Field(proto.STRING, number=1,)
class BigQueryDateShardedSpec(proto.Message):
r"""Spec for a group of BigQuery tables with name pattern
``[prefix]YYYYMMDD``. Context:
https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding
Attributes:
dataset (str):
Output only. The Data Catalog resource name of the dataset
entry the current table belongs to, for example,
``projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}``.
table_prefix (str):
Output only. The table name prefix of the shards. The name
of any given shard is ``[table_prefix]YYYYMMDD``, for
example, for shard ``MyTable20180101``, the ``table_prefix``
is ``MyTable``.
shard_count (int):
Output only. Total number of shards.
"""
dataset = proto.Field(proto.STRING, number=1,)
table_prefix = proto.Field(proto.STRING, number=2,)
shard_count = proto.Field(proto.INT64, number=3,)
__all__ = tuple(sorted(__protobuf__.manifest))
| 34.756757
| 107
| 0.667963
|
import proto
__protobuf__ = proto.module(
package="google.cloud.datacatalog.v1beta1",
manifest={
"TableSourceType",
"BigQueryTableSpec",
"ViewSpec",
"TableSpec",
"BigQueryDateShardedSpec",
},
)
class TableSourceType(proto.Enum):
TABLE_SOURCE_TYPE_UNSPECIFIED = 0
BIGQUERY_VIEW = 2
BIGQUERY_TABLE = 5
class BigQueryTableSpec(proto.Message):
table_source_type = proto.Field(proto.ENUM, number=1, enum="TableSourceType",)
view_spec = proto.Field(
proto.MESSAGE, number=2, oneof="type_spec", message="ViewSpec",
)
table_spec = proto.Field(
proto.MESSAGE, number=3, oneof="type_spec", message="TableSpec",
)
class ViewSpec(proto.Message):
view_query = proto.Field(proto.STRING, number=1,)
class TableSpec(proto.Message):
grouped_entry = proto.Field(proto.STRING, number=1,)
class BigQueryDateShardedSpec(proto.Message):
dataset = proto.Field(proto.STRING, number=1,)
table_prefix = proto.Field(proto.STRING, number=2,)
shard_count = proto.Field(proto.INT64, number=3,)
__all__ = tuple(sorted(__protobuf__.manifest))
| true
| true
|
790a59f0a0651c45430ea6219d0a12a690263e1e
| 5,155
|
py
|
Python
|
search/management/commands/test_selenium.py
|
pythonmentor/david-p8
|
61e24e2ecfd23e028d4927d8c18a5791e1b97d90
|
[
"MIT"
] | null | null | null |
search/management/commands/test_selenium.py
|
pythonmentor/david-p8
|
61e24e2ecfd23e028d4927d8c18a5791e1b97d90
|
[
"MIT"
] | null | null | null |
search/management/commands/test_selenium.py
|
pythonmentor/david-p8
|
61e24e2ecfd23e028d4927d8c18a5791e1b97d90
|
[
"MIT"
] | null | null | null |
import requests
import logging
import os
import selenium
import unittest
import time
import requests, re
from django.core.management.base import BaseCommand
from search.models import Product, Category, DetailProduct
from django.db import IntegrityError
from django.core.exceptions import MultipleObjectsReturned
from logging.handlers import RotatingFileHandler
from logging import handlers
from configparser import ConfigParser
from django.test import RequestFactory
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.core import mail
from django.http import request, HttpRequest
from django.utils.http import base36_to_int, int_to_base36
from django.utils.http import urlsafe_base64_encode
from django.db.models.query_utils import Q
from django.utils.encoding import force_bytes
from django.contrib.auth import get_user_model
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
class Command(BaseCommand):
help = "Tests Selenium"
def __init__(self):
if os.environ.get("ENV") == "DEV":
self.driver = webdriver.Firefox("/Users/david/Projets/selenium driver/")
self.url = "http://127.0.0.1:8000/"
self.driver.maximize_window()
if os.environ.get("ENV") == "TRAVIS":
self.BROWSERSTACK_URL = 'https://davidbarat1:FxhRcmmHYxhSpVrjeAWu@hub-cloud.browserstack.com/wd/hub'
self.desired_cap = {
'os' : 'Windows',
'os_version' : '10',
'browser' : 'Chrome',
'browser_version' : '80',
'name' : "P8 Test"
}
self.driver = webdriver.Remote(
command_executor=self.BROWSERSTACK_URL,
desired_capabilities=self.desired_cap)
self.driver.maximize_window()
self.url = "http://167.99.212.10/"
self.search = "Nutella"
self.user = "test@test.com"
self.password = "007Test!"
self.newpassword = "newpassword456"
def handle(self, *args, **options):
self.testMyProducts()
self.testMentionsContacts()
# self.testResetPassword()
self.tearDown()
def testResetPassword(self):
# self.driver.maximize_window()
self.driver.get(self.url)
time.sleep(5)
self.elem = self.driver.find_element_by_id("login")
self.elem.send_keys(Keys.RETURN)
time.sleep(5)
self.elem = self.driver.find_element_by_id("id_username")
self.elem.send_keys(self.user)
self.elem = self.driver.find_element_by_id("id_password")
self.elem.send_keys(self.password)
self.elem.send_keys(Keys.RETURN)
time.sleep(3)
self.elem = self.driver.find_element_by_id("logout")
self.elem.send_keys(Keys.RETURN)
time.sleep(3)
self.elem = self.driver.find_element_by_id("login")
self.elem.send_keys(Keys.RETURN)
time.sleep(5)
self.elem = self.driver.find_element_by_id("resetpassword")
self.elem.send_keys(Keys.RETURN)
time.sleep(3)
self.elem = self.driver.find_element_by_id("id_email")
self.elem.send_keys(self.user)
time.sleep(3)
self.user_filter = User.objects.filter(Q(email=self.user))
for self.user in self.user_filter:
print(self.user)
self.token = default_token_generator.make_token(self.user)
print(self.token)
self.uid = urlsafe_base64_encode(force_bytes(self.user.pk))
print(self.uid)
self.driver.get(self.url + "reset/%s/%s/" % (self.uid, self.token))
time.sleep(3)
self.driver.find_element_by_id("id_new_password1").send_keys(self.newpassword)
self.driver.find_element_by_id("id_new_password2").send_keys(self.newpassword)
self.elem = self.driver.find_element_by_id("id_new_password2")
time.sleep(3)
self.elem.send_keys(Keys.RETURN)
time.sleep(3)
self.driver.quit()
def testMyProducts(self):
# self.driver.maximize_window()
self.driver.get(self.url)
self.elem = self.driver.find_element_by_id("myproducts")
self.elem.send_keys(Keys.RETURN)
time.sleep(5)
self.elem = self.driver.find_element_by_id("id_username")
self.elem.send_keys(self.user)
self.elem = self.driver.find_element_by_id("id_password")
self.elem.send_keys(self.password)
self.elem.send_keys(Keys.RETURN)
time.sleep(5)
def testMentionsContacts(self):
# self.driver.maximize_window()
self.driver.get(self.url)
self.elem = self.driver.find_element_by_id("mentions")
self.elem.send_keys(Keys.RETURN)
time.sleep(5)
self.elem = self.driver.find_element_by_id("contact")
self.elem.send_keys(Keys.RETURN)
time.sleep(5)
def tearDown(self):
self.driver.quit()
| 38.470149
| 112
| 0.668477
|
import requests
import logging
import os
import selenium
import unittest
import time
import requests, re
from django.core.management.base import BaseCommand
from search.models import Product, Category, DetailProduct
from django.db import IntegrityError
from django.core.exceptions import MultipleObjectsReturned
from logging.handlers import RotatingFileHandler
from logging import handlers
from configparser import ConfigParser
from django.test import RequestFactory
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.core import mail
from django.http import request, HttpRequest
from django.utils.http import base36_to_int, int_to_base36
from django.utils.http import urlsafe_base64_encode
from django.db.models.query_utils import Q
from django.utils.encoding import force_bytes
from django.contrib.auth import get_user_model
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
class Command(BaseCommand):
help = "Tests Selenium"
def __init__(self):
if os.environ.get("ENV") == "DEV":
self.driver = webdriver.Firefox("/Users/david/Projets/selenium driver/")
self.url = "http://127.0.0.1:8000/"
self.driver.maximize_window()
if os.environ.get("ENV") == "TRAVIS":
self.BROWSERSTACK_URL = 'https://davidbarat1:FxhRcmmHYxhSpVrjeAWu@hub-cloud.browserstack.com/wd/hub'
self.desired_cap = {
'os' : 'Windows',
'os_version' : '10',
'browser' : 'Chrome',
'browser_version' : '80',
'name' : "P8 Test"
}
self.driver = webdriver.Remote(
command_executor=self.BROWSERSTACK_URL,
desired_capabilities=self.desired_cap)
self.driver.maximize_window()
self.url = "http://167.99.212.10/"
self.search = "Nutella"
self.user = "test@test.com"
self.password = "007Test!"
self.newpassword = "newpassword456"
def handle(self, *args, **options):
self.testMyProducts()
self.testMentionsContacts()
self.tearDown()
def testResetPassword(self):
self.driver.get(self.url)
time.sleep(5)
self.elem = self.driver.find_element_by_id("login")
self.elem.send_keys(Keys.RETURN)
time.sleep(5)
self.elem = self.driver.find_element_by_id("id_username")
self.elem.send_keys(self.user)
self.elem = self.driver.find_element_by_id("id_password")
self.elem.send_keys(self.password)
self.elem.send_keys(Keys.RETURN)
time.sleep(3)
self.elem = self.driver.find_element_by_id("logout")
self.elem.send_keys(Keys.RETURN)
time.sleep(3)
self.elem = self.driver.find_element_by_id("login")
self.elem.send_keys(Keys.RETURN)
time.sleep(5)
self.elem = self.driver.find_element_by_id("resetpassword")
self.elem.send_keys(Keys.RETURN)
time.sleep(3)
self.elem = self.driver.find_element_by_id("id_email")
self.elem.send_keys(self.user)
time.sleep(3)
self.user_filter = User.objects.filter(Q(email=self.user))
for self.user in self.user_filter:
print(self.user)
self.token = default_token_generator.make_token(self.user)
print(self.token)
self.uid = urlsafe_base64_encode(force_bytes(self.user.pk))
print(self.uid)
self.driver.get(self.url + "reset/%s/%s/" % (self.uid, self.token))
time.sleep(3)
self.driver.find_element_by_id("id_new_password1").send_keys(self.newpassword)
self.driver.find_element_by_id("id_new_password2").send_keys(self.newpassword)
self.elem = self.driver.find_element_by_id("id_new_password2")
time.sleep(3)
self.elem.send_keys(Keys.RETURN)
time.sleep(3)
self.driver.quit()
def testMyProducts(self):
self.driver.get(self.url)
self.elem = self.driver.find_element_by_id("myproducts")
self.elem.send_keys(Keys.RETURN)
time.sleep(5)
self.elem = self.driver.find_element_by_id("id_username")
self.elem.send_keys(self.user)
self.elem = self.driver.find_element_by_id("id_password")
self.elem.send_keys(self.password)
self.elem.send_keys(Keys.RETURN)
time.sleep(5)
def testMentionsContacts(self):
self.driver.get(self.url)
self.elem = self.driver.find_element_by_id("mentions")
self.elem.send_keys(Keys.RETURN)
time.sleep(5)
self.elem = self.driver.find_element_by_id("contact")
self.elem.send_keys(Keys.RETURN)
time.sleep(5)
def tearDown(self):
self.driver.quit()
| true
| true
|
790a5b04a30e033e22fa76b57a22baece01da042
| 4,691
|
py
|
Python
|
project_name/settings.py
|
YoggyPutra/cobaq
|
4ce2dbb76f21cf955c8992b826b9e3cb57aa932f
|
[
"Unlicense"
] | null | null | null |
project_name/settings.py
|
YoggyPutra/cobaq
|
4ce2dbb76f21cf955c8992b826b9e3cb57aa932f
|
[
"Unlicense"
] | null | null | null |
project_name/settings.py
|
YoggyPutra/cobaq
|
4ce2dbb76f21cf955c8992b826b9e3cb57aa932f
|
[
"Unlicense"
] | null | null | null |
"""
Django settings for project_name project.
Generated by 'django-admin startproject' using Django 3.2.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from pathlib import Path
import dj_database_url
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# for best-practices.
# SECURITY WARNING: keep the secret key used in production secret!
# Please set SECRET_KEY environment variable in your production environment
# (e.g. Heroku).
SECRET_KEY = os.getenv('SECRET_KEY', 'django-insecure-*%$(!wsn1rre5@c!$jm7w&$+s3y0xqn%cnmk)&6(ukjn)18b!0')
# Automatically determine environment by detecting if DATABASE_URL variable.
# DATABASE_URL is provided by Heroku if a database add-on is added
# (e.g. Heroku Postgres).
PRODUCTION = os.getenv('DATABASE_URL') is not None
# SECURITY WARNING: don't run with debug turned on in production!
# If you want to enable debugging on Heroku for learning purposes,
# set this to True.
DEBUG = not PRODUCTION
HEROKU_APP_NAME = os.getenv('HEROKU_APP_NAME', '')
ALLOWED_HOSTS = [f'{HEROKU_APP_NAME}.herokuapp.com']
if not PRODUCTION:
ALLOWED_HOSTS += ['.localhost', '127.0.0.1', '[::1]']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'main',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'project_name.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
BASE_DIR / 'templates',
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'project_name.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Set database settings automatically using DATABASE_URL.
if PRODUCTION:
DATABASES['default'] = dj_database_url.config(
conn_max_age=600, ssl_require=True
)
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
# Feel free to change these according to your needs.
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# This is the directory for storing `collectstatic` results.
# This shouldn't be included in your Git repository.
STATIC_ROOT = BASE_DIR / 'staticfiles'
# You can use this directory to store project-wide static files.
STATICFILES_DIRS = [
BASE_DIR / 'static',
]
# Make sure the directories exist to prevent errors when doing `collectstatic`.
for directory in [*STATICFILES_DIRS, STATIC_ROOT]:
directory.mkdir(exist_ok=True)
# Enable compression and caching features of whitenoise.
# You can remove this if it causes problems on your setup.
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
| 28.08982
| 106
| 0.715412
|
import os
from pathlib import Path
import dj_database_url
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = os.getenv('SECRET_KEY', 'django-insecure-*%$(!wsn1rre5@c!$jm7w&$+s3y0xqn%cnmk)&6(ukjn)18b!0')
PRODUCTION = os.getenv('DATABASE_URL') is not None
# If you want to enable debugging on Heroku for learning purposes,
# set this to True.
DEBUG = not PRODUCTION
HEROKU_APP_NAME = os.getenv('HEROKU_APP_NAME', '')
ALLOWED_HOSTS = [f'{HEROKU_APP_NAME}.herokuapp.com']
if not PRODUCTION:
ALLOWED_HOSTS += ['.localhost', '127.0.0.1', '[::1]']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'main',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'project_name.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
BASE_DIR / 'templates',
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'project_name.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Set database settings automatically using DATABASE_URL.
if PRODUCTION:
DATABASES['default'] = dj_database_url.config(
conn_max_age=600, ssl_require=True
)
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
# Feel free to change these according to your needs.
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# This is the directory for storing `collectstatic` results.
# This shouldn't be included in your Git repository.
STATIC_ROOT = BASE_DIR / 'staticfiles'
STATICFILES_DIRS = [
BASE_DIR / 'static',
]
for directory in [*STATICFILES_DIRS, STATIC_ROOT]:
directory.mkdir(exist_ok=True)
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
| true
| true
|
790a5b7407c9b5d974aa03fbf24850d0083df6b0
| 1,025
|
py
|
Python
|
python/poo_algoritmos/merge_sort.py
|
lmonsalve22/Learning-to-Code
|
2e32eba3fbd0bd63cc539e1e6d372ca346b765c9
|
[
"MIT"
] | null | null | null |
python/poo_algoritmos/merge_sort.py
|
lmonsalve22/Learning-to-Code
|
2e32eba3fbd0bd63cc539e1e6d372ca346b765c9
|
[
"MIT"
] | null | null | null |
python/poo_algoritmos/merge_sort.py
|
lmonsalve22/Learning-to-Code
|
2e32eba3fbd0bd63cc539e1e6d372ca346b765c9
|
[
"MIT"
] | null | null | null |
import random
def ordenamiento_por_mezcla(lista):
if len(lista) > 1:
medio = len(lista) // 2
izquierda = lista[:medio]
derecha = lista[medio:]
# llamada recursiva en cada mitad
ordenamiento_por_mezcla(izquierda)
ordenamiento_por_mezcla(derecha)
# Iteradores para recorrer las dos sublistas
i = 0
j = 0
# Iterador para la lista principal
k = 0
while i < len(izquierda) and j < len(derecha):
if izquierda[i] < derecha[j]:
lista[k] = izquierda[i]
i += 1
else:
lista[k] = derecha[j]
j += 1
k += 1
while i < len(izquierda):
lista[k] = izquierda[i]
i += 1
k += 1
while j < len(derecha):
lista[k] = derecha[j]
j += 1
k += 1
return lista
if __name__ == '__main__':
tamano_de_lista = int(input('De que tamaño será la lista? '))
lista = [random.randint(0, 100) for i in range(tamano_de_lista)]
print(lista)
print('-' * 20)
lista_ordenada = ordenamiento_por_mezcla(lista)
print(lista_ordenada)
| 20.098039
| 66
| 0.612683
|
import random
def ordenamiento_por_mezcla(lista):
if len(lista) > 1:
medio = len(lista) // 2
izquierda = lista[:medio]
derecha = lista[medio:]
ordenamiento_por_mezcla(izquierda)
ordenamiento_por_mezcla(derecha)
i = 0
j = 0
k = 0
while i < len(izquierda) and j < len(derecha):
if izquierda[i] < derecha[j]:
lista[k] = izquierda[i]
i += 1
else:
lista[k] = derecha[j]
j += 1
k += 1
while i < len(izquierda):
lista[k] = izquierda[i]
i += 1
k += 1
while j < len(derecha):
lista[k] = derecha[j]
j += 1
k += 1
return lista
if __name__ == '__main__':
tamano_de_lista = int(input('De que tamaño será la lista? '))
lista = [random.randint(0, 100) for i in range(tamano_de_lista)]
print(lista)
print('-' * 20)
lista_ordenada = ordenamiento_por_mezcla(lista)
print(lista_ordenada)
| true
| true
|
790a5bead5babede83d434e3f33b39197b1ed5d8
| 4,240
|
py
|
Python
|
cloudpassage/cve_exception.py
|
patricksanders/cloudpassage-halo-python-sdk
|
7fccaaacf20cc469e03d5727c6528a105ae7e790
|
[
"BSD-3-Clause"
] | 8
|
2016-10-24T12:37:32.000Z
|
2019-06-01T10:00:18.000Z
|
cloudpassage/cve_exception.py
|
patricksanders/cloudpassage-halo-python-sdk
|
7fccaaacf20cc469e03d5727c6528a105ae7e790
|
[
"BSD-3-Clause"
] | 109
|
2016-08-09T04:51:48.000Z
|
2020-02-11T02:33:35.000Z
|
cloudpassage/cve_exception.py
|
patricksanders/cloudpassage-halo-python-sdk
|
7fccaaacf20cc469e03d5727c6528a105ae7e790
|
[
"BSD-3-Clause"
] | 14
|
2016-01-23T00:10:30.000Z
|
2021-07-07T21:13:56.000Z
|
"""CveException Class"""
import cloudpassage.sanity as sanity
from .halo_endpoint import HaloEndpoint
from .http_helper import HttpHelper
class CveExceptions(HaloEndpoint):
"""Initializing the CveException class:
Args:
session (:class:`cloudpassage.HaloSession`): This will define how you
interact with the Halo API, including proxy settings and API keys
used for authentication.
Keyword args:
endpoint_version (int): Endpoint version override.
"""
object_name = "cve_exception"
objects_name = "cve_exceptions"
default_endpoint_version = 1
def endpoint(self):
"""Return the endpoint for API requests."""
return "/v{}/{}".format(self.endpoint_version, self.objects_name)
@classmethod
def object_key(cls):
"""Return the key used to pull the object from the json document."""
return cls.object_name
@classmethod
def pagination_key(cls):
"""Return the pagination key for parsing paged results."""
return cls.objects_name
def create(self, package_name, package_version, scope="all", scope_id=''):
"""This method allows user to create CVE exceptions.
Args:
package_name (str): The name of the vulnerable
package to be excepted.
package_version (str): The version number of the
vulnerable package.
scope (str): Possible values are server, group and all.
scope_id (str): If you pass the value server as scope, this field
will include server ID. If you pass the value group as scope,
this field will include group ID.
Returns:
str: ID of the newly-created cve exception
"""
body_ref = {
"server": "server_id",
"group": "group_id"
}
params = {
"package_name": package_name,
"package_version": package_version,
"scope": scope
}
endpoint = self.endpoint()
if scope != "all":
sanity.validate_cve_exception_scope_id(scope_id)
scope_key = body_ref[scope]
params[scope_key] = scope_id
body = {"cve_exception": params}
request = HttpHelper(self.session)
response = request.post(endpoint, body)
return response["cve_exception"]["id"]
def update(self, exception_id, **kwargs):
""" Update CVE Exceptions.
Args:
exception_id (str): Identifier for the CVE exception.
Keyword Args:
scope (str): Possible values are server, group and all.
group_id (str): The ID of the server group containing the server to
which this exception applies.
server_id (str): The ID of the server to which this exception
applies.
cve_entries : List of CVEs
Returns:
True if successful, throws exception otherwise.
"""
endpoint = "{}/{}".format(self.endpoint(), exception_id)
body = {"cve_exception": kwargs}
request = HttpHelper(self.session)
response = request.put(endpoint, body)
return response
# The following class needs to live on only in name, and should absorb the
# functionality of the current CveExceptions class.
class CveException(HaloEndpoint):
"""Initializing the CveException class:
Args:
session (:class:`cloudpassage.HaloSession`): This will define how you
interact with the Halo API, including proxy settings and API keys
used for authentication.
"""
object_name = "cve_exception"
objects_name = "cve_exceptions"
default_endpoint_version = 1
def endpoint(self):
"""Return the endpoint for API requests."""
return "/v{}/{}".format(self.endpoint_version, self.objects_name)
@classmethod
def object_key(cls):
"""Return the key used to pull the object from the json document."""
return cls.object_name
@classmethod
def pagination_key(cls):
"""Return the pagination key for parsing paged results."""
return cls.objects_name
| 32.366412
| 79
| 0.619811
|
import cloudpassage.sanity as sanity
from .halo_endpoint import HaloEndpoint
from .http_helper import HttpHelper
class CveExceptions(HaloEndpoint):
object_name = "cve_exception"
objects_name = "cve_exceptions"
default_endpoint_version = 1
def endpoint(self):
return "/v{}/{}".format(self.endpoint_version, self.objects_name)
@classmethod
def object_key(cls):
return cls.object_name
@classmethod
def pagination_key(cls):
return cls.objects_name
def create(self, package_name, package_version, scope="all", scope_id=''):
body_ref = {
"server": "server_id",
"group": "group_id"
}
params = {
"package_name": package_name,
"package_version": package_version,
"scope": scope
}
endpoint = self.endpoint()
if scope != "all":
sanity.validate_cve_exception_scope_id(scope_id)
scope_key = body_ref[scope]
params[scope_key] = scope_id
body = {"cve_exception": params}
request = HttpHelper(self.session)
response = request.post(endpoint, body)
return response["cve_exception"]["id"]
def update(self, exception_id, **kwargs):
endpoint = "{}/{}".format(self.endpoint(), exception_id)
body = {"cve_exception": kwargs}
request = HttpHelper(self.session)
response = request.put(endpoint, body)
return response
class CveException(HaloEndpoint):
object_name = "cve_exception"
objects_name = "cve_exceptions"
default_endpoint_version = 1
def endpoint(self):
return "/v{}/{}".format(self.endpoint_version, self.objects_name)
@classmethod
def object_key(cls):
return cls.object_name
@classmethod
def pagination_key(cls):
return cls.objects_name
| true
| true
|
790a5c0817c70c253aa110fa1a7e2219922f0ee5
| 10,590
|
py
|
Python
|
eduu/plugins/stickers.py
|
MikeOwino/EduuRobot
|
24cbdce466ae3dd45d26796df866a94198d0feec
|
[
"MIT"
] | null | null | null |
eduu/plugins/stickers.py
|
MikeOwino/EduuRobot
|
24cbdce466ae3dd45d26796df866a94198d0feec
|
[
"MIT"
] | 6
|
2022-01-04T12:40:41.000Z
|
2022-03-10T04:57:30.000Z
|
eduu/plugins/stickers.py
|
MikeOwino/EduuRobot
|
24cbdce466ae3dd45d26796df866a94198d0feec
|
[
"MIT"
] | null | null | null |
# SPDX-License-Identifier: MIT
# Copyright (c) 2018-2022 Amano Team
import os
import shutil
import tempfile
from PIL import Image
from pyrogram import Client, filters
from pyrogram.enums import MessageEntityType
from pyrogram.errors import PeerIdInvalid, StickersetInvalid
from pyrogram.raw.functions.messages import GetStickerSet, SendMedia
from pyrogram.raw.functions.stickers import AddStickerToSet, CreateStickerSet
from pyrogram.raw.types import (
DocumentAttributeFilename,
InputDocument,
InputMediaUploadedDocument,
InputStickerSetItem,
InputStickerSetShortName,
)
from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup, Message
from eduu.config import LOG_CHAT, PREFIXES
from eduu.utils import EMOJI_PATTERN, http
from eduu.utils.localization import use_chat_lang
@Client.on_message(filters.command(["kang", "kibe", "steal"], PREFIXES))
@use_chat_lang()
async def kang_sticker(c: Client, m: Message, strings):
prog_msg = await m.reply_text(strings("kanging_sticker_msg"))
bot_username = c.me.username
sticker_emoji = "🤔"
packnum = 0
packname_found = False
resize = False
animated = False
reply = m.reply_to_message
user = await c.resolve_peer(m.from_user.username or m.from_user.id)
if reply and reply.media:
if reply.photo:
resize = True
elif reply.document:
if "image" in reply.document.mime_type:
# mime_type: image/webp
resize = True
elif "tgsticker" in reply.document.mime_type:
# mime_type: application/x-tgsticker
animated = True
elif reply.sticker:
if not reply.sticker.file_name:
return await prog_msg.edit_text(strings("err_sticker_no_file_name"))
if reply.sticker.emoji:
sticker_emoji = reply.sticker.emoji
animated = reply.sticker.is_animated
if not reply.sticker.file_name.endswith(".tgs"):
resize = True
else:
return await prog_msg.edit_text(strings("invalid_media_string"))
pack_prefix = "anim" if animated else "a"
packname = f"{pack_prefix}_{m.from_user.id}_by_{bot_username}"
if len(m.command) > 1:
if m.command[1].isdigit() and int(m.command[1]) > 0:
# provide pack number to kang in desired pack
packnum = m.command.pop(1)
packname = f"{pack_prefix}{packnum}_{m.from_user.id}_by_{bot_username}"
if len(m.command) > 1:
# matches all valid emojis in input
sticker_emoji = (
"".join(set(EMOJI_PATTERN.findall("".join(m.command[1:]))))
or sticker_emoji
)
filename = await c.download_media(m.reply_to_message)
if not filename:
# Failed to download
await prog_msg.delete()
return
elif m.entities and len(m.entities) > 1:
packname = f"a_{m.from_user.id}_by_{bot_username}"
pack_prefix = "a"
# searching if image_url is given
img_url = None
filename = "sticker.png"
for y in m.entities:
if y.type == MessageEntityType.URL:
img_url = m.text[y.offset : (y.offset + y.length)]
break
if not img_url:
await prog_msg.delete()
return
try:
r = await http.get(img_url)
if r.status_code == 200:
with open(filename, mode="wb") as f:
f.write(r.read())
except Exception as r_e:
return await prog_msg.edit_text(f"{r_e.__class__.__name__} : {r_e}")
if len(m.command) > 2:
# m.command[1] is image_url
if m.command[2].isdigit() and int(m.command[2]) > 0:
packnum = m.command.pop(2)
packname = f"a{packnum}_{m.from_user.id}_by_{bot_username}"
if len(m.command) > 2:
sticker_emoji = (
"".join(set(EMOJI_PATTERN.findall("".join(m.command[2:]))))
or sticker_emoji
)
resize = True
else:
return await prog_msg.delete()
try:
if resize:
filename = resize_image(filename)
max_stickers = 50 if animated else 120
while not packname_found:
try:
stickerset = await c.invoke(
GetStickerSet(
stickerset=InputStickerSetShortName(short_name=packname),
hash=0,
)
)
if stickerset.set.count >= max_stickers:
packnum += 1
packname = (
f"{pack_prefix}_{packnum}_{m.from_user.id}_by_{bot_username}"
)
else:
packname_found = True
except StickersetInvalid:
break
file = await c.save_file(filename)
media = await c.invoke(
SendMedia(
peer=(await c.resolve_peer(LOG_CHAT)),
media=InputMediaUploadedDocument(
file=file,
mime_type=c.guess_mime_type(filename),
attributes=[DocumentAttributeFilename(file_name=filename)],
),
message=f"#Sticker kang by UserID -> {m.from_user.id}",
random_id=c.rnd_id(),
)
)
stkr_file = media.updates[-1].message.media.document
if packname_found:
await prog_msg.edit_text(strings("use_existing_pack"))
await c.invoke(
AddStickerToSet(
stickerset=InputStickerSetShortName(short_name=packname),
sticker=InputStickerSetItem(
document=InputDocument(
id=stkr_file.id,
access_hash=stkr_file.access_hash,
file_reference=stkr_file.file_reference,
),
emoji=sticker_emoji,
),
)
)
else:
await prog_msg.edit_text(strings("create_new_pack_string"))
u_name = m.from_user.username
if u_name:
u_name = f"@{u_name}"
else:
u_name = str(m.from_user.id)
stkr_title = f"{u_name}'s "
if animated:
stkr_title += "Anim. "
stkr_title += "EduuPack"
if packnum != 0:
stkr_title += f" v{packnum}"
try:
await c.invoke(
CreateStickerSet(
user_id=user,
title=stkr_title,
short_name=packname,
stickers=[
InputStickerSetItem(
document=InputDocument(
id=stkr_file.id,
access_hash=stkr_file.access_hash,
file_reference=stkr_file.file_reference,
),
emoji=sticker_emoji,
)
],
animated=animated,
)
)
except PeerIdInvalid:
return await prog_msg.edit_text(
strings("cant_create_sticker_pack_string"),
reply_markup=InlineKeyboardMarkup(
[
[
InlineKeyboardButton(
"/start", url=f"https://t.me/{bot_username}?start"
)
]
]
),
)
except Exception as all_e:
await prog_msg.edit_text(f"{all_e.__class__.__name__} : {all_e}")
else:
markup = InlineKeyboardMarkup(
[
[
InlineKeyboardButton(
strings("view_sticker_pack_btn"),
url=f"t.me/addstickers/{packname}",
)
]
]
)
kanged_success_msg = strings("sticker_kanged_string")
await prog_msg.edit_text(
kanged_success_msg.format(sticker_emoji=sticker_emoji), reply_markup=markup
)
# Cleanup
try:
os.remove(filename)
except OSError:
pass
def resize_image(filename: str) -> str:
im = Image.open(filename)
maxsize = 512
scale = maxsize / max(im.width, im.height)
sizenew = (int(im.width * scale), int(im.height * scale))
im = im.resize(sizenew, Image.NEAREST)
downpath, f_name = os.path.split(filename)
# not hardcoding png_image as "sticker.png"
png_image = os.path.join(downpath, f"{f_name.split('.', 1)[0]}.png")
im.save(png_image, "PNG")
if png_image != filename:
os.remove(filename)
return png_image
@Client.on_message(filters.command("stickerid", PREFIXES) & filters.reply)
@use_chat_lang()
async def getstickerid(c: Client, m: Message, strings):
if m.reply_to_message.sticker:
await m.reply_text(
strings("get_sticker_id_string").format(
stickerid=m.reply_to_message.sticker.file_id
)
)
@Client.on_message(filters.command("getsticker", PREFIXES) & filters.reply)
@use_chat_lang()
async def getstickeraspng(c: Client, m: Message, strings):
sticker = m.reply_to_message.sticker
if sticker:
if sticker.is_animated:
await m.reply_text(strings("animated_not_supported"))
elif not sticker.is_animated:
with tempfile.TemporaryDirectory() as tempdir:
path = os.path.join(tempdir, "getsticker")
sticker_file = await c.download_media(
message=m.reply_to_message,
file_name=f"{path}/{sticker.set_name}.png",
)
await m.reply_to_message.reply_document(
document=sticker_file,
caption=strings("sticker_info").format(
emoji=sticker.emoji, id=sticker.file_id
),
)
shutil.rmtree(tempdir, ignore_errors=True)
else:
await m.reply_text(strings("not_sticker"))
| 37.821429
| 87
| 0.533428
|
import os
import shutil
import tempfile
from PIL import Image
from pyrogram import Client, filters
from pyrogram.enums import MessageEntityType
from pyrogram.errors import PeerIdInvalid, StickersetInvalid
from pyrogram.raw.functions.messages import GetStickerSet, SendMedia
from pyrogram.raw.functions.stickers import AddStickerToSet, CreateStickerSet
from pyrogram.raw.types import (
DocumentAttributeFilename,
InputDocument,
InputMediaUploadedDocument,
InputStickerSetItem,
InputStickerSetShortName,
)
from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup, Message
from eduu.config import LOG_CHAT, PREFIXES
from eduu.utils import EMOJI_PATTERN, http
from eduu.utils.localization import use_chat_lang
@Client.on_message(filters.command(["kang", "kibe", "steal"], PREFIXES))
@use_chat_lang()
async def kang_sticker(c: Client, m: Message, strings):
prog_msg = await m.reply_text(strings("kanging_sticker_msg"))
bot_username = c.me.username
sticker_emoji = "🤔"
packnum = 0
packname_found = False
resize = False
animated = False
reply = m.reply_to_message
user = await c.resolve_peer(m.from_user.username or m.from_user.id)
if reply and reply.media:
if reply.photo:
resize = True
elif reply.document:
if "image" in reply.document.mime_type:
resize = True
elif "tgsticker" in reply.document.mime_type:
animated = True
elif reply.sticker:
if not reply.sticker.file_name:
return await prog_msg.edit_text(strings("err_sticker_no_file_name"))
if reply.sticker.emoji:
sticker_emoji = reply.sticker.emoji
animated = reply.sticker.is_animated
if not reply.sticker.file_name.endswith(".tgs"):
resize = True
else:
return await prog_msg.edit_text(strings("invalid_media_string"))
pack_prefix = "anim" if animated else "a"
packname = f"{pack_prefix}_{m.from_user.id}_by_{bot_username}"
if len(m.command) > 1:
if m.command[1].isdigit() and int(m.command[1]) > 0:
packnum = m.command.pop(1)
packname = f"{pack_prefix}{packnum}_{m.from_user.id}_by_{bot_username}"
if len(m.command) > 1:
sticker_emoji = (
"".join(set(EMOJI_PATTERN.findall("".join(m.command[1:]))))
or sticker_emoji
)
filename = await c.download_media(m.reply_to_message)
if not filename:
await prog_msg.delete()
return
elif m.entities and len(m.entities) > 1:
packname = f"a_{m.from_user.id}_by_{bot_username}"
pack_prefix = "a"
img_url = None
filename = "sticker.png"
for y in m.entities:
if y.type == MessageEntityType.URL:
img_url = m.text[y.offset : (y.offset + y.length)]
break
if not img_url:
await prog_msg.delete()
return
try:
r = await http.get(img_url)
if r.status_code == 200:
with open(filename, mode="wb") as f:
f.write(r.read())
except Exception as r_e:
return await prog_msg.edit_text(f"{r_e.__class__.__name__} : {r_e}")
if len(m.command) > 2:
if m.command[2].isdigit() and int(m.command[2]) > 0:
packnum = m.command.pop(2)
packname = f"a{packnum}_{m.from_user.id}_by_{bot_username}"
if len(m.command) > 2:
sticker_emoji = (
"".join(set(EMOJI_PATTERN.findall("".join(m.command[2:]))))
or sticker_emoji
)
resize = True
else:
return await prog_msg.delete()
try:
if resize:
filename = resize_image(filename)
max_stickers = 50 if animated else 120
while not packname_found:
try:
stickerset = await c.invoke(
GetStickerSet(
stickerset=InputStickerSetShortName(short_name=packname),
hash=0,
)
)
if stickerset.set.count >= max_stickers:
packnum += 1
packname = (
f"{pack_prefix}_{packnum}_{m.from_user.id}_by_{bot_username}"
)
else:
packname_found = True
except StickersetInvalid:
break
file = await c.save_file(filename)
media = await c.invoke(
SendMedia(
peer=(await c.resolve_peer(LOG_CHAT)),
media=InputMediaUploadedDocument(
file=file,
mime_type=c.guess_mime_type(filename),
attributes=[DocumentAttributeFilename(file_name=filename)],
),
message=f"#Sticker kang by UserID -> {m.from_user.id}",
random_id=c.rnd_id(),
)
)
stkr_file = media.updates[-1].message.media.document
if packname_found:
await prog_msg.edit_text(strings("use_existing_pack"))
await c.invoke(
AddStickerToSet(
stickerset=InputStickerSetShortName(short_name=packname),
sticker=InputStickerSetItem(
document=InputDocument(
id=stkr_file.id,
access_hash=stkr_file.access_hash,
file_reference=stkr_file.file_reference,
),
emoji=sticker_emoji,
),
)
)
else:
await prog_msg.edit_text(strings("create_new_pack_string"))
u_name = m.from_user.username
if u_name:
u_name = f"@{u_name}"
else:
u_name = str(m.from_user.id)
stkr_title = f"{u_name}'s "
if animated:
stkr_title += "Anim. "
stkr_title += "EduuPack"
if packnum != 0:
stkr_title += f" v{packnum}"
try:
await c.invoke(
CreateStickerSet(
user_id=user,
title=stkr_title,
short_name=packname,
stickers=[
InputStickerSetItem(
document=InputDocument(
id=stkr_file.id,
access_hash=stkr_file.access_hash,
file_reference=stkr_file.file_reference,
),
emoji=sticker_emoji,
)
],
animated=animated,
)
)
except PeerIdInvalid:
return await prog_msg.edit_text(
strings("cant_create_sticker_pack_string"),
reply_markup=InlineKeyboardMarkup(
[
[
InlineKeyboardButton(
"/start", url=f"https://t.me/{bot_username}?start"
)
]
]
),
)
except Exception as all_e:
await prog_msg.edit_text(f"{all_e.__class__.__name__} : {all_e}")
else:
markup = InlineKeyboardMarkup(
[
[
InlineKeyboardButton(
strings("view_sticker_pack_btn"),
url=f"t.me/addstickers/{packname}",
)
]
]
)
kanged_success_msg = strings("sticker_kanged_string")
await prog_msg.edit_text(
kanged_success_msg.format(sticker_emoji=sticker_emoji), reply_markup=markup
)
# Cleanup
try:
os.remove(filename)
except OSError:
pass
def resize_image(filename: str) -> str:
im = Image.open(filename)
maxsize = 512
scale = maxsize / max(im.width, im.height)
sizenew = (int(im.width * scale), int(im.height * scale))
im = im.resize(sizenew, Image.NEAREST)
downpath, f_name = os.path.split(filename)
# not hardcoding png_image as "sticker.png"
png_image = os.path.join(downpath, f"{f_name.split('.', 1)[0]}.png")
im.save(png_image, "PNG")
if png_image != filename:
os.remove(filename)
return png_image
@Client.on_message(filters.command("stickerid", PREFIXES) & filters.reply)
@use_chat_lang()
async def getstickerid(c: Client, m: Message, strings):
if m.reply_to_message.sticker:
await m.reply_text(
strings("get_sticker_id_string").format(
stickerid=m.reply_to_message.sticker.file_id
)
)
@Client.on_message(filters.command("getsticker", PREFIXES) & filters.reply)
@use_chat_lang()
async def getstickeraspng(c: Client, m: Message, strings):
sticker = m.reply_to_message.sticker
if sticker:
if sticker.is_animated:
await m.reply_text(strings("animated_not_supported"))
elif not sticker.is_animated:
with tempfile.TemporaryDirectory() as tempdir:
path = os.path.join(tempdir, "getsticker")
sticker_file = await c.download_media(
message=m.reply_to_message,
file_name=f"{path}/{sticker.set_name}.png",
)
await m.reply_to_message.reply_document(
document=sticker_file,
caption=strings("sticker_info").format(
emoji=sticker.emoji, id=sticker.file_id
),
)
shutil.rmtree(tempdir, ignore_errors=True)
else:
await m.reply_text(strings("not_sticker"))
| true
| true
|
790a5c4c72fb6389d787e50af8dc5df8286f221b
| 4,233
|
py
|
Python
|
examples/pooling.py
|
tsaycal/romodel
|
0d9bd0616dad41b1683452a0c7ff8e31caf47742
|
[
"MIT"
] | null | null | null |
examples/pooling.py
|
tsaycal/romodel
|
0d9bd0616dad41b1683452a0c7ff8e31caf47742
|
[
"MIT"
] | null | null | null |
examples/pooling.py
|
tsaycal/romodel
|
0d9bd0616dad41b1683452a0c7ff8e31caf47742
|
[
"MIT"
] | null | null | null |
import pyomo.environ as pe
import romodel as ro
feeds = range(5)
products = range(4)
pools = range(2)
qualities = range(4)
con_feed_pool = [(0, 0), (1, 0), (2, 0), (3, 1), (4, 1)]
con_pool_prod = [(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3)]
con_feed_prod = []
price_product = [16, 25, 15, 10]
price_feed = [7, 3, 2, 10, 5]
max_flow = [float('inf'), float('inf'), float('inf'), float('inf'), float('inf')]
min_flow = [0, 0, 0, 0, 0]
pool_size = [float('inf'), float('inf')]
max_demand = [10, 25, 30, 10]
min_demand = [0, 0, 0, 0]
feed_cons = [[1.0, 6.0, 4.0, 0.5],
[4.0, 1.0, 3.0, 2.0],
[4.0, 5.5, 3.0, 0.9],
[3.0, 3.0, 3.0, 1.0],
[1.0, 2.7, 4.0, 1.6]]
max_cons = [[3.00, 3.00, 3.25, 0.75],
[4.00, 2.50, 3.50, 1.50],
[1.50, 5.50, 3.90, 0.80],
[3.00, 4.00, 4.00, 1.80]]
min_cons = [[0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0]]
m = pe.ConcreteModel()
m.q = pe.Var(con_feed_pool, bounds=(0, 1))
m.y = pe.Var(con_pool_prod, within=pe.NonNegativeReals)
m.z = pe.Var(con_feed_prod, within=pe.NonNegativeReals)
m.U = ro.UncSet()
m.price_product = ro.UncParam(products, nominal=price_product, uncset=m.U)
expr = 0
for j in products:
expr += (m.price_product[j] - price_product[j])**2
m.U.c = pe.Constraint(expr=expr <= 0.1)
price_product = m.price_product
obj = 0
for i, l in con_feed_pool:
for j in [jj for ll, jj in con_pool_prod if ll == l]:
obj += price_feed[j]*m.y[(l, j)]*m.q[i, l]
for l, j in con_pool_prod:
obj -= price_product[j]*m.y[(l, j)]
for i, j in con_feed_prod:
obj -= (price_product[j] - price_feed[i])*m.z[(i, j)]
m.obj = pe.Objective(expr=obj, sense=pe.minimize)
# Feed availability
def feed_availability_rule(m, i):
expr = 0
for l in [ll for ii, ll in con_feed_pool if ii == i]:
for j in [jj for ll, jj in con_pool_prod if ll == l]:
expr += m.q[(i, l)]*m.y[(l, j)]
for j in [jj for ii, jj in con_feed_prod if ii == i]:
expr += m.z[(i, l)]
return min_flow[i], expr, max_flow[i]
m.feed_availability = pe.Constraint(feeds, rule=feed_availability_rule)
# Pool capacity
def pool_capacity_rule(m, l):
expr = 0
for j in [jj for ll, jj in con_pool_prod if ll == l]:
expr += m.y[(l, j)]
return None, expr, pool_size[l]
m.pool_capacity = pe.Constraint(pools, rule=pool_capacity_rule)
# Product demand
def prod_demand_rule(m, j):
expr = 0
for l in [ll for ll, jj in con_pool_prod if jj == j]:
expr += m.y[(l, j)]
for i in [ii for ii, jj in con_feed_prod if jj == j]:
expr += m.z[(i, j)]
return min_demand[j], expr, max_demand[j]
m.product_demand = pe.Constraint(products, rule=prod_demand_rule)
# Simplex
def simplex_rule(m, l):
return pe.quicksum(m.q[(i, l)] for i, ll in m.q if ll == l) == 1
m.simplex = pe.Constraint(pools, rule=simplex_rule)
# Product quality
def prod_quality_rule_upper(m, j, k):
expr = 0
flow = 0
for l in [ll for ll, jj in con_pool_prod if jj == j]:
flow += m.y[l, j]
for i in [ii for ii, ll in con_feed_pool if ll == l]:
expr += feed_cons[i][k]*m.q[(i, l)]*m.y[(l, j)]
for i in [ii for ii, jj in con_feed_prod if jj == j]:
flow += m.z[i, j]
expr += feed_cons[i][k]*m.z[(i, j)]
return expr <= max_cons[j][k]*flow
def prod_quality_rule_lower(m, j, k):
expr = 0
flow = 0
for l in [ll for ll, jj in con_pool_prod if jj == j]:
flow += m.y[l, j]
for i in [ii for ii, ll in con_feed_pool if ll == l]:
expr += feed_cons[i][k]*m.q[(i, l)]*m.y[(l, j)]
for i in [ii for ii, jj in con_feed_prod if jj == j]:
flow += m.z[i, j]
expr += feed_cons[i][k]*m.z[(i, j)]
return min_cons[j][k]*flow <= expr
m.prod_quality_upper = pe.Constraint(products, qualities,
rule=prod_quality_rule_upper)
m.prod_quality_lower = pe.Constraint(products, qualities,
rule=prod_quality_rule_lower)
solver = pe.SolverFactory('romodel.cuts')
solver.options['NonConvex'] = 2
solver.solve(m, tee=True)
| 29.601399
| 81
| 0.566501
|
import pyomo.environ as pe
import romodel as ro
feeds = range(5)
products = range(4)
pools = range(2)
qualities = range(4)
con_feed_pool = [(0, 0), (1, 0), (2, 0), (3, 1), (4, 1)]
con_pool_prod = [(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3)]
con_feed_prod = []
price_product = [16, 25, 15, 10]
price_feed = [7, 3, 2, 10, 5]
max_flow = [float('inf'), float('inf'), float('inf'), float('inf'), float('inf')]
min_flow = [0, 0, 0, 0, 0]
pool_size = [float('inf'), float('inf')]
max_demand = [10, 25, 30, 10]
min_demand = [0, 0, 0, 0]
feed_cons = [[1.0, 6.0, 4.0, 0.5],
[4.0, 1.0, 3.0, 2.0],
[4.0, 5.5, 3.0, 0.9],
[3.0, 3.0, 3.0, 1.0],
[1.0, 2.7, 4.0, 1.6]]
max_cons = [[3.00, 3.00, 3.25, 0.75],
[4.00, 2.50, 3.50, 1.50],
[1.50, 5.50, 3.90, 0.80],
[3.00, 4.00, 4.00, 1.80]]
min_cons = [[0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0]]
m = pe.ConcreteModel()
m.q = pe.Var(con_feed_pool, bounds=(0, 1))
m.y = pe.Var(con_pool_prod, within=pe.NonNegativeReals)
m.z = pe.Var(con_feed_prod, within=pe.NonNegativeReals)
m.U = ro.UncSet()
m.price_product = ro.UncParam(products, nominal=price_product, uncset=m.U)
expr = 0
for j in products:
expr += (m.price_product[j] - price_product[j])**2
m.U.c = pe.Constraint(expr=expr <= 0.1)
price_product = m.price_product
obj = 0
for i, l in con_feed_pool:
for j in [jj for ll, jj in con_pool_prod if ll == l]:
obj += price_feed[j]*m.y[(l, j)]*m.q[i, l]
for l, j in con_pool_prod:
obj -= price_product[j]*m.y[(l, j)]
for i, j in con_feed_prod:
obj -= (price_product[j] - price_feed[i])*m.z[(i, j)]
m.obj = pe.Objective(expr=obj, sense=pe.minimize)
def feed_availability_rule(m, i):
expr = 0
for l in [ll for ii, ll in con_feed_pool if ii == i]:
for j in [jj for ll, jj in con_pool_prod if ll == l]:
expr += m.q[(i, l)]*m.y[(l, j)]
for j in [jj for ii, jj in con_feed_prod if ii == i]:
expr += m.z[(i, l)]
return min_flow[i], expr, max_flow[i]
m.feed_availability = pe.Constraint(feeds, rule=feed_availability_rule)
def pool_capacity_rule(m, l):
expr = 0
for j in [jj for ll, jj in con_pool_prod if ll == l]:
expr += m.y[(l, j)]
return None, expr, pool_size[l]
m.pool_capacity = pe.Constraint(pools, rule=pool_capacity_rule)
def prod_demand_rule(m, j):
expr = 0
for l in [ll for ll, jj in con_pool_prod if jj == j]:
expr += m.y[(l, j)]
for i in [ii for ii, jj in con_feed_prod if jj == j]:
expr += m.z[(i, j)]
return min_demand[j], expr, max_demand[j]
m.product_demand = pe.Constraint(products, rule=prod_demand_rule)
def simplex_rule(m, l):
return pe.quicksum(m.q[(i, l)] for i, ll in m.q if ll == l) == 1
m.simplex = pe.Constraint(pools, rule=simplex_rule)
def prod_quality_rule_upper(m, j, k):
expr = 0
flow = 0
for l in [ll for ll, jj in con_pool_prod if jj == j]:
flow += m.y[l, j]
for i in [ii for ii, ll in con_feed_pool if ll == l]:
expr += feed_cons[i][k]*m.q[(i, l)]*m.y[(l, j)]
for i in [ii for ii, jj in con_feed_prod if jj == j]:
flow += m.z[i, j]
expr += feed_cons[i][k]*m.z[(i, j)]
return expr <= max_cons[j][k]*flow
def prod_quality_rule_lower(m, j, k):
expr = 0
flow = 0
for l in [ll for ll, jj in con_pool_prod if jj == j]:
flow += m.y[l, j]
for i in [ii for ii, ll in con_feed_pool if ll == l]:
expr += feed_cons[i][k]*m.q[(i, l)]*m.y[(l, j)]
for i in [ii for ii, jj in con_feed_prod if jj == j]:
flow += m.z[i, j]
expr += feed_cons[i][k]*m.z[(i, j)]
return min_cons[j][k]*flow <= expr
m.prod_quality_upper = pe.Constraint(products, qualities,
rule=prod_quality_rule_upper)
m.prod_quality_lower = pe.Constraint(products, qualities,
rule=prod_quality_rule_lower)
solver = pe.SolverFactory('romodel.cuts')
solver.options['NonConvex'] = 2
solver.solve(m, tee=True)
| true
| true
|
790a5c4c9795055da7b8d7049d3aede00aebbac6
| 285
|
py
|
Python
|
frontend/views.py
|
yuliiabuchko/todo
|
2a553ef6177f978126bbd746113eb4a50b9e3dea
|
[
"MIT"
] | null | null | null |
frontend/views.py
|
yuliiabuchko/todo
|
2a553ef6177f978126bbd746113eb4a50b9e3dea
|
[
"MIT"
] | 1
|
2021-04-08T21:17:38.000Z
|
2021-04-08T21:17:38.000Z
|
frontend/views.py
|
yuliiabuchko/todo
|
2a553ef6177f978126bbd746113eb4a50b9e3dea
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.views.generic.detail import DetailView
from todos.models import Task
def index(request):
return render(request, 'frontend/index.html')
class TodoDetailView(DetailView):
model = Task
template_name = 'frontend/index.html'
| 20.357143
| 50
| 0.768421
|
from django.shortcuts import render
from django.views.generic.detail import DetailView
from todos.models import Task
def index(request):
return render(request, 'frontend/index.html')
class TodoDetailView(DetailView):
model = Task
template_name = 'frontend/index.html'
| true
| true
|
790a5d86ee689a83f18761d7920ca96cd2d159a8
| 3,586
|
py
|
Python
|
110borwein_2017/compare.py
|
ltabis/epitech-projects
|
e38b3f00a4ac44c969d5e4880cd65084dc2c870a
|
[
"MIT"
] | null | null | null |
110borwein_2017/compare.py
|
ltabis/epitech-projects
|
e38b3f00a4ac44c969d5e4880cd65084dc2c870a
|
[
"MIT"
] | null | null | null |
110borwein_2017/compare.py
|
ltabis/epitech-projects
|
e38b3f00a4ac44c969d5e4880cd65084dc2c870a
|
[
"MIT"
] | 1
|
2021-01-07T17:41:14.000Z
|
2021-01-07T17:41:14.000Z
|
#!/usr/bin/python3
import time
from calcul import *
import sys
import os
max_exec = 10
red = "\033[31m"
white = "\033[39m"
cyan = "\033[36m"
green = "\033[32m"
save = sys.stdout
so = open("file.log", 'w')
sys.stdout = so
def rectangle_time(n):
time_rect = []
i = 0
while i < max_exec:
start_time = time.time()
calcul_rectangles(n)
time_rect.append(time.time() - start_time)
i += 1
return time_rect
def trapeze_time(n):
time_trap = []
i = 0
while i < max_exec:
start_time = time.time()
calcul_trapezoïds(n)
time_trap.append(time.time() - start_time)
i += 1
return time_trap
def simpson_time(n):
time_simp = []
i = 0
while i < max_exec:
start_time = time.time()
calcul_simpson(n)
time_simp.append(time.time() - start_time)
i += 1
return time_simp
def calc_dict(tab, name):
i = 0
result = 0
dic = {}
while i < max_exec:
result += tab[i]
i += 1
result = result / max_exec
dic["Name"] = name
dic["Value"] = result
return dic
def get_min_time(dict1, dict2, dict3):
if dict1.get("Value") < dict2.get("Value") and dict1.get("Value") < dict3.get("Value"):
return 1
if dict2.get("Value") < dict1.get("Value") and dict2.get("Value") < dict3.get("Value"):
return 2
if dict3.get("Value") < dict2.get("Value") and dict3.get("Value") < dict1.get("Value"):
return 3
def get_min_precision(prec1, prec2, prec3):
prec1 = abs(prec1)
prec2 = abs(prec2)
prec3 = abs(prec3)
if prec1 < prec2 and prec1 < prec3:
return 1
if prec2 < prec1 and prec2 < prec3:
return 2
if prec3 < prec2 and prec3 < prec1:
return 3
def main():
n = int(sys.argv[1])
time_rect = rectangle_time(n)
time_trap = trapeze_time(n)
time_simp = simpson_time(n)
dict_rect = calc_dict(time_rect, "Rectangles")
dict_trap = calc_dict(time_trap, "Trapezoids")
dict_simp = calc_dict(time_simp, "Simpson")
preci_rect = calcul_rectangles(n) - (pi / 2)
preci_trap = calcul_trapezoïds(n) - (pi / 2)
preci_simp = calcul_simpson(n) - (pi / 2)
sys.stdout = save
print("{}Compute time:\n{}".format(cyan, white))
print("Method : {}\t: {}{:.6f}{} sec".format(dict_rect.get("Name"), red, dict_rect.get("Value"), white))
print("Method : {}\t: {}{:.6f}{} sec".format(dict_trap.get("Name"), red, dict_trap.get("Value"), white))
print("Method : {}\t: {}{:.6f}{} sec".format(dict_simp.get("Name"), red, dict_simp.get("Value"), white))
min_time = get_min_time(dict_rect, dict_trap, dict_simp)
print("The fastest Method is:", end='')
print(green, end='')
if min_time == 1:
print("\tRectangles Method")
elif min_time == 2:
print("\tTrapezoids Method")
else:
print("\tSimpson Method")
print(white, end='')
print("\n{}Relative precision:\n{}".format(cyan, white))
print("Method : {}\t: {}{}{} a.u.".format(dict_rect.get("Name"), red, preci_rect, white))
print("Method : {}\t: {}{}{} a.u.".format(dict_trap.get("Name"), red, preci_trap, white))
print("Method : {}\t: {}{}{} a.u.".format(dict_simp.get("Name"), red, preci_simp, white))
preci = get_min_precision(preci_rect, preci_trap, preci_simp)
print("The most accurate:", end='')
print(green, end='')
if preci == 1:
print("\tRectangles Method")
elif preci == 2:
print("\tTrapezoids Method")
else:
print("\tSimpson Method")
print(white, end='')
main()
| 29.883333
| 108
| 0.594255
|
# Benchmark harness for the integration methods defined in the calcul module.
import time
from calcul import *
import sys
import os
# number of timed executions averaged per method
max_exec = 10
# ANSI escape sequences used to colorize the final report
red = "\033[31m"
white = "\033[39m"
cyan = "\033[36m"
green = "\033[32m"
# Keep a handle on the real stdout, then redirect all output to file.log
# until main() restores it for the report.
save = sys.stdout
so = open("file.log", 'w')
sys.stdout = so
def rectangle_time(n):
    """Run calcul_rectangles(n) max_exec times; return the list of durations."""
    durations = []
    for _ in range(max_exec):
        started = time.time()
        calcul_rectangles(n)
        durations.append(time.time() - started)
    return durations
def trapeze_time(n):
    """Run calcul_trapezoïds(n) max_exec times; return the list of durations."""
    durations = []
    for _ in range(max_exec):
        started = time.time()
        calcul_trapezoïds(n)
        durations.append(time.time() - started)
    return durations
def simpson_time(n):
    """Run calcul_simpson(n) max_exec times; return the list of durations."""
    durations = []
    for _ in range(max_exec):
        started = time.time()
        calcul_simpson(n)
        durations.append(time.time() - started)
    return durations
def calc_dict(tab, name):
    """Build a {"Name": name, "Value": mean} dict from a list of samples.

    Generalized: the original indexed the first max_exec entries through a
    module-level global, which raised IndexError on shorter lists; the mean
    is now computed over the actual length of tab with the builtin sum().
    """
    return {"Name": name, "Value": sum(tab) / len(tab)}
def get_min_time(dict1, dict2, dict3):
    """Return 1, 2 or 3 for the dict whose "Value" entry is smallest.

    Bug fix: the original chain of strict comparisons returned None when
    two values were exactly equal, which made the caller's else-branch
    misreport "Simpson Method"; ties now resolve to the lowest index.
    """
    v1 = dict1.get("Value")
    v2 = dict2.get("Value")
    v3 = dict3.get("Value")
    if v1 <= v2 and v1 <= v3:
        return 1
    if v2 <= v3:
        return 2
    return 3
def get_min_precision(prec1, prec2, prec3):
    """Return which of three error values is closest to zero.

    Compares the absolute values of the three arguments and returns 1, 2
    or 3 for the smallest one.  Bug fix: the original chain of strict
    comparisons returned None on exact ties, which made the caller's
    else-branch misreport "Simpson Method"; ties now resolve to the
    lowest index.
    """
    prec1 = abs(prec1)
    prec2 = abs(prec2)
    prec3 = abs(prec3)
    if prec1 <= prec2 and prec1 <= prec3:
        return 1
    if prec2 <= prec3:
        return 2
    return 3
def main():
    """Benchmark the three integration methods and print a colored report."""
    n = int(sys.argv[1])
    # average run time of each method over max_exec executions
    rect_stats = calc_dict(rectangle_time(n), "Rectangles")
    trap_stats = calc_dict(trapeze_time(n), "Trapezoids")
    simp_stats = calc_dict(simpson_time(n), "Simpson")
    # signed error of each method against the exact value pi / 2
    exact = pi / 2
    rect_err = calcul_rectangles(n) - exact
    trap_err = calcul_trapezoïds(n) - exact
    simp_err = calcul_simpson(n) - exact
    # restore the real stdout that was redirected at import time
    sys.stdout = save
    print("{}Compute time:\n{}".format(cyan, white))
    for stats in (rect_stats, trap_stats, simp_stats):
        print("Method : {}\t: {}{:.6f}{} sec".format(stats.get("Name"), red, stats.get("Value"), white))
    # any verdict other than 1 or 2 (including a tie) reports Simpson,
    # exactly like the original if/elif/else chain
    labels = {1: "\tRectangles Method", 2: "\tTrapezoids Method"}
    fastest = get_min_time(rect_stats, trap_stats, simp_stats)
    print("The fastest Method is:", end='')
    print(green, end='')
    print(labels.get(fastest, "\tSimpson Method"))
    print(white, end='')
    print("\n{}Relative precision:\n{}".format(cyan, white))
    for stats, err in ((rect_stats, rect_err), (trap_stats, trap_err), (simp_stats, simp_err)):
        print("Method : {}\t: {}{}{} a.u.".format(stats.get("Name"), red, err, white))
    best = get_min_precision(rect_err, trap_err, simp_err)
    print("The most accurate:", end='')
    print(green, end='')
    print(labels.get(best, "\tSimpson Method"))
    print(white, end='')
main()
| true
| true
|
790a5d93cb79bf9effeb45f3f8da517081c6c594
| 9,997
|
py
|
Python
|
models/densenet121.py
|
arp95/pytorch_image_classifier
|
81db0a99b79dcebc39843869bf684c5090db6fdb
|
[
"MIT"
] | 3
|
2020-08-17T16:09:00.000Z
|
2021-02-02T04:52:17.000Z
|
models/densenet121.py
|
arp95/pytorch_image_classifier
|
81db0a99b79dcebc39843869bf684c5090db6fdb
|
[
"MIT"
] | 1
|
2020-10-14T02:21:46.000Z
|
2020-10-14T02:21:46.000Z
|
models/densenet121.py
|
arp95/cnn_architectures_image_classification
|
81db0a99b79dcebc39843869bf684c5090db6fdb
|
[
"MIT"
] | null | null | null |
# header files
import torch
import torch.nn as nn
import torchvision
import numpy as np
# define network (remember input size: (224 x 224 x 3))
class DenseNet_121(torch.nn.Module):
    """DenseNet-121-style classifier for (224 x 224 x 3) input images.

    Layer layout matches the hand-written original: a conv/pool stem, four
    dense blocks of [6, 12, 24, 16] composite layers (growth rate 32)
    separated by 1x1-conv + 2x2 avg-pool transitions, then an adaptive
    average pool to 7x7 and a single linear classifier.  The per-layer
    modules keep their original attribute names (dense_block_<i>_<j>,
    transition_block_<i>) and creation order, so state dicts saved from the
    unrolled version still load; the ~120 repetitive assignments and forward
    calls are generated in loops instead.
    """

    # composite layers per dense block, and channels each layer adds
    block_config = (6, 12, 24, 16)
    growth_rate = 32

    def dense_block(self, input_channels):
        """One composite layer: 1x1 bottleneck to 128, then 3x3 conv to 32 channels."""
        return torch.nn.Sequential(
            torch.nn.Conv2d(input_channels, 128, kernel_size=1, bias=False),
            torch.nn.BatchNorm2d(128),
            torch.nn.ReLU(inplace=True),
            torch.nn.Conv2d(128, 32, kernel_size=3, padding=1, bias=False),
            torch.nn.BatchNorm2d(32),
            torch.nn.ReLU(inplace=True)
        )

    def __init__(self, num_classes=2):
        """Build the network; num_classes sets the classifier output width."""
        super(DenseNet_121, self).__init__()
        # stem: (224 x 224 x 3) -> (56 x 56 x 64)
        self.features = torch.nn.Sequential(
            torch.nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False),
            torch.nn.BatchNorm2d(64),
            torch.nn.ReLU(inplace=True),
            torch.nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        )
        channels = 64
        for block_idx, num_layers in enumerate(self.block_config, start=1):
            # each dense layer consumes all channels so far, adds growth_rate
            for layer_idx in range(1, num_layers + 1):
                setattr(
                    self,
                    "dense_block_{}_{}".format(block_idx, layer_idx),
                    self.dense_block(channels),
                )
                channels += self.growth_rate
            # a transition (1x1 conv halving channels, then 2x2 avg-pool)
            # follows every dense block except the last
            if block_idx < len(self.block_config):
                setattr(
                    self,
                    "transition_block_{}".format(block_idx),
                    torch.nn.Sequential(
                        torch.nn.Conv2d(channels, channels // 2, kernel_size=1, bias=False),
                        torch.nn.AvgPool2d(kernel_size=2, stride=2),
                    ),
                )
                channels //= 2
        # final channel count is 1024 (512 + 16 * 32), pooled to a 7 x 7 map
        self.avgpool = torch.nn.AdaptiveAvgPool2d(7)
        self.classifier = torch.nn.Sequential(
            torch.nn.Linear(1024 * 7 * 7, num_classes)
        )

    def forward(self, x):
        """Run the network; returns (batch, num_classes) logits."""
        x = self.features(x)
        for block_idx, num_layers in enumerate(self.block_config, start=1):
            for layer_idx in range(1, num_layers + 1):
                layer = getattr(self, "dense_block_{}_{}".format(block_idx, layer_idx))
                # dense connectivity: concatenate new features onto the input
                x = torch.cat([x, layer(x)], 1)
            if block_idx < len(self.block_config):
                x = getattr(self, "transition_block_{}".format(block_idx))(x)
        x = self.avgpool(x)
        x = torch.flatten(x, 1)
        x = self.classifier(x)
        return x
| 37.582707
| 83
| 0.577873
|
import torch
import torch.nn as nn
import torchvision
import numpy as np
class DenseNet_121(torch.nn.Module):
    """DenseNet-121-style classifier for (224 x 224 x 3) input images.

    Layer layout matches the hand-written original: a conv/pool stem, four
    dense blocks of [6, 12, 24, 16] composite layers (growth rate 32)
    separated by 1x1-conv + 2x2 avg-pool transitions, then an adaptive
    average pool to 7x7 and a single linear classifier.  The per-layer
    modules keep their original attribute names (dense_block_<i>_<j>,
    transition_block_<i>) and creation order, so state dicts saved from the
    unrolled version still load; the ~120 repetitive assignments and forward
    calls are generated in loops instead.
    """

    # composite layers per dense block, and channels each layer adds
    block_config = (6, 12, 24, 16)
    growth_rate = 32

    def dense_block(self, input_channels):
        """One composite layer: 1x1 bottleneck to 128, then 3x3 conv to 32 channels."""
        return torch.nn.Sequential(
            torch.nn.Conv2d(input_channels, 128, kernel_size=1, bias=False),
            torch.nn.BatchNorm2d(128),
            torch.nn.ReLU(inplace=True),
            torch.nn.Conv2d(128, 32, kernel_size=3, padding=1, bias=False),
            torch.nn.BatchNorm2d(32),
            torch.nn.ReLU(inplace=True)
        )

    def __init__(self, num_classes=2):
        """Build the network; num_classes sets the classifier output width."""
        super(DenseNet_121, self).__init__()
        # stem: (224 x 224 x 3) -> (56 x 56 x 64)
        self.features = torch.nn.Sequential(
            torch.nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False),
            torch.nn.BatchNorm2d(64),
            torch.nn.ReLU(inplace=True),
            torch.nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        )
        channels = 64
        for block_idx, num_layers in enumerate(self.block_config, start=1):
            # each dense layer consumes all channels so far, adds growth_rate
            for layer_idx in range(1, num_layers + 1):
                setattr(
                    self,
                    "dense_block_{}_{}".format(block_idx, layer_idx),
                    self.dense_block(channels),
                )
                channels += self.growth_rate
            # a transition (1x1 conv halving channels, then 2x2 avg-pool)
            # follows every dense block except the last
            if block_idx < len(self.block_config):
                setattr(
                    self,
                    "transition_block_{}".format(block_idx),
                    torch.nn.Sequential(
                        torch.nn.Conv2d(channels, channels // 2, kernel_size=1, bias=False),
                        torch.nn.AvgPool2d(kernel_size=2, stride=2),
                    ),
                )
                channels //= 2
        # final channel count is 1024 (512 + 16 * 32), pooled to a 7 x 7 map
        self.avgpool = torch.nn.AdaptiveAvgPool2d(7)
        self.classifier = torch.nn.Sequential(
            torch.nn.Linear(1024 * 7 * 7, num_classes)
        )

    def forward(self, x):
        """Run the network; returns (batch, num_classes) logits."""
        x = self.features(x)
        for block_idx, num_layers in enumerate(self.block_config, start=1):
            for layer_idx in range(1, num_layers + 1):
                layer = getattr(self, "dense_block_{}_{}".format(block_idx, layer_idx))
                # dense connectivity: concatenate new features onto the input
                x = torch.cat([x, layer(x)], 1)
            if block_idx < len(self.block_config):
                x = getattr(self, "transition_block_{}".format(block_idx))(x)
        x = self.avgpool(x)
        x = torch.flatten(x, 1)
        x = self.classifier(x)
        return x
| true
| true
|
790a5daece740f88ebd8bfbd6ecde92682285a42
| 19,954
|
py
|
Python
|
pyleecan/Classes/LamSlotWind.py
|
IrakozeFD/pyleecan
|
5a93bd98755d880176c1ce8ac90f36ca1b907055
|
[
"Apache-2.0"
] | null | null | null |
pyleecan/Classes/LamSlotWind.py
|
IrakozeFD/pyleecan
|
5a93bd98755d880176c1ce8ac90f36ca1b907055
|
[
"Apache-2.0"
] | null | null | null |
pyleecan/Classes/LamSlotWind.py
|
IrakozeFD/pyleecan
|
5a93bd98755d880176c1ce8ac90f36ca1b907055
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# File generated according to Generator/ClassesRef/Machine/LamSlotWind.csv
# WARNING! All changes made in this file will be lost!
"""Method code available at https://github.com/Eomys/pyleecan/tree/master/pyleecan/Methods/Machine/LamSlotWind
"""
from os import linesep
from sys import getsizeof
from logging import getLogger
from ._check import check_var, raise_
from ..Functions.get_logger import get_logger
from ..Functions.save import save
from ..Functions.copy import copy
from ..Functions.load import load_init_dict
from ..Functions.Load.import_class import import_class
from .LamSlot import LamSlot
# Import all class method
# Try/catch to remove unnecessary dependencies in unused method
try:
from ..Methods.Machine.LamSlotWind.build_geometry import build_geometry
except ImportError as error:
build_geometry = error
try:
from ..Methods.Machine.LamSlotWind.check import check
except ImportError as error:
check = error
try:
from ..Methods.Machine.LamSlotWind.comp_masses import comp_masses
except ImportError as error:
comp_masses = error
try:
from ..Methods.Machine.LamSlotWind.comp_surfaces import comp_surfaces
except ImportError as error:
comp_surfaces = error
try:
from ..Methods.Machine.LamSlotWind.comp_volumes import comp_volumes
except ImportError as error:
comp_volumes = error
try:
from ..Methods.Machine.LamSlotWind.get_pole_pair_number import get_pole_pair_number
except ImportError as error:
get_pole_pair_number = error
try:
from ..Methods.Machine.LamSlotWind.get_name_phase import get_name_phase
except ImportError as error:
get_name_phase = error
try:
from ..Methods.Machine.LamSlotWind.plot import plot
except ImportError as error:
plot = error
try:
from ..Methods.Machine.LamSlotWind.plot_winding import plot_winding
except ImportError as error:
plot_winding = error
try:
from ..Methods.Machine.LamSlotWind.comp_fill_factor import comp_fill_factor
except ImportError as error:
comp_fill_factor = error
try:
from ..Methods.Machine.LamSlotWind.comp_output_geo import comp_output_geo
except ImportError as error:
comp_output_geo = error
try:
from ..Methods.Machine.LamSlotWind.get_polar_eq import get_polar_eq
except ImportError as error:
get_polar_eq = error
try:
from ..Methods.Machine.LamSlotWind.comp_wind_function import comp_wind_function
except ImportError as error:
comp_wind_function = error
try:
from ..Methods.Machine.LamSlotWind.plot_mmf_unit import plot_mmf_unit
except ImportError as error:
plot_mmf_unit = error
try:
from ..Methods.Machine.LamSlotWind.comp_resistance_wind import comp_resistance_wind
except ImportError as error:
comp_resistance_wind = error
try:
from ..Methods.Machine.LamSlotWind.comp_angle_d_axis import comp_angle_d_axis
except ImportError as error:
comp_angle_d_axis = error
try:
from ..Methods.Machine.LamSlotWind.comp_mmf_unit import comp_mmf_unit
except ImportError as error:
comp_mmf_unit = error
try:
from ..Methods.Machine.LamSlotWind.comp_rot_dir import comp_rot_dir
except ImportError as error:
comp_rot_dir = error
try:
from ..Methods.Machine.LamSlotWind.comp_lengths_winding import comp_lengths_winding
except ImportError as error:
comp_lengths_winding = error
try:
from ..Methods.Machine.LamSlotWind.comp_number_phase_eq import comp_number_phase_eq
except ImportError as error:
comp_number_phase_eq = error
try:
from ..Methods.Machine.LamSlotWind.comp_periodicity import comp_periodicity
except ImportError as error:
comp_periodicity = error
from ._check import InitUnKnowClassError
from .Winding import Winding
from .Slot import Slot
from .Material import Material
from .Hole import Hole
from .Notch import Notch
class LamSlotWind(LamSlot):
"""Lamination with Slot filled with winding"""
VERSION = 1
# Check ImportError to remove unnecessary dependencies in unused method
# cf Methods.Machine.LamSlotWind.build_geometry
if isinstance(build_geometry, ImportError):
build_geometry = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method build_geometry: "
+ str(build_geometry)
)
)
)
else:
build_geometry = build_geometry
# cf Methods.Machine.LamSlotWind.check
if isinstance(check, ImportError):
check = property(
fget=lambda x: raise_(
ImportError("Can't use LamSlotWind method check: " + str(check))
)
)
else:
check = check
# cf Methods.Machine.LamSlotWind.comp_masses
if isinstance(comp_masses, ImportError):
comp_masses = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_masses: " + str(comp_masses)
)
)
)
else:
comp_masses = comp_masses
# cf Methods.Machine.LamSlotWind.comp_surfaces
if isinstance(comp_surfaces, ImportError):
comp_surfaces = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_surfaces: " + str(comp_surfaces)
)
)
)
else:
comp_surfaces = comp_surfaces
# cf Methods.Machine.LamSlotWind.comp_volumes
if isinstance(comp_volumes, ImportError):
comp_volumes = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_volumes: " + str(comp_volumes)
)
)
)
else:
comp_volumes = comp_volumes
# cf Methods.Machine.LamSlotWind.get_pole_pair_number
if isinstance(get_pole_pair_number, ImportError):
get_pole_pair_number = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method get_pole_pair_number: "
+ str(get_pole_pair_number)
)
)
)
else:
get_pole_pair_number = get_pole_pair_number
# cf Methods.Machine.LamSlotWind.get_name_phase
if isinstance(get_name_phase, ImportError):
get_name_phase = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method get_name_phase: "
+ str(get_name_phase)
)
)
)
else:
get_name_phase = get_name_phase
# cf Methods.Machine.LamSlotWind.plot
if isinstance(plot, ImportError):
plot = property(
fget=lambda x: raise_(
ImportError("Can't use LamSlotWind method plot: " + str(plot))
)
)
else:
plot = plot
# cf Methods.Machine.LamSlotWind.plot_winding
if isinstance(plot_winding, ImportError):
plot_winding = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method plot_winding: " + str(plot_winding)
)
)
)
else:
plot_winding = plot_winding
# cf Methods.Machine.LamSlotWind.comp_fill_factor
if isinstance(comp_fill_factor, ImportError):
comp_fill_factor = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_fill_factor: "
+ str(comp_fill_factor)
)
)
)
else:
comp_fill_factor = comp_fill_factor
# cf Methods.Machine.LamSlotWind.comp_output_geo
if isinstance(comp_output_geo, ImportError):
comp_output_geo = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_output_geo: "
+ str(comp_output_geo)
)
)
)
else:
comp_output_geo = comp_output_geo
# cf Methods.Machine.LamSlotWind.get_polar_eq
if isinstance(get_polar_eq, ImportError):
get_polar_eq = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method get_polar_eq: " + str(get_polar_eq)
)
)
)
else:
get_polar_eq = get_polar_eq
# cf Methods.Machine.LamSlotWind.comp_wind_function
if isinstance(comp_wind_function, ImportError):
comp_wind_function = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_wind_function: "
+ str(comp_wind_function)
)
)
)
else:
comp_wind_function = comp_wind_function
# cf Methods.Machine.LamSlotWind.plot_mmf_unit
if isinstance(plot_mmf_unit, ImportError):
plot_mmf_unit = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method plot_mmf_unit: " + str(plot_mmf_unit)
)
)
)
else:
plot_mmf_unit = plot_mmf_unit
# cf Methods.Machine.LamSlotWind.comp_resistance_wind
if isinstance(comp_resistance_wind, ImportError):
comp_resistance_wind = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_resistance_wind: "
+ str(comp_resistance_wind)
)
)
)
else:
comp_resistance_wind = comp_resistance_wind
# cf Methods.Machine.LamSlotWind.comp_angle_d_axis
if isinstance(comp_angle_d_axis, ImportError):
comp_angle_d_axis = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_angle_d_axis: "
+ str(comp_angle_d_axis)
)
)
)
else:
comp_angle_d_axis = comp_angle_d_axis
# cf Methods.Machine.LamSlotWind.comp_mmf_unit
if isinstance(comp_mmf_unit, ImportError):
comp_mmf_unit = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_mmf_unit: " + str(comp_mmf_unit)
)
)
)
else:
comp_mmf_unit = comp_mmf_unit
# cf Methods.Machine.LamSlotWind.comp_rot_dir
if isinstance(comp_rot_dir, ImportError):
comp_rot_dir = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_rot_dir: " + str(comp_rot_dir)
)
)
)
else:
comp_rot_dir = comp_rot_dir
# cf Methods.Machine.LamSlotWind.comp_lengths_winding
if isinstance(comp_lengths_winding, ImportError):
comp_lengths_winding = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_lengths_winding: "
+ str(comp_lengths_winding)
)
)
)
else:
comp_lengths_winding = comp_lengths_winding
# cf Methods.Machine.LamSlotWind.comp_number_phase_eq
if isinstance(comp_number_phase_eq, ImportError):
comp_number_phase_eq = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_number_phase_eq: "
+ str(comp_number_phase_eq)
)
)
)
else:
comp_number_phase_eq = comp_number_phase_eq
# cf Methods.Machine.LamSlotWind.comp_periodicity
if isinstance(comp_periodicity, ImportError):
comp_periodicity = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_periodicity: "
+ str(comp_periodicity)
)
)
)
else:
comp_periodicity = comp_periodicity
# save and copy methods are available in all object
save = save
copy = copy
# get_logger method is available in all object
get_logger = get_logger
def __init__(
self,
Ksfill=None,
winding=-1,
slot=-1,
L1=0.35,
mat_type=-1,
Nrvd=0,
Wrvd=0,
Kf1=0.95,
is_internal=True,
Rint=0,
Rext=1,
is_stator=True,
axial_vent=-1,
notch=-1,
init_dict=None,
init_str=None,
):
"""Constructor of the class. Can be use in three ways :
- __init__ (arg1 = 1, arg3 = 5) every parameters have name and default values
for pyleecan type, -1 will call the default constructor
- __init__ (init_dict = d) d must be a dictionnary with property names as keys
- __init__ (init_str = s) s must be a string
s is the file path to load
ndarray or list can be given for Vector and Matrix
object or dict can be given for pyleecan Object"""
if init_str is not None: # Load from a file
init_dict = load_init_dict(init_str)[1]
if init_dict is not None: # Initialisation by dict
assert type(init_dict) is dict
# Overwrite default value with init_dict content
if "Ksfill" in list(init_dict.keys()):
Ksfill = init_dict["Ksfill"]
if "winding" in list(init_dict.keys()):
winding = init_dict["winding"]
if "slot" in list(init_dict.keys()):
slot = init_dict["slot"]
if "L1" in list(init_dict.keys()):
L1 = init_dict["L1"]
if "mat_type" in list(init_dict.keys()):
mat_type = init_dict["mat_type"]
if "Nrvd" in list(init_dict.keys()):
Nrvd = init_dict["Nrvd"]
if "Wrvd" in list(init_dict.keys()):
Wrvd = init_dict["Wrvd"]
if "Kf1" in list(init_dict.keys()):
Kf1 = init_dict["Kf1"]
if "is_internal" in list(init_dict.keys()):
is_internal = init_dict["is_internal"]
if "Rint" in list(init_dict.keys()):
Rint = init_dict["Rint"]
if "Rext" in list(init_dict.keys()):
Rext = init_dict["Rext"]
if "is_stator" in list(init_dict.keys()):
is_stator = init_dict["is_stator"]
if "axial_vent" in list(init_dict.keys()):
axial_vent = init_dict["axial_vent"]
if "notch" in list(init_dict.keys()):
notch = init_dict["notch"]
# Set the properties (value check and convertion are done in setter)
self.Ksfill = Ksfill
self.winding = winding
# Call LamSlot init
super(LamSlotWind, self).__init__(
slot=slot,
L1=L1,
mat_type=mat_type,
Nrvd=Nrvd,
Wrvd=Wrvd,
Kf1=Kf1,
is_internal=is_internal,
Rint=Rint,
Rext=Rext,
is_stator=is_stator,
axial_vent=axial_vent,
notch=notch,
)
# The class is frozen (in LamSlot init), for now it's impossible to
# add new properties
def __str__(self):
"""Convert this object in a readeable string (for print)"""
LamSlotWind_str = ""
# Get the properties inherited from LamSlot
LamSlotWind_str += super(LamSlotWind, self).__str__()
LamSlotWind_str += "Ksfill = " + str(self.Ksfill) + linesep
if self.winding is not None:
tmp = self.winding.__str__().replace(linesep, linesep + "\t").rstrip("\t")
LamSlotWind_str += "winding = " + tmp
else:
LamSlotWind_str += "winding = None" + linesep + linesep
return LamSlotWind_str
def __eq__(self, other):
"""Compare two objects (skip parent)"""
if type(other) != type(self):
return False
# Check the properties inherited from LamSlot
if not super(LamSlotWind, self).__eq__(other):
return False
if other.Ksfill != self.Ksfill:
return False
if other.winding != self.winding:
return False
return True
def compare(self, other, name="self"):
"""Compare two objects and return list of differences"""
if type(other) != type(self):
return ["type(" + name + ")"]
diff_list = list()
# Check the properties inherited from LamSlot
diff_list.extend(super(LamSlotWind, self).compare(other, name=name))
if other._Ksfill != self._Ksfill:
diff_list.append(name + ".Ksfill")
if (other.winding is None and self.winding is not None) or (
other.winding is not None and self.winding is None
):
diff_list.append(name + ".winding None mismatch")
elif self.winding is not None:
diff_list.extend(
self.winding.compare(other.winding, name=name + ".winding")
)
return diff_list
def __sizeof__(self):
"""Return the size in memory of the object (including all subobject)"""
S = 0 # Full size of the object
# Get size of the properties inherited from LamSlot
S += super(LamSlotWind, self).__sizeof__()
S += getsizeof(self.Ksfill)
S += getsizeof(self.winding)
return S
def as_dict(self):
"""Convert this object in a json seriable dict (can be use in __init__)"""
# Get the properties inherited from LamSlot
LamSlotWind_dict = super(LamSlotWind, self).as_dict()
LamSlotWind_dict["Ksfill"] = self.Ksfill
if self.winding is None:
LamSlotWind_dict["winding"] = None
else:
LamSlotWind_dict["winding"] = self.winding.as_dict()
# The class name is added to the dict for deserialisation purpose
# Overwrite the mother class name
LamSlotWind_dict["__class__"] = "LamSlotWind"
return LamSlotWind_dict
def _set_None(self):
"""Set all the properties to None (except pyleecan object)"""
self.Ksfill = None
if self.winding is not None:
self.winding._set_None()
# Set to None the properties inherited from LamSlot
super(LamSlotWind, self)._set_None()
def _get_Ksfill(self):
"""getter of Ksfill"""
return self._Ksfill
def _set_Ksfill(self, value):
"""setter of Ksfill"""
check_var("Ksfill", value, "float", Vmin=0, Vmax=1)
self._Ksfill = value
Ksfill = property(
fget=_get_Ksfill,
fset=_set_Ksfill,
doc=u"""Imposed Slot Fill factor (if None, will be computed according to the winding and the slot)
:Type: float
:min: 0
:max: 1
""",
)
def _get_winding(self):
"""getter of winding"""
return self._winding
def _set_winding(self, value):
"""setter of winding"""
if isinstance(value, str): # Load from file
value = load_init_dict(value)[1]
if isinstance(value, dict) and "__class__" in value:
class_obj = import_class(
"pyleecan.Classes", value.get("__class__"), "winding"
)
value = class_obj(init_dict=value)
elif type(value) is int and value == -1: # Default constructor
value = Winding()
check_var("winding", value, "Winding")
self._winding = value
if self._winding is not None:
self._winding.parent = self
winding = property(
fget=_get_winding,
fset=_set_winding,
doc=u"""Lamination's Winding
:Type: Winding
""",
)
| 33.201331
| 110
| 0.606545
|
from os import linesep
from sys import getsizeof
from logging import getLogger
from ._check import check_var, raise_
from ..Functions.get_logger import get_logger
from ..Functions.save import save
from ..Functions.copy import copy
from ..Functions.load import load_init_dict
from ..Functions.Load.import_class import import_class
from .LamSlot import LamSlot
try:
from ..Methods.Machine.LamSlotWind.build_geometry import build_geometry
except ImportError as error:
build_geometry = error
try:
from ..Methods.Machine.LamSlotWind.check import check
except ImportError as error:
check = error
try:
from ..Methods.Machine.LamSlotWind.comp_masses import comp_masses
except ImportError as error:
comp_masses = error
try:
from ..Methods.Machine.LamSlotWind.comp_surfaces import comp_surfaces
except ImportError as error:
comp_surfaces = error
try:
from ..Methods.Machine.LamSlotWind.comp_volumes import comp_volumes
except ImportError as error:
comp_volumes = error
try:
from ..Methods.Machine.LamSlotWind.get_pole_pair_number import get_pole_pair_number
except ImportError as error:
get_pole_pair_number = error
try:
from ..Methods.Machine.LamSlotWind.get_name_phase import get_name_phase
except ImportError as error:
get_name_phase = error
try:
from ..Methods.Machine.LamSlotWind.plot import plot
except ImportError as error:
plot = error
try:
from ..Methods.Machine.LamSlotWind.plot_winding import plot_winding
except ImportError as error:
plot_winding = error
try:
from ..Methods.Machine.LamSlotWind.comp_fill_factor import comp_fill_factor
except ImportError as error:
comp_fill_factor = error
try:
from ..Methods.Machine.LamSlotWind.comp_output_geo import comp_output_geo
except ImportError as error:
comp_output_geo = error
try:
from ..Methods.Machine.LamSlotWind.get_polar_eq import get_polar_eq
except ImportError as error:
get_polar_eq = error
try:
from ..Methods.Machine.LamSlotWind.comp_wind_function import comp_wind_function
except ImportError as error:
comp_wind_function = error
try:
from ..Methods.Machine.LamSlotWind.plot_mmf_unit import plot_mmf_unit
except ImportError as error:
plot_mmf_unit = error
try:
from ..Methods.Machine.LamSlotWind.comp_resistance_wind import comp_resistance_wind
except ImportError as error:
comp_resistance_wind = error
try:
from ..Methods.Machine.LamSlotWind.comp_angle_d_axis import comp_angle_d_axis
except ImportError as error:
comp_angle_d_axis = error
try:
from ..Methods.Machine.LamSlotWind.comp_mmf_unit import comp_mmf_unit
except ImportError as error:
comp_mmf_unit = error
try:
from ..Methods.Machine.LamSlotWind.comp_rot_dir import comp_rot_dir
except ImportError as error:
comp_rot_dir = error
try:
from ..Methods.Machine.LamSlotWind.comp_lengths_winding import comp_lengths_winding
except ImportError as error:
comp_lengths_winding = error
try:
from ..Methods.Machine.LamSlotWind.comp_number_phase_eq import comp_number_phase_eq
except ImportError as error:
comp_number_phase_eq = error
try:
from ..Methods.Machine.LamSlotWind.comp_periodicity import comp_periodicity
except ImportError as error:
comp_periodicity = error
from ._check import InitUnKnowClassError
from .Winding import Winding
from .Slot import Slot
from .Material import Material
from .Hole import Hole
from .Notch import Notch
class LamSlotWind(LamSlot):
VERSION = 1
if isinstance(build_geometry, ImportError):
build_geometry = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method build_geometry: "
+ str(build_geometry)
)
)
)
else:
build_geometry = build_geometry
# cf Methods.Machine.LamSlotWind.check
if isinstance(check, ImportError):
check = property(
fget=lambda x: raise_(
ImportError("Can't use LamSlotWind method check: " + str(check))
)
)
else:
check = check
if isinstance(comp_masses, ImportError):
comp_masses = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_masses: " + str(comp_masses)
)
)
)
else:
comp_masses = comp_masses
# cf Methods.Machine.LamSlotWind.comp_surfaces
if isinstance(comp_surfaces, ImportError):
comp_surfaces = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_surfaces: " + str(comp_surfaces)
)
)
)
else:
comp_surfaces = comp_surfaces
if isinstance(comp_volumes, ImportError):
comp_volumes = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_volumes: " + str(comp_volumes)
)
)
)
else:
comp_volumes = comp_volumes
# cf Methods.Machine.LamSlotWind.get_pole_pair_number
if isinstance(get_pole_pair_number, ImportError):
get_pole_pair_number = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method get_pole_pair_number: "
+ str(get_pole_pair_number)
)
)
)
else:
get_pole_pair_number = get_pole_pair_number
if isinstance(get_name_phase, ImportError):
get_name_phase = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method get_name_phase: "
+ str(get_name_phase)
)
)
)
else:
get_name_phase = get_name_phase
# cf Methods.Machine.LamSlotWind.plot
if isinstance(plot, ImportError):
plot = property(
fget=lambda x: raise_(
ImportError("Can't use LamSlotWind method plot: " + str(plot))
)
)
else:
plot = plot
if isinstance(plot_winding, ImportError):
plot_winding = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method plot_winding: " + str(plot_winding)
)
)
)
else:
plot_winding = plot_winding
# cf Methods.Machine.LamSlotWind.comp_fill_factor
if isinstance(comp_fill_factor, ImportError):
comp_fill_factor = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_fill_factor: "
+ str(comp_fill_factor)
)
)
)
else:
comp_fill_factor = comp_fill_factor
if isinstance(comp_output_geo, ImportError):
comp_output_geo = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_output_geo: "
+ str(comp_output_geo)
)
)
)
else:
comp_output_geo = comp_output_geo
# cf Methods.Machine.LamSlotWind.get_polar_eq
if isinstance(get_polar_eq, ImportError):
get_polar_eq = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method get_polar_eq: " + str(get_polar_eq)
)
)
)
else:
get_polar_eq = get_polar_eq
if isinstance(comp_wind_function, ImportError):
comp_wind_function = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_wind_function: "
+ str(comp_wind_function)
)
)
)
else:
comp_wind_function = comp_wind_function
# cf Methods.Machine.LamSlotWind.plot_mmf_unit
if isinstance(plot_mmf_unit, ImportError):
plot_mmf_unit = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method plot_mmf_unit: " + str(plot_mmf_unit)
)
)
)
else:
plot_mmf_unit = plot_mmf_unit
if isinstance(comp_resistance_wind, ImportError):
comp_resistance_wind = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_resistance_wind: "
+ str(comp_resistance_wind)
)
)
)
else:
comp_resistance_wind = comp_resistance_wind
# cf Methods.Machine.LamSlotWind.comp_angle_d_axis
if isinstance(comp_angle_d_axis, ImportError):
comp_angle_d_axis = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_angle_d_axis: "
+ str(comp_angle_d_axis)
)
)
)
else:
comp_angle_d_axis = comp_angle_d_axis
if isinstance(comp_mmf_unit, ImportError):
comp_mmf_unit = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_mmf_unit: " + str(comp_mmf_unit)
)
)
)
else:
comp_mmf_unit = comp_mmf_unit
# cf Methods.Machine.LamSlotWind.comp_rot_dir
if isinstance(comp_rot_dir, ImportError):
comp_rot_dir = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_rot_dir: " + str(comp_rot_dir)
)
)
)
else:
comp_rot_dir = comp_rot_dir
if isinstance(comp_lengths_winding, ImportError):
comp_lengths_winding = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_lengths_winding: "
+ str(comp_lengths_winding)
)
)
)
else:
comp_lengths_winding = comp_lengths_winding
# cf Methods.Machine.LamSlotWind.comp_number_phase_eq
if isinstance(comp_number_phase_eq, ImportError):
comp_number_phase_eq = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_number_phase_eq: "
+ str(comp_number_phase_eq)
)
)
)
else:
comp_number_phase_eq = comp_number_phase_eq
if isinstance(comp_periodicity, ImportError):
comp_periodicity = property(
fget=lambda x: raise_(
ImportError(
"Can't use LamSlotWind method comp_periodicity: "
+ str(comp_periodicity)
)
)
)
else:
comp_periodicity = comp_periodicity
# save and copy methods are available in all object
save = save
copy = copy
# get_logger method is available in all object
get_logger = get_logger
def __init__(
self,
Ksfill=None,
winding=-1,
slot=-1,
L1=0.35,
mat_type=-1,
Nrvd=0,
Wrvd=0,
Kf1=0.95,
is_internal=True,
Rint=0,
Rext=1,
is_stator=True,
axial_vent=-1,
notch=-1,
init_dict=None,
init_str=None,
):
if init_str is not None: # Load from a file
init_dict = load_init_dict(init_str)[1]
if init_dict is not None: # Initialisation by dict
assert type(init_dict) is dict
# Overwrite default value with init_dict content
if "Ksfill" in list(init_dict.keys()):
Ksfill = init_dict["Ksfill"]
if "winding" in list(init_dict.keys()):
winding = init_dict["winding"]
if "slot" in list(init_dict.keys()):
slot = init_dict["slot"]
if "L1" in list(init_dict.keys()):
L1 = init_dict["L1"]
if "mat_type" in list(init_dict.keys()):
mat_type = init_dict["mat_type"]
if "Nrvd" in list(init_dict.keys()):
Nrvd = init_dict["Nrvd"]
if "Wrvd" in list(init_dict.keys()):
Wrvd = init_dict["Wrvd"]
if "Kf1" in list(init_dict.keys()):
Kf1 = init_dict["Kf1"]
if "is_internal" in list(init_dict.keys()):
is_internal = init_dict["is_internal"]
if "Rint" in list(init_dict.keys()):
Rint = init_dict["Rint"]
if "Rext" in list(init_dict.keys()):
Rext = init_dict["Rext"]
if "is_stator" in list(init_dict.keys()):
is_stator = init_dict["is_stator"]
if "axial_vent" in list(init_dict.keys()):
axial_vent = init_dict["axial_vent"]
if "notch" in list(init_dict.keys()):
notch = init_dict["notch"]
# Set the properties (value check and convertion are done in setter)
self.Ksfill = Ksfill
self.winding = winding
# Call LamSlot init
super(LamSlotWind, self).__init__(
slot=slot,
L1=L1,
mat_type=mat_type,
Nrvd=Nrvd,
Wrvd=Wrvd,
Kf1=Kf1,
is_internal=is_internal,
Rint=Rint,
Rext=Rext,
is_stator=is_stator,
axial_vent=axial_vent,
notch=notch,
)
# The class is frozen (in LamSlot init), for now it's impossible to
def __str__(self):
LamSlotWind_str = ""
LamSlotWind_str += super(LamSlotWind, self).__str__()
LamSlotWind_str += "Ksfill = " + str(self.Ksfill) + linesep
if self.winding is not None:
tmp = self.winding.__str__().replace(linesep, linesep + "\t").rstrip("\t")
LamSlotWind_str += "winding = " + tmp
else:
LamSlotWind_str += "winding = None" + linesep + linesep
return LamSlotWind_str
def __eq__(self, other):
if type(other) != type(self):
return False
if not super(LamSlotWind, self).__eq__(other):
return False
if other.Ksfill != self.Ksfill:
return False
if other.winding != self.winding:
return False
return True
def compare(self, other, name="self"):
if type(other) != type(self):
return ["type(" + name + ")"]
diff_list = list()
diff_list.extend(super(LamSlotWind, self).compare(other, name=name))
if other._Ksfill != self._Ksfill:
diff_list.append(name + ".Ksfill")
if (other.winding is None and self.winding is not None) or (
other.winding is not None and self.winding is None
):
diff_list.append(name + ".winding None mismatch")
elif self.winding is not None:
diff_list.extend(
self.winding.compare(other.winding, name=name + ".winding")
)
return diff_list
def __sizeof__(self):
S = 0
S += super(LamSlotWind, self).__sizeof__()
S += getsizeof(self.Ksfill)
S += getsizeof(self.winding)
return S
def as_dict(self):
LamSlotWind_dict = super(LamSlotWind, self).as_dict()
LamSlotWind_dict["Ksfill"] = self.Ksfill
if self.winding is None:
LamSlotWind_dict["winding"] = None
else:
LamSlotWind_dict["winding"] = self.winding.as_dict()
LamSlotWind_dict["__class__"] = "LamSlotWind"
return LamSlotWind_dict
def _set_None(self):
self.Ksfill = None
if self.winding is not None:
self.winding._set_None()
super(LamSlotWind, self)._set_None()
def _get_Ksfill(self):
return self._Ksfill
def _set_Ksfill(self, value):
check_var("Ksfill", value, "float", Vmin=0, Vmax=1)
self._Ksfill = value
Ksfill = property(
fget=_get_Ksfill,
fset=_set_Ksfill,
doc=u"""Imposed Slot Fill factor (if None, will be computed according to the winding and the slot)
:Type: float
:min: 0
:max: 1
""",
)
def _get_winding(self):
return self._winding
def _set_winding(self, value):
if isinstance(value, str):
value = load_init_dict(value)[1]
if isinstance(value, dict) and "__class__" in value:
class_obj = import_class(
"pyleecan.Classes", value.get("__class__"), "winding"
)
value = class_obj(init_dict=value)
elif type(value) is int and value == -1:
value = Winding()
check_var("winding", value, "Winding")
self._winding = value
if self._winding is not None:
self._winding.parent = self
winding = property(
fget=_get_winding,
fset=_set_winding,
doc=u"""Lamination's Winding
:Type: Winding
""",
)
| true
| true
|
790a5dfe71aa219c908137d06d5f413bfb35dba6
| 1,220
|
py
|
Python
|
dvc/command/checkout.py
|
yfarjoun/dvc
|
eaca7dc80c765dd3a8dbe4c8fb3b206656bbc5e2
|
[
"Apache-2.0"
] | null | null | null |
dvc/command/checkout.py
|
yfarjoun/dvc
|
eaca7dc80c765dd3a8dbe4c8fb3b206656bbc5e2
|
[
"Apache-2.0"
] | null | null | null |
dvc/command/checkout.py
|
yfarjoun/dvc
|
eaca7dc80c765dd3a8dbe4c8fb3b206656bbc5e2
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import unicode_literals
from dvc.command.base import CmdBase
class CmdCheckout(CmdBase):
def run(self):
if not self.args.targets:
self.project.checkout(force=self.args.force)
else:
for target in self.args.targets:
self.project.checkout(
target=target, with_deps=self.args.with_deps, force=self.args.force
)
return 0
def add_parser(subparsers, parent_parser):
CHECKOUT_HELP = "Checkout data files from cache."
checkout_parser = subparsers.add_parser(
"checkout",
parents=[parent_parser],
description=CHECKOUT_HELP,
help=CHECKOUT_HELP,
)
checkout_parser.add_argument(
"-d",
"--with-deps",
action="store_true",
default=False,
help="Checkout all dependencies of the specified target.",
)
checkout_parser.add_argument(
"-f",
"--force",
action="store_true",
default=False,
help="Do not prompt when removing working directory files.",
)
checkout_parser.add_argument("targets", nargs="*", help="DVC files.")
checkout_parser.set_defaults(func=CmdCheckout)
| 29.047619
| 87
| 0.62377
|
from __future__ import unicode_literals
from dvc.command.base import CmdBase
class CmdCheckout(CmdBase):
def run(self):
if not self.args.targets:
self.project.checkout(force=self.args.force)
else:
for target in self.args.targets:
self.project.checkout(
target=target, with_deps=self.args.with_deps, force=self.args.force
)
return 0
def add_parser(subparsers, parent_parser):
CHECKOUT_HELP = "Checkout data files from cache."
checkout_parser = subparsers.add_parser(
"checkout",
parents=[parent_parser],
description=CHECKOUT_HELP,
help=CHECKOUT_HELP,
)
checkout_parser.add_argument(
"-d",
"--with-deps",
action="store_true",
default=False,
help="Checkout all dependencies of the specified target.",
)
checkout_parser.add_argument(
"-f",
"--force",
action="store_true",
default=False,
help="Do not prompt when removing working directory files.",
)
checkout_parser.add_argument("targets", nargs="*", help="DVC files.")
checkout_parser.set_defaults(func=CmdCheckout)
| true
| true
|
790a5e04c67be0d563b2732ff23f9394a10b8a66
| 397
|
py
|
Python
|
tests/tune/_utils/test_values.py
|
fugue-project/tune
|
bf2288ddcb29c8345d996a9b22c0910da9002da1
|
[
"Apache-2.0"
] | 14
|
2021-03-03T20:02:09.000Z
|
2021-11-10T20:32:22.000Z
|
tests/tune/_utils/test_values.py
|
fugue-project/tune
|
bf2288ddcb29c8345d996a9b22c0910da9002da1
|
[
"Apache-2.0"
] | 26
|
2021-04-30T19:56:06.000Z
|
2022-01-18T04:40:00.000Z
|
tests/tune/_utils/test_values.py
|
fugue-project/tune
|
bf2288ddcb29c8345d996a9b22c0910da9002da1
|
[
"Apache-2.0"
] | 2
|
2021-04-30T03:12:21.000Z
|
2022-02-05T12:13:37.000Z
|
import json
import numpy as np
from tune._utils import normalize_hp
def test_normalize_hp():
assert isinstance(np.int64(10), np.int64)
assert 10 == normalize_hp(np.int64(10))
assert not isinstance(normalize_hp(np.int64(10)), np.int64)
assert json.dumps(dict(a=[0, 1], b=1.1, c="x")) == json.dumps(
normalize_hp(dict(a=[np.int64(0), 1], b=np.float64(1.1), c="x"))
)
| 26.466667
| 72
| 0.657431
|
import json
import numpy as np
from tune._utils import normalize_hp
def test_normalize_hp():
assert isinstance(np.int64(10), np.int64)
assert 10 == normalize_hp(np.int64(10))
assert not isinstance(normalize_hp(np.int64(10)), np.int64)
assert json.dumps(dict(a=[0, 1], b=1.1, c="x")) == json.dumps(
normalize_hp(dict(a=[np.int64(0), 1], b=np.float64(1.1), c="x"))
)
| true
| true
|
790a5e6702e1eacd68e7a245338708212141c8a9
| 1,495
|
py
|
Python
|
examples/sum_worker.py
|
abau171/highfive
|
07b3829331072035ab100d1d66deca3e8f3f372a
|
[
"MIT"
] | 3
|
2017-09-19T07:48:11.000Z
|
2018-05-25T21:40:08.000Z
|
examples/sum_worker.py
|
abau171/highfive
|
07b3829331072035ab100d1d66deca3e8f3f372a
|
[
"MIT"
] | null | null | null |
examples/sum_worker.py
|
abau171/highfive
|
07b3829331072035ab100d1d66deca3e8f3f372a
|
[
"MIT"
] | null | null | null |
import time
import random
import highfive
# This is the remote worker for the sum example. Here, we define what the
# workers do when they get a call from the master. All we need is a single
# function which takes the call, does some processing, and returns a response.
# An interesting way to play with the workers is to spin some up, then shut
# them down before the job set running on the master is complete. The jobs
# which the workers are running will be requeued on the master so that when
# more workers connect, the jobs will be tried again. This makes network
# problems no big deal as long as you reconnect the workers at some point.
# In our case, we take in a pair of numbers and return their sum. To make
# it easier to watch the progress of the job set in real time, we sleep for
# anywhere between 0 and 1/4 seconds before the sum to simulate heavy
# processing.
def delayed_sum(numbers):
time.sleep(random.random() / 4)
return sum(numbers)
# Now we can easily start a worker pool to connect to a local HighFive master.
# We can also add a `host=<host name>` and `port=<port number>` to connect to a
# remote HighFive master. By default, `run_worker_pool()` creates a worker
# process for each available CPU core to maximize CPU utilization, but we can
# we can limit this with `max_workers=<number of workers>`.
if __name__ == "__main__":
try:
highfive.run_worker_pool(delayed_sum)
except KeyboardInterrupt:
print("keyboard interrupt")
| 38.333333
| 79
| 0.745819
|
import time
import random
import highfive
def delayed_sum(numbers):
time.sleep(random.random() / 4)
return sum(numbers)
if __name__ == "__main__":
try:
highfive.run_worker_pool(delayed_sum)
except KeyboardInterrupt:
print("keyboard interrupt")
| true
| true
|
790a5e6e2e75add62cd780c155680f5a7e609996
| 1,701
|
py
|
Python
|
mayan/apps/document_indexing/forms.py
|
Syunkolee9891/Mayan-EDMS
|
3759a9503a264a180b74cc8518388f15ca66ac1a
|
[
"Apache-2.0"
] | 1
|
2021-06-17T18:24:25.000Z
|
2021-06-17T18:24:25.000Z
|
mayan/apps/document_indexing/forms.py
|
Syunkolee9891/Mayan-EDMS
|
3759a9503a264a180b74cc8518388f15ca66ac1a
|
[
"Apache-2.0"
] | 7
|
2020-06-06T00:01:04.000Z
|
2022-01-13T01:47:17.000Z
|
mayan/apps/document_indexing/forms.py
|
Syunkolee9891/Mayan-EDMS
|
3759a9503a264a180b74cc8518388f15ca66ac1a
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import, unicode_literals
from django import forms
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from mayan.apps.common.classes import ModelProperty
from mayan.apps.common.forms import FilteredSelectionForm
from mayan.apps.documents.models import Document
from .models import Index, IndexTemplateNode
from .permissions import permission_document_indexing_rebuild
class IndexTemplateFilteredForm(FilteredSelectionForm):
class Meta:
allow_multiple = True
field_name = 'index_templates'
help_text = _('Index templates to be queued for rebuilding.')
label = _('Index templates')
queryset = Index.objects.filter(enabled=True)
permission = permission_document_indexing_rebuild
widget_attributes = {'class': 'select2'}
class IndexTemplateNodeForm(forms.ModelForm):
"""
A standard model form to allow users to create a new index template node
"""
def __init__(self, *args, **kwargs):
super(IndexTemplateNodeForm, self).__init__(*args, **kwargs)
self.fields['index'].widget = forms.widgets.HiddenInput()
self.fields['parent'].widget = forms.widgets.HiddenInput()
self.fields['expression'].help_text = ' '.join(
[
force_text(self.fields['expression'].help_text),
'<br>',
ModelProperty.get_help_text_for(
model=Document, show_name=True
).replace('\n', '<br>')
]
)
class Meta:
fields = ('parent', 'index', 'expression', 'enabled', 'link_documents')
model = IndexTemplateNode
| 36.191489
| 79
| 0.678424
|
from __future__ import absolute_import, unicode_literals
from django import forms
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from mayan.apps.common.classes import ModelProperty
from mayan.apps.common.forms import FilteredSelectionForm
from mayan.apps.documents.models import Document
from .models import Index, IndexTemplateNode
from .permissions import permission_document_indexing_rebuild
class IndexTemplateFilteredForm(FilteredSelectionForm):
class Meta:
allow_multiple = True
field_name = 'index_templates'
help_text = _('Index templates to be queued for rebuilding.')
label = _('Index templates')
queryset = Index.objects.filter(enabled=True)
permission = permission_document_indexing_rebuild
widget_attributes = {'class': 'select2'}
class IndexTemplateNodeForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(IndexTemplateNodeForm, self).__init__(*args, **kwargs)
self.fields['index'].widget = forms.widgets.HiddenInput()
self.fields['parent'].widget = forms.widgets.HiddenInput()
self.fields['expression'].help_text = ' '.join(
[
force_text(self.fields['expression'].help_text),
'<br>',
ModelProperty.get_help_text_for(
model=Document, show_name=True
).replace('\n', '<br>')
]
)
class Meta:
fields = ('parent', 'index', 'expression', 'enabled', 'link_documents')
model = IndexTemplateNode
| true
| true
|
790a5f8c8cf690ffb397e8405f96dd3d982b74c7
| 2,360
|
py
|
Python
|
venv/Lib/site-packages/jupyter_client/asynchronous/client.py
|
ajayiagbebaku/NFL-Model
|
afcc67a85ca7138c58c3334d45988ada2da158ed
|
[
"MIT"
] | 7
|
2022-01-16T12:28:16.000Z
|
2022-03-04T15:31:45.000Z
|
venv/Lib/site-packages/jupyter_client/asynchronous/client.py
|
ajayiagbebaku/NFL-Model
|
afcc67a85ca7138c58c3334d45988ada2da158ed
|
[
"MIT"
] | 8
|
2021-09-22T12:47:32.000Z
|
2022-01-14T21:30:38.000Z
|
venv/Lib/site-packages/jupyter_client/asynchronous/client.py
|
ajayiagbebaku/NFL-Model
|
afcc67a85ca7138c58c3334d45988ada2da158ed
|
[
"MIT"
] | 3
|
2020-08-04T02:48:32.000Z
|
2020-08-17T01:20:09.000Z
|
"""Implements an async kernel client"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from traitlets import Type # type: ignore
from jupyter_client.channels import HBChannel
from jupyter_client.channels import ZMQSocketChannel
from jupyter_client.client import KernelClient
from jupyter_client.client import reqrep
def wrapped(meth, channel):
def _(self, *args, **kwargs):
reply = kwargs.pop("reply", False)
timeout = kwargs.pop("timeout", None)
msg_id = meth(self, *args, **kwargs)
if not reply:
return msg_id
return self._async_recv_reply(msg_id, timeout=timeout, channel=channel)
return _
class AsyncKernelClient(KernelClient):
"""A KernelClient with async APIs
``get_[channel]_msg()`` methods wait for and return messages on channels,
raising :exc:`queue.Empty` if no message arrives within ``timeout`` seconds.
"""
# --------------------------------------------------------------------------
# Channel proxy methods
# --------------------------------------------------------------------------
get_shell_msg = KernelClient._async_get_shell_msg
get_iopub_msg = KernelClient._async_get_iopub_msg
get_stdin_msg = KernelClient._async_get_stdin_msg
get_control_msg = KernelClient._async_get_control_msg
wait_for_ready = KernelClient._async_wait_for_ready
# The classes to use for the various channels
shell_channel_class = Type(ZMQSocketChannel)
iopub_channel_class = Type(ZMQSocketChannel)
stdin_channel_class = Type(ZMQSocketChannel)
hb_channel_class = Type(HBChannel)
control_channel_class = Type(ZMQSocketChannel)
_recv_reply = KernelClient._async_recv_reply
# replies come on the shell channel
execute = reqrep(wrapped, KernelClient.execute)
history = reqrep(wrapped, KernelClient.history)
complete = reqrep(wrapped, KernelClient.complete)
inspect = reqrep(wrapped, KernelClient.inspect)
kernel_info = reqrep(wrapped, KernelClient.kernel_info)
comm_info = reqrep(wrapped, KernelClient.comm_info)
is_alive = KernelClient._async_is_alive
execute_interactive = KernelClient._async_execute_interactive
# replies come on the control channel
shutdown = reqrep(wrapped, KernelClient.shutdown, channel="control")
| 36.875
| 80
| 0.7
|
from traitlets import Type
from jupyter_client.channels import HBChannel
from jupyter_client.channels import ZMQSocketChannel
from jupyter_client.client import KernelClient
from jupyter_client.client import reqrep
def wrapped(meth, channel):
def _(self, *args, **kwargs):
reply = kwargs.pop("reply", False)
timeout = kwargs.pop("timeout", None)
msg_id = meth(self, *args, **kwargs)
if not reply:
return msg_id
return self._async_recv_reply(msg_id, timeout=timeout, channel=channel)
return _
class AsyncKernelClient(KernelClient):
get_shell_msg = KernelClient._async_get_shell_msg
get_iopub_msg = KernelClient._async_get_iopub_msg
get_stdin_msg = KernelClient._async_get_stdin_msg
get_control_msg = KernelClient._async_get_control_msg
wait_for_ready = KernelClient._async_wait_for_ready
shell_channel_class = Type(ZMQSocketChannel)
iopub_channel_class = Type(ZMQSocketChannel)
stdin_channel_class = Type(ZMQSocketChannel)
hb_channel_class = Type(HBChannel)
control_channel_class = Type(ZMQSocketChannel)
_recv_reply = KernelClient._async_recv_reply
execute = reqrep(wrapped, KernelClient.execute)
history = reqrep(wrapped, KernelClient.history)
complete = reqrep(wrapped, KernelClient.complete)
inspect = reqrep(wrapped, KernelClient.inspect)
kernel_info = reqrep(wrapped, KernelClient.kernel_info)
comm_info = reqrep(wrapped, KernelClient.comm_info)
is_alive = KernelClient._async_is_alive
execute_interactive = KernelClient._async_execute_interactive
shutdown = reqrep(wrapped, KernelClient.shutdown, channel="control")
| true
| true
|
790a5fab2bd71fa2690fa7a244b882847cde4739
| 2,020
|
py
|
Python
|
quick-start/python/quick-start.py
|
vektonn/vektonn-examples
|
fa69328a4fd106f8de55319cec1408d88cfdba52
|
[
"Apache-2.0"
] | 8
|
2021-11-22T07:20:23.000Z
|
2021-12-07T08:32:58.000Z
|
quick-start/python/quick-start.py
|
vektonn/vektonn-examples
|
fa69328a4fd106f8de55319cec1408d88cfdba52
|
[
"Apache-2.0"
] | null | null | null |
quick-start/python/quick-start.py
|
vektonn/vektonn-examples
|
fa69328a4fd106f8de55319cec1408d88cfdba52
|
[
"Apache-2.0"
] | null | null | null |
from vektonn import Vektonn
from vektonn.dtos import Attribute, AttributeValue, Vector, InputDataPoint, SearchQuery
vektonn_client = Vektonn('http://localhost:8081')
input_data_points = [
InputDataPoint(
attributes=[
Attribute(key='id', value=AttributeValue(int64=1)),
Attribute(key='payload', value=AttributeValue(string='first data point')),
],
vector=Vector(is_sparse=False, coordinates=[0.0, 1.0])),
InputDataPoint(
attributes=[
Attribute(key='id', value=AttributeValue(int64=2)),
Attribute(key='payload', value=AttributeValue(string='second data point')),
],
vector=Vector(is_sparse=False, coordinates=[1.0, 0.0])),
InputDataPoint(
attributes=[
Attribute(key='id', value=AttributeValue(int64=3)),
Attribute(key='payload', value=AttributeValue(string='third data point')),
],
vector=Vector(is_sparse=False, coordinates=[-0.5, 0.0])),
]
vektonn_client.upload(
data_source_name='QuickStart.Source',
data_source_version='1.0',
input_data_points=input_data_points)
k = 2
search_query = SearchQuery(k=k, query_vectors=[
Vector(is_sparse=False, coordinates=[0.0, 2.0]),
])
search_results = vektonn_client.search(
index_name='QuickStart.Index',
index_version='1.0',
search_query=search_query)
print(f'For query vector {search_results[0].query_vector.coordinates} {k} nearest data points are:')
for fdp in search_results[0].nearest_data_points:
attrs = {x.key : x.value for x in fdp.attributes}
distance, vector, dp_id, payload = fdp.distance, fdp.vector, attrs['id'].int64, attrs['payload'].string
print(f' - "{payload}" with id = {dp_id}, vector = {vector.coordinates}, distance = {distance}')
'''
Expected output:
For query vector [0.0, 2.0] 2 nearest data points are:
- "first data point" with id = 1, vector = [0.0, 1.0], distance = 1.0
- "third data point" with id = 3, vector = [-0.5, 0.0], distance = 4.25
'''
| 36.727273
| 107
| 0.668812
|
from vektonn import Vektonn
from vektonn.dtos import Attribute, AttributeValue, Vector, InputDataPoint, SearchQuery
vektonn_client = Vektonn('http://localhost:8081')
input_data_points = [
InputDataPoint(
attributes=[
Attribute(key='id', value=AttributeValue(int64=1)),
Attribute(key='payload', value=AttributeValue(string='first data point')),
],
vector=Vector(is_sparse=False, coordinates=[0.0, 1.0])),
InputDataPoint(
attributes=[
Attribute(key='id', value=AttributeValue(int64=2)),
Attribute(key='payload', value=AttributeValue(string='second data point')),
],
vector=Vector(is_sparse=False, coordinates=[1.0, 0.0])),
InputDataPoint(
attributes=[
Attribute(key='id', value=AttributeValue(int64=3)),
Attribute(key='payload', value=AttributeValue(string='third data point')),
],
vector=Vector(is_sparse=False, coordinates=[-0.5, 0.0])),
]
vektonn_client.upload(
data_source_name='QuickStart.Source',
data_source_version='1.0',
input_data_points=input_data_points)
k = 2
search_query = SearchQuery(k=k, query_vectors=[
Vector(is_sparse=False, coordinates=[0.0, 2.0]),
])
search_results = vektonn_client.search(
index_name='QuickStart.Index',
index_version='1.0',
search_query=search_query)
print(f'For query vector {search_results[0].query_vector.coordinates} {k} nearest data points are:')
for fdp in search_results[0].nearest_data_points:
attrs = {x.key : x.value for x in fdp.attributes}
distance, vector, dp_id, payload = fdp.distance, fdp.vector, attrs['id'].int64, attrs['payload'].string
print(f' - "{payload}" with id = {dp_id}, vector = {vector.coordinates}, distance = {distance}')
| true
| true
|
790a60a49dc26ab2276e5b7f9d6b8ef287df3e6a
| 1,052
|
py
|
Python
|
spark_auto_mapper_fhir/value_sets/contract_resource_asset_availiability_codes.py
|
imranq2/SparkAutoMapper.FHIR
|
dd23b218fb0097d1edc2f3e688e8d6d4d7278bd2
|
[
"Apache-2.0"
] | 1
|
2020-10-31T23:25:07.000Z
|
2020-10-31T23:25:07.000Z
|
spark_auto_mapper_fhir/value_sets/contract_resource_asset_availiability_codes.py
|
icanbwell/SparkAutoMapper.FHIR
|
98f368e781b46523142c7cb513c670d659a93c9b
|
[
"Apache-2.0"
] | null | null | null |
spark_auto_mapper_fhir/value_sets/contract_resource_asset_availiability_codes.py
|
icanbwell/SparkAutoMapper.FHIR
|
98f368e781b46523142c7cb513c670d659a93c9b
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import annotations
from spark_auto_mapper_fhir.fhir_types.uri import FhirUri
from spark_auto_mapper_fhir.value_sets.generic_type import GenericTypeCode
from spark_auto_mapper.type_definitions.defined_types import AutoMapperTextInputType
# This file is auto-generated by generate_classes so do not edit manually
# noinspection PyPep8Naming
class ContractResourceAssetAvailiabilityCodesCode(GenericTypeCode):
"""
ContractResourceAssetAvailiabilityCodes
From: http://hl7.org/fhir/asset-availability in valuesets.xml
This value set has asset availability codes.
"""
def __init__(self, value: AutoMapperTextInputType):
super().__init__(value=value)
"""
http://hl7.org/fhir/asset-availability
"""
codeset: FhirUri = "http://hl7.org/fhir/asset-availability"
class ContractResourceAssetAvailiabilityCodesCodeValues:
"""
To be completed
From: http://hl7.org/fhir/asset-availability in valuesets.xml
"""
Lease = ContractResourceAssetAvailiabilityCodesCode("lease")
| 30.941176
| 84
| 0.773764
|
from __future__ import annotations
from spark_auto_mapper_fhir.fhir_types.uri import FhirUri
from spark_auto_mapper_fhir.value_sets.generic_type import GenericTypeCode
from spark_auto_mapper.type_definitions.defined_types import AutoMapperTextInputType
class ContractResourceAssetAvailiabilityCodesCode(GenericTypeCode):
def __init__(self, value: AutoMapperTextInputType):
super().__init__(value=value)
codeset: FhirUri = "http://hl7.org/fhir/asset-availability"
class ContractResourceAssetAvailiabilityCodesCodeValues:
Lease = ContractResourceAssetAvailiabilityCodesCode("lease")
| true
| true
|
790a615280e0c22c06d8a7e914dff9de55aa0383
| 6,376
|
py
|
Python
|
src/data_collection/data_collector.py
|
BigPeet/pr0tagger
|
2b71bf7615dd8e8f7ba8e1875af7af90734c0f41
|
[
"MIT"
] | null | null | null |
src/data_collection/data_collector.py
|
BigPeet/pr0tagger
|
2b71bf7615dd8e8f7ba8e1875af7af90734c0f41
|
[
"MIT"
] | null | null | null |
src/data_collection/data_collector.py
|
BigPeet/pr0tagger
|
2b71bf7615dd8e8f7ba8e1875af7af90734c0f41
|
[
"MIT"
] | null | null | null |
from datetime import timedelta
import json
from os import listdir
from os.path import isfile, join
import pr0gramm
import logging
__author__ = "Peter Wolf"
__mail__ = "pwolf2310@gmail.com"
__date__ = "2016-12-26"
LOG = logging.getLogger(__name__)
class DataSources:
IMAGE, THUMBNAIL, FULL_SIZE = range(3)
class DataCollector:
""" The DataCollector retrieves relevant data from
pr0gramm and saves it locally.
"""
def __init__(self, api, last_id=None):
self.api = api
self.last_id = last_id
self.age_threshold = timedelta(hours=5)
self.min_num_of_tags = 5
self.search_forwards = True
self.media_directory = "/tmp"
self.data_source = DataSources.IMAGE
self.annotation_file = "/tmp/annotation.txt"
self.json_dir = "/tmp"
self.download_media = True
self.save_json = False
self.use_local_storage = False
self.last_batch_size = None
def setAgeThreshold(self, days=0, hours=5, minutes=0, seconds=0):
self.age_threshold = timedelta(
days=days, hours=hours, minutes=minutes, seconds=seconds)
def setMinimumNumberOfTags(self, threshold):
self.min_num_of_tags = threshold
def setLastId(self, last_id):
self.last_id = last_id
def getLastId(self):
return self.last_id
def useBackwardsSearch(self):
self.search_forwards = False
def useForwardsSearch(self):
self.search_forwards = True
def setMediaDirectory(self, directory):
self.media_directory = directory
def setDataSource(self, source):
self.data_source = source
def setAnnotationFile(self, annotation_file):
self.annotation_file = annotation_file
def setJsonDir(self, directory):
self.json_dir = directory
def setDownloadMedia(self, download_media):
self.download_media = download_media
def setSaveJSON(self, save_json):
self.save_json = save_json
def setUseLocalStorage(self, use_local_storage):
self.use_local_storage = use_local_storage
def getSizeOfLastBatch(self):
return self.last_batch_size
def download(self, item):
if self.data_source == DataSources.IMAGE:
return self.api.downloadMedia(
item, save_dir=self.media_directory, file_name=item.id)
elif self.data_source == DataSources.THUMBNAIL:
return self.api.downloadThumbnail(
item, save_dir=self.media_directory, file_name=item.id)
elif self.data_source == DataSources.FULL_SIZE:
return self.api.downloadFullsize(
item, save_dir=self.media_directory, file_name=item.id)
else:
print "No valid data source chosen:", str(self.data_source)
return None
def writeAnnotation(self, item, media_path):
# Read the current annotation file
content = []
if isfile(self.annotation_file):
with open(self.annotation_file, "r") as f:
content = f.readlines()
# write every item as a line with the following structure:
# ID;IMAGE_PATH;AMOUNT_OF_TAGS;...TAG_TEXT;TAG_CONFIDENCE;...
new_line = str(item.id) + ";"
new_line += str(media_path) + ";"
new_line += str(len(item.tags)) + ";"
new_line += ";".join([str(tag.getText()) + ";" +
str(tag.getConfidence()) for tag in item.tags])
# Check if the item already has an entry in the annotation file
# and replace it.
contained = False
for i in range(len(content)):
if content[i].strip().startswith(str(item.id)):
content[i] = new_line
contained = True
break
# If no entry already exists, add a new line for the item
if not contained:
content.append(new_line)
# Write the new content to the file.
with open(self.annotation_file, "w") as f:
for line in content:
f.write(line.strip() + "\n")
def getItemsFromAPI(self):
if self.search_forwards:
return self.api.getItemsNewer(self.last_id)
else:
return self.api.getItemsOlder(self.last_id)
def getItemsFromLocalStorage(self):
json_files = [join(self.json_dir, f) for f in listdir(self.json_dir)
if isfile(join(self.json_dir, f)) and f.endswith(".json")]
data = []
for json_file in json_files:
with open(json_file, "r") as f:
json_item = json.load(f)
item = pr0gramm.Item.Item.parseFromJSON(json_item)
if not self.last_id \
or (self.search_forwards and item.getSortId() > self.last_id) \
or (not self.search_forwards and item.getSortId() < self.last_id):
data.append(item)
data.sort(reverse=True)
return data
def collectDataBatch(self, data=[]):
# retrieve data if none has been given
if not data:
if self.use_local_storage:
data = self.getItemsFromLocalStorage()
else:
data = self.getItemsFromAPI()
if not data:
return
# filter data based on age and tags
valid_data = []
for item in data:
if item.getAge() >= self.age_threshold and len(item.tags) > 0:
valid_data.append(item)
# save size of collected data batch
self.last_batch_size = len(valid_data)
if not valid_data:
return
# save id of last item to fit age criteria in search direction
if self.search_forwards:
self.last_id = valid_data[0].getSortId()
else:
self.last_id = valid_data[-1].getSortId()
for item in valid_data:
if self.download:
# download media
target_path = self.download(item)
if target_path:
# write id(s), link to media and tags to file
self.writeAnnotation(item, target_path)
if self.save_json:
with open(self.json_dir + "/" + str(item.id) + ".json", "w") as f:
json.dump(item.asDict(), f)
return self.last_id
| 33.208333
| 90
| 0.601317
|
from datetime import timedelta
import json
from os import listdir
from os.path import isfile, join
import pr0gramm
import logging
__author__ = "Peter Wolf"
__mail__ = "pwolf2310@gmail.com"
__date__ = "2016-12-26"
LOG = logging.getLogger(__name__)
class DataSources:
IMAGE, THUMBNAIL, FULL_SIZE = range(3)
class DataCollector:
""" The DataCollector retrieves relevant data from
pr0gramm and saves it locally.
"""
def __init__(self, api, last_id=None):
self.api = api
self.last_id = last_id
self.age_threshold = timedelta(hours=5)
self.min_num_of_tags = 5
self.search_forwards = True
self.media_directory = "/tmp"
self.data_source = DataSources.IMAGE
self.annotation_file = "/tmp/annotation.txt"
self.json_dir = "/tmp"
self.download_media = True
self.save_json = False
self.use_local_storage = False
self.last_batch_size = None
def setAgeThreshold(self, days=0, hours=5, minutes=0, seconds=0):
self.age_threshold = timedelta(
days=days, hours=hours, minutes=minutes, seconds=seconds)
def setMinimumNumberOfTags(self, threshold):
self.min_num_of_tags = threshold
def setLastId(self, last_id):
self.last_id = last_id
def getLastId(self):
return self.last_id
def useBackwardsSearch(self):
self.search_forwards = False
def useForwardsSearch(self):
self.search_forwards = True
def setMediaDirectory(self, directory):
self.media_directory = directory
def setDataSource(self, source):
self.data_source = source
def setAnnotationFile(self, annotation_file):
self.annotation_file = annotation_file
def setJsonDir(self, directory):
self.json_dir = directory
def setDownloadMedia(self, download_media):
self.download_media = download_media
def setSaveJSON(self, save_json):
self.save_json = save_json
def setUseLocalStorage(self, use_local_storage):
self.use_local_storage = use_local_storage
def getSizeOfLastBatch(self):
return self.last_batch_size
def download(self, item):
if self.data_source == DataSources.IMAGE:
return self.api.downloadMedia(
item, save_dir=self.media_directory, file_name=item.id)
elif self.data_source == DataSources.THUMBNAIL:
return self.api.downloadThumbnail(
item, save_dir=self.media_directory, file_name=item.id)
elif self.data_source == DataSources.FULL_SIZE:
return self.api.downloadFullsize(
item, save_dir=self.media_directory, file_name=item.id)
else:
print "No valid data source chosen:", str(self.data_source)
return None
def writeAnnotation(self, item, media_path):
content = []
if isfile(self.annotation_file):
with open(self.annotation_file, "r") as f:
content = f.readlines()
new_line = str(item.id) + ";"
new_line += str(media_path) + ";"
new_line += str(len(item.tags)) + ";"
new_line += ";".join([str(tag.getText()) + ";" +
str(tag.getConfidence()) for tag in item.tags])
contained = False
for i in range(len(content)):
if content[i].strip().startswith(str(item.id)):
content[i] = new_line
contained = True
break
if not contained:
content.append(new_line)
with open(self.annotation_file, "w") as f:
for line in content:
f.write(line.strip() + "\n")
def getItemsFromAPI(self):
if self.search_forwards:
return self.api.getItemsNewer(self.last_id)
else:
return self.api.getItemsOlder(self.last_id)
def getItemsFromLocalStorage(self):
json_files = [join(self.json_dir, f) for f in listdir(self.json_dir)
if isfile(join(self.json_dir, f)) and f.endswith(".json")]
data = []
for json_file in json_files:
with open(json_file, "r") as f:
json_item = json.load(f)
item = pr0gramm.Item.Item.parseFromJSON(json_item)
if not self.last_id \
or (self.search_forwards and item.getSortId() > self.last_id) \
or (not self.search_forwards and item.getSortId() < self.last_id):
data.append(item)
data.sort(reverse=True)
return data
def collectDataBatch(self, data=[]):
if not data:
if self.use_local_storage:
data = self.getItemsFromLocalStorage()
else:
data = self.getItemsFromAPI()
if not data:
return
valid_data = []
for item in data:
if item.getAge() >= self.age_threshold and len(item.tags) > 0:
valid_data.append(item)
self.last_batch_size = len(valid_data)
if not valid_data:
return
if self.search_forwards:
self.last_id = valid_data[0].getSortId()
else:
self.last_id = valid_data[-1].getSortId()
for item in valid_data:
if self.download:
target_path = self.download(item)
if target_path:
self.writeAnnotation(item, target_path)
if self.save_json:
with open(self.json_dir + "/" + str(item.id) + ".json", "w") as f:
json.dump(item.asDict(), f)
return self.last_id
| false
| true
|
790a617397094ee99d1a32c7cebf6ff874f00fb3
| 2,029
|
py
|
Python
|
app.py
|
asaxena2019/FakeNewsify-Article-Credibility-Checker
|
b7ce42a42f536cc758dee66fd5a8cd455195d8e8
|
[
"MIT"
] | null | null | null |
app.py
|
asaxena2019/FakeNewsify-Article-Credibility-Checker
|
b7ce42a42f536cc758dee66fd5a8cd455195d8e8
|
[
"MIT"
] | null | null | null |
app.py
|
asaxena2019/FakeNewsify-Article-Credibility-Checker
|
b7ce42a42f536cc758dee66fd5a8cd455195d8e8
|
[
"MIT"
] | 2
|
2020-06-28T06:56:43.000Z
|
2020-06-28T13:17:27.000Z
|
from flask import Flask, render_template, request
import model
from model import get_headlines
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'POST':
url = request.form['url']
predict = model.predict(url)
value = predict[1]
clickbait = predict[2]
text = predict[3]
article_title = predict[0]
model.update(value)
model.update(clickbait)
return render_template('index.html',
value = value,
clickbait = clickbait,
text = text,
article_title=article_title,
url=url)
else:
return render_template('index.html')
@app.route('/about')
def about():
return render_template('about.html')
@app.route('/feed')
def feed():
headlines = get_headlines()
return render_template('feed.html',headlines = headlines)
@app.route('/trends')
def trends():
return render_template('trends.html',
num_fake = model.get_data("FAKE"),
num_real = model.get_data("REAL"),
num_clickbait = model.get_data("CLICKBAIT"), num_notclickbait = model.get_data("NOT CLICKBAIT"))
app.run(host='0.0.0.0', port=8080, debug=True)
| 5.229381
| 121
| 0.377526
|
from flask import Flask, render_template, request
import model
from model import get_headlines
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'POST':
url = request.form['url']
predict = model.predict(url)
value = predict[1]
clickbait = predict[2]
text = predict[3]
article_title = predict[0]
model.update(value)
model.update(clickbait)
return render_template('index.html',
value = value,
clickbait = clickbait,
text = text,
article_title=article_title,
url=url)
else:
return render_template('index.html')
@app.route('/about')
def about():
return render_template('about.html')
@app.route('/feed')
def feed():
headlines = get_headlines()
return render_template('feed.html',headlines = headlines)
@app.route('/trends')
def trends():
return render_template('trends.html',
num_fake = model.get_data("FAKE"),
num_real = model.get_data("REAL"),
num_clickbait = model.get_data("CLICKBAIT"), num_notclickbait = model.get_data("NOT CLICKBAIT"))
app.run(host='0.0.0.0', port=8080, debug=True)
| true
| true
|
790a61bd42bf4aee38a4d49463c7c31640a629fb
| 250
|
py
|
Python
|
autoflow/workflow/components/regression/decision_tree.py
|
auto-flow/autoflow
|
f5903424ad8694d57741a0bd6dfeaba320ea6517
|
[
"BSD-3-Clause"
] | 49
|
2020-04-16T11:17:28.000Z
|
2020-05-06T01:32:44.000Z
|
autoflow/workflow/components/regression/decision_tree.py
|
auto-flow/autoflow
|
f5903424ad8694d57741a0bd6dfeaba320ea6517
|
[
"BSD-3-Clause"
] | null | null | null |
autoflow/workflow/components/regression/decision_tree.py
|
auto-flow/autoflow
|
f5903424ad8694d57741a0bd6dfeaba320ea6517
|
[
"BSD-3-Clause"
] | 3
|
2020-04-17T00:53:24.000Z
|
2020-04-23T03:04:26.000Z
|
from autoflow.workflow.components.regression_base import AutoFlowRegressionAlgorithm
__all__ = ["DecisionTreeRegressor"]
class DecisionTreeRegressor(AutoFlowRegressionAlgorithm):
module__ = "sklearn.tree"
class__ = "DecisionTreeRegressor"
| 27.777778
| 84
| 0.824
|
from autoflow.workflow.components.regression_base import AutoFlowRegressionAlgorithm
__all__ = ["DecisionTreeRegressor"]
class DecisionTreeRegressor(AutoFlowRegressionAlgorithm):
module__ = "sklearn.tree"
class__ = "DecisionTreeRegressor"
| true
| true
|
790a62b53845b48a68a1ce0d3d33b573ca9fa60a
| 18,846
|
py
|
Python
|
scripts/validate_docstrings.py
|
mattboggess/pandas
|
5551bcf9d297ea8a0aeffb70b17ae6730e8abf89
|
[
"BSD-3-Clause"
] | 2
|
2018-09-06T13:36:11.000Z
|
2018-09-09T12:38:22.000Z
|
scripts/validate_docstrings.py
|
mattboggess/pandas
|
5551bcf9d297ea8a0aeffb70b17ae6730e8abf89
|
[
"BSD-3-Clause"
] | 3
|
2018-09-24T22:09:28.000Z
|
2018-10-01T21:10:00.000Z
|
scripts/validate_docstrings.py
|
mattboggess/pandas
|
5551bcf9d297ea8a0aeffb70b17ae6730e8abf89
|
[
"BSD-3-Clause"
] | 1
|
2020-06-06T13:28:03.000Z
|
2020-06-06T13:28:03.000Z
|
#!/usr/bin/env python
"""
Analyze docstrings to detect errors.
If no argument is provided, it does a quick check of docstrings and returns
a csv with all API functions and results of basic checks.
If a function or method is provided in the form "pandas.function",
"pandas.module.class.method", etc. a list of all errors in the docstring for
the specified function or method.
Usage::
$ ./validate_docstrings.py
$ ./validate_docstrings.py pandas.DataFrame.head
"""
import os
import sys
import csv
import re
import functools
import collections
import argparse
import pydoc
import inspect
import importlib
import doctest
try:
from io import StringIO
except ImportError:
from cStringIO import StringIO
import numpy
BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(BASE_PATH))
import pandas
from pandas.compat import signature
sys.path.insert(1, os.path.join(BASE_PATH, 'doc', 'sphinxext'))
from numpydoc.docscrape import NumpyDocString
from pandas.io.formats.printing import pprint_thing
PRIVATE_CLASSES = ['NDFrame', 'IndexOpsMixin']
DIRECTIVES = ['versionadded', 'versionchanged', 'deprecated']
def _load_obj(obj_name):
for maxsplit in range(1, obj_name.count('.') + 1):
# TODO when py3 only replace by: module, *func_parts = ...
func_name_split = obj_name.rsplit('.', maxsplit)
module = func_name_split[0]
func_parts = func_name_split[1:]
try:
obj = importlib.import_module(module)
except ImportError:
pass
else:
continue
if 'module' not in locals():
raise ImportError('No module can be imported '
'from "{}"'.format(obj_name))
for part in func_parts:
obj = getattr(obj, part)
return obj
def _to_original_callable(obj):
while True:
if inspect.isfunction(obj) or inspect.isclass(obj):
f = inspect.getfile(obj)
if f.startswith('<') and f.endswith('>'):
return None
return obj
if inspect.ismethod(obj):
obj = obj.__func__
elif isinstance(obj, functools.partial):
obj = obj.func
elif isinstance(obj, property):
obj = obj.fget
else:
return None
def _output_header(title, width=80, char='#'):
full_line = char * width
side_len = (width - len(title) - 2) // 2
adj = '' if len(title) % 2 == 0 else ' '
title_line = '{side} {title}{adj} {side}'.format(side=char * side_len,
title=title,
adj=adj)
return '\n{full_line}\n{title_line}\n{full_line}\n\n'.format(
full_line=full_line, title_line=title_line)
class Docstring(object):
def __init__(self, method_name, method_obj):
self.method_name = method_name
self.method_obj = method_obj
self.raw_doc = method_obj.__doc__ or ''
self.clean_doc = pydoc.getdoc(self.method_obj)
self.doc = NumpyDocString(self.clean_doc)
def __len__(self):
return len(self.raw_doc)
@property
def is_function_or_method(self):
# TODO(py27): remove ismethod
return (inspect.isfunction(self.method_obj)
or inspect.ismethod(self.method_obj))
@property
def source_file_name(self):
fname = inspect.getsourcefile(self.method_obj)
if fname:
fname = os.path.relpath(fname, BASE_PATH)
return fname
@property
def source_file_def_line(self):
try:
return inspect.getsourcelines(self.method_obj)[-1]
except OSError:
pass
@property
def github_url(self):
url = 'https://github.com/pandas-dev/pandas/blob/master/'
url += '{}#L{}'.format(self.source_file_name,
self.source_file_def_line)
return url
@property
def start_blank_lines(self):
i = None
if self.raw_doc:
for i, row in enumerate(self.raw_doc.split('\n')):
if row.strip():
break
return i
@property
def end_blank_lines(self):
i = None
if self.raw_doc:
for i, row in enumerate(reversed(self.raw_doc.split('\n'))):
if row.strip():
break
return i
@property
def double_blank_lines(self):
prev = True
for row in self.raw_doc.split('\n'):
if not prev and not row.strip():
return True
prev = row.strip()
return False
@property
def summary(self):
return ' '.join(self.doc['Summary'])
@property
def num_summary_lines(self):
return len(self.doc['Summary'])
@property
def extended_summary(self):
if not self.doc['Extended Summary'] and len(self.doc['Summary']) > 1:
return ' '.join(self.doc['Summary'])
return ' '.join(self.doc['Extended Summary'])
@property
def needs_summary(self):
return not (bool(self.summary) and bool(self.extended_summary))
@property
def doc_parameters(self):
return collections.OrderedDict((name, (type_, ''.join(desc)))
for name, type_, desc
in self.doc['Parameters'])
@property
def signature_parameters(self):
if inspect.isclass(self.method_obj):
if hasattr(self.method_obj, '_accessors') and (
self.method_name.split('.')[-1] in
self.method_obj._accessors):
# accessor classes have a signature but don't want to show this
return tuple()
try:
sig = signature(self.method_obj)
except (TypeError, ValueError):
# Some objects, mainly in C extensions do not support introspection
# of the signature
return tuple()
params = sig.args
if sig.varargs:
params.append("*" + sig.varargs)
if sig.keywords:
params.append("**" + sig.keywords)
params = tuple(params)
if params and params[0] in ('self', 'cls'):
return params[1:]
return params
@property
def parameter_mismatches(self):
errs = []
signature_params = self.signature_parameters
doc_params = tuple(self.doc_parameters)
missing = set(signature_params) - set(doc_params)
if missing:
errs.append(
'Parameters {} not documented'.format(pprint_thing(missing)))
extra = set(doc_params) - set(signature_params)
if extra:
errs.append('Unknown parameters {}'.format(pprint_thing(extra)))
if (not missing and not extra and signature_params != doc_params
and not (not signature_params and not doc_params)):
errs.append('Wrong parameters order. ' +
'Actual: {!r}. '.format(signature_params) +
'Documented: {!r}'.format(doc_params))
return errs
@property
def correct_parameters(self):
return not bool(self.parameter_mismatches)
def parameter_type(self, param):
return self.doc_parameters[param][0]
def parameter_desc(self, param):
desc = self.doc_parameters[param][1]
# Find and strip out any sphinx directives
for directive in DIRECTIVES:
full_directive = '.. {}'.format(directive)
if full_directive in desc:
# Only retain any description before the directive
desc = desc[:desc.index(full_directive)]
return desc
@property
def see_also(self):
return collections.OrderedDict((name, ''.join(desc))
for name, desc, _
in self.doc['See Also'])
@property
def examples(self):
return self.doc['Examples']
@property
def returns(self):
return self.doc['Returns']
@property
def yields(self):
return self.doc['Yields']
@property
def method_source(self):
return inspect.getsource(self.method_obj)
@property
def first_line_ends_in_dot(self):
if self.doc:
return self.doc.split('\n')[0][-1] == '.'
@property
def deprecated(self):
pattern = re.compile('.. deprecated:: ')
return (self.method_name.startswith('pandas.Panel') or
bool(pattern.search(self.summary)) or
bool(pattern.search(self.extended_summary)))
@property
def mentioned_private_classes(self):
return [klass for klass in PRIVATE_CLASSES if klass in self.raw_doc]
@property
def examples_errors(self):
flags = doctest.NORMALIZE_WHITESPACE | doctest.IGNORE_EXCEPTION_DETAIL
finder = doctest.DocTestFinder()
runner = doctest.DocTestRunner(optionflags=flags)
context = {'np': numpy, 'pd': pandas}
error_msgs = ''
for test in finder.find(self.raw_doc, self.method_name, globs=context):
f = StringIO()
runner.run(test, out=f.write)
error_msgs += f.getvalue()
return error_msgs
def get_api_items():
api_fname = os.path.join(BASE_PATH, 'doc', 'source', 'api.rst')
previous_line = current_section = current_subsection = ''
position = None
with open(api_fname) as f:
for line in f:
line = line.strip()
if len(line) == len(previous_line):
if set(line) == set('-'):
current_section = previous_line
continue
if set(line) == set('~'):
current_subsection = previous_line
continue
if line.startswith('.. currentmodule::'):
current_module = line.replace('.. currentmodule::', '').strip()
continue
if line == '.. autosummary::':
position = 'autosummary'
continue
if position == 'autosummary':
if line == '':
position = 'items'
continue
if position == 'items':
if line == '':
position = None
continue
item = line.strip()
func = importlib.import_module(current_module)
for part in item.split('.'):
func = getattr(func, part)
yield ('.'.join([current_module, item]), func,
current_section, current_subsection)
previous_line = line
def _csv_row(func_name, func_obj, section, subsection, in_api, seen={}):
obj_type = type(func_obj).__name__
original_callable = _to_original_callable(func_obj)
if original_callable is None:
return [func_name, obj_type] + [''] * 12, ''
else:
doc = Docstring(func_name, original_callable)
key = doc.source_file_name, doc.source_file_def_line
shared_code = seen.get(key, '')
return [func_name,
obj_type,
in_api,
int(doc.deprecated),
section,
subsection,
doc.source_file_name,
doc.source_file_def_line,
doc.github_url,
int(bool(doc.summary)),
int(bool(doc.extended_summary)),
int(doc.correct_parameters),
int(bool(doc.examples)),
shared_code], key
def validate_all():
writer = csv.writer(sys.stdout)
cols = ('Function or method',
'Type',
'In API doc',
'Is deprecated',
'Section',
'Subsection',
'File',
'Code line',
'GitHub link',
'Has summary',
'Has extended summary',
'Parameters ok',
'Has examples',
'Shared code with')
writer.writerow(cols)
seen = {}
api_items = list(get_api_items())
for func_name, func, section, subsection in api_items:
row, key = _csv_row(func_name, func, section, subsection,
in_api=1, seen=seen)
seen[key] = func_name
writer.writerow(row)
api_item_names = set(list(zip(*api_items))[0])
for class_ in (pandas.Series, pandas.DataFrame, pandas.Panel):
for member in inspect.getmembers(class_):
func_name = 'pandas.{}.{}'.format(class_.__name__, member[0])
if (not member[0].startswith('_') and
func_name not in api_item_names):
func = _load_obj(func_name)
row, key = _csv_row(func_name, func, section='', subsection='',
in_api=0)
writer.writerow(row)
return 0
def validate_one(func_name):
"""
Validate the docstring for the given func_name
Parameters
----------
func_name : function
Function whose docstring will be evaluated
Returns
-------
int
The number of errors found in the `func_name` docstring
"""
func_obj = _load_obj(func_name)
doc = Docstring(func_name, func_obj)
sys.stderr.write(_output_header('Docstring ({})'.format(func_name)))
sys.stderr.write('{}\n'.format(doc.clean_doc))
errs = []
wrns = []
if doc.start_blank_lines != 1:
errs.append('Docstring text (summary) should start in the line '
'immediately after the opening quotes (not in the same '
'line, or leaving a blank line in between)')
if doc.end_blank_lines != 1:
errs.append('Closing quotes should be placed in the line after '
'the last text in the docstring (do not close the '
'quotes in the same line as the text, or leave a '
'blank line between the last text and the quotes)')
if doc.double_blank_lines:
errs.append('Use only one blank line to separate sections or '
'paragraphs')
if not doc.summary:
errs.append('No summary found (a short summary in a single line '
'should be present at the beginning of the docstring)')
else:
if not doc.summary[0].isupper():
errs.append('Summary does not start with a capital letter')
if doc.summary[-1] != '.':
errs.append('Summary does not end with a period')
if (doc.is_function_or_method and
doc.summary.split(' ')[0][-1] == 's'):
errs.append('Summary must start with infinitive verb, '
'not third person (e.g. use "Generate" instead of '
'"Generates")')
if doc.num_summary_lines > 1:
errs.append("Summary should fit in a single line.")
if not doc.extended_summary:
wrns.append('No extended summary found')
param_errs = doc.parameter_mismatches
for param in doc.doc_parameters:
if not param.startswith("*"): # Check can ignore var / kwargs
if not doc.parameter_type(param):
param_errs.append('Parameter "{}" has no type'.format(param))
else:
if doc.parameter_type(param)[-1] == '.':
param_errs.append('Parameter "{}" type should '
'not finish with "."'.format(param))
if not doc.parameter_desc(param):
param_errs.append('Parameter "{}" '
'has no description'.format(param))
else:
if not doc.parameter_desc(param)[0].isupper():
param_errs.append('Parameter "{}" description '
'should start with a '
'capital letter'.format(param))
if doc.parameter_desc(param)[-1] != '.':
param_errs.append('Parameter "{}" description '
'should finish with "."'.format(param))
if param_errs:
errs.append('Errors in parameters section')
for param_err in param_errs:
errs.append('\t{}'.format(param_err))
if doc.is_function_or_method:
if not doc.returns and "return" in doc.method_source:
errs.append('No Returns section found')
if not doc.yields and "yield" in doc.method_source:
errs.append('No Yields section found')
mentioned_errs = doc.mentioned_private_classes
if mentioned_errs:
errs.append('Private classes ({}) should not be mentioned in public '
'docstring.'.format(mentioned_errs))
if not doc.see_also:
wrns.append('See Also section not found')
else:
for rel_name, rel_desc in doc.see_also.items():
if not rel_desc:
errs.append('Missing description for '
'See Also "{}" reference'.format(rel_name))
for line in doc.raw_doc.splitlines():
if re.match("^ *\t", line):
errs.append('Tabs found at the start of line "{}", '
'please use whitespace only'.format(line.lstrip()))
examples_errs = ''
if not doc.examples:
wrns.append('No examples section found')
else:
examples_errs = doc.examples_errors
if examples_errs:
errs.append('Examples do not pass tests')
sys.stderr.write(_output_header('Validation'))
if errs:
sys.stderr.write('Errors found:\n')
for err in errs:
sys.stderr.write('\t{}\n'.format(err))
if wrns:
sys.stderr.write('Warnings found:\n')
for wrn in wrns:
sys.stderr.write('\t{}\n'.format(wrn))
if not errs:
sys.stderr.write('Docstring for "{}" correct. :)\n'.format(func_name))
if examples_errs:
sys.stderr.write(_output_header('Doctests'))
sys.stderr.write(examples_errs)
return len(errs)
def main(function):
    """Validate a single docstring when *function* is given, otherwise
    validate every docstring listed in the API documentation.

    Returns the value used as the process exit status (number of errors
    for a single validation, 0 for the full CSV run).
    """
    if function is None:
        return validate_all()
    return validate_one(function)
if __name__ == '__main__':
    # Command-line interface: one optional positional argument names the
    # docstring to validate; omitting it validates the entire API.
    argparser = argparse.ArgumentParser(
        description='validate pandas docstrings')
    argparser.add_argument('function',
                           nargs='?',
                           default=None,
                           help=('function or method to validate '
                                 '(e.g. pandas.DataFrame.head) '
                                 'if not provided, all docstrings '
                                 'are validated'))
    args = argparser.parse_args()
    # Exit status is main()'s return value (error count; 0 == success).
    sys.exit(main(args.function))
| 33.713775
| 79
| 0.566699
|
import os
import sys
import csv
import re
import functools
import collections
import argparse
import pydoc
import inspect
import importlib
import doctest
try:
from io import StringIO
except ImportError:
from cStringIO import StringIO
import numpy
BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(BASE_PATH))
import pandas
from pandas.compat import signature
sys.path.insert(1, os.path.join(BASE_PATH, 'doc', 'sphinxext'))
from numpydoc.docscrape import NumpyDocString
from pandas.io.formats.printing import pprint_thing
# Internal base classes that must never be mentioned in public docstrings.
PRIVATE_CLASSES = ['NDFrame', 'IndexOpsMixin']
# Sphinx directives whose text is stripped from parameter descriptions
# before validating them.
DIRECTIVES = ['versionadded', 'versionchanged', 'deprecated']
def _load_obj(obj_name):
for maxsplit in range(1, obj_name.count('.') + 1):
func_name_split = obj_name.rsplit('.', maxsplit)
module = func_name_split[0]
func_parts = func_name_split[1:]
try:
obj = importlib.import_module(module)
except ImportError:
pass
else:
continue
if 'module' not in locals():
raise ImportError('No module can be imported '
'from "{}"'.format(obj_name))
for part in func_parts:
obj = getattr(obj, part)
return obj
def _to_original_callable(obj):
while True:
if inspect.isfunction(obj) or inspect.isclass(obj):
f = inspect.getfile(obj)
if f.startswith('<') and f.endswith('>'):
return None
return obj
if inspect.ismethod(obj):
obj = obj.__func__
elif isinstance(obj, functools.partial):
obj = obj.func
elif isinstance(obj, property):
obj = obj.fget
else:
return None
def _output_header(title, width=80, char='#'):
full_line = char * width
side_len = (width - len(title) - 2) // 2
adj = '' if len(title) % 2 == 0 else ' '
title_line = '{side} {title}{adj} {side}'.format(side=char * side_len,
title=title,
adj=adj)
return '\n{full_line}\n{title_line}\n{full_line}\n\n'.format(
full_line=full_line, title_line=title_line)
class Docstring(object):
    """Wrap one object's docstring and expose the pieces the validation
    code below needs (summary, parameters, sections, doctest results).

    Parsing is delegated to numpydoc's ``NumpyDocString``.
    """
    def __init__(self, method_name, method_obj):
        # Full dotted name, e.g. "pandas.DataFrame.head".
        self.method_name = method_name
        self.method_obj = method_obj
        # __doc__ is None for undocumented objects; normalize to ''.
        self.raw_doc = method_obj.__doc__ or ''
        # getdoc() dedents and strips the docstring.
        self.clean_doc = pydoc.getdoc(self.method_obj)
        self.doc = NumpyDocString(self.clean_doc)

    def __len__(self):
        """Length of the raw docstring, in characters."""
        return len(self.raw_doc)

    @property
    def is_function_or_method(self):
        """True when the wrapped object is a plain function or a method."""
        return (inspect.isfunction(self.method_obj)
                or inspect.ismethod(self.method_obj))

    @property
    def source_file_name(self):
        """Defining source file path, relative to the repository root."""
        fname = inspect.getsourcefile(self.method_obj)
        if fname:
            fname = os.path.relpath(fname, BASE_PATH)
        return fname

    @property
    def source_file_def_line(self):
        """Line number where the object is defined (None if unavailable)."""
        try:
            # getsourcelines() returns (source_lines, starting_line_number).
            return inspect.getsourcelines(self.method_obj)[-1]
        except OSError:
            pass

    @property
    def github_url(self):
        """GitHub URL pointing at the definition on the master branch."""
        url = 'https://github.com/pandas-dev/pandas/blob/master/'
        url += '{}#L{}'.format(self.source_file_name,
                               self.source_file_def_line)
        return url

    @property
    def start_blank_lines(self):
        """Index of the first non-blank docstring line (None if empty)."""
        i = None
        if self.raw_doc:
            for i, row in enumerate(self.raw_doc.split('\n')):
                if row.strip():
                    break
        return i

    @property
    def end_blank_lines(self):
        """Index of the first non-blank line counted from the end
        (None when the docstring is empty)."""
        i = None
        if self.raw_doc:
            for i, row in enumerate(reversed(self.raw_doc.split('\n'))):
                if row.strip():
                    break
        return i

    @property
    def double_blank_lines(self):
        """True when two or more consecutive blank lines appear."""
        prev = True
        for row in self.raw_doc.split('\n'):
            if not prev and not row.strip():
                return True
            prev = row.strip()
        return False

    @property
    def summary(self):
        """The short summary, joined into a single string."""
        return ' '.join(self.doc['Summary'])

    @property
    def num_summary_lines(self):
        """Number of physical lines the summary occupies."""
        return len(self.doc['Summary'])

    @property
    def extended_summary(self):
        """The extended summary; falls back to a multi-line summary."""
        if not self.doc['Extended Summary'] and len(self.doc['Summary']) > 1:
            return ' '.join(self.doc['Summary'])
        return ' '.join(self.doc['Extended Summary'])

    @property
    def needs_summary(self):
        """True when the summary or the extended summary is missing."""
        return not (bool(self.summary) and bool(self.extended_summary))

    @property
    def doc_parameters(self):
        """Documented parameters as OrderedDict {name: (type, description)}."""
        return collections.OrderedDict((name, (type_, ''.join(desc)))
                                       for name, type_, desc
                                       in self.doc['Parameters'])

    @property
    def signature_parameters(self):
        """Parameter names from the real signature, 'self'/'cls' dropped;
        an empty tuple when the signature cannot be introspected."""
        if inspect.isclass(self.method_obj):
            # Accessor entry points (e.g. Series.str) expose no signature.
            if hasattr(self.method_obj, '_accessors') and (
                    self.method_name.split('.')[-1] in
                    self.method_obj._accessors):
                return tuple()
        try:
            sig = signature(self.method_obj)
        except (TypeError, ValueError):
            # Some objects, mainly in C extensions do not support introspection
            # of the signature
            return tuple()
        params = sig.args
        if sig.varargs:
            params.append("*" + sig.varargs)
        if sig.keywords:
            params.append("**" + sig.keywords)
        params = tuple(params)
        if params and params[0] in ('self', 'cls'):
            return params[1:]
        return params

    @property
    def parameter_mismatches(self):
        """Error messages for signature/documentation mismatches:
        missing, unknown, and wrongly ordered parameters."""
        errs = []
        signature_params = self.signature_parameters
        doc_params = tuple(self.doc_parameters)
        missing = set(signature_params) - set(doc_params)
        if missing:
            errs.append(
                'Parameters {} not documented'.format(pprint_thing(missing)))
        extra = set(doc_params) - set(signature_params)
        if extra:
            errs.append('Unknown parameters {}'.format(pprint_thing(extra)))
        if (not missing and not extra and signature_params != doc_params
                and not (not signature_params and not doc_params)):
            errs.append('Wrong parameters order. ' +
                        'Actual: {!r}. '.format(signature_params) +
                        'Documented: {!r}'.format(doc_params))
        return errs

    @property
    def correct_parameters(self):
        """True when documented and actual parameters fully agree."""
        return not bool(self.parameter_mismatches)

    def parameter_type(self, param):
        """Documented type string of *param*."""
        return self.doc_parameters[param][0]

    def parameter_desc(self, param):
        """Documented description of *param*, with sphinx directives
        (versionadded & co.) stripped off."""
        desc = self.doc_parameters[param][1]
        # Find and strip out any sphinx directives
        for directive in DIRECTIVES:
            full_directive = '.. {}'.format(directive)
            if full_directive in desc:
                # Only retain any description before the directive
                desc = desc[:desc.index(full_directive)]
        return desc

    @property
    def see_also(self):
        """See Also entries as OrderedDict {name: description}."""
        return collections.OrderedDict((name, ''.join(desc))
                                       for name, desc, _
                                       in self.doc['See Also'])

    @property
    def examples(self):
        """Raw lines of the Examples section."""
        return self.doc['Examples']

    @property
    def returns(self):
        """Raw content of the Returns section."""
        return self.doc['Returns']

    @property
    def yields(self):
        """Raw content of the Yields section."""
        return self.doc['Yields']

    @property
    def method_source(self):
        """Source code of the wrapped object."""
        return inspect.getsource(self.method_obj)

    @property
    def first_line_ends_in_dot(self):
        # NOTE(review): self.doc is a NumpyDocString, not a str, so the
        # .split() call below would raise if this property were ever used;
        # it appears unused in this module (self.raw_doc was probably
        # intended) — confirm before relying on it.
        if self.doc:
            return self.doc.split('\n')[0][-1] == '.'

    @property
    def deprecated(self):
        """True for pandas.Panel members and for docstrings carrying a
        '.. deprecated::' directive in the (extended) summary."""
        pattern = re.compile('.. deprecated:: ')
        return (self.method_name.startswith('pandas.Panel') or
                bool(pattern.search(self.summary)) or
                bool(pattern.search(self.extended_summary)))

    @property
    def mentioned_private_classes(self):
        """Private class names (see PRIVATE_CLASSES) found in the docstring."""
        return [klass for klass in PRIVATE_CLASSES if klass in self.raw_doc]

    @property
    def examples_errors(self):
        """Run the docstring examples as doctests and return the failure
        output ('' when everything passes)."""
        flags = doctest.NORMALIZE_WHITESPACE | doctest.IGNORE_EXCEPTION_DETAIL
        finder = doctest.DocTestFinder()
        runner = doctest.DocTestRunner(optionflags=flags)
        # Examples conventionally assume the np/pd aliases are available.
        context = {'np': numpy, 'pd': pandas}
        error_msgs = ''
        for test in finder.find(self.raw_doc, self.method_name, globs=context):
            f = StringIO()
            runner.run(test, out=f.write)
            error_msgs += f.getvalue()
        return error_msgs
def get_api_items():
    """Parse doc/source/api.rst and yield every documented API item.

    Yields
    ------
    tuple of (str, object, str, str)
        Full dotted name, the imported object, and the section and
        subsection headings under which the item is listed.
    """
    api_fname = os.path.join(BASE_PATH, 'doc', 'source', 'api.rst')
    previous_line = current_section = current_subsection = ''
    # Parser state: None (outside), 'autosummary' (directive seen),
    # 'items' (inside an autosummary item list).
    position = None
    with open(api_fname) as f:
        for line in f:
            line = line.strip()
            # rst headings underline the title with a row of '-' or '~'
            # of the same length as the title on the previous line.
            if len(line) == len(previous_line):
                if set(line) == set('-'):
                    current_section = previous_line
                    continue
                if set(line) == set('~'):
                    current_subsection = previous_line
                    continue
            if line.startswith('.. currentmodule::'):
                current_module = line.replace('.. currentmodule::', '').strip()
                continue
            if line == '.. autosummary::':
                position = 'autosummary'
                continue
            if position == 'autosummary':
                # A blank line ends the directive options and starts items.
                if line == '':
                    position = 'items'
                    continue
            if position == 'items':
                # A blank line terminates the item list.
                if line == '':
                    position = None
                    continue
                item = line.strip()
                # Resolve the item relative to the current module.
                func = importlib.import_module(current_module)
                for part in item.split('.'):
                    func = getattr(func, part)
                yield ('.'.join([current_module, item]), func,
                       current_section, current_subsection)
            previous_line = line
def _csv_row(func_name, func_obj, section, subsection, in_api, seen=None):
    """Build one row of the validate_all() CSV report.

    Parameters
    ----------
    func_name : str
        Full dotted name of the object (e.g. "pandas.DataFrame.head").
    func_obj : object
        The object itself, as imported.
    section, subsection : str
        api.rst headings under which the object is listed.
    in_api : int
        1 when the object appears in api.rst, 0 otherwise.
    seen : dict, optional
        Maps (source file, definition line) to the name of the first
        object found at that location, used to report shared code.

    Returns
    -------
    tuple of (list, tuple or str)
        The CSV row values and the (file, line) key identifying the
        source location ('' when the source is not inspectable).
    """
    # A None sentinel replaces the original mutable default (seen={}),
    # which was shared between calls; behavior is otherwise unchanged
    # because this function never mutates `seen`.
    if seen is None:
        seen = {}
    obj_type = type(func_obj).__name__
    original_callable = _to_original_callable(func_obj)
    if original_callable is None:
        # No inspectable source: emit name/type plus 12 blank columns.
        return [func_name, obj_type] + [''] * 12, ''
    else:
        doc = Docstring(func_name, original_callable)
        key = doc.source_file_name, doc.source_file_def_line
        shared_code = seen.get(key, '')
        return [func_name,
                obj_type,
                in_api,
                int(doc.deprecated),
                section,
                subsection,
                doc.source_file_name,
                doc.source_file_def_line,
                doc.github_url,
                int(bool(doc.summary)),
                int(bool(doc.extended_summary)),
                int(doc.correct_parameters),
                int(bool(doc.examples)),
                shared_code], key
def validate_all():
    """Write a CSV report on stdout covering every docstring in the API.

    Rows are emitted for all items listed in api.rst, plus any public
    Series/DataFrame/Panel members that api.rst omits.  Returns 0 (the
    process exit status).
    """
    writer = csv.writer(sys.stdout)
    cols = ('Function or method',
            'Type',
            'In API doc',
            'Is deprecated',
            'Section',
            'Subsection',
            'File',
            'Code line',
            'GitHub link',
            'Has summary',
            'Has extended summary',
            'Parameters ok',
            'Has examples',
            'Shared code with')
    writer.writerow(cols)
    # Tracks (file, line) -> first name seen there, so later rows can
    # report which earlier item they share an implementation with.
    seen = {}
    api_items = list(get_api_items())
    for func_name, func, section, subsection in api_items:
        row, key = _csv_row(func_name, func, section, subsection,
                            in_api=1, seen=seen)
        seen[key] = func_name
        writer.writerow(row)
    # Public members of the main classes that api.rst does not list.
    api_item_names = set(list(zip(*api_items))[0])
    for class_ in (pandas.Series, pandas.DataFrame, pandas.Panel):
        for member in inspect.getmembers(class_):
            func_name = 'pandas.{}.{}'.format(class_.__name__, member[0])
            if (not member[0].startswith('_') and
                    func_name not in api_item_names):
                func = _load_obj(func_name)
                # No section information and no shared-code tracking for
                # these extra rows.
                row, key = _csv_row(func_name, func, section='', subsection='',
                                    in_api=0)
                writer.writerow(row)
    return 0
def validate_one(func_name):
    """Validate the docstring of the object named *func_name*.

    The docstring, all errors and warnings, and any doctest failures are
    written to stderr.  Returns the number of errors found (0 means the
    docstring validated cleanly).
    """
    func_obj = _load_obj(func_name)
    doc = Docstring(func_name, func_obj)
    sys.stderr.write(_output_header('Docstring ({})'.format(func_name)))
    sys.stderr.write('{}\n'.format(doc.clean_doc))
    errs = []
    wrns = []
    # --- whitespace / layout checks -----------------------------------
    if doc.start_blank_lines != 1:
        errs.append('Docstring text (summary) should start in the line '
                    'immediately after the opening quotes (not in the same '
                    'line, or leaving a blank line in between)')
    if doc.end_blank_lines != 1:
        errs.append('Closing quotes should be placed in the line after '
                    'the last text in the docstring (do not close the '
                    'quotes in the same line as the text, or leave a '
                    'blank line between the last text and the quotes)')
    if doc.double_blank_lines:
        errs.append('Use only one blank line to separate sections or '
                    'paragraphs')
    # --- summary checks -----------------------------------------------
    if not doc.summary:
        errs.append('No summary found (a short summary in a single line '
                    'should be present at the beginning of the docstring)')
    else:
        if not doc.summary[0].isupper():
            errs.append('Summary does not start with a capital letter')
        if doc.summary[-1] != '.':
            errs.append('Summary does not end with a period')
        # Heuristic: a first word ending in 's' suggests third person.
        if (doc.is_function_or_method and
                doc.summary.split(' ')[0][-1] == 's'):
            errs.append('Summary must start with infinitive verb, '
                        'not third person (e.g. use "Generate" instead of '
                        '"Generates")')
        if doc.num_summary_lines > 1:
            errs.append("Summary should fit in a single line.")
    if not doc.extended_summary:
        wrns.append('No extended summary found')
    # --- parameter checks ---------------------------------------------
    param_errs = doc.parameter_mismatches
    for param in doc.doc_parameters:
        if not param.startswith("*"):  # Check can ignore var / kwargs
            if not doc.parameter_type(param):
                param_errs.append('Parameter "{}" has no type'.format(param))
            else:
                if doc.parameter_type(param)[-1] == '.':
                    param_errs.append('Parameter "{}" type should '
                                      'not finish with "."'.format(param))
            if not doc.parameter_desc(param):
                param_errs.append('Parameter "{}" '
                                  'has no description'.format(param))
            else:
                if not doc.parameter_desc(param)[0].isupper():
                    param_errs.append('Parameter "{}" description '
                                      'should start with a '
                                      'capital letter'.format(param))
                if doc.parameter_desc(param)[-1] != '.':
                    param_errs.append('Parameter "{}" description '
                                      'should finish with "."'.format(param))
    if param_errs:
        errs.append('Errors in parameters section')
        for param_err in param_errs:
            errs.append('\t{}'.format(param_err))
    # --- section presence checks --------------------------------------
    if doc.is_function_or_method:
        # Source-text heuristics: a "return"/"yield" in the body implies
        # the corresponding docstring section should exist.
        if not doc.returns and "return" in doc.method_source:
            errs.append('No Returns section found')
        if not doc.yields and "yield" in doc.method_source:
            errs.append('No Yields section found')
    mentioned_errs = doc.mentioned_private_classes
    if mentioned_errs:
        errs.append('Private classes ({}) should not be mentioned in public '
                    'docstring.'.format(mentioned_errs))
    if not doc.see_also:
        wrns.append('See Also section not found')
    else:
        for rel_name, rel_desc in doc.see_also.items():
            if not rel_desc:
                errs.append('Missing description for '
                            'See Also "{}" reference'.format(rel_name))
    for line in doc.raw_doc.splitlines():
        if re.match("^ *\t", line):
            errs.append('Tabs found at the start of line "{}", '
                        'please use whitespace only'.format(line.lstrip()))
    # --- examples (doctest) checks ------------------------------------
    examples_errs = ''
    if not doc.examples:
        wrns.append('No examples section found')
    else:
        examples_errs = doc.examples_errors
        if examples_errs:
            errs.append('Examples do not pass tests')
    # --- report -------------------------------------------------------
    sys.stderr.write(_output_header('Validation'))
    if errs:
        sys.stderr.write('Errors found:\n')
        for err in errs:
            sys.stderr.write('\t{}\n'.format(err))
    if wrns:
        sys.stderr.write('Warnings found:\n')
        for wrn in wrns:
            sys.stderr.write('\t{}\n'.format(wrn))
    if not errs:
        sys.stderr.write('Docstring for "{}" correct. :)\n'.format(func_name))
    if examples_errs:
        sys.stderr.write(_output_header('Doctests'))
        sys.stderr.write(examples_errs)
    return len(errs)
def main(function):
    """Validate a single docstring when *function* is given, otherwise
    validate every docstring listed in the API documentation.

    Returns the value used as the process exit status (number of errors
    for a single validation, 0 for the full CSV run).
    """
    if function is None:
        return validate_all()
    return validate_one(function)
if __name__ == '__main__':
    # Command-line interface: one optional positional argument names the
    # docstring to validate; omitting it validates the entire API.
    argparser = argparse.ArgumentParser(
        description='validate pandas docstrings')
    argparser.add_argument('function',
                           nargs='?',
                           default=None,
                           help=('function or method to validate '
                                 '(e.g. pandas.DataFrame.head) '
                                 'if not provided, all docstrings '
                                 'are validated'))
    args = argparser.parse_args()
    # Exit status is main()'s return value (error count; 0 == success).
    sys.exit(main(args.function))
| true
| true
|
790a631401282bc2c74fe83c59baab22405ceddc
| 409
|
py
|
Python
|
oeis/tribonacci.py
|
reidhoch/oeis-seq
|
5ee5fa0743cbe8fbcdb38c1c085fee2c2f96f7e0
|
[
"MIT"
] | null | null | null |
oeis/tribonacci.py
|
reidhoch/oeis-seq
|
5ee5fa0743cbe8fbcdb38c1c085fee2c2f96f7e0
|
[
"MIT"
] | 37
|
2021-06-10T14:48:48.000Z
|
2022-03-29T14:01:57.000Z
|
oeis/tribonacci.py
|
reidhoch/oeis-seq
|
5ee5fa0743cbe8fbcdb38c1c085fee2c2f96f7e0
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from typing import Iterable
from .registry import registry
@registry.register("A000073")
def tribonacci() -> Iterable[int]:
    """Generate the tribonacci numbers (OEIS A000073) indefinitely.

    The sequence starts 0, 0, 1 and each further term is the sum of the
    three preceding terms: 0, 0, 1, 1, 2, 4, 7, 13, 24, ...
    """
    a, b, c = 0, 0, 1
    yield a
    yield b
    yield c
    while True:
        # Slide the window of the last three terms forward by one.
        a, b, c = b, c, a + b + c
        yield c
| 20.45
| 34
| 0.581907
|
from typing import Iterable
from .registry import registry
@registry.register("A000073")
def tribonacci() -> Iterable[int]:
    """Yield the tribonacci numbers (OEIS A000073) indefinitely.

    Starts 0, 0, 1; every further term is the sum of the previous three.
    """
    yield 0
    yield 0
    yield 1
    p3: int = 0  # T(n-3)
    p2: int = 0  # T(n-2)
    p1: int = 1  # T(n-1)
    while True:
        curr: int = p1 + p2 + p3
        yield curr
        # Shift the three-term window forward by one position.
        p1, p2, p3 = curr, p1, p2
| true
| true
|
790a636642b90802ff3afe4d29661ae23d6f172c
| 2,181
|
py
|
Python
|
axiom/plugins/offeringcmd.py
|
jonathanj/mantissa
|
53e5502aba23ce99be78b27f923a276593033fe8
|
[
"MIT"
] | 6
|
2016-02-17T15:04:53.000Z
|
2021-08-20T09:44:10.000Z
|
axiom/plugins/offeringcmd.py
|
jonathanj/mantissa
|
53e5502aba23ce99be78b27f923a276593033fe8
|
[
"MIT"
] | 62
|
2015-02-04T23:40:55.000Z
|
2021-02-18T19:56:02.000Z
|
axiom/plugins/offeringcmd.py
|
jonathanj/mantissa
|
53e5502aba23ce99be78b27f923a276593033fe8
|
[
"MIT"
] | 8
|
2015-11-15T17:26:42.000Z
|
2020-12-02T06:36:52.000Z
|
# -*- test-case-name: xmantissa.test.test_offering -*-
# Copyright 2008 Divmod, Inc. See LICENSE file for details
"""
Axiomatic commands for manipulating Mantissa offerings.
"""
from twisted.python import usage
from axiom.scripts import axiomatic
from xmantissa import offering, publicweb
class Install(axiomatic.AxiomaticSubCommand):
    """
    Axiomatic subcommand that installs a plugin-provided offering, chosen
    by name, onto the store.
    """
    synopsis = "<offering>"
    def parseArgs(self, offering):
        """
        Collect the name of the offering to install.
        """
        # NOTE(review): the parameter shadows the `offering` module
        # within this method; postOptions below uses the module.
        self["offering"] = self.decodeCommandLine(offering)
    def postOptions(self):
        """
        Look up the named offering among the available plugins and
        install it on the store.
        """
        for o in offering.getOfferings():
            if o.name == self["offering"]:
                offering.installOffering(self.store, o, None)
                break
        else:
            # for/else: reached only when no offering matched the name.
            raise usage.UsageError("No such offering")
class List(axiomatic.AxiomaticSubCommand):
    """
    Axiomatic subcommand that prints each available offering with its
    description, one per line.
    """
    def postOptions(self):
        # Python 2 print statement: emits "name: description" per offering.
        for o in offering.getOfferings():
            print "%s: %s" % (o.name, o.description)
class SetFrontPage(axiomatic.AxiomaticSubCommand):
    """
    Axiomatic subcommand which points the site front page at the
    application of an installed offering.
    """
    def parseArgs(self, offering):
        """
        Take the name of the installed offering to use.
        """
        self["name"] = self.decodeCommandLine(offering)

    def postOptions(self):
        """
        Look up the named InstalledOffering on the store and make its
        application the default front-page application.
        """
        installed = self.store.findFirst(
            offering.InstalledOffering,
            offering.InstalledOffering.offeringName == self["name"])
        if installed is None:
            raise usage.UsageError(
                "No offering of that name is installed.")
        frontPage = self.store.findUnique(publicweb.FrontPage)
        frontPage.defaultApplication = installed.application
class OfferingCommand(axiomatic.AxiomaticCommand):
    """
    "axiomatic offering" command grouping the offering-related
    subcommands: install, list, and frontpage.
    """
    name = "offering"
    description = "View and accept the offerings of puny mortals."
    # (name, shortcut, options class, description) tuples consumed by
    # twisted.python.usage.
    subCommands = [
        ("install", None, Install, "Install an offering."),
        ("list", None, List, "List available offerings."),
        ("frontpage", None, SetFrontPage,
         "Select an application for the front page."),
    ]
    def getStore(self):
        """
        Return the store operated on, delegated to the parent command.
        """
        return self.parent.getStore()
| 28.697368
| 69
| 0.624484
|
"""
Axiomatic commands for manipulating Mantissa offerings.
"""
from twisted.python import usage
from axiom.scripts import axiomatic
from xmantissa import offering, publicweb
class Install(axiomatic.AxiomaticSubCommand):
    """
    Axiomatic subcommand that installs a plugin-provided offering, chosen
    by name, onto the store.
    """
    synopsis = "<offering>"
    def parseArgs(self, offering):
        """
        Collect the name of the offering to install.
        """
        # NOTE(review): the parameter shadows the `offering` module
        # within this method; postOptions below uses the module.
        self["offering"] = self.decodeCommandLine(offering)
    def postOptions(self):
        """
        Look up the named offering among the available plugins and
        install it on the store.
        """
        for o in offering.getOfferings():
            if o.name == self["offering"]:
                offering.installOffering(self.store, o, None)
                break
        else:
            # for/else: reached only when no offering matched the name.
            raise usage.UsageError("No such offering")
class List(axiomatic.AxiomaticSubCommand):
    """
    Axiomatic subcommand that prints each available offering with its
    description, one per line.
    """
    def postOptions(self):
        # Python 2 print statement: emits "name: description" per offering.
        for o in offering.getOfferings():
            print "%s: %s" % (o.name, o.description)
class SetFrontPage(axiomatic.AxiomaticSubCommand):
    """
    Command for selecting the site front page.
    """
    def parseArgs(self, offering):
        """
        Collect an installed offering's name.
        """
        self["name"] = self.decodeCommandLine(offering)
    def postOptions(self):
        """
        Find an installed offering and set the site front page to its
        application's front page.
        """
        # findFirst returns None when no InstalledOffering matches.
        o = self.store.findFirst(
            offering.InstalledOffering,
            (offering.InstalledOffering.offeringName ==
             self["name"]))
        if o is None:
            raise usage.UsageError("No offering of that name"
                                   " is installed.")
        fp = self.store.findUnique(publicweb.FrontPage)
        fp.defaultApplication = o.application
class OfferingCommand(axiomatic.AxiomaticCommand):
    """
    "axiomatic offering" command grouping the offering-related
    subcommands: install, list, and frontpage.
    """
    name = "offering"
    description = "View and accept the offerings of puny mortals."
    # (name, shortcut, options class, description) tuples consumed by
    # twisted.python.usage.
    subCommands = [
        ("install", None, Install, "Install an offering."),
        ("list", None, List, "List available offerings."),
        ("frontpage", None, SetFrontPage,
         "Select an application for the front page."),
    ]
    def getStore(self):
        """
        Return the store operated on, delegated to the parent command.
        """
        return self.parent.getStore()
| false
| true
|
790a6455010972eec2db6ca0b66815f5563e0259
| 11,669
|
py
|
Python
|
tests/BlazingSQLTest/EndToEndTests/allE2ETest.py
|
jglaser/blazingsql
|
072dc007cf3e7ac9bdafc1ed8318a8f7b24a2657
|
[
"Apache-2.0"
] | null | null | null |
tests/BlazingSQLTest/EndToEndTests/allE2ETest.py
|
jglaser/blazingsql
|
072dc007cf3e7ac9bdafc1ed8318a8f7b24a2657
|
[
"Apache-2.0"
] | null | null | null |
tests/BlazingSQLTest/EndToEndTests/allE2ETest.py
|
jglaser/blazingsql
|
072dc007cf3e7ac9bdafc1ed8318a8f7b24a2657
|
[
"Apache-2.0"
] | null | null | null |
# from blazingsql import BlazingContext
from Configuration import ExecutionMode
from Configuration import Settings as Settings
# from dask.distributed import Client
from DataBase import createSchema as createSchema
# from EndToEndTests import countDistincTest
from EndToEndTests import (
GroupByWitoutAggregations,
aggregationsWithoutGroupByTest,
bindableAliasTest,
booleanTest,
caseTest,
castTest,
)
from EndToEndTests import coalesceTest as coalesceTest
from EndToEndTests import columnBasisTest as columnBasisTest
from EndToEndTests import (
commonTableExpressionsTest,
concatTest,
countWithoutGroupByTest,
dateTest,
dirTest,
fileSystemGSTest,
fileSystemLocalTest,
fileSystemS3Test,
)
from EndToEndTests import fullOuterJoinsTest as fullOuterJoinsTest
from EndToEndTests import groupByTest as groupByTest
from EndToEndTests import innerJoinsTest as innerJoinsTest
from EndToEndTests import crossJoinsTest as crossJoinsTest
from EndToEndTests import leftOuterJoinsTest as leftOuterJoinsTest
from EndToEndTests import (
likeTest,
literalTest,
# loadDataTest,
nestedQueriesTest,
nonEquiJoinsTest,
)
from EndToEndTests import orderbyTest as orderbyTest
from EndToEndTests import (
predicatesWithNulls,
roundTest,
simpleDistributionTest,
stringTests,
substringTest,
tablesFromPandasTest,
# timestampdiffTest,
timestampTest,
tpchQueriesTest,
)
from EndToEndTests import unaryOpsTest as unaryOpsTest
from EndToEndTests import unifyTablesTest
from EndToEndTests import unionTest as unionTest
from EndToEndTests import useLimitTest
from EndToEndTests import whereClauseTest as whereClauseTest
from EndToEndTests import wildCardTest
from pynvml import nvmlInit
from pyspark.sql import SparkSession
from Runner import runTest
from Utils import Execution, init_context
def main():
    """Run the BlazingSQL end-to-end test suite.

    Reads the run configuration from Settings, optionally prepares Drill
    and Spark reference engines, creates the BlazingContext, and runs
    every test group selected by RunSettings.targetTestGroups (all of
    them when that list is empty).

    Returns
    -------
    tuple of (bool or int, list)
        The saved-log comparison result and its error messages, or
        (True, []) in GENERATOR mode.
    """
    print("**init end2end**")
    Execution.getArgs()
    nvmlInit()
    dir_data_file = Settings.data["TestSettings"]["dataDirectory"]
    nRals = Settings.data["RunSettings"]["nRals"]
    drill = "drill"
    spark = "spark"
    compareResults = True
    if "compare_results" in Settings.data["RunSettings"]:
        compareResults = Settings.data["RunSettings"]["compare_results"]
    # Reference engines are only needed when results will be compared
    # or when generating the reference results.
    if (
        Settings.execution_mode == ExecutionMode.FULL and compareResults == "true"
    ) or Settings.execution_mode == ExecutionMode.GENERATOR:
        # Create Table Drill -----------------------------------------
        from pydrill.client import PyDrill

        drill = PyDrill(host="localhost", port=8047)
        createSchema.init_drill_schema(
            drill, Settings.data["TestSettings"]["dataDirectory"], bool_test=True
        )
        # Create Table Spark -------------------------------------------------
        spark = SparkSession.builder.appName("allE2ETest").getOrCreate()
        createSchema.init_spark_schema(
            spark, Settings.data["TestSettings"]["dataDirectory"]
        )
    # Create Context For BlazingSQL
    bc, dask_client = init_context()
    targetTestGroups = Settings.data["RunSettings"]["targetTestGroups"]
    # An empty targetTestGroups list means the user wants all the tests.
    runAllTests = len(targetTestGroups) == 0
    if runAllTests or ("aggregationsWithoutGroupByTest" in targetTestGroups):
        aggregationsWithoutGroupByTest.main(
            dask_client, drill, dir_data_file, bc, nRals
        )
    if runAllTests or ("coalesceTest" in targetTestGroups):
        coalesceTest.main(
            dask_client, drill, dir_data_file, bc, nRals
        )  # we are not supporting coalesce yet
    if runAllTests or ("columnBasisTest" in targetTestGroups):
        columnBasisTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("commonTableExpressionsTest" in targetTestGroups):
        commonTableExpressionsTest.main(dask_client, drill, dir_data_file, bc, nRals)
    # we are not supporting count distinct yet
    # countDistincTest.main(dask_client, drill, dir_data_file, bc)
    if runAllTests or ("countWithoutGroupByTest" in targetTestGroups):
        countWithoutGroupByTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("dateTest" in targetTestGroups):
        dateTest.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("timestampTest" in targetTestGroups):
        timestampTest.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("fullOuterJoinsTest" in targetTestGroups):
        fullOuterJoinsTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("groupByTest" in targetTestGroups):
        groupByTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("GroupByWitoutAggregations" in targetTestGroups):
        GroupByWitoutAggregations.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("innerJoinsTest" in targetTestGroups):
        innerJoinsTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("crossJoinsTest" in targetTestGroups):
        crossJoinsTest.main(dask_client, spark, dir_data_file, bc, nRals)
    # BUGFIX: the group name was the empty string "", so this test could
    # never be selected individually by name.
    if runAllTests or ("leftOuterJoinsTest" in targetTestGroups):
        leftOuterJoinsTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("nonEquiJoinsTest" in targetTestGroups):
        nonEquiJoinsTest.main(dask_client, drill, dir_data_file, bc, nRals)
    # loadDataTest.main(dask_client, bc) #check this
    if runAllTests or ("nestedQueriesTest" in targetTestGroups):
        nestedQueriesTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("orderbyTest" in targetTestGroups):
        orderbyTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("predicatesWithNulls" in targetTestGroups):
        predicatesWithNulls.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("stringTests" in targetTestGroups):
        stringTests.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("tablesFromPandasTest" in targetTestGroups):
        tablesFromPandasTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("unaryOpsTest" in targetTestGroups):
        unaryOpsTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("unifyTablesTest" in targetTestGroups):
        unifyTablesTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("unionTest" in targetTestGroups):
        unionTest.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("useLimitTest" in targetTestGroups):
        useLimitTest.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("whereClauseTest" in targetTestGroups):
        whereClauseTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("bindableAliasTest" in targetTestGroups):
        bindableAliasTest.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("booleanTest" in targetTestGroups):
        booleanTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("caseTest" in targetTestGroups):
        caseTest.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("castTest" in targetTestGroups):
        castTest.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("concatTest" in targetTestGroups):
        concatTest.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("literalTest" in targetTestGroups):
        literalTest.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("dirTest" in targetTestGroups):
        dirTest.main(dask_client, drill, dir_data_file, bc, nRals)
    # HDFS is not working yet
    # fileSystemHdfsTest.main(dask_client, drill, dir_data_file, bc)
    # HDFS is not working yet
    # mixedFileSystemTest.main(dask_client, drill, dir_data_file, bc)
    if runAllTests or ("likeTest" in targetTestGroups):
        likeTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("simpleDistributionTest" in targetTestGroups):
        simpleDistributionTest.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("substringTest" in targetTestGroups):
        substringTest.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("wildCardTest" in targetTestGroups):
        wildCardTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("tpchQueriesTest" in targetTestGroups):
        tpchQueriesTest.main(dask_client, drill, spark, dir_data_file, bc, nRals)
    if runAllTests or ("roundTest" in targetTestGroups):
        roundTest.main(dask_client, drill, dir_data_file, bc, nRals)
    if runAllTests or ("fileSystemLocalTest" in targetTestGroups):
        fileSystemLocalTest.main(dask_client, drill, dir_data_file, bc, nRals)
    # Cloud filesystem tests need credentials not available on gpuCI.
    if Settings.execution_mode != ExecutionMode.GPUCI:
        if runAllTests or ("fileSystemS3Test" in targetTestGroups):
            fileSystemS3Test.main(dask_client, drill, dir_data_file, bc, nRals)
        if runAllTests or ("fileSystemGSTest" in targetTestGroups):
            fileSystemGSTest.main(dask_client, drill, dir_data_file, bc, nRals)
    # timestampdiffTest.main(dask_client, spark, dir_data_file, bc, nRals)
    if Settings.execution_mode != ExecutionMode.GENERATOR:
        result, error_msgs = runTest.save_log(
            Settings.execution_mode == ExecutionMode.GPUCI
        )
        # Report the largest memory delta observed across all test groups.
        # (renamed from `max`, which shadowed the builtin)
        max_delta = 0
        for entry in Settings.memory_list:
            if entry.delta > max_delta:
                max_delta = entry.delta
        print("MAX DELTA: " + str(max_delta))
        print(
            """***********************************************************
        ********************"""
        )
        for entry in Settings.memory_list:
            print(
                entry.name
                + ":"
                + " Start Mem: "
                + str(entry.start_mem)
                + " End Mem: "
                + str(entry.end_mem)
                + " Diff: "
                + str(entry.delta)
            )
        return result, error_msgs
    return True, []
if __name__ == "__main__":
    import time

    start = time.time()  # in seconds
    result, error_msgs = main()
    if Settings.execution_mode != ExecutionMode.GENERATOR:
        # NOTE kahro william percy mario : here we tell to gpuci there was
        # an error comparing with historic results
        # TODO william kharoly felipe we should try to enable and
        # use this function in the future
        # Forcing result to True makes the failure branch below dead code
        # on purpose (see the TODO above).
        result = True
        if result is False:
            for error_msg in error_msgs:
                print(error_msg)
            # import sys
            end = time.time()  # in seconds
            elapsed = end - start  # in seconds
            time_delta_desc = (
                str(elapsed / 60)
                + " minutes and "
                + str(int(elapsed) % 60)
                + " seconds"
            )
            print(
                "==>> E2E FAILED against previous run, total time was: "
                + time_delta_desc
            )
            # TODO percy kharo willian: uncomment this line
            # when gpuci has all the env vars set
            # return error exit status to the command prompt (shell)
            # sys.exit(1)
| 37.28115
| 88
| 0.68592
|
from Configuration import ExecutionMode
from Configuration import Settings as Settings
from DataBase import createSchema as createSchema
from EndToEndTests import (
GroupByWitoutAggregations,
aggregationsWithoutGroupByTest,
bindableAliasTest,
booleanTest,
caseTest,
castTest,
)
from EndToEndTests import coalesceTest as coalesceTest
from EndToEndTests import columnBasisTest as columnBasisTest
from EndToEndTests import (
commonTableExpressionsTest,
concatTest,
countWithoutGroupByTest,
dateTest,
dirTest,
fileSystemGSTest,
fileSystemLocalTest,
fileSystemS3Test,
)
from EndToEndTests import fullOuterJoinsTest as fullOuterJoinsTest
from EndToEndTests import groupByTest as groupByTest
from EndToEndTests import innerJoinsTest as innerJoinsTest
from EndToEndTests import crossJoinsTest as crossJoinsTest
from EndToEndTests import leftOuterJoinsTest as leftOuterJoinsTest
from EndToEndTests import (
likeTest,
literalTest,
nestedQueriesTest,
nonEquiJoinsTest,
)
from EndToEndTests import orderbyTest as orderbyTest
from EndToEndTests import (
predicatesWithNulls,
roundTest,
simpleDistributionTest,
stringTests,
substringTest,
tablesFromPandasTest,
timestampTest,
tpchQueriesTest,
)
from EndToEndTests import unaryOpsTest as unaryOpsTest
from EndToEndTests import unifyTablesTest
from EndToEndTests import unionTest as unionTest
from EndToEndTests import useLimitTest
from EndToEndTests import whereClauseTest as whereClauseTest
from EndToEndTests import wildCardTest
from pynvml import nvmlInit
from pyspark.sql import SparkSession
from Runner import runTest
from Utils import Execution, init_context
def main():
    """Drive the full end-to-end test run.

    Initialises NVML and (when result comparison or generation is
    requested) the Drill/Spark validation engines, then executes every
    selected test group in a fixed order, and finally saves the log and
    prints a per-test memory report.

    Returns:
        tuple(bool, list): overall success flag and error messages.
        Always ``(True, [])`` in generator mode, where no log is saved.
    """
    print("**init end2end**")
    Execution.getArgs()
    nvmlInit()
    dir_data_file = Settings.data["TestSettings"]["dataDirectory"]
    nRals = Settings.data["RunSettings"]["nRals"]

    # Placeholder strings; replaced by live connections only when the
    # validation engines are actually needed.
    drill = "drill"
    spark = "spark"

    compareResults = Settings.data["RunSettings"].get("compare_results", True)

    if (
        Settings.execution_mode == ExecutionMode.FULL and compareResults == "true"
    ) or Settings.execution_mode == ExecutionMode.GENERATOR:
        # Validation engines are only required to generate or compare
        # reference results.
        from pydrill.client import PyDrill

        drill = PyDrill(host="localhost", port=8047)
        createSchema.init_drill_schema(
            drill, Settings.data["TestSettings"]["dataDirectory"], bool_test=True
        )
        spark = SparkSession.builder.appName("allE2ETest").getOrCreate()
        createSchema.init_spark_schema(
            spark, Settings.data["TestSettings"]["dataDirectory"]
        )

    bc, dask_client = init_context()

    targetTestGroups = Settings.data["RunSettings"]["targetTestGroups"]
    # An empty selection means "run everything".
    runAllTests = len(targetTestGroups) == 0

    # (group name, test module, engine args inserted between dask_client
    # and the data directory).  Execution order matches the original
    # script.  NOTE: fixes two defects of the previous version — the
    # leftOuterJoinsTest group was keyed on the empty string, and the
    # nestedQueriesTest guard line was truncated/broken.
    suites = [
        ("aggregationsWithoutGroupByTest", aggregationsWithoutGroupByTest, (drill,)),
        ("coalesceTest", coalesceTest, (drill,)),
        ("columnBasisTest", columnBasisTest, (drill,)),
        ("commonTableExpressionsTest", commonTableExpressionsTest, (drill,)),
        ("countWithoutGroupByTest", countWithoutGroupByTest, (drill,)),
        ("dateTest", dateTest, (drill, spark)),
        ("timestampTest", timestampTest, (drill, spark)),
        ("fullOuterJoinsTest", fullOuterJoinsTest, (drill,)),
        ("groupByTest", groupByTest, (drill,)),
        ("GroupByWitoutAggregations", GroupByWitoutAggregations, (drill,)),
        ("innerJoinsTest", innerJoinsTest, (drill,)),
        ("crossJoinsTest", crossJoinsTest, (spark,)),
        ("leftOuterJoinsTest", leftOuterJoinsTest, (drill,)),
        ("nonEquiJoinsTest", nonEquiJoinsTest, (drill,)),
        ("nestedQueriesTest", nestedQueriesTest, (drill,)),
        ("orderbyTest", orderbyTest, (drill,)),
        ("predicatesWithNulls", predicatesWithNulls, (drill,)),
        ("stringTests", stringTests, (drill, spark)),
        ("tablesFromPandasTest", tablesFromPandasTest, (drill,)),
        ("unaryOpsTest", unaryOpsTest, (drill,)),
        ("unifyTablesTest", unifyTablesTest, (drill,)),
        ("unionTest", unionTest, (drill, spark)),
        ("useLimitTest", useLimitTest, (drill, spark)),
        ("whereClauseTest", whereClauseTest, (drill,)),
        ("bindableAliasTest", bindableAliasTest, (drill, spark)),
        ("booleanTest", booleanTest, (drill,)),
        ("caseTest", caseTest, (drill, spark)),
        ("castTest", castTest, (drill, spark)),
        ("concatTest", concatTest, (drill, spark)),
        ("literalTest", literalTest, (drill, spark)),
        ("dirTest", dirTest, (drill,)),
        ("likeTest", likeTest, (drill,)),
        ("simpleDistributionTest", simpleDistributionTest, (drill, spark)),
        ("substringTest", substringTest, (drill, spark)),
        ("wildCardTest", wildCardTest, (drill,)),
        ("tpchQueriesTest", tpchQueriesTest, (drill, spark)),
        ("roundTest", roundTest, (drill,)),
        ("fileSystemLocalTest", fileSystemLocalTest, (drill,)),
    ]
    for group, module, engines in suites:
        if runAllTests or group in targetTestGroups:
            module.main(dask_client, *engines, dir_data_file, bc, nRals)

    # Remote file-system suites are skipped on gpuCI.
    if Settings.execution_mode != ExecutionMode.GPUCI:
        if runAllTests or ("fileSystemS3Test" in targetTestGroups):
            fileSystemS3Test.main(dask_client, drill, dir_data_file, bc, nRals)
        if runAllTests or ("fileSystemGSTest" in targetTestGroups):
            fileSystemGSTest.main(dask_client, drill, dir_data_file, bc, nRals)

    if Settings.execution_mode != ExecutionMode.GENERATOR:
        result, error_msgs = runTest.save_log(
            Settings.execution_mode == ExecutionMode.GPUCI
        )
        # Largest per-test memory delta (renamed from ``max`` so the
        # builtin is no longer shadowed).
        max_delta = 0
        for entry in Settings.memory_list:
            if entry.delta > max_delta:
                max_delta = entry.delta
        print("MAX DELTA: " + str(max_delta))
        print(
            """***********************************************************
        ********************"""
        )
        for entry in Settings.memory_list:
            print(
                entry.name
                + ":"
                + " Start Mem: "
                + str(entry.start_mem)
                + " End Mem: "
                + str(entry.end_mem)
                + " Diff: "
                + str(entry.delta)
            )
        return result, error_msgs
    return True, []
if __name__ == "__main__":
    import time
    # Wall-clock timing for the whole run (seconds).
    start = time.time()
    result, error_msgs = main()
    if Settings.execution_mode != ExecutionMode.GENERATOR:
        # NOTE(review): the run result is forcibly overridden here, which
        # makes the failure-report branch below dead code.  This looks like
        # a deliberately disabled gate — confirm intent before "fixing".
        result = True
        if result is False:
            # Print every accumulated error and the total elapsed time.
            for error_msg in error_msgs:
                print(error_msg)
            end = time.time()
            elapsed = end - start
            time_delta_desc = (
                str(elapsed / 60)
                + " minutes and "
                + str(int(elapsed) % 60)
                + " seconds"
            )
            print(
                "==>> E2E FAILED against previous run, total time was: "
                + time_delta_desc
            )
| true
| true
|
790a659d49877395c271e997888d2d68ab466174
| 1,253
|
py
|
Python
|
test/unit/test_construct_hostname.py
|
fermezz/snowflake-connector-python
|
bc9616ad568b23cb8a931d2d590041f6bac1cff9
|
[
"Apache-2.0"
] | 1
|
2021-08-06T07:21:09.000Z
|
2021-08-06T07:21:09.000Z
|
test/unit/test_construct_hostname.py
|
fermezz/snowflake-connector-python
|
bc9616ad568b23cb8a931d2d590041f6bac1cff9
|
[
"Apache-2.0"
] | 37
|
2021-08-11T23:22:14.000Z
|
2021-08-12T22:27:15.000Z
|
test/unit/test_construct_hostname.py
|
fermezz/snowflake-connector-python
|
bc9616ad568b23cb8a931d2d590041f6bac1cff9
|
[
"Apache-2.0"
] | 2
|
2021-05-21T10:52:52.000Z
|
2021-05-21T13:34:37.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012-2021 Snowflake Computing Inc. All right reserved.
#
from snowflake.connector.util_text import construct_hostname
def test_construct_hostname_basic():
    """construct_hostname joins account and region into a full hostname.

    An explicit region overrides any region embedded in the account name;
    an empty/None region keeps the account string as-is.
    """
    cases = [
        ("eu-central-1", "account1", "account1.eu-central-1.snowflakecomputing.com"),
        ("", "account1", "account1.snowflakecomputing.com"),
        (None, "account1", "account1.snowflakecomputing.com"),
        ("as-east-3", "account1", "account1.as-east-3.snowflakecomputing.com"),
        ("as-east-3", "account1.eu-central-1", "account1.as-east-3.snowflakecomputing.com"),
        ("", "account1.eu-central-1", "account1.eu-central-1.snowflakecomputing.com"),
        (None, "account1.eu-central-1", "account1.eu-central-1.snowflakecomputing.com"),
        (
            None,
            "account1-jkabfvdjisoa778wqfgeruishafeuw89q.global",
            "account1-jkabfvdjisoa778wqfgeruishafeuw89q.global.snowflakecomputing.com",
        ),
    ]
    for region, account, expected in cases:
        assert construct_hostname(region, account) == expected
| 28.477273
| 85
| 0.672785
|
from snowflake.connector.util_text import construct_hostname
def test_construct_hostname_basic():
assert (
construct_hostname("eu-central-1", "account1")
== "account1.eu-central-1.snowflakecomputing.com"
)
assert construct_hostname("", "account1") == "account1.snowflakecomputing.com"
assert construct_hostname(None, "account1") == "account1.snowflakecomputing.com"
assert (
construct_hostname("as-east-3", "account1")
== "account1.as-east-3.snowflakecomputing.com"
)
assert (
construct_hostname("as-east-3", "account1.eu-central-1")
== "account1.as-east-3.snowflakecomputing.com"
)
assert (
construct_hostname("", "account1.eu-central-1")
== "account1.eu-central-1.snowflakecomputing.com"
)
assert (
construct_hostname(None, "account1.eu-central-1")
== "account1.eu-central-1.snowflakecomputing.com"
)
assert (
construct_hostname(None, "account1-jkabfvdjisoa778wqfgeruishafeuw89q.global")
== "account1-jkabfvdjisoa778wqfgeruishafeuw89q.global.snowflakecomputing.com"
)
| true
| true
|
790a66c97044ad3b15211984004d33fd39612f22
| 5,773
|
py
|
Python
|
scripts/classifcation_pos_n_trials_back (cv counts).py
|
nmningmei/metacognition
|
734082e247cc7fc9d277563e2676e10692617a3f
|
[
"MIT"
] | 3
|
2019-07-09T15:37:46.000Z
|
2019-07-17T16:28:02.000Z
|
scripts/classifcation_pos_n_trials_back (cv counts).py
|
nmningmei/metacognition
|
734082e247cc7fc9d277563e2676e10692617a3f
|
[
"MIT"
] | null | null | null |
scripts/classifcation_pos_n_trials_back (cv counts).py
|
nmningmei/metacognition
|
734082e247cc7fc9d277563e2676e10692617a3f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 15 16:02:16 2018
@author: ning
"""
import os
working_dir = ''
import pandas as pd
pd.options.mode.chained_assignment = None
import numpy as np
from utils import (cv_counts)

saving_dir = '../results/cv_counts'
if not os.path.exists(saving_dir):
    os.mkdir(saving_dir)

# Column names for the PoS behavioural data (the csv's first column is an
# index and is dropped before these are applied).
COLUMN_NAMES = ['participant',
                'blocks',
                'trials',
                'firstgabor',
                'success',
                'tilted',
                'correct',
                'RT_correct',
                'awareness',
                'RT_confidence'.replace('RT_confidence', 'RT_awareness'),
                'confidence',
                'RT_confidence']


def _run_feature_set(df_sub, participant, experiment, feature_names, csv_label):
    """Run cv_counts for 1-4 trials back over one feature set and save a csv.

    Builds the results dictionary expected by ``cv_counts`` (one low/high
    conditional column per feature), runs windows 1..4 and writes
    ``<csv_label> (cv_count)_<participant>.csv`` into ``saving_dir``.
    """
    np.random.seed(12345)  # same seed per section as the original script
    target_name = 'success'
    results = dict(sub=[], window=[], fold=[])
    for name in feature_names:
        results['{}_high_cond_{}_low'.format(target_name, name)] = []
        results['{}_high_cond_{}_high'.format(target_name, name)] = []
    for n_back in np.arange(1, 5):  # number of trials looking back
        results = cv_counts(df_sub,
                            feature_names,
                            target_name,
                            results,
                            participant,
                            experiment,
                            window=n_back)
    pd.DataFrame(results).to_csv(
        os.path.join(saving_dir,
                     '{} (cv_count)_{}.csv'.format(csv_label, participant)),
        index=False)


# Load the data once: the csv is identical for every participant (the
# previous version re-read it on each loop iteration).
df = pd.read_csv(os.path.join(working_dir, '../data/PoSdata.csv'))
df = df[df.columns[1:]]
df.columns = COLUMN_NAMES

# Exp 1
for participant in ['AC', 'CL', 'FW', 'HB', 'KK', 'LM', 'MC', 'MP1', 'MP2',
                    'NN', 'RP', 'SD', 'TJ', 'TS', 'WT']:
    experiment = 'pos'
    # Boolean-mask indexing returns a copy, so the 0/1 recoding below does
    # not leak back into ``df`` between participants.
    df_sub = df[df['participant'] == participant]
    # Make sure all the binary attributes are coded 0/1 instead of 1/2.
    df_sub.loc[:, 'success'] = df_sub.loc[:, 'success'].values - 1
    df_sub.loc[:, 'awareness'] = df_sub.loc[:, 'awareness'].values - 1
    df_sub.loc[:, 'confidence'] = df_sub.loc[:, 'confidence'].values - 1

    # All six features: judgements plus reaction times.
    _run_feature_set(df_sub, participant, experiment,
                     ['correct', 'awareness', 'confidence',
                      'RT_correct', 'RT_awareness', 'RT_confidence'],
                     'Pos_6_features')
    # Judgement features only.
    _run_feature_set(df_sub, participant, experiment,
                     ['correct', 'awareness', 'confidence'],
                     'Pos_3_1_features')
    # Reaction-time features only.
    _run_feature_set(df_sub, participant, experiment,
                     ['RT_correct', 'RT_awareness', 'RT_confidence'],
                     'Pos_RT_features')
| 33.760234
| 126
| 0.411744
|
import os
working_dir = ''
import pandas as pd
pd.options.mode.chained_assignment = None
import numpy as np
from utils import (cv_counts)
saving_dir = '../results/cv_counts'
if not os.path.exists(saving_dir):
os.mkdir(saving_dir)
for participant in ['AC', 'CL', 'FW', 'HB', 'KK', 'LM', 'MC', 'MP1', 'MP2', 'NN', 'RP','SD', 'TJ', 'TS', 'WT']:
experiment = 'pos'
df = pd.read_csv(os.path.join(working_dir,'../data/PoSdata.csv'))
df = df[df.columns[1:]]
df.columns = ['participant',
'blocks',
'trials',
'firstgabor',
'success',
'tilted',
'correct',
'RT_correct',
'awareness',
'RT_awareness',
'confidence',
'RT_confidence']
df_sub = df[df['participant'] == participant]
df_sub.loc[:,'success' ] = df_sub.loc[:,'success' ].values - 1
df_sub.loc[:,'awareness' ] = df_sub.loc[:,'awareness' ].values - 1
df_sub.loc[:,'confidence'] = df_sub.loc[:,'confidence'].values - 1
| true
| true
|
790a66f1295e33e078d674f2139b0f8e3276cc44
| 567
|
py
|
Python
|
pypesto/optimize/__init__.py
|
m-philipps/pyPESTO
|
4c30abfca56ba714c302141cd44a9dd366bff4bb
|
[
"BSD-3-Clause"
] | null | null | null |
pypesto/optimize/__init__.py
|
m-philipps/pyPESTO
|
4c30abfca56ba714c302141cd44a9dd366bff4bb
|
[
"BSD-3-Clause"
] | null | null | null |
pypesto/optimize/__init__.py
|
m-philipps/pyPESTO
|
4c30abfca56ba714c302141cd44a9dd366bff4bb
|
[
"BSD-3-Clause"
] | null | null | null |
# noqa: D400,D205
"""
Optimize
========
Multistart optimization with support for various optimizers.
"""
from .load import (
fill_result_from_history,
optimization_result_from_history,
read_result_from_file,
read_results_from_file,
)
from .optimize import minimize
from .optimizer import (
CmaesOptimizer,
DlibOptimizer,
FidesOptimizer,
IpoptOptimizer,
NLoptOptimizer,
Optimizer,
PyswarmOptimizer,
PyswarmsOptimizer,
ScipyDifferentialEvolutionOptimizer,
ScipyOptimizer,
)
from .options import OptimizeOptions
| 19.551724
| 60
| 0.746032
|
from .load import (
fill_result_from_history,
optimization_result_from_history,
read_result_from_file,
read_results_from_file,
)
from .optimize import minimize
from .optimizer import (
CmaesOptimizer,
DlibOptimizer,
FidesOptimizer,
IpoptOptimizer,
NLoptOptimizer,
Optimizer,
PyswarmOptimizer,
PyswarmsOptimizer,
ScipyDifferentialEvolutionOptimizer,
ScipyOptimizer,
)
from .options import OptimizeOptions
| true
| true
|
790a673b35e41f916d9e970cf44d0e194dbec7d2
| 7,560
|
py
|
Python
|
extra_views/formsets.py
|
incuna/django-extra-views
|
37b08e2d27938ab24b22be3ffcec1b4370d621a9
|
[
"MIT"
] | null | null | null |
extra_views/formsets.py
|
incuna/django-extra-views
|
37b08e2d27938ab24b22be3ffcec1b4370d621a9
|
[
"MIT"
] | null | null | null |
extra_views/formsets.py
|
incuna/django-extra-views
|
37b08e2d27938ab24b22be3ffcec1b4370d621a9
|
[
"MIT"
] | null | null | null |
from django.views.generic.base import TemplateResponseMixin, View
from django.http import HttpResponseRedirect
from django.forms.formsets import formset_factory
from django.forms.models import modelformset_factory, inlineformset_factory
from django.views.generic.detail import SingleObjectMixin, SingleObjectTemplateResponseMixin
from django.views.generic.list import MultipleObjectMixin, MultipleObjectTemplateResponseMixin
from django.forms.models import BaseInlineFormSet
class BaseFormSetMixin(object):
    """Common plumbing for constructing a FormSet within a view."""

    initial = []               # initial data handed to the formset
    form_class = None          # form used for each row
    formset_class = None       # custom formset base class, if any
    success_url = None
    extra = 2
    max_num = None
    can_order = False
    can_delete = False

    def construct_formset(self):
        """Instantiate the formset with initial data plus request kwargs."""
        formset_cls = self.get_formset()
        return formset_cls(initial=self.get_initial(), **self.get_formset_kwargs())

    def get_initial(self):
        """Initial data for the formset."""
        return self.initial

    def get_formset_class(self):
        """Custom formset base class (or None)."""
        return self.formset_class

    def get_form_class(self):
        """Form class used for each form in the set."""
        return self.form_class

    def get_formset(self):
        """Build the formset class via formset_factory."""
        return formset_factory(self.get_form_class(), **self.get_factory_kwargs())

    def get_formset_kwargs(self):
        """Kwargs for instantiating the formset (bound data on POST/PUT)."""
        if self.request.method in ('POST', 'PUT'):
            return {'data': self.request.POST, 'files': self.request.FILES}
        return {}

    def get_factory_kwargs(self):
        """Kwargs forwarded to the formset factory."""
        kwargs = {
            'extra': self.extra,
            'max_num': self.max_num,
            'can_order': self.can_order,
            'can_delete': self.can_delete,
        }
        formset_cls = self.get_formset_class()
        if formset_cls:
            kwargs['formset'] = formset_cls
        return kwargs
class FormSetMixin(BaseFormSetMixin):
    """Adds redirect/re-render handling on top of BaseFormSetMixin."""

    def get_context_data(self, **kwargs):
        """Template context is simply the supplied kwargs."""
        return kwargs

    def get_success_url(self):
        """Redirect target after a valid formset; defaults to this page."""
        return self.success_url or self.request.get_full_path()

    def formset_valid(self, formset):
        """Redirect to the success URL."""
        return HttpResponseRedirect(self.get_success_url())

    def formset_invalid(self, formset):
        """Re-render the page with the bound (invalid) formset."""
        return self.render_to_response(self.get_context_data(formset=formset))
class ModelFormSetMixin(FormSetMixin, MultipleObjectMixin):
    """FormSetMixin flavour backed by a model queryset."""

    exclude = None
    fields = None
    formfield_callback = None

    def get_context_data(self, **kwargs):
        """Expose object_list (and its named alias) in the context."""
        context = kwargs
        if self.object_list:
            context['object_list'] = self.object_list
            alias = self.get_context_object_name(self.get_queryset())
            if alias:
                context[alias] = self.object_list
        return context

    def construct_formset(self):
        """Bind the model formset to the view's queryset."""
        formset_cls = self.get_formset()
        return formset_cls(queryset=self.get_queryset(), **self.get_formset_kwargs())

    def get_factory_kwargs(self):
        """Extend the base factory kwargs with model-formset options."""
        kwargs = super(ModelFormSetMixin, self).get_factory_kwargs()
        kwargs['exclude'] = self.exclude
        kwargs['fields'] = self.fields
        kwargs['formfield_callback'] = self.formfield_callback
        form_cls = self.get_form_class()
        if form_cls:
            kwargs['form'] = form_cls
        formset_cls = self.get_formset_class()
        if formset_cls:
            kwargs['formset'] = formset_cls
        return kwargs

    def get_formset(self):
        """Build the formset class via modelformset_factory."""
        return modelformset_factory(self.model, **self.get_factory_kwargs())

    def formset_valid(self, formset):
        """Save the objects, then delegate the redirect to the base class."""
        self.object_list = formset.save()
        return super(ModelFormSetMixin, self).formset_valid(formset)
class BaseInlineFormSetMixin(BaseFormSetMixin):
    """Plumbing for an inline (parent/child) model formset."""

    model = None            # parent model
    inline_model = None     # child model edited inline
    fk_name = None          # explicit FK name when the child has several
    formset_class = BaseInlineFormSet
    exclude = None
    fields = None
    formfield_callback = None
    can_delete = True

    def get_context_data(self, **kwargs):
        """Expose the parent object (and its named alias) in the context."""
        context = kwargs
        if self.object:
            context['object'] = self.object
            alias = self.get_context_object_name(self.object)
            if alias:
                context[alias] = self.object
        return context

    def construct_formset(self):
        """Bind the inline formset to the parent instance."""
        formset_cls = self.get_formset()
        return formset_cls(instance=self.object, **self.get_formset_kwargs())

    def get_inline_model(self):
        """Child model edited by the inline formset."""
        return self.inline_model

    def get_factory_kwargs(self):
        """Extend the base factory kwargs with inline-formset options."""
        kwargs = super(BaseInlineFormSetMixin, self).get_factory_kwargs()
        kwargs['exclude'] = self.exclude
        kwargs['fields'] = self.fields
        kwargs['formfield_callback'] = self.formfield_callback
        kwargs['fk_name'] = self.fk_name
        form_cls = self.get_form_class()
        if form_cls:
            kwargs['form'] = form_cls
        formset_cls = self.get_formset_class()
        if formset_cls:
            kwargs['formset'] = formset_cls
        return kwargs

    def get_formset(self):
        """Build the formset class via inlineformset_factory."""
        return inlineformset_factory(self.model, self.get_inline_model(), **self.get_factory_kwargs())
class InlineFormSetMixin(BaseInlineFormSetMixin, FormSetMixin, SingleObjectMixin):
    # Combines inline-formset plumbing with redirect handling and
    # single-object lookup.
    def formset_valid(self, formset):
        # Save the child objects, then redirect via FormSetMixin.
        # NOTE(review): super() is anchored at BaseInlineFormSetMixin, so
        # MRO lookup continues at FormSetMixin.formset_valid; with the
        # current hierarchy this is equivalent to
        # super(InlineFormSetMixin, self) — confirm before changing.
        self.object_list = formset.save()
        return super(BaseInlineFormSetMixin, self).formset_valid(formset)
class ProcessFormSetView(View):
    """Handle GET/POST/PUT for a formset-driven view."""

    def get(self, request, *args, **kwargs):
        """Render the page with a freshly constructed formset."""
        return self.render_to_response(
            self.get_context_data(formset=self.construct_formset()))

    def post(self, request, *args, **kwargs):
        """Validate the bound formset and dispatch to the outcome handler."""
        formset = self.construct_formset()
        handler = self.formset_valid if formset.is_valid() else self.formset_invalid
        return handler(formset)

    def put(self, *args, **kwargs):
        """PUT is processed exactly like POST."""
        return self.post(*args, **kwargs)
class BaseFormSetView(FormSetMixin, ProcessFormSetView):
    """Display and process a plain formset (no template machinery)."""
class FormSetView(TemplateResponseMixin, BaseFormSetView):
    """Display a formset and render it through a template response."""
class BaseModelFormSetView(ModelFormSetMixin, ProcessFormSetView):
    """Display and process a model formset."""

    def get(self, request, *args, **kwargs):
        """Populate object_list before rendering."""
        self.object_list = self.get_queryset()
        return super(BaseModelFormSetView, self).get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        """Populate object_list before processing the submission."""
        self.object_list = self.get_queryset()
        return super(BaseModelFormSetView, self).post(request, *args, **kwargs)
class ModelFormSetView(MultipleObjectTemplateResponseMixin, BaseModelFormSetView):
    """Display a model formset and render it with a list-style template response."""
class BaseInlineFormSetView(InlineFormSetMixin, ProcessFormSetView):
    """Display/process a model formset for children of a parent object."""

    def get(self, request, *args, **kwargs):
        """Resolve the parent object before rendering."""
        self.object = self.get_object()
        return super(BaseInlineFormSetView, self).get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        """Resolve the parent object before processing the submission."""
        self.object = self.get_object()
        return super(BaseInlineFormSetView, self).post(request, *args, **kwargs)
class InlineFormSetView(SingleObjectTemplateResponseMixin, BaseInlineFormSetView):
    """Display an inline formset for a parent's related queryset, rendered
    with a detail-style template response."""
|
from django.views.generic.base import TemplateResponseMixin, View
from django.http import HttpResponseRedirect
from django.forms.formsets import formset_factory
from django.forms.models import modelformset_factory, inlineformset_factory
from django.views.generic.detail import SingleObjectMixin, SingleObjectTemplateResponseMixin
from django.views.generic.list import MultipleObjectMixin, MultipleObjectTemplateResponseMixin
from django.forms.models import BaseInlineFormSet
class BaseFormSetMixin(object):
initial = []
form_class = None
formset_class = None
success_url = None
extra = 2
max_num = None
can_order = False
can_delete = False
def construct_formset(self):
return self.get_formset()(initial=self.get_initial(), **self.get_formset_kwargs())
def get_initial(self):
return self.initial
def get_formset_class(self):
return self.formset_class
def get_form_class(self):
return self.form_class
def get_formset(self):
return formset_factory(self.get_form_class(), **self.get_factory_kwargs())
def get_formset_kwargs(self):
kwargs = {}
if self.request.method in ('POST', 'PUT'):
kwargs.update({
'data': self.request.POST,
'files': self.request.FILES,
})
return kwargs
def get_factory_kwargs(self):
kwargs = {
'extra': self.extra,
'max_num': self.max_num,
'can_order': self.can_order,
'can_delete': self.can_delete,
}
if self.get_formset_class():
kwargs['formset'] = self.get_formset_class()
return kwargs
class FormSetMixin(BaseFormSetMixin):
def get_context_data(self, **kwargs):
return kwargs
def get_success_url(self):
if self.success_url:
url = self.success_url
else:
url = self.request.get_full_path()
return url
def formset_valid(self, formset):
return HttpResponseRedirect(self.get_success_url())
def formset_invalid(self, formset):
return self.render_to_response(self.get_context_data(formset=formset))
class ModelFormSetMixin(FormSetMixin, MultipleObjectMixin):
exclude = None
fields = None
formfield_callback = None
def get_context_data(self, **kwargs):
context = kwargs
if self.object_list:
context['object_list'] = self.object_list
context_object_name = self.get_context_object_name(self.get_queryset())
if context_object_name:
context[context_object_name] = self.object_list
return context
def construct_formset(self):
return self.get_formset()(queryset=self.get_queryset(), **self.get_formset_kwargs())
def get_factory_kwargs(self):
kwargs = super(ModelFormSetMixin, self).get_factory_kwargs()
kwargs.update({
'exclude': self.exclude,
'fields': self.fields,
'formfield_callback': self.formfield_callback,
})
if self.get_form_class():
kwargs['form'] = self.get_form_class()
if self.get_formset_class():
kwargs['formset'] = self.get_formset_class()
return kwargs
def get_formset(self):
return modelformset_factory(self.model, **self.get_factory_kwargs())
def formset_valid(self, formset):
self.object_list = formset.save()
return super(ModelFormSetMixin, self).formset_valid(formset)
class BaseInlineFormSetMixin(BaseFormSetMixin):
model = None
inline_model = None
fk_name = None
formset_class = BaseInlineFormSet
exclude = None
fields = None
formfield_callback = None
can_delete = True
def get_context_data(self, **kwargs):
context = kwargs
if self.object:
context['object'] = self.object
context_object_name = self.get_context_object_name(self.object)
if context_object_name:
context[context_object_name] = self.object
return context
def construct_formset(self):
return self.get_formset()(instance=self.object, **self.get_formset_kwargs())
def get_inline_model(self):
return self.inline_model
def get_factory_kwargs(self):
kwargs = super(BaseInlineFormSetMixin, self).get_factory_kwargs()
kwargs.update({
'exclude': self.exclude,
'fields': self.fields,
'formfield_callback': self.formfield_callback,
'fk_name': self.fk_name,
})
if self.get_form_class():
kwargs['form'] = self.get_form_class()
if self.get_formset_class():
kwargs['formset'] = self.get_formset_class()
return kwargs
def get_formset(self):
return inlineformset_factory(self.model, self.get_inline_model(), **self.get_factory_kwargs())
class InlineFormSetMixin(BaseInlineFormSetMixin, FormSetMixin, SingleObjectMixin):
def formset_valid(self, formset):
self.object_list = formset.save()
return super(BaseInlineFormSetMixin, self).formset_valid(formset)
class ProcessFormSetView(View):
def get(self, request, *args, **kwargs):
formset = self.construct_formset()
return self.render_to_response(self.get_context_data(formset=formset))
def post(self, request, *args, **kwargs):
formset = self.construct_formset()
if formset.is_valid():
return self.formset_valid(formset)
else:
return self.formset_invalid(formset)
def put(self, *args, **kwargs):
return self.post(*args, **kwargs)
class BaseFormSetView(FormSetMixin, ProcessFormSetView):
class FormSetView(TemplateResponseMixin, BaseFormSetView):
class BaseModelFormSetView(ModelFormSetMixin, ProcessFormSetView):
def get(self, request, *args, **kwargs):
self.object_list = self.get_queryset()
return super(BaseModelFormSetView, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.object_list = self.get_queryset()
return super(BaseModelFormSetView, self).post(request, *args, **kwargs)
class ModelFormSetView(MultipleObjectTemplateResponseMixin, BaseModelFormSetView):
class BaseInlineFormSetView(InlineFormSetMixin, ProcessFormSetView):
def get(self, request, *args, **kwargs):
self.object = self.get_object()
return super(BaseInlineFormSetView, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.object = self.get_object()
return super(BaseInlineFormSetView, self).post(request, *args, **kwargs)
class InlineFormSetView(SingleObjectTemplateResponseMixin, BaseInlineFormSetView):
| true
| true
|
790a67a273a5c775988c3b7f3af46bd673e53ab6
| 10,140
|
py
|
Python
|
GamePad/gamepad.py
|
SrGambiarra/ESP8266-MultiplayerGame
|
6c0d3e1f5d859d32fd980c143cfd22c2365d1099
|
[
"RSA-MD"
] | null | null | null |
GamePad/gamepad.py
|
SrGambiarra/ESP8266-MultiplayerGame
|
6c0d3e1f5d859d32fd980c143cfd22c2365d1099
|
[
"RSA-MD"
] | null | null | null |
GamePad/gamepad.py
|
SrGambiarra/ESP8266-MultiplayerGame
|
6c0d3e1f5d859d32fd980c143cfd22c2365d1099
|
[
"RSA-MD"
] | null | null | null |
from multiprocessing import parent_process
from kivy.clock import Clock
from kivy.lang import Builder
from kivy.properties import ObjectProperty, ListProperty
from kivy.graphics import Color, Line
from kivy.uix.screenmanager import Screen
from kivy.animation import Animation
from random import random, randint
from kivy.metrics import dp
from uix.joystick import Joystick
from uix.icons import (
FloatButtonIcon, ToggleButtonIcon,
ButtonIcon, AnchorIcon, FloatLifes)
import socket
import json, time
from utils import get_path, config_path
from functools import partial
from threading import Thread
ip_esp = "192.168.4.2"
gateway_esp = "192.168.4.1"
port_esp = 80
Builder.load_file(get_path('gamepad.kv'))
class GamePad(Screen):
conn = None
HOST = ''
PORT = randint(100, 65000)
can_move = True
move_layout = ObjectProperty(False)
username = ''
connected = False
index_player = -1
lifes = 0
name_players = ListProperty(['', '', '', '', ''])
pos_players = ListProperty([[0, 0], [0, 0], [0, 0], [0, 0], [0, 0]])
players_color = ListProperty([
[1, 0, 0, 1], [1, 0, 0, 1],
[1, 0, 0, 1], [1, 0, 0, 1],
[1, 0, 0, 1]])
def __init__(self, **kwargs):
super().__init__(**kwargs)
Clock.schedule_once(self.config)
    def start_server(self, *args):
        """Bind the TCP server socket and start the accept loop.

        Retries on a different random port (after 2 s) if binding fails.
        """
        # UDP "connect" trick: discover our LAN IP without sending data.
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(("8.8.8.8", 80))
        self.HOST = s.getsockname()[0]
        print("HOST -> ", self.HOST)
        print("PORT -> ", self.PORT)
        self.conn = socket.socket()
        try:
            self.conn.bind((self.HOST, self.PORT))
        except OSError:
            # Port unavailable: drop the socket, schedule a retry and pick
            # a new random port for the next attempt.
            self.conn = None
            Clock.schedule_once(self.start_server, 2)
            print("Não iniciou o server!!")
            self.PORT = randint(100, 65000)
            print("NEW PORT -> ", self.PORT)
            return False
        # 2 s accept timeout so the listen loop can notice shutdown.
        self.conn.settimeout(2.0)
        Thread(target=self.start_listen).start()
        print(self.conn.getsockname())
        print("Server iniciou.")
    def config(self, *args):
        """One-off widget wiring, run once the kv ids exist."""
        self.load_json_pos()
        # Keep the joystick knob inside the pad (workaround for a bug).
        self.ids.joystick.on_pos(self.ids.joystick.pos)
        # Receive joystick data whenever its internal pad position changes.
        self.ids.joystick.bind(pad=self.update_coordinates)
    def start_listen(self, *args):
        """Accept one connection, process its payload, then re-arm itself."""
        # conn is set to None by exit_game / failed bind: stop the loop.
        if self.conn is None:
            print("Parou o server!!")
            return False
        try:
            self.conn.listen()
            # NOTE(review): the accepted socket is never closed explicitly
            # — relies on garbage collection; confirm this is intended.
            conn, addr = self.conn.accept()
            data = conn.recv(1024)
            self.update_from_esp(data)
        except (socket.timeout, TimeoutError):
            # 2 s accept timeout expired with no client; just re-arm below.
            pass
        Thread(target=self.start_listen).start()
    def start_game(self, *args):
        """Switch to the gamepad screen and show this player's lifes."""
        self.connected = True
        self.manager.current = 'gamepad'
        self.ids.lifes.clear_lifes()
        self.ids.lifes.show_lifes(self.lifes)
        # Highlight the local player in blue (others stay red).
        self.players_color[self.index_player] = [0, 0, 1, 1]
    def exit_game(self, *args):
        """Return to the login screen, stop the server and notify the ESP."""
        self.manager.current = 'login'
        # Dropping conn makes start_listen stop on its next pass.
        self.conn = None
        if not self.username:
            return None
        # Ask the ESP to persist this player's state on exit.
        self.send_informations_with_thread('exit:save')
    def update_from_esp(self, data):
        """Parse a colon-separated message from the ESP and update state.

        Expected shape (assumed from the parsing below — confirm against
        the firmware): ``life:<n>:pos:<x,y>:<x,y>:...:``.
        """
        resp = data.decode('utf-8').strip("\n").split(":")
        if len(resp) < 1:
            return None
        if resp[0] == 'life':
            # Only redraw the life bar when the count actually changed.
            if self.lifes != int(resp[1]):
                self.lifes = int(resp[1])
                self.ids.lifes.clear_lifes()
                self.ids.lifes.show_lifes(self.lifes)
                if self.lifes == 0:
                    print("MORRI!!")
                    # NOTE(review): setting connected = True on death looks
                    # suspicious (False would seem natural) — confirm.
                    self.connected = True
                    print(resp)
            if resp[2] == 'pos':
                # Each "x,y" pair is converted to dp units; the trailing
                # element after the last ':' is dropped (resp[3:-1]).
                dconv = lambda pos: [dp(float(pos[0])), dp(float(pos[1]))]
                self.pos_players = tuple(map(lambda p: dconv(p.split(',')), resp[3:-1]))
        else:
            print(resp)
def get_json(self, name, *args):
with open(config_path(name), 'r', encoding='utf-8') as file:
return json.load(file)
def reload_json_position(self, default_name, update_name, *args):
default = self.get_json(name=default_name)
self.update_json(default, name=update_name)
self.load_json_pos()
def update_json(self, new_json, name):
with open(config_path(name), 'w', encoding='utf-8') as file:
file.write(json.dumps(new_json, indent=4))
def load_json_pos(self, *args):
dic_hints = self.get_json('position')
for id, hints in dic_hints.items():
wid = getattr(self.ids, id)
setattr(wid, 'name', id)
setattr(wid, 'hint_x', hints['hint_x'])
setattr(wid, 'hint_y', hints['hint_y'])
setattr(wid, 'width', dp(hints['width']))
setattr(wid, 'height', dp(hints['height']))
def connect_to_esp(self, force=False, *args):
if not self.connected and not force:
return None
esp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
esp.settimeout(2)
try:
esp.connect((gateway_esp, port_esp))
except (socket.timeout, TimeoutError, OSError):
print('Não foi conectar ao ESP8266!!')
return None
return esp
def close_connection_esp(self, esp, *args):
esp.close()
    def send_informations(self, msg, *args):
        """Deliver "<index_player>:<msg>" (newline-terminated) to the ESP.

        Known message prefixes: ``atk`` (attack), ``mov`` (movement —
        re-enables ``can_move`` shortly after the send) and ``exit``
        (marks the session closed).  On connection failure the send is
        re-scheduled after a small random delay.
        """
        esp = self.connect_to_esp()
        if esp is None:
            # Could not connect: don't leave movement blocked forever.
            self.can_move = True
            return None
        try:
            esp.send(f'{self.index_player}:{msg}\n'.encode('utf-8'))
            if msg.find('atk') != -1:
                print('Atacou!!')
            elif msg.find('mov') != -1:
                print('Moveu!!')
                # Throttle: movement unlocks ~10 ms after the send.
                Clock.schedule_once(lambda *a: setattr(self, 'can_move', True), 0.01)
            elif msg.find('exit') != -1:
                self.connected = False
                print(self.username + " saiu!")
                self.username = ''
        except (ConnectionAbortedError, socket.timeout, TimeoutError):
            print(f'Tentando mandar: [ {msg} ] novamente!')
            # Retry after a random 0..1 s delay; taking the min of two
            # draws biases the wait towards shorter times.
            dt = round(min(random(), random()), 2)
            Clock.schedule_once(partial(self.send_informations, msg), dt)
        self.close_connection_esp(esp)
def send_informations_with_thread(self, msg, *args):
th = Thread(target=self.send_informations, args=(msg, ))
th.start()
def update_coordinates(self, joystick, pad):
if not self.can_move:
return None
x, y = tuple(map(lambda n: round(n, 2), pad))
angle = round(joystick.angle)
self.send_informations_with_thread(f'mov:{x},{y},{angle}')
self.can_move = False
def do_ataque(self, *args):
self.send_informations_with_thread('atk:espd')
def on_move_layout(self, *args):
if not self.move_layout:
self.clear_grid()
return None
self.add_grid()
def update_grid(self, *args):
self.clear_grid(unbind=False)
self.add_grid(bind=False)
def add_grid(self, bind=True, *args):
self.clear_grid(bind)
content_pad = self.ids.content_pad
add = content_pad.canvas.before.add
add(Color(rgba=[0, 0, 1, 1], group='grid_background'))
# vertical lines
for x in range(1, round(content_pad.width/dp(10))+1):
new_x = content_pad.x+(dp(10)*x)
add(Line(
points=[new_x, content_pad.y, new_x, content_pad.y+content_pad.height],
group='grid_background'))
# horizontal lines
for y in range(1, round(content_pad.height/dp(10))+1):
new_y = self.y+(dp(10)*y)
add(Line(
points=[content_pad.x, new_y, content_pad.x+content_pad.width, new_y],
group='grid_background'))
if bind:
self.bind(size=self.update_grid)
self.bind(pos=self.update_grid)
def clear_grid(self, unbind=True, *args):
content_pad = self.ids.content_pad
content_pad.canvas.before.remove_group('grid_background')
if unbind:
self.unbind(size=self.update_grid)
self.unbind(pos=self.update_grid)
def update_middle_line(self, *args):
self.clear_middle_line(unbind=False)
self.add_middle_line(bind=False)
def add_middle_line(self, bind=True, *args):
self.clear_middle_line(bind)
content_pad = self.ids.content_pad
add = content_pad.canvas.before.add
add(Color(rgba=[1, 0, 0, 1], group='middle_background'))
add(Line(
points=[content_pad.x+(content_pad.width/2), content_pad.y,
content_pad.x+(content_pad.width/2), content_pad.y+content_pad.height],
group='middle_background'))
if bind:
self.bind(size=self.update_middle_line)
self.bind(pos=self.update_middle_line)
def clear_middle_line(self, unbind=True, *args):
content_pad = self.ids.content_pad
content_pad.canvas.before.remove_group('middle_background')
if unbind:
self.unbind(size=self.update_middle_line)
self.unbind(pos=self.update_middle_line)
def add_line(self, pos, name, *args):
content_pad = self.ids.content_pad
add = content_pad.canvas.before.add
add(Color(rgba=[1, 0, 0, 1], group=name))
if name.endswith('x'):
pos = [pos, content_pad.y, pos, content_pad.y+content_pad.height]
elif name.endswith('y'):
pos = [content_pad.x, pos, content_pad.x+content_pad.width, pos]
add(Line(points=pos, group=name))
def remove_line(self, name, *args):
content_pad = self.ids.content_pad
content_pad.canvas.before.remove_group(name)
    def animate_border_top(self, y, duration, wid, state):
        """Animate *wid*'s vertical position to *y* unless it is already
        in *state*.

        Returns the target *y* when nothing was started, otherwise the
        widget's current (pre-animation) y — the Animation itself runs
        asynchronously.
        """
        if wid.last_state == state:
            return y
        Animation(y=y, d=duration).start(wid)
        wid.last_state = state
        return wid.y
| 35.208333
| 91
| 0.588659
|
from multiprocessing import parent_process
from kivy.clock import Clock
from kivy.lang import Builder
from kivy.properties import ObjectProperty, ListProperty
from kivy.graphics import Color, Line
from kivy.uix.screenmanager import Screen
from kivy.animation import Animation
from random import random, randint
from kivy.metrics import dp
from uix.joystick import Joystick
from uix.icons import (
FloatButtonIcon, ToggleButtonIcon,
ButtonIcon, AnchorIcon, FloatLifes)
import socket
import json, time
from utils import get_path, config_path
from functools import partial
from threading import Thread
ip_esp = "192.168.4.2"
gateway_esp = "192.168.4.1"
port_esp = 80
Builder.load_file(get_path('gamepad.kv'))
class GamePad(Screen):
conn = None
HOST = ''
PORT = randint(100, 65000)
can_move = True
move_layout = ObjectProperty(False)
username = ''
connected = False
index_player = -1
lifes = 0
name_players = ListProperty(['', '', '', '', ''])
pos_players = ListProperty([[0, 0], [0, 0], [0, 0], [0, 0], [0, 0]])
players_color = ListProperty([
[1, 0, 0, 1], [1, 0, 0, 1],
[1, 0, 0, 1], [1, 0, 0, 1],
[1, 0, 0, 1]])
def __init__(self, **kwargs):
super().__init__(**kwargs)
Clock.schedule_once(self.config)
def start_server(self, *args):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
self.HOST = s.getsockname()[0]
print("HOST -> ", self.HOST)
print("PORT -> ", self.PORT)
self.conn = socket.socket()
try:
self.conn.bind((self.HOST, self.PORT))
except OSError:
self.conn = None
Clock.schedule_once(self.start_server, 2)
print("Não iniciou o server!!")
self.PORT = randint(100, 65000)
print("NEW PORT -> ", self.PORT)
return False
self.conn.settimeout(2.0)
Thread(target=self.start_listen).start()
print(self.conn.getsockname())
print("Server iniciou.")
def config(self, *args):
self.load_json_pos()
self.ids.joystick.on_pos(self.ids.joystick.pos)
self.ids.joystick.bind(pad=self.update_coordinates)
def start_listen(self, *args):
if self.conn is None:
print("Parou o server!!")
return False
try:
self.conn.listen()
conn, addr = self.conn.accept()
data = conn.recv(1024)
self.update_from_esp(data)
except (socket.timeout, TimeoutError):
pass
Thread(target=self.start_listen).start()
def start_game(self, *args):
self.connected = True
self.manager.current = 'gamepad'
self.ids.lifes.clear_lifes()
self.ids.lifes.show_lifes(self.lifes)
self.players_color[self.index_player] = [0, 0, 1, 1]
def exit_game(self, *args):
self.manager.current = 'login'
self.conn = None
if not self.username:
return None
self.send_informations_with_thread('exit:save')
def update_from_esp(self, data):
resp = data.decode('utf-8').strip("\n").split(":")
if len(resp) < 1:
return None
if resp[0] == 'life':
if self.lifes != int(resp[1]):
self.lifes = int(resp[1])
self.ids.lifes.clear_lifes()
self.ids.lifes.show_lifes(self.lifes)
if self.lifes == 0:
print("MORRI!!")
self.connected = True
print(resp)
if resp[2] == 'pos':
dconv = lambda pos: [dp(float(pos[0])), dp(float(pos[1]))]
self.pos_players = tuple(map(lambda p: dconv(p.split(',')), resp[3:-1]))
else:
print(resp)
def get_json(self, name, *args):
with open(config_path(name), 'r', encoding='utf-8') as file:
return json.load(file)
def reload_json_position(self, default_name, update_name, *args):
default = self.get_json(name=default_name)
self.update_json(default, name=update_name)
self.load_json_pos()
def update_json(self, new_json, name):
with open(config_path(name), 'w', encoding='utf-8') as file:
file.write(json.dumps(new_json, indent=4))
def load_json_pos(self, *args):
dic_hints = self.get_json('position')
for id, hints in dic_hints.items():
wid = getattr(self.ids, id)
setattr(wid, 'name', id)
setattr(wid, 'hint_x', hints['hint_x'])
setattr(wid, 'hint_y', hints['hint_y'])
setattr(wid, 'width', dp(hints['width']))
setattr(wid, 'height', dp(hints['height']))
def connect_to_esp(self, force=False, *args):
if not self.connected and not force:
return None
esp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
esp.settimeout(2)
try:
esp.connect((gateway_esp, port_esp))
except (socket.timeout, TimeoutError, OSError):
print('Não foi conectar ao ESP8266!!')
return None
return esp
def close_connection_esp(self, esp, *args):
esp.close()
def send_informations(self, msg, *args):
esp = self.connect_to_esp()
if esp is None:
self.can_move = True
return None
try:
esp.send(f'{self.index_player}:{msg}\n'.encode('utf-8'))
if msg.find('atk') != -1:
print('Atacou!!')
elif msg.find('mov') != -1:
print('Moveu!!')
Clock.schedule_once(lambda *a: setattr(self, 'can_move', True), 0.01)
elif msg.find('exit') != -1:
self.connected = False
print(self.username + " saiu!")
self.username = ''
except (ConnectionAbortedError, socket.timeout, TimeoutError):
print(f'Tentando mandar: [ {msg} ] novamente!')
dt = round(min(random(), random()), 2)
Clock.schedule_once(partial(self.send_informations, msg), dt)
self.close_connection_esp(esp)
def send_informations_with_thread(self, msg, *args):
th = Thread(target=self.send_informations, args=(msg, ))
th.start()
def update_coordinates(self, joystick, pad):
if not self.can_move:
return None
x, y = tuple(map(lambda n: round(n, 2), pad))
angle = round(joystick.angle)
self.send_informations_with_thread(f'mov:{x},{y},{angle}')
self.can_move = False
def do_ataque(self, *args):
self.send_informations_with_thread('atk:espd')
def on_move_layout(self, *args):
if not self.move_layout:
self.clear_grid()
return None
self.add_grid()
def update_grid(self, *args):
self.clear_grid(unbind=False)
self.add_grid(bind=False)
def add_grid(self, bind=True, *args):
self.clear_grid(bind)
content_pad = self.ids.content_pad
add = content_pad.canvas.before.add
add(Color(rgba=[0, 0, 1, 1], group='grid_background'))
for x in range(1, round(content_pad.width/dp(10))+1):
new_x = content_pad.x+(dp(10)*x)
add(Line(
points=[new_x, content_pad.y, new_x, content_pad.y+content_pad.height],
group='grid_background'))
for y in range(1, round(content_pad.height/dp(10))+1):
new_y = self.y+(dp(10)*y)
add(Line(
points=[content_pad.x, new_y, content_pad.x+content_pad.width, new_y],
group='grid_background'))
if bind:
self.bind(size=self.update_grid)
self.bind(pos=self.update_grid)
def clear_grid(self, unbind=True, *args):
content_pad = self.ids.content_pad
content_pad.canvas.before.remove_group('grid_background')
if unbind:
self.unbind(size=self.update_grid)
self.unbind(pos=self.update_grid)
def update_middle_line(self, *args):
self.clear_middle_line(unbind=False)
self.add_middle_line(bind=False)
def add_middle_line(self, bind=True, *args):
self.clear_middle_line(bind)
content_pad = self.ids.content_pad
add = content_pad.canvas.before.add
add(Color(rgba=[1, 0, 0, 1], group='middle_background'))
add(Line(
points=[content_pad.x+(content_pad.width/2), content_pad.y,
content_pad.x+(content_pad.width/2), content_pad.y+content_pad.height],
group='middle_background'))
if bind:
self.bind(size=self.update_middle_line)
self.bind(pos=self.update_middle_line)
def clear_middle_line(self, unbind=True, *args):
content_pad = self.ids.content_pad
content_pad.canvas.before.remove_group('middle_background')
if unbind:
self.unbind(size=self.update_middle_line)
self.unbind(pos=self.update_middle_line)
def add_line(self, pos, name, *args):
content_pad = self.ids.content_pad
add = content_pad.canvas.before.add
add(Color(rgba=[1, 0, 0, 1], group=name))
if name.endswith('x'):
pos = [pos, content_pad.y, pos, content_pad.y+content_pad.height]
elif name.endswith('y'):
pos = [content_pad.x, pos, content_pad.x+content_pad.width, pos]
add(Line(points=pos, group=name))
def remove_line(self, name, *args):
content_pad = self.ids.content_pad
content_pad.canvas.before.remove_group(name)
def animate_border_top(self, y, duration, wid, state):
if wid.last_state == state:
return y
Animation(y=y, d=duration).start(wid)
wid.last_state = state
return wid.y
| true
| true
|
790a67a98070ead48d44cdba6898f8aa501a3001
| 48,634
|
py
|
Python
|
editortools/filter.py
|
furminator/Furminator-MCPE-Tool
|
4fe247351503781db2012815c1e40e881d9e1bba
|
[
"0BSD"
] | null | null | null |
editortools/filter.py
|
furminator/Furminator-MCPE-Tool
|
4fe247351503781db2012815c1e40e881d9e1bba
|
[
"0BSD"
] | null | null | null |
editortools/filter.py
|
furminator/Furminator-MCPE-Tool
|
4fe247351503781db2012815c1e40e881d9e1bba
|
[
"0BSD"
] | null | null | null |
"""Copyright (c) 2010-2012 David Rio Vierra
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE."""
# Modified by D.C.-G. for translation purpose
import collections
import os
import traceback
import copy
from albow import FloatField, IntField, AttrRef, Row, Label, Widget, TabPanel, \
CheckBox, Column, Button, TextFieldWrapped, translate
from editortools.tooloptions import ToolOptions
from albow.extended_widgets import CheckBoxLabel
_ = translate._
import albow
from config import config
from editortools.blockview import BlockButton
from editortools.editortool import EditorTool
from glbackground import Panel
from mceutils import setWindowCaption, alertException
from albow import ChoiceButton, showProgress, TextInputRow
import mcplatform
from operation import Operation
from albow.dialogs import wrapped_label, alert, Dialog
import pymclevel
# from pymclevel import BoundingBox, MCEDIT_DEFS, MCEDIT_IDS
from pymclevel import BoundingBox
from pymclevel.id_definitions import version_defs_ids
import json
import directories
import sys
import keys
import imp
from nbtexplorer import NBTExplorerToolPanel
import logging
log = logging.getLogger(__name__)
class FilterUtils(object):
    """Grab-bag of editor helpers exposed to filter scripts.

    Every keyword argument becomes an attribute of the instance, and the
    supplied names are remembered so they can be listed later.
    """
    def __init__(self, **kwargs):
        self._given_data = []
        for name, value in kwargs.items():
            self._given_data.append(name)
            self.__dict__[name] = value
    def Available_Attributes(self):
        """Return the list of attribute names given at construction."""
        return self._given_data
def alertFilterException(func):
    """Decorator: run *func*; on any exception dump the traceback to the
    console and show an alert dialog instead of propagating."""
    def _func(*args, **kw):
        try:
            func(*args, **kw)
        except Exception as e:
            # traceback.print_exc() is valid on both Python 2 and 3,
            # unlike the former `print traceback.format_exc()` statement
            # which is a SyntaxError under Python 3.
            traceback.print_exc()
            alert(_(u"Exception during filter operation. See console for details.\n\n{0}").format(e))
    return _func
def addNumField(page, optionName, oName, val, min_value=None, max_value=None, increment=0.1):
    """Build a numeric input row (label + field) and register it on *page*.

    A FloatField is used when *val* is a float, otherwise an IntField,
    and the step increment is coerced to the field's numeric type.  The
    field's value is exposed through page.optionDict[optionName].
    Returns the Row widget to place on the page.
    """
    if isinstance(val, float):
        field_type = FloatField
        # Float fields need a float step even if the caller passed an int.
        if isinstance(increment, int):
            increment = float(increment)
    else:
        field_type = IntField
        # 0.1 is the untouched default; int fields step by 1 instead.
        if increment == 0.1:
            increment = 1
        # Any other float step is rounded to the nearest int.
        if isinstance(increment, float):
            increment = int(round(increment))
    if min_value == max_value:
        # Equal bounds mean "unbounded".
        min_value = None
        max_value = None
    field = field_type(value=val, width=200, min=min_value, max=max_value)
    field._increment = increment
    page.optionDict[optionName] = AttrRef(field, 'value')
    row = Row([Label(oName, doNotTranslate=True), field])
    return row
class JsonDictProperty(dict):
    """Dict-like wrapper that persists every mutation straight to a JSON
    file on disk rather than keeping state in memory.

    Reads always re-parse the file; a missing/corrupt file behaves as an
    empty mapping with a "Macros" key.
    """
    def __init__(self, filename, **kwargs):
        super(JsonDictProperty, self).__init__(**kwargs)
        self._filename = filename
    def __setitem__(self, key, value):
        contents = self._getJson()
        contents[key] = value
        self._putJson(contents)
    def __getitem__(self, key):
        return self._getJson()[key]
    def __delitem__(self, key):
        contents = self._getJson()
        contents.pop(key)
        self._putJson(contents)
    def _putJson(self, data):
        # Write the whole mapping back out on every mutation.
        with open(self._filename, 'wb') as f:
            json.dump(data, f)
    def _getJson(self):
        handle = None
        try:
            handle = open(self._filename, 'rb')
            loaded = json.load(handle)
        except (ValueError, IOError):
            # Unreadable or invalid JSON: behave as an empty store.
            return {"Macros": {}}
        finally:
            if handle:
                handle.close()
        # A "Macros" section is always guaranteed to exist.
        loaded.setdefault("Macros", {})
        return loaded
class SingleFileChooser(Widget):
    """Button widget that lets the user pick (open) or name (save) one file.

    The chosen path lands in ``self.file_path`` (None when the dialog is
    cancelled) and the button label mirrors the chosen file name.
    """
    # Dialog modes accepted by __init__'s *operation* argument.
    OPEN_FILE = 0
    SAVE_FILE = 1
    def _open_file(self):
        """Show an open-file dialog and record the selection."""
        file_types = []
        for f_type in self.file_types:
            # askOpenFile expects bare suffixes, e.g. "*.txt" -> "txt".
            file_types.append(f_type.replace("*.", ""))
        file_path = mcplatform.askOpenFile(title='Select a file...', suffixes=file_types)
        if file_path:
            self._button.set_text("{filen}".format(filen=os.path.basename(file_path)), True)
            self.file_path = file_path
        else:
            self._button.set_text("Choose a file")
            self.file_path = None
        self._button.shrink_wrap()
        self.shrink_wrap()
    def _save_file(self):
        """Show a save-file dialog and record the chosen target path."""
        # Windows-style NUL-separated filter string.
        file_types = 'Custom File\0{}\0'.format(';'.join(self.file_types))
        # NOTE(review): assumes patterns look like "*.ext"; the [1:]
        # below strips the leading "*" — confirm for patterns without it.
        if not self.file_types[0].startswith("*"):
            name = self.file_types[0]
        else:
            name = self.file_types[0][1:]
        file_path = mcplatform.askSaveFile(".", "Save as...", name, file_types, self.file_types[0][1:])
        if file_path:
            self._button.set_text(os.path.basename(file_path), True)
            self.file_path = file_path
        else:
            self._button.set_text('Save a file')
            self.file_path = None
        self._button.shrink_wrap()
        self.shrink_wrap()
    def __init__(self, file_types=None, operation=0, **kwds):
        """Create the chooser.

        file_types: list of glob patterns; defaults to ["*.*"].
        operation: OPEN_FILE or SAVE_FILE (anything else leaves the
            widget without a button, and ``add(None)`` would fail).
        """
        Widget.__init__(self, **kwds)
        if file_types is None:
            self.file_types = ["*.*",]
        else:
            self.file_types = file_types
        self.file_path = None
        self._button = None
        if operation == self.OPEN_FILE:
            self._button = Button("Choose a file", action=self._open_file)
        elif operation == self.SAVE_FILE:
            self._button = Button("Save a file", action=self._save_file)
        self.add(self._button)
        self.shrink_wrap()
class MacroModuleOptions(Widget):
    """Read-only options panel shown when a recorded macro is selected.

    Lists the macro's steps; ``confirm`` replays every step over the
    current selection as a single MacroOperation.
    """
    is_gl_container = True
    def __init__(self, macro_data, *args, **kw):
        # macro_data: dict with a "Number of steps" entry plus one entry
        # per step index, each holding {"Name": ..., "Inputs": {...}}.
        self._parent = None
        self._macro_data = macro_data
        if '_parent' in kw.keys():
            self._parent = kw.pop('_parent')
        Widget.__init__(self, *args, **kw)
        infoColList = []
        stepsLabel = wrapped_label("Number of steps: %s" % macro_data["Number of steps"], 300)
        infoColList.append(stepsLabel)
        for step in sorted(macro_data.keys()):
            if step != "Number of steps":
                infoColList.append(wrapped_label("Step %s: %s" % (int(step) + 1, macro_data[step]["Name"]), 300))
        self.add(Column(infoColList))
        self.shrink_wrap()
    @property
    def options(self):
        # Macros expose no editable options; kept for interface parity
        # with FilterModuleOptions.
        return {}
    @options.setter
    def options(self, value):
        pass
    def run(self):
        pass
    @alertFilterException
    def confirm(self, tool):
        """Replay the macro's filters over the current selection box."""
        with setWindowCaption("Applying Macro..."):
            options = []
            filters = []
            for step in sorted(self._macro_data.keys()):
                if step != "Number of steps":
                    filters.append(tool.filterModules[self._macro_data[step]["Name"]])
                    for module_input in self._macro_data[step]["Inputs"].keys():
                        # Inputs stored as "block-<id>:<data>" strings are
                        # resolved back into Block objects before running.
                        if not isinstance(self._macro_data[step]["Inputs"][module_input], (str, unicode)):
                            continue
                        if not self._macro_data[step]["Inputs"][module_input].startswith("block-"):
                            continue
                        toFind = self._macro_data[step]["Inputs"][module_input][6:].split(":")
                        block = tool.editor.materials.get((toFind[0], toFind[1]))
                        self._macro_data[step]["Inputs"][module_input] = block
                    options.append(self._macro_data[step]["Inputs"])
            op = MacroOperation(tool.editor, tool.editor.level, tool.selectionBox(), filters, options)
            tool.editor.level.showProgress = showProgress
            tool.editor.addOperation(op)
            tool.editor.addUnsavedEdit()
            tool.editor.invalidateBox(tool.selectionBox())
class FilterModuleOptions(Widget):
is_gl_container = True
def __init__(self, tool, module, *args, **kw):
self._parent = None
self.nbttree = None
self.module = module
if '_parent' in kw.keys():
self._parent = kw.pop('_parent')
Widget.__init__(self, *args, **kw)
self.spacing = 2
self.tool = tool
self.pages = pages = TabPanel()
pages.is_gl_container = True
self.optionDict = {}
self.giveEditorObject(module)
log.info("Creating options for " + str(module))
if hasattr(module, "inputs"):
trn = getattr(module, "trn", None)
self.trn = trn
if isinstance(module.inputs, list):
self.pgs = []
for tabData in module.inputs:
title, page, pageRect = self.makeTabPage(self.tool, tabData, trn=trn)
self.pgs.append((title, page))
pages.set_parent(None)
self.pages = pages = TabPanel(self.pgs)
elif isinstance(module.inputs, tuple):
title, page, pageRect = self.makeTabPage(self.tool, module.inputs, trn=trn)
pages.add_page(title, page)
pages.set_rect(pageRect)
else:
self.size = (0, 0)
pages.shrink_wrap()
self.add(pages)
self.shrink_wrap()
if len(pages.pages):
if pages.current_page is not None:
pages.show_page(pages.current_page)
else:
pages.show_page(pages.pages[0])
for eachPage in pages.pages:
self.optionDict = dict(self.optionDict.items() + eachPage.optionDict.items())
def rebuildTabPage(self, inputs, **kwargs):
title, page, rect = self.makeTabPage(self.tool, inputs, self.trn, **kwargs)
for i, t, p, s, r in self.pages.iter_tabs():
if t == title:
self.pages.remove_page(p)
self.pages.add_page(title, page, idx=i)
self.pages.show_page(page)
break
def makeTabPage(self, tool, inputs, trn=None, **kwargs):
page = Widget(**kwargs)
page.is_gl_container = True
rows = []
cols = []
max_height = tool.editor.mainViewport.height - tool.editor.toolbar.height - tool.editor.subwidgets[0].height -\
self._parent.filterSelectRow.height - self._parent.confirmButton.height - self.pages.tab_height
page.optionDict = {}
page.tool = tool
title = "Tab"
for optionSpec in inputs:
optionName = optionSpec[0]
optionType = optionSpec[1]
if trn is not None:
n = trn._(optionName)
else:
n = optionName
if n == optionName:
oName = _(optionName)
else:
oName = n
if isinstance(optionType, tuple):
if isinstance(optionType[0], (int, long, float)):
if len(optionType) == 3:
val, min, max = optionType
increment = 0.1
elif len(optionType) == 2:
min, max = optionType
val = min
increment = 0.1
else:
val, min, max, increment = optionType
rows.append(addNumField(page, optionName, oName, val, min, max, increment))
if isinstance(optionType[0], (str, unicode)):
isChoiceButton = False
if optionType[0] == "string":
kwds = []
wid = None
val = None
for keyword in optionType:
if isinstance(keyword, (str, unicode)) and keyword != "string":
kwds.append(keyword)
for keyword in kwds:
splitWord = keyword.split('=')
if len(splitWord) > 1:
v = None
try:
v = int(splitWord[1])
except ValueError:
pass
key = splitWord[0]
if v is not None:
if key == "width":
wid = v
else:
if key == "value":
val = "=".join(splitWord[1:])
if val is None:
val = ""
if wid is None:
wid = 200
field = TextFieldWrapped(value=val, width=wid)
page.optionDict[optionName] = AttrRef(field, 'value')
row = Row((Label(oName, doNotTranslate=True), field))
rows.append(row)
elif optionType[0] == "block":
blockButton = BlockButton(tool.editor.level.materials)
try:
blockButton.blockInfo = tool.editor.level.materials[optionType[1]]
except AttributeError:
blockButton.blockInfo = tool.editor.level.materials[0]
except KeyError:
if tool.editor.level.materials == pymclevel.pocketMaterials:
blockButton.blockInfo = pymclevel.alphaMaterials[optionType[1]]
else:
raise
row = Column((Label(oName, doNotTranslate=True), blockButton))
page.optionDict[optionName] = AttrRef(blockButton, 'blockInfo')
rows.append(row)
elif optionType[0] == "file-save":
if len(optionType) == 2:
file_chooser = SingleFileChooser(file_types=optionType[1], operation=SingleFileChooser.SAVE_FILE)
else:
file_chooser = SingleFileChooser(operation=SingleFileChooser.SAVE_FILE)
row = Row((Label(oName, doNotTranslate=True), file_chooser))
page.optionDict[optionName] = AttrRef(file_chooser, 'file_path')
rows.append(row)
elif optionType[0] == "file-open":
if len(optionType) == 2:
file_chooser = SingleFileChooser(file_types=optionType[1], operation=SingleFileChooser.OPEN_FILE)
else:
file_chooser = SingleFileChooser(operation=SingleFileChooser.OPEN_FILE)
row = Row((Label(oName, doNotTranslate=True), file_chooser))
page.optionDict[optionName] = AttrRef(file_chooser, 'file_path')
rows.append(row)
else:
isChoiceButton = True
if isChoiceButton:
if trn is not None:
__ = trn._
else:
__ = _
choices = [__("%s" % a) for a in optionType]
choiceButton = ChoiceButton(choices, doNotTranslate=True)
page.optionDict[optionName] = AttrRef(choiceButton, 'selectedChoice')
rows.append(Row((Label(oName, doNotTranslate=True), choiceButton)))
elif isinstance(optionType, bool):
cbox = CheckBox(value=optionType)
page.optionDict[optionName] = AttrRef(cbox, 'value')
row = Row((Label(oName, doNotTranslate=True), cbox))
rows.append(row)
elif isinstance(optionType, (int, float)):
rows.append(addNumField(self, optionName, oName, optionType))
elif optionType == "blocktype" or isinstance(optionType, pymclevel.materials.Block):
blockButton = BlockButton(tool.editor.level.materials)
if isinstance(optionType, pymclevel.materials.Block):
blockButton.blockInfo = optionType
row = Column((Label(oName, doNotTranslate=True), blockButton))
page.optionDict[optionName] = AttrRef(blockButton, 'blockInfo')
rows.append(row)
elif optionType == "label":
rows.append(wrapped_label(oName, 50, doNotTranslate=True))
elif optionType == "string":
inp = None
# not sure how to pull values from filters,
# but leaves it open for the future. Use this variable to set field width.
if inp is not None:
size = inp
else:
size = 200
field = TextFieldWrapped(value="")
row = TextInputRow(oName, ref=AttrRef(field, 'value'), width=size, doNotTranslate=True)
page.optionDict[optionName] = AttrRef(field, 'value')
rows.append(row)
elif optionType == "title":
title = oName
elif optionType == "file-save":
file_chooser = SingleFileChooser(operation=SingleFileChooser.SAVE_FILE)
row = Row((Label(oName, doNotTranslate=True), file_chooser))
page.optionDict[optionName] = AttrRef(file_chooser, 'file_path')
rows.append(row)
elif optionType == "file-open":
file_chooser = SingleFileChooser(operation=SingleFileChooser.OPEN_FILE)
row = Row((Label(oName, doNotTranslate=True), file_chooser))
page.optionDict[optionName] = AttrRef(file_chooser, 'file_path')
rows.append(row)
elif isinstance(optionType, list) and optionType[0].lower() == "nbttree":
kw = {'close_text': None, 'load_text': None}
if len(optionType) >= 3:
def close():
self.pages.show_page(self.pages.pages[optionType[2]])
kw['close_action'] = close
kw['close_text'] = "Go Back"
if len(optionType) >= 4:
if optionType[3]:
kw['load_text'] = optionType[3]
if hasattr(self.module, 'nbt_ok_action'):
kw['ok_action'] = getattr(self.module, 'nbt_ok_action')
self.nbttree = NBTExplorerToolPanel(self.tool.editor, nbtObject=optionType[1],
height=max_height, no_header=True, copy_data=False, **kw)
self.module.set_tree(self.nbttree.tree)
for meth_name in dir(self.module):
if meth_name.startswith('nbttree_'):
setattr(self.nbttree.tree.treeRow, meth_name.split('nbttree_')[-1],
getattr(self.module, meth_name))
# elif meth_name.startswith('nbt_'):
# setattr(self.nbttree, meth_name.split('nbt_')[-1], getattr(self.module, meth_name))
page.optionDict[optionName] = AttrRef(self, 'rebuildTabPage')
rows.append(self.nbttree)
self.nbttree.page = len(self.pgs)
else:
raise ValueError(("Unknown option type", optionType))
height = sum(r.height for r in rows) + (len(rows) - 1) * self.spacing
if height > max_height:
h = 0
for i, r in enumerate(rows):
h += r.height
if h > height / 2:
if rows[:i]:
cols.append(Column(rows[:i], spacing=0))
rows = rows[i:]
break
if len(rows):
cols.append(Column(rows, spacing=0))
if len(cols):
page.add(Row(cols, spacing=0))
page.shrink_wrap()
return title, page, page._rect
@property
def options(self):
options = {}
for k, v in self.optionDict.iteritems():
options[k] = v.get() if not isinstance(v.get(), pymclevel.materials.Block) else copy.copy(v.get())
if self.pages.current_page is not None:
options["__page_index__"] = self.pages.pages.index(self.pages.current_page)
return options
@options.setter
def options(self, val):
for k in val:
if k in self.optionDict:
self.optionDict[k].set(val[k])
index = val.get("__page_index__", -1)
if len(self.pages.pages) > index > -1:
self.pages.show_page(self.pages.pages[index])
def giveEditorObject(self, module):
module.editor = self.tool.editor
@staticmethod
def confirm(tool):
with setWindowCaption("Applying Filter... - "):
filterModule = tool.filterModules[tool.panel.filterSelect.selectedChoice]
op = FilterOperation(tool.editor, tool.editor.level, tool.selectionBox(), filterModule,
tool.panel.filterOptionsPanel.options)
tool.editor.level.showProgress = showProgress
tool.editor.addOperation(op)
tool.editor.addUnsavedEdit()
tool.editor.invalidateBox(tool.selectionBox())
class FilterToolPanel(Panel):
BACKUP_FILTER_JSON = False
"""If set to true, the filter.json is backed up to the hard disk
every time it's edited. The default is false, which makes the file save
only whenever the tool gets closed. If MCEdit were to crash, any recorded
macros would not be saved."""
def __init__(self, tool):
Panel.__init__(self, name='Panel.FilterToolPanel')
self.macro_steps = []
self.current_step = 0
self._filter_json = None
self.keys_panel = None
self.filterOptionsPanel = None
self.filterSelect = ChoiceButton([], choose=self.filterChanged, doNotTranslate=True)
self.binding_button = Button("", action=self.bind_key,
tooltipText="Click to bind this filter to a key")
self.filterLabel = Label("Filter:", fg_color=(177, 177, 255, 255))
self.filterLabel.mouse_down = lambda x: mcplatform.platform_open(directories.getFiltersDir())
self.filterLabel.tooltipText = "Click to open filters folder"
self.macro_button = Button("Record Macro", action=self.start_record_macro)
self.filterSelectRow = Row((self.filterLabel, self.filterSelect,
self.macro_button, self.binding_button))
self.confirmButton = Button("Filter", action=self.confirm)
self._recording = False
self._save_macro = False
self.tool = tool
self.selectedName = self.filter_json.get("Last Filter Opened", "")
utils = FilterUtils(
editor=tool.editor,
materials=self.tool.editor.level.materials,
custom_widget=tool.editor.addExternalWidget,
resize_selection_box=tool.editor._resize_selection_box
)
utils_module = imp.new_module("filter_utils")
utils_module = utils
sys.modules["filter_utils"] = utils_module
@staticmethod
def load_filter_json():
filter_json_file = os.path.join(directories.getDataDir(), "filters.json")
filter_json = {}
if FilterToolPanel.BACKUP_FILTER_JSON:
filter_json = JsonDictProperty(filter_json_file)
else:
fp = None
try:
if os.path.exists(filter_json_file):
fp = open(filter_json_file, 'rb')
filter_json = json.load(fp)
except (ValueError, IOError) as e:
log.error("Error while loading filters.json %s", e)
finally:
if fp:
fp.close()
if "Macros" not in filter_json.keys():
filter_json["Macros"] = {}
return filter_json
@property
def filter_json(self):
if self._filter_json is None:
self._filter_json = FilterToolPanel.load_filter_json()
return self._filter_json
def close(self):
self._saveOptions()
self.filter_json["Last Filter Opened"] = self.selectedName
if not FilterToolPanel.BACKUP_FILTER_JSON:
with open(os.path.join(directories.getDataDir(), "filters.json"), 'w') as f:
json.dump(self.filter_json, f)
def reload(self):
    """Rebuild the panel UI: filter list, key-binding button, options page.

    Fix: the emptiness test used ``len(...) is 0`` — an *identity* check
    that only works because CPython caches small ints; replaced with a
    proper equality comparison.
    """
    for i in list(self.subwidgets):
        self.remove(i)
    tool = self.tool
    # Display "No filter modules found" if there are no filters
    if len(tool.filterModules) == 0:
        self.add(Label("No filter modules found!"))
        self.shrink_wrap()
        return
    names_list = sorted([n for n in tool.filterNames if not n.startswith("[")])
    # We get a list of names like ["[foo] bar", "[test] thing"]
    # The key to sort on is created by splitting on "[": "[foo", " bar" and then
    # removing the first char: "foo", "bar"
    subfolder_names_list = sorted([n for n in tool.filterNames if n.startswith("[")],
                                  key=lambda x: x.split("]")[0][1:])
    names_list.extend(subfolder_names_list)
    names_list.extend([macro for macro in self.filter_json["Macros"].keys()])
    if self.selectedName is None or self.selectedName not in names_list:
        self.selectedName = names_list[0]
    # Remove any keybindings that don't have a filter
    for (i, j) in config.config.items("Filter Keys"):
        if i == "__name__":
            continue
        if not any([i == m.lower() for m in names_list]):
            config.config.remove_option("Filter Keys", i)
    self.filterSelect.choices = names_list
    name = self.selectedName.lower()
    names = [k for (k, v) in config.config.items("Filter Keys")]
    btn_name = config.config.get("Filter Keys", name) if name in names else "*"
    self.binding_button.set_text(btn_name)
    self.filterOptionsPanel = None
    # Keep trying filters until one builds an options panel successfully;
    # broken modules are dropped from the pool.
    while self.filterOptionsPanel is None:
        module = self.tool.filterModules.get(self.selectedName, None)
        if module is not None:
            try:
                self.filterOptionsPanel = FilterModuleOptions(self.tool, module, _parent=self)
            except Exception as e:
                alert(_("Error creating filter inputs for {0}: {1}").format(module, e))
                traceback.print_exc()
                self.tool.filterModules.pop(self.selectedName)
                self.selectedName = tool.filterNames[0]
            if len(tool.filterNames) == 0:
                raise ValueError("No filters loaded!")
            if not self._recording:
                self.confirmButton.set_text("Filter")
        else:  # We verified it was an existing macro already
            macro_data = self.filter_json["Macros"][self.selectedName]
            self.filterOptionsPanel = MacroModuleOptions(macro_data)
            self.confirmButton.set_text("Run Macro")
    # This has to be recreated every time in case a macro has a longer name than everything else.
    self.filterSelect = ChoiceButton(names_list, choose=self.filterChanged, doNotTranslate=True)
    self.filterSelect.selectedChoice = self.selectedName
    self.filterSelectRow = Row((self.filterLabel, self.filterSelect,
                                self.macro_button, self.binding_button))
    self.add(Column((self.filterSelectRow, self.filterOptionsPanel, self.confirmButton)))
    self.shrink_wrap()
    if self.parent:
        height = self.parent.mainViewport.height - self.parent.toolbar.height
        self.centery = height / 2 + self.parent.subwidgets[0].height
    if self.selectedName in self.tool.savedOptions:
        self.filterOptionsPanel.options = self.tool.savedOptions[self.selectedName]
@property
def macroSelected(self):
    """True when the current drop-down choice is a recorded macro rather
    than a loaded filter module."""
    chosen = self.filterSelect.selectedChoice
    return chosen not in self.tool.filterNames
def filterChanged(self):
    """React to a new selection in the filter drop-down.

    Saves the outgoing filter's options, then retargets the macro button:
    selecting a macro turns it into "Delete Macro"; selecting a module
    (while not recording) turns it back into "Record Macro".  Finally the
    whole panel is rebuilt for the new selection.
    """
    # if self.filterSelect.selectedChoice not in self.tool.filterModules:
    #     return
    self._saveOptions()
    self.selectedName = self.filterSelect.selectedChoice
    if self.macroSelected:  # Is macro
        self.macro_button.set_text("Delete Macro")
        self.macro_button.action = self.delete_macro
    elif not self._recording:
        self.macro_button.set_text("Record Macro")
        self.macro_button.action = self.start_record_macro
    self.reload()
def delete_macro(self):
    """Delete the currently selected macro and refresh the panel."""
    doomed = self.selectedName
    if doomed in self.filter_json["Macros"]:
        del self.filter_json["Macros"][doomed]
    remaining = self.filterSelect.choices
    if len(remaining) == 1:  # Just this macro available
        self.reload()
        return
    # Move the selection off the deleted entry before rebuilding.
    if remaining[0] != doomed:
        self.filterSelect.selectedChoice = remaining[0]
    else:
        self.filterSelect.selectedChoice = remaining[1]
    self.filterChanged()
def stop_record_macro(self):
    """End macro recording: ask for a name, then persist the steps.

    Pops a small dialog with a name field.  "Save" serializes each
    recorded step into filter_json["Macros"]; "Cancel" (or a completed
    save) resets the recording state via stop_dialog().
    """
    macro_dialog = Dialog()
    macroNameLabel = Label("Macro Name: ")
    macroNameField = TextFieldWrapped(width=200)

    def save_macro():
        # Macro names carry a "{Macro} " prefix to distinguish them from
        # real filter module names in the drop-down.
        macro_name = "{Macro} " + macroNameField.get_text()
        self.filter_json["Macros"][macro_name] = {}
        self.filter_json["Macros"][macro_name]["Number of steps"] = len(self.macro_steps)
        self.filterSelect.choices.append(macro_name)
        for entry in self.macro_steps:
            # Block inputs aren't JSON-serializable; encode them as
            # "block-ID:data" strings (decoded again in MacroModuleOptions.confirm).
            for inp in entry["Inputs"].keys():
                if not isinstance(entry["Inputs"][inp], pymclevel.materials.Block):
                    if not entry["Inputs"][inp] == "blocktype":
                        continue
                _inp = entry["Inputs"][inp]
                entry["Inputs"][inp] = "block-{0}:{1}".format(_inp.ID, _inp.blockData)
            self.filter_json["Macros"][macro_name][entry["Step"]] = {"Name": entry["Name"],
                                                                     "Inputs": entry["Inputs"]}
        stop_dialog()
        self.filterSelect.selectedChoice = macro_name
        self.filterChanged()

    def stop_dialog():
        # Restore the idle (non-recording) UI state.
        self.macro_button.text = "Record Macro"
        self.macro_button.tooltipText = None
        self.macro_button.action = self.start_record_macro
        macro_dialog.dismiss()
        self.macro_steps = []
        self.current_step = 0
        self._recording = False

    input_row = Row((macroNameLabel, macroNameField))
    saveButton = Button("Save", action=save_macro)
    closeButton = Button("Cancel", action=stop_dialog)
    button_row = Row((saveButton, closeButton))
    macro_dialog.add(Column((input_row, button_row)))
    macro_dialog.shrink_wrap()
    macro_dialog.present()
def start_record_macro(self):
    """Switch the panel into macro-recording mode.

    Repurposes the macro button as a stop control and the confirm button
    as "Add macro"; subsequent confirm() calls append steps instead of
    running the filter.
    """
    self.macro_button.text = "Stop recording"
    self.macro_button.tooltipText = "Currently recording a macro"
    self.macro_button.action = self.stop_record_macro
    self.confirmButton.text = "Add macro"
    # "Add macro" is wider than "Filter"; grow and recenter the button.
    self.confirmButton.width += 75
    self.confirmButton.centerx = self.centerx
    self._recording = True
def _addMacroStep(self, name=None, inputs=None):
data = {"Name": name, "Step": self.current_step, "Inputs": inputs}
self.current_step += 1
self.macro_steps.append(data)
def unbind_key(self):
    """Remove the selected filter's key binding and refresh the panel.

    Invoked from the "Press to unbind" button inside the bind_key panel;
    dismissing keys_panel also ends the pending bind_key() call.
    """
    config.config.remove_option("Filter Keys", self.selectedName)
    self.binding_button.text = "*"
    self.keys_panel.dismiss()
    # self.saveOptions()
    self.reload()
def bind_key(self, message=None):
    """Prompt for a key (or mouse button) to bind to the selected filter.

    Shows a modal panel that captures the next key/button press.  Invalid
    or already-used keys recurse into bind_key() with an explanatory
    message; Escape cancels.  On success the binding is written to the
    "Filter Keys" config section.

    :param message: prompt text; defaults to the standard instructions.
    """
    panel = Panel(name='Panel.FilterToolPanel.bind_key')
    panel.bg_color = (0.5, 0.5, 0.6, 1.0)
    if not message:
        message = _("Press a key to assign to the filter \"{0}\"\n\n"
                    "Press ESC to cancel.").format(self.selectedName)
    label = albow.Label(message)
    unbind_button = Button("Press to unbind", action=self.unbind_key)
    column = Column((label, unbind_button))
    panel.add(column)
    panel.shrink_wrap()

    def panelKeyUp(evt):
        _key_name = self.root.getKey(evt)
        panel.dismiss(_key_name)

    def panelMouseUp(evt):
        # Extra mouse buttons / scroll wheel can also serve as bindings.
        button = keys.remapMouseButton(evt.button)
        _key_name = None
        if button == 3:
            _key_name = "Button 3"
        elif button == 4:
            _key_name = "Scroll Up"
        elif button == 5:
            _key_name = "Scroll Down"
        elif button == 6:
            _key_name = "Button 4"
        elif button == 7:
            _key_name = "Button 5"
        if 2 < button < 8:
            panel.dismiss(_key_name)

    panel.key_up = panelKeyUp
    panel.mouse_up = panelMouseUp
    self.keys_panel = panel
    key_name = panel.present()
    # A bool result means the panel was dismissed without a key choice
    # (e.g. via the unbind button).
    if isinstance(key_name, bool):
        return True
    if key_name != "Escape":
        # Reserved application shortcuts can never be bound to a filter.
        if key_name in ["Alt-F4", "F1", "F2", "F3", "F4", "F5", "1", "2", "3",
                        "4", "5", "6", "7", "8", "9", "Ctrl-Alt-F9", "Ctrl-Alt-F10"]:
            self.bind_key(_("You can't use the key {0}.\n"
                            "Press a key to assign to the filter \"{1}\"\n\n"
                            ""
                            "Press ESC to cancel.").format(_(key_name), self.selectedName))
            return True
        keysUsed = [(j, i) for (j, i) in config.config.items("Keys") if i == key_name]
        if keysUsed:
            self.bind_key(_("Can't bind. {0} is already used by {1}.\n"
                            "Press a key to assign to the filter \"{2}\"\n\n"
                            ""
                            "Press ESC to cancel.").format(_(key_name), keysUsed[0][0], self.selectedName))
            return True
        filter_keys = [i for (i, j) in config.config.items("Filter Keys") if j == key_name]
        if filter_keys:
            self.bind_key(_("Can't bind. {0} is already used by the \"{1}\" filter.\n"
                            "Press a new key.\n\n"
                            ""
                            "Press ESC to cancel.").format(_(key_name), filter_keys[0]))
            return True
        config.config.set("Filter Keys", self.selectedName.lower(), key_name)
        config.save()
        self.reload()
def _saveOptions(self):
"""Should never be called. Call filterchanged() or close() instead,
which will then call this.
:return:
"""
if self.filterOptionsPanel is not None:
options = {}
options.update(self.filterOptionsPanel.options)
options.pop("", "")
self.tool.savedOptions[self.selectedName] = options
@alertFilterException
def confirm(self):
    """Confirm-button handler: while recording, append the current filter
    and its options as a macro step; otherwise run the selected filter
    (or macro) through its options panel."""
    if self._recording:
        self._addMacroStep(self.selectedName, self.filterOptionsPanel.options)
    else:
        self.filterOptionsPanel.confirm(self.tool)
class FilterOperation(Operation):
    """Undoable editor operation that runs a single filter over a selection."""

    def __init__(self, editor, level, box, filter, options):
        super(FilterOperation, self).__init__(editor, level)
        self.box = box
        self.filter = filter
        self.options = options
        # Only becomes True once perform() completes successfully.
        self.canUndo = False

    def perform(self, recordUndo=True):
        """Run the filter's perform() over the box.

        Refuses to run while the level is saving.  Undo recording can be
        disabled per-filter via a RECORD_UNDO attribute on the module.
        """
        if self.level.saving:
            alert(_("Cannot perform action while saving is taking place"))
            return
        # Override 'recordUndo' with filter RECORD_UNDO.
        # Some filters, like Find, do not need to record undo stuff, since they're not changing anything
        recordUndo = getattr(self.filter, 'RECORD_UNDO', recordUndo)
        if recordUndo:
            self.undoLevel = self.extractUndo(self.level, self.box)
        # Inject the defs for blocks/entities in the module
        # Need to reimport the defs and ids to get the 'fresh' ones
        # from pymclevel import MCEDIT_DEFS, MCEDIT_IDS
        self.filter.MCEDIT_DEFS = self.level.defsIds.mcedit_defs
        self.filter.MCEDIT_IDS = self.level.defsIds.mcedit_ids
        self.filter.perform(self.level, BoundingBox(self.box), self.options)
        self.canUndo = True

    def dirtyBox(self):
        """Region the editor must re-render after the operation."""
        return self.box
class MacroOperation(Operation):
    """Undoable editor operation that runs a sequence of filters (a macro)
    over one selection box, recording a single undo snapshot for all steps."""

    def __init__(self, editor, level, box, filters, options):
        super(MacroOperation, self).__init__(editor, level)
        self._box = box
        # Parallel lists: options[i] are the inputs for filters[i].
        self.options = options
        self.filters = filters
        self.canUndo = False

    def perform(self, recordUndo=True):
        """Run every (filter, options) pair in order over the box."""
        if self.level.saving:
            alert(_("Cannot perform action while saving is taking place"))
            return
        if recordUndo:
            self.undoLevel = self.extractUndo(self.level, self._box)
        for o, f in zip(self.options, self.filters):
            f.perform(self.level, BoundingBox(self._box), o)
        self.canUndo = True

    def dirtyBox(self):
        """Region the editor must re-render after the operation."""
        return self._box
class FilterToolOptions(ToolOptions):
    """Options pane for the filter tool: a single checkbox that suppresses
    the alert dialog shown when a filter fails to import."""

    def __init__(self, tool):
        ToolOptions.__init__(self, name='Panel.FilterToolOptions')
        self.tool = tool
        # Bound to the checkbox below; read by FilterTool.reloadFilters().
        self.notifications_disabled = False
        disable_error_popup = CheckBoxLabel("Disable Error Notification",
                                            ref=AttrRef(self, 'notifications_disabled'))
        ok_button = Button("Ok", action=self.dismiss)
        col = Column((disable_error_popup, ok_button,), spacing=2)
        self.add(col)
        self.shrink_wrap()
class FilterTool(EditorTool):
    """Editor tool that discovers, imports and runs filter plugins.

    Fixes:
      * ``__init__`` now also creates ``not_imported_filters`` — the
        attribute ``reloadFilters()`` actually reads; previously it only
        existed after ``showPanel()``, so calling ``reloadFilters()``
        first raised AttributeError.
      * Duplicate display-name disambiguation appended the *module
        object* to the seen-names list instead of the name, so the
        ``while m.displayName in displayNames`` check never matched and
        duplicates were never suffixed with "_".
    """
    tooltipText = "Filter"
    toolIconName = "filter"

    def __init__(self, editor):
        EditorTool.__init__(self, editor)
        self.filterModules = {}
        # Per-filter option values, keyed by display name.
        self.savedOptions = {}
        self.filters_not_imported = []
        # Names of filter files that failed to import (read by reloadFilters).
        self.not_imported_filters = []
        self.optionsPanel = FilterToolOptions(self)

    @property
    def statusText(self):
        return "Choose a filter, then click Filter or press Enter to apply it."

    def toolEnabled(self):
        # Filters operate on a selection; no selection, no tool.
        return not (self.selectionBox() is None)

    def toolSelected(self):
        self.showPanel()

    @alertException
    def showPanel(self):
        """Create the filter panel, (re)load all filters and display it."""
        self.panel = FilterToolPanel(self)
        self.not_imported_filters = []
        self.reloadFilters()
        self.panel.reload()
        height = self.editor.mainViewport.height - self.editor.toolbar.height
        self.panel.centery = height / 2 + self.editor.subwidgets[0].height
        self.panel.left = self.editor.left
        self.editor.add(self.panel)

    def hidePanel(self):
        """Dismiss the panel, letting it persist its state first."""
        if self.panel is None:
            return
        self.panel.close()
        if self.panel.parent:
            self.panel.parent.remove(self.panel)
        self.panel = None

    def reloadFilters(self):
        """Scan the stock and user filter directories and import every
        importable .py file into self.filterModules."""
        filterFiles = []
        unicode_module_names = []
        # Tracking stock and custom filter names in order to load the translations correctly.
        stock_filters = []
        cust_filters = []

        def searchForFiltersInDir(searchFolder, stock=False):
            for root, folders, files in os.walk(os.path.join(searchFolder), True):
                filter_dir = os.path.basename(root)
                if filter_dir.startswith('demo') or filter_dir.startswith('lib'):
                    continue
                subFolderString = root.replace(searchFolder, "")
                if subFolderString.endswith(os.sep):
                    subFolderString = subFolderString[:len(os.sep)]
                if subFolderString.startswith(os.sep):
                    subFolderString = subFolderString[len(os.sep):]
                if len(subFolderString) > 0:
                    subFolderString = "[" + subFolderString + "]"
                try:
                    root = str(root)
                    if root not in sys.path:
                        sys.path.append(root)
                except UnicodeEncodeError:
                    unicode_module_names.extend([filter_name for filter_name in files])
                for possible_filter in files:
                    if possible_filter.endswith(".py"):
                        if stock:
                            stock_filters.append(possible_filter)
                            _stock = True
                        else:
                            cust_filters.append(possible_filter)
                            _stock = False
                        # Force the 'stock' parameter if the filter was found in the stock-filters directory
                        if possible_filter in stock_filters:
                            _stock = True
                        filterFiles.append((root, possible_filter, _stock, subFolderString))

        # Search first for the stock filters.
        searchForFiltersInDir(os.path.join(directories.getDataDir(), "stock-filters"), True)
        searchForFiltersInDir(directories.getFiltersDir(), False)

        filterModules = []
        org_lang = albow.translate.lang
        # If the path has unicode chars, there's no way of knowing what order to add the
        # files to the sys.modules. To fix this, we keep trying to import until we
        # fail to import all leftover files.
        shouldContinue = True
        while shouldContinue:
            shouldContinue = False
            for f in filterFiles:
                if f[1] in self.not_imported_filters:
                    continue
                module = tryImport(f[0], f[1], org_lang, f[2], f[3], f[1] in unicode_module_names, notify=(not self.optionsPanel.notifications_disabled))
                if module is None:
                    self.not_imported_filters.append(f[1])
                    continue
                filterModules.append(module)
                filterFiles.remove(f)
                shouldContinue |= True

        # Disambiguate duplicate display names by suffixing "_".
        displayNames = []
        for m in filterModules:
            while m.displayName in displayNames:
                m.displayName += "_"
            # Fix: append the *name*, not the module object, or duplicates
            # are never detected.
            displayNames.append(m.displayName)

        filterModules = filter(lambda mod: hasattr(mod, "perform"), filterModules)
        self.filterModules = collections.OrderedDict(sorted(
            [(FilterTool.moduleDisplayName(x), x) for x in filterModules],
            key=lambda module_name: (module_name[0].lower(),
                                     module_name[1])))

    @staticmethod
    def moduleDisplayName(module):
        """Human-readable name: optional "[subfolder] " prefix + capitalized,
        translated display name."""
        subFolderString = getattr(module, 'foldersForDisplayName', "")
        subFolderString = subFolderString if len(subFolderString) < 1 else subFolderString + " "
        name = getattr(module, "displayName", module.__name__)
        return subFolderString + _(name[0].upper() + name[1:])

    @property
    def filterNames(self):
        return [FilterTool.moduleDisplayName(module) for module in self.filterModules.itervalues()]
#-# WIP. Reworking on the filters translations.
#-# The 'new_method' variable is used to select the latest working code or the actual under development one.
#-# This variable must be on False when releasing unless the actual code is fully working.
new_method = True


def tryImport_old(_root, name, org_lang, stock=False, subFolderString="", unicode_name=False, notify=True):
    """Legacy filter-module importer with per-module translation loading.

    Imports ``name`` from ``_root`` (via exec for paths whose names can't
    be encoded, imp.load_source otherwise), attaches display-name and
    translation metadata, and restores the global language afterwards.
    Returns the module, or None when the import fails (optionally alerting
    the user).
    """
    with open(os.path.join(_root, name)) as module_file:
        module_name = name.split(os.path.sep)[-1].replace(".py", "")
        try:
            if unicode_name:
                # Paths that can't be str()-encoded can't go through
                # imp.load_source; exec the file into a fresh module instead.
                source_code = module_file.read()
                module = imp.new_module(module_name)
                exec (source_code, module.__dict__)
                if module_name not in sys.modules.keys():
                    sys.modules[module_name] = module
            else:
                module = imp.load_source(module_name, os.path.join(_root, name), module_file)
            module.foldersForDisplayName = subFolderString
            if not (hasattr(module, 'displayName')):
                module.displayName = module_name  # Python is awesome
            if not stock:
                # Reload the translation machinery so this filter's own
                # language files (if any) can be picked up.
                if "trn" in sys.modules.keys():
                    del sys.modules["trn"]
                if "albow.translate" in sys.modules.keys():
                    del sys.modules["albow.translate"]
                from albow import translate as trn
                if directories.getFiltersDir() in name:
                    trn_path = os.path.split(name)[0]
                else:
                    trn_path = directories.getFiltersDir()
                trn_path = os.path.join(trn_path, subFolderString[1:-1], module_name)
                module.trn = trn
                if os.path.exists(trn_path):
                    module.trn.setLangPath(trn_path)
                    module.trn.buildTranslation(config.settings.langCode.get())
            # Translate the display name with the filter's own catalog when
            # available, falling back to the global one.
            n = module.displayName
            if hasattr(module, "trn"):
                n = module.trn._(module.displayName)
            if n == module.displayName:
                n = _(module.displayName)
            module.displayName = n
            import albow.translate
            albow.translate.lang = org_lang
            return module
        except Exception as e:
            traceback.print_exc()
            if notify:
                alert(_(u"Exception while importing filter module {}. " +
                        u"See console for details.\n\n{}").format(name, e))
            return None
def tryImport_new(_root, name, org_lang, stock=False, subFolderString="", unicode_name=False, notify=True):
    """Reworked filter-module importer (see the WIP note on new_method).

    Same contract as tryImport_old, but extends the single global
    translation catalog with the filter's language files instead of
    swapping translation modules in and out of sys.modules.
    """
    with open(os.path.join(_root, name)) as module_file:
        module_name = name.split(os.path.sep)[-1].replace(".py", "")
        try:
            if unicode_name:
                # Paths that can't be str()-encoded can't go through
                # imp.load_source; exec the file into a fresh module instead.
                source_code = module_file.read()
                module = imp.new_module(module_name)
                exec (source_code, module.__dict__)
                if module_name not in sys.modules.keys():
                    sys.modules[module_name] = module
            else:
                module = imp.load_source(module_name, os.path.join(_root, name), module_file)
            module.foldersForDisplayName = subFolderString
            if not (hasattr(module, 'displayName')):
                module.displayName = module_name  # Python is awesome
            if not stock:
                # This works fine with custom filters, but the choice buttons are broken for the stock ones...
                if directories.getFiltersDir() in name:
                    trn_path = os.path.split(name)[0]
                else:
                    trn_path = directories.getFiltersDir()
                trn_path = os.path.join(trn_path, subFolderString[1:-1], module_name)
                if os.path.exists(trn_path):
                    albow.translate.buildTranslation(config.settings.langCode.get(), extend=True, langPath=trn_path)
            # module.trn = albow.translate
            module.displayName = _(module.displayName)
            module.trn = albow.translate
            return module
        except Exception as e:
            traceback.print_exc()
            if notify:
                alert(_(u"Exception while importing filter module {}. " +
                        u"See console for details.\n\n{}").format(name, e))
            return None
# Select which importer implementation the rest of the module uses.
tryImport = tryImport_new if new_method else tryImport_old
| 40.697908
| 153
| 0.562857
|
"""Copyright (c) 2010-2012 David Rio Vierra
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE."""
import collections
import os
import traceback
import copy
from albow import FloatField, IntField, AttrRef, Row, Label, Widget, TabPanel, \
CheckBox, Column, Button, TextFieldWrapped, translate
from editortools.tooloptions import ToolOptions
from albow.extended_widgets import CheckBoxLabel
_ = translate._
import albow
from config import config
from editortools.blockview import BlockButton
from editortools.editortool import EditorTool
from glbackground import Panel
from mceutils import setWindowCaption, alertException
from albow import ChoiceButton, showProgress, TextInputRow
import mcplatform
from operation import Operation
from albow.dialogs import wrapped_label, alert, Dialog
import pymclevel
from pymclevel import BoundingBox
from pymclevel.id_definitions import version_defs_ids
import json
import directories
import sys
import keys
import imp
from nbtexplorer import NBTExplorerToolPanel
import logging
log = logging.getLogger(__name__)
class FilterUtils(object):
    """Attribute bag exposed to filter modules as the ``filter_utils``
    pseudo-module: every keyword argument becomes an attribute."""

    def __init__(self, **kwargs):
        # Remember which names were supplied so filters can introspect them.
        self._given_data = list(kwargs)
        self.__dict__.update(kwargs)

    def Available_Attributes(self):
        """Return the list of attribute names supplied at construction."""
        return self._given_data
def alertFilterException(func):
    """Decorator: run *func*, alerting instead of crashing on any exception.

    Used on filter entry points so a buggy filter can't take the editor
    down; the full traceback still goes to the console.  Note: the wrapper
    discards *func*'s return value.
    """
    def _func(*args, **kw):
        try:
            func(*args, **kw)
        except Exception as e:
            print traceback.format_exc()
            alert(_(u"Exception during filter operation. See console for details.\n\n{0}").format(e))

    return _func
def addNumField(page, optionName, oName, val, min_value=None, max_value=None, increment=0.1):
    """Build a labelled numeric input row for a filter option page.

    Floats get a FloatField, ints an IntField, with the increment coerced
    to the matching numeric type.  The field is registered in
    page.optionDict under *optionName*; the assembled Row is returned.
    """
    if isinstance(val, float):
        field_type = FloatField
        if isinstance(increment, int):
            increment = float(increment)
    else:
        field_type = IntField
        if increment == 0.1:
            # The default float step makes no sense for an int field.
            increment = 1
        if isinstance(increment, float):
            increment = int(round(increment))
    if min_value == max_value:
        # Equal bounds mean "unbounded" by convention.
        min_value = max_value = None
    input_field = field_type(value=val, width=200, min=min_value, max=max_value)
    input_field._increment = increment
    page.optionDict[optionName] = AttrRef(input_field, 'value')
    return Row([Label(oName, doNotTranslate=True), input_field])
class JsonDictProperty(dict):
    """Dict-like proxy that reads and writes a JSON file on every access.

    The backing file always contains a "Macros" key; a missing or corrupt
    file behaves as ``{"Macros": {}}``.

    Fixes:
      * ``keys()``, ``__contains__`` and ``get()`` previously fell through
        to the inherited (always empty) in-memory dict, so callers like
        ``load_filter_json`` saw no "Macros" key and clobbered saved
        macros, and ``get("Last Filter Opened", "")`` always returned the
        default.  They now consult the backing file.
      * Files are opened in text mode: json.dump emits str, which breaks
        in binary mode under Python 3 (byte-compatible on Python 2).
    """

    def __init__(self, filename, **kwargs):
        super(JsonDictProperty, self).__init__(**kwargs)
        self._filename = filename

    def __setitem__(self, key, value):
        # Read-modify-write: keep the file authoritative.
        data = self._getJson()
        data[key] = value
        self._putJson(data)

    def __getitem__(self, key):
        return self._getJson()[key]

    def __delitem__(self, key):
        data = self._getJson()
        del data[key]
        self._putJson(data)

    def __contains__(self, key):
        return key in self._getJson()

    def keys(self):
        return self._getJson().keys()

    def get(self, key, default=None):
        return self._getJson().get(key, default)

    def _putJson(self, data):
        with open(self._filename, 'w') as f:
            json.dump(data, f)

    def _getJson(self):
        """Load the backing file; degrade to {"Macros": {}} on any error."""
        try:
            with open(self._filename, 'r') as fp:
                filter_json = json.load(fp)
        except (ValueError, IOError):
            return {"Macros": {}}
        if "Macros" not in filter_json.keys():
            filter_json["Macros"] = {}
        return filter_json
class SingleFileChooser(Widget):
    """Button widget that lets the user pick (open) or name (save) one file.

    The chosen path is exposed as ``file_path`` (None when cancelled);
    filter option pages bind to it through AttrRef.
    """
    # Operation modes for the constructor.
    OPEN_FILE = 0
    SAVE_FILE = 1

    def _open_file(self):
        """Show the platform open-file dialog and record the selection."""
        file_types = []
        for f_type in self.file_types:
            # askOpenFile wants bare suffixes, not glob patterns.
            file_types.append(f_type.replace("*.", ""))
        file_path = mcplatform.askOpenFile(title='Select a file...', suffixes=file_types)
        if file_path:
            self._button.set_text("{filen}".format(filen=os.path.basename(file_path)), True)
            self.file_path = file_path
        else:
            self._button.set_text("Choose a file")
            self.file_path = None
        self._button.shrink_wrap()
        self.shrink_wrap()

    def _save_file(self):
        """Show the platform save-file dialog and record the selection."""
        # NUL-separated pattern string, as the Windows file dialog expects.
        file_types = 'Custom File\0{}\0'.format(';'.join(self.file_types))
        # NOTE(review): 'name' strips a leading "*", yet the suffix argument
        # below always uses file_types[0][1:] regardless — looks intentional
        # but worth confirming against mcplatform.askSaveFile's signature.
        if not self.file_types[0].startswith("*"):
            name = self.file_types[0]
        else:
            name = self.file_types[0][1:]
        file_path = mcplatform.askSaveFile(".", "Save as...", name, file_types, self.file_types[0][1:])
        if file_path:
            self._button.set_text(os.path.basename(file_path), True)
            self.file_path = file_path
        else:
            self._button.set_text('Save a file')
            self.file_path = None
        self._button.shrink_wrap()
        self.shrink_wrap()

    def __init__(self, file_types=None, operation=0, **kwds):
        """:param file_types: glob patterns, e.g. ["*.schematic"]; defaults to ["*.*"].
        :param operation: OPEN_FILE or SAVE_FILE.
        """
        Widget.__init__(self, **kwds)
        if file_types is None:
            self.file_types = ["*.*",]
        else:
            self.file_types = file_types
        self.file_path = None
        self._button = None
        if operation == self.OPEN_FILE:
            self._button = Button("Choose a file", action=self._open_file)
        elif operation == self.SAVE_FILE:
            self._button = Button("Save a file", action=self._save_file)
        self.add(self._button)
        self.shrink_wrap()
class MacroModuleOptions(Widget):
    """Read-only summary panel for a recorded macro, plus the logic to run it.

    Presents the step list; confirm() replays all steps as one undoable
    MacroOperation.
    """
    is_gl_container = True

    def __init__(self, macro_data, *args, **kw):
        """:param macro_data: dict with "Number of steps" plus one entry per
        step index, each holding {"Name": ..., "Inputs": ...}."""
        self._parent = None
        self._macro_data = macro_data
        if '_parent' in kw.keys():
            self._parent = kw.pop('_parent')
        Widget.__init__(self, *args, **kw)
        infoColList = []
        stepsLabel = wrapped_label("Number of steps: %s" % macro_data["Number of steps"], 300)
        infoColList.append(stepsLabel)
        for step in sorted(macro_data.keys()):
            if step != "Number of steps":
                infoColList.append(wrapped_label("Step %s: %s" % (int(step) + 1, macro_data[step]["Name"]), 300))
        self.add(Column(infoColList))
        self.shrink_wrap()

    @property
    def options(self):
        # Macros carry their own per-step options; nothing to save or restore.
        return {}

    @options.setter
    def options(self, value):
        pass

    def run(self):
        pass

    @alertFilterException
    def confirm(self, tool):
        """Decode each step's stored inputs and run all steps as one operation."""
        with setWindowCaption("Applying Macro..."):
            options = []
            filters = []
            for step in sorted(self._macro_data.keys()):
                if step != "Number of steps":
                    filters.append(tool.filterModules[self._macro_data[step]["Name"]])
                    for module_input in self._macro_data[step]["Inputs"].keys():
                        if not isinstance(self._macro_data[step]["Inputs"][module_input], (str, unicode)):
                            continue
                        if not self._macro_data[step]["Inputs"][module_input].startswith("block-"):
                            continue
                        # "block-ID:data" strings were serialized by
                        # save_macro; rehydrate them into Block objects.
                        toFind = self._macro_data[step]["Inputs"][module_input][6:].split(":")
                        block = tool.editor.materials.get((toFind[0], toFind[1]))
                        self._macro_data[step]["Inputs"][module_input] = block
                    options.append(self._macro_data[step]["Inputs"])
            op = MacroOperation(tool.editor, tool.editor.level, tool.selectionBox(), filters, options)
            tool.editor.level.showProgress = showProgress
            tool.editor.addOperation(op)
            tool.editor.addUnsavedEdit()
            tool.editor.invalidateBox(tool.selectionBox())
class FilterModuleOptions(Widget):
is_gl_container = True
def __init__(self, tool, module, *args, **kw):
self._parent = None
self.nbttree = None
self.module = module
if '_parent' in kw.keys():
self._parent = kw.pop('_parent')
Widget.__init__(self, *args, **kw)
self.spacing = 2
self.tool = tool
self.pages = pages = TabPanel()
pages.is_gl_container = True
self.optionDict = {}
self.giveEditorObject(module)
log.info("Creating options for " + str(module))
if hasattr(module, "inputs"):
trn = getattr(module, "trn", None)
self.trn = trn
if isinstance(module.inputs, list):
self.pgs = []
for tabData in module.inputs:
title, page, pageRect = self.makeTabPage(self.tool, tabData, trn=trn)
self.pgs.append((title, page))
pages.set_parent(None)
self.pages = pages = TabPanel(self.pgs)
elif isinstance(module.inputs, tuple):
title, page, pageRect = self.makeTabPage(self.tool, module.inputs, trn=trn)
pages.add_page(title, page)
pages.set_rect(pageRect)
else:
self.size = (0, 0)
pages.shrink_wrap()
self.add(pages)
self.shrink_wrap()
if len(pages.pages):
if pages.current_page is not None:
pages.show_page(pages.current_page)
else:
pages.show_page(pages.pages[0])
for eachPage in pages.pages:
self.optionDict = dict(self.optionDict.items() + eachPage.optionDict.items())
def rebuildTabPage(self, inputs, **kwargs):
title, page, rect = self.makeTabPage(self.tool, inputs, self.trn, **kwargs)
for i, t, p, s, r in self.pages.iter_tabs():
if t == title:
self.pages.remove_page(p)
self.pages.add_page(title, page, idx=i)
self.pages.show_page(page)
break
def makeTabPage(self, tool, inputs, trn=None, **kwargs):
page = Widget(**kwargs)
page.is_gl_container = True
rows = []
cols = []
max_height = tool.editor.mainViewport.height - tool.editor.toolbar.height - tool.editor.subwidgets[0].height -\
self._parent.filterSelectRow.height - self._parent.confirmButton.height - self.pages.tab_height
page.optionDict = {}
page.tool = tool
title = "Tab"
for optionSpec in inputs:
optionName = optionSpec[0]
optionType = optionSpec[1]
if trn is not None:
n = trn._(optionName)
else:
n = optionName
if n == optionName:
oName = _(optionName)
else:
oName = n
if isinstance(optionType, tuple):
if isinstance(optionType[0], (int, long, float)):
if len(optionType) == 3:
val, min, max = optionType
increment = 0.1
elif len(optionType) == 2:
min, max = optionType
val = min
increment = 0.1
else:
val, min, max, increment = optionType
rows.append(addNumField(page, optionName, oName, val, min, max, increment))
if isinstance(optionType[0], (str, unicode)):
isChoiceButton = False
if optionType[0] == "string":
kwds = []
wid = None
val = None
for keyword in optionType:
if isinstance(keyword, (str, unicode)) and keyword != "string":
kwds.append(keyword)
for keyword in kwds:
splitWord = keyword.split('=')
if len(splitWord) > 1:
v = None
try:
v = int(splitWord[1])
except ValueError:
pass
key = splitWord[0]
if v is not None:
if key == "width":
wid = v
else:
if key == "value":
val = "=".join(splitWord[1:])
if val is None:
val = ""
if wid is None:
wid = 200
field = TextFieldWrapped(value=val, width=wid)
page.optionDict[optionName] = AttrRef(field, 'value')
row = Row((Label(oName, doNotTranslate=True), field))
rows.append(row)
elif optionType[0] == "block":
blockButton = BlockButton(tool.editor.level.materials)
try:
blockButton.blockInfo = tool.editor.level.materials[optionType[1]]
except AttributeError:
blockButton.blockInfo = tool.editor.level.materials[0]
except KeyError:
if tool.editor.level.materials == pymclevel.pocketMaterials:
blockButton.blockInfo = pymclevel.alphaMaterials[optionType[1]]
else:
raise
row = Column((Label(oName, doNotTranslate=True), blockButton))
page.optionDict[optionName] = AttrRef(blockButton, 'blockInfo')
rows.append(row)
elif optionType[0] == "file-save":
if len(optionType) == 2:
file_chooser = SingleFileChooser(file_types=optionType[1], operation=SingleFileChooser.SAVE_FILE)
else:
file_chooser = SingleFileChooser(operation=SingleFileChooser.SAVE_FILE)
row = Row((Label(oName, doNotTranslate=True), file_chooser))
page.optionDict[optionName] = AttrRef(file_chooser, 'file_path')
rows.append(row)
elif optionType[0] == "file-open":
if len(optionType) == 2:
file_chooser = SingleFileChooser(file_types=optionType[1], operation=SingleFileChooser.OPEN_FILE)
else:
file_chooser = SingleFileChooser(operation=SingleFileChooser.OPEN_FILE)
row = Row((Label(oName, doNotTranslate=True), file_chooser))
page.optionDict[optionName] = AttrRef(file_chooser, 'file_path')
rows.append(row)
else:
isChoiceButton = True
if isChoiceButton:
if trn is not None:
__ = trn._
else:
__ = _
choices = [__("%s" % a) for a in optionType]
choiceButton = ChoiceButton(choices, doNotTranslate=True)
page.optionDict[optionName] = AttrRef(choiceButton, 'selectedChoice')
rows.append(Row((Label(oName, doNotTranslate=True), choiceButton)))
elif isinstance(optionType, bool):
cbox = CheckBox(value=optionType)
page.optionDict[optionName] = AttrRef(cbox, 'value')
row = Row((Label(oName, doNotTranslate=True), cbox))
rows.append(row)
elif isinstance(optionType, (int, float)):
rows.append(addNumField(self, optionName, oName, optionType))
elif optionType == "blocktype" or isinstance(optionType, pymclevel.materials.Block):
blockButton = BlockButton(tool.editor.level.materials)
if isinstance(optionType, pymclevel.materials.Block):
blockButton.blockInfo = optionType
row = Column((Label(oName, doNotTranslate=True), blockButton))
page.optionDict[optionName] = AttrRef(blockButton, 'blockInfo')
rows.append(row)
elif optionType == "label":
rows.append(wrapped_label(oName, 50, doNotTranslate=True))
elif optionType == "string":
inp = None
if inp is not None:
size = inp
else:
size = 200
field = TextFieldWrapped(value="")
row = TextInputRow(oName, ref=AttrRef(field, 'value'), width=size, doNotTranslate=True)
page.optionDict[optionName] = AttrRef(field, 'value')
rows.append(row)
elif optionType == "title":
title = oName
elif optionType == "file-save":
file_chooser = SingleFileChooser(operation=SingleFileChooser.SAVE_FILE)
row = Row((Label(oName, doNotTranslate=True), file_chooser))
page.optionDict[optionName] = AttrRef(file_chooser, 'file_path')
rows.append(row)
elif optionType == "file-open":
file_chooser = SingleFileChooser(operation=SingleFileChooser.OPEN_FILE)
row = Row((Label(oName, doNotTranslate=True), file_chooser))
page.optionDict[optionName] = AttrRef(file_chooser, 'file_path')
rows.append(row)
elif isinstance(optionType, list) and optionType[0].lower() == "nbttree":
kw = {'close_text': None, 'load_text': None}
if len(optionType) >= 3:
def close():
self.pages.show_page(self.pages.pages[optionType[2]])
kw['close_action'] = close
kw['close_text'] = "Go Back"
if len(optionType) >= 4:
if optionType[3]:
kw['load_text'] = optionType[3]
if hasattr(self.module, 'nbt_ok_action'):
kw['ok_action'] = getattr(self.module, 'nbt_ok_action')
self.nbttree = NBTExplorerToolPanel(self.tool.editor, nbtObject=optionType[1],
height=max_height, no_header=True, copy_data=False, **kw)
self.module.set_tree(self.nbttree.tree)
for meth_name in dir(self.module):
if meth_name.startswith('nbttree_'):
setattr(self.nbttree.tree.treeRow, meth_name.split('nbttree_')[-1],
getattr(self.module, meth_name))
page.optionDict[optionName] = AttrRef(self, 'rebuildTabPage')
rows.append(self.nbttree)
self.nbttree.page = len(self.pgs)
else:
raise ValueError(("Unknown option type", optionType))
height = sum(r.height for r in rows) + (len(rows) - 1) * self.spacing
if height > max_height:
h = 0
for i, r in enumerate(rows):
h += r.height
if h > height / 2:
if rows[:i]:
cols.append(Column(rows[:i], spacing=0))
rows = rows[i:]
break
if len(rows):
cols.append(Column(rows, spacing=0))
if len(cols):
page.add(Row(cols, spacing=0))
page.shrink_wrap()
return title, page, page._rect
@property
def options(self):
options = {}
for k, v in self.optionDict.iteritems():
options[k] = v.get() if not isinstance(v.get(), pymclevel.materials.Block) else copy.copy(v.get())
if self.pages.current_page is not None:
options["__page_index__"] = self.pages.pages.index(self.pages.current_page)
return options
@options.setter
def options(self, val):
for k in val:
if k in self.optionDict:
self.optionDict[k].set(val[k])
index = val.get("__page_index__", -1)
if len(self.pages.pages) > index > -1:
self.pages.show_page(self.pages.pages[index])
def giveEditorObject(self, module):
module.editor = self.tool.editor
    @staticmethod
    def confirm(tool):
        # Build and enqueue a FilterOperation for the currently selected
        # filter, using the option values gathered from the options panel.
        with setWindowCaption("Applying Filter... - "):
            filterModule = tool.filterModules[tool.panel.filterSelect.selectedChoice]
            op = FilterOperation(tool.editor, tool.editor.level, tool.selectionBox(), filterModule,
                                 tool.panel.filterOptionsPanel.options)
            # Let the level report progress through the shared progress UI
            # while the filter runs.
            tool.editor.level.showProgress = showProgress
            tool.editor.addOperation(op)
            tool.editor.addUnsavedEdit()
            # Redraw the filtered region.
            tool.editor.invalidateBox(tool.selectionBox())
class FilterToolPanel(Panel):
    """Side panel for the filter tool.

    Presents the filter/macro dropdown, the per-filter option widgets,
    the macro record/delete button and the key-binding button.  Option
    values, macros and the last opened filter are persisted in
    filters.json in the data directory.
    """
    # If True, filters.json is backed up to disk on every edit (via
    # JsonDictProperty).  The default (False) writes the file only when
    # the tool is closed, so a crash can lose recorded macros.
    BACKUP_FILTER_JSON = False

    def __init__(self, tool):
        Panel.__init__(self, name='Panel.FilterToolPanel')
        # Macro-recording state: recorded steps and the next step index.
        self.macro_steps = []
        self.current_step = 0
        self._filter_json = None
        self.keys_panel = None
        self.filterOptionsPanel = None
        self.filterSelect = ChoiceButton([], choose=self.filterChanged, doNotTranslate=True)
        self.binding_button = Button("", action=self.bind_key,
                                     tooltipText="Click to bind this filter to a key")
        self.filterLabel = Label("Filter:", fg_color=(177, 177, 255, 255))
        self.filterLabel.mouse_down = lambda x: mcplatform.platform_open(directories.getFiltersDir())
        self.filterLabel.tooltipText = "Click to open filters folder"
        self.macro_button = Button("Record Macro", action=self.start_record_macro)
        self.filterSelectRow = Row((self.filterLabel, self.filterSelect,
                                    self.macro_button, self.binding_button))
        self.confirmButton = Button("Filter", action=self.confirm)
        self._recording = False
        self._save_macro = False
        self.tool = tool
        self.selectedName = self.filter_json.get("Last Filter Opened", "")
        utils = FilterUtils(
            editor=tool.editor,
            materials=self.tool.editor.level.materials,
            custom_widget=tool.editor.addExternalWidget,
            resize_selection_box=tool.editor._resize_selection_box
        )
        # Register the FilterUtils instance as an importable pseudo-module
        # so filter scripts can simply `import filter_utils`.  (The old
        # code created an `imp.new_module` object and immediately threw it
        # away; the instance itself is what was actually registered.)
        sys.modules["filter_utils"] = utils

    @staticmethod
    def load_filter_json():
        """Load filters.json from the data dir, tolerating a missing or
        corrupt file.  Always returns a mapping with a "Macros" key."""
        filter_json_file = os.path.join(directories.getDataDir(), "filters.json")
        filter_json = {}
        if FilterToolPanel.BACKUP_FILTER_JSON:
            # Write-through dict: every mutation is flushed to disk.
            filter_json = JsonDictProperty(filter_json_file)
        else:
            fp = None
            try:
                if os.path.exists(filter_json_file):
                    fp = open(filter_json_file, 'rb')
                    filter_json = json.load(fp)
            except (ValueError, IOError) as e:
                log.error("Error while loading filters.json %s", e)
            finally:
                if fp:
                    fp.close()
        if "Macros" not in filter_json.keys():
            filter_json["Macros"] = {}
        return filter_json

    @property
    def filter_json(self):
        # Lazily loaded and cached for the lifetime of the panel.
        if self._filter_json is None:
            self._filter_json = FilterToolPanel.load_filter_json()
        return self._filter_json

    def close(self):
        """Persist current options and, unless BACKUP_FILTER_JSON already
        wrote through, save filters.json to disk."""
        self._saveOptions()
        self.filter_json["Last Filter Opened"] = self.selectedName
        if not FilterToolPanel.BACKUP_FILTER_JSON:
            with open(os.path.join(directories.getDataDir(), "filters.json"), 'w') as f:
                json.dump(self.filter_json, f)

    def reload(self):
        """Rebuild the panel's widgets for the current filter/macro list."""
        for i in list(self.subwidgets):
            self.remove(i)
        tool = self.tool
        # Display "No filter modules found" if there are no filters.
        # (Was `is 0` — identity comparison with an int literal only works
        # by the CPython small-int cache accident; use equality.)
        if len(tool.filterModules) == 0:
            self.add(Label("No filter modules found!"))
            self.shrink_wrap()
            return
        names_list = sorted([n for n in tool.filterNames if not n.startswith("[")])
        # Subfolder entries look like "[foo] bar"; sort them by the
        # subfolder name between the brackets ("foo").
        subfolder_names_list = sorted([n for n in tool.filterNames if n.startswith("[")],
                                      key=lambda x: x.split("]")[0][1:])
        names_list.extend(subfolder_names_list)
        names_list.extend([macro for macro in self.filter_json["Macros"].keys()])
        if self.selectedName is None or self.selectedName not in names_list:
            self.selectedName = names_list[0]
        # Remove any keybindings that don't have a filter any more.
        for (i, j) in config.config.items("Filter Keys"):
            if i == "__name__":
                continue
            if not any([i == m.lower() for m in names_list]):
                config.config.remove_option("Filter Keys", i)
        self.filterSelect.choices = names_list
        name = self.selectedName.lower()
        names = [k for (k, v) in config.config.items("Filter Keys")]
        btn_name = config.config.get("Filter Keys", name) if name in names else "*"
        self.binding_button.set_text(btn_name)
        # Build the options panel; broken filters are dropped and we retry
        # with the first remaining filter until one succeeds.
        self.filterOptionsPanel = None
        while self.filterOptionsPanel is None:
            module = self.tool.filterModules.get(self.selectedName, None)
            if module is not None:
                try:
                    self.filterOptionsPanel = FilterModuleOptions(self.tool, module, _parent=self)
                except Exception as e:
                    alert(_("Error creating filter inputs for {0}: {1}").format(module, e))
                    traceback.print_exc()
                    self.tool.filterModules.pop(self.selectedName)
                    self.selectedName = tool.filterNames[0]
                if len(tool.filterNames) == 0:
                    raise ValueError("No filters loaded!")
                if not self._recording:
                    self.confirmButton.set_text("Filter")
            else:
                # Not a filter module, so it must be a recorded macro.
                macro_data = self.filter_json["Macros"][self.selectedName]
                self.filterOptionsPanel = MacroModuleOptions(macro_data)
                self.confirmButton.set_text("Run Macro")
        self.filterSelect = ChoiceButton(names_list, choose=self.filterChanged, doNotTranslate=True)
        self.filterSelect.selectedChoice = self.selectedName
        self.filterSelectRow = Row((self.filterLabel, self.filterSelect,
                                    self.macro_button, self.binding_button))
        self.add(Column((self.filterSelectRow, self.filterOptionsPanel, self.confirmButton)))
        self.shrink_wrap()
        if self.parent:
            height = self.parent.mainViewport.height - self.parent.toolbar.height
            self.centery = height / 2 + self.parent.subwidgets[0].height
        # Restore any options saved for this filter earlier in the session.
        if self.selectedName in self.tool.savedOptions:
            self.filterOptionsPanel.options = self.tool.savedOptions[self.selectedName]

    @property
    def macroSelected(self):
        # Macros appear in the dropdown but not among the filter names.
        return self.filterSelect.selectedChoice not in self.tool.filterNames

    def filterChanged(self):
        """Handle a new selection in the filter dropdown."""
        self._saveOptions()
        self.selectedName = self.filterSelect.selectedChoice
        # The macro button doubles as "Delete Macro" when a macro is shown.
        if self.macroSelected:
            self.macro_button.set_text("Delete Macro")
            self.macro_button.action = self.delete_macro
        elif not self._recording:
            self.macro_button.set_text("Record Macro")
            self.macro_button.action = self.start_record_macro
        self.reload()

    def delete_macro(self):
        """Delete the selected macro and move the selection elsewhere."""
        macro_name = self.selectedName
        if macro_name in self.filter_json["Macros"]:
            del self.filter_json["Macros"][macro_name]
        if len(self.filterSelect.choices) == 1:
            self.reload()
            return
        choices = self.filterSelect.choices
        self.filterSelect.selectedChoice = choices[0] if choices[0] != macro_name else choices[1]
        self.filterChanged()

    def stop_record_macro(self):
        """Finish recording: prompt for a name and persist the steps."""
        macro_dialog = Dialog()
        macroNameLabel = Label("Macro Name: ")
        macroNameField = TextFieldWrapped(width=200)

        def save_macro():
            # Store the steps under "{Macro} <name>" in filters.json.
            macro_name = "{Macro} " + macroNameField.get_text()
            self.filter_json["Macros"][macro_name] = {}
            self.filter_json["Macros"][macro_name]["Number of steps"] = len(self.macro_steps)
            self.filterSelect.choices.append(macro_name)
            for entry in self.macro_steps:
                # Serialize Block inputs as "block-<id>:<data>" strings so
                # they survive the round-trip through JSON.
                for inp in entry["Inputs"].keys():
                    if not isinstance(entry["Inputs"][inp], pymclevel.materials.Block):
                        if not entry["Inputs"][inp] == "blocktype":
                            continue
                    # NOTE(review): if the value is the literal string
                    # "blocktype" rather than a Block, the .ID access below
                    # would fail — verify that branch is ever taken.
                    _inp = entry["Inputs"][inp]
                    entry["Inputs"][inp] = "block-{0}:{1}".format(_inp.ID, _inp.blockData)
                self.filter_json["Macros"][macro_name][entry["Step"]] = {"Name": entry["Name"],
                                                                         "Inputs": entry["Inputs"]}
            stop_dialog()
            self.filterSelect.selectedChoice = macro_name
            self.filterChanged()

        def stop_dialog():
            # Leave recording mode and reset the recorded state.
            self.macro_button.text = "Record Macro"
            self.macro_button.tooltipText = None
            self.macro_button.action = self.start_record_macro
            macro_dialog.dismiss()
            self.macro_steps = []
            self.current_step = 0
            self._recording = False

        input_row = Row((macroNameLabel, macroNameField))
        saveButton = Button("Save", action=save_macro)
        closeButton = Button("Cancel", action=stop_dialog)
        button_row = Row((saveButton, closeButton))
        macro_dialog.add(Column((input_row, button_row)))
        macro_dialog.shrink_wrap()
        macro_dialog.present()

    def start_record_macro(self):
        """Switch the panel into macro-recording mode."""
        self.macro_button.text = "Stop recording"
        self.macro_button.tooltipText = "Currently recording a macro"
        self.macro_button.action = self.stop_record_macro
        self.confirmButton.text = "Add macro"
        self.confirmButton.width += 75
        self.confirmButton.centerx = self.centerx
        self._recording = True

    def _addMacroStep(self, name=None, inputs=None):
        # Record one filter application (name + option values) as a step.
        data = {"Name": name, "Step": self.current_step, "Inputs": inputs}
        self.current_step += 1
        self.macro_steps.append(data)

    def unbind_key(self):
        """Remove the key binding of the selected filter."""
        config.config.remove_option("Filter Keys", self.selectedName)
        self.binding_button.text = "*"
        self.keys_panel.dismiss()
        self.reload()

    def bind_key(self, message=None):
        """Prompt for a key and bind it to the selected filter.

        Re-invokes itself with an explanatory message when the chosen key
        is reserved or already in use.  Returns True when a key event was
        handled.
        """
        panel = Panel(name='Panel.FilterToolPanel.bind_key')
        panel.bg_color = (0.5, 0.5, 0.6, 1.0)
        if not message:
            message = _("Press a key to assign to the filter \"{0}\"\n\n"
                        "Press ESC to cancel.").format(self.selectedName)
        label = albow.Label(message)
        unbind_button = Button("Press to unbind", action=self.unbind_key)
        column = Column((label, unbind_button))
        panel.add(column)
        panel.shrink_wrap()

        def panelKeyUp(evt):
            _key_name = self.root.getKey(evt)
            panel.dismiss(_key_name)

        def panelMouseUp(evt):
            # Also allow binding mouse buttons 3-7 (right button, wheel,
            # extra buttons).
            button = keys.remapMouseButton(evt.button)
            _key_name = None
            if button == 3:
                _key_name = "Button 3"
            elif button == 4:
                _key_name = "Scroll Up"
            elif button == 5:
                _key_name = "Scroll Down"
            elif button == 6:
                _key_name = "Button 4"
            elif button == 7:
                _key_name = "Button 5"
            if 2 < button < 8:
                panel.dismiss(_key_name)

        panel.key_up = panelKeyUp
        panel.mouse_up = panelMouseUp
        self.keys_panel = panel
        key_name = panel.present()
        if isinstance(key_name, bool):
            return True
        if key_name != "Escape":
            # Reserved keys that must not be rebound.
            if key_name in ["Alt-F4", "F1", "F2", "F3", "F4", "F5", "1", "2", "3",
                            "4", "5", "6", "7", "8", "9", "Ctrl-Alt-F9", "Ctrl-Alt-F10"]:
                self.bind_key(_("You can't use the key {0}.\n"
                                "Press a key to assign to the filter \"{1}\"\n\n"
                                ""
                                "Press ESC to cancel.").format(_(key_name), self.selectedName))
                return True
            # Keys already bound to an editor action.
            keysUsed = [(j, i) for (j, i) in config.config.items("Keys") if i == key_name]
            if keysUsed:
                self.bind_key(_("Can't bind. {0} is already used by {1}.\n"
                                "Press a key to assign to the filter \"{2}\"\n\n"
                                ""
                                "Press ESC to cancel.").format(_(key_name), keysUsed[0][0], self.selectedName))
                return True
            # Keys already bound to another filter.
            filter_keys = [i for (i, j) in config.config.items("Filter Keys") if j == key_name]
            if filter_keys:
                self.bind_key(_("Can't bind. {0} is already used by the \"{1}\" filter.\n"
                                "Press a new key.\n\n"
                                ""
                                "Press ESC to cancel.").format(_(key_name), filter_keys[0]))
                return True
            config.config.set("Filter Keys", self.selectedName.lower(), key_name)
            config.save()
            self.reload()

    def _saveOptions(self):
        """Stash the current option values in tool.savedOptions.

        Should not be called directly; filterChanged() and close() call
        this at the appropriate times.
        """
        if self.filterOptionsPanel is not None:
            options = {}
            options.update(self.filterOptionsPanel.options)
            options.pop("", "")
            self.tool.savedOptions[self.selectedName] = options

    @alertFilterException
    def confirm(self):
        """Apply the selected filter, or record it as a macro step while
        recording."""
        if self._recording:
            self._addMacroStep(self.selectedName, self.filterOptionsPanel.options)
        else:
            self.filterOptionsPanel.confirm(self.tool)
class FilterOperation(Operation):
    """Undoable operation that runs a single filter over a selection box."""

    def __init__(self, editor, level, box, filter, options):
        super(FilterOperation, self).__init__(editor, level)
        self.box = box
        self.filter = filter
        self.options = options
        # Only becomes undoable once perform() has actually run.
        self.canUndo = False

    def perform(self, recordUndo=True):
        """Run the filter over the box.

        A filter may set RECORD_UNDO = False (e.g. read-only filters such
        as Find) to skip the undo capture regardless of `recordUndo`.
        """
        if self.level.saving:
            alert(_("Cannot perform action while saving is taking place"))
            return
        if getattr(self.filter, 'RECORD_UNDO', recordUndo):
            self.undoLevel = self.extractUndo(self.level, self.box)
        # Hand the level's id definitions to the filter before running it.
        self.filter.MCEDIT_DEFS = self.level.defsIds.mcedit_defs
        self.filter.MCEDIT_IDS = self.level.defsIds.mcedit_ids
        self.filter.perform(self.level, BoundingBox(self.box), self.options)
        self.canUndo = True

    def dirtyBox(self):
        """Region invalidated by this operation."""
        return self.box
class MacroOperation(Operation):
    """Undoable operation replaying a recorded macro: a sequence of filters,
    each with its saved option dict, applied over one selection box."""

    def __init__(self, editor, level, box, filters, options):
        super(MacroOperation, self).__init__(editor, level)
        self._box = box
        self.options = options
        self.filters = filters
        self.canUndo = False

    def perform(self, recordUndo=True):
        """Apply every macro step in order over the selection box."""
        if self.level.saving:
            alert(_("Cannot perform action while saving is taking place"))
            return
        if recordUndo:
            self.undoLevel = self.extractUndo(self.level, self._box)
        for step_options, step_filter in zip(self.options, self.filters):
            step_filter.perform(self.level, BoundingBox(self._box), step_options)
        self.canUndo = True

    def dirtyBox(self):
        """Region invalidated by this operation."""
        return self._box
class FilterToolOptions(ToolOptions):
    """Options panel for the filter tool: a single checkbox suppressing
    filter-import error pop-ups, plus an Ok button."""

    def __init__(self, tool):
        ToolOptions.__init__(self, name='Panel.FilterToolOptions')
        self.tool = tool
        self.notifications_disabled = False
        error_checkbox = CheckBoxLabel("Disable Error Notification",
                                       ref=AttrRef(self, 'notifications_disabled'))
        dismiss_button = Button("Ok", action=self.dismiss)
        self.add(Column((error_checkbox, dismiss_button,), spacing=2))
        self.shrink_wrap()
class FilterTool(EditorTool):
    """Editor tool that applies user-selectable filter scripts to the
    current selection box."""
    tooltipText = "Filter"
    toolIconName = "filter"

    def __init__(self, editor):
        EditorTool.__init__(self, editor)
        self.filterModules = {}
        self.savedOptions = {}
        self.filters_not_imported = []
        # Filenames that failed to import; read by reloadFilters() and
        # reset by showPanel().  Was previously only created in showPanel,
        # so a direct reloadFilters() call would raise AttributeError.
        self.not_imported_filters = []
        self.optionsPanel = FilterToolOptions(self)

    @property
    def statusText(self):
        return "Choose a filter, then click Filter or press Enter to apply it."

    def toolEnabled(self):
        # Usable only when a selection box exists.
        return self.selectionBox() is not None

    def toolSelected(self):
        self.showPanel()

    @alertException
    def showPanel(self):
        """(Re)create and display the filter panel after reloading filters."""
        self.panel = FilterToolPanel(self)
        self.not_imported_filters = []
        self.reloadFilters()
        self.panel.reload()
        height = self.editor.mainViewport.height - self.editor.toolbar.height
        self.panel.centery = height / 2 + self.editor.subwidgets[0].height
        self.panel.left = self.editor.left
        self.editor.add(self.panel)

    def hidePanel(self):
        if self.panel is None:
            return
        self.panel.close()
        if self.panel.parent:
            self.panel.parent.remove(self.panel)
        self.panel = None

    def reloadFilters(self):
        """Scan the stock and user filter directories and (re)import every
        filter, populating self.filterModules keyed by display name."""
        filterFiles = []
        unicode_module_names = []
        stock_filters = []
        cust_filters = []

        def searchForFiltersInDir(searchFolder, stock=False):
            # Collect (root, filename, is_stock, "[subfolder]") tuples for
            # every .py file under searchFolder, skipping demo/lib dirs.
            for root, folders, files in os.walk(os.path.join(searchFolder), True):
                filter_dir = os.path.basename(root)
                if filter_dir.startswith('demo') or filter_dir.startswith('lib'):
                    continue
                subFolderString = root.replace(searchFolder, "")
                if subFolderString.endswith(os.sep):
                    subFolderString = subFolderString[:len(os.sep)]
                if subFolderString.startswith(os.sep):
                    subFolderString = subFolderString[len(os.sep):]
                if len(subFolderString) > 0:
                    subFolderString = "[" + subFolderString + "]"
                try:
                    root = str(root)
                    if root not in sys.path:
                        sys.path.append(root)
                except UnicodeEncodeError:
                    # Non-ASCII path: mark these files so tryImport uses
                    # the read+exec fallback instead of imp.load_source.
                    unicode_module_names.extend([filter_name for filter_name in files])
                for possible_filter in files:
                    if possible_filter.endswith(".py"):
                        if stock:
                            stock_filters.append(possible_filter)
                            _stock = True
                        else:
                            cust_filters.append(possible_filter)
                            _stock = False
                            # A custom filter shadowing a stock filter of
                            # the same name keeps the stock flag.
                            if possible_filter in stock_filters:
                                _stock = True
                        filterFiles.append((root, possible_filter, _stock, subFolderString))

        searchForFiltersInDir(os.path.join(directories.getDataDir(), "stock-filters"), True)
        searchForFiltersInDir(directories.getFiltersDir(), False)

        filterModules = []
        org_lang = albow.translate.lang
        # Filters may import each other, so keep retrying the leftovers
        # until a full pass imports nothing new.
        shouldContinue = True
        while shouldContinue:
            shouldContinue = False
            # Iterate over a copy: the original removed from filterFiles
            # while iterating it, silently skipping the next entry.
            for f in list(filterFiles):
                if f[1] in self.not_imported_filters:
                    continue
                module = tryImport(f[0], f[1], org_lang, f[2], f[3], f[1] in unicode_module_names, notify=(not self.optionsPanel.notifications_disabled))
                if module is None:
                    self.not_imported_filters.append(f[1])
                    continue
                filterModules.append(module)
                filterFiles.remove(f)
                shouldContinue = True

        # Disambiguate duplicate display names by appending underscores.
        # (The original appended the module object itself to displayNames,
        # so the membership test never matched and duplicates collided.)
        displayNames = []
        for m in filterModules:
            while m.displayName in displayNames:
                m.displayName += "_"
            displayNames.append(m.displayName)

        filterModules = filter(lambda mod: hasattr(mod, "perform"), filterModules)
        self.filterModules = collections.OrderedDict(sorted(
            [(FilterTool.moduleDisplayName(x), x) for x in filterModules],
            key=lambda module_name: (module_name[0].lower(),
                                     module_name[1])))

    @staticmethod
    def moduleDisplayName(module):
        """Return the module's display name prefixed with its "[subfolder] "
        marker, if any, with the first letter capitalized and translated."""
        subFolderString = getattr(module, 'foldersForDisplayName', "")
        subFolderString = subFolderString if len(subFolderString) < 1 else subFolderString + " "
        name = getattr(module, "displayName", module.__name__)
        return subFolderString + _(name[0].upper() + name[1:])

    @property
    def filterNames(self):
        """Display names of all successfully imported filter modules."""
        return [FilterTool.moduleDisplayName(module) for module in self.filterModules.itervalues()]
#-# WIP: rework of the filter translation machinery.
#-# 'new_method' selects the import implementation below: True uses the
#-# in-development tryImport_new, False the legacy tryImport_old.
#-# Must be False on release unless the new code is fully working.
new_method = True
def tryImport_old(_root, name, org_lang, stock=False, subFolderString="", unicode_name=False, notify=True):
    """Import the filter file `name` in directory `_root` (legacy path).

    Non-stock filters additionally get a private copy of the albow
    translation machinery (`module.trn`) pointed at the filter's own
    translation folder, and their displayName is translated.  Returns the
    imported module, or None on failure (optionally alerting the user).
    """
    with open(os.path.join(_root, name)) as module_file:
        module_name = name.split(os.path.sep)[-1].replace(".py", "")
        try:
            if unicode_name:
                # Path can't be encoded to str: read and exec the source
                # into a fresh module instead of using imp.load_source.
                source_code = module_file.read()
                module = imp.new_module(module_name)
                exec (source_code, module.__dict__)
                if module_name not in sys.modules.keys():
                    sys.modules[module_name] = module
            else:
                module = imp.load_source(module_name, os.path.join(_root, name), module_file)
            module.foldersForDisplayName = subFolderString
            if not (hasattr(module, 'displayName')):
                module.displayName = module_name  # fall back to the file name
            if not stock:
                # Force a clean re-import of the translation module so this
                # filter gets its own translation state.
                if "trn" in sys.modules.keys():
                    del sys.modules["trn"]
                if "albow.translate" in sys.modules.keys():
                    del sys.modules["albow.translate"]
                from albow import translate as trn
                if directories.getFiltersDir() in name:
                    trn_path = os.path.split(name)[0]
                else:
                    trn_path = directories.getFiltersDir()
                trn_path = os.path.join(trn_path, subFolderString[1:-1], module_name)
                module.trn = trn
                if os.path.exists(trn_path):
                    module.trn.setLangPath(trn_path)
                    module.trn.buildTranslation(config.settings.langCode.get())
                # Translate the display name: try the filter's own
                # translation first, then the global one.
                n = module.displayName
                if hasattr(module, "trn"):
                    n = module.trn._(module.displayName)
                if n == module.displayName:
                    n = _(module.displayName)
                module.displayName = n
                # Restore the global language clobbered above.
                import albow.translate
                albow.translate.lang = org_lang
            return module
        except Exception as e:
            traceback.print_exc()
            if notify:
                alert(_(u"Exception while importing filter module {}. " +
                        u"See console for details.\n\n{}").format(name, e))
            return None
def tryImport_new(_root, name, org_lang, stock=False, subFolderString="", unicode_name=False, notify=True):
    """Import the filter file `name` in directory `_root` (new path).

    Unlike tryImport_old, this extends the single global translation table
    with the filter's strings instead of reloading the translation module.
    Returns the imported module, or None on failure (optionally alerting).
    """
    with open(os.path.join(_root, name)) as module_file:
        module_name = name.split(os.path.sep)[-1].replace(".py", "")
        try:
            if unicode_name:
                # Path can't be encoded to str: read and exec the source
                # into a fresh module instead of using imp.load_source.
                source_code = module_file.read()
                module = imp.new_module(module_name)
                exec (source_code, module.__dict__)
                if module_name not in sys.modules.keys():
                    sys.modules[module_name] = module
            else:
                module = imp.load_source(module_name, os.path.join(_root, name), module_file)
            module.foldersForDisplayName = subFolderString
            if not (hasattr(module, 'displayName')):
                module.displayName = module_name  # fall back to the file name
            if not stock:
                # This work fine with custom filters, but the choice buttons are broken for the stock ones...
                if directories.getFiltersDir() in name:
                    trn_path = os.path.split(name)[0]
                else:
                    trn_path = directories.getFiltersDir()
                trn_path = os.path.join(trn_path, subFolderString[1:-1], module_name)
                if os.path.exists(trn_path):
                    # Merge the filter's translations into the global table.
                    albow.translate.buildTranslation(config.settings.langCode.get(), extend=True, langPath=trn_path)
                # module.trn = albow.translate
                module.displayName = _(module.displayName)
                module.trn = albow.translate
            return module
        except Exception as e:
            traceback.print_exc()
            if notify:
                alert(_(u"Exception while importing filter module {}. " +
                        u"See console for details.\n\n{}").format(name, e))
            return None
# Select the active filter-import implementation; `new_method` is the
# development toggle defined above.
tryImport = tryImport_new if new_method else tryImport_old
| false
| true
|
790a67bcfeb56bd0391430b120308c8776f637d9
| 376
|
py
|
Python
|
Old/exercise/example52.py
|
exchris/Pythonlearn
|
174f38a86cf1c85d6fc099005aab3568e7549cd0
|
[
"MIT"
] | null | null | null |
Old/exercise/example52.py
|
exchris/Pythonlearn
|
174f38a86cf1c85d6fc099005aab3568e7549cd0
|
[
"MIT"
] | 1
|
2018-11-27T09:58:54.000Z
|
2018-11-27T09:58:54.000Z
|
Old/exercise/example52.py
|
exchris/pythonlearn
|
174f38a86cf1c85d6fc099005aab3568e7549cd0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2017/7/25 0025 上午 10:14
# @Author : Exchris Tsai
# @Site :
# @File : example52.py
# @Software: PyCharm
"""
题目:学习使用按位或 | 。
程序分析:0|0=0; 0|1=1; 1|0=1; 1|1=1
"""
__author__ = 'Exchris Tsai'
if __name__ == '__main__':
a = 0o77
b = a | 3
print('a | b is %d' %b)
b |= 7
print('a | b is %d' %b)
| 17.904762
| 36
| 0.513298
|
__author__ = 'Exchris Tsai'
if __name__ == '__main__':
a = 0o77
b = a | 3
print('a | b is %d' %b)
b |= 7
print('a | b is %d' %b)
| true
| true
|
790a680754acad956620180933bf33dc64c43918
| 508
|
py
|
Python
|
tests/example.py
|
bwohlberg/py2nb
|
efc98c33c34f364f582da5f716f678253c2887ba
|
[
"BSD-3-Clause"
] | 79
|
2015-06-05T17:38:07.000Z
|
2020-08-09T01:52:00.000Z
|
tests/example.py
|
bwohlberg/py2jn
|
efc98c33c34f364f582da5f716f678253c2887ba
|
[
"BSD-3-Clause"
] | 6
|
2017-02-14T03:20:56.000Z
|
2018-06-15T19:14:12.000Z
|
tests/example.py
|
bwohlberg/py2jn
|
efc98c33c34f364f582da5f716f678253c2887ba
|
[
"BSD-3-Clause"
] | 41
|
2017-02-13T23:00:32.000Z
|
2020-04-05T06:31:45.000Z
|
"""
# Hello
Demonstrate:
* conversion of regular python script into _Jupyter notebook_
* support **Markdown**
* this is a list
"""
from __future__ import absolute_import, print_function, division
"""
## Hello
This is a *hello world* function.
"""
def hello():
"""
This is a docstring
"""
print("hello")
"""
## Another Cell 1
"""
def main():
hello()
"""
### Run this
"""
if __name__ == '__main__':
def what():
main()
print(what())
"""
## Another Cell 2
"""
| 10.367347
| 64
| 0.582677
|
from __future__ import absolute_import, print_function, division
def hello():
print("hello")
def main():
hello()
if __name__ == '__main__':
def what():
main()
print(what())
| true
| true
|
790a68208e1c6207772a92098da7f33d1495a262
| 424
|
py
|
Python
|
tests/migrations/2015_10_10_move_keys.py
|
adamlwgriffiths/exodus
|
a8bd7edcc963fb87e034b06de6db3ae5948f5668
|
[
"BSD-2-Clause"
] | 1
|
2015-10-21T06:15:24.000Z
|
2015-10-21T06:15:24.000Z
|
tests/jaweson_migrations/2015_10_10_move_keys.py
|
adamlwgriffiths/exodus
|
a8bd7edcc963fb87e034b06de6db3ae5948f5668
|
[
"BSD-2-Clause"
] | null | null | null |
tests/jaweson_migrations/2015_10_10_move_keys.py
|
adamlwgriffiths/exodus
|
a8bd7edcc963fb87e034b06de6db3ae5948f5668
|
[
"BSD-2-Clause"
] | 1
|
2019-11-06T11:44:15.000Z
|
2019-11-06T11:44:15.000Z
|
from __future__ import print_function
from exodus import BaseMigration
class Migration(BaseMigration):
version = '2015_10_10'
def can_migrate_database(self, adapter):
return self.version > adapter.db.get('version', None)
def migrate_database(self, adapter):
# migrate the keys
adapter.db['c'] = adapter.db['a']
del adapter.db['a']
adapter.db['version'] = self.version
| 26.5
| 61
| 0.67217
|
from __future__ import print_function
from exodus import BaseMigration
class Migration(BaseMigration):
version = '2015_10_10'
def can_migrate_database(self, adapter):
return self.version > adapter.db.get('version', None)
def migrate_database(self, adapter):
adapter.db['c'] = adapter.db['a']
del adapter.db['a']
adapter.db['version'] = self.version
| true
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.