| column | dtype |
| --- | --- |
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
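The rows that follow use the schema above: one row per source file, with the file text in `content` and the derived statistics and `qsc_*` quality signals alongside it. As a minimal sketch (not part of this dump) of how rows with this schema could be inspected, assuming they are available as a local Parquet export, one might do the following; the file name `code_rows.parquet` is hypothetical.

```python
# Minimal sketch: inspect rows that follow the schema above.
# Assumes a local Parquet export; "code_rows.parquet" is a hypothetical file name.
import pandas as pd

df = pd.read_parquet("code_rows.parquet")

# Peek at a few of the file-level metadata columns.
print(df[["hexsha", "size", "lang", "max_stars_repo_name"]].head())

# Example filter on two of the quality-signal columns.
kept = df[
    (df["alphanum_fraction"] > 0.5)
    & (df["qsc_code_frac_chars_dupe_5grams_quality_signal"] < 0.2)
]
print(len(kept), "rows kept")
```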
hexsha: d07df814cb6748757669097a641e731236e968f6 | size: 7,672 | ext: py | lang: Python
max_stars: path=scripts/deploy.py, name=tallywiesenberg/algorand-tellorflex, head_hexsha=5fa673a637338e4dfa15b0cf4937a757890a5e14, licenses=["MIT"], count=null, event_min=null, event_max=null
max_issues: path=scripts/deploy.py, name=tallywiesenberg/algorand-tellorflex, head_hexsha=5fa673a637338e4dfa15b0cf4937a757890a5e14, licenses=["MIT"], count=null, event_min=null, event_max=null
max_forks: path=scripts/deploy.py, name=tallywiesenberg/algorand-tellorflex, head_hexsha=5fa673a637338e4dfa15b0cf4937a757890a5e14, licenses=["MIT"], count=null, event_min=null, event_max=null
content:
from typing import Tuple, List
from algosdk.v2client.algod import AlgodClient
from algosdk.future import transaction
from algosdk.logic import get_application_address
from algosdk import account, encoding
from pyteal import compileTeal, Mode, Keccak256
from tellorflex.methods import report
from utils.account import Account
from tellorflex.contracts import approval_program, clear_state_program
from utils.helpers import add_standalone_account, fund_account
from utils.util import (
waitForTransaction,
fullyCompileContract,
getAppGlobalState,
)
APPROVAL_PROGRAM = b""
CLEAR_STATE_PROGRAM = b""
class Scripts:
def __init__(self, client, tipper, reporter, governance_address) -> None:
self.client = client
self.tipper = tipper
self.reporter = reporter
self.governance_address = governance_address.getAddress()
def get_contracts(self, client: AlgodClient) -> Tuple[bytes, bytes]:
"""Get the compiled TEAL contracts for the tellor contract.
Args:
client: An algod client that has the ability to compile TEAL programs.
Returns:
A tuple of 2 byte strings. The first is the approval program, and the
second is the clear state program.
"""
global APPROVAL_PROGRAM
global CLEAR_STATE_PROGRAM
if len(APPROVAL_PROGRAM) == 0:
APPROVAL_PROGRAM = fullyCompileContract(client, approval_program())
CLEAR_STATE_PROGRAM = fullyCompileContract(client, clear_state_program())
return APPROVAL_PROGRAM, CLEAR_STATE_PROGRAM
def deploy_tellor_flex(
self,
query_id: str,
query_data: str
) -> int:
"""Create a new tellor reporting contract.
Args:
client: An algod client.
sender: The account that will request data through the contract
governance_address: the account that can vote to dispute reports
query_id: the ID of the data requested to be put on chain
query_data: the in-depth specifications of the data requested
Returns:
The ID of the newly created auction app.
"""
approval, clear = self.get_contracts(self.client)
globalSchema = transaction.StateSchema(num_uints=7, num_byte_slices=5)
localSchema = transaction.StateSchema(num_uints=0, num_byte_slices=0)
app_args = [
encoding.decode_address(self.governance_address),
query_id.encode("utf-8"),
query_data.encode("utf-8"),
]
txn = transaction.ApplicationCreateTxn(
sender=self.tipper.getAddress(),
on_complete=transaction.OnComplete.NoOpOC,
approval_program=approval,
clear_program=clear,
global_schema=globalSchema,
local_schema=localSchema,
app_args=app_args,
sp=self.client.suggested_params(),
)
signedTxn = txn.sign(self.tipper.getPrivateKey())
self.client.send_transaction(signedTxn)
response = waitForTransaction(self.client, signedTxn.get_txid())
assert response.applicationIndex is not None and response.applicationIndex > 0
self.app_id = response.applicationIndex
self.app_address = get_application_address(self.app_id)
def stake(self) -> None:
"""Place a bid on an active auction.
Args:
client: An Algod client.
appID: The app ID of the auction.
reporter: The account staking to report.
"""
appAddr = get_application_address(self.app_id)
# appGlobalState = getAppGlobalState(client, appID)
# if any(appGlobalState[b"bid_account"]):
# # if "bid_account" is not the zero address
# prevBidLeader = encoding.encode_address(appGlobalState[b"bid_account"])
# else:
# prevBidLeader = None
stake_amount = 180*1000000 #200 dollars of ALGO
suggestedParams = self.client.suggested_params()
payTxn = transaction.PaymentTxn(
sender=self.reporter.getAddress(),
receiver=self.app_address,
amt=stake_amount,
sp=suggestedParams,
)
optInTx = transaction.ApplicationOptInTxn(
sender=self.reporter.getAddress(),
index=self.app_id,
sp=suggestedParams,
)
transaction.assign_group_id([payTxn, optInTx])
signedPayTxn = payTxn.sign(self.reporter.getPrivateKey())
signedAppCallTxn = optInTx.sign(self.reporter.getPrivateKey())
self.client.send_transactions([signedPayTxn, signedAppCallTxn])
waitForTransaction(self.client, optInTx.get_txid())
def closeAuction(self, client: AlgodClient, appID: int, closer: Account):
"""Close an auction.
This action can only happen before an auction has begun, in which case it is
cancelled, or after an auction has ended.
If called after the auction has ended and the auction was successful, the
NFT is transferred to the winning bidder and the auction proceeds are
transferred to the seller. If the auction was not successful, the NFT and
all funds are transferred to the seller.
Args:
client: An Algod client.
appID: The app ID of the auction.
closer: The account initiating the close transaction. This must be
either the seller or auction creator if you wish to close the
auction before it starts. Otherwise, this can be any account.
"""
appGlobalState = getAppGlobalState(client, appID)
nftID = appGlobalState[b"nft_id"]
accounts: List[str] = [encoding.encode_address(appGlobalState[b"seller"])]
if any(appGlobalState[b"bid_account"]):
# if "bid_account" is not the zero address
accounts.append(encoding.encode_address(appGlobalState[b"bid_account"]))
deleteTxn = transaction.ApplicationDeleteTxn(
sender=closer.getAddress(),
index=appID,
accounts=accounts,
foreign_assets=[nftID],
sp=client.suggested_params(),
)
signedDeleteTxn = deleteTxn.sign(closer.getPrivateKey())
client.send_transaction(signedDeleteTxn)
waitForTransaction(client, signedDeleteTxn.get_txid())
if __name__ == "__main__":
def setup():
algo_address = "http://localhost:4001"
algo_token = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
client = AlgodClient(algod_address=algo_address, algod_token=algo_token)
gov_address = Account.FromMnemonic("figure adapt crumble always cart twist scatter timber smooth artist gaze raise genre say scissors arena hidden poem mimic worry race burst yard about key")
tipper = Account.FromMnemonic("lava side salad unit door frozen clay skate project slogan choose poverty magic arrow pond swing alcohol bachelor witness monkey iron remind team abstract mom")
reporter = Account.FromMnemonic("gaze hockey eight fog scrub bind calm scrub change cannon recipe face shield smart member toward turkey pyramid item quote explain witness music ability weapon")
print("gov", gov_address.getAddress())
print("tipper", tipper.getAddress())
print("reporter", reporter.getAddress())
s = Scripts(client=client, tipper=tipper, reporter=reporter, governance_address=gov_address)
return s
s = setup()
app_id = s.deploy_tellor_flex(
query_id="hi",
query_data="hi",
)
s.stake()
| 37.607843
| 202
| 0.669317
| 864
| 7,672
| 5.810185
| 0.347222
| 0.021912
| 0.023705
| 0.013546
| 0.122311
| 0.086056
| 0.061753
| 0.043426
| 0.043426
| 0.043426
| 0
| 0.005273
| 0.258472
| 7,672
| 204
| 203
| 37.607843
| 0.877131
| 0.233186
| 0
| 0.035088
| 0
| 0.026316
| 0.112404
| 0.011455
| 0
| 0
| 0
| 0
| 0.008772
| 1
| 0.052632
| false
| 0
| 0.096491
| 0
| 0.175439
| 0.026316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: d07f63644facb997bb2d4b1393d95a337bd66e35 | size: 11,118 | ext: py | lang: Python
max_stars: path=parse-vocab-list.py, name=kltm/kanji-textbook-table, head_hexsha=e97630b47c4b00b734f0f29794331e7a0fd857d5, licenses=["BSD-3-Clause"], count=null, event_min=null, event_max=null
max_issues: path=parse-vocab-list.py, name=kltm/kanji-textbook-table, head_hexsha=e97630b47c4b00b734f0f29794331e7a0fd857d5, licenses=["BSD-3-Clause"], count=null, event_min=null, event_max=null
max_forks: path=parse-vocab-list.py, name=kltm/kanji-textbook-table, head_hexsha=e97630b47c4b00b734f0f29794331e7a0fd857d5, licenses=["BSD-3-Clause"], count=null, event_min=null, event_max=null
content:
####
#### Convert a TSV into a fully parsed JSON list blob that could be
#### used by a mustache (or other logicless) template.
####
#### Example usage to analyze the usual suspects:
#### python3 parse.py --help
####
#### Get report of current problems:
#### python3 parse-vocab-list.py --tsv ~/Downloads/UCSC中上級教科書_漢字・単語リスト\ -\ 単語リス ト\(4\).tsv --output /tmp/parsed-vocab-list.json
####
#### As part of a pipeline for vocab list:
#### python3 parse-vocab-list.py --tsv ~/Downloads/UCSC中上級教科書_漢字・単語リスト\ -\ 単語リス ト\(13\).tsv --output /tmp/parsed-vocab-list.json && python3 chapter-bin.py -v --input /tmp/parsed-vocab-list.json --output /tmp/chapters.json && python3 apply-to-chapters.py --input /tmp/chapters.json --template ./word-html-frame.template.html --output /tmp/chapter
####
#### As part of a pipeline for glossary:
#### python3 parse-vocab-list.py --tsv ~/Downloads/UCSC中上級教科書_漢字・単語リスト\ -\ 単語リス ト\(13\).tsv --output /tmp/parsed-vocab-list.json && python3 jalphabetical-bin.py --pattern vocab-list --input /tmp/parsed-vocab-list.json --output /tmp/jalphed-vocab-list.json && python3 apply-globally.py --input /tmp/jalphed-vocab-list.json --template ./manual-glossary.template.html --output /tmp/glossary.html
####
import sys
import argparse
import logging
import csv
import pystache
import json
import os
## Logger basic setup.
logging.basicConfig(level=logging.INFO)
LOGGER = logging.getLogger('parse')
LOGGER.setLevel(logging.WARNING)
def die_screaming(string):
""" Die and take our toys home. """
LOGGER.error(string)
sys.exit(1)
def main():
## Deal with incoming.
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-v', '--verbose', action='store_true',
help='More verbose output')
parser.add_argument('-t', '--tsv',
help='The TSV data file to read in')
parser.add_argument('-o', '--output',
help='The file to output to')
args = parser.parse_args()
## Up the verbosity level if we want.
if args.verbose:
LOGGER.setLevel(logging.INFO)
LOGGER.info('Verbose: on')
## Ensure arguments and read in what is necessary.
if not args.tsv:
die_screaming('need an input tsv argument')
LOGGER.info('Will use "' + args.tsv + '" as data')
if not args.output:
die_screaming('need an output file argument')
LOGGER.info('Will output to: ' + args.output)
## Setup some general metadata checking for the different formats.
required_total_columns = 10
required_columns = ["level", "chapter", "raw-japanese", "reading", "meaning"]
## Bring on all data in one sweep, formatting and adding
## appropriate parts to internal format so that we can simply
## output in any mustache template.
data_list = []
with open(args.tsv, 'r') as tsv_in:
tsv_in = csv.reader(tsv_in, delimiter='\t')
## Process data.
first_line_p = True
i = 0
for line in tsv_in:
i = i + 1
if first_line_p:
first_line_p = False
continue
else:
count = len(line)
if len(set(line)) == 1 and line[0] == "":
LOGGER.info("Skipping completely empty line: " + str(i))
continue
elif not count == required_total_columns:
die_screaming('malformed line: '+ str(i) +' '+ '\t'.join(line))
else:
# LOGGER.info("-------")
# LOGGER.info(type(line[3]))
# LOGGER.info(len(line[3]))
# LOGGER.info(line[3])
## Base parsing everything into a common object.
## Additional metadata that we'll want.
data_object = {}
data_object["row"] = str(i) # inserted
data_object["level"] = str(line[0]) # req
data_object["chapter"] = str(line[1]) # req
data_object["raw-japanese"] = str(line[2]) # req
data_object["raw-ruby"] = line[3] if (type(line[3]) is str and len(line[3]) > 0) else None # opt
data_object["reading"] = str(line[4]) # req
data_object["meaning"] = line[5] # req
data_object["section"] = line[6] if (type(line[6]) is str and len(line[6]) > 0) else None # opt
data_object["extra"] = True if (type(line[7]) is str and line[7] == '*') else None # opt
data_object["grammar-point"] = line[8] if (type(line[8]) is str and len(line[8]) > 0) else None # opt
data_object["notes"] = line[9] if (type(line[9]) is str and len(line[9]) > 0) else None # opt
## Basic error checking.
for required_entry in required_columns:
if not data_object[required_entry] is str and not len(data_object[required_entry]) > 0:
die_screaming('malformed line with "'+required_entry+'" at '+ str(i) +': '+ '\t'.join(line))
## Make some other mappings for commonly used
## sections names.
section_names_alt = {#None: "",
"読み物 一": "R.1",
"会話 一": "D.1",
"読み物 二": "R.2",
"会話 二": "D.2",
"読み物 三": "R.3",
"会話 三": "D.3",
"読み物 四": "R.4",
"会話 四": "D.4"}
if data_object["section"] in section_names_alt.keys():
data_object["section-alt-en-short"] = section_names_alt[data_object["section"]]
## Transform the comma/pipe-separated data raw "Ruby"
## object into something usable, if extant.
# LOGGER.info(data_object["raw-ruby"])
ruby = []
if data_object["raw-ruby"]:
try:
ruby_set_list_raw = data_object["raw-ruby"].split(",")
for ruby_set_raw in ruby_set_list_raw:
ruby_set_pre = ruby_set_raw.strip()
LOGGER.info("ruby_set_pre: " + ruby_set_pre)
ruby_set = ruby_set_pre.split("|")
ruby_kanji = ruby_set[0].strip()
ruby_reading = ruby_set[1].strip()
ruby.append({"kanji": ruby_kanji,
"reading": ruby_reading})
except:
die_screaming('error parsing ruby at '+ str(i) +': '+ '\t'.join(line))
data_object["ruby"] = ruby
## Now that we have the ruby parsed, create a new
## version of the "Japanese" ("raw-japanese")
## column with mustache renderable data hints.
LOGGER.info('^^^')
j = data_object["raw-japanese"]
remaining_rubys = len(ruby)
ruby_parse_data = []
for r in ruby:
## Case when kanji not found in remaining
## japanese.
LOGGER.info("japanese: " + j)
LOGGER.info("kanji: " + r["kanji"])
LOGGER.info("reading: " + r["reading"])
if j.find(r["kanji"]) == -1:
LOGGER.info('malformed line at '+ str(i) +': '+ '\t'.join(line))
die_screaming('bad japanese/ruby at line '+ str(i))
else:
## Some numbers we'll want on hand.
jl = len(j) # the remaining length of the japanese
rl = len(r["kanji"]) # the length of the ruby
offset = j.find(r["kanji"]) # the offset of the kanji
LOGGER.info(str(jl))
LOGGER.info(str(rl))
LOGGER.info(str(offset))
## Get the pre-ruby string added, if
## extant.
if offset == 0:
pass
else:
pre_string = j[0:(offset)]
LOGGER.info('pre_string: ' + pre_string)
ruby_parse_data.append({"string": pre_string,
"has-ruby": False})
## Add the ruby string section.
ruby_string = j[offset:(offset+rl)]
LOGGER.info('ruby_string: ' + ruby_string)
ruby_parse_data.append({"string": ruby_string,
"reading":r["reading"],
"has-ruby": True})
## If this is the last ruby we're dealing
## with, we're done and add the rest of
## the string. Otherwise, "soft loop" on
## the shorter string and next ruby.
remaining_rubys = remaining_rubys - 1
if remaining_rubys == 0:
## Last one, add any remaining string.
if (offset+rl) < jl:
post_string = j[(offset+rl):jl]
LOGGER.info('post_string: ' + post_string)
ruby_parse_data.append({"string": post_string,
"has-ruby": False})
else:
j = j[(offset+rl):jl]
data_object["rich-japanese"] = ruby_parse_data
## Basic error checking.
for required_entry in required_columns:
if not data_object[required_entry] is str and not len(data_object[required_entry]) > 0:
die_screaming('malformed line with "'+required_entry+'" at '+ str(i) +': '+ '\t'.join(line))
## Onto the pile.
data_list.append(data_object)
## Dump to given file.
#LOGGER.info(json.dumps(data_list, indent = 4))
with open(args.output, 'w') as output:
output.write(json.dumps(data_list, indent = 4))
## You saw it coming...
if __name__ == '__main__':
main()
| 48.977974
| 394
| 0.477154
| 1,211
| 11,118
| 4.266722
| 0.2436
| 0.050319
| 0.017612
| 0.017418
| 0.225082
| 0.190052
| 0.136443
| 0.130443
| 0.116509
| 0.116509
| 0
| 0.009847
| 0.406278
| 11,118
| 226
| 395
| 49.19469
| 0.772459
| 0.238172
| 0
| 0.105634
| 0
| 0
| 0.101432
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014085
| false
| 0.007042
| 0.049296
| 0
| 0.06338
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: d080168f53e03ca48aa7bb5ade9767788f28ed6f | size: 2,716 | ext: py | lang: Python
max_stars: path=expressy/importer.py, name=timedata-org/expressy, head_hexsha=0aaeffa430c1ccfe649ee2bcdb69e7feb48ada95, licenses=["MIT"], count=1, event_min=2017-01-22T18:53:52.000Z, event_max=2017-01-22T18:53:52.000Z
max_issues: path=expressy/importer.py, name=timedata-org/expressy, head_hexsha=0aaeffa430c1ccfe649ee2bcdb69e7feb48ada95, licenses=["MIT"], count=10, event_min=2017-01-23T15:41:15.000Z, event_max=2017-07-11T11:55:46.000Z
max_forks: path=expressy/importer.py, name=timedata-org/expressy, head_hexsha=0aaeffa430c1ccfe649ee2bcdb69e7feb48ada95, licenses=["MIT"], count=null, event_min=null, event_max=null
content:
import builtins, importlib
class Importer(object):
"""An Importer imports either a namespace or a symbol within a namespace.
It's like a more general version of importlib.import_module which handles
builtins and attributes within a module.
An Importer has a symbol_table that's always used to try to resolve
symbols before anything else. By default, symbol_table is the Python
built-in symbols as found in the module `builtins`:
ArithmeticError, AssertionError, ..., abs, all, ... zip
It also has a module_importer which imports Python modules or raises
an ImportError. By default this is just importlib.import_module.
"""
def __init__(self, symbol_table=vars(builtins),
module_importer=importlib.import_module):
"""Args:
symbol_table: a dictionary which maps symbols to values.
module_importer: a function that imports namespaces by path or
raises an ImportError otherwise.
"""
self.symbol_table = symbol_table
self.module_importer = module_importer
def getter(self, symbol):
"""Return a function that gets the value for symbol when called.
This function will return the new value when that value changes,
but will *not* reload a module when that module changes.
"""
try:
value = self.symbol_table[symbol]
return lambda: value
except KeyError:
pass
*body, last = symbol.split('.')
try:
imported = self.module_importer(symbol)
return lambda: imported
except ImportError:
if not (body and last):
raise # Can't recurse any more!
# Call getter recursively on the parent.
parent_name = '.'.join(body)
parent = self.getter(parent_name)
parent_value = parent()
def getter():
try:
return getattr(parent_value, last)
except AttributeError:
raise ImportError("No module named '%s'" % symbol, name=symbol)
return getter
def __call__(self, symbol):
"""Import the value for symbol, or raise an ImportError if it can't be
found.
"""
return self.getter(symbol)()
def make(self, typename, args=(), **kwds):
"""Make an object from its type.
Args:
typename: name of the class or other constructor for the object.
args: positional arguments to the constructor.
keyword arguments to the constructor.
"""
constructor = self(typename)
return constructor(*args, **kwds)
importer = Importer()
| 33.121951
| 79
| 0.621134
| 324
| 2,716
| 5.12037
| 0.367284
| 0.046414
| 0.037975
| 0.025316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.308174
| 2,716
| 81
| 80
| 33.530864
| 0.882916
| 0.463918
| 0
| 0.088235
| 0
| 0
| 0.01746
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.147059
| false
| 0.029412
| 0.264706
| 0
| 0.617647
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: d0814ab2d80cbf44bcd4c19447312d5ef89e098c | size: 2,533 | ext: py | lang: Python
max_stars: path=tests/test_plain.py, name=Nafi-Amaan-Hossain/willpyre, head_hexsha=a506d32765088c8e59c46672946891e61dce87f2, licenses=["BSD-3-Clause"], count=1, event_min=2021-08-16T08:18:22.000Z, event_max=2021-08-16T08:18:22.000Z
max_issues: path=tests/test_plain.py, name=Nafi-Amaan-Hossain/willpyre, head_hexsha=a506d32765088c8e59c46672946891e61dce87f2, licenses=["BSD-3-Clause"], count=null, event_min=null, event_max=null
max_forks: path=tests/test_plain.py, name=Nafi-Amaan-Hossain/willpyre, head_hexsha=a506d32765088c8e59c46672946891e61dce87f2, licenses=["BSD-3-Clause"], count=null, event_min=null, event_max=null
content:
from async_asgi_testclient import TestClient
from myapp import main
import pytest
@pytest.mark.asyncio
async def test_willpyre_app():
async with TestClient(main) as client:
resp = await client.get("/")
assert resp.status_code == 200
assert resp.text == "index page"
@pytest.mark.asyncio
async def test_willpyre_post():
async with TestClient(main) as client:
resp = await client.post("/login/", data="a=anything")
assert resp.status_code == 200
assert resp.text == "anything"
@pytest.mark.asyncio
async def test_willpyre_get():
async with TestClient(main) as client:
resp = await client.get("/login/?user=admin")
assert resp.status_code == 200
assert resp.text == "Welcome admin"
@pytest.mark.asyncio
async def test_trailing_slash():
async with TestClient(main) as client:
resp = await client.get("/login")
assert resp.status_code == 200
assert resp.text == "Welcome ordinary user"
@pytest.mark.asyncio
async def test_url_vars():
async with TestClient(main) as client:
resp = await client.get("/api/hello")
assert resp.status_code == 200
assert resp.text == "You requested the variable hello"
@pytest.mark.asyncio
async def test_url_many():
async with TestClient(main) as client:
resp = await client.get("/static/foo/bar/baz")
assert resp.status_code == 200
assert resp.text == "foobarbaz"
@pytest.mark.asyncio
async def test_utils():
async with TestClient(main) as client:
resp = await client.get("/json")
assert resp.json() == {'a': 'b'}
assert resp.headers["Content-Type"] == "application/json"
@pytest.mark.asyncio
async def test_response404():
async with TestClient(main) as client:
resp = await client.get("/non-exhistent")
assert resp.text == "Not found"
assert resp.status_code == 404
@pytest.mark.asyncio
async def test_response405():
async with TestClient(main) as client:
resp = await client.open("/login", method="NO_SUCH_METHOD")
assert resp.text == "Method not allowed"
assert resp.status_code == 405
@pytest.mark.asyncio
async def test_put():
async with TestClient(main) as client:
resp = await client.put("/others")
assert resp.text == "others"
@pytest.mark.asyncio
async def test_patch():
async with TestClient(main) as client:
resp = await client.patch("/others")
assert resp.text == "others"
| 25.585859
| 67
| 0.660087
| 332
| 2,533
| 4.948795
| 0.225904
| 0.121729
| 0.113816
| 0.147292
| 0.714547
| 0.682897
| 0.576993
| 0.470481
| 0.380402
| 0.214851
| 0
| 0.015283
| 0.22503
| 2,533
| 98
| 68
| 25.846939
| 0.821701
| 0
| 0
| 0.447761
| 0
| 0
| 0.11291
| 0
| 0
| 0
| 0
| 0
| 0.298507
| 1
| 0
| false
| 0
| 0.044776
| 0
| 0.044776
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: d085e4d4c18167f75fdb378a2d6a53bb684ea18f | size: 1,124 | ext: py | lang: Python
max_stars: path=scripts/removeComments.py, name=doggy8088/azure-devops-cli-extension, head_hexsha=2f6b1a6ffbc49ae454df640a8bb00dac991d6514, licenses=["MIT"], count=326, event_min=2019-04-10T12:38:23.000Z, event_max=2022-03-31T23:07:49.000Z
max_issues: path=scripts/removeComments.py, name=doggy8088/azure-devops-cli-extension, head_hexsha=2f6b1a6ffbc49ae454df640a8bb00dac991d6514, licenses=["MIT"], count=562, event_min=2019-04-10T07:36:12.000Z, event_max=2022-03-28T07:37:54.000Z
max_forks: path=scripts/removeComments.py, name=doggy8088/azure-devops-cli-extension, head_hexsha=2f6b1a6ffbc49ae454df640a8bb00dac991d6514, licenses=["MIT"], count=166, event_min=2019-04-10T07:59:40.000Z, event_max=2022-03-16T14:17:13.000Z
content:
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
def remove_comment_from_file(filePath):
temp_file = filePath + ".tmp"
file = open(filePath, "r")
file_new = open(temp_file, "w")
comment_in_progress = False
for line in file:
if "\"\"\"" in line:
comment_in_progress = not comment_in_progress
if not comment_in_progress and "\"\"\"" not in line and not line.startswith("#"):
file_new.write(line)
file.close()
file_new.close()
os.replace(temp_file, filePath)
for path, subdirs, files in os.walk('.'):
for name in files:
file_path = os.path.join(path, name)
if file_path.endswith(".py") and "devops_sdk" in file_path:
print('removing comments from ' + file_path)
remove_comment_from_file(file_path)
| 36.258065
| 94
| 0.544484
| 130
| 1,124
| 4.507692
| 0.430769
| 0.068259
| 0.116041
| 0.071672
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.200178
| 1,124
| 30
| 95
| 37.466667
| 0.651835
| 0.298932
| 0
| 0
| 0
| 0
| 0.10728
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.05
| 0
| 0.1
| 0.05
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: d086ce5911f7ac6a2a4bd8994adb6dc6191adc49 | size: 7,407 | ext: py | lang: Python
max_stars: path=scripts/update_tables.py, name=EnsemblGSOC/tony-gsoc-2018, head_hexsha=7b727e3a82654a4f102d735fb0b2c4ab12470ff6, licenses=["Apache-2.0"], count=1, event_min=2018-08-12T08:34:51.000Z, event_max=2018-08-12T08:34:51.000Z
max_issues: path=scripts/update_tables.py, name=EnsemblGSOC/tony-gsoc-2018, head_hexsha=7b727e3a82654a4f102d735fb0b2c4ab12470ff6, licenses=["Apache-2.0"], count=19, event_min=2018-05-11T12:46:28.000Z, event_max=2018-08-13T11:28:44.000Z
max_forks: path=scripts/update_tables.py, name=EnsemblGSOC/tony-gsoc-2018, head_hexsha=7b727e3a82654a4f102d735fb0b2c4ab12470ff6, licenses=["Apache-2.0"], count=1, event_min=2018-09-22T04:58:55.000Z, event_max=2018-09-22T04:58:55.000Z
content:
"""
.. See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import ast
import sys
import xml.etree.ElementTree as ET
from datetime import datetime
import requests
from sqlalchemy import create_engine, Table, MetaData, func, or_
from sqlalchemy.orm import sessionmaker
from base import *
# setup config
config_path = sys.argv[1]
with open(config_path) as configfile:
config = ast.literal_eval(configfile.read())
tony_assembly = config["tony_assembly"]
results_dir = config ["results_dir"]
udocker_root = config["udocker_root"]
toil_dir = config["toil_dir"]
workflow_dir = config["workflow_dir"]
log_dir = config["log_dir"]
registry = config["registry"]
def xml_download(ena_accession):
"""
pulling xml record from ENA
:param ena_accession:
:return:
"""
try:
xml = ET.fromstring(requests.get("https://www.ebi.ac.uk/ena/data/view/{}&display=xml".format(ena_accession),
stream=True, timeout=60).content)
return xml
except requests.exceptions.ReadTimeout:
stderr.write("Could not download XML file with accession {}\n".format(ena_accession))
return None
def xml_download_retry(ena_accession):
"""
pulling xml record from ENA, some of the records take a longer time to connect, this retry set timeout to be 5 mins
:param ena_accession:
:return:
"""
try:
xml = ET.fromstring(requests.get("https://www.ebi.ac.uk/ena/data/view/{}&display=xml".format(ena_accession),
stream=True, timeout=300).content)
return xml
except requests.exceptions.ReadTimeout:
stderr.write("Could not download XML file with accession {}\n".format(ena_accession))
return None
def chromosome_number(xml):
"""
find the number of chromosomes within the assembly. If the assembly is assembled to scaffold level, returns 0
:param xml:
:return:
"""
try:
chroms_number = len(xml.find("ASSEMBLY").find("CHROMOSOMES").findall("CHROMOSOME"))
return chroms_number
except AttributeError:
return 0
def get_chromosomes(xml):
for record in xml.find("ASSEMBLY").find("CHROMOSOMES").findall("CHROMOSOME"):
yield record
def chromosome_data(xml):
"""
extract md5 and length of the chromosome from the chromosome's xml record
:param xml:
:return:
"""
for xref in xml.find("entry").findall("xref"):
if xref.attrib["db"] == "MD5":
md5 = xref.attrib["id"]
break
length = xml.find("entry").attrib["sequenceLength"]
return md5, int(length)
def get_scaffold_number(xml):
for attribute in xml.find("ASSEMBLY").find("ASSEMBLY_ATTRIBUTES").findall("ASSEMBLY_ATTRIBUTE"):
if attribute.find("TAG").text == "scaffold-count":
return int(attribute.find("VALUE").text)
stderr = open("{log_dir}/log_update_tables.txt".format(log_dir=log_dir), "a")
stderr.write(str(datetime.now()) + "\n")
stderr.write("====\n")
registry_engine = create_engine(registry)
assembly = Table("assembly", MetaData(), autoload=True, autoload_with=registry_engine)
engine = create_engine(tony_assembly)
session = sessionmaker(bind=engine)
s = session()
old_accessions = s.query(GCA.accession).all()
r_session = sessionmaker(bind=registry_engine)
rs = r_session()
sub_concat = func.concat(assembly.c.chain, ".", assembly.c.version)
new_accessions = rs.query(sub_concat).filter(sub_concat.notin_(old_accessions)).all()
rs.close()
s = session()
for entry in new_accessions:
gca = GCA()
gca.accession = entry[0]
# print(gca.accession)
gca_xml = xml_download(gca.accession)
if gca_xml is not None: # only add to GCA table if the xml record of the assembly exists
try:
gca.assembly_level = gca_xml.find("ASSEMBLY").find("ASSEMBLY_LEVEL").text
except AttributeError:
gca.assembly_level = "No Level"
stderr.write("{} has no assembly_level attribute, not added to database\n".format(gca.accession))
if gca.assembly_level in ["chromosome", "complete genome"]:
gca.records = chromosome_number(gca_xml)
s.add(gca)
# print(gca.accession, gca.assembly_level, gca.records)
for chrom_record in get_chromosomes(gca_xml):
chromosome = Chromosome()
chromosome.GCA_accession = gca.accession
chromosome.accession = chrom_record.attrib["accession"]
# print(chromosome.accession)
chromosome.name = chrom_record.find("NAME").text
chromosome.status = 1
chrom_xml = xml_download(chromosome.accession)
if chrom_xml is not None:
try:
chromosome.md5, chromosome.length = chromosome_data(chrom_xml)
except AttributeError:
stderr.write("Chromosome {} doesn't exit or has corrupted xml file. Chromosome was added "
"without md5 and length.\n".format(chromosome.accession))
s.add(chromosome)
# print(chromosome.accession, chromosome.GCA_accession,
# chromosome.name, chromosome.length, chromosome.md5)
if not s.query(Jobs).filter(Jobs.chromosome_accession == chromosome.accession).all():
for job in ["get_fasta", "GC", "trf", "CpG"]:
s.add(Jobs(chromosome_accession=chromosome.accession,
job_name=job))
# print(chromosome.accession, job)
elif gca.assembly_level in ["scaffold", "contig"]:
gca.records = get_scaffold_number(gca_xml)
s.add(gca)
for job in ["get_fasta", "GC", "trf", "CpG"]:
s.add(Jobs(chromosome_accession=gca.accession,
job_name=job))
# print(gca.accession, gca.assembly_level, gca.records)
s.commit()
else:
stderr.write("{} was not added because XML record is unavailable\n".format(gca.accession))
stderr.flush()
# retry download chromosome xml record with a longer timeout
for chromosome in s.query(Chromosome).filter(or_(Chromosome.md5 == None, Chromosome.length == None)).all():
chrom_xml = xml_download_retry(chromosome.accession)
if chrom_xml is not None:
try:
chromosome.md5, chromosome.length = chromosome_data(chrom_xml)
except AttributeError:
stderr.write("Chromosome {} doesn't exit or has corrupted xml file. Chromosome data was not added\n"
.format(chromosome.accession))
s.commit()
stderr.flush()
s.close()
stderr.close()
| 38.780105
| 119
| 0.652491
| 920
| 7,407
| 5.136957
| 0.257609
| 0.052264
| 0.020313
| 0.016081
| 0.307025
| 0.263225
| 0.255184
| 0.220482
| 0.202285
| 0.202285
| 0
| 0.004271
| 0.241393
| 7,407
| 190
| 120
| 38.984211
| 0.836804
| 0.205751
| 0
| 0.292683
| 0
| 0
| 0.153473
| 0.00537
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04878
| false
| 0
| 0.073171
| 0
| 0.186992
| 0.00813
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: d086eb141826a0dd9e722b35bf04940deba291b8 | size: 1,003 | ext: py | lang: Python
max_stars: path=api-gateway/services/mail_service.py, name=Niweera/DNSTool-Middleware-API, head_hexsha=0e83d9f62fb65d9223b86a7876b3f30b2771befb, licenses=["Apache-2.0"], count=null, event_min=null, event_max=null
max_issues: path=api-gateway/services/mail_service.py, name=Niweera/DNSTool-Middleware-API, head_hexsha=0e83d9f62fb65d9223b86a7876b3f30b2771befb, licenses=["Apache-2.0"], count=9, event_min=2021-06-12T05:39:59.000Z, event_max=2021-08-14T09:20:00.000Z
max_forks: path=api-gateway/services/mail_service.py, name=Niweera/DNSTool-Middleware-API, head_hexsha=0e83d9f62fb65d9223b86a7876b3f30b2771befb, licenses=["Apache-2.0"], count=2, event_min=2021-05-22T15:33:50.000Z, event_max=2021-08-28T08:51:25.000Z
content:
from flask_mail import Message
from mailer import mailer
from middleware.error_handling import write_log
class MailService:
@staticmethod
def send_welcome_email(email: str, full_name: str) -> None:
try:
email = Message(
subject="Welcome to DNSTool!",
recipients=[email],
body=f"Welcome {full_name} to DNSTool!",
)
mailer.send(email)
return
except Exception as e:
write_log("error", e)
@staticmethod
def send_verification_email(email: str, verification_link: str) -> None:
try:
email = Message(
subject="Verification Required!",
recipients=[email],
html=f"<h3>Please click on the following link to verify you</h3><br><a href='{verification_link}'>Click Here</a>",
)
mailer.send(email)
return
except Exception as e:
write_log("error", e)
| 31.34375
| 130
| 0.564307
| 109
| 1,003
| 5.073395
| 0.458716
| 0.0434
| 0.068716
| 0.05425
| 0.296564
| 0.296564
| 0.191682
| 0.191682
| 0.191682
| 0.191682
| 0
| 0.00304
| 0.343968
| 1,003
| 31
| 131
| 32.354839
| 0.837386
| 0
| 0
| 0.571429
| 0
| 0.035714
| 0.186441
| 0.031904
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.107143
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: d088e305cc713c10d76eb917ab7c20df555901b9 | size: 1,469 | ext: py | lang: Python
max_stars: path=day02/python/subesokun/solution.py, name=clssn/aoc-2019, head_hexsha=a978e5235855be937e60a1e7f88d1ef9b541be15, licenses=["MIT"], count=22, event_min=2019-11-27T08:28:46.000Z, event_max=2021-04-27T05:37:08.000Z
max_issues: path=day02/python/subesokun/solution.py, name=sancho1241/aoc-2019, head_hexsha=e0f63824c8250e0f84a42805e1a7ff7d9232002c, licenses=["MIT"], count=77, event_min=2019-11-16T17:22:42.000Z, event_max=2021-05-10T20:36:36.000Z
max_forks: path=day02/python/subesokun/solution.py, name=sancho1241/aoc-2019, head_hexsha=e0f63824c8250e0f84a42805e1a7ff7d9232002c, licenses=["MIT"], count=43, event_min=2019-11-27T06:36:51.000Z, event_max=2021-11-03T20:56:48.000Z
content:
INPUT_FILE_NAME = 'input.txt'
puzzle_input = None
with open(INPUT_FILE_NAME) as input_file:
puzzle_input = list(map(lambda val: int(val), input_file.readline().rstrip('\n').split(',')))
def run_instruction(opcode, param_1, param_2, param_3, memory):
if opcode == 1:
memory[param_3] = memory[param_1] + memory[param_2]
elif opcode == 2:
memory[param_3] = memory[param_1] * memory[param_2]
else:
raise Exception('Ooooppps')
def run_program(memory):
instruction_pointer = 0
while memory[instruction_pointer] != 99:
run_instruction(memory[instruction_pointer + 0], memory[instruction_pointer + 1], memory[instruction_pointer + 2], memory[instruction_pointer + 3], memory)
instruction_pointer += 4
return memory
memory_solution_part1 = puzzle_input.copy()
memory_solution_part1[1] = 12
memory_solution_part1[2] = 2
solution_part_1 = run_program(memory_solution_part1)
print('Solution to part 1: %i' % (solution_part_1[0],))
def find_noun_verb(output, memory):
for noun in range(0, 100):
for verb in range(0, 100):
memory_copy = memory.copy()
memory_copy[1] = noun
memory_copy[2] = verb
result = run_program(memory_copy)[0]
if result == output:
return (noun, verb)
(noun, verb) = find_noun_verb(19690720, puzzle_input)
solution_part_2 = 100 * noun + verb
print('Solution to part 2: %i' % (solution_part_2,))
| 31.934783
| 163
| 0.673247
| 204
| 1,469
| 4.578431
| 0.284314
| 0.127409
| 0.179872
| 0.038544
| 0.077088
| 0.077088
| 0.077088
| 0.077088
| 0.077088
| 0
| 0
| 0.049053
| 0.208986
| 1,469
| 45
| 164
| 32.644444
| 0.754733
| 0
| 0
| 0
| 0
| 0
| 0.043567
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088235
| false
| 0
| 0
| 0
| 0.147059
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: d08a58e73bc10ca50c82c660e481d5fd9ab9cf8d | size: 6,299 | ext: py | lang: Python
max_stars: path=opendata_module/anonymizer/iio/mongodb_manager.py, name=ria-ee/monitor, head_hexsha=d5cb9384abf38394b35e760729649136cbbc7548, licenses=["MIT"], count=10, event_min=2017-12-01T11:59:54.000Z, event_max=2021-11-08T10:30:35.000Z
max_issues: path=opendata_module/anonymizer/iio/mongodb_manager.py, name=ria-ee/monitor, head_hexsha=d5cb9384abf38394b35e760729649136cbbc7548, licenses=["MIT"], count=16, event_min=2019-11-15T08:45:33.000Z, event_max=2021-06-10T18:06:03.000Z
max_forks: path=opendata_module/anonymizer/iio/mongodb_manager.py, name=ria-ee/monitor, head_hexsha=d5cb9384abf38394b35e760729649136cbbc7548, licenses=["MIT"], count=13, event_min=2017-11-22T08:46:57.000Z, event_max=2021-12-16T06:51:07.000Z
content:
from pymongo import MongoClient
import pymongo
import datetime
import sqlite3 as sql
import os
import signal
from signal import SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIGHUP
import traceback
from anonymizer.utils import logger_manager
import sys
ROOT_DIR = os.path.abspath(os.path.dirname(__file__))
ATEXIT_SINGLETON = None
def store_last_processed_timestamp(*args):
ATEXIT_SINGLETON.update_last_processed_timestamp(max_timestamp=ATEXIT_SINGLETON.last_processed_timestamp)
sys.exit(1)
for sig in (SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIGHUP):
signal.signal(sig, store_last_processed_timestamp)
class MongoDB_Manager(object):
def __init__(self, config, previous_run_manager=None):
self._logger = logger_manager.LoggerManager(logger_name='opendata-anonymizer', module_name='opendata')
global ATEXIT_SINGLETON
ATEXIT_SINGLETON = self
self._config = config
self.mongo_connection_string = "mongodb://{user}:{password}@{host}:{port}/{database}".format(
**{'user': config.mongo_db['user'],
'password': config.mongo_db['password'],
'host': config.mongo_db['host_address'],
'port': config.mongo_db['port'],
'database': config.mongo_db['auth_db']})
self._mongo_client = MongoClient(self.mongo_connection_string)
self._previous_run_manager = previous_run_manager if previous_run_manager else PreviousRunManager(config)
self.last_processed_timestamp = self._get_last_processed_timestamp()
def get_records(self, allowed_fields):
collection = self._mongo_client[self._config.mongo_db['database_name']][self._config.mongo_db['table_name']]
min_timestamp = self._get_last_processed_timestamp()
projection = {field: True for field in allowed_fields}
projection['correctorTime'] = True
batch_idx = 0
current_timestamp = datetime.datetime.now().timestamp()
for document in collection.find({
'correctorTime': {'$gt': min_timestamp, '$lte': current_timestamp},
'correctorStatus': 'done',
'client.clientXRoadInstance': {'$ne': None}
}, projection=projection, no_cursor_timeout=True).sort('correctorTime', pymongo.ASCENDING):
if batch_idx == 1000:
self.update_last_processed_timestamp(max_timestamp=self.last_processed_timestamp)
batch_idx = 0
self.last_processed_timestamp = document['correctorTime']
del document['_id']
del document['correctorTime']
yield self._add_missing_fields(document, allowed_fields)
batch_idx += 1
self.update_last_processed_timestamp(max_timestamp=self.last_processed_timestamp)
def is_alive(self):
try:
self._mongo_client[self._config.mongo_db['database_name']][self._config.mongo_db['table_name']].find_one()
return True
except Exception:
self._logger.log_error('mongodb_connection_failed',
("Failed to connect to mongodb with connection string {0}. ERROR: {1}".format(
self.mongo_connection_string, traceback.format_exc().replace('\n', '')))
)
return False
def _add_missing_fields(self, document, allowed_fields):
try:
existing_agents = [agent for agent in ['client', 'producer'] if agent in document]
for field in allowed_fields:
field_path = field.split('.')
if len(field_path) == 2 and field_path[0] in existing_agents:
if field_path[0] not in document:
document[field_path[0]] = {}
if field_path[1] not in document[field_path[0]]:
document[field_path[0]][field_path[1]] = self._get_default_value(field_path)
elif len(field_path) == 1:
if field_path[0] not in document:
document[field_path[0]] = self._get_default_value(field_path)
return document
except Exception:
self._logger.log_error('adding_missing_fields_failed',
("Failed adding missing fields from {0} to document {1}. ERROR: {2}".format(
str(allowed_fields), str(document), traceback.format_exc().replace('\n', ''))))
raise
def _get_default_value(self, field_path):
return None
def _get_last_processed_timestamp(self):
min_timestamp = self._previous_run_manager.get_previous_run()
return min_timestamp
def update_last_processed_timestamp(self, max_timestamp):
if max_timestamp:
self._previous_run_manager.set_previous_run(max_timestamp)
def __del__(self):
self.update_last_processed_timestamp(max_timestamp=self.last_processed_timestamp)
class PreviousRunManager(object):
initial_value = 0.0
def __init__(self, config):
self._config = config
self.mongo_connection_string = "mongodb://{user}:{password}@{host}:{port}/{database}".format(
**{'user': config.mongo_db['user'],
'password': config.mongo_db['password'],
'host': config.mongo_db['host_address'],
'port': config.mongo_db['port'],
'database': config.mongo_db['auth_db']})
self._mongo_client = MongoClient(self.mongo_connection_string)
def get_previous_run(self):
collection = self._mongo_client[self._config.mongo_db['state']['database_name']][
self._config.mongo_db['state']['table_name']]
entry = collection.find_one({'key': 'last_mongodb_timestamp'})
if entry:
return float(entry['value'])
else:
return self.initial_value
def set_previous_run(self, max_timestamp):
collection = self._mongo_client[self._config.mongo_db['state']['database_name']][
self._config.mongo_db['state']['table_name']]
collection.update(
{'key': 'last_mongodb_timestamp'},
{'key': 'last_mongodb_timestamp', 'value': str(max_timestamp)},
upsert=True
)
| 40.378205
| 118
| 0.640895
| 709
| 6,299
| 5.35402
| 0.207334
| 0.05216
| 0.061644
| 0.035827
| 0.426765
| 0.384615
| 0.30137
| 0.30137
| 0.298736
| 0.298736
| 0
| 0.00573
| 0.251945
| 6,299
| 155
| 119
| 40.63871
| 0.799873
| 0
| 0
| 0.260504
| 0
| 0
| 0.123988
| 0.03953
| 0
| 0
| 0
| 0
| 0
| 1
| 0.10084
| false
| 0.033613
| 0.084034
| 0.008403
| 0.268908
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: d08f1ec8fdeeec92aebf3f03615a051f9221f14d | size: 802 | ext: py | lang: Python
max_stars: path=problems/p050.py, name=davisschenk/project-euler-python, head_hexsha=1375412e6c8199ab02250bd67223c758d4df1725, licenses=["MIT"], count=null, event_min=null, event_max=null
max_issues: path=problems/p050.py, name=davisschenk/project-euler-python, head_hexsha=1375412e6c8199ab02250bd67223c758d4df1725, licenses=["MIT"], count=null, event_min=null, event_max=null
max_forks: path=problems/p050.py, name=davisschenk/project-euler-python, head_hexsha=1375412e6c8199ab02250bd67223c758d4df1725, licenses=["MIT"], count=2, event_min=2020-10-08T23:35:03.000Z, event_max=2020-10-09T00:28:36.000Z
content:
from problem import Problem
from utils.primes import sieve_of_eratosthenes, simple_is_prime
class ConsecutivePrimeSum(Problem, name="Consecutive prime sum", expected=997651):
@Problem.solution()
def brute_force(self):
upper_bound = 1_000_000
primes = list(sieve_of_eratosthenes(4000))
max_length = 0
max_prime = 0
for start in range(len(primes)):
current_length = 0
for end in range(start, len(primes)):
s = sum(primes[start:end])
current_length += 1
if s > upper_bound:
break
elif simple_is_prime(s) and current_length > max_length:
max_length = current_length
max_prime = s
return max_prime
| 30.846154
| 82
| 0.588529
| 95
| 802
| 4.726316
| 0.473684
| 0.115813
| 0.084633
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039924
| 0.34414
| 802
| 25
| 83
| 32.08
| 0.813688
| 0
| 0
| 0
| 0
| 0
| 0.026185
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.1
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: d091d22d60b6b043a2100712328787d0097e7ec3 | size: 2,312 | ext: py | lang: Python
max_stars: path=acapy_client/models/indy_pres_attr_spec.py, name=dbluhm/acapy-client, head_hexsha=d92ef607ba2ff1152ec15429f2edb20976991424, licenses=["Apache-2.0"], count=4, event_min=2021-08-05T09:20:34.000Z, event_max=2021-08-08T19:37:29.000Z
max_issues: path=acapy_client/models/indy_pres_attr_spec.py, name=dbluhm/acapy-client, head_hexsha=d92ef607ba2ff1152ec15429f2edb20976991424, licenses=["Apache-2.0"], count=null, event_min=null, event_max=null
max_forks: path=acapy_client/models/indy_pres_attr_spec.py, name=dbluhm/acapy-client, head_hexsha=d92ef607ba2ff1152ec15429f2edb20976991424, licenses=["Apache-2.0"], count=2, event_min=2021-08-12T18:18:45.000Z, event_max=2021-08-14T13:22:28.000Z
content:
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..types import UNSET, Unset
T = TypeVar("T", bound="IndyPresAttrSpec")
@attr.s(auto_attribs=True)
class IndyPresAttrSpec:
""" """
name: str
cred_def_id: Union[Unset, str] = UNSET
mime_type: Union[Unset, str] = UNSET
referent: Union[Unset, str] = UNSET
value: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
name = self.name
cred_def_id = self.cred_def_id
mime_type = self.mime_type
referent = self.referent
value = self.value
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
{
"name": name,
}
)
if cred_def_id is not UNSET:
field_dict["cred_def_id"] = cred_def_id
if mime_type is not UNSET:
field_dict["mime-type"] = mime_type
if referent is not UNSET:
field_dict["referent"] = referent
if value is not UNSET:
field_dict["value"] = value
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
name = d.pop("name")
cred_def_id = d.pop("cred_def_id", UNSET)
mime_type = d.pop("mime-type", UNSET)
referent = d.pop("referent", UNSET)
value = d.pop("value", UNSET)
indy_pres_attr_spec = cls(
name=name,
cred_def_id=cred_def_id,
mime_type=mime_type,
referent=referent,
value=value,
)
indy_pres_attr_spec.additional_properties = d
return indy_pres_attr_spec
@property
def additional_keys(self) -> List[str]:
return list(self.additional_properties.keys())
def __getitem__(self, key: str) -> Any:
return self.additional_properties[key]
def __setitem__(self, key: str, value: Any) -> None:
self.additional_properties[key] = value
def __delitem__(self, key: str) -> None:
del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties
| 27.2
| 77
| 0.602076
| 296
| 2,312
| 4.449324
| 0.209459
| 0.053151
| 0.068337
| 0.05467
| 0.149582
| 0.027335
| 0
| 0
| 0
| 0
| 0
| 0
| 0.29109
| 2,312
| 84
| 78
| 27.52381
| 0.803539
| 0
| 0
| 0
| 0
| 0
| 0.039479
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.112903
| false
| 0
| 0.048387
| 0.048387
| 0.354839
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: d0955d256ce56a7190e83eb9977c69b7f233f594 | size: 4,929 | ext: py | lang: Python
max_stars: path=close_approach.py, name=jepster/python_project_near_earth_objects, head_hexsha=6e48da50685f15226bbc0adc66231c661596ac67, licenses=["MIT"], count=null, event_min=null, event_max=null
max_issues: path=close_approach.py, name=jepster/python_project_near_earth_objects, head_hexsha=6e48da50685f15226bbc0adc66231c661596ac67, licenses=["MIT"], count=null, event_min=null, event_max=null
max_forks: path=close_approach.py, name=jepster/python_project_near_earth_objects, head_hexsha=6e48da50685f15226bbc0adc66231c661596ac67, licenses=["MIT"], count=null, event_min=null, event_max=null
content:
from helpers import cd_to_datetime, datetime_to_str
class CloseApproach:
"""A close approach to Earth by an NEO.
A `CloseApproach` encapsulates information about the NEO's close approach
to Earth, such as the date and time (in UTC) of closest approach, the
nominal approach distance in astronomical units, and the relative approach
velocity in kilometers per second.
A `CloseApproach` also maintains a reference to its `NearEarthObject` -
initially, this information (the NEO's primary designation) is saved in a
private attribute, but the referenced NEO is eventually replaced in the
`NEODatabase` constructor.
"""
def __init__(self, **info):
"""Create a new `CloseApproach`.
:param string time: The date and time (in UTC) of closest approach.
NASA's format, at least in the `cd`
field of close approach data, uses the English locale's month names.
For example, December 31st, 2020 at noon
is: 2020-Dec-31 12:00
:param float distance: The nominal approach distance in astronomical
units.
:param float velocity: The relative approach velocity in kilometers per
second.
:param NearEarthObject neo: Reference to its `NearEarthObject` -
initially, this information
(the NEO's primary designation) is saved in a private attribute, but
the referenced NEO is
eventually replaced in the `NEODatabase` constructor.
"""
for key, value in info.items():
# assign the designation parameter
if key.lower() == 'des':
# check the value of the parameter to avoid
# an inappropriate value
try:
# if the type of value is not string
self._designation = str(value)
except ValueError:
# print the text message
print(f'The type of {key} is not string')
# assign the time parameter
elif key.lower() == 'cd':
# check the value of the parameter to avoid
# an inappropriate value
try:
# if the type of value is not string
self.time = str(value)
self.time = cd_to_datetime(self.time)
except ValueError:
# print the text message
print(f'The type of {key} is not string')
# assign the distance parameter
elif key.lower() == 'dist':
# check the value of the parameter to avoid
# an inappropriate value
try:
# if the type of value is not float
self.distance = float(value)
except ValueError:
# print the text message
print(f'The type of {key} is not float')
# assign the velocity parameter
elif key.lower() == 'v_rel':
# check the value of the parameter to avoid
# an inappropriate value
try:
# if the type of value is not float
self.velocity = float(value)
except ValueError:
# print the text message
print(f'The type of {key} is not float')
self.neo = self._designation
@property
def time_str(self):
"""Return a formatted representation of this `CloseApproach`'s
approach time.
The value in `self.time` should be a Python `datetime` object. While a
`datetime` object has a string representation, the default
representation includes seconds - significant figures that don't
exist in our input data set.
The `datetime_to_str` method converts a `datetime` object to a
formatted string that can be used in human-readable representations and
in serialization to CSV and JSON files.
"""
return f"Approach time of {self._designation} was at " \
f"{datetime_to_str(self.time)}"
def get_neo_primary_designation(self) -> str:
return self._designation
@property
def designation(self):
"""To access to the self._designation.
:return: self._designation
"""
return self._designation
def __str__(self):
"""Return `str(self)`."""
return f"A CloseApproach time={self.time_str} " \
f"distance={self.distance} velocity={self.velocity} " \
f"neo={self.neo}"
def __repr__(self):
"""Return `repr(self)`, a computer-readable string representation of
this object."""
return (
f"CloseApproach(time={self.time_str!r}, "
f"distance={self.distance:.2f}, "f"velocity={self.velocity:.2f}, "
f"neo={self.neo!r})")
| 39.432
| 79
| 0.581862
| 586
| 4,929
| 4.832765
| 0.259386
| 0.019774
| 0.025424
| 0.021186
| 0.47952
| 0.439619
| 0.439619
| 0.407839
| 0.373941
| 0.348517
| 0
| 0.005587
| 0.346318
| 4,929
| 124
| 80
| 39.75
| 0.873371
| 0.49645
| 0
| 0.363636
| 0
| 0
| 0.194853
| 0.078585
| 0
| 0
| 0
| 0
| 0
| 1
| 0.136364
| false
| 0
| 0.022727
| 0.022727
| 0.295455
| 0.090909
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: d096f230d88b0cb0b44ad27a15da83bc18edf195 | size: 11,023 | ext: py | lang: Python
max_stars: path=terncy/terncy.py, name=rxwen/python-terncy, head_hexsha=69be427c39118f122554a300e6e82ec24ad48bc1, licenses=["MIT"], count=1, event_min=2020-06-30T07:06:14.000Z, event_max=2020-06-30T07:06:14.000Z
max_issues: path=terncy/terncy.py, name=rxwen/python-terncy, head_hexsha=69be427c39118f122554a300e6e82ec24ad48bc1, licenses=["MIT"], count=null, event_min=null, event_max=null
max_forks: path=terncy/terncy.py, name=rxwen/python-terncy, head_hexsha=69be427c39118f122554a300e6e82ec24ad48bc1, licenses=["MIT"], count=1, event_min=2020-12-26T11:20:42.000Z, event_max=2020-12-26T11:20:42.000Z
content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import asyncio
import logging
import json
import ssl
import uuid
from terncy.version import __version__
import terncy.event as event
import ipaddress
from datetime import datetime
from enum import Enum
from zeroconf import ServiceBrowser, Zeroconf
import aiohttp
import websockets
_LOGGER = logging.getLogger(__name__)
def _next_req_id():
return uuid.uuid4().hex[0:8]
class TokenState(Enum):
INVALID = -1
REQUESTED = 1
APPROVED = 3
TERNCY_HUB_SVC_NAME = "_websocket._tcp.local."
WAIT_RESP_TIMEOUT_SEC = 5
_discovery_engine = None
_discovery_browser = None
discovered_homecenters = {}
class _TerncyZCListener:
def __init__(self):
pass
def remove_service(self, zeroconf, svc_type, name):
global discovered_homecenters
dev_id = name.replace("." + svc_type, "")
if dev_id in discovered_homecenters:
discovered_homecenters.pop(dev_id)
def update_service(self, zeroconf, svc_type, name):
global discovered_homecenters
info = zeroconf.get_service_info(svc_type, name)
dev_id = name.replace("." + svc_type, "")
txt_records = {"dev_id": dev_id}
ip = ""
if len(info.addresses) > 0:
if len(info.addresses[0]) == 4:
ip = str(ipaddress.IPv4Address(info.addresses[0]))
if len(info.addresses[0]) == 16:
ip = str(ipaddress.IPv6Address(info.addresses[0]))
txt_records["ip"] = ip
txt_records["port"] = info.port
for k in info.properties:
txt_records[k.decode("utf-8")] = info.properties[k].decode("utf-8")
discovered_homecenters[dev_id] = txt_records
def add_service(self, zeroconf, svc_type, name):
# same handling as update_service: record the newly discovered hub
self.update_service(zeroconf, svc_type, name)
async def start_discovery():
global _discovery_engine
global _discovery_browser
if _discovery_engine is None:
zc = Zeroconf()
listener = _TerncyZCListener()
browser = ServiceBrowser(zc, TERNCY_HUB_SVC_NAME, listener)
_discovery_engine = zc
_discovery_browser = browser
async def stop_discovery():
global _discovery_engine
global _discovery_browser
if _discovery_engine is not None:
_discovery_browser.cancel()
_discovery_engine.close()
_discovery_engine = None
_discovery_browser = None
class Terncy:
def __init__(self, client_id, dev_id, ip, port=443, username="", token=""):
self.client_id = client_id
self.dev_id = dev_id
self.ip = ip
self.port = port
self.username = username
self.token = token
self.token_id = -1
self.token_state = TokenState.INVALID
self._connection = None
self._pending_requests = {}
self._event_handler = None
def is_connected(self):
return self._connection is not None
def register_event_handler(self, handler):
self._event_handler = handler
async def request_token(self, username, name):
url = f"https://{self.ip}:{self.port}/v1/tokens:request"
async with aiohttp.ClientSession() as session:
data = {
"reqId": _next_req_id(),
"intent": "requestToken",
"clientId": self.client_id,
"username": self.username,
"name": name,
"role": 3,
}
async with session.post(
url,
data=json.dumps(data),
ssl=ssl._create_unverified_context(),
) as response:
body = await response.json()
_LOGGER.debug(f"resp body: {body}")
state = TokenState.INVALID
token = ""
token_id = -1
if "state" in body:
state = body["state"]
if "id" in body:
token_id = body["id"]
if "token" in body:
token = body["token"]
return response.status, token_id, token, state
async def delete_token(self, token_id, token):
url = f"https://{self.ip}:{self.port}/v1/tokens:delete"
async with aiohttp.ClientSession() as session:
data = {
"reqId": _next_req_id(),
"intent": "deleteToken",
"clientId": self.client_id,
"id": token_id,
"token": token,
}
async with session.post(
url,
data=json.dumps(data),
ssl=ssl._create_unverified_context(),
) as response:
_LOGGER.debug(f"resp: {response}")
return response.status
async def check_token_state(self, token_id, token=""):
url = f"https://{self.ip}:{self.port}/v1/tokens:query"
async with aiohttp.ClientSession() as session:
data = {
"reqId": _next_req_id(),
"intent": "queryToken",
"clientId": self.client_id,
"token": token,
"id": token_id,
}
async with session.post(
url,
data=json.dumps(data),
ssl=ssl._create_unverified_context(),
) as response:
body = await response.json()
_LOGGER.debug(f"resp: {response}")
state = TokenState.INVALID
if "state" in body:
state = body["state"]
return response.status, state
async def start(self):
"""Connect to Terncy system and start event monitor."""
_LOGGER.info(f"Terncy v{__version__} starting connection to:")
_LOGGER.info(f"{self.dev_id} {self.ip}:{self.port}")
return await self._start_websocket()
async def stop(self):
if self._connection:
await self._connection.close()
self._connection = None
async def _start_websocket(self):
url = f"wss://{self.ip}:{self.port}/ws/json?clientId={self.client_id}&username={self.username}&token={self.token}"
try:
ssl_no_verify = ssl._create_unverified_context()
async with websockets.connect(
url, ping_timeout=None, ping_interval=None, ssl=ssl_no_verify
) as ws:
self._connection = ws
if self._event_handler:
_LOGGER.info(f"connected to {self.dev_id}")
self._event_handler(self, event.Connected())
async for msg in ws:
msgObj = json.loads(msg)
_LOGGER.debug(f"recv {self.dev_id} msg: {msgObj}")
if "rspId" in msgObj:
rsp_id = msgObj["rspId"]
if rsp_id in self._pending_requests:
req = self._pending_requests[rsp_id]
req["rsp"] = msgObj
req["event"].set()
if "intent" in msgObj and msgObj["intent"] == "event":
if self._event_handler:
ev = event.EventMessage(msgObj)
self._event_handler(self, ev)
if "intent" in msgObj and msgObj["intent"] == "ping":
await ws.send('{"intent":"pong"}')
except (
aiohttp.client_exceptions.ClientConnectionError,
websockets.exceptions.ConnectionClosedError,
ConnectionRefusedError,
OSError,
websockets.exceptions.InvalidStatusCode,
) as e:
_LOGGER.info(f"disconnect with {self.dev_id} {e}")
if self._event_handler:
self._event_handler(self, event.Disconnected())
self._connection = None
return
async def _wait_for_response(self, req_id, req, timeout):
""" return the request and its response """
evt = asyncio.Event()
response_desc = {
"req": req,
"time": datetime.now(),
"event": evt,
}
self._pending_requests[req_id] = response_desc
aw = asyncio.ensure_future(evt.wait())
done, pending = await asyncio.wait({aw}, timeout=timeout)
if aw in done:
pass
else:
_LOGGER.info(f"wait {self.dev_id} response timeout")
if req_id in self._pending_requests:
self._pending_requests.pop(req_id)
return response_desc
async def get_entities(
self, ent_type, wait_result=False, timeout=WAIT_RESP_TIMEOUT_SEC
):
if self._connection is None:
_LOGGER.info(f"no connection with {self.dev_id}")
return None
req_id = _next_req_id()
data = {
"reqId": req_id,
"intent": "sync",
"type": ent_type,
}
await self._connection.send(json.dumps(data))
if wait_result:
return await self._wait_for_response(req_id, data, timeout)
async def set_onoff(
self, ent_id, state, wait_result=False, timeout=WAIT_RESP_TIMEOUT_SEC
):
if self._connection is None:
_LOGGER.info(f"no connection with {self.dev_id}")
return None
return await self.set_attribute(ent_id, "on", state, 0, wait_result, timeout)
async def set_attribute(
self,
ent_id,
attr,
attr_val,
method,
wait_result=False,
timeout=WAIT_RESP_TIMEOUT_SEC,
):
if self._connection is None:
_LOGGER.info(f"no connection with {self.dev_id}")
return None
req_id = _next_req_id()
data = {
"reqId": req_id,
"intent": "execute",
"entities": [
{
"id": ent_id,
"attributes": [
{
"attr": attr,
"value": attr_val,
"method": method,
}
],
}
],
}
await self._connection.send(json.dumps(data))
if wait_result:
return await self._wait_for_response(req_id, data, timeout)
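A hypothetical end-to-end sketch of the client defined above, run in the same module so `Terncy`, `start_discovery`, and friends are in scope; the device id, address, credentials and the 'device' entity type below are placeholders, not real values.
async def _demo():
    # discover hubs on the LAN for a few seconds
    await start_discovery()
    await asyncio.sleep(5)
    print('discovered:', discovered_homecenters)
    # talk to one hub (placeholder id/address/credentials)
    t = Terncy('my-client-id', 'box-00-11-22', '192.168.1.10', 443,
               'demo-user', 'demo-token')
    t.register_event_handler(lambda client, ev: print('event:', ev))
    status, token_id, token, state = await t.request_token('demo-user', 'demo client')
    print('token request:', status, token_id, state)
    # start() blocks in the websocket read loop, so run it as a background task
    asyncio.ensure_future(t.start())
    await asyncio.sleep(2)
    if t.is_connected():
        print(await t.get_entities('device', wait_result=True))
    await t.stop()
    await stop_discovery()
asyncio.run(_demo())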
| 34.021605
| 122
| 0.547945
| 1,200
| 11,023
| 4.793333
| 0.1675
| 0.019124
| 0.024339
| 0.018776
| 0.464534
| 0.439499
| 0.42194
| 0.387865
| 0.387865
| 0.372566
| 0
| 0.005863
| 0.350086
| 11,023
| 323
| 123
| 34.126935
| 0.797041
| 0.00381
| 0
| 0.407801
| 0
| 0.003546
| 0.089238
| 0.013602
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028369
| false
| 0.007092
| 0.046099
| 0.007092
| 0.14539
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0995eaa88ebe8af4a5de82f32eaa66b6c6943ba
| 5,310
|
py
|
Python
|
ssc/visualization/vispy_voxel.py
|
DavidGillsjo/bssc-net
|
e1ffa643a2c8e3df34225f0756bad0dec9f801a2
|
[
"MIT"
] | 5
|
2021-02-25T01:59:46.000Z
|
2022-02-09T12:23:30.000Z
|
ssc/visualization/vispy_voxel.py
|
DavidGillsjo/bssc-net
|
e1ffa643a2c8e3df34225f0756bad0dec9f801a2
|
[
"MIT"
] | null | null | null |
ssc/visualization/vispy_voxel.py
|
DavidGillsjo/bssc-net
|
e1ffa643a2c8e3df34225f0756bad0dec9f801a2
|
[
"MIT"
] | 1
|
2021-04-10T04:11:02.000Z
|
2021-04-10T04:11:02.000Z
|
import vispy
vispy.use(app='egl')
from moviepy.editor import VideoClip
import numpy as np
from vispy import scene, io, visuals
from vispy.color import *
import cv2
# Check the application correctly picked up egl
assert vispy.app.use_app().backend_name == 'egl', 'Not using EGL'
class AlphaAwareCM(BaseColormap):
def __init__(self, color_list):
bins = np.linspace(0,1,len(color_list)+1)
self.glsl_map = 'vec4 translucent_grays(float t) {\n'
for c_idx, (i1, i2) in enumerate(zip(bins[:-1], bins[1:])):
return_vec = 'return vec4({0[0]:.4},{0[1]:.4},{0[2]:.4},{0[3]:.4});'.format(color_list[c_idx].rgba.flat)
if c_idx == 0:
self.glsl_map += ' if (t < {:.2}) {{\n {}\n }}'.format(i2, return_vec)
elif c_idx == len(color_list) - 1:
self.glsl_map += ' else {{\n {}\n }}'.format(return_vec)
else:
self.glsl_map += ' else if (({:.2} <= t) && (t < {:.2})) {{\n {}\n }}'.format(i1, i2, return_vec)
self.glsl_map += '\n}'
super().__init__()
def plot_voxels(gridLabels, suncg_labels, vox_min, vox_unit, save_path = None, animate = False):
nbr_classes = len(suncg_labels)
canvas = scene.SceneCanvas(keys='interactive', bgcolor='w', size = (1920,1080))
view = canvas.central_widget.add_view()
azimuth = 30
view.camera = scene.TurntableCamera(up='y', distance=4, fov=70,
azimuth=azimuth, elevation=30.)
# Sample colormap and adjust alpha
colormap = get_colormap('cubehelix')
cm_sampled = []
for i, (iclass, sample_f) in enumerate(zip(suncg_labels, np.linspace(0,1,nbr_classes))):
if iclass.lower() in ('free', 'ceiling'):
alpha = 0
elif iclass.lower() in ('floor', 'wall', 'window'):
alpha = 0.6
else:
alpha = 1.0
cm_sampled.append(Color(color=colormap[sample_f].rgb, alpha=alpha))
my_cm = AlphaAwareCM(cm_sampled)
volume = scene.visuals.Volume(gridLabels, relative_step_size = 0.1, method='mip', parent=view.scene, cmap = my_cm, clim = [0, nbr_classes-1], emulate_texture = False)
volume.transform = scene.transforms.MatrixTransform()
volume.transform.scale(3*[vox_unit])
volume.transform.translate(3*[-vox_unit*gridLabels.shape[0]/2.0])
if save_path is None:
return
def make_frame(t):
view.camera.set_state({'azimuth': azimuth+t*90})
return canvas.render()
if animate:
animation = VideoClip(make_frame, duration=3)
animation.write_gif('voxel.gif', fps=8, opt='OptimizePlus')
else:
img = canvas.render()
cv2.imwrite('voxel.png', img[::-1])
def scatter_plot_voxels(gridLabels, suncg_labels, vox_min, vox_unit, save_path = None, animate = False):
nbr_classes = len(suncg_labels)
occMask = gridLabels > 0
xyz = np.nonzero(occMask)
positions = np.vstack([xyz[0], xyz[1], xyz[2]])
gridLabelsMasked = gridLabels[occMask]
canvas = scene.SceneCanvas(keys='interactive', bgcolor='w', size = (1920,1080))
view = canvas.central_widget.add_view()
azimuth = 30
view.camera = scene.TurntableCamera(up='y', distance=4, fov=70,
azimuth=azimuth, elevation=30.)
# Sample colormap and adjust alpha
colormap = get_colormap('hsl', value=1.0, saturation=0.8, ncolors = nbr_classes)
pos_color = np.zeros((positions.shape[1], 4))
cm_sampled = []
for i, (iclass, sample_f) in enumerate(zip(suncg_labels[1:], np.linspace(0,1,nbr_classes-1))):
if iclass.lower() in ('floor', 'wall', 'window'):
alpha = 0.5
elif iclass.lower() == 'ceiling':
alpha = 0.0
else:
alpha = 1.0
base_color = colormap[sample_f].rgba.flatten()
base_color[3] = alpha
pos_color[i==gridLabelsMasked] = base_color
Scatter3D = scene.visuals.create_visual_node(visuals.MarkersVisual)
p1 = Scatter3D(parent=view.scene)
p1.set_gl_state('translucent', blend=True, depth_test=True)
p1.set_data(positions.T, face_color=pos_color, symbol='disc', size=10,
edge_width=0.5, edge_color='k')
p1.transform = scene.transforms.MatrixTransform()
p1.transform.scale(3*[vox_unit])
p1.transform.translate(3*[-vox_unit*gridLabels.shape[0]/2.0])
if save_path is None:
return
def make_frame(t):
view.camera.set_state({'azimuth': azimuth+t*90})
return canvas.render()
if animate:
animation = VideoClip(make_frame, duration=3)
animation.write_gif('voxel.gif', fps=8, opt='OptimizePlus')
else:
img = canvas.render()
cv2.imwrite('voxel.png', img[::-1])
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='Test plotting voxels')
parser.add_argument('gt_file', type=str, help='Path to gt file')
parser.add_argument('--animate', action='store_true', help='Yield GIF instead of PNG')
args = parser.parse_args()
from ssc.data.suncg_mapping import SUNCGMapping
import os
labels = SUNCGMapping()
gt_npz = np.load(args.gt_file)
scatter_plot_voxels(gt_npz['voxels'], labels.get_classes(), gt_npz['vox_min'], gt_npz['vox_unit'], save_path = os.getcwd() , animate = args.animate)
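A hypothetical smoke test for `plot_voxels`, assuming it runs in the same module so `np` and `plot_voxels` are in scope; the label names and grid contents below are synthetic placeholders, not a real SUNCG ground-truth file.
import os
labels = ['free', 'ceiling', 'floor', 'wall', 'window', 'object']
grid = np.random.randint(0, len(labels), size=(60, 36, 60)).astype(np.float32)
plot_voxels(grid, labels, vox_min=np.zeros(3), vox_unit=0.08,
            save_path=os.getcwd(), animate=False)  # renders voxel.png in the cwd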
| 38.759124
| 170
| 0.634087
| 725
| 5,310
| 4.475862
| 0.308966
| 0.0151
| 0.016949
| 0.011094
| 0.436364
| 0.416641
| 0.403082
| 0.403082
| 0.382126
| 0.382126
| 0
| 0.030041
| 0.216384
| 5,310
| 136
| 171
| 39.044118
| 0.74982
| 0.020904
| 0
| 0.376147
| 0
| 0.009174
| 0.099326
| 0.013282
| 0
| 0
| 0
| 0
| 0.009174
| 1
| 0.045872
| false
| 0
| 0.082569
| 0
| 0.174312
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d09a191e2a3804cf26b16a157b643e61f06cbb1c
| 69,906
|
py
|
Python
|
__init__.py
|
state-of-the-art/BlendNet
|
0a303e34081b820370c9528c807f276eefb122dc
|
[
"Apache-2.0"
] | 66
|
2019-10-30T13:39:13.000Z
|
2022-03-23T18:33:03.000Z
|
__init__.py
|
state-of-the-art/BlendNet
|
0a303e34081b820370c9528c807f276eefb122dc
|
[
"Apache-2.0"
] | 97
|
2019-10-18T16:48:50.000Z
|
2022-02-13T18:58:45.000Z
|
__init__.py
|
state-of-the-art/BlendNet
|
0a303e34081b820370c9528c807f276eefb122dc
|
[
"Apache-2.0"
] | 12
|
2019-11-23T12:53:52.000Z
|
2021-08-09T21:15:48.000Z
|
bl_info = {
'name': 'BlendNet - distributed cloud render',
'author': 'www.state-of-the-art.io',
'version': (0, 4, 0),
'warning': 'development version',
'blender': (2, 80, 0),
'location': 'Properties --> Render --> BlendNet Render',
'description': 'Allows easy allocation of cloud resources to run '
'Cycles rendering and to fetch previews '
'and results',
'wiki_url': 'https://github.com/state-of-the-art/BlendNet/wiki',
'tracker_url': 'https://github.com/state-of-the-art/BlendNet/issues',
'category': 'Render',
}
if 'bpy' in locals():
import importlib
importlib.reload(BlendNet)
importlib.reload(blend_file)
else:
from . import (
BlendNet,
)
from .BlendNet import blend_file
import os
import time
import tempfile
from datetime import datetime
import bpy
from bpy.props import (
BoolProperty,
IntProperty,
StringProperty,
EnumProperty,
PointerProperty,
CollectionProperty,
)
class BlendNetAddonPreferences(bpy.types.AddonPreferences):
bl_idname = __package__
resource_provider: EnumProperty(
name = 'Provider',
description = 'Engine to provide resources for rendering',
items = BlendNet.addon.getProvidersEnumItems,
update = lambda self, context: BlendNet.addon.selectProvider(self.resource_provider),
)
blendnet_show_panel: BoolProperty(
name = 'Show BlendNet',
description = 'Show BlendNet render panel',
default = True,
)
# Advanced
blender_dist: EnumProperty(
name = 'Blender dist',
description = 'Blender distribution to use on the manager/agents. '
'By default it is set to the current Blender version; if '
'you want to change it, provide a custom URL',
items = BlendNet.addon.fillAvailableBlenderDists,
update = lambda self, context: BlendNet.addon.updateBlenderDistProp(self.blender_dist),
)
blender_dist_url: StringProperty(
name = 'Blender dist URL',
description = 'URL to download the Blender distribution',
default = '',
)
blender_dist_checksum: StringProperty(
name = 'Blender dist checksum',
description = 'Checksum of the distribution, used to validate the binary',
default = '',
)
blender_dist_custom: BoolProperty(
name = 'Custom dist URL',
description = 'Use a custom URL instead of the automatic one',
default = False,
update = lambda self, context: BlendNet.addon.updateBlenderDistProp(),
)
session_id: StringProperty(
name = 'Session ID',
description = 'Identifier of the session and its allocated resources. '
'It is used to find your resources in the GCP '
'project and to separate them from other resources. '
'Warning: be careful with this option and don\'t '
'change it unless you know what it does',
maxlen = 12,
update = lambda self, context: BlendNet.addon.genSID(self, 'session_id'),
)
manager_instance_type: EnumProperty(
name = 'Manager size',
description = 'Selected manager instance size',
items = BlendNet.addon.fillAvailableInstanceTypesManager,
)
manager_ca_path: StringProperty(
name = 'CA certificate',
description = 'Certificate Authority certificate pem file location',
subtype = 'FILE_PATH',
default = '',
)
manager_address: StringProperty(
name = 'Address',
description = 'If you are using an existing Manager service, put its address here '
'(it will be created automatically otherwise)',
default = '',
)
manager_port: IntProperty(
name = 'Port',
description = 'TLS tcp port to communicate Addon with Manager service',
min = 1,
max = 65535,
default = 8443,
)
manager_user: StringProperty(
name = 'User',
description = 'HTTP Basic Auth username (will be generated if empty)',
maxlen = 32,
default = 'blendnet-manager',
)
manager_password: StringProperty(
name = 'Password',
description = 'HTTP Basic Auth password (will be generated if empty)',
subtype = 'PASSWORD',
maxlen = 128,
default = '',
update = lambda self, context: BlendNet.addon.hidePassword(self, 'manager_password'),
)
manager_agent_instance_type: EnumProperty(
name = 'Agent size',
description = 'Selected agent instance size',
items = BlendNet.addon.fillAvailableInstanceTypesAgent,
)
manager_agents_max: IntProperty(
name = 'Agents max',
description = 'Maximum number of agents in Manager\'s pool',
min = 1,
max = 65535,
default = 3,
)
agent_use_cheap_instance: BoolProperty(
name = 'Use cheap VM',
description = 'Use cheap instances to save money',
default = True,
)
agent_cheap_multiplier: EnumProperty(
name = 'Cheap multiplier',
description = 'How to choose the price to get a cheap VM. '
'Some providers allow setting a maximum price for the instance, '
'calculated from the on-demand (max) price multiplied by this value.',
items = BlendNet.addon.getCheapMultiplierList,
)
agent_port: IntProperty(
name = 'Port',
description = 'TLS tcp port to communicate Manager with Agent service',
min = 1,
max = 65535,
default = 9443,
)
agent_user: StringProperty(
name = 'User',
description = 'HTTP Basic Auth username (will be generated if empty)',
maxlen = 32,
default = 'blendnet-agent',
)
agent_password: StringProperty(
name = 'Password',
description = 'HTTP Basic Auth password (will be generated if empty)',
subtype = 'PASSWORD',
maxlen = 128,
default = '',
update = lambda self, context: BlendNet.addon.hidePassword(self, 'agent_password'),
)
# Hidden
show_advanced: BoolProperty(
name = 'Advanced Properties',
description = 'Show/Hide the advanced properties',
default = False,
)
manager_password_hidden: StringProperty(
subtype = 'PASSWORD',
update = lambda self, context: BlendNet.addon.genPassword(self, 'manager_password_hidden'),
)
agent_password_hidden: StringProperty(
subtype = 'PASSWORD',
update = lambda self, context: BlendNet.addon.genPassword(self, 'agent_password_hidden'),
)
def draw(self, context):
layout = self.layout
# Provider
box = layout.box()
row = box.row()
split = box.split(factor=0.8)
split.prop(self, 'resource_provider')
info = BlendNet.addon.getProviderDocs(self.resource_provider).split('\n')
for line in info:
if line.startswith('Help: '):
split.operator('wm.url_open', text='How to setup', icon='HELP').url = line.split(': ', 1)[-1]
provider_settings = BlendNet.addon.getProviderSettings()
for key, data in provider_settings.items():
path = 'provider_' + self.resource_provider + '_' + key
if not path in self.__class__.__annotations__:
print('ERROR: Unable to find provider setting:', path)
continue
if path not in self or self[path] is None:
self[path] = data.get('value')
box.prop(self, path)
messages = BlendNet.addon.getProviderMessages(self.resource_provider)
for msg in messages:
box.label(text=msg, icon='ERROR')
if not BlendNet.addon.checkProviderIsSelected():
err = BlendNet.addon.getProviderDocs(self.resource_provider).split('\n')
for line in err:
box.label(text=line.strip(), icon='ERROR')
return
if self.resource_provider != 'local':
box = box.box()
box.label(text='Collected cloud info:')
provider_info = BlendNet.addon.getProviderInfo(context)
if 'ERRORS' in provider_info:
for err in provider_info['ERRORS']:
box.label(text=err, icon='ERROR')
for key, value in provider_info.items():
if key == 'ERRORS':
continue
split = box.split(factor=0.5)
split.label(text=key, icon='DOT')
split.label(text=value)
# Advanced properties panel
advanced_icon = 'TRIA_RIGHT' if not self.show_advanced else 'TRIA_DOWN'
box = layout.box()
box.prop(self, 'show_advanced', emboss=False, icon=advanced_icon)
if self.show_advanced:
if self.resource_provider != 'local':
row = box.row()
row.prop(self, 'session_id')
row = box.row(align=True)
row.prop(self, 'blender_dist_custom', text='')
if not self.blender_dist_custom:
row.prop(self, 'blender_dist')
else:
row.prop(self, 'blender_dist_url')
box.row().prop(self, 'blender_dist_checksum')
box_box = box.box()
box_box.label(text='Manager')
if self.resource_provider != 'local':
row = box_box.row()
row.prop(self, 'manager_instance_type', text='Type')
row = box_box.row()
price = BlendNet.addon.getManagerPriceBG(self.manager_instance_type, context)
if price[0] < 0.0:
row.label(text='WARNING: Unable to find price for the type "%s": %s' % (
self.manager_instance_type, price[1]
), icon='ERROR')
else:
row.label(text='Calculated price: ~%s/Hour (%s)' % (round(price[0], 12), price[1]))
if self.resource_provider == 'local':
row = box_box.row()
row.use_property_split = True
row.prop(self, 'manager_address')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'manager_ca_path')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'manager_port')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'manager_user')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'manager_password')
box_box = box.box()
box_box.label(text='Agent')
if self.resource_provider != 'local':
row = box_box.row()
row.prop(self, 'agent_use_cheap_instance')
if 'Cheap instances not available' in provider_info.get('ERRORS', []):
row.enabled = False
else:
row.prop(self, 'agent_cheap_multiplier')
row = box_box.row()
row.enabled = not BlendNet.addon.isManagerCreated()
row.prop(self, 'manager_agent_instance_type', text='Agents type')
row.prop(self, 'manager_agents_max', text='Agents max')
row = box_box.row()
price = BlendNet.addon.getAgentPriceBG(self.manager_agent_instance_type, context)
if price[0] < 0.0:
row.label(text='ERROR: Unable to find price for the type "%s": %s' % (
self.manager_agent_instance_type, price[1]
), icon='ERROR')
else:
row.label(text='Calculated combined price: ~%s/Hour (%s)' % (
round(price[0] * self.manager_agents_max, 12), price[1]
))
min_price = BlendNet.addon.getMinimalCheapPriceBG(self.manager_agent_instance_type, context)
if min_price > 0.0:
row = box_box.row()
row.label(text='Minimal combined price: ~%s/Hour' % (
round(min_price * self.manager_agents_max, 12),
))
if price[0] <= min_price:
row = box_box.row()
row.label(text='ERROR: Selected cheap price is lower than minimal one', icon='ERROR')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'agent_port')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'agent_user')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'agent_password')
class BlendNetSceneSettings(bpy.types.PropertyGroup):
scene_memory_req: IntProperty(
name = 'Scene RAM to render',
description = 'Required memory to render the scene in GB',
min = 0,
max = 65535,
default = 0,
)
@classmethod
def register(cls):
bpy.types.Scene.blendnet = PointerProperty(
name = 'BlendNet Settings',
description = 'BlendNet scene settings',
type = cls
)
@classmethod
def unregister(cls):
if hasattr(bpy.types.Scene, 'blendnet'):
del bpy.types.Scene.blendnet
class BlendNetManagerTask(bpy.types.PropertyGroup):
'''Class contains the manager task information'''
name: StringProperty()
create_time: StringProperty()
start_time: StringProperty()
end_time: StringProperty()
state: StringProperty()
done: StringProperty()
received: StringProperty()
class BlendNetSessionProperties(bpy.types.PropertyGroup):
manager_tasks: CollectionProperty(
name = 'Manager tasks',
description = 'Contains all the tasks that are currently available '
'on the manager',
type = BlendNetManagerTask,
)
manager_tasks_idx: IntProperty(default=0)
status: StringProperty(
name = 'BlendNet status',
description = 'BlendNet is performing some operation',
default = 'idle',
)
@classmethod
def register(cls):
bpy.types.WindowManager.blendnet = PointerProperty(
name = 'BlendNet Session Properties',
description = 'Current status of the process, for internal use only',
type = cls,
)
@classmethod
def unregister(cls):
if hasattr(bpy.types.WindowManager, 'blendnet'):
del bpy.types.WindowManager.blendnet
class BlendNetToggleManager(bpy.types.Operator):
bl_idname = 'blendnet.togglemanager'
bl_label = ''
bl_description = 'Start/Stop manager instance'
_timer = None
_last_run = 0
@classmethod
def poll(cls, context):
return context.window_manager.blendnet.status == 'idle' or BlendNet.addon.isManagerStarted()
def invoke(self, context, event):
wm = context.window_manager
BlendNet.addon.toggleManager()
if BlendNet.addon.isManagerStarted():
self.report({'INFO'}, 'BlendNet stopping Manager instance...')
wm.blendnet.status = 'Manager stopping...'
else:
self.report({'INFO'}, 'BlendNet starting Manager instance...')
wm.blendnet.status = 'Manager starting...'
if context.area:
context.area.tag_redraw()
wm.modal_handler_add(self)
self._timer = wm.event_timer_add(5.0, window=context.window)
return {'RUNNING_MODAL'}
def modal(self, context, event):
if event.type != 'TIMER' or self._last_run + 4.5 > time.time():
return {'PASS_THROUGH'}
self._last_run = time.time()
return self.execute(context)
def execute(self, context):
wm = context.window_manager
if wm.blendnet.status == 'Manager starting...':
if not BlendNet.addon.isManagerStarted():
return {'PASS_THROUGH'}
self.report({'INFO'}, 'BlendNet Manager started')
wm.blendnet.status = 'Manager connecting...'
if context.area:
context.area.tag_redraw()
BlendNet.addon.requestManagerInfo(context)
elif wm.blendnet.status == 'Manager stopping...':
if not BlendNet.addon.isManagerStopped():
return {'PASS_THROUGH'}
if wm.blendnet.status == 'Manager connecting...':
if not BlendNet.addon.requestManagerInfo(context):
return {'PASS_THROUGH'}
self.report({'INFO'}, 'BlendNet Manager connected')
if self._timer is not None:
wm.event_timer_remove(self._timer)
wm.blendnet.status = 'idle'
if context.area:
context.area.tag_redraw()
return {'FINISHED'}
class BlendNetDestroyManager(bpy.types.Operator):
bl_idname = 'blendnet.destroymanager'
bl_label = ''
bl_description = 'Destroy manager instance'
@classmethod
def poll(cls, context):
return BlendNet.addon.isManagerStopped()
def invoke(self, context, event):
BlendNet.addon.destroyManager()
self.report({'INFO'}, 'BlendNet destroy Manager instance...')
return {'FINISHED'}
class BlendNetTaskPreviewOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskpreview'
bl_label = 'Open preview'
bl_description = 'Show the render for the currently selected task'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
return len(bn.manager_tasks) > bn.manager_tasks_idx
def _findRenderResultArea(self, context):
for window in context.window_manager.windows:
if window.scene != context.scene:
continue
for area in window.screen.areas:
if area.type != 'IMAGE_EDITOR':
continue
if area.spaces.active.image.type == 'RENDER_RESULT':
return area
return None
def invoke(self, context, event):
# Show the preview of the render if not open
if not self._findRenderResultArea(context):
bpy.ops.render.view_show('INVOKE_DEFAULT')
# Save the original render engine to run render on BlendNet
original_render_engine = context.scene.render.engine
context.scene.render.engine = __package__
# Start the render process
self.result = bpy.ops.render.render('INVOKE_DEFAULT')
# Restore the original scene engine
time.sleep(1.0)
if context.scene.render.engine == __package__:
context.scene.render.engine = original_render_engine
return {'FINISHED'}
class BlendNetRunTaskOperation(bpy.types.Operator):
bl_idname = 'blendnet.runtask'
bl_label = 'Run Task'
bl_description = 'Run Manager task using BlendNet resources'
is_animation: BoolProperty(
name = 'Animation',
description = 'Runs animation rendering instead of just a still image rendering',
default = False
)
_timer = None
_project_file: None # temp blend project file to ensure it will not be changed
_frame: 0 # current/start frame depends on animation
_frame_to: 0 # end frame for animation
_frame_orig: 0 # to restore the current frame after animation processing
_task_name: None # store task name to retry later
@classmethod
def poll(cls, context):
return BlendNet.addon.isManagerActive()
def _findRenderResultArea(self, context):
for window in context.window_manager.windows:
if window.scene != context.scene:
continue
for area in window.screen.areas:
if area.type != 'IMAGE_EDITOR':
continue
if area.spaces.active.image.type == 'RENDER_RESULT':
return area
def init(self, context):
'''Initializes the execution'''
if not bpy.data.filepath:
self.report({'ERROR'}, 'Unable to render an unsaved project. Please save it somewhere first.')
return {'CANCELLED'}
# Fix and verify the blendfile dependencies
bads = blend_file.getDependencies(bpy.path.abspath('//'), os.path.abspath(''))[1]
if bads:
self.report({'ERROR'}, 'Found some bad dependencies - please fix them before run: %s' % (bads,))
return {'CANCELLED'}
# Saving project to the same directory
try:
self._project_file = bpy.data.filepath + '_blendnet.blend'
bpy.ops.wm.save_as_mainfile(
filepath = self._project_file,
check_existing = False,
compress = True,
copy = True,
)
except Exception as e:
self.report({'ERROR'}, 'Unable to save the "_blendnet.blend" project file: %s' % (e,))
return {'CANCELLED'}
if self.is_animation:
self._frame = context.scene.frame_start
self._frame_to = context.scene.frame_end
self._frame_orig = context.scene.frame_current
else:
self._frame = context.scene.frame_current
self._task_name = None
context.window_manager.modal_handler_add(self)
self._timer = context.window_manager.event_timer_add(0.1, window=context.window)
return {'RUNNING_MODAL'}
def invoke(self, context, event):
return self.init(context)
def modal(self, context, event):
if event.type != 'TIMER':
return {'PASS_THROUGH'}
# Waiting for manager
if not BlendNet.addon.isManagerActive():
return {'PASS_THROUGH'}
return self.execute(context)
def execute(self, context):
scene = context.scene
wait = False
if not hasattr(self, '_frame'):
wait = True # The execute is running directly, so run in fg
if 'CANCELLED' in self.init(context):
self.report({'ERROR'}, 'Unable to init task preparation')
return {'CANCELLED'}
scene.frame_current = self._frame
fname = bpy.path.basename(bpy.data.filepath)
if not self._task_name:
# If the operation is not completed - reuse the same task name
d = datetime.utcnow().strftime('%y%m%d%H%M')
self._task_name = '%s%s-%d-%s' % (
BlendNet.addon.getTaskProjectPrefix(),
d, scene.frame_current,
BlendNet.addon.genRandomString(3)
)
print('DEBUG: Uploading task "%s" to the manager' % self._task_name)
# Prepare list of files need to be uploaded
deps, bads = blend_file.getDependencies(bpy.path.abspath('//'), os.path.abspath(''))
if bads:
self.report({'ERROR'}, 'Found some bad dependencies - please fix them before run: %s' % (bads,))
return {'CANCELLED'}
deps_map = dict([ (rel, bpy.path.abspath(rel)) for rel in deps ])
deps_map['//'+fname] = self._project_file
# Run the dependencies upload background process
BlendNet.addon.managerTaskUploadFiles(self._task_name, deps_map)
# Slow down the check process
if self._timer is not None:
context.window_manager.event_timer_remove(self._timer)
self._timer = context.window_manager.event_timer_add(3.0, window=context.window)
status = BlendNet.addon.managerTaskUploadFilesStatus()
if wait:
for retry in range(1, 10):
status = BlendNet.addon.managerTaskUploadFilesStatus()
if not status:
break
time.sleep(1.0)
if status:
self.report({'INFO'}, 'Uploading process for task %s: %s' % (self._task_name, status))
return {'PASS_THROUGH'}
# Configuring the task
print('INFO: Configuring task "%s"' % self._task_name)
self.report({'INFO'}, 'Configuring task "%s"' % (self._task_name,))
samples = None
if hasattr(scene.cycles, 'progressive'):
# For blender < 3.0.0
if scene.cycles.progressive == 'PATH':
samples = scene.cycles.samples
elif scene.cycles.progressive == 'BRANCHED_PATH':
samples = scene.cycles.aa_samples
else:
samples = scene.cycles.samples
if hasattr(scene.cycles, 'use_square_samples'):
# For blender < 3.0.0
# Addon need to pass the actual samples number to the manager
if scene.cycles.use_square_samples:
samples *= samples
# Where the compose result will be stored on the Addon side
compose_filepath = scene.render.frame_path()
if scene.render.filepath.startswith('//'):
# It's relative to blend project path
compose_filepath = bpy.path.relpath(compose_filepath)
cfg = {
'samples': samples,
'frame': scene.frame_current,
'project': fname,
'use_compositing_nodes': scene.render.use_compositing,
'compose_filepath': compose_filepath,
'project_path': bpy.path.abspath('//'), # To resolve the project parent paths like `//../..`
'cwd_path': os.path.abspath(''), # Current working directory to resolve relative paths like `../dir/file.txt`
}
if not BlendNet.addon.managerTaskConfig(self._task_name, cfg):
self.report({'WARNING'}, 'Unable to config the task "%s", let\'s retry...' % (self._task_name,))
return {'PASS_THROUGH'}
# Running the task
self.report({'INFO'}, 'Running task "%s"' % self._task_name)
if not BlendNet.addon.managerTaskRun(self._task_name):
self.report({'WARNING'}, 'Unable to start the task "%s", let\'s retry...' % (self._task_name,))
return {'PASS_THROUGH'}
self.report({'INFO'}, 'Task "%s" marked as ready to start' % (self._task_name,))
# Ok, task is started - we can clean the name
self._task_name = None
if self.is_animation:
if self._frame < self._frame_to:
# Not all the frames are processed
self._frame += 1
return {'PASS_THROUGH'}
# Restore the original current frame
scene.frame_current = self._frame_orig
# Removing no more required temp blend file
os.remove(self._project_file)
if self._timer is not None:
context.window_manager.event_timer_remove(self._timer)
return {'FINISHED'}
class TASKS_UL_list(bpy.types.UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
self.use_filter_sort_alpha = True
if self.layout_type in {'DEFAULT', 'COMPACT'}:
split = layout.split(factor=0.7)
split.label(text=item.name)
split.label(text=('%s:%s' % (item.state[0], item.done)) if item.done and item.state != 'COMPLETED' else item.state)
elif self.layout_type in {'GRID'}:
pass
class BlendNetGetNodeLogOperation(bpy.types.Operator):
bl_idname = 'blendnet.getnodelog'
bl_label = 'Get Node Log'
bl_description = 'Show the node (instance) log data'
node_id: StringProperty(
name = 'Node ID',
description = 'ID of the node/instance to get the log',
default = ''
)
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
data = BlendNet.addon.getNodeLog(self.node_id)
if not data:
self.report({'WARNING'}, 'No log data retrieved for ' + self.node_id)
return {'CANCELLED'}
if data == 'NOT IMPLEMENTED':
self.report({'WARNING'}, 'Not implemented for the current provider')
return {'CANCELLED'}
prefix = self.node_id
def drawPopup(self, context):
layout = self.layout
if BlendNet.addon.showLogWindow(prefix, data):
layout.label(text='''Don't forget to unlink the file if you '''
'''don't want it to stay in blend file.''')
else:
layout.label(text='Unable to show the log window', icon='ERROR')
wm.popup_menu(drawPopup, title='Log for ' + prefix, icon='INFO')
return {'FINISHED'}
class BlendNetGetAddonLogOperation(bpy.types.Operator):
bl_idname = 'blendnet.getaddonlog'
bl_label = 'Get BlendNet Addon Log'
bl_description = 'Show the running BlendNet addon log information'
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
out = BlendNet.addon.getAddonLog()
prefix = 'addon'
if not out:
self.report({'ERROR'}, 'No log data found for ' + prefix)
return {'CANCELLED'}
data = []
line = ''
for t, l in out.items():
if not l.endswith('\n'):
line += l
continue
time_str = datetime.fromtimestamp(round(float(t), 3)).strftime('%y.%m.%d %H:%M:%S.%f')
data.append(time_str + '\t' + line + l)
line = ''
if line:
data.append('{not completed line}\t' + line)
data = ''.join(data)
def drawPopup(self, context):
layout = self.layout
if BlendNet.addon.showLogWindow(prefix, data):
layout.label(text='Don\'t forget to unlink the file if you don\'t want it to stay in blend file.')
else:
layout.label(text='Unable to show the log window', icon='ERROR')
wm.popup_menu(drawPopup, title='Log for ' + prefix, icon='INFO')
return {'FINISHED'}
class BlendNetGetServiceLogOperation(bpy.types.Operator):
bl_idname = 'blendnet.getservicelog'
bl_label = 'Get Service Log'
bl_description = 'Show the service (daemon) log data'
agent_name: StringProperty(
name = 'Name of Agent',
description = 'Name of Agent (or Manager by default) to get the log from',
default = ''
)
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
out = {}
if self.agent_name:
out = BlendNet.addon.agentGetLog(self.agent_name)
else:
out = BlendNet.addon.managerGetLog()
prefix = self.agent_name if self.agent_name else BlendNet.addon.getResources(context).get('manager', {}).get('name')
if not out:
self.report({'ERROR'}, 'No log data retrieved for ' + prefix)
return {'CANCELLED'}
data = []
line = ''
for t, l in out.items():
if not l.endswith('\n'):
line += l
continue
time_str = datetime.fromtimestamp(round(float(t), 3)).strftime('%y.%m.%d %H:%M:%S.%f')
data.append(time_str + '\t' + line + l)
line = ''
if line:
data.append('{not completed line}\t' + line)
data = ''.join(data)
def drawPopup(self, context):
layout = self.layout
if BlendNet.addon.showLogWindow(prefix, data):
layout.label(text='Don\'t forget to unlink the file if you don\'t want it to stay in blend file.')
else:
layout.label(text='Unable to show the log window', icon='ERROR')
wm.popup_menu(drawPopup, title='Log for ' + prefix, icon='INFO')
return {'FINISHED'}
class BlendNetTaskInfoOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskinfo'
bl_label = 'Task info'
bl_description = 'Show the current task info panel'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
return len(bn.manager_tasks) > bn.manager_tasks_idx
def invoke(self, context, event):
wm = context.window_manager
def drawPopup(self, context):
layout = self.layout
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
data = BlendNet.addon.managerTaskStatus(task_name)
if not data:
return
keys = BlendNet.addon.naturalSort(data.keys())
for key in keys:
if key == 'result':
layout.label(text='%s:' % (key,))
for k in data[key]:
layout.label(text=' %s: %s' % (k, data[key][k]))
elif key == 'state_error_info':
layout.label(text='%s:' % (key,), icon='ERROR')
for it in data[key]:
if isinstance(it, dict):
for k, v in it.items():
layout.label(text=' %s: %s' % (k, v))
else:
layout.label(text=' ' + str(it))
else:
layout.label(text='%s: %s' % (key, data[key]))
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
wm.popup_menu(drawPopup, title='Task info for "%s"' % task_name, icon='INFO')
return {'FINISHED'}
class BlendNetTaskMessagesOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskmessages'
bl_label = 'Show task messages'
bl_description = 'Show the task execution messages'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
if len(bn.manager_tasks) <= bn.manager_tasks_idx:
return False
task_state = bn.manager_tasks[bn.manager_tasks_idx].state
return task_state not in {'CREATED', 'PENDING'}
def invoke(self, context, event):
wm = context.window_manager
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
out = BlendNet.addon.managerTaskMessages(task_name)
if not out:
self.report({'ERROR'}, 'No task messages found for "%s"' % (task_name,))
return {'CANCELLED'}
data = []
keys = BlendNet.addon.naturalSort(out.keys())
for key in keys:
data.append(key)
if not out[key]:
continue
for line in out[key]:
data.append(' ' + line)
data = '\n'.join(data)
prefix = task_name + 'messages'
def drawPopup(self, context):
layout = self.layout
if BlendNet.addon.showLogWindow(prefix, data):
layout.label(text='Don\'t forget to unlink the file if you don\'t want it to stay in blend file.')
else:
layout.label(text='Unable to show the log window', icon='ERROR')
wm.popup_menu(drawPopup, title='Task messages for "%s"' % (task_name,), icon='TEXT')
return {'FINISHED'}
class BlendNetTaskDetailsOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskdetails'
bl_label = 'Show task details'
bl_description = 'Show the task execution details'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
if len(bn.manager_tasks) <= bn.manager_tasks_idx:
return False
task_state = bn.manager_tasks[bn.manager_tasks_idx].state
return task_state not in {'CREATED', 'PENDING'}
def invoke(self, context, event):
wm = context.window_manager
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
out = BlendNet.addon.managerTaskDetails(task_name)
if not out:
self.report({'ERROR'}, 'No task details found for "%s"' % (task_name,))
return {'CANCELLED'}
data = []
keys = BlendNet.addon.naturalSort(out.keys())
for key in keys:
data.append(key)
if not out[key]:
continue
for line in out[key]:
data.append(' ' + str(line))
data = '\n'.join(data)
prefix = task_name + 'details'
def drawPopup(self, context):
layout = self.layout
if BlendNet.addon.showLogWindow(prefix, data):
layout.label(text='Don\'t forget to unlink the file if you don\'t want it to stay in blend file.')
else:
layout.label(text='Unable to show the log window', icon='ERROR')
wm.popup_menu(drawPopup, title='Task details for "%s"' % (task_name,), icon='TEXT')
return {'FINISHED'}
class BlendNetTaskRunOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskrun'
bl_label = 'Task run'
bl_description = 'Start the stopped or created task'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
if len(bn.manager_tasks) <= bn.manager_tasks_idx:
return False
task_state = bn.manager_tasks[bn.manager_tasks_idx].state
return task_state in {'CREATED', 'STOPPED'}
def invoke(self, context, event):
wm = context.window_manager
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
BlendNet.addon.managerTaskRun(task_name)
return {'FINISHED'}
class BlendNetTaskDownloadOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskdownload'
bl_label = 'Download task result'
bl_description = 'Download the completed task result'
result: StringProperty()
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
if len(bn.manager_tasks) <= bn.manager_tasks_idx:
return False
task_state = bn.manager_tasks[bn.manager_tasks_idx].state
# Allow downloading results even when the task ended in the error state
return task_state in {'COMPLETED', 'ERROR'}
def invoke(self, context, event):
wm = context.window_manager
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
# If the result is downloaded manually - use the current project output directory
out_dir = os.path.dirname(bpy.context.scene.render.frame_path())
dir_path = os.path.join(out_dir, self.result)
result = BlendNet.addon.managerDownloadTaskResult(task_name, self.result, dir_path)
if result is None:
self.report({'WARNING'}, 'Unable to download the final result for %s, please retry later ' % (task_name,))
return {'CANCELLED'}
if not result:
self.report({'INFO'}, 'Downloading the final result for %s... ' % (task_name,))
return {'FINISHED'}
self.report({'INFO'}, 'The file is already downloaded and appears unchanged for %s... ' % (task_name,))
return {'CANCELLED'}
class BlendNetTaskStopOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskstop'
bl_label = 'Task stop'
bl_description = 'Stop the pending, running or error task'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
if len(bn.manager_tasks) <= bn.manager_tasks_idx:
return False
task_state = bn.manager_tasks[bn.manager_tasks_idx].state
return task_state in {'PENDING', 'RUNNING', 'ERROR'}
def invoke(self, context, event):
wm = context.window_manager
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
BlendNet.addon.managerTaskStop(task_name)
return {'FINISHED'}
class BlendNetTasksStopStartedOperation(bpy.types.Operator):
bl_idname = 'blendnet.tasksstopstarted'
bl_label = 'Stop all started tasks'
bl_description = 'Stop all the pending or running tasks'
bl_options = {'REGISTER', 'INTERNAL'}
tasks: CollectionProperty(type=BlendNetManagerTask)
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
self.tasks.clear()
for task in wm.blendnet.manager_tasks:
if task.state in {'PENDING', 'RUNNING'}:
self.tasks.add().name = task.name
return wm.invoke_confirm(self, event)
def execute(self, context):
self.report({'INFO'}, 'Stopping %s tasks' % len(self.tasks))
for task in self.tasks:
print('INFO: Stopping task "%s"' % task.name)
BlendNet.addon.managerTaskStop(task.name)
self.tasks.clear()
return {'FINISHED'}
class BlendNetTaskRemoveOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskremove'
bl_label = 'Remove selected task'
bl_description = 'Remove the task from the tasks list'
bl_options = {'REGISTER', 'INTERNAL'}
task_name: StringProperty()
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
if len(bn.manager_tasks) <= bn.manager_tasks_idx:
return False
return bn.manager_tasks[bn.manager_tasks_idx].state in {'CREATED', 'STOPPED', 'COMPLETED', 'ERROR'}
def invoke(self, context, event):
wm = context.window_manager
self.task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
return wm.invoke_confirm(self, event)
def execute(self, context):
self.report({'INFO'}, 'Removing task "%s"' % self.task_name)
BlendNet.addon.managerTaskRemove(self.task_name)
return {'FINISHED'}
class BlendNetAgentRemoveOperation(bpy.types.Operator):
bl_idname = 'blendnet.agentremove'
bl_label = 'Remove the agent'
bl_description = 'Remove the agent from the agents pool, or terminate the instance when using a cloud provider'
bl_options = {'REGISTER', 'INTERNAL'}
agent_name: StringProperty()
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
return wm.invoke_confirm(self, event)
def execute(self, context):
self.report({'INFO'}, 'Removing agent "%s"' % self.agent_name)
prefs = bpy.context.preferences.addons[__package__].preferences
if prefs.resource_provider == 'local':
if not BlendNet.addon.managerAgentRemove(self.agent_name):
self.report({'WARNING'}, 'Unable to remove agent "%s"' % (self.agent_name,))
return {'CANCELLED'}
self.report({'INFO'}, 'Removed agent "%s"' % (self.agent_name,))
else:
BlendNet.addon.destroyAgent(self.agent_name)
self.report({'INFO'}, 'BlendNet destroy Agent instance ' + self.agent_name)
return {'FINISHED'}
class BlendNetAgentCreateOperation(bpy.types.Operator):
bl_idname = 'blendnet.agentcreate'
bl_label = 'Agent create'
bl_description = 'Register new agent in the manager'
agent_name: StringProperty(
name = 'Name',
description = 'Name of Agent to create',
default = ''
)
agent_address: StringProperty(
name = 'Address',
description = 'IP or domain name of the agent',
default = ''
)
agent_port: IntProperty(
name = 'Port',
description = 'TLS tcp port to communicate Manager with Agent service',
min = 1,
max = 65535,
default = 9443,
)
agent_user: StringProperty(
name = 'User',
description = 'HTTP Basic Auth username',
maxlen = 32,
default = '',
)
agent_password: StringProperty(
name = 'Password',
description = 'HTTP Basic Auth password',
subtype = 'PASSWORD',
maxlen = 128,
default = '',
)
@classmethod
def poll(cls, context):
return BlendNet.addon.isManagerActive()
def invoke(self, context, event):
wm = context.window_manager
prefs = bpy.context.preferences.addons[__package__].preferences
self.agent_port = prefs.agent_port
self.agent_user = prefs.agent_user
self.agent_password = prefs.agent_password_hidden
return wm.invoke_props_dialog(self)
def execute(self, context):
if not self.agent_name:
self.report({'ERROR'}, 'No agent name is specified')
return {'PASS_THROUGH'}
if not self.agent_address:
self.report({'ERROR'}, 'No agent address is specified')
return {'PASS_THROUGH'}
cfg = {
'address': self.agent_address,
'port': self.agent_port,
'auth_user': self.agent_user,
'auth_password': self.agent_password,
}
if not BlendNet.addon.managerAgentCreate(self.agent_name, cfg):
self.report({'WARNING'}, 'Unable to create agent "%s"' % (self.agent_name,))
return {'PASS_THROUGH'}
self.report({'INFO'}, 'Created agent "%s" (%s:%s)' % (
self.agent_name, self.agent_address, self.agent_port
))
return {'FINISHED'}
class BlendNetTasksRemoveEndedOperation(bpy.types.Operator):
bl_idname = 'blendnet.tasksremoveended'
bl_label = 'Remove all ended tasks'
bl_description = 'Remove all the stopped or completed tasks'
bl_options = {'REGISTER', 'INTERNAL'}
tasks: CollectionProperty(type=BlendNetManagerTask)
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
self.tasks.clear()
for task in wm.blendnet.manager_tasks:
if task.state in {'STOPPED', 'COMPLETED'}:
self.tasks.add().name = task.name
return wm.invoke_confirm(self, event)
def execute(self, context):
self.report({'INFO'}, 'Removing %s tasks' % len(self.tasks))
for task in self.tasks:
print('INFO: Removing task "%s"' % task.name)
BlendNet.addon.managerTaskRemove(task.name)
self.tasks.clear()
return {'FINISHED'}
class BlendNetTaskMenu(bpy.types.Menu):
bl_idname = 'RENDER_MT_blendnet_task_menu'
bl_label = 'Task Menu'
bl_description = 'Operate on the tasks in the list'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
return len(bn.manager_tasks) > bn.manager_tasks_idx
def draw(self, context):
layout = self.layout
wm = context.window_manager
if not wm.blendnet.manager_tasks:
layout.label(text='No tasks in the list')
return
if len(wm.blendnet.manager_tasks) <= wm.blendnet.manager_tasks_idx:
# No such item in the list
return
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
layout.label(text='Task "%s":' % task_name)
layout.operator('blendnet.taskinfo', icon='INFO')
layout.operator('blendnet.taskmessages', icon='TEXT')
layout.operator('blendnet.taskdetails', icon='TEXT')
layout.operator('blendnet.taskdownload', text='Download render', icon='DOWNARROW_HLT').result = 'render'
layout.operator('blendnet.taskdownload', text='Download compose', icon='DOWNARROW_HLT').result = 'compose'
layout.operator('blendnet.taskrun', icon='PLAY')
layout.operator('blendnet.taskremove', icon='TRASH')
layout.operator('blendnet.taskstop', icon='PAUSE')
layout.label(text='All tasks actions:')
layout.operator('blendnet.tasksstopstarted', text='Stop all started tasks', icon='PAUSE')
layout.operator('blendnet.tasksremoveended', text='Remove all ended tasks', icon='TRASH')
class BlendNetRenderPanel(bpy.types.Panel):
bl_idname = 'RENDER_PT_blendnet_render'
bl_label = 'BlendNet'
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = 'render'
bl_options = {'HIDE_HEADER'}
@classmethod
def poll(cls, context):
# Show the panel when the BlendNet or Cycles render engine is selected
return context.scene.render.engine in ('CYCLES', __package__)
def draw(self, context):
layout = self.layout
wm = context.window_manager
bn = context.scene.blendnet
prefs = context.preferences.addons[__package__].preferences
box = layout.box()
row = box.split(factor=0.5)
split = row.split(factor=0.1)
split.prop(prefs, 'blendnet_show_panel', icon_only=True)
split.label(text='BlendNet Render (%s)' % (prefs.resource_provider,))
split = row.split(factor=0.9)
split.label(text=context.window_manager.blendnet.status)
split.operator('blendnet.getaddonlog', text='', icon='TEXT')
if not prefs.blendnet_show_panel:
return
row = box.row()
row.use_property_split = True
row.use_property_decorate = False # No prop animation
row.prop(bn, 'scene_memory_req', text='Render RAM (GB)')
if not BlendNet.addon.checkProviderIsSelected():
box.label(text='ERROR: Provider init failed, check addon settings', icon='ERROR')
return
if not BlendNet.addon.checkAgentMemIsEnough():
box.label(text='WARN: Agent does not have enough memory to render the scene', icon='ERROR')
if not prefs.agent_use_cheap_instance:
box.label(text='WARN: No cheap VMs available, check addon settings', icon='ERROR')
if context.scene.render.engine != __package__:
row = box.row(align=True)
if BlendNet.addon.isManagerStarted():
row.operator('blendnet.runtask', text='Run Image Task', icon='RENDER_STILL').is_animation = False
row.operator('blendnet.runtask', text='Run Animation Tasks', icon='RENDER_ANIMATION').is_animation = True
elif prefs.resource_provider != 'local':
row.operator('blendnet.togglemanager', text='Run Manager instance', icon='ADD')
elif prefs.resource_provider == 'local':
split = row.split(factor=0.3)
split.label(text='Using Manager')
split.label(text='%s:%s' % (prefs.manager_address, prefs.manager_port))
if BlendNet.addon.isManagerActive():
box.template_list('TASKS_UL_list', '', wm.blendnet, 'manager_tasks', wm.blendnet, 'manager_tasks_idx', rows=1)
split = box.split(factor=0.8)
split.operator('blendnet.taskpreview', text='Task Preview', icon='RENDER_RESULT')
split.menu('RENDER_MT_blendnet_task_menu', text='Actions')
class BlendNetManagerPanel(bpy.types.Panel):
bl_idname = 'RENDER_PT_blendnet_manager'
bl_parent_id = 'RENDER_PT_blendnet_render'
bl_label = ' '
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = 'render'
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
return context.preferences.addons[__package__].preferences.blendnet_show_panel and BlendNet.addon.checkProviderIsSelected()
def draw_header(self, context):
layout = self.layout
layout.label(text='Manager')
status = BlendNet.addon.getManagerStatus()
layout.label(text=status[0], icon=status[1])
prefs = bpy.context.preferences.addons[__package__].preferences
if prefs.resource_provider != 'local':
layout.operator('blendnet.togglemanager', icon='ADD' if not BlendNet.addon.isManagerStarted() else 'X')
layout.operator('blendnet.destroymanager', icon='TRASH')
def draw(self, context):
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False # No prop animation
prefs = bpy.context.preferences.addons[__package__].preferences
if prefs.resource_provider != 'local':
row = layout.row()
row.enabled = not BlendNet.addon.isManagerCreated()
row.prop(prefs, 'manager_instance_type', text='Type')
price = BlendNet.addon.getManagerPriceBG(prefs.manager_instance_type, context)
row = layout.row()
if price[0] < 0.0:
row.label(text='WARNING: Unable to find price for the type "%s": %s' % (
prefs.manager_instance_type, price[1]
), icon='ERROR')
else:
row.label(text='Calculated price: ~%s/Hour (%s)' % (round(price[0], 8), price[1]))
if prefs.resource_provider == 'local':
split = layout.split(factor=0.3)
split.label(text='Address')
split.label(text='%s:%s' % (prefs.manager_address, prefs.manager_port))
row = layout.row()
manager_info = BlendNet.addon.getResources(context).get('manager')
col = row.column()
col.enabled = BlendNet.addon.isManagerActive()
col.operator('blendnet.getservicelog', text='Service Log', icon='TEXT').agent_name = ''
col = row.column()
col.enabled = BlendNet.addon.isManagerStarted()
op = col.operator('blendnet.getnodelog', text='Node Log', icon='TEXT')
op.node_id = manager_info.get('id', '') if manager_info else ''
if manager_info:
layout.label(text='Manager instance:')
box = layout.box()
for key, value in manager_info.items():
split = box.split(factor=0.3)
split.label(text=key)
split.label(text=str(value))
if BlendNet.addon.isManagerActive():
info = BlendNet.addon.requestManagerInfo(context)
if info:
layout.label(text='Manager info:')
box = layout.box()
blender_version = info.get('blender', {}).get('version_string')
if blender_version:
split = box.split(factor=0.3)
split.label(text='blender')
split.label(text=blender_version)
for key, value in info.get('platform', {}).items():
split = box.split(factor=0.3)
split.label(text=key)
split.label(text=str(value))
class BlendNetAgentsPanel(bpy.types.Panel):
bl_idname = 'RENDER_PT_blendnet_agents'
bl_parent_id = 'RENDER_PT_blendnet_render'
bl_label = ' '
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = 'render'
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
return context.preferences.addons[__package__].preferences.blendnet_show_panel and BlendNet.addon.checkProviderIsSelected()
def draw_header(self, context):
layout = self.layout
layout.label(text='Agents (%d)' % BlendNet.addon.getStartedAgentsNumber(context))
prefs = bpy.context.preferences.addons[__package__].preferences
if prefs.resource_provider == 'local':
layout.operator('blendnet.agentcreate', icon='ADD', text='')
def draw(self, context):
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False # No prop animation
prefs = bpy.context.preferences.addons[__package__].preferences
if prefs.resource_provider != 'local':
row = layout.row()
row.prop(prefs, 'manager_agent_instance_type', text='Agents type')
row.enabled = not BlendNet.addon.isManagerStarted()
row = layout.row()
row.prop(prefs, 'manager_agents_max', text='Agents max')
row.enabled = not BlendNet.addon.isManagerStarted()
row = layout.row()
price = BlendNet.addon.getAgentPriceBG(prefs.manager_agent_instance_type, context)
if price[0] < 0.0:
row.label(text='ERROR: Unable to find price for the type "%s": %s' % (
prefs.manager_agent_instance_type, price[1]
), icon='ERROR')
else:
row.label(text='Calculated combined price: ~%s/Hour (%s)' % (
round(price[0] * prefs.manager_agents_max, 8), price[1]
))
min_price = BlendNet.addon.getMinimalCheapPriceBG(prefs.manager_agent_instance_type, context)
if min_price > 0.0:
row = layout.row()
row.label(text='Minimal combined price: ~%s/Hour' % (
round(min_price * prefs.manager_agents_max, 8),
))
if price[0] <= min_price:
row = layout.row()
row.label(text='ERROR: Selected cheap price is lower than minimal one', icon='ERROR')
agents = BlendNet.addon.getResources(context).get('agents', {})
if agents:
box = layout.box()
for inst_name in sorted(agents.keys()):
info = agents[inst_name]
split = box.split(factor=0.8)
split.label(text=info.get('name'))
row = split.row()
row.enabled = BlendNet.addon.isManagerActive()
# The Agent status
if info.get('error'):
row.label(icon='ERROR') # You need to check logs
if info.get('active'):
row.label(icon='CHECKMARK') # Agent is active
elif info.get('started'):
row.label(icon='REC') # Node is started, but Agent is initializing
elif info.get('stopped'):
row.label(icon='PAUSE') # Node is stopped
else:
row.label(icon='X') # Node is terminated or unknown state
row.enabled = bool(info.get('started') or info.get('stopped')) or prefs.resource_provider == 'local'
if info.get('active'):
row.operator('blendnet.getservicelog', text='', icon='TEXT').agent_name = info.get('name', '')
else:
col = row.column()
col.operator('blendnet.getnodelog', text='', icon='TEXT').node_id = info.get('id', '')
col.enabled = bool(info.get('started'))
row.operator('blendnet.agentremove', icon='TRASH', text='').agent_name = info.get('name', '')
class BlendNetRenderEngine(bpy.types.RenderEngine):
'''Continuous render engine that allows switching between tasks'''
bl_idname = __package__
bl_label = "BlendNet (don't use as a primary engine)"
bl_use_postprocess = True
bl_use_preview = False
def __init__(self):
self._prev_status = None
self._prev_message = None
print('DEBUG: Init BlendNet render')
def __del__(self):
print('DEBUG: Delete BlendNet render')
def updateStats(self, status = None, message = None):
'''Update the status only if something changed, and print it to the console'''
status = status or self._prev_status or ''
message = message or self._prev_message or ''
self.update_stats(status, message)
if self._prev_status != status or self._prev_message != message:
print('INFO: Render status: %s, %s' % (status, message))
self._prev_status = status
self._prev_message = message
def secToTime(self, sec):
h = sec // 3600
m = (sec % 3600) // 60
out = str((sec % 3600) % 60)+'s'
if h or m:
out = str(m)+'m'+out
if h:
out = str(h)+'h'+out
return out
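# Illustrative examples (not part of the original addon) of what secToTime
# returns for a few integer inputs:
#   secToTime(42)   -> '42s'
#   secToTime(65)   -> '1m5s'
#   secToTime(3725) -> '1h2m5s'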
def render(self, depsgraph):
scene = depsgraph.scene
wm = bpy.context.window_manager
scale = scene.render.resolution_percentage / 100.0
self.size_x = int(scene.render.resolution_x * scale)
self.size_y = int(scene.render.resolution_y * scale)
rendering = True
prev_status = {}
prev_name = ''
loaded_final_render = False
temp_dir = tempfile.TemporaryDirectory(prefix='blendnet-preview_')
result = self.begin_result(0, 0, self.size_x, self.size_y)
while rendering:
time.sleep(1.0)
if self.test_break():
# TODO: render cancelled
self.updateStats(None, 'Cancelling...')
rendering = False
if len(wm.blendnet.manager_tasks) < wm.blendnet.manager_tasks_idx+1:
self.updateStats('Please select the task in BlendNet manager tasks list')
continue
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
if task_name != prev_name:
self.update_result(result)
prev_name = task_name
loaded_final_render = False
status = BlendNet.addon.managerTaskStatus(task_name)
if not status:
continue
self.updateStats(None, '%s: %s' % (task_name, status.get('state')))
if status.get('state') == 'RUNNING':
remaining = None
if status.get('remaining'):
remaining = self.secToTime(status.get('remaining'))
self.updateStats('Rendered samples: %s/%s | Remaining: %s' % (
status.get('samples_done'), status.get('samples'),
remaining,
))
update_render = None
if status.get('state') == 'COMPLETED':
if not loaded_final_render:
total_time = self.secToTime((status.get('end_time') or 0) - (status.get('start_time_actual') or 0))
out_file = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].received
if out_file == 'skipped':
# File was skipped by the downloader, so download it to temp dir
out_file = BlendNet.addon.managerDownloadTaskResult(task_name, 'compose', temp_dir.name)
if out_file and os.path.isfile(out_file):
self.updateStats('Got the final result: %s | Task render time: %s' % (out_file, total_time))
update_render = out_file
loaded_final_render = True
else:
# File is going to be downloaded by BlendNet.addon.updateManagerTasks() soon
self.updateStats('%s | Task render time: %s' % (out_file, total_time))
elif status.get('result', {}).get('preview') != prev_status.get('result', {}).get('preview'):
out_file = BlendNet.addon.managerDownloadTaskResult(task_name, 'preview', temp_dir.name)
if out_file and os.path.isfile(out_file):
update_render = out_file
else:
# It's downloading in the background, so don't store it right now
status['result']['preview'] = prev_status.get('result', {}).get('preview')
if update_render:
if os.path.isfile(update_render):
try:
result.layers[0].load_from_file(update_render)
print('DEBUG: Loaded preview layer:', update_render)
except Exception as e:
print('DEBUG: Unable to load the preview layer:', e)
result.load_from_file(update_render)
print('DEBUG: Loaded render result file:', update_render)
else:
print('ERROR: Unable to load non-existent result file "%s"' % (update_render,))
self.update_result(result)
prev_status = status
self.update_progress((status.get('samples_done') or 0) / (status.get('samples') or 1))
self.end_result(result)
def loadProvidersSettings():
'''Get the available providers' settings so they can be registered on the preferences class during addon registration'''
all_settings = BlendNet.addon.getProvidersSettings()
for provider, provider_settings in all_settings.items():
for key, data in provider_settings.items():
path = 'provider_' + provider + '_' + key
print('DEBUG: registering provider config:', path)
if data.get('type') in ('string', 'path'):
BlendNetAddonPreferences.__annotations__[path] = StringProperty(
name = data.get('name'),
description = data.get('description'),
subtype = 'FILE_PATH' if data['type'] == 'path' else 'NONE',
update = BlendNet.addon.updateProviderSettings,
)
elif data.get('type') == 'choice':
BlendNetAddonPreferences.__annotations__[path] = EnumProperty(
name = data.get('name'),
description = data.get('description'),
items = data.get('values'),
update = BlendNet.addon.updateProviderSettings,
)
# Additional field to store the value as a string (otherwise it is hard to restore
# on init, when the enum value is an integer and the items list is not yet available)
BlendNetAddonPreferences.__annotations__[path+'_value'] = StringProperty(
name = data.get('name'),
description = data.get('description'),
)
else:
print('ERROR: Unknown provider "%s" setting "%s" type: %s' % (provider, key, data.get('type')))
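# A minimal sketch (not part of the addon) showing how a provider settings entry is
# expected to map to a preference property name; the example provider/key names are
# hypothetical and only mirror the path construction in loadProvidersSettings() above.
def _example_provider_setting_paths():
    example_settings = {'someprovider': {'project': {'type': 'string', 'name': 'Project'}}}
    paths = []
    for provider, provider_settings in example_settings.items():
        for key in provider_settings:
            paths.append('provider_' + provider + '_' + key)
    return paths  # -> ['provider_someprovider_project']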
def initPreferences():
'''Initialize the preferences with defaults'''
prefs = bpy.context.preferences.addons[__package__].preferences
# Set defaults for preferences
# Update resource_provider anyway to set the addon var
prefs.resource_provider = prefs.resource_provider or BlendNet.addon.getAddonDefaultProvider()
# The defaults for these properties are regenerated on every restart, so when the
# current value is empty we re-assign it here and let the property's update logic
# generate a new one (e.g. a fresh session id)
if prefs.session_id == '':
prefs.session_id = ''
if prefs.manager_password_hidden == '':
prefs.manager_password_hidden = ''
if prefs.agent_password_hidden == '':
prefs.agent_password_hidden = ''
BlendNet.addon.fillAvailableBlenderDists()
# Getting provider info to make sure all the settings are ok
# for current provider configuration
BlendNet.addon.getProviderInfo()
def register():
BlendNet.addon.initAddonLog()
BlendNet.providers.loadProviders()
loadProvidersSettings()
bpy.utils.register_class(BlendNetAddonPreferences)
initPreferences()
bpy.utils.register_class(BlendNetSceneSettings)
bpy.utils.register_class(BlendNetManagerTask)
bpy.utils.register_class(TASKS_UL_list)
bpy.utils.register_class(BlendNetSessionProperties)
bpy.utils.register_class(BlendNetRenderEngine)
bpy.utils.register_class(BlendNetRunTaskOperation)
bpy.utils.register_class(BlendNetTaskPreviewOperation)
bpy.utils.register_class(BlendNetTaskInfoOperation)
bpy.utils.register_class(BlendNetTaskMessagesOperation)
bpy.utils.register_class(BlendNetTaskDetailsOperation)
bpy.utils.register_class(BlendNetTaskDownloadOperation)
bpy.utils.register_class(BlendNetTaskRunOperation)
bpy.utils.register_class(BlendNetTaskStopOperation)
bpy.utils.register_class(BlendNetTasksStopStartedOperation)
bpy.utils.register_class(BlendNetTaskRemoveOperation)
bpy.utils.register_class(BlendNetTasksRemoveEndedOperation)
bpy.utils.register_class(BlendNetAgentRemoveOperation)
bpy.utils.register_class(BlendNetAgentCreateOperation)
bpy.utils.register_class(BlendNetTaskMenu)
bpy.utils.register_class(BlendNetGetServiceLogOperation)
bpy.utils.register_class(BlendNetGetAddonLogOperation)
bpy.utils.register_class(BlendNetGetNodeLogOperation)
bpy.utils.register_class(BlendNetRenderPanel)
bpy.utils.register_class(BlendNetToggleManager)
bpy.utils.register_class(BlendNetDestroyManager)
bpy.utils.register_class(BlendNetManagerPanel)
bpy.utils.register_class(BlendNetAgentsPanel)
def unregister():
bpy.utils.unregister_class(BlendNetAgentsPanel)
bpy.utils.unregister_class(BlendNetManagerPanel)
bpy.utils.unregister_class(BlendNetToggleManager)
bpy.utils.unregister_class(BlendNetDestroyManager)
bpy.utils.unregister_class(BlendNetRenderPanel)
bpy.utils.unregister_class(BlendNetGetNodeLogOperation)
bpy.utils.unregister_class(BlendNetGetAddonLogOperation)
bpy.utils.unregister_class(BlendNetGetServiceLogOperation)
bpy.utils.unregister_class(BlendNetTaskMenu)
bpy.utils.unregister_class(BlendNetTaskInfoOperation)
bpy.utils.unregister_class(BlendNetAgentCreateOperation)
bpy.utils.unregister_class(BlendNetAgentRemoveOperation)
bpy.utils.unregister_class(BlendNetTasksRemoveEndedOperation)
bpy.utils.unregister_class(BlendNetTaskRemoveOperation)
bpy.utils.unregister_class(BlendNetTasksStopStartedOperation)
bpy.utils.unregister_class(BlendNetTaskStopOperation)
bpy.utils.unregister_class(BlendNetTaskRunOperation)
bpy.utils.unregister_class(BlendNetTaskDownloadOperation)
bpy.utils.unregister_class(BlendNetTaskDetailsOperation)
bpy.utils.unregister_class(BlendNetTaskMessagesOperation)
bpy.utils.unregister_class(BlendNetTaskPreviewOperation)
bpy.utils.unregister_class(BlendNetRunTaskOperation)
bpy.utils.unregister_class(BlendNetRenderEngine)
bpy.utils.unregister_class(BlendNetSessionProperties)
bpy.utils.unregister_class(TASKS_UL_list)
bpy.utils.unregister_class(BlendNetManagerTask)
bpy.utils.unregister_class(BlendNetSceneSettings)
bpy.utils.unregister_class(BlendNetAddonPreferences)
if __name__ == '__main__':
register()
| 38.988288
| 131
| 0.608918
| 7,764
| 69,906
| 5.345183
| 0.094153
| 0.031952
| 0.017831
| 0.016434
| 0.481494
| 0.407157
| 0.359325
| 0.321012
| 0.299398
| 0.274386
| 0
| 0.003979
| 0.284582
| 69,906
| 1,792
| 132
| 39.010045
| 0.825822
| 0.037922
| 0
| 0.424242
| 0
| 0.000689
| 0.167421
| 0.012157
| 0
| 0
| 0
| 0.000558
| 0
| 1
| 0.054408
| false
| 0.028926
| 0.007576
| 0.009642
| 0.242424
| 0.009642
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d09d02c68d6758f8f9b41f85738e74eee8e7455d
| 3,601
|
py
|
Python
|
codonlib/codonlib.py
|
tmsincomb/codonlib
|
026bc475b3255831d749455b4c76250d56e4b91e
|
[
"MIT"
] | null | null | null |
codonlib/codonlib.py
|
tmsincomb/codonlib
|
026bc475b3255831d749455b4c76250d56e4b91e
|
[
"MIT"
] | null | null | null |
codonlib/codonlib.py
|
tmsincomb/codonlib
|
026bc475b3255831d749455b4c76250d56e4b91e
|
[
"MIT"
] | null | null | null |
"""Main module."""
from collections import defaultdict
from functools import cache
from itertools import product
from operator import itemgetter
from typing import List
import numpy as np
from Bio.Data.CodonTable import unambiguous_dna_by_id
class CodonDegeneracy:
def __init__(self, table_id: int = 1):
self.table_id = table_id
self.codontable_atlas = unambiguous_dna_by_id[self.table_id]
self.codon2aa = {}
self.aa2codons = defaultdict(list)
self.codon_table: np.char.array = None
self.nt_table: np.char.array = None
self.aa_table: np.char.array = None
self.__codon_aa_mappings()
def __codon_aa_mappings(self):
for codon, aa in self.codontable_atlas.forward_table.items():
self.codon2aa[codon] = aa
self.aa2codons[aa].append(codon)
for codon in self.codontable_atlas.stop_codons:
self.codon2aa[codon] = "*"
self.aa2codons["*"].append(codon)
def __create_tables(self):
codon_list = []
aa_list = []
for i, col_nt in enumerate(["T", "C", "A", "G"]):
for j, wobble_nt in enumerate(["T", "C", "A", "G"]):
for k, row_nt in enumerate(["T", "C", "A", "G"]):
codon = col_nt + row_nt + wobble_nt
aa = self.codon2aa[codon]
codon_list.append(codon)
aa_list.append(aa)
self.codon_table = np.char.array(codon_list, dtype=str).reshape((16, 4))
self.nt_table = self.codon_table.view("U1").reshape((16, 4, -1))
self.aa_table = np.char.array(aa_list).reshape((16, 4))
@cache
def __combinations(self, nt):
"""Expand the per-position nucleotide sets into all codons and map them to amino acids."""
codon_list = [x + y + z for x in nt[0] for y in nt[1] for z in nt[2]]
return set(itemgetter(*codon_list)(self.codon2aa))
@cache
def __get_aa_possibilities(self, codons: tuple) -> set:
"""
Get the amino acids encodable by the degenerate codon built from the given codons.
Parameters
----------
codons : tuple
Tuple of codons, one per targeted amino acid.
Examples
--------
>>> self.__get_aa_possibilities(('CCG', 'AAG'))
{'T', 'K', 'P', 'Q'}
Returns
-------
set
Amino acid symbol set.
"""
nt1, nt2, nt3 = frozenset(), frozenset(), frozenset()
for codon in codons:
nt1 |= frozenset(codon[0])
nt2 |= frozenset(codon[1])
nt3 |= frozenset(codon[2])
return self.__combinations((nt1, nt2, nt3))
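# Worked example (illustrative): for codons ('CCG', 'AAG') the per-position
# nucleotide sets are {C, A}, {C, A} and {G}, which expand to the codons
# CCG, CAG, ACG and AAG and therefore to the amino acids {P, Q, T, K} in the
# standard table, matching the docstring above.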
def off_targets(self, aa_list: List[str]) -> set:
"""
Get the off-target amino acids for a given list of amino acids.
Parameters
----------
aa_list : list
List of amino acid codons.
Returns
-------
set
Set of off-target amino acids.
"""
on_target_aa = set(aa_list)
off_target_aa: set = set()
off_target_best = [0] * 42  # sentinel longer than any possible amino-acid set (20 standard aa + stop)
all_codons_per_aa = [self.aa2codons[aa] for aa in aa_list]
for one_codon_selected_per_aa in product(*all_codons_per_aa):
aa_possibilities = self.__get_aa_possibilities(one_codon_selected_per_aa)
off_target_aa = aa_possibilities - on_target_aa
if len(off_target_aa) < len(off_target_best):
off_target_best = off_target_aa
# needs to return the best combo and the list of all equivalent combos so I know the "real" set of codons to return
# in real life we want all the combos, not just one per aa
return off_target_best
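# Usage sketch (not part of the library; assumes Biopython is installed): compute the
# off-target amino acids when trying to cover a small on-target set with one
# degenerate codon scheme using the standard codon table.
def _example_off_targets():
    deg = CodonDegeneracy(table_id=1)
    return deg.off_targets(['P', 'K'])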
| 34.295238
| 120
| 0.580117
| 477
| 3,601
| 4.155136
| 0.268344
| 0.045409
| 0.02775
| 0.040363
| 0.133199
| 0.094349
| 0.028759
| 0.020182
| 0
| 0
| 0
| 0.016013
| 0.306304
| 3,601
| 104
| 121
| 34.625
| 0.777422
| 0.170786
| 0
| 0.032787
| 0
| 0
| 0.00577
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.098361
| false
| 0
| 0.114754
| 0
| 0.278689
| 0.016393
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0a3ddac31091e48614d1cfeaf2b19071cf7215f
| 1,598
|
py
|
Python
|
gym_minigrid/envs/mygridworld.py
|
nathan-miller23/gym-minigrid
|
4ed9e6a511be88a49903f107003951977d86d842
|
[
"Apache-2.0"
] | null | null | null |
gym_minigrid/envs/mygridworld.py
|
nathan-miller23/gym-minigrid
|
4ed9e6a511be88a49903f107003951977d86d842
|
[
"Apache-2.0"
] | null | null | null |
gym_minigrid/envs/mygridworld.py
|
nathan-miller23/gym-minigrid
|
4ed9e6a511be88a49903f107003951977d86d842
|
[
"Apache-2.0"
] | null | null | null |
from gym_minigrid.minigrid import *
from gym_minigrid.register import register
class MyEnv(MyMiniGridEnv):
def __init__(self, size=9, max_steps=100, start_pos=(1, 1), good_goal_pos=None, bad_goal_pos=None, reward='sparse', good_goal_reward=10, bad_goal_reward=-10):
self.start_pos = start_pos
self.good_goal_pos = good_goal_pos
self.bad_goal_pos = bad_goal_pos
self.reward = reward
self.good_goal_reward = good_goal_reward
self.bad_goal_reward = bad_goal_reward
super(MyEnv, self).__init__(grid_size=size, max_steps=max_steps)
def _gen_grid(self, width, height):
self.grid = Grid(width, height)
self.grid.wall_rect(0, 0, width, height)
self.put_obj(GoodGoal(), *self.good_goal_pos)
self.put_obj(BadGoal(), *self.bad_goal_pos)
self.agent_pos = self.start_pos
self.agent_dir = 0
self.mission = "Be the best agent I can be"
def _reward(self):
curr_cell = self.grid.get(*self.agent_pos)
if curr_cell.goal_type == 'good':
return self.good_goal_reward
elif curr_cell.goal_type == 'bad':
return self.bad_goal_reward
else:
raise ValueError("Called `self._reward()` at incorrect time!")
def dist_to_goal(self, pos):
x, y = pos
goal_x, goal_y = self.good_goal_pos
return abs(goal_x - x) + abs(goal_y - y)
def _dense_reward(self, s, s_prime):
if self.reward == 'sparse':
return 0
return self.dist_to_goal(s) - self.dist_to_goal(s_prime)
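# Usage sketch (illustrative; assumes the fork's MyMiniGridEnv base class follows the
# usual gym_minigrid reset()/step() interface). Goal positions are required because
# _gen_grid() places both goal objects.
def _example_my_env():
    env = MyEnv(size=9, start_pos=(1, 1), good_goal_pos=(7, 7), bad_goal_pos=(7, 1))
    obs = env.reset()
    return env, obs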
| 30.730769
| 162
| 0.646433
| 238
| 1,598
| 4
| 0.285714
| 0.07563
| 0.057773
| 0.047269
| 0.031513
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011706
| 0.251564
| 1,598
| 51
| 163
| 31.333333
| 0.784281
| 0
| 0
| 0
| 0
| 0
| 0.054477
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.057143
| 0
| 0.371429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0a857e2bc8d397632de8161aef9ad14a63435bd
| 1,033
|
py
|
Python
|
exchange/utils/http_util.py
|
inasie/PyExchange
|
7d40517c8145f92ac8068d5a0f25cc44ecddc82b
|
[
"MIT"
] | 12
|
2018-07-31T14:56:33.000Z
|
2021-05-24T23:47:51.000Z
|
exchange/utils/http_util.py
|
inasie/PyExchange
|
7d40517c8145f92ac8068d5a0f25cc44ecddc82b
|
[
"MIT"
] | 1
|
2018-08-17T09:16:04.000Z
|
2018-08-17T09:16:04.000Z
|
exchange/utils/http_util.py
|
inasie/PyExchange
|
7d40517c8145f92ac8068d5a0f25cc44ecddc82b
|
[
"MIT"
] | 8
|
2018-07-31T14:57:37.000Z
|
2021-05-24T23:47:52.000Z
|
# -*- coding: utf-8 -*-
import requests
import json
import logging
class HttpUtil:
"""
http util
"""
def get(self, url, params=None):
'''
get request
:param str url: url
:param dict params: query string parameters
:return: json object or json array
'''
resp = requests.get(url, params=params)
if resp.status_code != 200:
logging.error('get(%s) failed(%d)' % (url, resp.status_code))
if resp.text is not None:
logging.error('resp: %s' % resp.text)
return None
return json.loads(resp.text)
def get_raw(self, url):
'''
get request
:param str url: url
:return: response text
'''
resp = requests.get(url)
if resp.status_code != 200:
logging.error('get(%s) failed(%d)' % (url, resp.status_code))
if resp.text is not None:
logging.error('resp: %s' % resp.text)
return None
return resp.text
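# Usage sketch (illustrative; the URL and parameters are placeholders, not endpoints
# used by this project):
def _example_http_util():
    http = HttpUtil()
    data = http.get('https://api.example.com/ticker', params={'symbol': 'BTC'})
    if data is None:
        logging.warning('request failed, see error log above')
    return data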
| 25.825
| 73
| 0.522749
| 125
| 1,033
| 4.28
| 0.328
| 0.08972
| 0.104673
| 0.06729
| 0.534579
| 0.534579
| 0.44486
| 0.44486
| 0.44486
| 0.44486
| 0
| 0.010526
| 0.356244
| 1,033
| 39
| 74
| 26.487179
| 0.793985
| 0.178122
| 0
| 0.5
| 0
| 0
| 0.069612
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.15
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0aadc614a084b433d38993f99643e2433d5d14d
| 3,828
|
py
|
Python
|
pip_services3_datadog/clients/DataDogLogClient.py
|
pip-services3-python/pip-services3-datadog-python
|
5d4549685b8486f1fc663b0e50ea52d019095909
|
[
"MIT"
] | null | null | null |
pip_services3_datadog/clients/DataDogLogClient.py
|
pip-services3-python/pip-services3-datadog-python
|
5d4549685b8486f1fc663b0e50ea52d019095909
|
[
"MIT"
] | null | null | null |
pip_services3_datadog/clients/DataDogLogClient.py
|
pip-services3-python/pip-services3-datadog-python
|
5d4549685b8486f1fc663b0e50ea52d019095909
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import datetime
from typing import Optional, List, Any
from pip_services3_commons.config import ConfigParams
from pip_services3_commons.convert import StringConverter
from pip_services3_commons.errors import ConfigException
from pip_services3_commons.refer import IReferences
from pip_services3_components.auth import CredentialResolver
from pip_services3_rpc.clients import RestClient
from pip_services3_datadog.clients.DataDogLogMessage import DataDogLogMessage
class DataDogLogClient(RestClient):
__default_config: ConfigParams = ConfigParams.from_tuples(
"connection.protocol", "https",
"connection.host", "http-intake.logs.datadoghq.com",
"connection.port", 443,
"credential.internal_network", "true"
)
def __init__(self, config: ConfigParams = None):
super().__init__()
self.__credential_resolver = CredentialResolver()
if config:
self.configure(config)
self._base_route = 'v1'
def configure(self, config: ConfigParams):
config = self.__default_config.override(config)
super().configure(config)
self.__credential_resolver.configure(config)
def set_references(self, references: IReferences):
super().set_references(references)
self.__credential_resolver.set_references(references)
def open(self, correlation_id: Optional[str]):
credential = self.__credential_resolver.lookup(correlation_id)
if credential is None or credential.get_access_key() is None:
raise ConfigException(
correlation_id,
"NO_ACCESS_KEY",
"Missing access key in credentials"
)
self._headers = self._headers or {}
self._headers['DD-API-KEY'] = credential.get_access_key()
super().open(correlation_id)
def __convert_tags(self, tags: dict) -> Optional[str]:
if tags is None:
return
builder: str = ''
for key in tags:
if builder != '':
builder += ','
builder += key + ':' + tags[key]
return builder
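# Example (illustrative): a tags mapping such as {'env': 'prod', 'region': 'eu'} is
# flattened by __convert_tags into the DataDog tag string 'env:prod,region:eu'.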
def __convert_message(self, message: DataDogLogMessage) -> Any:
result = {
"timestamp": StringConverter.to_string(message.time or datetime.datetime),
"status": message.status or "INFO",
"ddsource": message.source or 'pip-services',
# "source": message.source or 'pip-services',
"service": message.service,
"message": message.message,
}
if message.tags:
result['ddtags'] = self.__convert_tags(message.tags)
if message.host:
result['host'] = message.host
if message.logger_name:
result['logger.name'] = message.logger_name
if message.thread_name:
result['logger.thread_name'] = message.thread_name
if message.error_message:
result['error.message'] = message.error_message
if message.error_kind:
result['error.kind'] = message.error_kind
if message.error_stack:
result['error.stack'] = message.error_stack
return result
def __convert_messages(self, messages: List[DataDogLogMessage]) -> List[Any]:
return list(map(lambda m: self.__convert_message(m), messages))
def send_logs(self, correlation_id: Optional[str], messages: List[DataDogLogMessage]) -> Any:
data = self.__convert_messages(messages)
# Commented instrumentation because otherwise it will never stop sending logs...
# timing = self._instrument(correlation_id, 'datadog.send_logs')
try:
return self._call("post", "input", None, None, data)
finally:
# timing.end_timing()
pass
| 35.119266
| 97
| 0.648642
| 405
| 3,828
| 5.891358
| 0.31358
| 0.020536
| 0.04694
| 0.038558
| 0.045264
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004196
| 0.252874
| 3,828
| 108
| 98
| 35.444444
| 0.83007
| 0.0593
| 0
| 0
| 0
| 0
| 0.086231
| 0.015855
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0.0125
| 0.1125
| 0.0125
| 0.3
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0aaf1fbb6455df5e13ef467cbacc908e5245647
| 2,802
|
py
|
Python
|
gui/composition_worker.py
|
ivanovwaltz/wavelet_sound_microscope
|
ff14d82135193a3d20543e84a9e6a81f884b1cf7
|
[
"MIT"
] | null | null | null |
gui/composition_worker.py
|
ivanovwaltz/wavelet_sound_microscope
|
ff14d82135193a3d20543e84a9e6a81f884b1cf7
|
[
"MIT"
] | null | null | null |
gui/composition_worker.py
|
ivanovwaltz/wavelet_sound_microscope
|
ff14d82135193a3d20543e84a9e6a81f884b1cf7
|
[
"MIT"
] | null | null | null |
import logging
import os
from functools import partial
from PIL.Image import Image
from PyQt5.QtCore import QObject, pyqtSignal, QThread
from PyQt5.QtWidgets import QProgressDialog
from .threading import QThreadedWorkerDebug as QThreadedWorker
from analyze.composition import Composition, Spectrogram
from analyze.media.sound import Sound, SoundResampled
from utils import ProgressProxy
SAMPLERATE = 1024 * 16
log = logging.getLogger(__name__)
class ProgressProxyToProgressDialog(ProgressProxy):
def __init__(self, progress_dialog, *args, **kwargs):
self.progress_dialog = progress_dialog
super().__init__(*args, **kwargs)
def start(self):
self.progress_dialog.reset()
self.progress_dialog.setRange(0, self.length)
def make_step(self):
super().make_step()
if self.progress_dialog.wasCanceled():
self.cancel()
def render_progress(self):
self.progress_dialog.setValue(self.pos)
def done(self):
log.debug('ProgressProxyToProgressDialog.done')
if getattr(self, 'canceled', False):
raise CompositionCanceled
def cancel(self):
self.canceled = True
raise StopIteration
class CompositionCanceled(Exception):
pass
class QCompositionWorker(QThreadedWorker):
def __init__(self):
super().__init__()
self.busy = False
self.process.connect(self._process)
process = pyqtSignal(Sound, QProgressDialog)
process_ok = pyqtSignal(Spectrogram)
process_error = pyqtSignal(str)
message = pyqtSignal(str)
def set_progress_value(self, val):
self._message('Progress value: {}'.format(val))
def _process(self, sound, progress_dialog):
log.debug('Before Image processed')
# FIXME Implement jobs queue. Just cancel previous here
if self.busy:
self.process_error.emit('Busy')
return
self.busy = True
self._message('Resample sound')
sound_resampled = SoundResampled(sound, SAMPLERATE)
progressbar = partial(ProgressProxyToProgressDialog, progress_dialog)
self._message('Prepare composition')
try:
with Composition(
sound_resampled, scale_resolution=1/155, omega0=70
) as composition:
self._message('Analyse')
spectrogram = composition.get_spectrogram(progressbar)
except CompositionCanceled:
log.debug('Composition canceled')
self.process_error.emit('Composition canceled')
return
else:
log.debug('Image processed')
self.process_ok.emit(spectrogram)
finally:
self.busy = False
def _message(self, msg):
self.message.emit(msg)
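# Wiring sketch (illustrative; assumes a running Qt application, an existing
# QProgressDialog and a loaded Sound object; the handler callables are hypothetical):
def _example_wire_worker(sound, progress_dialog, on_spectrogram, on_error):
    worker = QCompositionWorker()
    worker.process_ok.connect(on_spectrogram)
    worker.process_error.connect(on_error)
    worker.process.emit(sound, progress_dialog)
    return worker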
| 25.472727
| 77
| 0.664525
| 285
| 2,802
| 6.361404
| 0.382456
| 0.061776
| 0.05957
| 0.024269
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007619
| 0.250535
| 2,802
| 109
| 78
| 25.706422
| 0.855714
| 0.018915
| 0
| 0.055556
| 0
| 0
| 0.06589
| 0.012377
| 0
| 0
| 0
| 0.009174
| 0
| 1
| 0.138889
| false
| 0.013889
| 0.138889
| 0
| 0.402778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0ab2fa07ec216f8af2558d0b182fc6b664345b5
| 61,891
|
py
|
Python
|
metalpipe/node.py
|
zacernst/nanostream
|
382389b09c42b55c6bdb64c7b0017d4810c7165f
|
[
"MIT"
] | 2
|
2019-04-12T19:32:55.000Z
|
2019-12-24T16:50:09.000Z
|
metalpipe/node.py
|
zacernst/metalpipe
|
382389b09c42b55c6bdb64c7b0017d4810c7165f
|
[
"MIT"
] | 10
|
2019-04-03T01:25:52.000Z
|
2019-12-16T05:09:35.000Z
|
metalpipe/node.py
|
zacernst/nanostream
|
382389b09c42b55c6bdb64c7b0017d4810c7165f
|
[
"MIT"
] | 1
|
2019-04-17T12:55:19.000Z
|
2019-04-17T12:55:19.000Z
|
"""
Node module
===========
The ``node`` module contains the ``MetalNode`` class, which is the foundation
for MetalPipe.
"""
import time
import datetime
import uuid
import importlib
import logging
import os
import threading
import pprint
import sys
import copy
import random
import functools
import csv
import MySQLdb
import re
import io
import yaml
import types
import inspect
import prettytable
import requests
import graphviz
from timed_dict.timed_dict import TimedDict
from metalpipe.message.batch import BatchStart, BatchEnd
from metalpipe.message.message import MetalPipeMessage
from metalpipe.node_queue.queue import MetalPipeQueue
from metalpipe.message.canary import Canary
from metalpipe.utils.set_attributes import set_kwarg_attributes
from metalpipe.utils.data_structures import Row, MySQLTypeSystem
from metalpipe.utils import data_structures as ds
# from metalpipe.metalpipe_recorder import RedisFixturizer
from metalpipe.utils.helpers import (
load_function,
replace_by_path,
remap_dictionary,
set_value,
get_value,
to_bool,
aggregate_values,
)
DEFAULT_MAX_QUEUE_SIZE = int(os.environ.get("DEFAULT_MAX_QUEUE_SIZE", 128))
MONITOR_INTERVAL = 1
STATS_COUNTER_MODULO = 4
LOGJAM_THRESHOLD = 0.25
SHORT_DELAY = 0.1
PROMETHEUS = False
def no_op(*args, **kwargs):
"""
No-op function to serve as default ``get_runtime_attrs``.
"""
return None
class bcolors:
"""
This class holds the values for the various colors that are used in the
tables that monitor the status of the nodes.
"""
HEADER = "\033[95m"
OKBLUE = "\033[94m"
OKGREEN = "\033[92m"
WARNING = "\033[93m"
FAIL = "\033[91m"
ENDC = "\033[0m"
BOLD = "\033[1m"
UNDERLINE = "\033[4m"
class NothingToSeeHere:
"""
Vacuous class used as a no-op message type.
"""
pass
class Terminated:
"""
Class whose instances are optionally sent when a node is done processing messages
(i.e. when its upstream nodes have finished).
"""
def __init__(self, node):
self.node = node
class MetalNode:
"""
The foundational class of `MetalPipe`. This class is inherited by all
nodes in a computation graph.
Order of operations:
1. Child class ``__init__`` function
2. ``MetalNode`` ``__init__`` function
3. ``preflight_function`` (Specified in initialization params)
4. ``setup``
5. start
These methods have the following intended uses:
1. ``__init__`` Sets attribute values and calls the ``MetalNode`` ``__init__``
method.
2. ``get_runtime_attrs`` Sets any attribute values that are to be determined
at runtime, e.g. by checking environment variables or reading values
from a database. The ``get_runtime_attrs`` should return a dictionary
of attributes -> values, or else ``None``.
3. ``setup`` Sets the state of the ``MetalNode`` and/or creates any attributes
that require information available only at runtime.
Args:
send_batch_markers: If ``True``, then a ``BatchStart`` marker will
be sent when a new input is received, and a ``BatchEnd`` will be sent
after the input has been processed. The intention is that a number of
items will be emitted for each input received. For example, we might
emit a table row-by-row for each input.
get_runtime_attrs: A function that returns a dictionary-like object.
The keys and values will be saved to this ``MetalNode`` object's
attributes. The function is executed one time, upon starting the node.
get_runtime_attrs_args: A tuple of arguments to be passed to the
``get_runtime_attrs`` function upon starting the node.
get_runtime_attrs_kwargs: A dictionary of kwargs passed to the
``get_runtime_attrs`` function.
runtime_attrs_destinations: If set, this is a dictionary mapping
the keys returned from the ``get_runtime_attrs`` function to the
names of the attributes to which the values will be saved.
throttle: For each input received, a delay of ``throttle`` seconds
will be added.
keep_alive: If ``True``, keep the node's thread alive after
everything has been processed.
name: The name of the node. Defaults to a randomly generated hash.
Note that this hash is not consistent from one run to the next.
input_mapping: When the node receives a dictionary-like object,
this dictionary will cause the keys of the dictionary to be remapped
to new keys.
retain_input: If ``True``, then combine the dictionary-like input
with the output. If keys clash, the output value will be kept.
input_message_keypath: Read the value in this keypath as the content
of the incoming message.
"""
def __init__(
self,
*args,
batch=False,
get_runtime_attrs=no_op,
get_runtime_attrs_args=None,
get_runtime_attrs_kwargs=None,
runtime_attrs_destinations=None,
input_mapping=None,
retain_input=True,
throttle=0,
keep_alive=True,
max_errors=0,
max_messages_received=None,
name=None,
input_message_keypath=None,
key=None,
messages_received_counter=0,
prefer_existing_value=False,
messages_sent_counter=0,
post_process_function=None,
post_process_keypath=None,
summary="",
fixturize=False,
post_process_function_kwargs=None,
output_key=None,
break_test=None,
send_termination_message=False,
**kwargs
):
self.name = name or uuid.uuid4().hex
self.input_mapping = input_mapping or {}
self.input_queue_list = []
self.output_queue_list = []
self.input_node_list = []
self.queue_event = threading.Event()
self.input_message_keypath = input_message_keypath or []
self.output_node_list = []
self.max_messages_received = max_messages_received
self.global_dict = None # We'll add a dictionary upon startup
self.terminate = False
self.thread_dict = {}
self.kill_thread = False
self.prefer_existing_value = prefer_existing_value
self.accumulator = {}
self.output_key = output_key
self.fixturize = fixturize
self.keep_alive = keep_alive
self.retain_input = (
retain_input # Keep the input dictionary and send it downstream
)
if break_test is not None:
self.break_test = load_function(break_test)
else:
self.break_test = None
self.throttle = throttle
self.get_runtime_attrs = get_runtime_attrs
self.get_runtime_attrs_args = get_runtime_attrs_args or tuple()
self.cleanup_called = False
self.get_runtime_attrs_kwargs = get_runtime_attrs_kwargs or {}
self.runtime_attrs_destinations = runtime_attrs_destinations or {}
self.key = key
self.messages_received_counter = messages_received_counter
self.messages_sent_counter = messages_sent_counter
self.instantiated_at = datetime.datetime.now()
self.started_at = None
self.stopped_at = None
self.error_counter = 0
self.status = "stopped" # running, error, success
self.max_errors = max_errors
self.post_process_function_name = (
post_process_function # Function to be run on result
)
self.post_process_function_kwargs = post_process_function_kwargs or {}
self.summary = summary
self.prometheus_objects = None
self.logjam_score = {"polled": 0.0, "logjam": 0.0}
self.send_termination_message = send_termination_message
# Get post process function if one is named
if self.post_process_function_name is not None:
components = self.post_process_function_name.split("__")
if len(components) == 1:
module = None
function_name = components[0]
self.post_process_function = globals()[function_name]
else:
module = ".".join(components[:-1])
function_name = components[-1]
module = importlib.import_module(module)
self.post_process_function = getattr(module, function_name)
else:
self.post_process_function = None
self.post_process_keypath = (
post_process_keypath.split(".")
if post_process_keypath is not None
else None
)
if self.fixturize:
# NOTE: RedisFixturizer requires the metalpipe_recorder import that is commented out near the top of this module
self.fixturizer = RedisFixturizer()
else:
self.fixturizer = None
def setup(self):
"""
For classes that require initialization at runtime, which can't be done
when the class's ``__init__`` function is called. The ``MetalNode`` base
class's setup function is just a logging call.
It should be unusual to have to make use of ``setup`` because in practice,
initialization can be done in the ``__init__`` function.
"""
logging.debug(
"No ``setup`` method for {class_name}.".format(
class_name=self.__class__.__name__
)
)
pass
def __gt__(self, other):
"""
Convenience method so that we can link two nodes by ``node1 > node2``.
This just calls ``add_edge``.
"""
self.add_edge(other)
return other
@property
def is_source(self):
"""
Tests whether the node is a source or not, i.e. whether there are no
inputs to the node.
Returns:
(bool): ``True`` if the node has no inputs, ``False`` otherwise.
"""
return len(self.input_queue_list) == 0
@property
def is_sink(self):
"""
Tests whether the node is a sink or not, i.e. whether there are no
outputs from the node.
Returns:
(bool): ``True`` if the node has no output nodes, ``False`` otherwise.
"""
return len(self.output_queue_list) == 0
def add_edge(self, target, **kwargs):
"""
Create an edge connecting `self` to `target`.
This method instantiates the ``MetalPipeQueue`` object that connects the
nodes. Connecting the nodes together consists in (1) adding the queue to
the other's ``input_queue_list`` or ``output_queue_list`` and (2) setting
the queue's ``source_node`` and ``target_node`` attributes.
Args:
target (``MetalNode``): The node to which ``self`` will be connected.
"""
max_queue_size = kwargs.get("max_queue_size", DEFAULT_MAX_QUEUE_SIZE)
edge_queue = MetalPipeQueue(max_queue_size)
self.output_node_list.append(target)
target.input_node_list.append(self)
edge_queue.source_node = self
edge_queue.target_node = target
target.input_queue_list.append(edge_queue)
self.output_queue_list.append(edge_queue)
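# Example (illustrative): ``node_a > node_b`` is shorthand for ``node_a.add_edge(node_b)``;
# either form creates one MetalPipeQueue whose source_node is node_a and whose
# target_node is node_b, and appends it to node_a.output_queue_list and
# node_b.input_queue_list.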
def _get_message_content(self, one_item):
# Get the content of a specific keypath, if one has
# been defined in the ``MetalNode`` initialization.
message_content = (
get_value(one_item.message_content, self.input_message_keypath)
if len(self.input_message_keypath) > 0
else one_item.message_content
)
if (
isinstance(message_content, (dict,))
and len(message_content) == 1
and "__value__" in message_content
):
message_content = message_content["__value__"]
return message_content
def wait_for_pipeline_finish(self):
while not self.pipeline_finished:
time.sleep(SHORT_DELAY)
def start(self):
"""
Starts the node. This is called by ``MetalNode.global_start()``.
The node's main loop is contained in this method. The main loop does
the following:
1. records the timestamp to the node's ``started_at`` attribute.
#. calls ``get_runtime_attrs`` (TODO: check if we can deprecate this)
#. calls the ``setup`` method for the class (which is a no-op by default)
#. if the node is a source, then successively yield all the results of
the node's ``generator`` method, then exit.
#. if the node is not a source, then loop over the input queues, getting
the next message. Note that when the message is pulled from the queue,
the ``MetalPipeQueue`` yields it as a dictionary.
#. gets either the content of the entire message if the node has no ``key``
attribute, or the value of ``message[self.key]``.
#. remaps the message content if a ``remapping`` dictionary has been
given in the node's configuration
#. calls the node's ``process_item`` method, yielding back the results.
(Note that a single input message may cause the node to yield zero,
one, or more than one output message.)
#. places the results into each of the node's output queues.
"""
self.started_at = datetime.datetime.now()
logging.debug("Starting node: {node}".format(node=self.__class__.__name__))
# ``get_runtime_attrs`` returns a dict-like object whose keys and
# values are stored as attributes of the ``MetalNode`` object.
if self.get_runtime_attrs is not None:
pre_flight_results = (
self.get_runtime_attrs(
*self.get_runtime_attrs_args, **self.get_runtime_attrs_kwargs
)
or {}
)
if self.runtime_attrs_destinations is not None:
for key, value in pre_flight_results.items():
setattr(self, self.runtime_attrs_destinations[key], value)
elif self.runtime_attrs_destinations is None:
for key, value in pre_flight_results.items():
setattr(self, key, value)
else:
raise Exception(
"There is a ``get_runtime_attrs``, but the "
"``runtime_attrs_destinations`` is neither None nor a "
"dict-like object."
)
# We have to separate the pre-flight function, the setup of the
# class, and any necessary startup functions (such as connecting
# to a database).
self.setup() # Setup function?
if self.is_source and not isinstance(self, (DynamicClassMediator,)):
for output in self.generator():
if self.fixturizer:
self.fixturizer.record_source_node(self, output)
yield output, None
for output in self._cleanup():
yield output, None
else:
logging.debug(
"About to enter loop for reading input queue in {node}.".format(
node=str(self)
)
)
# insert conditions for having no more messages to read...
upstream_nodes_finished = all(
input_node.cleanup_called for input_node in self.input_node_list
)
input_queues_empty = self.is_source or self.input_queues_empty()
while not (upstream_nodes_finished and input_queues_empty):
for input_queue in self.input_queue_list:
one_item = input_queue.get()
####
if self.terminate:
# self.finished = True
break
if one_item is None:
continue
# Keep track of where the message came from, useful for
# managing streaming joins, e.g.
message_source = input_queue.source_node
self.messages_received_counter += 1
if (
self.max_messages_received is not None
and self.messages_received_counter > self.max_messages_received
):
self.finished = True
break
# The ``throttle`` keyword introduces a delay in seconds
time.sleep(self.throttle)
# Retrieve the ``message_content``
message_content = self._get_message_content(one_item)
# If we receive ``None`` or a ``NothingToSeeHere``, continue.
if message_content is None or isinstance(
message_content, (NothingToSeeHere,)
):
continue
# Record the message and its source in the node's attributes
self.message = message_content
self.message_source = message_source
# Otherwise, process the message as usual, by calling
# the ``MetalNode`` object's ``process_item`` method.
for output in self._process_item():
# Put redis recording here
if self.fixturizer:
self.fixturizer.record_worker_node(self, one_item, output)
yield output, one_item # yield previous message
### Do the self.break_test() if it's been defined
### Execute the function and break
### if it returns True
if self.break_test is not None and not self.finished:
self.log_info("running break_test.")
break_test_result = self.break_test(
output_message=output, input_message=self.__message__,
)
self.log_info("NODE BREAK TEST: " + str(break_test_result))
# self.finished = break_test_result
# Check input node(s) here to see if they're all ``.finished``
upstream_nodes_finished = all(
input_node.cleanup_called for input_node in self.input_node_list
)
input_queues_empty = self.is_source or self.input_queues_empty()
self.log_info("checking whether cleanup is a generator. " + str(self.name))
for i in self._cleanup():
yield i, None
@property
def upstream_nodes_finished(self):
return all(input_node.cleanup_called for input_node in self.input_node_list)
@property
def finished(self):
"""
A node is considered "finished" if:
1. All of its immediate parents are "finished" (including if the node
is a generator and has no parents);
2. All of its input queues are empty;
3. It is not processing any messages;
4. Its ``cleanup`` method (if any) has been called.
Alternatively, a node is forced to be in a "finished" state if the
pipeline is being terminated. This causes each node's ``terminate``
attribute to be set to ``True``.
"""
input_queues_empty = self.is_source or self.input_queues_empty()
return (
self.upstream_nodes_finished and input_queues_empty and self.cleanup_called
) or self.terminate
def input_queues_empty(self):
"""
Tests whether there are any messages on any of the node's input
queues.
Returns:
bool: ``True`` if input queues are all empty.
"""
return all(queue.empty for queue in self.input_queue_list)
def cleanup(self):
"""
If there is any cleanup (closing files, shutting down database connections),
necessary when the node is stopped, then the node's class should provide
a ``cleanup`` method. By default, the method is just a logging statement.
"""
self.log_info("in null cleanup")
yield NothingToSeeHere()
def _cleanup(self):
self.log_info("Cleanup called after shutdown.")
for i in self.cleanup():
yield i
# Send termination message here
if self.send_termination_message:
yield Terminated(self)
for q in self.output_queue_list:
while not q.empty:
pass
self.log_info("setting cleanup_called to True")
self.cleanup_called = True
def log_info(self, message=""):
logging.info(
"{node_name}: {message}".format(node_name=self.name, message=message)
)
def terminate_pipeline(self, error=False):
"""
This method can be called on any node in a pipeline, and it will cause
all of the nodes to terminate if they haven't stopped already.
Args:
error (bool): Not yet implemented.
"""
self.log_info("terminate_pipeline called..." + str(self.name))
for node in self.all_connected():
node.terminate = True
for q in node.output_queue_list:
q.drain()
# if not node.finished:
# node.stopped_at = datetime.datetime.now()
# print('setting node.terminate')
# node.terminate = True
def process_item(self, *args, **kwargs):
"""
Default no-op for nodes.
"""
pass
def generator(self):
"""
If there is no ``generator`` method, then call the node's ``process_item``
method instead, assuming that there is code to accommodate this case.
"""
for i in self.process_item():
yield i
@property
def __message__(self):
"""
If the node has an ``output_key`` defined, return the corresponding
value in the message dictionary. If it does not, return the entire
message dictionary.
Nodes should access the content of their incoming message via this
property.
"""
if self.key is None:
out = self.message
elif isinstance(self.key, (str,)):
out = self.message[self.key]
elif isinstance(self.key, (list,)):
out = get_value(self.message, self.key)
else:
raise Exception("Bad type for input key.")
return out
def _process_item(self, *args, **kwargs):
"""
This method wraps the node's ``process_item`` method. It provides a place
to insert code for logging, error handling, etc.
There's lots of experimental code here, particularly the code for
Prometheus monitoring.
"""
# Swap out the message if ``key`` is specified
# If we're using prometheus, then increment a counter
if self.prometheus_objects is not None:
self.prometheus_objects["incoming_message_summary"].observe(random.random())
message_arrival_time = time.time()
try:
for out in self.process_item(*args, **kwargs):
if (
not isinstance(out, (dict, NothingToSeeHere))
and self.output_key is None
):
logging.debug("Exception raised due to no key" + str(self.name))
raise Exception(
"Either message must be a dictionary or `output_key` "
"must be specified. {name}".format(self.name)
)
# Apply post_process_function if it's defined
if self.post_process_function is not None:
set_value(
out,
self.post_process_keypath,
self.post_process_function(
get_value(out, self.post_process_keypath),
**self.post_process_function_kwargs
),
)
if self.prometheus_objects is not None:
self.prometheus_objects["outgoing_message_summary"].set(
time.time() - message_arrival_time
)
yield out
except Exception as err:
self.error_counter += 1
logging.error(
"message: "
+ str(err.args)
+ str(self.__class__.__name__)
+ str(self.name)
)
if self.error_counter > self.max_errors:
self.terminate_pipeline(error=True)
self.status = "error" #
else:
logging.warning("oops")
def stream(self):
"""
Called in each ``MetalNode`` thread.
"""
self.status = "running"
if getattr(self, "_import_pydatalog", False):
from pyDatalog import pyDatalog, Logic
Logic(self.logic_engine)
try:
for output, previous_message in self.start():
logging.debug("In MetalNode.stream.stream() --> " + str(output))
for output_queue in self.output_queue_list:
self.messages_sent_counter += 1
output_queue.put(
output,
block=True,
timeout=None,
queue_event=self.queue_event,
previous_message=previous_message,
)
# if 1 or not isinstance(output, (NothingToSeeHere,)) and output is not None:
except Exception as error:
self.status = "error"
self.stopped_at = datetime.datetime.now()
raise error
self.status = "success"
self.stopped_at = datetime.datetime.now()
@property
def time_running(self):
"""
Return the number of wall-clock seconds elapsed since the node was
started.
"""
if self.status == "stopped":
return None
elif self.status == "running":
return datetime.datetime.now() - self.started_at
elif self.stopped_at is None:
return datetime.datetime.now() - self.started_at
else:
return self.stopped_at - self.started_at
def all_connected(self, seen=None):
"""
Returns all the nodes connected (directly or indirectly) to ``self``.
This allows us to loop over all the nodes in a pipeline even if we
have a handle on only one. This is used by ``global_start``, for
example.
Args:
seen (set): A set of all the nodes that have been identified as
connected to ``self``.
Returns:
(set of ``MetalNode``): All the nodes connected to ``self``. This
includes ``self``.
"""
seen = seen or set()
if isinstance(self, (DynamicClassMediator,)):
for node_name, node_dict in self.node_dict.items():
node_obj = node_dict["obj"]
seen = seen | node_obj.all_connected(seen=seen)
else:
if self not in seen:
seen.add(self)
for node in self.input_node_list + self.output_node_list:
if node in seen:
continue
seen.add(node)
seen = seen | node.all_connected(seen=seen)
return seen
def broadcast(self, broadcast_message):
"""
Puts the message into all the input queues for all connected nodes.
"""
for node in self.all_connected():
for input_queue in node.input_queue_list:
input_queue.put(broadcast_message)
@property
def logjam(self):
"""
Returns the logjam score, which measures the degree to which the
node is holding up progress in downstream nodes.
We're defining a logjam as a node whose input queue is full, but
whose output queue(s) is not. More specifically, we poll each node
in the ``monitor_thread``, and increment a counter if the node is
a logjam at that time. This property returns the percentage of
samples in which the node is a logjam. Our intention is that if
this score exceeds a threshold, the user is alerted, or the load
is rebalanced somehow (not yet implemented).
Returns:
(float): Logjam score
"""
if self.logjam_score["polled"] == 0:
return 0.0
else:
return self.logjam_score["logjam"] / self.logjam_score["polled"]
def global_start(
self, prometheus=False, pipeline_name=None, max_time=None, fixturize=False,
):
"""
Starts every node connected to ``self``. Mainly, it:
1. calls ``start()`` on each node
#. sets some global variables
#. optionally starts some experimental code for monitoring
"""
def prometheus_init():
"""
Experimental code for enabling Prometheus monitoring.
"""
from prometheus_client import (
start_http_server,
Summary,
Gauge,
Histogram,
Counter,
)
for node in self.all_connected():
node.prometheus_objects = {}
summary = Summary(
node.name + "_incoming", "Summary of incoming messages"
)
node.prometheus_objects["incoming_message_summary"] = summary
node.prometheus_objects["outgoing_message_summary"] = Gauge(
node.name + "_outgoing", "Summary of outgoing messages"
)
start_http_server(8000)
if PROMETHEUS:
prometheus_init()
# thread_dict = self.thread_dict
global_dict = {}
run_id = uuid.uuid4().hex
for node in self.all_connected():
# Set the pipeline name on the attribute of each node
node.pipeline_name = pipeline_name or uuid.uuid4().hex
# Set a unique run_id
node.run_id = run_id
node.fixturize = fixturize
node.global_dict = global_dict # Establishing shared globals
logging.debug("global_start:" + str(self))
# Create thread event here?
thread = threading.Thread(
target=MetalNode.stream, args=(node,), daemon=False
)
thread.start()
node.thread_dict = self.thread_dict
self.thread_dict[node.name] = thread
node.status = "running"
monitor_thread = threading.Thread(
target=MetalNode.thread_monitor,
args=(self,),
kwargs={"max_time": max_time},
daemon=True,
)
monitor_thread.start()
@property
def input_queue_size(self):
"""
Return the total number of items in all of the queues that are inputs
to this node.
"""
return sum([input_queue.queue.qsize() for input_queue in self.input_queue_list])
def kill_pipeline(self):
for node in self.all_connected():
node.finished = True
def draw_pipeline(self):
"""
Draw the pipeline structure using graphviz.
"""
dot = graphviz.Digraph()
for node in self.all_connected():
dot.node(node.name, node.name, shape="box")
for node in self.all_connected():
for target_node in node.output_node_list:
dot.edge(node.name, target_node.name)
dot.render("pipeline_drawing.gv", view=True)
@property
def pipeline_finished(self):
finished = all(node.cleanup_called for node in self.all_connected())
self.log_info("finished. " + str(self.name))
return finished
def thread_monitor(self, max_time=None):
"""
This function loops over all of the threads in the pipeline, checking
that they are either ``finished`` or ``running``. If any have had an
abnormal exit, terminate the entire pipeline.
"""
counter = 0
error = False
time_started = time.time()
while not self.pipeline_finished:
logging.debug("MONITOR THREAD")
time.sleep(MONITOR_INTERVAL)
counter += 1
if max_time is not None:
print("checking max_time...")
if time.time() - time_started >= max_time:
self.pipeline_finished = True
print("finished because of max_time")
for node in self.all_connected():
node.finished = True
continue
# Check whether all the workers have ``.finished``
# self.pipeline_finished = all(
# node.finished for node in self.all_connected())
if counter % STATS_COUNTER_MODULO == 0:
table = prettytable.PrettyTable(
["Node", "Class", "Received", "Sent", "Queued", "Status", "Time",]
)
for node in sorted(list(self.all_connected()), key=lambda x: x.name):
if node.status == "running":
status_color = bcolors.WARNING
elif node.status == "stopped":
status_color = ""
elif node.status == "error":
status_color = bcolors.FAIL
error = True
elif node.status == "success":
status_color = bcolors.OKGREEN
else:
assert False
if node.logjam >= LOGJAM_THRESHOLD:
logjam_color = bcolors.FAIL
else:
logjam_color = ""
table.add_row(
[
logjam_color + node.name + bcolors.ENDC,
node.__class__.__name__,
node.messages_received_counter,
node.messages_sent_counter,
node.input_queue_size,
status_color + node.status + bcolors.ENDC,
node.time_running,
]
)
self.log_info("\n" + str(table))
if error:
logging.error("Terminating due to error.")
self.terminate_pipeline(error=True)
# self.pipeline_finished = True
break
# Check for blocked nodes
for node in self.all_connected():
input_queue_full = [
input_queue.approximately_full()
for input_queue in node.input_queue_list
]
output_queue_full = [
output_queue.approximately_full()
for output_queue in node.output_queue_list
]
logjam = (
not node.is_source
and all(input_queue_full)
and not any(output_queue_full)
)
node.logjam_score["polled"] += 1
logging.debug("LOGJAM SCORE: {logjam}".format(logjam=str(node.logjam)))
if logjam:
node.logjam_score["logjam"] += 1
logging.debug(
"LOGJAM {logjam} {name}".format(logjam=logjam, name=node.name)
)
self.log_info("Pipeline finished.")
self.log_info("Sending terminate signal to nodes.")
self.log_info("Messages that are being processed will complete.")
# HERE
if error:
self.log_info("Abnormal exit")
sys.exit(1)
else:
self.log_info("Normal exit.")
sys.exit(0)
class CounterOfThings(MetalNode):
def bar__init__(self, *args, start=0, end=None, **kwargs):
self.start = start
self.end = end
super(CounterOfThings, self).__init__(*args, **kwargs)
def generator(self):
"""
Just start counting integers
"""
counter = 1
while 1:
yield counter
counter += 1
if counter > 10:
assert False
class FunctionOfMessage(MetalNode):
def __init__(self, function_name, *args, **kwargs):
self.function_name = function_name
components = self.function_name.split("__")
if len(components) == 1:
module = None
function_name = components[0]
function_obj = globals()[function_name]
else:
module = ".".join(components[:-1])
function_name = components[-1]
module = importlib.import_module(module)
function = getattr(module, function_name)
self.function = function
super(FunctionOfMessage, self).__init__(*args, **kwargs)
def process_item(self):
yield self.function(self.__message__)
class MockNode(MetalNode):
"""
This is only intended for doing unit tests, etc.
"""
def __init__(self, **kwargs):
self.message_holder = None
self.message_counter = 0
self.message_list = []
super(MockNode, self).__init__(**kwargs)
def process_item(self):
self.message_holder = self.__message__
self.message_list.append(self.__message__)
self.message_counter += 1
yield NothingToSeeHere()
class InsertData(MetalNode):
def __init__(
self, overwrite=True, overwrite_if_null=True, value_dict=None, **kwargs
):
self.overwrite = overwrite
self.overwrite_if_null = overwrite_if_null
self.value_dict = value_dict or {}
super(InsertData, self).__init__(**kwargs)
def process_item(self):
logging.debug("INSERT DATA: " + str(self.__message__))
for key, value in self.value_dict.items():
if (
(key not in self.__message__)
or self.overwrite
                or (self.__message__.get(key) is None and self.overwrite_if_null)
):
self.__message__[key] = value
yield self.__message__
class RandomSample(MetalNode):
"""
Lets through only a random sample of incoming messages. Might be useful
for testing, or when only approximate results are necessary.
"""
    def __init__(self, sample=0.1, **kwargs):
        self.sample = sample
        super(RandomSample, self).__init__(**kwargs)
def process_item(self):
yield self.message if random.random() <= self.sample else None
class SubstituteRegex(MetalNode):
def __init__(self, match_regex=None, substitute_string=None, *args, **kwargs):
self.match_regex = match_regex
self.substitute_string = substitute_string
self.regex_obj = re.compile(self.match_regex)
super(SubstituteRegex, self).__init__(*args, **kwargs)
def process_item(self):
out = self.regex_obj.sub(self.substitute_string, self.message[self.key])
yield out
class CSVToDictionaryList(MetalNode):
def __init__(self, **kwargs):
super(CSVToDictionaryList, self).__init__(**kwargs)
def process_item(self):
csv_file_obj = io.StringIO(self.__message__)
csv_reader = csv.DictReader(csv_file_obj)
output = [row for row in csv_reader]
yield output
class SequenceEmitter(MetalNode):
"""
Emits ``sequence`` ``max_sequences`` times, or forever if
``max_sequences`` is ``None``.
"""
def __init__(self, sequence, *args, max_sequences=1, **kwargs):
self.sequence = sequence
self.max_sequences = max_sequences
super(SequenceEmitter, self).__init__(*args, **kwargs)
def generator(self):
"""
Emit the sequence ``max_sequences`` times.
"""
type_dict = {
"int": int,
"integer": int,
"str": str,
"string": str,
"float": float,
"bool": to_bool,
}
counter = 0
while counter < self.max_sequences:
for item in self.sequence:
if isinstance(item, (dict,)) and "value" in item and "type" in item:
item = type_dict[item["type"].lower()](item["value"])
item = {self.output_key: item}
yield item
counter += 1
def process_item(self):
"""
Emit the sequence ``max_sequences`` times.
"""
type_dict = {
"int": int,
"integer": int,
"str": str,
"string": str,
"float": float,
"bool": to_bool,
}
counter = 0
while counter < self.max_sequences:
for item in self.sequence:
if isinstance(item, (dict,)) and "value" in item and "type" in item:
item = type_dict[item["type"].lower()](item["value"])
item = {self.output_key: item}
yield item
counter += 1
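# Illustrative sketch (added for clarity, not part of the original module). It
# shows how SequenceEmitter's typed-item casting described in the docstring
# might be used; ``output_key="item"`` is an assumed MetalNode keyword here.
#
#     emitter = SequenceEmitter(
#         [{"type": "int", "value": "1"}, {"type": "str", "value": "two"}],
#         max_sequences=2,
#         output_key="item",
#     )
#     # Each emitted message would then look like {"item": 1} or {"item": "two"}.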
class GetEnvironmentVariables(MetalNode):
"""
This node reads environment variables and stores them in the message.
The required keyword argument for this node is ``environment_variables``,
which is a list of -- you guessed it! -- environment variables. By
default, they will be read and stored in the outgoing message under
keys with the same names as the environment variables. E.g. ``FOO_VAR``
    will be stored in the message ``{"FOO_VAR": whatever}``.
Optionally, you can provide a dictionary to the ``mappings`` keyword
argument, which maps environment variable names to new names. E.g.
if ``mappings = {"FOO_VAR": "bar_var"}``, then the value of ``FOO_VAR``
will be stored in the message ``{"bar_var": whatever}``.
If the environment variable is not defined, then its value will be
set to ``None``.
Args:
mappings (dict): An optional dictionary mapping environment variable
names to new names.
environment_variables (list): A list of environment variable names.
"""
def __init__(self, mappings=None, environment_variables=None, **kwargs):
self.environment_mappings = mappings or {}
self.environment_variables = environment_variables or []
super(GetEnvironmentVariables, self).__init__(**kwargs)
def generator(self):
environment = {
self.environment_mappings.get(
environment_variable, environment_variable
): os.environ.get(environment_variable, None)
for environment_variable in self.environment_variables
}
yield environment
def process_item(self):
environment = {
self.environment_mappings.get(
environment_variable, environment_variable
): os.environ.get(environment_variable, None)
for environment_variable in self.environment_variables
}
yield environment
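# Illustrative sketch (not from the original module). Following the docstring
# above: read two environment variables and rename one of them in the outgoing
# message. ``MY_TOKEN`` and ``token`` are made-up names for the example.
#
#     env_node = GetEnvironmentVariables(
#         environment_variables=["HOME", "MY_TOKEN"],
#         mappings={"MY_TOKEN": "token"},
#     )
#     # Yields e.g. {"HOME": "/home/user", "token": None} when MY_TOKEN is unset.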
class SimpleTransforms(MetalNode):
def __init__(
self,
missing_keypath_action="ignore",
starting_path=None,
transform_mapping=None,
target_value=None,
keypath=None,
**kwargs
):
self.missing_keypath_action = missing_keypath_action
self.transform_mapping = transform_mapping or []
self.functions_dict = {}
self.starting_path = starting_path
for transform in self.transform_mapping:
# Not doing the transforms; only loading the right functions here
function_name = transform.get("target_function", None)
full_function_name = function_name
if function_name is not None:
components = function_name.split("__")
if len(components) == 1:
module = None
function_name = components[0]
function_obj = globals()[function_name]
else:
module = ".".join(components[:-1])
function_name = components[-1]
module = importlib.import_module(module)
function = getattr(module, function_name)
self.functions_dict[full_function_name] = function
super(SimpleTransforms, self).__init__(**kwargs)
def process_item(self):
logging.debug("TRANSFORM " + str(self.name))
logging.debug(self.name + " " + str(self.message))
for transform in self.transform_mapping:
path = transform["path"]
target_value = transform.get("target_value", None)
function_name = transform.get("target_function", None)
starting_path = transform.get("starting_path", None)
if function_name is not None:
function = self.functions_dict[function_name]
else:
function = None
function_kwargs = transform.get("function_kwargs", None)
function_args = transform.get("function_args", None)
logging.debug(self.name + " calling replace_by_path:")
replace_by_path(
self.message,
tuple(path),
target_value=target_value,
function=function,
function_args=function_args,
starting_path=starting_path,
function_kwargs=function_kwargs,
)
logging.debug("after SimpleTransform: " + self.name + str(self.message))
yield self.message
class Serializer(MetalNode):
"""
Takes an iterable thing as input, and successively yields its items.
"""
def __init__(self, values=False, *args, **kwargs):
self.values = values
super(Serializer, self).__init__(**kwargs)
def process_item(self):
if self.__message__ is None:
yield None
elif self.values:
for item in self.__message__.values():
yield item
else:
for item in self.__message__:
logging.debug(self.name + " " + str(item))
yield item
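# Illustrative sketch (added; the node name is hypothetical). Serializer fans a
# single iterable message out into one message per item:
#
#     unpack = Serializer(name="unpack")
#     # An incoming message ["a", "b", "c"] is re-emitted as three messages:
#     # "a", then "b", then "c". With values=True, an incoming dict
#     # {"x": 1, "y": 2} yields 1 and then 2.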
class AggregateValues(MetalNode):
"""
    Aggregates the values found under ``tail_path`` in the incoming message.
"""
def __init__(self, values=False, tail_path=None, **kwargs):
self.tail_path = tail_path
self.values = values
super(AggregateValues, self).__init__(**kwargs)
def process_item(self):
values = aggregate_values(self.__message__, self.tail_path, values=self.values)
logging.debug("aggregate_values " + self.name + " " + str(values))
yield values
class Filter(MetalNode):
"""
Applies tests to each message and filters out messages that don't pass
Built-in tests:
key_exists
value_is_true
value_is_not_none
    Example (constructor arguments):
        {'test': 'key_exists',
         'test_keypath': 'mykey'}
"""
def __init__(self, test=None, test_keypath=None, value=True, *args, **kwargs):
self.test = test
self.value = value
self.test_keypath = test_keypath or []
super(Filter, self).__init__(*args, **kwargs)
@staticmethod
def _key_exists(message, key):
return key in message
@staticmethod
def _value_is_not_none(message, key):
logging.debug(
"value_is_not_none: {message} {key}".format(message=str(message), key=key)
)
return get_value(message, key) is not None
@staticmethod
def _value_is_true(message, key):
return to_bool(message.get(key, False))
def process_item(self):
if self.test in ["key_exists", "value_is_not_none", "value_is_true"]:
result = (
getattr(self, "_" + self.test)(self.__message__, self.test_keypath)
== self.value
)
else:
raise Exception("Unknown test: {test_name}".format(test_name=test))
if result:
logging.debug("Sending message through")
yield self.message
else:
logging.debug("Blocking message: " + str(self.__message__))
yield NothingToSeeHere()
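# Illustrative sketch (added; node names are hypothetical). Wiring a Filter
# between two of the nodes defined later in this module, using the ``>``
# connection operator seen in DynamicClassMediator below:
#
#     emitter = ConstantEmitter(thing={"status": True}, max_loops=3, name="src")
#     only_ok = Filter(test="value_is_true", test_keypath="status", name="only_ok")
#     printer = PrinterOfThings(name="out")
#     emitter > only_ok
#     only_ok > printer
#     # Messages failing the test are replaced with NothingToSeeHere(), so
#     # nothing is printed for them downstream.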
class StreamMySQLTable(MetalNode):
def __init__(
self,
*args,
host="localhost",
user=None,
table=None,
password=None,
database=None,
port=3306,
to_row_obj=False,
send_batch_markers=False,
**kwargs
):
self.host = host
self.user = user
self.to_row_obj = to_row_obj
self.password = password
self.database = database
self.port = port
self.table = table
self.send_batch_markers = send_batch_markers
super(StreamMySQLTable, self).__init__(**kwargs)
def setup(self):
        self.db = MySQLdb.connect(
            host=self.host, passwd=self.password, db=self.database, user=self.user, port=self.port,
        )
self.cursor = MySQLdb.cursors.DictCursor(self.db)
self.table_schema_query = (
"""SELECT column_name, column_type """
"""FROM information_schema.columns """
"""WHERE table_name='{table}';""".format(table=self.table)
)
print(self.table_schema_query)
# self.table_schema = self.get_schema()
# Need a mapping from header to MYSQL TYPE
# for mapping in self.table_schema:
# column = mapping["column_name"]
# type_string = mapping["column_type"]
# this_type = ds.MySQLTypeSystem.type_mapping(type_string)
# Unfinished experimental code
# Start here:
# store the type_mapping
# use it to cast the data into the MySQLTypeSchema
# ensure that the generator is emitting MySQLTypeSchema objects
# def get_schema(self):
# self.cursor.execute(self.table_schema_query)
# table_schema = self.cursor.fetchall()
# return table_schema
def generator(self):
if self.send_batch_markers:
yield BatchStart(schema=self.table_schema)
self.cursor.execute("""SELECT * FROM {table};""".format(table=self.table))
result = self.cursor.fetchone()
while result is not None:
yield result
result = self.cursor.fetchone()
if self.send_batch_markers:
yield BatchEnd()
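# Illustrative sketch (added; the connection values are placeholders, not
# settings from the original project). Configuring the MySQL source above:
#
#     table_source = StreamMySQLTable(
#         host="localhost", user="reader", password="secret",
#         database="mydb", table="events", send_batch_markers=False,
#     )
#     # After setup(), ``generator`` yields one dict per row via the DictCursor.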
class PrinterOfThings(MetalNode):
@set_kwarg_attributes()
def __init__(self, disable=False, pretty=False, prepend="printer: ", **kwargs):
self.disable = disable
self.pretty = pretty
super(PrinterOfThings, self).__init__(**kwargs)
logging.debug("Initialized printer...")
def process_item(self):
if not self.disable:
print(self.prepend)
if self.pretty:
pprint.pprint(self.__message__, indent=2)
else:
print(str(self.__message__))
print("\n")
print("------------")
yield self.message
class ConstantEmitter(MetalNode):
"""
Send a thing every n seconds
"""
def __init__(self, thing=None, max_loops=5, delay=0.5, **kwargs):
self.thing = thing
self.delay = delay
self.max_loops = max_loops
super(ConstantEmitter, self).__init__(**kwargs)
def generator(self):
counter = 0
while counter < self.max_loops:
if random.random() < -0.1:
assert False
time.sleep(self.delay)
yield self.thing
counter += 1
class TimeWindowAccumulator(MetalNode):
"""
Every N seconds, put the latest M seconds data on the queue.
"""
@set_kwarg_attributes()
def __init__(self, time_window=None, send_interval=None, **kwargs):
pass
class LocalFileReader(MetalNode):
@set_kwarg_attributes()
def __init__(
self,
directory=".",
send_batch_markers=True,
serialize=False,
read_mode="r",
filename=None,
**kwargs
):
super(LocalFileReader, self).__init__(**kwargs)
def process_item(self):
filename = "/".join([self.directory, self.filename or self.__message__])
with open(filename, self.read_mode) as file_obj:
if self.serialize:
for line in file_obj:
output = line
yield output
else:
output = file_obj.read()
yield output
class CSVReader(MetalNode):
@set_kwarg_attributes()
def __init__(self, **kwargs):
super(CSVReader, self).__init__(**kwargs)
def process_item(self):
file_obj = io.StringIO(self.__message__)
reader = csv.DictReader(file_obj)
for row in reader:
yield row
class LocalDirectoryWatchdog(MetalNode):
def __init__(self, directory=".", check_interval=3, **kwargs):
self.directory = directory
self.latest_arrival = time.time()
self.check_interval = check_interval
super(LocalDirectoryWatchdog, self).__init__(**kwargs)
def generator(self):
while self.keep_alive:
logging.debug("sleeping...")
time.sleep(self.check_interval)
time_in_interval = None
for filename in os.listdir(self.directory):
last_modified_time = os.path.getmtime(
"/".join([self.directory, filename])
)
if last_modified_time > self.latest_arrival:
yield "/".join([self.directory, filename])
if (
time_in_interval is None
or last_modified_time > time_in_interval
):
time_in_interval = last_modified_time
logging.debug("time_in_interval: " + str(time_in_interval))
if time_in_interval is not None:
self.latest_arrival = time_in_interval
class StreamingJoin(MetalNode):
"""
Joins two streams on a key, using exact match only. MVP.
"""
def __init__(self, window=30, streams=None, *args, **kwargs):
self.window = window
self.streams = streams
self.stream_paths = streams
self.buffers = {
stream_name: TimedDict(timeout=self.window)
for stream_name in self.stream_paths.keys()
}
super(StreamingJoin, self).__init__(*args, **kwargs)
def process_item(self):
"""
"""
value_to_match = get_value(
self.message, self.stream_paths[self.message_source.name]
)
# Check for matches in all other streams.
# If complete set of matches, yield the merged result
# If not, add it to the `TimedDict`.
yield ("hi")
class DynamicClassMediator(MetalNode):
def __init__(self, *args, **kwargs):
super(DynamicClassMediator, self).__init__(**kwargs)
for node_name, node_dict in self.node_dict.items():
cls_obj = node_dict["cls_obj"]
node_obj = cls_obj(**kwargs)
node_dict["obj"] = node_obj
for edge in self.raw_config["edges"]:
source_node_obj = self.node_dict[edge["from"]]["obj"]
target_node_obj = self.node_dict[edge["to"]]["obj"]
source_node_obj > target_node_obj
def bind_methods():
for attr_name in dir(DynamicClassMediator):
if attr_name.startswith("_"):
continue
attr_obj = getattr(DynamicClassMediator, attr_name)
if not isinstance(attr_obj, types.FunctionType):
continue
setattr(self, attr_name, types.MethodType(attr_obj, self))
bind_methods()
source = self.get_source()
self.input_queue_list = source.input_queue_list
sink = self.get_sink()
self.output_queue_list = sink.output_queue_list
self.output_node_list = sink.output_node_list
self.input_node_list = source.input_node_list
def get_sink(self):
sinks = self.sink_list()
if len(sinks) > 1:
raise Exception("`DynamicClassMediator` may have no more than one sink.")
elif len(sinks) == 0:
return None
return sinks[0]
def get_source(self):
sources = self.source_list()
if len(sources) > 1:
raise Exception("`DynamicClassMediator` may have no more than one source.")
elif len(sources) == 0:
return None
return sources[0]
def sink_list(self):
sink_nodes = []
for node_name, node_dict in self.node_dict.items():
node_obj = node_dict["obj"]
if len(node_obj.output_queue_list) == 0:
sink_nodes.append(node_obj)
return sink_nodes
def source_list(self):
source_nodes = [
node_dict["obj"]
for node_dict in self.node_dict.values()
if node_dict["obj"].is_source
]
return source_nodes
def hi(self):
return "hi"
def get_node_dict(node_config):
node_dict = {}
for node_config in node_config["nodes"]:
node_class = globals()[node_config["class"]]
node_name = node_config["name"]
node_dict[node_name] = {}
node_dict[node_name]["class"] = node_class
frozen_arguments = node_config.get("frozen_arguments", {})
node_dict[node_name]["frozen_arguments"] = frozen_arguments
node_obj = node_class(**frozen_arguments)
node_dict[node_name]["remapping"] = node_config.get("arg_mapping", {})
return node_dict
def kwarg_remapper(f, **kwarg_mapping):
reverse_mapping = {value: key for key, value in kwarg_mapping.items()}
logging.debug("kwarg_mapping:" + str(kwarg_mapping))
parameters = [i for i, _ in list(inspect.signature(f).parameters.items())]
for kwarg in parameters:
if kwarg not in kwarg_mapping:
reverse_mapping[kwarg] = kwarg
def remapped_function(*args, **kwargs):
remapped_kwargs = {}
for key, value in kwargs.items():
if key in reverse_mapping:
remapped_kwargs[reverse_mapping[key]] = value
logging.debug("renamed function with kwargs: " + str(remapped_kwargs))
return f(*args, **remapped_kwargs)
return remapped_function
def template_class(
class_name, parent_class, kwargs_remapping, frozen_arguments_mapping
):
kwargs_remapping = kwargs_remapping or {}
    if isinstance(parent_class, (str,)):
        parent_class = globals()[parent_class]
    # Resolve string class names before freezing ``__init__``.
    frozen_init = functools.partial(parent_class.__init__, **frozen_arguments_mapping)
    cls = type(class_name, (parent_class,), {})
setattr(cls, "__init__", kwarg_remapper(frozen_init, **kwargs_remapping))
return cls
def class_factory(raw_config):
new_class = type(raw_config["name"], (DynamicClassMediator,), {})
new_class.node_dict = get_node_dict(raw_config)
new_class.class_name = raw_config["name"]
new_class.edge_list_dict = raw_config.get("edges", [])
new_class.raw_config = raw_config
for node_name, node_config in new_class.node_dict.items():
_class = node_config["class"]
cls = template_class(
node_name,
_class,
node_config["remapping"],
node_config["frozen_arguments"],
)
setattr(cls, "raw_config", raw_config)
node_config["cls_obj"] = cls
# Inject?
globals()[new_class.__name__] = new_class
return new_class
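# Illustrative sketch (added). A minimal, hypothetical ``raw_config`` shape
# inferred from get_node_dict and the edge handling above; the node and class
# names are examples only:
#
#     raw_config = {
#         "name": "MyCompositeNode",
#         "nodes": [
#             {"name": "reader", "class": "CSVReader", "frozen_arguments": {}},
#             {"name": "printer", "class": "PrinterOfThings",
#              "frozen_arguments": {"prepend": "row: "}, "arg_mapping": {}},
#         ],
#         "edges": [{"from": "reader", "to": "printer"}],
#     }
#     MyCompositeNode = class_factory(raw_config)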
class Remapper(MetalNode):
def __init__(self, mapping=None, **kwargs):
self.remapping_dict = mapping or {}
super(Remapper, self).__init__(**kwargs)
def process_item(self):
logging.debug("Remapper {node}:".format(node=self.name) + str(self.__message__))
out = remap_dictionary(self.__message__, self.remapping_dict)
yield out
class BlackHole(MetalNode):
"""
Accepts any incoming message and promptly ignores it. Returns ``NothingToSeeHere``.
"""
def __init__(self, **kwargs):
super(BlackHole, self).__init__(**kwargs)
def process_item(self):
logging.debug(
"BlackHole {node}:".format(node=self.name) + str(self.__message__)
)
yield NothingToSeeHere()
class Blocker(BlackHole):
"""
Class that ignores all messages, but sends a message when all of its upstream
nodes have finished.
"""
def __init__(self, **kwargs):
kwargs.update({"send_termination_message": True})
super(Blocker, self).__init__(**kwargs)
class BatchMessages(MetalNode):
def __init__(
self, batch_size=None, batch_list=None, counter=0, timeout=5, **kwargs
):
self.batch_size = batch_size
self.timeout = timeout
self.counter = 0
self.batch_list = batch_list or []
super(BatchMessages, self).__init__(**kwargs)
def process_item(self):
self.counter += 1
self.batch_list.append(self.__message__)
logging.debug(self.name + " " + str(self.__message__))
out = NothingToSeeHere()
if self.counter % self.batch_size == 0:
out = self.batch_list
logging.debug("BatchMessages: " + str(out))
self.batch_list = []
yield out
def cleanup(self):
self.log_info(self.name + " in cleanup, sending remainder of batch...")
yield self.batch_list
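# Illustrative sketch (added; the numbers are examples). BatchMessages collects
# incoming messages and emits them as a list every ``batch_size`` messages, with
# any remainder flushed from ``cleanup``:
#
#     batcher = BatchMessages(batch_size=10, name="batcher")
#     # For 25 incoming messages this emits two lists of 10; the final list of 5
#     # is yielded when the node's cleanup hook runs.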
if __name__ == "__main__":
pass
| 35.185333
| 97
| 0.587194
| 7,050
| 61,891
| 4.934752
| 0.110638
| 0.014861
| 0.008537
| 0.010865
| 0.214602
| 0.168382
| 0.129348
| 0.097672
| 0.079103
| 0.071687
| 0
| 0.003666
| 0.325653
| 61,891
| 1,758
| 98
| 35.205347
| 0.829907
| 0.217641
| 0
| 0.247795
| 0
| 0
| 0.051118
| 0.005673
| 0
| 0
| 0
| 0.000569
| 0.002646
| 1
| 0.089065
| false
| 0.007937
| 0.032628
| 0.003527
| 0.184303
| 0.0097
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0b20f9be0257673e00f2b9f9aa968fab5295bbd | 3,091 | py | Python | tests/components/test_ts_component.py | T4rk1n/dazzler | 69c49422dc19c910445ab265b1d3481041de8f43 | ["MIT"] | 15 | 2019-12-19T11:57:30.000Z | 2021-11-15T23:34:41.000Z | tests/components/test_ts_component.py | T4rk1n/dazzler | 69c49422dc19c910445ab265b1d3481041de8f43 | ["MIT"] | 196 | 2019-09-21T15:10:14.000Z | 2022-03-31T11:07:48.000Z | tests/components/test_ts_component.py | T4rk1n/dazzler | 69c49422dc19c910445ab265b1d3481041de8f43 | ["MIT"] | 7 | 2019-10-30T19:38:15.000Z | 2021-12-01T04:54:16.000Z |
# A bit of duplication of the component system tests to ensure
# typescript components are transpiled properly to Python.
# Types are tested in test_mypy.
import json
import re
import pytest
from . import ts_components as tsc
@pytest.mark.parametrize(
'component',
[tsc.TypedComponent, tsc.TypedClassComponent]
)
def test_tsc_required(component):
with pytest.raises(TypeError) as context:
component()
assert context.value.args[0] == "__init__() missing 1 required positional argument: 'required_str'" # noqa: E501
@pytest.mark.parametrize(
'component, doc',
[
(tsc.TypedComponent, 'Typed Component Docstring'),
(tsc.TypedClassComponent, 'Typed class component')
]
)
def test_tsc_docstring(component, doc):
assert component.__doc__.strip() == doc
@pytest.mark.parametrize(
'component',
[tsc.TypedComponent, tsc.TypedClassComponent]
)
def test_tsc_aspect_docstring(component):
assert ':param str_with_comment: Docstring'\
in component.__init__.__doc__
@pytest.mark.parametrize('prop_name, prop_default, component', [
('default_str', "'default'", tsc.TypedComponent),
('default_required_str', "'default required'", tsc.TypedComponent),
('default_num', 3, tsc.TypedComponent),
('default_str', "'default'", tsc.TypedClassComponent),
('default_required_str', "'default required'", tsc.TypedClassComponent),
('default_num', 3, tsc.TypedClassComponent),
])
def test_tsc_default_props_docstring(prop_name, prop_default, component):
pattern = r':param {}:.*\(default={}\)'.format(prop_name, prop_default)
assert re.search(pattern, str(component.__init__.__doc__))
def test_tsc_enum_docstring():
assert ":param enumeration: (Possible values: 'foo', 'bar')" \
in tsc.TypedComponent.__init__.__doc__
assert ":param defined_enum: (Possible values: 'foo', 'bar')" \
in tsc.TypedComponent.__init__.__doc__
@pytest.mark.async_test
async def test_tsc_render(start_page, browser):
from tests.components.pages.ts import page
await start_page(page)
# assert the children with added classname + base class name css path.
await browser.wait_for_text_to_equal(
'.dazzler-ts-typed-component.other .children .dazzler-core-container',
'foobar'
)
# assert style can be changed, is added to the type by extension.
await browser.wait_for_style_to_equal(
'.dazzler-ts-typed-component.other',
'border', '1px solid rgb(0, 0, 255)'
)
content = await browser.wait_for_element_by_css_selector(
'.dazzler-ts-typed-component.other .json-output'
)
data = json.loads(content.text)
assert data['num'] == 2
assert data['text'] == 'foobar'
assert data['arr'] == [1, 2, 'mixed']
assert data['arr_str'] == ['foo', 'bar']
assert data['default_str'] == 'default'
assert data['required_str'] == 'override'
assert data['obj'] == {'anything': 'possible'}
await browser.wait_for_text_to_equal(
'.dazzler-ts-typed-class-component .children', 'clazz'
)
| 31.865979
| 117
| 0.693303
| 373
| 3,091
| 5.479893
| 0.33244
| 0.066536
| 0.029354
| 0.037182
| 0.281311
| 0.22456
| 0.189335
| 0.165362
| 0.165362
| 0.120352
| 0
| 0.006294
| 0.177612
| 3,091
| 96
| 118
| 32.197917
| 0.797797
| 0.094468
| 0
| 0.157143
| 0
| 0
| 0.286175
| 0.055516
| 0
| 0
| 0
| 0
| 0.185714
| 1
| 0.071429
| false
| 0
| 0.071429
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0b22aa7904b846e9743534781f5c71318798017 | 9,371 | py | Python | python test generator/main.py | ElDonad/Tixel-Dungeon | ad622e570a06bf7722cdf15dcc33547ba14aada4 | ["MIT"] | null | null | null | python test generator/main.py | ElDonad/Tixel-Dungeon | ad622e570a06bf7722cdf15dcc33547ba14aada4 | ["MIT"] | null | null | null | python test generator/main.py | ElDonad/Tixel-Dungeon | ad622e570a06bf7722cdf15dcc33547ba14aada4 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import random
import numpy as np
import colorama
from colorama import Fore, Back
import copy
colorama.init()
LEFT = 'lft'
RIGHT = 'rgt'
UP = 'up'
DOWN = 'dwn'
HORIZONTAL = 'horizontal'
VERTICAL = 'vertical'
class Room:
def __init__(self, x=0, y=0, h=0, w=0):
self.x = x
self.y = y
self.h = h
self.w = w
class Path:
def __init__(self, orientation = ''):
self.straights = []
self.orientation = orientation
class Straight:
def __init__(self, x=0, y=0, length = 0):
self.x = x
self.y = y
self.length = length
VERTICAL = -1
HORIZONTAL = -2
def getEnd(self):
if self.orientation == VERTICAL:
return (self.x, self.y + self.length)
elif self.orientation == HORIZONTAL:
return (self.x + self.length, self.y)
print("beginning...")
level = []
for x in range(50):
level.append([])
for y in range(50):
level[x].append(".")
rooms = []
corridors = []
def generateLevel():
levelX = 50
levelY = 50
hasKeyRoom = False
hasItemRoom = False
deltaFromCenter = 3
roomNumber = random.randint(6,9)
print(roomNumber)
for roomIndex in range(roomNumber):
placed = False
placeIt = 0
while placed == False:
if placeIt > 1500:
print("placement error")
return False
newRoom = Room()
newRoom.h = random.randint(3,6)
newRoom.w = random.randint(3,6)
            newRoom.x = random.randint(max(0, levelX // 2 - deltaFromCenter), min(levelX - newRoom.w - 1, levelX // 2 + deltaFromCenter))
            newRoom.y = random.randint(max(0, levelY // 2 - deltaFromCenter), min(levelY - newRoom.h - 1, levelY // 2 + deltaFromCenter))
if not collide(newRoom):
rooms.append(newRoom)
if len(rooms) > 1:
corridors.append(generatePath(rooms[-1], rooms[-2]))
placed = True
deltaFromCenter += 5
print("finished one room !")
def collide(sqr1):
if isinstance(sqr1, Room):
for sqr2 in rooms:
if (sqr1.x + sqr1.w + 1>= sqr2.x - 1 and
sqr1.x - 1<= sqr2.x + sqr2.w + 1 and
sqr1.y + sqr1.h + 1>= sqr2.y - 1 and
sqr1.y - 1<= sqr2.y + sqr2.h + 1
):
return True
return False
elif isinstance(sqr1, Path):
for straight in sqr1.straights:
fakeSqr = Room()
fakeSqr.x = straight.x
fakeSqr.y = straight.y
if straight.orientation == Path.Straight.HORIZONTAL:
fakeSqr.w = straight.length
fakeSqr.h = 1
elif straight.orientation == Path.Straight.VERTICAL:
fakeSqr.w = 1
fakeSqr.h = straight.length
if collide(fakeSqr):
return True
return False
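# Illustrative sketch (added; the coordinates are made up). collide() treats
# rooms as axis-aligned rectangles with a one-tile margin around them:
#
#     a = Room(x=10, y=10, h=4, w=4)
#     rooms.append(a)
#     b = Room(x=15, y=10, h=3, w=3)   # touches a's padded edge -> collides
#     c = Room(x=20, y=20, h=3, w=3)   # far away -> no collision
#     # collide(b) would return True, collide(c) would return False.
#     rooms.remove(a)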
def generatePath(room1, room2):
startPoint = (0,0)
endPoint = (0,0)
def straightInDirection(direction, start = None, length=1, straight = Path.Straight()):
        if start is None:
start = (straight.x, straight.y)
x, y = start
straight.x = x
straight.y = y
if direction == LEFT:
straight.orientation = HORIZONTAL
straight.length = -length
elif direction == RIGHT:
straight.orientation = HORIZONTAL
straight.length = length
elif direction == UP:
straight.orientation = VERTICAL
straight.length = -length
elif direction == DOWN:
straight.orientation = VERTICAL
straight.length = length
print("New path : ", start, length, direction)
return straight
    initialDir = None
    initialOrientation = None
    if abs(room1.x - room2.x) > abs(room1.y - room2.y):
        initialOrientation = HORIZONTAL
    else:
        initialOrientation = VERTICAL
    if initialOrientation == HORIZONTAL:
        if room1.x > room2.x:
            initialDir = LEFT
        else:
            initialDir = RIGHT
    elif initialOrientation == VERTICAL:
if room1.y > room2.y:
initialDir = UP
else:
initialDir = DOWN
    # Start of generation: choose a starting point:
if initialDir == LEFT:
startPoint = (room1.x, random.randint(room1.y, room1.y + room1.h))
elif initialDir == RIGHT:
startPoint = (room1.x + room1.w, random.randint(room1.y, room1.y + room1.h))
elif initialDir == UP:
startPoint = (random.randint(room1.x, room1.x + room1.w), room1.y)
elif initialDir == DOWN:
startPoint = (random.randint(room1.x, room1.x + room1.w), room1.y + room1.h)
print(startPoint)
#input()
    # Choose an end point in the same way: it must lie on the face of the room opposite the one holding the starting point.
if initialDir == LEFT:
endPoint = (room2.x + room2.w, random.randint(room2.y, room2.y + room2.h))
elif initialDir == RIGHT:
endPoint = (room2.x, random.randint(room2.y, room2.y + room2.h))
elif initialDir == UP:
endPoint = (random.randint(room2.x, room2.x + room2.w), room2.y + room2.h)
elif initialDir == DOWN:
endPoint = (random.randint(room2.x, room2.x + room2.w), room2.y)
print(endPoint)
#input()
path = Path()
currentPos = startPoint
headingDir = initialDir
deltaDir = None
priorityOrientation = None
if initialDir == LEFT or initialDir == RIGHT:
deltaDir = abs(startPoint[0] - endPoint[0])
priorityOrientation = VERTICAL
elif initialDir == UP or initialDir == DOWN:
deltaDir = abs(startPoint[1] - endPoint[1])
priorityOrientation = HORIZONTAL
path.straights.append(copy.deepcopy(straightInDirection(initialDir, length = random.randint(1,max(1,int(deltaDir / 3))), start = currentPos)))
currentPos = path.straights[-1].getEnd()
print(currentPos)
#input()
while currentPos != endPoint:
xDelta = abs(endPoint[0] - currentPos[0])
xOffset = endPoint[0] - currentPos[0]
yDelta = abs(endPoint[1] - currentPos[1])
yOffset = endPoint[1] - currentPos[1]
print("loop enter", xOffset, yOffset, priorityOrientation)
#input()
if yOffset != 0 and (priorityOrientation == VERTICAL or xOffset == 0):
print("yOffset")
newDirection = None
if yOffset < 0:
newDirection = UP
elif yOffset > 0:
newDirection = DOWN
path.straights.append(copy.deepcopy(straightInDirection(newDirection, length=yDelta, start=currentPos)))
currentPos = path.straights[-1].getEnd()
elif xOffset != 0 and (priorityOrientation == HORIZONTAL or yOffset == 0):
print("xOffset")
newDirection = None
if xOffset < 0:
newDirection = LEFT
elif xOffset > 0:
newDirection = RIGHT
print(newDirection)
path.straights.append(copy.deepcopy(straightInDirection(newDirection, length=xDelta, start=currentPos)))
currentPos = path.straights[-1].getEnd()
else:
print('nothing')
print("ROAD ADVANCEMENT : currentPos : " + str(currentPos) + " endPos : " + str(endPoint))
#input()
return path
for a in range(0,1000):
generateLevel()
print("finished generation !")
print(len(rooms))
count = 1
for room in rooms:
for x in range(room.x, room.x + room.w):
level[x][room.y] = str(count)#"░"
level[x][room.y + room.h] = str(count)
for y in range(room.y, room.y + room.h):
level[room.x][y] = str(count)
level[room.x + room.w][y] = str(count)
count += 1
print("nombre de corridors : ", len(corridors))
for corridor in corridors:
print("nombre de corridors : ", len(corridor.straights))
for straight in corridor.straights:
print("origine", straight.x, ', ',straight.y,"oritentation : ",straight.orientation, "length : ", straight.length)
if straight.orientation == VERTICAL:
for y in range(straight.y,straight.y + straight.length, np.sign(straight.length)):
for x in range(straight.x -1,straight.x + 1 + 1,2):
#level[x][y] = "░"
pass
level[straight.x][y]=Fore.RED + "." + Fore.WHITE
elif straight.orientation == HORIZONTAL:
print("horizontal")
for x in range(straight.x,straight.x + straight.length, np.sign(straight.length)):
for y in range(straight.y -1,straight.y + 1 + 1,2):
#level[x][y] = "░"
pass
level[x][straight.y]= Fore.RED + "." + Fore.WHITE
for line in level:
lineC = " "
print(lineC.join(line))
rooms = []
corridors = []
level = []
for x in range(50):
level.append([])
for y in range(50):
level[x].append(".")
print("loop position : ",a)
input()
| 33.230496
| 146
| 0.555224
| 1,055
| 9,371
| 4.923223
| 0.151659
| 0.03504
| 0.005776
| 0.010589
| 0.285329
| 0.258183
| 0.201194
| 0.155179
| 0.098575
| 0.090874
| 0
| 0.026207
| 0.32814
| 9,371
| 281
| 147
| 33.348754
| 0.798285
| 0.028172
| 0
| 0.213675
| 0
| 0
| 0.031666
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034188
| false
| 0.008547
| 0.021368
| 0
| 0.106838
| 0.094017
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0b36a7b39c48086c567c97c9b01212d0a865743 | 255 | py | Python | src/py/vmw/ui/vmw_launcher.py | jp-uom/variant_matrix_wizard | c5d7ac509be6d6a2020ab38f49c28df090a03c1d | ["MIT"] | 1 | 2017-12-27T11:56:33.000Z | 2017-12-27T11:56:33.000Z | src/py/vmw/ui/vmw_launcher.py | jp-uom/variant_matrix_wizard | c5d7ac509be6d6a2020ab38f49c28df090a03c1d | ["MIT"] | null | null | null | src/py/vmw/ui/vmw_launcher.py | jp-uom/variant_matrix_wizard | c5d7ac509be6d6a2020ab38f49c28df090a03c1d | ["MIT"] | null | null | null |
#!/usr/bin/env python3
import wx
import vmwizard as vmw
if __name__ == '__main__':
app = wx.App(False)
frame = wx.Frame(None, wx.ID_ANY, "Variant Matrix")
wiz = vmw.Wizard(frame)
frame.Show(True)
frame.Centre()
app.MainLoop()
| 15.9375
| 55
| 0.639216
| 37
| 255
| 4.162162
| 0.702703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005025
| 0.219608
| 255
| 15
| 56
| 17
| 0.768844
| 0.082353
| 0
| 0
| 0
| 0
| 0.094828
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.222222
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0b370195e62577b0993491b41073f0838231b20 | 2,308 | py | Python | Modules/Scripted/DMRIInstall/DMRIInstall.py | TheInterventionCentre/NorMIT-Plan-App | 765ed9a5dccc1cc134b65ccabe93fc132baeb2ea | ["MIT"] | null | null | null | Modules/Scripted/DMRIInstall/DMRIInstall.py | TheInterventionCentre/NorMIT-Plan-App | 765ed9a5dccc1cc134b65ccabe93fc132baeb2ea | ["MIT"] | null | null | null | Modules/Scripted/DMRIInstall/DMRIInstall.py | TheInterventionCentre/NorMIT-Plan-App | 765ed9a5dccc1cc134b65ccabe93fc132baeb2ea | ["MIT"] | null | null | null |
import os
import string
import textwrap
import unittest
import vtk, qt, ctk, slicer
from slicer.ScriptedLoadableModule import *
import logging
#
# DMRIInstall
#
class DMRIInstall(ScriptedLoadableModule):
"""
"""
helpText = textwrap.dedent(
"""
Please use the Extension Manager to install the "SlicerDMRI" extension for
diffusion-related tools including:
<ul>
<li> Diffusion Tensor Estimation</li>
<li>Tractography Display</li>
<li>Tractography Seeding</li>
<li>Fiber Tract Measurement</li>
</ul>
""")
def __init__(self, parent):
# Hide this module if SlicerDMRI is already installed
model = slicer.app.extensionsManagerModel()
if model.isExtensionInstalled("SlicerDMRI"):
return
ScriptedLoadableModule.__init__(self, parent)
self.parent.categories = ["Diffusion"]
self.parent.title = "Install Slicer Diffusion Tools"
self.parent.dependencies = []
self.parent.contributors = ["Isaiah Norton"]
self.parent.helpText = DMRIInstall.helpText
self.parent.helpText += self.getDefaultModuleDocumentationLink()
self.parent.acknowledgementText = textwrap.dedent(
"""
SlicerDMRI supported by NIH NCI ITCR U01CA199459 (Open Source Diffusion MRI
Technology For Brain Cancer Research), and made possible by NA-MIC, NAC,
BIRN, NCIGT, and the Slicer Community.
""")
class DMRIInstallWidget(ScriptedLoadableModuleWidget):
"""Uses ScriptedLoadableModuleWidget base class, available at:
https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
"""
def setup(self):
ScriptedLoadableModuleWidget.setup(self)
self.textBox = ctk.ctkFittedTextBrowser()
self.textBox.setHtml(DMRIInstall.helpText)
self.parent.layout().addWidget(self.textBox)
#
# Apply Button
#
self.applyButton = qt.QPushButton("Open Extension Manager")
self.applyButton.toolTip = 'Install the "SlicerDMRI" extension from the Diffusion category.'
self.applyButton.icon = qt.QIcon(":/Icons/ExtensionDefaultIcon.png")
self.applyButton.enabled = True
self.applyButton.connect('clicked()', self.onApply)
self.parent.layout().addWidget(self.applyButton)
self.parent.layout().addStretch(1)
def onApply(self):
slicer.app.openExtensionsManagerDialog()
| 29.589744
| 96
| 0.731369
| 246
| 2,308
| 6.829268
| 0.5
| 0.071429
| 0.028571
| 0.034524
| 0.034524
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004656
| 0.162478
| 2,308
| 77
| 97
| 29.974026
| 0.864459
| 0.098354
| 0
| 0
| 0
| 0
| 0.120668
| 0.020539
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078947
| false
| 0
| 0.184211
| 0
| 0.368421
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0b43ab4f6dd3ba972b2dc8c30789b6cc19eaa03 | 24,698 | py | Python | opcalendar/models.py | buahaha/allianceauth-opcalendar | 44e50e06eac4b5c0e6b809e5ca2638af5e49145f | ["MIT"] | null | null | null | opcalendar/models.py | buahaha/allianceauth-opcalendar | 44e50e06eac4b5c0e6b809e5ca2638af5e49145f | ["MIT"] | null | null | null | opcalendar/models.py | buahaha/allianceauth-opcalendar | 44e50e06eac4b5c0e6b809e5ca2638af5e49145f | ["MIT"] | null | null | null |
import requests
import json
from typing import Tuple
from datetime import timedelta, datetime
from django.db import models
from django.urls import reverse
from django.contrib.auth.models import User
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.utils.html import strip_tags
from django.contrib.auth.models import Group
from esi.errors import TokenExpiredError, TokenInvalidError
from esi.models import Token
from allianceauth.authentication.models import CharacterOwnership
from allianceauth.eveonline.models import EveCharacter, EveCorporationInfo
from allianceauth.services.hooks import get_extension_logger
from allianceauth.authentication.models import State
from .providers import esi
from .decorators import fetch_token_for_owner
logger = get_extension_logger(__name__)
class General(models.Model):
"""Meta model for app permissions"""
class Meta:
managed = False
default_permissions = ()
permissions = (
(
"basic_access",
"Can access this app and see operations based on visibility rules",
),
("create_event", "Can create and edit events"),
("see_signups", "Can see all signups for event"),
("manage_event", "Can delete and manage other signups"),
(
"add_ingame_calendar_owner",
"Can add ingame calendar feeds for their corporation",
),
)
class WebHook(models.Model):
"""Discord Webhook for pings"""
name = models.CharField(
max_length=150,
help_text=_("Name for this webhook"),
)
webhook_url = models.CharField(
max_length=500,
help_text=_("Webhook URL"),
)
enabled = models.BooleanField(default=True, help_text=_("Is the webhook enabled?"))
def send_embed(self, embed):
custom_headers = {"Content-Type": "application/json"}
data = '{"embeds": [%s]}' % json.dumps(embed)
r = requests.post(self.webhook_url, headers=custom_headers, data=data)
r.raise_for_status()
class Meta:
verbose_name = "Webhook"
verbose_name_plural = "Webhooks"
def __str__(self):
return "{}".format(self.name)
class EventVisibility(models.Model):
name = models.CharField(
max_length=150, null=False, help_text="Name for the visibility filter"
)
restricted_to_group = models.ManyToManyField(
Group,
blank=True,
related_name="eventvisibility_require_groups",
help_text=_(
"The group(s) that will be able to see this event visibility type ..."
),
)
restricted_to_state = models.ManyToManyField(
State,
blank=True,
related_name="eventvisibility_require_states",
help_text=_(
"The state(s) that will be able to see this event visibility type ..."
),
)
webhook = models.ForeignKey(
WebHook,
on_delete=models.CASCADE,
null=True,
blank=True,
help_text=_("Webhook to send over notifications about these fleet types"),
)
ignore_past_fleets = models.BooleanField(
default=True,
help_text=_("Should we ignore fleet signals that are in the past"),
)
color = models.CharField(
max_length=7,
default="",
blank=True,
help_text=_("Color to be displayed on calendar"),
)
include_in_feed = models.BooleanField(
default=False,
help_text=("Whether these events should be included in the ical feed."),
)
is_visible = models.BooleanField(
default=True,
help_text=(
"Whether this visibility filter should be displayed on the event form. Disable for internal visibilities such as the NPSI import fleet visibilities."
),
)
is_default = models.BooleanField(
default=False,
help_text=(
"Whether this visibility filter is used as the default value on the event form"
),
)
is_active = models.BooleanField(
default=True,
help_text=("Whether this visibility filter is active"),
)
def __str__(self) -> str:
return str(self.name)
class Meta:
verbose_name = "Event Visibility Filter"
verbose_name_plural = "Event Visibilities Filters"
def save(self, *args, **kwargs):
if self.is_default:
# select all other is_default items
qs = type(self).objects.filter(is_default=True)
# except self (if self already exists)
if self.pk:
qs = qs.exclude(pk=self.pk)
            # and deactivate them
qs.update(is_default=False)
super(EventVisibility, self).save(*args, **kwargs)
@property
def get_visibility_class(self):
return f"{self.name.replace(' ', '-').lower()}"
class EventHost(models.Model):
"""Fleet Timer Create/Delete pings"""
community = models.CharField(
max_length=150, null=False, help_text="Name of the community"
)
logo_url = models.CharField(
max_length=256, blank=True, help_text="Absolute URL for the community logo"
)
ingame_channel = models.CharField(
max_length=150, blank=True, help_text="Ingame channel name"
)
ingame_mailing_list = models.CharField(
max_length=150, blank=True, help_text="Ingame mailing list name"
)
fleet_comms = models.CharField(
max_length=150,
blank=True,
help_text="Link or description for primary comms such as discord link",
)
fleet_doctrines = models.CharField(
max_length=150, blank=True, help_text="Link or description to the doctrines"
)
website = models.CharField(max_length=150, blank=True, help_text="Website link URL")
discord = models.CharField(max_length=150, blank=True, help_text="Discord link URL")
twitch = models.CharField(max_length=150, blank=True, help_text="Twitch link URL")
twitter = models.CharField(max_length=150, blank=True, help_text="Twitter link URL")
youtube = models.CharField(max_length=150, blank=True, help_text="Youtube link URL")
facebook = models.CharField(
max_length=150, blank=True, help_text="Facebook link URL"
)
details = models.CharField(
max_length=150, blank=True, help_text="Short description about the host."
)
is_default = models.BooleanField(
default=False,
help_text=("Whether this host is used as the default value on the event form"),
)
external = models.BooleanField(
default=False,
help_text=_(
"External hosts are for NPSI API imports. Checking this box will hide the host in the manual event form."
),
)
def __str__(self):
return str(self.community)
class Meta:
verbose_name = "Host"
verbose_name_plural = "Hosts"
def save(self, *args, **kwargs):
if self.is_default:
# select all other is_default items
qs = type(self).objects.filter(is_default=True)
# except self (if self already exists)
if self.pk:
qs = qs.exclude(pk=self.pk)
            # and deactivate them
qs.update(is_default=False)
super(EventHost, self).save(*args, **kwargs)
class EventCategory(models.Model):
name = models.CharField(
max_length=150,
help_text=_("Name for the category"),
)
ticker = models.CharField(
max_length=10,
help_text=_("Ticker for the category"),
)
color = models.CharField(
max_length=7,
default="",
blank=True,
help_text=_("Color to be displayed on calendar"),
)
description = models.TextField(
blank=True,
help_text="Prefilled description that will be added on default on the event description.",
)
class Meta:
verbose_name = "Category"
verbose_name_plural = "Categories"
def __str__(self):
return str(self.name)
@property
def get_category_class(self):
return f"{self.name.replace(' ', '-').lower()}"
class EventImport(models.Model):
"""NPSI IMPORT OPTIONS"""
SPECTRE_FLEET = "SF"
EVE_UNIVERSITY = "EU"
FUN_INC = "FI"
FRIDAY_YARRRR = "FY"
REDEMPTION_ROAD = "RR"
CAS = "CA"
FWAMING_DWAGONS = "FD"
FREE_RANGE_CHIKUNS = "FR"
EVE_LINKNET = "LN"
IMPORT_SOURCES = [
(EVE_LINKNET, _("EVE LinkNet")),
(SPECTRE_FLEET, _("Spectre Fleet")),
(EVE_UNIVERSITY, _("EVE University")),
(FUN_INC, _("Fun Inc.")),
(FRIDAY_YARRRR, _("FRIDAY YARRRR")),
(REDEMPTION_ROAD, _("Redemption Road")),
(CAS, _("CAS")),
(FWAMING_DWAGONS, _("Fwaming Dwagons")),
(FREE_RANGE_CHIKUNS, _("FREE RANGE CHIKUNS")),
]
source = models.CharField(
max_length=32,
choices=IMPORT_SOURCES,
help_text="The API source where you want to pull events from",
)
host = models.ForeignKey(
EventHost,
on_delete=models.CASCADE,
default=1,
help_text="The AA host that will be used for the pulled events",
)
operation_type = models.ForeignKey(
EventCategory,
on_delete=models.CASCADE,
help_text="Operation type and ticker that will be assigned for the pulled fleets",
)
creator = models.ForeignKey(
User,
on_delete=models.CASCADE,
default="1",
help_text="User that has been used to create the fleet (most often the superuser who manages the plugin)",
)
eve_character = models.ForeignKey(
EveCharacter,
null=True,
on_delete=models.SET_NULL,
help_text="Event creator main character",
)
event_visibility = models.ForeignKey(
EventVisibility,
on_delete=models.CASCADE,
null=True,
help_text=_("Visibility filter that dictates who is able to see this event"),
)
def __str__(self):
return str(self.source)
class Meta:
verbose_name = "NPSI Event Import"
verbose_name_plural = "NPSI Event Imports"
class Event(models.Model):
operation_type = models.ForeignKey(
EventCategory,
null=True,
on_delete=models.CASCADE,
help_text=_("Event category type"),
)
title = models.CharField(
max_length=200,
help_text=_("Title for the event"),
)
host = models.ForeignKey(
EventHost,
on_delete=models.CASCADE,
help_text=_("Host entity for the event"),
)
doctrine = models.CharField(
max_length=254,
help_text=_("Doctrine URL or name"),
)
formup_system = models.CharField(
max_length=254,
help_text=_("Location for formup"),
)
description = models.TextField(
help_text=_("Description text for the operation"),
)
start_time = models.DateTimeField(
help_text=_("Event start date and time"),
)
end_time = models.DateTimeField(
help_text=_("Event end date and time"),
)
fc = models.CharField(
max_length=254,
help_text=_("Fleet commander/manager for the event"),
)
event_visibility = models.ForeignKey(
EventVisibility,
on_delete=models.CASCADE,
null=True,
blank=True,
help_text=_("Visibility filter that dictates who is able to see this event"),
)
external = models.BooleanField(
default=False,
null=True,
help_text=_("Is the event an external event over API"),
)
created_date = models.DateTimeField(
default=timezone.now,
help_text=_("When the event was created"),
)
eve_character = models.ForeignKey(
EveCharacter,
null=True,
on_delete=models.SET_NULL,
help_text=_("Character used to create the event"),
)
user = models.ForeignKey(
User,
on_delete=models.CASCADE,
help_text=_("User who created the event"),
)
def duration(self):
return self.end_time - self.start_time
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse("opcalendar:event-detail", args=(self.id,))
@property
def get_visibility_class(self):
if self.event_visibility:
return f"{self.event_visibility.name.replace(' ', '-').lower()}"
@property
def get_event_styling(self):
if self.event_visibility:
return f".{self.event_visibility.name.replace(' ', '-').lower()}:before{{border-color: transparent {self.event_visibility.color} transparent transparent;border-style: solid;}} .{self.operation_type.name.replace(' ', '-').lower()} {{border-left: 6px solid {self.operation_type.color} !important;}}"
@property
def get_category_class(self):
if self.operation_type:
return f"{self.operation_type.name.replace(' ', '-').lower()}"
@property
def get_date_status(self):
if datetime.now(timezone.utc) > self.start_time:
return "past-event"
else:
return "future-event"
@property
def get_html_url(self):
url = reverse("opcalendar:event-detail", args=(self.id,))
return f"{url}"
@property
def get_html_title(self):
return f'<span>{self.start_time.strftime("%H:%M")} - {self.end_time.strftime("%H:%M")} <i>{self.host.community}</i></span><span><b>{self.operation_type.ticker} {self.title}</b></span>'
    def user_can_edit(self, user: User) -> bool:
"""Checks if the given user can edit this timer. Returns True or False"""
return user.has_perm("opcalendar.manage_event") or (
self.user == user and user.has_perm("opcalendar.create_event")
)
class Owner(models.Model):
"""A corporation that holds the calendars"""
ERROR_NONE = 0
ERROR_TOKEN_INVALID = 1
ERROR_TOKEN_EXPIRED = 2
ERROR_INSUFFICIENT_PERMISSIONS = 3
ERROR_NO_CHARACTER = 4
ERROR_ESI_UNAVAILABLE = 5
ERROR_OPERATION_MODE_MISMATCH = 6
ERROR_UNKNOWN = 99
ERRORS_LIST = [
(ERROR_NONE, "No error"),
(ERROR_TOKEN_INVALID, "Invalid token"),
(ERROR_TOKEN_EXPIRED, "Expired token"),
(ERROR_INSUFFICIENT_PERMISSIONS, "Insufficient permissions"),
(ERROR_NO_CHARACTER, "No character set for fetching data from ESI"),
(ERROR_ESI_UNAVAILABLE, "ESI API is currently unavailable"),
(
ERROR_OPERATION_MODE_MISMATCH,
"Operaton mode does not match with current setting",
),
(ERROR_UNKNOWN, "Unknown error"),
]
corporation = models.OneToOneField(
EveCorporationInfo,
default=None,
null=True,
blank=True,
on_delete=models.CASCADE,
help_text="Corporation owning the calendar",
related_name="+",
)
character = models.ForeignKey(
CharacterOwnership,
on_delete=models.SET_DEFAULT,
default=None,
null=True,
blank=True,
help_text="Character used for syncing the calendar",
related_name="+",
)
event_visibility = models.ForeignKey(
EventVisibility,
on_delete=models.CASCADE,
null=True,
blank=True,
help_text=_("Visibility filter that dictates who is able to see this event"),
)
operation_type = models.ForeignKey(
EventCategory,
null=True,
blank=True,
on_delete=models.CASCADE,
help_text=_(
"Event category that will be assigned for all of the events from this owner."
),
)
is_active = models.BooleanField(
default=True,
help_text=("whether this owner is currently included in the sync process"),
)
class Meta:
verbose_name = "Ingame Clanedar Owner"
verbose_name_plural = "Ingame Calendar Owners"
@fetch_token_for_owner(["esi-calendar.read_calendar_events.v1"])
def update_events_esi(self, token):
if self.is_active:
# Get all current imported fleets in database
event_ids_to_remove = list(
IngameEvents.objects.filter(owner=self).values_list(
"event_id", flat=True
)
)
logger.debug(
"Ingame events currently in database: %s" % event_ids_to_remove
)
events = self._fetch_events()
for event in events:
character_id = self.character.character.character_id
details = (
esi.client.Calendar.get_characters_character_id_calendar_event_id(
character_id=character_id,
event_id=event["event_id"],
token=token.valid_access_token(),
).results()
)
end_date = event["event_date"] + timedelta(minutes=details["duration"])
original = IngameEvents.objects.filter(
owner=self, event_id=event["event_id"]
).first()
text = strip_tags(details["text"])
try:
if original is not None:
logger.debug("Event: %s already in database" % event["title"])
event_ids_to_remove.remove(original.event_id)
else:
# Check if we already have the host
original_host = EventHost.objects.filter(
community=details["owner_name"]
).first()
logger.debug("Got original host: {}".format(original_host))
if original_host is not None:
host = original_host
else:
host = EventHost.objects.create(
community=details["owner_name"],
external=True,
)
IngameEvents.objects.create(
event_id=event["event_id"],
owner=self,
text=text,
event_owner_id=details["owner_id"],
owner_type=details["owner_type"],
owner_name=details["owner_name"],
host=host,
importance=details["importance"],
duration=details["duration"],
event_start_date=event["event_date"],
event_end_date=end_date,
title=event["title"],
)
logger.debug("New event created: %s" % event["title"])
except Exception as e:
logger.debug("Error adding new event: %s" % e)
logger.debug("Removing all events that we did not get over API")
IngameEvents.objects.filter(pk__in=event_ids_to_remove).delete()
logger.debug(
"All events fetched for %s" % self.character.character.character_name
)
@fetch_token_for_owner(["esi-calendar.read_calendar_events.v1"])
def _fetch_events(self, token) -> list:
character_id = self.character.character.character_id
events = esi.client.Calendar.get_characters_character_id_calendar(
character_id=character_id,
token=token.valid_access_token(),
).results()
return events
def token(self, scopes=None) -> Tuple[Token, int]:
"""returns a valid Token for the owner"""
token = None
error = None
# abort if character is not configured
if self.character is None:
logger.error("%s: No character configured to sync", self)
error = self.ERROR_NO_CHARACTER
# abort if character does not have sufficient permissions
elif self.corporation and not self.character.user.has_perm(
"opcalendar.add_ingame_calendar_owner"
):
logger.error(
"%s: This character does not have sufficient permission to sync corporation calendars",
self,
)
error = self.ERROR_INSUFFICIENT_PERMISSIONS
# abort if character does not have sufficient permissions
elif not self.character.user.has_perm("opcalendar.add_ingame_calendar_owner"):
logger.error(
"%s: This character does not have sufficient permission to sync personal calendars",
self,
)
error = self.ERROR_INSUFFICIENT_PERMISSIONS
else:
try:
# get token
token = (
Token.objects.filter(
user=self.character.user,
character_id=self.character.character.character_id,
)
.require_scopes(scopes)
.require_valid()
.first()
)
except TokenInvalidError:
logger.error("%s: Invalid token for fetching calendars", self)
error = self.ERROR_TOKEN_INVALID
except TokenExpiredError:
logger.error("%s: Token expired for fetching calendars", self)
error = self.ERROR_TOKEN_EXPIRED
else:
if not token:
logger.error("%s: No token found with sufficient scopes", self)
error = self.ERROR_TOKEN_INVALID
return token, error
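# Illustrative sketch (added; not part of the original module). ``Owner.token``
# returns a (token, error) pair instead of raising, so a caller might do:
#
#     token, error = owner.token(scopes=["esi-calendar.read_calendar_events.v1"])
#     if error != Owner.ERROR_NONE:
#         logger.error("Could not fetch a calendar token (code %s)", error)
#     else:
#         owner.update_events_esi()  # the decorator above supplies the token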
class IngameEvents(models.Model):
event_id = models.PositiveBigIntegerField(
primary_key=True, help_text="The EVE ID of the event"
)
owner = models.ForeignKey(
Owner,
on_delete=models.CASCADE,
help_text="Event holder",
)
event_start_date = models.DateTimeField()
event_end_date = models.DateTimeField(blank=True, null=True)
title = models.CharField(max_length=128)
text = models.TextField()
event_owner_id = models.IntegerField(null=True)
owner_type = models.CharField(max_length=128)
owner_name = models.CharField(max_length=128)
host = models.ForeignKey(
EventHost,
on_delete=models.CASCADE,
default=1,
help_text=_("Host entity for the event"),
)
importance = models.CharField(max_length=128)
duration = models.CharField(max_length=128)
def __str__(self):
return self.title
class Meta:
verbose_name = "Ingame Event"
verbose_name_plural = "Ingame Events"
def get_absolute_url(self):
return reverse("opcalendar:ingame-event-detail", args=(self.event_id,))
@property
def get_date_status(self):
if datetime.now(timezone.utc) > self.event_start_date:
return "past-event"
else:
return "future-event"
@property
def get_visibility_class(self):
if self.owner.event_visibility:
return f"{self.owner.event_visibility.name.replace(' ', '-').lower()}"
else:
return "ingame-event"
@property
def get_event_styling(self):
d = ""
if self.owner.event_visibility:
d += f".{self.owner.event_visibility.name.replace(' ', '-').lower()}:before{{border-color: transparent {self.owner.event_visibility.color} transparent transparent;border-style: solid;}}"
if self.owner.operation_type:
d += f".{self.owner.operation_type.name.replace(' ', '-').lower()} {{border-left: 6px solid {self.owner.operation_type.color} !important;}}"
return d
@property
def get_category_class(self):
if self.owner.operation_type:
return f"{self.owner.operation_type.name.replace(' ', '-').lower()}"
@property
def get_html_url(self):
url = reverse("opcalendar:ingame-event-detail", args=(self.event_id,))
return f"{url}"
@property
def get_html_title(self):
return f'<span>{self.event_start_date.strftime("%H:%M")} - {self.event_end_date.strftime("%H:%M")}<i> {self.owner_name}</i></span><span><b>{self.title}</b></span>'
class EventMember(models.Model):
event = models.ForeignKey(Event, on_delete=models.CASCADE)
character = models.ForeignKey(
EveCharacter,
null=True,
on_delete=models.SET_NULL,
help_text="Event creator main character",
)
class Meta:
unique_together = ["event", "character"]
| 33.285714
| 309
| 0.604907
| 2,768
| 24,698
| 5.209176
| 0.151012
| 0.033844
| 0.037451
| 0.049934
| 0.492683
| 0.435467
| 0.38803
| 0.3365
| 0.28511
| 0.236008
| 0
| 0.005754
| 0.29634
| 24,698
| 741
| 310
| 33.330634
| 0.823925
| 0.027006
| 0
| 0.385113
| 0
| 0.011327
| 0.234114
| 0.052493
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050162
| false
| 0
| 0.048544
| 0.022654
| 0.323625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0b49d08acf472e125d49a19fc95585b9f897f91
| 5,603
|
py
|
Python
|
scripts/mot_neural_solver/pl_module/pair_nuclei.py
|
taimurhassan/crc
|
930be78505dd17655542a38b0fc1ded9cf19a9a2
|
[
"MIT"
] | 1
|
2022-03-16T10:40:23.000Z
|
2022-03-16T10:40:23.000Z
|
scripts/mot_neural_solver/pl_module/pair_nuclei.py
|
taimurhassan/crc
|
930be78505dd17655542a38b0fc1ded9cf19a9a2
|
[
"MIT"
] | null | null | null |
scripts/mot_neural_solver/pl_module/pair_nuclei.py
|
taimurhassan/crc
|
930be78505dd17655542a38b0fc1ded9cf19a9a2
|
[
"MIT"
] | null | null | null |
import sacred
from sacred import Experiment
import os.path as osp
import pandas as pd
import scipy.io as sio
import numpy as np
from sacred import SETTINGS
SETTINGS.CONFIG.READ_ONLY_CONFIG=False
def pair_nuclei_and_generate_output(out_files_dir, datasetName, detector = "tracktor_prepr_det"):
startFrame = 159
endFrame = 238
if datasetName == "crchisto":
startFrame = 71
endFrame = 100
elif datasetName == "consep":
startFrame = 1
endFrame = 14
elif datasetName == "pannuke":
startFrame = 1
endFrame = 2359
elif datasetName == "lizard":
startFrame = 159
endFrame = 238
print("Start Frame: ",startFrame, ", End Frame: ", endFrame, ", Dataset: ", datasetName)
print("\n\nBackbone: ",detector,"\n\n")
for i in range(startFrame, endFrame + 1):
pred2 = []
gt2 = []
gt1 = []
pred1 = []
print(out_files_dir.replace(':','_'))
initial = "/MOT17-0"
if i > 9:
initial = "/MOT17-"
pn1 = out_files_dir.replace(':','_') + initial + str(i) + "-FRCNN.txt"
pn2 = "data/MOT17Det/test" + initial + str(i) + "/gt/gt.txt"
if detector == "tracktor_prepr_det":
pn3 = "data/MOT17Labels/test" + initial + str(i) + "-FRCNN/det/tracktor_prepr_det.txt"
else:
pn3 = "data/MOT17Labels/test" + initial + str(i) + "-FRCNN/det/frcnn_prepr_det.txt"
print(pn1)
print(pn2)
print(pn3)
f1 = open(pn3, "r")
for x in f1:
minDist = 1000000000
pairNodeCentroid = [] #Centroid Point
xs = x.split(',')
#print(xs) # tokenized line
type = int(xs[1])
pred2.append(type)
x1 = float(xs[2]) + float(xs[4])/2
y1 = float(xs[3]) + float(xs[5])/2
f2 = open(pn1, "r")
for y in f2:
ys = y.split(',')
#print(ys)
#input()
x2 = float(ys[2]) + float(ys[4])/2
y2 = float(ys[3]) + float(ys[5])/2
distance = np.sqrt(((x1-x2) * (x1-x2)) + ((y1-y2) * (y1-y2)))
if distance < minDist:
minDist = distance
pairNodeCentroid = [x2, y2]
#print(int(type))
f2.close()
#input()
if len(pairNodeCentroid) > 0:
pred1.append([round(pairNodeCentroid[0],2),round(pairNodeCentroid[1],2)])
else:
pred1.append([round(x1,2),round(y1,2)])
f1.close()
print(len(pred2))
f1 = open(pn2, "r")
#print(len(pred1))
for x in f1:
minDist = 1000000000
pairNodeCentroid = [] #Centroid Point
xs = x.split(',')
#print(xs) # tokenized line
type = int(xs[1])
x1 = float(xs[2]) + float(xs[4])/2
y1 = float(xs[3]) + float(xs[5])/2
gt2.append(type)
gt1.append([round(x1,2),round(y1,2)])
#print(gt1)
f1.close()
print(len(gt2))
#print(len(pred2))
#input()
inst_type_pred = np.transpose(np.array(pred2))
inst_type_gt = np.transpose(np.array(gt2))
inst_centroid_pred = np.array(pred1)
inst_centroid_gt = np.array(gt1)
index = 0
index2 = 0
pn4 = "mot_neural_solver/output/"+ datasetName +"/true/detections_"+ str(i) + "_" + str(index2) + ".mat"
pn5 = "mot_neural_solver/output/"+ datasetName +"/pred/detections_"+ str(i) + "_" + str(index2) + ".mat"
pred1 = []
pred2 = []
gt1 = []
gt2 = []
if datasetName == "pannuke":
pn4 = "mot_neural_solver/output/"+ datasetName +"/true/detections_"+ str(i) + ".mat"
pn5 = "mot_neural_solver/output/"+ datasetName +"/pred/detections_"+ str(i) + ".mat"
mdic = {"inst_centroid": inst_centroid_gt, "inst_type": inst_type_gt}
sio.savemat(pn4, mdic)
mdic = {"inst_centroid": inst_centroid_pred, "inst_type": inst_type_pred}
sio.savemat(pn5, mdic)
else:
for ii in range(min(len(inst_type_gt),len(inst_type_pred))):
if index > 99:
pn4 = "mot_neural_solver/output/"+ datasetName +"/true/detections_"+ str(i) + "_" + str(index2) + ".mat"
pn5 = "mot_neural_solver/output/"+ datasetName +"/pred/detections_"+ str(i) + "_" + str(index2) + ".mat"
index = 0
#print(len(pred2))
mdic = {"inst_centroid": gt1, "inst_type": gt2}
sio.savemat(pn4, mdic)
mdic = {"inst_centroid": pred1, "inst_type": pred2}
sio.savemat(pn5, mdic)
pred1 = []
pred2 = []
gt1 = []
gt2 = []
else:
index = index + 1
index2 = index2 + 1
pred1.append(inst_centroid_pred[ii])
pred2.append(inst_type_pred[ii])
if inst_centroid_gt[ii] != '':
gt1.append(inst_centroid_gt[ii])
gt2.append(inst_type_gt[ii])
if inst_type_gt[ii] == 4:
print(inst_type_gt[ii], " type is coming in gt")
if inst_type_gt[ii] == 5:
print(inst_type_gt[ii], " type is coming in gt")
| 29.489474
| 124
| 0.493129
| 627
| 5,603
| 4.263158
| 0.216906
| 0.047886
| 0.029929
| 0.047138
| 0.375982
| 0.331837
| 0.331837
| 0.290685
| 0.290685
| 0.260007
| 0
| 0.0529
| 0.369088
| 5,603
| 189
| 125
| 29.645503
| 0.703253
| 0.033018
| 0
| 0.4
| 0
| 0
| 0.130113
| 0.047196
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008
| false
| 0
| 0.056
| 0
| 0.064
| 0.08
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0b8c5cb52a0f84127322d8ea824dbfd7a2fbbb9
| 1,087
|
py
|
Python
|
onnxmltools/convert/coreml/operator_converters/ArrayFeatureExtractor.py
|
szha/onnxmltools
|
b04d05bda625cbc006955ce0a220277739a95825
|
[
"MIT"
] | 3
|
2019-02-27T21:03:43.000Z
|
2020-04-07T22:16:50.000Z
|
onnxmltools/convert/coreml/operator_converters/ArrayFeatureExtractor.py
|
szha/onnxmltools
|
b04d05bda625cbc006955ce0a220277739a95825
|
[
"MIT"
] | null | null | null |
onnxmltools/convert/coreml/operator_converters/ArrayFeatureExtractor.py
|
szha/onnxmltools
|
b04d05bda625cbc006955ce0a220277739a95825
|
[
"MIT"
] | 2
|
2020-10-01T09:24:55.000Z
|
2021-04-17T13:57:31.000Z
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from ....proto import onnx_proto
from ...common._registration import register_converter
def convert_array_feature_extractor(scope, operator, container):
op_type = 'ArrayFeatureExtractor'
attrs = {'name': operator.full_name}
target_indexes = operator.raw_operator.arrayFeatureExtractor.extractIndex
index_buffer_name = scope.get_unique_variable_name('target_indexes')
container.add_initializer(index_buffer_name, onnx_proto.TensorProto.INT64, [len(target_indexes)], target_indexes)
inputs = [operator.inputs[0].full_name, index_buffer_name]
outputs = [operator.outputs[0].full_name]
container.add_node(op_type, inputs, outputs, op_domain='ai.onnx.ml', **attrs)
register_converter('arrayFeatureExtractor', convert_array_feature_extractor)
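# Note: the converter above maps Core ML's arrayFeatureExtractor onto a single
# ONNX-ML ArrayFeatureExtractor node; the extracted indices travel as an INT64
# initializer that is appended as the node's second input in the ai.onnx.ml domain.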
| 41.807692
| 117
| 0.678933
| 116
| 1,087
| 6.077586
| 0.534483
| 0.073759
| 0.06383
| 0.079433
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004098
| 0.102116
| 1,087
| 25
| 118
| 43.48
| 0.718238
| 0.275069
| 0
| 0
| 0
| 0
| 0.089514
| 0.053708
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.166667
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0b987edf568de32ee6c05d30261bbe4ded56c15
| 2,682
|
py
|
Python
|
constants.py
|
xuefei1/Graph-Seq2Attn
|
336c69877e483c95d9996ee205d2a005342f08af
|
[
"MIT"
] | 1
|
2020-01-06T07:49:46.000Z
|
2020-01-06T07:49:46.000Z
|
constants.py
|
xuefei1/Graph-Seq2Attn
|
336c69877e483c95d9996ee205d2a005342f08af
|
[
"MIT"
] | 1
|
2020-04-16T10:15:27.000Z
|
2020-04-16T16:41:42.000Z
|
constants.py
|
xuefei1/Graph-Seq2Attn
|
336c69877e483c95d9996ee205d2a005342f08af
|
[
"MIT"
] | null | null | null |
# one identifier for one type of dict
# for instance, DK_SOME_KEY means this is a key for a data_dict
DK_BATCH_SIZE = "batch_size"
DK_PAD = "pad" # DK: general purpose data_dict
DK_SRC_WID = "src_wid" # src = msg + ctx
DK_SRC_WID_MASK = "src_wid_mask"
DK_SRC_SEQ_MASK = "src_seq_mask"
DK_MSG_WID = "msg_wid" # msg is usually shorter than ctx
DK_MSG_WID_MASK = "msg_wid_mask"
DK_CTX_WID = "ctx_wid" # msg is usually shorter than ctx
DK_CTX_WID_MASK = "ctx_wid_mask"
DK_SRC_POS = "src_pos"
DK_SRC_NER = "src_ner"
DK_SRC_SEG_LISTS = "src_seg_lists"
DK_TGT_GEN_WID = "tgt_gen_wid"
DK_TGT_CPY_WID = "tgt_cpy_wid"
DK_TGT_CPY_GATE = "tgt_cpy_gate"
DK_TGT_N_TOKEN = "tgt_n_token"
DK_TGT_SEG_LISTS = "tgt_seg_lists"
DK_SRC_IOB = "src_iob" # iob: SQuAD QG specific
DK_DOC_WID = "doc_wid"
DK_DOC_SEG_LISTS = "doc_seg_lists"
DK_DOC_WID_MASK = "doc_wid_mask"
DK_DOC_SENTS_WID = "doc_sents_wid"
DK_DOC_SENTS_WID_MASK = "doc_sents_wid_mask"
DK_TITLE_WID = "title_wid"
DK_TQ_SEG_LISTS = "title_seg_lists"
DK_TITLE_WID_MASK = "title_wid_mask"
DK_CONCEPT_SEG_LISTS = "concept_seg_lists"
DK_TGT_CONCEPT_GEN_WID = "tgt_concept_gen_wid" # concept gen specific
DK_TGT_CONCEPT_CPY_WID = "tgt_concept_cpy_wid"
DK_TGT_CONCEPT_CPY_GATE = "tgt_concept_cpy_gate"
DK_TGT_CONCEPT_N_TOKEN = "tgt_concept_n_token"
DK_TGT_TITLE_GEN_WID = "tgt_title_gen_wid" # title gen specific
DK_TGT_TITLE_CPY_WID = "tgt_title_cpy_wid"
DK_TGT_TITLE_CPY_GATE = "tgt_title_cpy_gate"
DK_TGT_TITLE_N_TOKEN = "tgt_title_n_token"
DK_SENT_DEPEND_GRAPH_LIST = "sent_depend_graph_list"
DK_DOC_KW_DIST_GRAPH = "doc_kw_dist_graph"
DK_DOC_SENT_MEAN_TFIDF_SIM_GRAPH = "doc_sent_mean_tfidf_sim_graph"
DK_DOC_SENT_PAIR_TFIDF_SIM_GRAPH = "doc_sent_pair_tfidf_sim_graph"
DK_DOC_SENT_WORD_OVERLAP_GRAPH = "doc_sent_word_overlap_graph"
DK_G2S_WID_GRAPH = "graph2seq_wid_graph"
SQGK_SRC_W_LIST = "src_word_list" # SQGK: SQuAD data reader keys
SQGK_SRC_IOB_LIST = "src_iob_list"
SQGK_SRC_POS_LIST = "src_pos_list"
SQGK_SRC_NER_LIST = "src_ner_list"
SQGK_TGT_W_LIST = "tgt_word_list"
SQGK_DATA_LIST = "data_list"
SQGK_IOB_T2I = "iob_t2i"
SQGK_POS_T2I = "pos_t2i"
SQGK_NER_T2I = "ner_t2i"
CHKPT_COMPLETED_EPOCHS = "completed_epochs" # CHKPT: checkpoint dict keys
CHKPT_MODEL = "model"
CHKPT_OPTIMIZER = "optimizer"
CHKPT_METADATA = "metadata"
CHKPT_PARAMS = "params"
CHKPT_BEST_EVAL_RESULT = "best_eval_result"
CHKPT_BEST_EVAL_EPOCH = "best_eval_epoch"
CHKPT_PAST_EVAL_RESULTS = "past_eval_results"
GK_EDGE_WEIGHT = "edge_weight" # GK: graph keys
GK_EDGE_WORD_PAIR = "edge_word_pair"
GK_EDGE_GV_IDX_PAIR = "edge_v_idx_pair"
GK_EDGE_TYPE = "edge_type"
GK_EDGE_DIR = "edge_directed"
GK_EDGE_UNDIR = "edge_undirected"
GK_SENT_DEP = "sentence_depends"
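# A minimal, hypothetical usage sketch (the variable names below are illustrative,
# not part of this module): a data_dict produced by a reader would be indexed with
# the DK_* keys, e.g.
#   batch = {DK_BATCH_SIZE: 32, DK_SRC_WID: src_word_ids, DK_SRC_WID_MASK: src_mask}
#   n = batch[DK_BATCH_SIZE]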
| 37.774648
| 73
| 0.818792
| 500
| 2,682
| 3.77
| 0.19
| 0.034483
| 0.028647
| 0.017507
| 0.110345
| 0.089655
| 0.032891
| 0.032891
| 0
| 0
| 0
| 0.003318
| 0.101044
| 2,682
| 70
| 74
| 38.314286
| 0.778515
| 0.12789
| 0
| 0
| 0
| 0
| 0.368874
| 0.046002
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0bd703517c8b3f6a8e778d87ff497a305805d45
| 12,308
|
py
|
Python
|
tests/evaluator_test.py
|
NightShade256/prymate
|
deeb81ab685854599d803719971e85ead6699a90
|
[
"MIT"
] | 6
|
2020-06-22T14:54:55.000Z
|
2021-12-13T12:33:21.000Z
|
tests/evaluator_test.py
|
NightShade256/prymate
|
deeb81ab685854599d803719971e85ead6699a90
|
[
"MIT"
] | null | null | null |
tests/evaluator_test.py
|
NightShade256/prymate
|
deeb81ab685854599d803719971e85ead6699a90
|
[
"MIT"
] | 1
|
2020-10-11T18:31:57.000Z
|
2020-10-11T18:31:57.000Z
|
import unittest
from prymate import evaluator, objects
from prymate.lexer import Lexer
from prymate.parser import Parser
class TestEvaluator(unittest.TestCase):
def test_eval_numeric_exp(self):
tests = [
["5", 5],
["10", 10],
["-5", -5],
["-10", -10],
["5 + 5 + 5 + 5 - 10", 10],
["2 * 2 * 2 * 2 * 2", 32],
["-50 + 100 + -50", 0],
["5 * 2 + 10", 20],
["5 + 2 * 10", 25],
["20 + 2 * -10", 0],
["50 / 2 * 2 + 10", 60],
["2 * (5 + 10)", 30],
["3 * 3 * 3 + 10", 37],
["3 * (3 * 3) + 10", 37],
["(5 + 10 * 2 + 15 / 3) * 2 + -10", 50],
["2 * 3 % 4 * 2", 4],
["2 * 3 % 4 * 2 - 10", -6],
["2.2 * 2", 4.4],
["12 / 6", 2.0],
["12.2 / 2", 6.1],
["1.5 * -1.5", -2.25],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
self._test_int_object(evaluated, tt[1])
def test_eval_bool_exp(self):
tests = [
["true", True],
["false", False],
["1 < 2", True],
["1 > 2", False],
["1 < 1", False],
["1 > 1", False],
["1 == 1", True],
["1 != 1", False],
["1 == 2", False],
["1 != 2", True],
["true == true", True],
["false == false", True],
["true == false", False],
["true != false", True],
["false != true", True],
["(1 < 2) == true", True],
["(1 < 2) == false", False],
["(1 > 2) == true", False],
["(1 > 2) == false", True],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
self._test_boolean_object(evaluated, tt[1])
def test_bang_operator(self):
tests = [
["!true", False],
["!false", True],
["!5", False],
["!!true", True],
["!!false", False],
["!!5", True],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
self._test_boolean_object(evaluated, tt[1])
def test_if_exp(self):
tests = [
["if (true) { 10 }", 10],
["if (false) { 10 }", None],
["if (1) { 10 }", 10],
["if (1 < 2) { 10 }", 10],
["if (1 > 2) { 10 }", None],
["if (1 > 2) { 10 } else { 20 }", 20],
["if (1 < 2) { 10 } else { 20 }", 10],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
if isinstance(tt[1], int):
self._test_int_object(evaluated, int(tt[1]))
else:
self._test_null_object(evaluated)
def test_return_statements(self):
tests = [
["return 10;", 10],
["return 10; 9;", 10],
["return 2 * 5; 9;", 10],
["9; return 2 * 5; 9;", 10],
["9; return 2 * 5.2; 9;", 10.4],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
self._test_int_object(evaluated, tt[1])
def test_error_handling(self):
tests = [
["5 + true;", "type mismatch: INTEGER + BOOLEAN"],
["5 + true; 5;", "type mismatch: INTEGER + BOOLEAN"],
["-true", "unknown operator: -BOOLEAN"],
["true + false;", "unknown operator: BOOLEAN + BOOLEAN"],
["true % false;", "unknown operator: BOOLEAN % BOOLEAN"],
["5; true + false; 5", "unknown operator: BOOLEAN + BOOLEAN"],
["if (10 > 1) { true + false; }", "unknown operator: BOOLEAN + BOOLEAN"],
[
"""
if (10 > 1) {
  if (10 > 1) {
    return true + false;
  }
  return 1;
}
""",
"unknown operator: BOOLEAN + BOOLEAN",
],
["foobar", "identifier not found: foobar"],
['"Hello" - "World"', "unknown operator: STRING - STRING"],
[
'{"name": "Monkey"}[fn(x) { x }];',
"unusable as dictionary key: FUNCTION",
],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
if not isinstance(evaluated, objects.Error):
self.fail(f"No error object returned. Got {evaluated}.")
self.assertEqual(
evaluated.message,
tt[1],
f"Wrong error message. Expected {tt[1]}, got {evaluated.message}.",
)
def test_let_statements(self):
tests = [
["let a = 5; a;", 5],
["let a = 5 * 5; a;", 25],
["let a = 5; let b = a; b;", 5],
["let a = 5; let b = a; let c = a + b + 5; c;", 15],
]
for tt in tests:
self._test_int_object(self._test_eval(tt[0]), tt[1])
def test_function_object(self):
input_case = "fn(x) { x + 2; };"
evaluated = self._test_eval(input_case)
if not isinstance(evaluated, objects.Function):
self.fail(f"Object is not Function. Got, {evaluated}")
self.assertEqual(
len(evaluated.parameters),
1,
f"Function has wrong parameters, got {len(evaluated.parameters)}.",
)
self.assertEqual(
str(evaluated.parameters[0]),
"x",
f"Parameter is not 'x', got {str(evaluated.parameters[0])}.",
)
expected_body = "(x + 2)"
self.assertEqual(
str(evaluated.body),
expected_body,
f"body is not {expected_body}. got {str(evaluated.body)}.",
)
def test_function_application(self):
tests = [
["let identity = fn(x) { x; }; identity(5);", 5],
["let identity = fn(x) { return x; }; identity(5);", 5],
["let double = fn(x) { x * 2; }; double(5);", 10],
["let add = fn(x, y) { x + y; }; add(5, 5);", 10],
["let add = fn(x, y) { x + y; }; add(5 + 5, add(5, 5));", 20],
["fn(x) { x; }(5)", 5],
]
for tt in tests:
self._test_int_object(self._test_eval(tt[0]), tt[1])
def test_string_literal(self):
input_case = '"Hello, World!"'
evaluated = self._test_eval(input_case)
if not isinstance(evaluated, objects.String):
self.fail(f"Object is not String, got {evaluated}.")
self.assertEqual(
evaluated.value,
"Hello, World!",
f"String has wrong value. Got {evaluated.value}.",
)
def test_string_concatenation(self):
input_case = '"Hello" + " " + "World!"'
evaluated = self._test_eval(input_case)
if not isinstance(evaluated, objects.String):
self.fail(f"Object is not String, got {evaluated}.")
self.assertEqual(
evaluated.value,
"Hello World!",
f"String has wrong value. Got {evaluated.value}.",
)
def test_builtin_functions(self):
tests = [
['len("")', 0],
['len("four")', 4],
['len("hello world")', 11],
["len(1)", "argument to `len` not supported, got INTEGER"],
['len("one", "two")', "wrong number of arguments. got=2, want=1"],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
if isinstance(tt[1], int):
self._test_int_object(evaluated, tt[1])
elif isinstance(tt[1], str):
if not isinstance(evaluated, objects.Error):
self.fail(f"Object not of type Error, got {evaluated.tp().value}.")
self.assertEqual(
evaluated.message,
tt[1],
f"Wrong error message. Expected={tt[1]}, Got={evaluated.message}.",
)
def test_array_literals(self):
input_case = "[1, 2 * 2, 3 + 3];"
ev = self._test_eval(input_case)
if not isinstance(ev, objects.Array):
self.fail(f"Object not of type Array, got {ev.tp().value}.")
self.assertEqual(
len(ev.elements),
3,
f"Array has wrong number of elements. Got {len(ev.elements)}.",
)
self._test_int_object(ev.elements[0], 1)
self._test_int_object(ev.elements[1], 4)
self._test_int_object(ev.elements[2], 6)
def test_array_index_exp(self):
tests = [
["[1, 2, 3][0]", 1],
["[1, 2, 3][1]", 2],
["[1, 2, 3][2]", 3],
["let i = 0; [1][i];", 1],
["[1, 2, 3][1 + 1];", 3],
["let myArray = [1, 2, 3]; myArray[2];", 3],
["let myArray = [1, 2, 3]; myArray[0] + myArray[1] + myArray[2];", 6],
["let myArray = [1, 2, 3]; let i = myArray[0]; myArray[i]", 2],
["[1, 2, 3][3]", None],
["[1, 2, 3][-1]", None],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
if isinstance(tt[1], int):
self._test_int_object(evaluated, tt[1])
else:
self._test_null_object(evaluated)
def test_dictionary(self):
input_case = """
let two = "two";
{
"one": 10 - 9,
two: 1 + 1,
"thr" + "ee": 6 / 2,
4: 4,
true: 5,
false: 6
}
"""
evaluated = self._test_eval(input_case)
if not isinstance(evaluated, objects.Dictionary):
self.fail(f"Eval didn't return Hash. Got {evaluated}.")
expected = {
objects.String("one").hashkey(): 1,
objects.String("two").hashkey(): 2,
objects.String("three").hashkey(): 3,
objects.Integer(4).hashkey(): 4,
objects.Boolean(True).hashkey(): 5,
objects.Boolean(False).hashkey(): 6,
}
self.assertEqual(
len(evaluated.pairs),
len(expected),
f"Hash has wrong num of pairs. Got {len(evaluated.pairs)}.",
)
for key, val in expected.items():
pair = evaluated.pairs.get(key, None)
self.assertNotEqual(pair, None, "No pair for given key in pairs")
self._test_int_object(pair.value, val)
def test_dict_index_exp(self):
tests = [
['{"foo": 5}["foo"]', 5],
['{"foo": 5}["bar"]', None],
['let key = "foo"; {"foo": 5}[key]', 5],
['{}["foo"]', None],
["{5: 5}[5]", 5],
["{true: 5}[true]", 5],
["{false: 5}[false]", 5],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
integer = tt[1]
if integer is not None:
self._test_int_object(evaluated, int(integer))
else:
self._test_null_object(evaluated)
def _test_int_object(self, obj: objects.Object, expected: int):
if not isinstance(obj, objects.Integer) and not isinstance(obj, objects.Float):
self.fail(f"Expected object to be Integer/Float, got {type(obj)}.")
self.assertEqual(
obj.value,
expected,
f"Object has wrong value. Expected {expected}, got {obj.value}.",
)
def _test_boolean_object(self, obj: objects.Object, expected: bool):
if not isinstance(obj, objects.Boolean):
self.fail(f"Expected object to be Boolean, got {type(obj)}.")
self.assertEqual(
obj.value,
expected,
f"Object has wrong value. Expected {expected}, got {obj.value}.",
)
def _test_null_object(self, obj: objects.Object):
if obj is not objects.Null():
self.fail(f"Object is not NULL. Got {obj}.")
def _test_eval(self, input_case: str):
lexer = Lexer(input_case)
parser = Parser(lexer)
env = objects.Environment()
program = parser.parse_program()
return evaluator.evaluate(program, env)
if __name__ == "__main__":
unittest.main()
| 32.474934
| 87
| 0.456451
| 1,425
| 12,308
| 3.835088
| 0.111579
| 0.048307
| 0.035133
| 0.049954
| 0.508692
| 0.462946
| 0.397804
| 0.357914
| 0.344373
| 0.322415
| 0
| 0.052632
| 0.379428
| 12,308
| 378
| 88
| 32.560847
| 0.66287
| 0
| 0
| 0.270701
| 0
| 0.015924
| 0.28294
| 0.012427
| 0.003185
| 0
| 0
| 0
| 0.038217
| 1
| 0.063694
| false
| 0
| 0.012739
| 0
| 0.082803
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0be74bdfe9cb84b8767afe5f63676a2412c89f4
| 1,074
|
py
|
Python
|
chrome/common/extensions/docs/examples/extensions/native_messaging/echo.py
|
codenote/chromium-test
|
0637af0080f7e80bf7d20b29ce94c5edc817f390
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2018-03-10T13:08:49.000Z
|
2018-03-10T13:08:49.000Z
|
chrome/common/extensions/docs/examples/extensions/native_messaging/echo.py
|
codenote/chromium-test
|
0637af0080f7e80bf7d20b29ce94c5edc817f390
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
chrome/common/extensions/docs/examples/extensions/native_messaging/echo.py
|
codenote/chromium-test
|
0637af0080f7e80bf7d20b29ce94c5edc817f390
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2020-11-04T07:25:45.000Z
|
2020-11-04T07:25:45.000Z
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# A simple native client in python.
# All this client does is echo the text it receives back at the extension.
import sys
import struct
def Main():
message_number = 0
while 1:
    # Read the 4-byte message length header.
text_length_bytes = sys.stdin.read(4)
if len(text_length_bytes) == 0:
break
    # Unpack the message length (32-bit integer, native byte order).
text_length = struct.unpack('i', text_length_bytes)[0]
# Read the text (JSON object) of the message.
text = sys.stdin.read(text_length).decode('utf-8')
message_number += 1
response = '{{"id": {0}, "echo": {1}}}'.format(message_number,
text).encode('utf-8')
try:
sys.stdout.write(struct.pack("I", len(response)))
sys.stdout.write(response)
sys.stdout.flush()
except IOError:
break
if __name__ == '__main__':
Main()
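# The framing above is the native messaging wire format: a 4-byte length header
# (packed with struct in the host's native byte order) followed by the UTF-8 JSON
# payload. A hypothetical sender would frame a message the same way, e.g.:
#   payload = '{"text": "hello"}'.encode('utf-8')
#   sys.stdout.write(struct.pack('I', len(payload)))
#   sys.stdout.write(payload)
#   sys.stdout.flush()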
| 25.571429
| 74
| 0.633147
| 155
| 1,074
| 4.264516
| 0.522581
| 0.075643
| 0.068079
| 0.048412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019901
| 0.251397
| 1,074
| 41
| 75
| 26.195122
| 0.802239
| 0.377095
| 0
| 0.095238
| 0
| 0
| 0.069697
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.095238
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0bf9ddf2a1b5e4b50f545954e0579d25793cb8e
| 1,748
|
py
|
Python
|
Wrappers/Python/setup.py
|
lauramurgatroyd/CILViewer
|
3aafa4693498a55ffd270c55118399dd807dee5f
|
[
"Apache-2.0"
] | null | null | null |
Wrappers/Python/setup.py
|
lauramurgatroyd/CILViewer
|
3aafa4693498a55ffd270c55118399dd807dee5f
|
[
"Apache-2.0"
] | null | null | null |
Wrappers/Python/setup.py
|
lauramurgatroyd/CILViewer
|
3aafa4693498a55ffd270c55118399dd807dee5f
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2017 Edoardo Pasca
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Created on Wed Jun 7 09:57:13 2017
@author: ofn77899
"""
from distutils.core import setup
#from setuptools import setup, find_packages
import os
import sys
cil_version = "20.07.4"
setup(
name="ccpi-viewer",
version=cil_version,
packages=['ccpi','ccpi.viewer', 'ccpi.viewer.utils'],
install_requires=['numpy','vtk'],
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
#install_requires=['docutils>=0.3'],
# package_data={
# # If any package contains *.txt or *.rst files, include them:
# '': ['*.txt', '*.rst'],
# # And include any *.msg files found in the 'hello' package, too:
# 'hello': ['*.msg'],
# },
zip_safe = False,
# metadata for upload to PyPI
author="Edoardo Pasca",
author_email="edo.paskino@gmail.com",
description='CCPi Core Imaging Library - VTK Viewer Module',
license="Apache v2.0",
keywords="3D data viewer",
url="http://www.ccpi.ac.uk", # project home page, if any
# could also include long_description, download_url, classifiers, etc.
)
| 31.214286
| 76
| 0.677346
| 243
| 1,748
| 4.831276
| 0.625514
| 0.051107
| 0.022147
| 0.027257
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025216
| 0.20595
| 1,748
| 55
| 77
| 31.781818
| 0.820605
| 0.673341
| 0
| 0
| 0
| 0
| 0.342056
| 0.039252
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.176471
| 0
| 0.176471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0c48f41277d0c455a9e37dabfa1c49d07148ba0
| 1,982
|
py
|
Python
|
src/server/ClientHandler.py
|
ENDERZOMBI102/chatapp
|
3f54e72a8d3b10457cf88ec5f87b2984cc84a51f
|
[
"MIT"
] | 1
|
2021-06-20T05:47:53.000Z
|
2021-06-20T05:47:53.000Z
|
src/server/ClientHandler.py
|
ENDERZOMBI102/chatapp
|
3f54e72a8d3b10457cf88ec5f87b2984cc84a51f
|
[
"MIT"
] | null | null | null |
src/server/ClientHandler.py
|
ENDERZOMBI102/chatapp
|
3f54e72a8d3b10457cf88ec5f87b2984cc84a51f
|
[
"MIT"
] | null | null | null |
import asyncio
import traceback
from asyncio import StreamWriter, StreamReader, Task
from .BaseClientHandler import BaseClientHandler
from data import Message
class ClientHandler(BaseClientHandler):
_inputTask: Task
_errorCheckTask: Task
reader: StreamReader
writer: StreamWriter
# noinspection PyUnresolvedReferences
def __init__( self, server: 'AServer', reader: StreamReader, writer: StreamWriter ):
super().__init__( server, ':'.join( [ str(i) for i in writer.get_extra_info('peername') ] ) )
self.reader = reader
self.writer = writer
print( f'[{self.addr}] starting input loop' )
self._inputTask = asyncio.create_task( self.InputLoop() )
self._errorCheckTask = asyncio.create_task( self.CheckErrors() )
async def Send( self, message: Message ) -> None:
if self.isAlive():
message = await self.ReplacePlaceholders(message)
enc_message = message.toJson().encode( 'utf8' )
header = int.to_bytes( len( enc_message ), length=4, byteorder='big' )
self.writer.write( header )
self.writer.write( enc_message )
await self.writer.drain()
async def CheckErrors( self ) -> None:
while True:
await asyncio.sleep(10)
exc: Exception = self.reader.exception()
if exc is not None:
if isinstance( exc, ConnectionResetError ):
self._alive = False
break
print('Exception on reader:')
traceback.print_exception( type( exc ), exc, exc.__traceback__ )
async def InputLoop( self ) -> None:
while (
self.isAlive() and (
self.reader.exception() is None or
isinstance( self.reader.exception(), ConnectionResetError )
)
):
size = int.from_bytes( await self.reader.read( 4 ), 'big' )
msg = Message.fromJson(
(
await self.reader.read(size)
).decode( 'utf8' )
)
await self.HandleMessage(msg)
print( f'closed connection to [{self.addr}]' )
self._alive = False
def isAlive( self ) -> bool:
return self._alive and not ( self.reader.at_eof() or self.writer.is_closing() )
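# Wire format shared by Send() and InputLoop() above: a 4-byte big-endian length
# header followed by a UTF-8, JSON-encoded Message. A hypothetical peer would read
# one frame with:
#   size = int.from_bytes(await reader.read(4), 'big')
#   msg = Message.fromJson((await reader.read(size)).decode('utf8'))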
| 30.492308
| 95
| 0.700807
| 239
| 1,982
| 5.682008
| 0.393305
| 0.051546
| 0.041973
| 0.053019
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003695
| 0.180626
| 1,982
| 64
| 96
| 30.96875
| 0.832512
| 0.017659
| 0
| 0.037736
| 0
| 0
| 0.060154
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037736
| false
| 0
| 0.09434
| 0.018868
| 0.245283
| 0.075472
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0c5b0e690a24fec09fd97682f7f29681f7e57f6
| 8,658
|
py
|
Python
|
strangeflix/room/consumers.py
|
samsoldeinstein/webster2020
|
9795635e806caa261bb33d629f3d1f2bd603638c
|
[
"MIT"
] | 6
|
2020-11-02T16:40:56.000Z
|
2020-11-07T06:59:00.000Z
|
strangeflix/room/consumers.py
|
samsoldeinstein/webster2020
|
9795635e806caa261bb33d629f3d1f2bd603638c
|
[
"MIT"
] | null | null | null |
strangeflix/room/consumers.py
|
samsoldeinstein/webster2020
|
9795635e806caa261bb33d629f3d1f2bd603638c
|
[
"MIT"
] | 2
|
2020-11-03T05:20:25.000Z
|
2020-11-03T05:38:47.000Z
|
# chat/consumers.py
import json
from channels.generic.websocket import AsyncWebsocketConsumer
from .models import RoomControl
from channels.db import database_sync_to_async
class ChatConsumer(AsyncWebsocketConsumer):
# when a user connect
async def connect(self):
self.user = self.scope['user']
self.room_name = self.scope['url_route']['kwargs']['room_name']
self.room_group_name = 'chat_%s' % self.room_name
if self.user.is_authenticated:
self.is_member = await self.check_if_member()
self.is_host = await self.check_if_host()
if self.is_member or self.is_host:
# Join room group
await self.channel_layer.group_add(
self.room_group_name,
self.channel_name
)
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'add_user',
'user':self.user.username
}
)
await self.accept()
# to check membership of the user
@database_sync_to_async
def check_if_member(self):
return self.user in RoomControl.objects.filter(room_id = self.room_name).first().members.all()
# to check if user is host
@database_sync_to_async
def check_if_host(self):
return self.user == RoomControl.objects.filter(room_id = self.room_name).first().host_user
# when a user exits
async def disconnect(self, close_code):
# Leave room group
if(hasattr(self,'room_group_name')):
await self.channel_layer.group_discard(
self.room_group_name,
self.channel_name
)
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'remove_user',
'user':self.user.username
}
)
# Receive message from WebSocket
async def receive(self, text_data):
if self.user.is_authenticated:
self.is_member = await self.check_if_member()
self.is_host = await self.check_if_host()
if self.is_member or self.is_host:
text_data_json = json.loads(text_data)
message = text_data_json['message']
message_type = text_data_json['type']
# Send message to room group
if message_type == 'chat_message':
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'chat_message',
'message': message,
'user':self.user.username
}
)
if message_type == 'play':
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'play',
'message':message,
'user':self.user.username
}
)
if message_type == 'skip':
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'skip',
'message': message,
'skipAmount': text_data_json['skipAmount'],
'user':self.user.username
}
)
if message_type == 'upd':
await self.channel_layer.group_send(
self.room_group_name,
{
'type':'upd',
'message':message,
'updTime':text_data_json['updTime'],
'user':self.user.username
}
)
if message_type == 'join':
await self.channel_layer.group_send(
self.room_group_name,
{
'type':'join',
'message':message,
'user':self.user.username
}
)
if message_type == 'hostupd' and self.is_host:
await self.channel_layer.group_send(
self.room_group_name,
{
'type':'hostupd',
'message':message,
'pausedStatus':text_data_json['pausedStatus'],
'currentTimeStatus':text_data_json['currentTimeStatus'],
'videoStatus':text_data_json['videoStatus'],
'users':text_data_json['users'],
'user':self.user.username
}
)
if message_type == 'close_room':
                    print('close_room')
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'close_room',
'message': message,
'user':self.user.username
}
)
# Receive message from room group
async def chat_message(self, event):
message = event['message']
# Send message to WebSocket
await self.send(text_data=json.dumps({
'type':'chat_message',
'message': message,
'user':event['user']
}))
#Send play control
async def play(self,event):
message = event['message']
#Send play control to WebSocket
await self.send(text_data=json.dumps({
'type':'play',
'message':message,
'user':event['user']
}))
#Send skip control
async def skip(self,event):
message = event['message']
skipAmount = event['skipAmount']
#Send skip control to WebSocket
await self.send(text_data=json.dumps({
'type':'skip',
'message':message,
'skipAmount':skipAmount,
'user':event['user']
}))
#Send updated time
async def upd(self,event):
message = event['message']
updTime = event['updTime']
#Send updated time control to WebSocket
await self.send(text_data=json.dumps({
'type':'upd',
'message':message,
'updTime':updTime,
'user':event['user']
}))
#Send join request
async def join(self,event):
message = event['message']
#Send join request to WebSocket
await self.send(text_data=json.dumps({
'type':'join',
'message':message,
'user':event['user']
}))
#Send host update
async def hostupd(self,event):
message = event['message']
pausedStatus = event['pausedStatus']
currentTimeStatus = event['currentTimeStatus']
videoStatus = event['videoStatus']
users = event['users']
#Send join request to WebSocket
await self.send(text_data=json.dumps({
'type':'hostupd',
'message':message,
'pausedStatus': pausedStatus,
'currentTimeStatus': currentTimeStatus,
'videoStatus' : videoStatus,
'users':users,
'user':event['user']
}))
#Send add user
async def add_user(self,event):
print(event['user'])
#Send play control to WebSocket
await self.send(text_data=json.dumps({
'type':'add_user',
'user':event['user']
}))
#Send remove user
async def remove_user(self,event):
#Send play control to WebSocket
await self.send(text_data=json.dumps({
'type':'remove_user',
'user':event['user']
}))
# Receive host left from room group
async def close_room(self, event):
message = event['message']
# Send host left to WebSocket
await self.send(text_data=json.dumps({
'type':'close_room',
'message': message,
'user':event['user']
}))
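# Every handler above relays a JSON object shaped like
# {"type": ..., "message": ..., "user": ...} to the WebSocket, plus type-specific
# fields ("skipAmount" for skip, "updTime" for upd, and
# "pausedStatus"/"currentTimeStatus"/"videoStatus"/"users" for hostupd), mirroring
# the group_send payloads built in receive().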
| 36.378151
| 102
| 0.473897
| 793
| 8,658
| 4.992434
| 0.118537
| 0.056833
| 0.054559
| 0.055822
| 0.614044
| 0.512756
| 0.43319
| 0.374337
| 0.374337
| 0.314979
| 0
| 0
| 0.430931
| 8,658
| 238
| 103
| 36.378151
| 0.803532
| 0.075999
| 0
| 0.551546
| 0
| 0
| 0.101593
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010309
| false
| 0
| 0.020619
| 0.010309
| 0.046392
| 0.010309
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0c819007b9eb94c341aa70c5a8a5172d3857e95
| 7,342
|
py
|
Python
|
src/cnnclustering/hooks.py
|
janjoswig/CNN
|
06ab0e07da46141cca941e99ac1a11ddc7ce233d
|
[
"MIT"
] | 4
|
2020-06-16T13:33:57.000Z
|
2021-01-05T18:19:57.000Z
|
src/cnnclustering/hooks.py
|
janjoswig/CNN
|
06ab0e07da46141cca941e99ac1a11ddc7ce233d
|
[
"MIT"
] | 12
|
2019-10-22T09:15:09.000Z
|
2020-07-02T09:42:44.000Z
|
src/cnnclustering/hooks.py
|
janjoswig/CommonNNClustering
|
06ab0e07da46141cca941e99ac1a11ddc7ce233d
|
[
"MIT"
] | null | null | null |
import numpy as np
from cnnclustering._primitive_types import P_AINDEX, P_AVALUE
from cnnclustering import _types, _fit
COMPONENT_ALT_KW_MAP = {
"input": "input_data",
"data": "input_data",
"n": "neighbours",
"na": "neighbours",
"nb": "neighbour_neighbours",
"getter": "neighbours_getter",
"ogetter": "neighbours_getter_other",
"ngetter": "neighbours_getter",
"ongetter": "neighbours_getter_other",
"dgetter": "distance_getter",
"checker": "similarity_checker",
"q": "queue",
}
COMPONENT_KW_TYPE_ALIAS_MAP = {
"neighbour_neighbours": "neighbours",
"neighbour_getter_other": "neighbours_getter",
}
COMPONENT_NAME_TYPE_MAP = {
"input_data": {
"components_mview": _types.InputDataExtComponentsMemoryview,
"neighbourhoods_mview": _types.InputDataExtNeighbourhoodsMemoryview
},
"neighbours_getter": {
"brute_force": _types.NeighboursGetterExtBruteForce,
"lookup": _types.NeighboursGetterExtLookup,
},
"distance_getter": {
"metric": _types.DistanceGetterExtMetric,
"lookup": _types.DistanceGetterExtLookup,
},
"neighbours": {
"vector": _types.NeighboursExtVector,
"uset": _types.NeighboursExtCPPUnorderedSet,
"vuset": _types.NeighboursExtVectorCPPUnorderedSet,
},
"metric": {
"dummy": _types.MetricExtDummy,
"precomputed": _types.MetricExtPrecomputed,
"euclidean": _types.MetricExtEuclidean,
"euclidean_r": _types.MetricExtEuclideanReduced,
"euclidean_periodic_r": _types.MetricExtEuclideanPeriodicReduced,
"euclidean_reduced": _types.MetricExtEuclideanReduced,
"euclidean_periodic_reduced": _types.MetricExtEuclideanPeriodicReduced,
},
"similarity_checker": {
"contains": _types.SimilarityCheckerExtContains,
"switch": _types.SimilarityCheckerExtSwitchContains,
"screen": _types.SimilarityCheckerExtScreensorted,
},
"queue": {
"fifo": _types.QueueExtFIFOQueue
},
"fitter": {
"bfs": _fit.FitterExtBFS,
"bfs_debug": _fit.FitterExtBFSDebug
}
}
def get_registered_recipe(key):
registered_recipes = {
"none": {},
"coordinates": {
"input_data": "components_mview",
"fitter": "bfs",
"fitter.ngetter": "brute_force",
"fitter.na": "vuset",
"fitter.checker": "switch",
"fitter.queue": "fifo",
"fitter.ngetter.dgetter": "metric",
"fitter.ngetter.dgetter.metric": "euclidean_r",
},
"distances": {
"input_data": "components_mview",
"fitter": "bfs",
"fitter.ngetter": "brute_force",
"fitter.na": "vuset",
"fitter.checker": "switch",
"fitter.queue": "fifo",
"fitter.ngetter.dgetter": "metric",
"fitter.ngetter.dgetter.metric": "precomputed",
},
"neighbourhoods": {
"input_data": "neighbourhoods_mview",
"fitter": "bfs",
"fitter.ngetter": "lookup",
"fitter.na": "vuset",
"fitter.checker": "switch",
"fitter.queue": "fifo",
},
"sorted_neighbourhoods": {
"input_data": "neighbourhoods_mview",
"fitter": "bfs",
"fitter.ngetter": ("lookup", (), {"is_sorted": True}),
"fitter.na": "vector",
"fitter.checker": "screen",
"fitter.queue": "fifo",
}
}
return registered_recipes[key.lower()]
def prepare_pass(data):
"""Dummy preparation hook
Use if no preparation of input data is desired.
Args:
data: Input data that should be prepared.
Returns:
(data,), {}
"""
return (data,), {}
def prepare_points_from_parts(data):
r"""Prepare input data points
Use when point components are passed as sequence of parts, e.g. as
>>> input_data, meta = prepare_points_parts([[[0, 0],
... [1, 1]],
... [[2, 2],
... [3,3]]])
>>> input_data
array([[0, 0],
[1, 1],
[2, 2],
[3, 3]])
>>> meta
{"edges": [2, 2]}
Recognised data formats are:
* Sequence of length *d*:
interpreted as 1 point with *d* components.
* 2D Sequence (sequence of sequences all of same length) with
length *n* (rows) and width *d* (columns):
interpreted as *n* points with *d* components.
* Sequence of 2D sequences all of same width:
interpreted as parts (groups) of points.
The returned input data format is compatible with:
* `cnnclustering._types.InputDataExtPointsMemoryview`
Args:
data: Input data that should be prepared.
Returns:
* Formatted input data (NumPy array of shape
:math:`\sum n_\mathrm{part}, d`)
* Dictionary of meta-information
Notes:
Does not catch deeper nested formats.
"""
try:
d1 = len(data)
except TypeError as error:
raise error
finished = False
if d1 == 0:
# Empty sequence
data = [np.array([[]])]
finished = True
if not finished:
try:
d2 = [len(x) for x in data]
all_d2_equal = (len(set(d2)) == 1)
except TypeError:
# 1D Sequence
data = [np.array([data])]
finished = True
if not finished:
try:
d3 = [len(y) for x in data for y in x]
all_d3_equal = (len(set(d3)) == 1)
except TypeError:
if not all_d2_equal:
raise ValueError(
"Dimension mismatch"
)
# 2D Sequence of sequences of same length
data = [np.asarray(data)]
finished = True
if not finished:
if not all_d3_equal:
raise ValueError(
"Dimension mismatch"
)
# Sequence of 2D sequences of same width
data = [np.asarray(x) for x in data]
finished = True
meta = {}
meta["edges"] = [x.shape[0] for x in data]
data_args = (np.asarray(np.vstack(data), order="C", dtype=P_AVALUE),)
data_kwargs = {"meta": meta}
return data_args, data_kwargs
def prepare_neighbourhoods(data):
"""Prepare neighbourhood information by padding
Args:
data: Expects a sequence of sequences with neighbour indices.
Returns:
Data as a 2D NumPy array of shape (#points, max. number of neighbours)
and a 1D array with the actual number of neighbours for each point (data
args). Also returns meta information (data kwargs).
"""
n_neighbours = [len(s) for s in data]
pad_to = max(n_neighbours)
data = [
np.pad(a, (0, pad_to - n_neighbours[i]), mode="constant", constant_values=0)
for i, a in enumerate(data)
]
meta = {}
data_args = (
np.asarray(data, order="C", dtype=P_AINDEX),
np.asarray(n_neighbours, dtype=P_AINDEX)
)
data_kwargs = {"meta": meta}
return data_args, data_kwargs
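# A minimal usage sketch (hypothetical data, not from the original test suite):
#   >>> (padded, counts), kwargs = prepare_neighbourhoods([[1, 2, 3], [0]])
#   >>> padded.shape, list(counts)
#   ((2, 3), [3, 1])
# i.e. the ragged neighbour lists are zero-padded to the longest list and the true
# neighbour counts are returned alongside.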
| 28.679688
| 84
| 0.566194
| 725
| 7,342
| 5.566897
| 0.284138
| 0.033449
| 0.013875
| 0.019822
| 0.223489
| 0.191774
| 0.169723
| 0.169723
| 0.165758
| 0.111497
| 0
| 0.008357
| 0.315445
| 7,342
| 255
| 85
| 28.792157
| 0.794668
| 0.243667
| 0
| 0.278481
| 0
| 0
| 0.244258
| 0.040523
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025316
| false
| 0.006329
| 0.018987
| 0
| 0.06962
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0c895b700d9298c6544f69260721fb2fce2376e
| 15,620
|
py
|
Python
|
cybergis/jobs.py
|
cybergis/jupyterlib
|
b39cf9c525b52fc9f67a388a751126df00b498f2
|
[
"NCSA"
] | 5
|
2017-11-08T15:32:09.000Z
|
2019-12-20T03:05:34.000Z
|
cybergis/jobs.py
|
cybergis/jupyterlib
|
b39cf9c525b52fc9f67a388a751126df00b498f2
|
[
"NCSA"
] | null | null | null |
cybergis/jobs.py
|
cybergis/jupyterlib
|
b39cf9c525b52fc9f67a388a751126df00b498f2
|
[
"NCSA"
] | 1
|
2019-12-20T02:46:56.000Z
|
2019-12-20T02:46:56.000Z
|
#!/usr/bin/env python
from __future__ import print_function
from ipywidgets import *
from IPython.display import display
from getpass import getpass
import glob
import os
import stat
import paramiko
from string import Template
from os.path import expanduser
from pkg_resources import resource_string
from IPython.core.magic import (register_line_magic, register_cell_magic,register_line_cell_magic)
import hashlib
from itertools import izip,cycle
from IPython.display import IFrame
USERNAME = os.environ['USER']
CONF_DIR='.rg_conf'
CONF_MOD=int('700', 8) # exclusive access
CONF_FILE='%s/%s'%(CONF_DIR, USERNAME)
#ROGER_PRJ='/projects/class/jhub/users'
#JUPYTER_HOME='/mnt/jhub/users'
ROGER_PRJ='/projects/jupyter'
JUPYTER_HOME='/home'
def encrypt(plaintext):
ciphertext = ''.join(chr(ord(x) ^ ord(y)) for (x,y) in izip(plaintext, cycle(hashlib.sha256(USERNAME).hexdigest())))
return ciphertext.encode('base64')
def decrypt(ciphertext):
ciphertext = ciphertext.decode('base64')
return ''.join(chr(ord(x) ^ ord(y)) for (x,y) in izip(ciphertext, cycle(hashlib.sha256(USERNAME).hexdigest())))
def Labeled(label, widget):
width='130px'
return (Box([HTML(value='<p align="right" style="width:%s">%s  </p>'%(width,label)),widget],
layout=Layout(display='flex',align_items='center',flex_flow='row')))
def listExeutables(folder='.'):
executable = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH
return [filename for filename in os.listdir(folder)
if os.path.isfile(filename)]# and (os.stat(filename).st_mode & executable)]
def tilemap(tif, name, overwrite=False, overlay=None,tilelvl=[9,13]):
id=hashlib.sha1(name).hexdigest()[:10]
if overwrite:
os.system('rm -rf %s'%id)
os.system('gdal2tiles.py -e -z %d-%d -a 0,0,0 -s epsg:4326 -r bilinear -t "%s" %s -z 8-14 %s'%(tilelvl[0], tilelvl[1], name,tif,id))
with open('%s/leaflet.html'%id) as input:
s=input.read()
s=s.replace('http://cdn.leafletjs.com','https://cdn.leafletjs.com')
s=s.replace('http://{s}.tile.osm.org','https://{s}.tile.openstreetmap.org')
addLayer='map.addLayer(lyr);'
if overlay:
os.system("wget 'https://raw.githubusercontent.com/calvinmetcalf/leaflet-ajax/master/dist/leaflet.ajax.min.js' -O %s/leaflet.ajax.min.js"%id)
s=s.replace('leaflet.js"></script>','leaflet.js"></script>\n<script src="leaflet.ajax.min.js"></script>')
vectorNewLayers = []
vectorOverlay = []
vectorAdd = []
for vecFile,vecName in overlay:
vecId=hashlib.sha1(vecName).hexdigest()[:10]
os.system('ogr2ogr -f "geojson" %s/%s.json %s'%(id,vecId,vecFile))
vectorNewLayers.append('var vecLayer%s = new L.GeoJSON.AJAX("%s.json");'%(vecId,vecId))
vectorOverlay.append('"%s":vecLayer%s'%(vecName, vecId))
vectorAdd.append('map.addLayer(vecLayer%s);'%vecId)
s=s.replace('// Map','\n'.join(vectorNewLayers)+'\n // Map')
s=s.replace('{"Layer": lyr}','{'+','.join(vectorOverlay)+', "Layer": lyr}')
addLayer+='\n'.join(vectorAdd)
s=s.replace(').addTo(map);',').addTo(map); '+addLayer)
with open('%s/leaflet.html'%id,'w') as output:
output.write(s)
return IFrame('%s/leaflet.html'%id, width='1000',height='600')
class Job():
def __init__(self):
#user=widgets.Text(value=USERNAME,placeholder='Your ROGER Account name', description='Username',disabled=False)
#display(user)
#pw=getpass(prompt='Password')
#paramiko.util.log_to_file("ssh.log")
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.homeDir = '%s/%s'%(JUPYTER_HOME,USERNAME)
self.jobDir = self.homeDir + '/.jobs'
if not os.path.exists(self.jobDir):
os.makedirs(self.jobDir)
self.userName = USERNAME
self.rogerRoot = '%s/%s'%(ROGER_PRJ, self.userName)
self.rogerJobDir = self.rogerRoot + '/.jobs'
self.relPath = os.path.relpath(os.getcwd(), self.homeDir)
self.rogerPath = self.rogerRoot + '/' + self.relPath
self.editMode = True
self.jobId = None
with open(os.path.dirname(__file__)+'/qsub.template') as input:
self.job_template=Template(input.read())
self.login()
def login(self):
if not os.path.exists(CONF_DIR):
os.makedirs(CONF_DIR)
if stat.S_IMODE(os.stat(CONF_DIR).st_mode)!=CONF_MOD:
os.chmod(CONF_DIR, stat.S_IREAD | stat.S_IWUSR | stat.S_IXUSR)
if not os.path.exists(CONF_FILE):
#user=widgets.Text(value=USERNAME,placeholder='Your Roger Username', description='Username',disabled=False)
#display(user)
login_success = False
while not login_success:
pw=getpass(prompt='Password')
try:
self.client.connect('roger-login.ncsa.illinois.edu', username=USERNAME, password=pw)
self.sftp=self.client.open_sftp()
except Exception as e:
print(e)
else:
print('Successfully logged in as %s'%self.userName)
login_success = True
with open(CONF_FILE,'w') as output:
output.write(encrypt(pw))
else:
pw=decrypt(open(CONF_FILE).read())
try:
self.client.connect('roger-login.ncsa.illinois.edu', username=USERNAME, password=pw)
#key = paramiko.RSAKey.from_private_key_file(self.homeDir+'/.ssh/roger.key')
#self.client.connect('roger-login.ncsa.illinois.edu', username='dyin4', pkey = key)
self.sftp=self.client.open_sftp()
except Exception as e:
print(e)
else:
print('Successfully logged in as %s'%self.userName)
def submit(self,jobName='test',entrance='test.sh',nNodes=4,ppn=1,isGPU=False,walltime=1,submit=False,hideUI=False):
self.jobName=jobName
self.entrance=entrance
self.nNodes=nNodes
self.ppn=ppn
self.isGPU=isGPU
self.walltime=walltime
res=self.__submitUI(submit,hideUI)
if submit and hideUI:
return res
def __runCommand(self, command):
stdin,stdout,stderr = self.client.exec_command(command)
return ''.join(stdout.readlines())+''.join(stderr.readlines())
def __submitUI(self, direct_submit=False,hideUI=False):
fileList=listExeutables()
if len(fileList) == 0:
with open('test.sh','w') as output:
output.write('#!/bin/bash\n\necho test')
jobName=Text(value=self.jobName)
entrance=Dropdown(
options=fileList,
value=fileList[0],
layout=Layout()
)
nNodes=IntSlider(
value=self.nNodes,
min=1,
max=10,
step=1,
continuous_update=False,
orientation='horizontal',
readout=True,
readout_format='i',
slider_color='white'
)
ppn=IntSlider(
value=self.ppn,
min=1,
max=20,
step=1,
continuous_update=False,
orientation='horizontal',
readout=True,
readout_format='i',
slider_color='white'
)
isGPU=RadioButtons(
options=['No GPU','GPU'],
value = 'GPU' if self.isGPU else 'No GPU'
)
ppn=IntSlider(
value=self.ppn,
min=1,
max=20,
step=1,
continuous_update=False,
orientation='horizontal',
readout=True,
readout_format='i',
slider_color='white'
)
walltime=FloatSlider(
value=float(self.walltime),
min=1.0,
max=48.0,
step=1.0,
continuous_update=False,
orientation='horizontal',
readout=True,
readout_format='.1f',
slider_color='white'
)
preview=Button(
description='Preview Job script',
button_style='', # 'success', 'info', 'warning', 'danger' or ''
tooltip='Preview Job'
)
jobview=Textarea(
layout=Layout(width='500px',height='225px',max_width='1000px', max_height='1000px')
)
confirm=Button(
description='Submit Job',
button_style='', # 'success', 'info', 'warning', 'danger' or ''
tooltip='Submit job'
)
status=HTML(
layout=Layout(width='850px',height='200px',max_width='1000px', min_height='200px', max_height='1000px')
)
refresh=Button(
description='Refresh Status',
disabled=True
)
cancel=Button(
description='Cancel Job',
disabled=True
)
newJob=Button(
description='New Job',
disabled=True
)
jobEdits = [jobName,entrance,nNodes,ppn,isGPU,walltime,confirm]
postSubmission = [refresh, cancel, newJob]
def switchMode():
if not self.editMode:
status.value = ''
for w in jobEdits:
w.disabled = self.editMode
jobview.disabled = self.editMode
self.editMode = not self.editMode
for w in postSubmission:
w.disabled = self.editMode
def click_preview(b):
jobview.value=self.job_template.substitute(
jobname = jobName.value,
n_nodes = nNodes.value,
is_gpu = isGPU.value.lower().replace(' ',''),
ppn = ppn.value,
walltime = '%d:00:00'%int(walltime.value),
username = self.userName,
jobDir = self.rogerJobDir,
rogerPath= self.rogerPath,
exe = entrance.value
)
click_preview(1)
preview.on_click(click_preview)
for w in jobEdits:
w.observe(click_preview, names='value')
def refreshStatus(b):
#status.value='<pre>'+self.__runCommand('date; qstat | awk \'NR < 3 || /%s/\''%(self.username))+'</pre>'
if self.jobId is None:
status.value='<pre><font size=2>%s</font></pre>'%('\n'*8)
return
result = self.__runCommand('date; qstat -a %s | sed 1,3d '%self.jobId)
if 'Unknown Job Id Error' in result:
result = 'Job %s is finished'%self.jobId
est_time= '\n'*7
else:
est_time = self.__runCommand('showstart %s | head -3'%self.jobId)
if 'cannot locate job' in est_time:
est_time = 'Job %s is currently out of queue.\n\n'%self.jobId
status.value='<pre><font size=2>%s\n%s</font></pre>'%(result, est_time)
refreshStatus(1)
refresh.on_click(refreshStatus)
def submit(b):
filename='%s.pbs'%jobName.value
with open(self.jobDir + '/' + filename,'w') as output:
output.write(jobview.value)
self.jobId = self.__runCommand('qsub %s/%s 2>/dev/null'%(self.rogerJobDir, filename)).strip()
switchMode()
refreshStatus(1)
#status.value='<pre>'+self.__runCommand('qsub %s >/dev/null 2>&1; date; qstat | awk \'NR < 3 || /%s/ \''%(filename,self.username))+'</pre>'
#status.value='<pre><font size=2>'+self.__runCommand('date; qstat -u %s | sed 1,3d'%(self.userName))+'</font></pre>'
confirm.on_click(submit)
def click_cancel(b):
if self.jobId:
self.__runCommand('qdel %s'%self.jobId)
switchMode()
cancel.on_click(click_cancel)
def click_newJob(b):
switchMode()
newJob.on_click(click_newJob)
submitForm=VBox([
Labeled('Job name', jobName),
Labeled('Executable', entrance),
Labeled('No. nodes', nNodes),
Labeled('Cores per node', ppn),
Labeled('GPU needed', isGPU),
Labeled('Walltime (h)', walltime),
#Labeled('', preview),
Labeled('Job script', jobview),
Labeled('', confirm)
])
statusTab=VBox([
Labeled('Job Status', status),
Labeled('', HBox([refresh,cancel,newJob])),
])
if direct_submit:
submit(1)
#display(Tab([submitForm, statusTab], _titles={0: 'Submit New Job', 1: 'Check Job Status'}))
if direct_submit:
if hideUI:
return self.jobId
else:
display(VBox([
Labeled('Job script', jobview),
VBox([
Labeled('Job Status', status),
Labeled('', HBox([refresh,cancel])),
])
]))
else:
display(VBox([submitForm, statusTab]))
def listRunning(self, user=USERNAME, hideUI=False):
header=HTML(
layout=Layout(width='800px',max_width='1000px',
min_width='50px', max_height='1000px')
)
status=SelectMultiple(
layout=Layout(width='850px',height='125px',max_width='1000px',
min_width='800px', min_height='125px', max_height='1000px')
)
refresh=Button(
description='Refresh Status',
disabled=False
)
cancel=Button(
description='Cancel Job',
disabled=False
)
def refreshStatus(b):
#status.value='<pre>'+self.__runCommand('date; qstat | awk \'NR < 3 || /%s/\''%(self.username))+'</pre>'
result = self.__runCommand("qstat | sed -n '1,2p;/%s/p'"%user)
header.value='<pre>%s</pre>'%result
self.runningIds = [_.split()[0] for _ in result.strip().split('\n')[2:]]
#status.options = [_.split()[0] for _ in result.strip().split('\n')[2:]]
refreshStatus(1)
refresh.on_click(refreshStatus)
def click_cancel(b):
pass
#self.__runCommand('qdel %s'%status.value[0].split()[0])
cancel.on_click(click_cancel)
if not hideUI:
display(
VBox([
header,
#HBox([status,header]),
#status,
HBox([refresh, cancel])
])
)
else:
return self.runningIds
def cancel(self, jobIds):
if isinstance(jobIds, str):
self.__runCommand('qdel %s'%jobIds)
        elif isinstance(jobIds, list):
self.__runCommand('qdel %s'%' '.join(jobIds))
#def showDetail(self, jobId): # Not handling large output
# print(self.__runCommand('qstat -f %s'%jobId))
#@register_line_magic
#def roger(line):
# Roger()
#del roger
| 37.368421
| 151
| 0.541613
| 1,710
| 15,620
| 4.853216
| 0.234503
| 0.003133
| 0.006507
| 0.00723
| 0.272804
| 0.225208
| 0.190143
| 0.173756
| 0.151585
| 0.104109
| 0
| 0.016194
| 0.320038
| 15,620
| 417
| 152
| 37.458034
| 0.765182
| 0.101985
| 0
| 0.281899
| 0
| 0.008902
| 0.122972
| 0.021293
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059347
| false
| 0.014837
| 0.04451
| 0
| 0.136499
| 0.014837
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0c95eb3b0bfb04075898983cf10d20a892318cb
| 5,419
|
py
|
Python
|
compile.py
|
Wizard-collab/wizard_2
|
a2cb23362e178a0205f6dd0b9b4328c329b5b142
|
[
"MIT"
] | 1
|
2021-10-13T15:07:32.000Z
|
2021-10-13T15:07:32.000Z
|
compile.py
|
Wizard-collab/wizard_2
|
a2cb23362e178a0205f6dd0b9b4328c329b5b142
|
[
"MIT"
] | null | null | null |
compile.py
|
Wizard-collab/wizard_2
|
a2cb23362e178a0205f6dd0b9b4328c329b5b142
|
[
"MIT"
] | null | null | null |
import subprocess
import os
import shutil
import time
import yaml
import sys
import logging
logger = logging.getLogger(__name__)
class compile():
def __init__(self):
args = sys.argv
args.pop(0)
if len(args) >= 1:
self.release_type = args.pop(0)
else:
self.release_type = None
self.build_folder = None
self.get_release_name()
self.compile()
def get_release_name(self):
if self.release_type is not None:
compil_dir = 'compile'
if not os.path.isdir(compil_dir):
os.mkdir(compil_dir)
compil_data_file = 'version.yaml'
if not os.path.isfile(compil_data_file):
compil_data_dic = dict()
compil_data_dic['builds'] = 0
# version name : MAJOR.MINOR.PATCH
compil_data_dic['MAJOR'] = 2
compil_data_dic['MINOR'] = 0
compil_data_dic['PATCH'] = 0
with open(compil_data_file, 'w') as f:
yaml.dump(compil_data_dic, f)
else:
with open(compil_data_file, 'r') as f:
compil_data_dic = yaml.load(f, Loader=yaml.Loader)
build_no = compil_data_dic['builds'] + 1
MAJOR = compil_data_dic['MAJOR']
MINOR = compil_data_dic['MINOR']
PATCH = compil_data_dic['PATCH']
if self.release_type == 'MAJOR':
MAJOR += 1
MINOR = 0
PATCH = 0
elif self.release_type == 'MINOR':
MINOR += 1
PATCH = 0
elif self.release_type == 'PATCH':
PATCH += 1
elif self.release_type == 'REBUILD':
pass
else:
logger.error(f"{self.release_type} is not a valid release type")
MAJOR = None
MINOR = None
PATCH = None
if None not in (MAJOR, MINOR, PATCH):
release_name = f"{MAJOR}.{MINOR}.{PATCH}"
self.build_folder = os.path.join(compil_dir, f"{release_name}_{str(build_no).zfill(4)}")
self.setup_name = f'{release_name}.{str(build_no).zfill(4)}-setup.exe'
compil_data_dic['MAJOR'] = MAJOR
compil_data_dic['MINOR'] = MINOR
compil_data_dic['PATCH'] = PATCH
compil_data_dic['builds'] = build_no
compil_data_dic['date'] = time.time()
with open(compil_data_file, 'w') as f:
yaml.dump(compil_data_dic, f)
logger.info(f"Release name : {release_name}")
logger.info(f"Build : {build_no}")
else:
logger.error(f"please provide a release type")
def compile(self):
if self.build_folder is not None:
self.clean_pycache()
if os.path.isdir('dist'):
shutil.rmtree('dist')
if os.path.isdir('build'):
shutil.rmtree('build')
command_line = "PyInstaller wizard.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller create_repository.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller PyWizard.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller wizard_cmd.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller server.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller uninstall.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller error_handler.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller project_manager.spec"
p = subprocess.Popen(command_line)
p.wait()
folders_list = ['ressources', 'softwares']
dist_folder = 'dist/Wizard'
for folder in folders_list:
destination = os.path.join(dist_folder, folder)
shutil.copytree(folder, destination)
files_list = [ 'version.yaml',
'LICENSE',
'wapi.py',
'dist/PyWizard/PyWizard.exe',
'dist/PyWizard/PyWizard.exe.manifest',
'dist/Create Repository/Create Repository.exe',
'dist/Create Repository/Create Repository.exe.manifest',
'dist/wizard_cmd/wizard_cmd.exe',
'dist/wizard_cmd/wizard_cmd.exe.manifest',
'dist/server/server.exe',
'dist/server/server.exe.manifest',
'dist/uninstall.exe',
'dist/Project Manager/Project Manager.exe',
'dist/Project Manager/Project Manager.exe.manifest',
'dist/error_handler/error_handler.exe',
'dist/error_handler/error_handler.exe.manifest']
for file in files_list:
destination = os.path.join(dist_folder, os.path.basename(file))
shutil.copyfile(file, destination)
shutil.copytree(dist_folder, self.build_folder)
if os.path.isdir('dist'):
shutil.rmtree('dist')
if os.path.isdir('build'):
shutil.rmtree('build')
shutil.make_archive(f'{self.build_folder}', 'zip', self.build_folder)
if os.path.isdir(self.build_folder):
shutil.rmtree(self.build_folder)
# Making installer
zip_file = self.build_folder+'.zip'
shutil.copyfile(zip_file, '__wizard__.zip')
command_line = "PyInstaller installer.spec"
p = subprocess.Popen(command_line)
p.wait()
shutil.copyfile('dist/__installer_temp__.exe', os.path.join('compile', self.setup_name))
os.remove('__wizard__.zip')
if os.path.isdir('dist'):
shutil.rmtree('dist')
if os.path.isdir('build'):
shutil.rmtree('build')
self.clean_pycache()
os.startfile(os.path.dirname(self.build_folder))
def clean_pycache(self):
total_chars = 0
total_files = 0
for root, dirs, files in os.walk(os.path.abspath(""), topdown=False):
for directory in dirs:
if directory == '__pycache__':
dir_name = os.path.join(root, directory)
logger.info(f"Deleting {dir_name}...")
shutil.rmtree(dir_name)
if __name__ == '__main__':
compile()
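# --- Illustrative sketch (not part of the original compile.py) ---
# A hypothetical helper that mirrors the version-bump branching in
# get_release_name(), just to make the MAJOR.MINOR.PATCH rules explicit.
def _bump(major, minor, patch, release_type):
    """Return the bumped version tuple for a given release type."""
    if release_type == 'MAJOR':
        return major + 1, 0, 0
    if release_type == 'MINOR':
        return major, minor + 1, 0
    if release_type == 'PATCH':
        return major, minor, patch + 1
    return major, minor, patch  # 'REBUILD' keeps the numbers unchanged

assert _bump(2, 0, 0, 'MINOR') == (2, 1, 0)
assert _bump(2, 1, 3, 'MAJOR') == (3, 0, 0)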
| 29.291892
| 92
| 0.677985
| 753
| 5,419
| 4.661355
| 0.173971
| 0.062678
| 0.062963
| 0.051282
| 0.40114
| 0.358974
| 0.290598
| 0.237607
| 0.20114
| 0.20114
| 0
| 0.004092
| 0.188227
| 5,419
| 185
| 93
| 29.291892
| 0.793817
| 0.009042
| 0
| 0.265823
| 0
| 0
| 0.241803
| 0.083271
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025316
| false
| 0.006329
| 0.044304
| 0
| 0.075949
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0d20743fdd39b355e497598543bd007290f251f
| 840
|
py
|
Python
|
src/discolight/loaders/annotation/widthheightcsv.py
|
denzel-datature/discolight
|
7c8309d3f883263b2e4cae0b289f17be1d1c07ea
|
[
"MIT"
] | 27
|
2020-07-23T08:09:25.000Z
|
2022-03-01T08:24:43.000Z
|
src/discolight/loaders/annotation/widthheightcsv.py
|
denzel-datature/discolight
|
7c8309d3f883263b2e4cae0b289f17be1d1c07ea
|
[
"MIT"
] | 7
|
2020-08-05T07:26:55.000Z
|
2020-12-31T04:20:40.000Z
|
src/discolight/loaders/annotation/widthheightcsv.py
|
denzel-datature/discolight
|
7c8309d3f883263b2e4cae0b289f17be1d1c07ea
|
[
"MIT"
] | 6
|
2020-07-27T04:30:01.000Z
|
2020-08-13T02:39:25.000Z
|
"""A CSV annotation writer that reads the bbox in x, y, w, h format."""
from discolight.annotations import BoundingBox
from .types import CSVRow, CSVAnnotationLoader
class WidthHeightCSV(CSVAnnotationLoader):
"""Loads annotations from a CSV file in the following format.
image_name, x_min, y_min, width, height, label
"""
def get_csv_row(self, row):
"""Return the image and annotation from a CSV row."""
x_min = float(row["x_min"])
y_min = float(row["y_min"])
width = float(row["width"])
height = float(row["height"])
x_max = x_min + width
y_max = y_min + height
image_name = row["image_name"]
class_idx = row["label"]
return CSVRow(image_name=image_name,
bbox=BoundingBox(x_min, y_min, x_max, y_max, class_idx))
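# --- Illustrative sketch (not part of the original loader) ---
# A made-up row in the CSV format described in the class docstring, converted
# the same way get_csv_row() does it: x_max/y_max are derived from width/height.
row = {"image_name": "img_001.jpg", "x_min": "10", "y_min": "20",
       "width": "30", "height": "40", "label": "dog"}
x_max = float(row["x_min"]) + float(row["width"])   # 40.0
y_max = float(row["y_min"]) + float(row["height"])  # 60.0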
| 28.965517
| 78
| 0.633333
| 118
| 840
| 4.313559
| 0.355932
| 0.088409
| 0.02947
| 0.047151
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.257143
| 840
| 28
| 79
| 30
| 0.815705
| 0.261905
| 0
| 0
| 0
| 0
| 0.060504
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.142857
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0d3e5c8138c7d0eda8194549ae4292083be2818
| 1,286
|
py
|
Python
|
test/test_day09.py
|
frangiz/AdventOfCode2017
|
5fc171d4a83bfb9a408b4647ded4cb3efd12247e
|
[
"MIT"
] | null | null | null |
test/test_day09.py
|
frangiz/AdventOfCode2017
|
5fc171d4a83bfb9a408b4647ded4cb3efd12247e
|
[
"MIT"
] | null | null | null |
test/test_day09.py
|
frangiz/AdventOfCode2017
|
5fc171d4a83bfb9a408b4647ded4cb3efd12247e
|
[
"MIT"
] | null | null | null |
from days import day09
from ddt import ddt, data, unpack
import unittest
import util
@ddt
class MyTestCase(unittest.TestCase):
@data(
[['{}'], '1'],
[['{{{}}}'], '6'],
[['{{},{}}'], '5'],
[['{{{},{},{{}}}}'], '16'],
[['{<a>,<a>,<a>,<a>}'], '1'],
[['{{<ab>},{<ab>},{<ab>},{<ab>}}'], '9'],
[['{{<!!>},{<!!>},{<!!>},{<!!>}}'], '9'],
[['{{<a!>},{<a!>},{<a!>},{<ab>}}'], '3'])
@unpack
def test_example_a(self, test_input, expected):
result = day09.part_a(test_input)
self.assertEqual(result, expected)
def test_answer_part_a(self):
result = day09.part_a(util.get_file_contents('day09.txt'))
self.assertEqual(result, '14190')
@data(
[['<>'], '0'],
[['<random characters>'], '17'],
[['<<<<>'], '3'],
[['<{!>}>'], '2'],
[['<!!>'], '0'],
[['<!!!>>'], '0'],
[['<{o"i!a,<{i<a>'], '10'])
@unpack
def test_example_b(self, test_input, expected):
result = day09.part_b(test_input)
self.assertEqual(result, expected)
def test_answer_part_b(self):
result = day09.part_b(util.get_file_contents('day09.txt'))
self.assertEqual(result, '7053')
| 29.906977
| 67
| 0.437014
| 133
| 1,286
| 4.06015
| 0.338346
| 0.018519
| 0.111111
| 0.074074
| 0.514815
| 0.514815
| 0.514815
| 0.381481
| 0.381481
| 0.203704
| 0
| 0.043663
| 0.269829
| 1,286
| 42
| 68
| 30.619048
| 0.531416
| 0
| 0
| 0.162162
| 0
| 0
| 0.188103
| 0.069936
| 0
| 0
| 0
| 0
| 0.108108
| 1
| 0.108108
| false
| 0
| 0.108108
| 0
| 0.243243
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0d55d407b26fa73a5076bdbaa2919b847abf548
| 6,760
|
py
|
Python
|
jps-people-importer.py
|
UniversalSuperBox/jps-people-importer
|
eb7128122d00879798a88b599d90e53c139a00da
|
[
"MIT"
] | null | null | null |
jps-people-importer.py
|
UniversalSuperBox/jps-people-importer
|
eb7128122d00879798a88b599d90e53c139a00da
|
[
"MIT"
] | null | null | null |
jps-people-importer.py
|
UniversalSuperBox/jps-people-importer
|
eb7128122d00879798a88b599d90e53c139a00da
|
[
"MIT"
] | null | null | null |
"""
This script creates users in a JAMF Pro Server instance from an LDAP query.
"""
# Copyright 2020 Dalton Durst
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import sys
from collections import namedtuple
from multiprocessing.pool import ThreadPool
from typing import List
from json.decoder import JSONDecodeError
import ldap
import requests
from ldap.controls import SimplePagedResultsControl
from conf import (
JAMF_PASSWORD,
JAMF_URL,
JAMF_USERNAME,
LDAP_BIND_PASSWORD,
LDAP_BIND_URI,
LDAP_BIND_USERNAME,
LDAP_FILTER,
LDAP_INSECURE,
LDAP_SEARCH_DN_LIST,
)
JAMF_AUTH = requests.auth.HTTPBasicAuth(JAMF_USERNAME, JAMF_PASSWORD)
SESSION = requests.Session()
User = namedtuple("User", ["sAMAccountName", "email", "last_name", "first_name"])
def eprint(*args, **kwargs):
"""Like print, but outputs to stderr."""
print(*args, file=sys.stderr, **kwargs)
def results_for_dn(directory: ldap.ldapobject.LDAPObject, base_dn: str, filter: str) -> List[User]:
"""Returns a list of User objects found in the directory object for filter
:param directory: A ldap.LDAPObject that has already been bound to a
directory.
:param base_dn: The base of the directory tree to run the search filter
against.
:param filter: The LDAP search filter to run on base_dn using directory.
"""
req_ctrl = SimplePagedResultsControl(True, size=5000, cookie="")
known_ldap_resp_ctrls = {
SimplePagedResultsControl.controlType: SimplePagedResultsControl,
}
# Send search request
msgid = directory.search_ext(
base_dn, ldap.SCOPE_SUBTREE, filterstr=filter, serverctrls=[req_ctrl]
)
results = []
while True:
__, result_data, __, serverctrls = directory.result3(
msgid, resp_ctrl_classes=known_ldap_resp_ctrls
)
results.extend(
[
User(
ldap_entry["sAMAccountName"][0].decode(),
ldap_entry["mail"][0].decode(),
ldap_entry["sn"][0].decode(),
ldap_entry["givenName"][0].decode(),
)
for __, ldap_entry in result_data
]
)
page_controls = [
control
for control in serverctrls
if control.controlType == SimplePagedResultsControl.controlType
]
if page_controls:
if page_controls[0].cookie:
# Copy cookie from response control to request control
req_ctrl.cookie = page_controls[0].cookie
msgid = directory.search_ext(
base_dn,
ldap.SCOPE_SUBTREE,
filterstr=filter,
serverctrls=[req_ctrl],
)
else:
break
else:
eprint("Warning: Server ignores RFC 2696 control.")
break
return results
def create_user_in_jamf(user: User):
""" Creates a user in the JPS
:param user: A User object which will be used to create the JPS user.
This function uses the following module variables:
* SESSION must be a requests.Session instance
* JAMF_AUTH must be a requests.auth interface instance
* JAMF_URL must be the full base URL of a JAMF instance.
"""
eprint("Attempting to create", user.sAMAccountName)
xml = """
<user>
<name>{name}</name>
<full_name>{last_name}, {first_name}</full_name>
<email>{email}</email>
</user>
""".format(
name=user.sAMAccountName,
last_name=user.last_name,
first_name=user.first_name,
email=user.email,
).encode()
r = SESSION.post(
JAMF_URL + "/JSSResource/users/id/-1",
data=xml,
headers={"Content-Type": "application/xml", "Accept": "application/xml"},
auth=JAMF_AUTH,
)
try:
r.raise_for_status()
except requests.exceptions.RequestException as e:
eprint("Failed to create user with username", user.sAMAccountName)
eprint(e)
eprint(r.text)
else:
print(user.sAMAccountName)
def main():
eprint("Binding to LDAP...")
if LDAP_INSECURE:
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
directory = ldap.initialize(LDAP_BIND_URI)
directory.protocol_version = 3
directory.simple_bind_s(who=LDAP_BIND_USERNAME, cred=LDAP_BIND_PASSWORD)
eprint("Searching directory for users...")
ldap_users = []
for base_dn in LDAP_SEARCH_DN_LIST:
eprint("Searching DN", base_dn, "with filter", LDAP_FILTER)
ldap_users.extend(results_for_dn(directory, base_dn, LDAP_FILTER))
directory.unbind_s()
directory = None
eprint("Total LDAP users:", len(ldap_users))
eprint("Asking JPS for its user list...")
jamf_user_request = requests.get(
JAMF_URL + "/JSSResource/users",
auth=JAMF_AUTH,
headers={"Accept": "application/json"},
)
try:
jamf_user_json = jamf_user_request.json()
except JSONDecodeError:
eprint(jamf_user_request.text)
eprint("Failed to decode /JSSResource/users response as JSON.")
sys.exit(1)
jamf_usernames = frozenset([user["name"] for user in jamf_user_json["users"]])
eprint("Total JAMF users:", len(jamf_usernames))
missing_users = [
user for user in ldap_users if user.sAMAccountName not in jamf_usernames
]
eprint("Users to create:", len(missing_users))
with ThreadPool(10) as pool:
results = pool.map(create_user_in_jamf, missing_users)
eprint("Done. Created users:", len(results))
if __name__ == "__main__":
main()
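# --- Illustrative sketch (not part of the original script) ---
# The settings are imported from a local conf module. A hypothetical conf.py
# could define the expected names with placeholder values, for example:
#
#   JAMF_URL = "https://jamf.example.com:8443"
#   JAMF_USERNAME = "api-user"
#   JAMF_PASSWORD = "change-me"
#   LDAP_BIND_URI = "ldaps://ldap.example.com"
#   LDAP_BIND_USERNAME = "cn=svc-bind,dc=example,dc=com"
#   LDAP_BIND_PASSWORD = "change-me"
#   LDAP_FILTER = "(&(objectClass=user)(mail=*))"
#   LDAP_SEARCH_DN_LIST = ["ou=people,dc=example,dc=com"]
#   LDAP_INSECURE = False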
| 31.009174
| 88
| 0.657988
| 842
| 6,760
| 5.11639
| 0.327791
| 0.020427
| 0.009053
| 0.011838
| 0.038069
| 0.038069
| 0.038069
| 0.038069
| 0.038069
| 0.038069
| 0
| 0.004761
| 0.25429
| 6,760
| 217
| 89
| 31.152074
| 0.849831
| 0.274852
| 0
| 0.08209
| 0
| 0
| 0.142887
| 0.019343
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029851
| false
| 0.029851
| 0.067164
| 0
| 0.104478
| 0.134328
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0d8be8cd8fd46f56d5540bc555ac35643dd277b
| 1,387
|
py
|
Python
|
app.py
|
arian-nasr/Temporary-SMS
|
cedbe68b3e329362049c86e0974396bc660875da
|
[
"MIT"
] | null | null | null |
app.py
|
arian-nasr/Temporary-SMS
|
cedbe68b3e329362049c86e0974396bc660875da
|
[
"MIT"
] | null | null | null |
app.py
|
arian-nasr/Temporary-SMS
|
cedbe68b3e329362049c86e0974396bc660875da
|
[
"MIT"
] | 1
|
2021-09-10T05:02:48.000Z
|
2021-09-10T05:02:48.000Z
|
from flask import Flask, jsonify, request
from flask_cors import CORS
from twilio.twiml.messaging_response import MessagingResponse, Message
from twilio.rest import Client
import sqlconnector as sql
from datetime import datetime
import os
# configuration
DEBUG = True
twilio_sid = os.environ.get('TWILIO_SID')
twilio_secret = os.environ.get('TWILIO_SECRET')
client = Client(twilio_sid, twilio_secret)
# instantiate the app
app = Flask(__name__)
app.config.from_object(__name__)
# enable CORS
CORS(app, resources={r'/*': {'origins': '*'}})
@app.route('/api/temporarysms/writemessage', methods=['POST'])
def inbound_sms():
response = MessagingResponse()
message_sender = request.form['From']
message_body = request.form['Body']
message_date = datetime.now()
sql.write_to_database('phone1', message_date, message_sender, message_body)
return str(response)
@app.route('/api/temporarysms/available', methods=['GET'])
def available():
numbers = sql.read_availability_from_database('numbers')
return jsonify({
'status': 'success',
'numbers': numbers
})
@app.route('/api/temporarysms/readmessage', methods=['GET'])
def allmessages():
messages = sql.read_messages_from_database('phone1')
return jsonify({
'status': 'success',
'messages': messages
})
if __name__ == '__main__':
app.run(host="192.168.0.21")
| 28.895833
| 79
| 0.716655
| 169
| 1,387
| 5.64497
| 0.431953
| 0.028302
| 0.034591
| 0.072327
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009298
| 0.14708
| 1,387
| 48
| 80
| 28.895833
| 0.797126
| 0.032444
| 0
| 0.157895
| 0
| 0
| 0.162061
| 0.064227
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078947
| false
| 0
| 0.184211
| 0
| 0.342105
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0df0de6b9dc212d463d40040bc158f2287e5e3f
| 1,101
|
py
|
Python
|
scripts/txtool/txtool/get_logs.py
|
baajur/cita
|
763c7866e6ea59ff96de085b4c72665f4e2f69ba
|
[
"Apache-2.0"
] | 930
|
2017-07-25T08:27:55.000Z
|
2019-11-26T10:07:48.000Z
|
scripts/txtool/txtool/get_logs.py
|
baajur/cita
|
763c7866e6ea59ff96de085b4c72665f4e2f69ba
|
[
"Apache-2.0"
] | 484
|
2017-07-25T14:32:44.000Z
|
2019-11-14T11:16:45.000Z
|
scripts/txtool/txtool/get_logs.py
|
QingYanL/testCITA
|
6d2e82c87831553c8d34749c56c4e5c8b94ece9c
|
[
"Apache-2.0"
] | 184
|
2017-07-26T01:37:36.000Z
|
2019-11-19T07:07:49.000Z
|
#!/usr/bin/env python3
# coding=utf-8
from __future__ import print_function
from jsonrpcclient.http_client import HTTPClient
from url_util import endpoint
import argparse
import simplejson
import logging
logger = logging.getLogger(__name__)
def get_topics():
with open("../output/transaction/topics", 'r') as topicfile:
topics = simplejson.load(topicfile)
return topics
def get_logs(topics, from_block, to_block):
try:
url = endpoint()
response = HTTPClient(url).request("getLogs", [{
"topics": topics,
"fromBlock": from_block,
"toBlock": to_block
}])
except:
return None
return response
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument("--fromBlock", default="0")
parser.add_argument("--toBlock", default="latest")
opts = parser.parse_args()
return opts.fromBlock, opts.toBlock
def main():
from_block, to_block = parse_arguments()
topics = get_topics()
logger.debug(topics)
resp = get_logs(topics, from_block, to_block)
print(resp)
if __name__ == "__main__":
main()
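# --- Illustrative sketch (not part of the original script) ---
# get_logs() wraps a JSON-RPC "getLogs" call; the topics come from the JSON
# file read by get_topics(). A hypothetical topics file and the resulting
# RPC parameters would look like:
#
#   topics file: ["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"]
#   RPC params:  [{"topics": [...], "fromBlock": "0", "toBlock": "latest"}]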
| 22.02
| 64
| 0.659401
| 128
| 1,101
| 5.421875
| 0.476563
| 0.051873
| 0.04755
| 0.069164
| 0.083573
| 0.083573
| 0.083573
| 0
| 0
| 0
| 0
| 0.003521
| 0.226158
| 1,101
| 49
| 65
| 22.469388
| 0.811033
| 0.030881
| 0
| 0
| 0
| 0
| 0.087324
| 0.026291
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.147059
| 0
| 0.382353
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0e07e8ca8f962f207b1a467ec124c229cd57722
| 2,095
|
py
|
Python
|
w2/w2/t1.py
|
mvgrigoriev/ml-course
|
fc5cf01d0de0eb5771389ea3d978e0bd291fdf2b
|
[
"MIT"
] | null | null | null |
w2/w2/t1.py
|
mvgrigoriev/ml-course
|
fc5cf01d0de0eb5771389ea3d978e0bd291fdf2b
|
[
"MIT"
] | null | null | null |
w2/w2/t1.py
|
mvgrigoriev/ml-course
|
fc5cf01d0de0eb5771389ea3d978e0bd291fdf2b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 07 07:52:52 2018
@author: MVGrigoriev
@task: kNN method
"""
import pandas
import numpy as np
from sklearn.neighbors import KNeighborsClassifier # Import class from scikit-learn
from sklearn.model_selection import KFold # Import KFold function
from sklearn.model_selection import cross_val_score # Import metrics for cross validation
from sklearn.preprocessing import scale # Import Scale function
data = pandas.read_csv('wine.data', header=None) # Import data
target = data[0] # Extract target
features = data.drop(0, axis=1) # Extract features
kf = KFold(n_splits=5, shuffle=True, random_state=42)
# At what k is the maximum quality obtained without normalization of characteristics?
#
# What is the maximum quality without the normalization of characteristics (the number in the scale from 0 to 1)?
#
listOfAccuracy = []
for i in range(1, 51):
neigh = KNeighborsClassifier(n_neighbors=i)
neigh.fit(features, target)
cvs = cross_val_score(neigh, features, target, cv=kf, scoring='accuracy')
cvsValue = np.mean(cvs)
listOfAccuracy.append(cvsValue)
optValue = max(listOfAccuracy)
optIndex = listOfAccuracy.index(optValue)
with open('2_1.txt', 'w') as f1:
print(optIndex+1, file=f1, end='')
with open('2_2.txt', 'w') as f2:
print(round(optValue, 2), file=f2, end='')
# Which optimal K is obtained after the normalization of the characteristics?
#
# What is the maximum quality after the normalization of characteristics (a number in the range from 0 to 1)?
#
features = scale(features)
listOfAccuracy = []
for i in range(1, 51):
neigh = KNeighborsClassifier(n_neighbors=i)
neigh.fit(features, target)
cvs = cross_val_score(neigh, features, target, cv=kf, scoring='accuracy')
cvsValue = np.mean(cvs)
listOfAccuracy.append(cvsValue)
optValue = max(listOfAccuracy)
optIndex = listOfAccuracy.index(optValue)
with open('2_3.txt', 'w') as f3:
print(optIndex+1, file=f3, end='')
with open('2_4.txt', 'w') as f4:
print(round(optValue, 2), file=f4, end='')
| 36.754386
| 115
| 0.719809
| 300
| 2,095
| 4.97
| 0.373333
| 0.02951
| 0.024145
| 0.038229
| 0.464118
| 0.391683
| 0.340711
| 0.340711
| 0.340711
| 0.340711
| 0
| 0.028129
| 0.168496
| 2,095
| 56
| 116
| 37.410714
| 0.827784
| 0.313604
| 0
| 0.486486
| 0
| 0
| 0.040426
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.162162
| 0
| 0.162162
| 0.108108
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0e15314e67099f053fc8acbea6a1a91c7a8ed52
| 1,946
|
py
|
Python
|
tutorial/tutorial.py
|
isabella232/sosp21-artifact
|
1b4a11c648e456c9ff9d74f16b09f4238d6694a0
|
[
"BSD-3-Clause"
] | 1
|
2021-09-20T07:57:50.000Z
|
2021-09-20T07:57:50.000Z
|
tutorial/tutorial.py
|
digi-project/sosp21-artifact
|
1b4a11c648e456c9ff9d74f16b09f4238d6694a0
|
[
"BSD-3-Clause"
] | 1
|
2022-03-21T11:33:33.000Z
|
2022-03-21T11:33:33.000Z
|
tutorial/tutorial.py
|
isabella232/sosp21-artifact
|
1b4a11c648e456c9ff9d74f16b09f4238d6694a0
|
[
"BSD-3-Clause"
] | 2
|
2021-12-09T12:54:52.000Z
|
2022-03-21T08:43:31.000Z
|
# ipython utils
import os
import sys
import time
import yaml
import datetime
from pathlib import Path
from IPython import get_ipython
from IPython.core.magic import (register_line_magic, register_cell_magic,
register_line_cell_magic)
import warnings; warnings.simplefilter('ignore')
start = time.time()
@register_line_cell_magic
def elapsed_time(line, cell=None):
if cell is not None:
get_ipython().run_cell(cell)
print(datetime.timedelta(seconds=round(time.time() - start)))
os.environ.update({
"GROUP": "tutorial",
"VERSION": "v1",
"KOPFLOG": "false",
"DOCKER_TLS_VERIFY": "1",
"DOCKER_HOST": "tcp://127.0.0.1:32770",
"DOCKER_CERT_PATH": str(Path(os.environ["HOME"], ".minikube/certs")),
"MINIKUBE_ACTIVE_DOCKERD": "minikube",
"IMAGEPULL": "Never",
"REPO": "tutorial",
})
workdir = (Path(os.environ["GOPATH"],
"src", "digi.dev",
"tutorial", "workdir"))
os.environ["WORKDIR"] = str(workdir)
def _rm_tree(pth):
pth = Path(pth)
for child in pth.glob('*'):
if child.is_file():
child.unlink()
else:
_rm_tree(child)
pth.rmdir()
def create(m: str, new=True):
y = yaml.load(m, Loader=yaml.FullLoader)
assert "kind" in y
_dir = Path(workdir, y["kind"].lower())
if _dir.is_dir() and new:
_rm_tree(_dir)
Path(_dir, "driver").mkdir(parents=True, exist_ok=True)
Path(_dir, "deploy").mkdir(parents=True, exist_ok=True)
Path(_dir, "deploy", "cr_run.yaml").touch()
Path(_dir, "driver", "handler.py").touch()
with open(Path(_dir, "model.yaml"), "w") as f:
f.write(m)
def handler_file(k):
return Path(workdir, k, "driver", "handler.py")
def model_file(k, new=True):
if new:
return Path(workdir, k, "deploy", "cr.yaml")
else:
return Path(workdir, k, "deploy", "cr_run.yaml")
| 26.297297
| 73
| 0.613052
| 260
| 1,946
| 4.423077
| 0.411538
| 0.030435
| 0.044348
| 0.046957
| 0.114783
| 0.114783
| 0.069565
| 0.069565
| 0.069565
| 0
| 0
| 0.008649
| 0.227646
| 1,946
| 74
| 74
| 26.297297
| 0.756487
| 0.00668
| 0
| 0.034483
| 0
| 0
| 0.171843
| 0.022774
| 0
| 0
| 0
| 0
| 0.017241
| 1
| 0.086207
| false
| 0
| 0.155172
| 0.017241
| 0.293103
| 0.017241
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0e275de32ffad1ac148c2e85a79a876fec1fd53
| 362
|
py
|
Python
|
examples/rotation.py
|
aallan/picamera2
|
d64fbe669e071402d11c043cf044f52f6b2edc57
|
[
"BSD-2-Clause"
] | null | null | null |
examples/rotation.py
|
aallan/picamera2
|
d64fbe669e071402d11c043cf044f52f6b2edc57
|
[
"BSD-2-Clause"
] | null | null | null |
examples/rotation.py
|
aallan/picamera2
|
d64fbe669e071402d11c043cf044f52f6b2edc57
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/python3
# Run the camera with a 180 degree rotation.
from qt_gl_preview import *
from picamera2 import *
import time
picam2 = Picamera2()
preview = QtGlPreview(picam2)
preview_config = picam2.preview_configuration()
preview_config["transform"] = libcamera.Transform(hflip=1, vflip=1)
picam2.configure(preview_config)
picam2.start()
time.sleep(5)
| 20.111111
| 67
| 0.779006
| 49
| 362
| 5.632653
| 0.632653
| 0.141304
| 0.137681
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043614
| 0.11326
| 362
| 17
| 68
| 21.294118
| 0.816199
| 0.165746
| 0
| 0
| 0
| 0
| 0.03
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.3
| 0
| 0.3
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0e4e293078cbb35e7bb94fd2a5b26005400333e
| 3,294
|
py
|
Python
|
searcheval/test/test_metrics.py
|
VikasNeha/searcheval
|
90f3be8e57dd70179f707ef73241306cdd2ec915
|
[
"Apache-2.0"
] | 1
|
2018-01-18T18:37:11.000Z
|
2018-01-18T18:37:11.000Z
|
searcheval/test/test_metrics.py
|
VikasNeha/searcheval
|
90f3be8e57dd70179f707ef73241306cdd2ec915
|
[
"Apache-2.0"
] | 1
|
2022-01-11T10:37:11.000Z
|
2022-01-11T17:11:01.000Z
|
searcheval/test/test_metrics.py
|
VikasNeha/searcheval
|
90f3be8e57dd70179f707ef73241306cdd2ec915
|
[
"Apache-2.0"
] | 1
|
2022-01-11T10:46:05.000Z
|
2022-01-11T10:46:05.000Z
|
import unittest
import searcheval.metrics as sm
class MetricsTests(unittest.TestCase):
def test_mean(self):
vector = [2, 3, 7]
mean = sm.mean(vector)
self.assertEqual(mean, 4)
def test_precision(self):
relevance_vector = [1, 0, 0, 1, 0]
precision = sm.precision(relevance_vector)
self.assertEqual(precision, 0.4)
def test_precision_at_rank(self):
relevance_vector = [1, 0, 0, 1, 0]
rank = 2
precision_at_rank = sm.precision_at_rank(relevance_vector, rank)
self.assertEqual(precision_at_rank, 0.5)
def test_precision_vector(self):
relevance_vector = [1, 0]
precision_vector = sm.precision_vector(relevance_vector)
self.assertEqual(list(precision_vector), [1.0, 0.5])
def test_avg_prec(self):
relevance_vector = [1, 0]
avg_prec = sm.avg_prec(relevance_vector)
self.assertEqual(avg_prec, 0.5)
def test_r_prec(self):
relevance_vector = [1, 0, 0, 1, 0]
recall_base = 2
r_prec = sm.r_prec(relevance_vector, recall_base)
self.assertEqual(r_prec, 0.5)
# check that r_prec handles recall base larger than number of samples
r_prec = sm.r_prec([1, 0], 5)
self.assertEqual(r_prec, 0.2)
def test_recall(self):
relevance_vector = [1, 0, 0, 1, 0]
recall_base = 4
recall = sm.recall(relevance_vector, recall_base)
self.assertEqual(recall, 0.5)
def test_recall_at_rank(self):
relevance_vector = [1, 0, 0, 1, 0]
recall_base = 4
rank = 2
precision_at_rank = sm.recall_at_rank(relevance_vector, recall_base,
rank)
self.assertEqual(precision_at_rank, 0.25)
def test_recall_vector(self):
relevance_vector = [1, 0, 0, 1, 0]
recall_base = 4
recall_vector = sm.recall_vector(relevance_vector, recall_base)
self.assertEqual(list(recall_vector), [0.25, 0.25, 0.25, 0.5, 0.5])
def test_nDCG(self):
# binary relevance
gain_vector = [1, 1, 0, 0, 0] # perfect query
ideal_gain_vector = [1, 1, 0, 0, 0]
nDCG = sm.nDCG(gain_vector, ideal_gain_vector)
self.assertEqual(nDCG, 1.0)
# graded relevance
gain_vector = [3, 2, 1, 1, 0] # perfect query
ideal_gain_vector = [3, 2, 1, 1, 0]
nDCG = sm.nDCG(gain_vector, ideal_gain_vector)
self.assertEqual(nDCG, 1.0)
def test_nDCG_at_rank(self):
gain_vector = [1, 0, 1, 0, 0]
ideal_gain_vector = [1, 1, 0, 0, 0]
rank = 1
nDCG_at_rank = sm.nDCG_at_rank(gain_vector, ideal_gain_vector, rank)
self.assertEqual(nDCG_at_rank, 1.0)
# not perfect query
gain_vector = [1, 0, 1, 0, 0]
ideal_gain_vector = [1, 1, 0, 0, 0]
rank = 2
nDCG_at_rank = sm.nDCG_at_rank(gain_vector, ideal_gain_vector, rank)
self.assertTrue(nDCG_at_rank < 1.0)
def test_nDCG_vector(self):
gain_vector = [1, 1, 0, 0, 0]
ideal_gain_vector = [1, 1, 0, 0, 0]
nDCG_vector = sm.nDCG_vector(gain_vector, ideal_gain_vector)
self.assertEqual(nDCG_vector, [1.0, 1.0, 1.0, 1.0, 1.0])
if __name__ == '__main__':
unittest.main()
| 32.94
| 77
| 0.608682
| 485
| 3,294
| 3.870103
| 0.109278
| 0.039425
| 0.023974
| 0.085242
| 0.589771
| 0.529568
| 0.393713
| 0.326052
| 0.284497
| 0.272243
| 0
| 0.062816
| 0.279903
| 3,294
| 99
| 78
| 33.272727
| 0.728499
| 0.044627
| 0
| 0.373333
| 0
| 0
| 0.002548
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.16
| false
| 0
| 0.026667
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0e6ec9507e696a89752b35b6c0b3c155c6656fe
| 16,732
|
py
|
Python
|
nomic/proposal.py
|
HactarCE/Quobot
|
e13f28990f212b92835dd9c8fcbdc53bc37d5ab8
|
[
"MIT"
] | null | null | null |
nomic/proposal.py
|
HactarCE/Quobot
|
e13f28990f212b92835dd9c8fcbdc53bc37d5ab8
|
[
"MIT"
] | null | null | null |
nomic/proposal.py
|
HactarCE/Quobot
|
e13f28990f212b92835dd9c8fcbdc53bc37d5ab8
|
[
"MIT"
] | null | null | null |
from collections import OrderedDict
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import Optional, Set
import discord
import functools
from .gameflags import GameFlagsManager
from .playerdict import PlayerDict
from .repoman import GameRepoManager
from constants import colors, emoji, info, strings
import utils
class ProposalStatus(Enum):
VOTING = 'voting'
PASSED = 'passed'
FAILED = 'failed'
DELETED = 'deleted'
VOTE_ALIASES = {
'+': 'for',
'-': 'against',
'abstain': 'abstain',
'against': 'against',
'del': 'remove',
'delete': 'remove',
'for': 'for',
'remove': 'remove',
'rm': 'remove',
}
VOTE_TYPES = ('for', 'against', 'abstain')
@dataclass
class _Proposal:
game: 'ProposalManager' and GameFlagsManager
n: int
author: discord.Member
content: str
status: ProposalStatus = ProposalStatus.VOTING
message_id: Optional[int] = None
votes: PlayerDict = None
timestamp: int = None
@functools.total_ordering
class Proposal(_Proposal):
"""A dataclass representing a Nomic proposal.
Attributes:
- game
- n -- integer; proposal ID number
- author -- discord.Member
- content -- string
Optional attributes:
- status (default Proposal.Status.Voting)
- message_id (default None) -- discord.Message or the ID of one (converted
to integer ID)
- votes (default {}) -- PlayerDict of ints; positive numbers are votes
for, negative numbers are votes against, and zero is an abstention
- timestamp (default now)
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not isinstance(self.author, discord.Member):
self.author = self.game.get_member(self.author)
# if isinstance(self.message_id, discord.Message):
# self.message_id = self.message_id.id
self.votes = PlayerDict(self.game, self.votes)
self.status = ProposalStatus(self.status)
if self.timestamp is None:
self.timestamp = utils.now()
def export(self) -> dict:
return OrderedDict(
n=self.n,
author=self.author and self.author.id,
content=self.content,
status=self.status.value,
message_id=self.message_id,
votes=self.votes.export(),
timestamp=self.timestamp,
)
async def set_vote(self, player: discord.Member, new_vote_amount: int):
self.game.assert_locked()
if self.status != ProposalStatus.VOTING:
return False
if new_vote_amount == 0 and not self.game.flags.allow_vote_abstain:
new_vote_amount = None
if player in self.votes and not self.game.flags.allow_vote_change:
return False
if new_vote_amount and abs(new_vote_amount) > 1 and not self.game.flags.allow_vote_multi:
new_vote_amount //= abs(new_vote_amount)
self.votes[player] = new_vote_amount
if new_vote_amount is None:
del self.votes[player]
await self.refresh()
self.game.save()
return True
async def vote_for(self, player: discord.Member, amount: int = 1):
old_vote_amount = self.votes.get(player)
if old_vote_amount is None:
new_vote_amount = amount
elif old_vote_amount < 0:
new_vote_amount = None
else:
new_vote_amount = old_vote_amount + amount
return await self.set_vote(player, new_vote_amount)
async def vote_against(self, player: discord.Member, amount: int = 1):
old_vote_amount = self.votes.get(player)
if old_vote_amount is None:
new_vote_amount = -amount
elif old_vote_amount > 0:
new_vote_amount = None
else:
new_vote_amount = old_vote_amount - amount
return await self.set_vote(player, new_vote_amount)
async def vote_abstain(self, player: discord.Member):
old_vote_amount = self.votes.get(player)
if old_vote_amount == 0:
new_vote_amount = None
else:
new_vote_amount = 0
return await self.set_vote(player, new_vote_amount)
async def vote_abstain_or_remove(self, player: discord.Member):
old_vote_amount = self.votes.get(player)
if old_vote_amount is None:
new_vote_amount = 0
else:
new_vote_amount = None
return await self.set_vote(player, new_vote_amount)
async def vote_remove(self, player: discord.Member):
return await self.set_vote(player, None)
@property
def votes_for(self) -> int:
return sum(v for v in self.votes.values() if v > 0)
@property
def votes_against(self) -> int:
return -sum(v for v in self.votes.values() if v < 0)
@property
def votes_abstain(self) -> int:
return sum(v == 0 for v in self.votes.values())
async def set_status(self, new_status: ProposalStatus):
self.game.assert_locked()
self.status = new_status
await self.refresh()
self.game.save()
async def set_content(self, new_content: str):
self.game.assert_locked()
self.content = new_content
await self.refresh()
self.game.save()
async def refresh(self):
await self.game.refresh_proposal(self)
async def repost(self):
await self.game.repost_proposal(self)
async def fetch_message(self) -> discord.Message:
try:
return await self.game.proposals_channel.fetch_message(self.message_id)
except (discord.NotFound, discord.Forbidden, discord.HTTPException):
return None
@property
def discord_link(self) -> str:
return utils.discord.MESSAGE_LINK_FORMAT.format(
guild=self.game.guild,
channel=self.game.proposals_channel,
message_id=self.message_id,
)
@property
def github_link(self):
return f'{info.GITHUB_REPO_LINK}/blob/{self.game.repo.name}/proposals.md#{self.n}'
@property
def embed(self) -> discord.Embed:
"""Return an embed displaying this proposal."""
# Make the title; e.g. "Proposal #10 -- Passed"
title = f"Proposal #{self.n}"
if self.status != ProposalStatus.VOTING:
title += " \N{EM DASH} "
title += self.status.value.capitalize()
if self.status == ProposalStatus.DELETED:
return discord.Embed(
color=colors.DELETED,
title=title,
)
embed = discord.Embed(
color={
ProposalStatus.VOTING: colors.INFO,
ProposalStatus.PASSED: colors.SUCCESS,
ProposalStatus.FAILED: colors.ERROR,
}[self.status],
title=title,
description=self.content,
timestamp=datetime.fromtimestamp(self.timestamp),
)
# Make an embed field for each type of vote
for vote_type in VOTE_TYPES:
total = 0
value = ''
# Count the votes and list the users
for player, vote_amount in self.votes.items():
if vote_type == 'for':
if vote_amount <= 0:
continue
elif vote_type == 'against':
if vote_amount >= 0:
continue
vote_amount *= -1
elif vote_type == 'abstain':
if vote_amount != 0:
continue
vote_amount = 1
value += player.mention
if vote_amount > 1:
value += f" ({vote_amount}x)"
value += "\n"
total += vote_amount
name = vote_type.capitalize()
if total:
name += f" ({total})"
if vote_type == 'abstain' and total == 0:
continue
embed.add_field(
name=name,
value=value or strings.EMPTY_LIST,
inline=True,
)
# Set the footer
embed.set_footer(**utils.discord.embed_happened_footer("Submitted", self.author))
return embed
@property
def markdown(self):
s = f"<a name='{self.n}'/>"
s += "\n\n"
s += f"## #{self.n}"
if self.status != ProposalStatus.VOTING:
s += f" \N{EM DASH} {self.status.value.capitalize()}"
s += "\n\n"
if self.status != ProposalStatus.DELETED:
s += self.content
s += "\n\n"
return s
def __str__(self):
return f"proposal #{self.n}"
def __lt__(self, other):
return self.n < other.n
def __eq__(self, other):
return type(self) == type(other) and self.n == other.n
def __hash__(self):
# None of these values should ever change, and they should uniquely
# identify this proposal.
return hash((self.game.guild.id, self.n, self.timestamp))
class ProposalManager(GameRepoManager):
def load(self):
db = self.get_db('proposals')
self.proposals_channel = db.get('channel')
if self.proposals_channel:
self.proposals_channel = self.guild.get_channel(self.proposals_channel)
self.proposals = []
if db.get('proposals'):
for proposal in db['proposals']:
self.proposals.append(Proposal(game=self, **proposal))
def save(self):
db = self.get_db('proposals')
db.replace(OrderedDict(
channel=self.proposals_channel and self.proposals_channel.id,
proposals=[p.export() for p in self.proposals],
))
db.save()
with open(self.get_file('proposals.md'), 'w') as f:
f.write(f"# {self.guild.name} \N{EM DASH} Proposals")
f.write('\n\n')
for p in self.proposals:
f.write(p.markdown)
async def commit_proposals_and_log(self,
agent: discord.Member,
action: str,
proposal: Proposal,
post: str = '',
link_to_proposal: bool = True,
**kwargs):
"""Commit the proposals Markdown file and log the event."""
if await self.repo.is_clean('proposals.md'):
return
commit_msg = markdown_msg = f"{utils.discord.fake_mention(agent)} {action} "
commit_msg += str(proposal)
if link_to_proposal:
markdown_msg += f"[{proposal}](../proposals.md#{proposal.n})"
else:
markdown_msg += str(proposal)
await self.commit('proposals.md', msg=commit_msg + post)
await self.log(markdown_msg + post, **kwargs)
async def refresh_proposal(self, *proposals: Proposal):
"""Update the messages for one or more proposals.
May throw `TypeError`, `ValueError`, or `discord.Forbidden` exceptions.
"""
self.assert_locked()
for proposal in sorted(set(proposals)):
try:
m = await proposal.fetch_message()
await m.clear_reactions()
await m.edit(embed=proposal.embed)
if proposal.status == ProposalStatus.VOTING:
await m.add_reaction(emoji.VOTE_FOR)
await m.add_reaction(emoji.VOTE_AGAINST)
await m.add_reaction(emoji.VOTE_ABSTAIN)
except discord.NotFound:
await self.repost_proposal(proposal)
return
async def repost_proposal(self, *proposals: Proposal):
"""Remove and repost the messages for one or more proposals.
May throw `TypeError`, `ValueError`, or `discord.Forbidden` exceptions.
"""
self.assert_locked()
proposal_range = range(min(proposals).n, len(self.proposals) + 1)
proposals = list(map(self.get_proposal, proposal_range))
proposal_messages = []
for proposal in proposals:
m = await proposal.fetch_message()
if m:
proposal_messages.append(m)
if proposal_messages:
await utils.discord.safe_bulk_delete(proposal_messages)
for proposal in proposals:
m = await self.proposals_channel.send(embed=discord.Embed(
color=colors.TEMPORARY,
title=f"Preparing proposal #{proposal.n}\N{HORIZONTAL ELLIPSIS}",
))
proposal.message_id = m.id
self.save()
await self.refresh_proposal(*proposals)
def has_proposal(self, n: int) -> bool:
return isinstance(n, int) and 1 <= n <= len(self.proposals)
def get_proposal(self, n: int) -> Optional[Proposal]:
if self.has_proposal(n):
return self.proposals[n - 1]
async def get_proposal_messages(self) -> Set[discord.Message]:
messages = set()
for proposal in self.proposals:
messages.add(await proposal.fetch_message())
return messages
async def add_proposal(self, **kwargs):
self.assert_locked()
n = len(self.proposals) + 1
new_proposal = Proposal(game=self, n=n, **kwargs)
self.proposals.append(new_proposal)
# ProposalManager.repost_proposal() calls BaseGame.save() so we
# don't have to do that here.
await self.repost_proposal(new_proposal)
return new_proposal
async def permadel_proposal(self, proposal: Proposal):
self.assert_locked()
if not proposal.n == len(self.proposals):
raise RuntimeError("Cannot delete any proposal other than the last one")
del self.proposals[proposal.n - 1]
self.save()
await (await proposal.fetch_message()).delete()
async def log_proposal_submit(self,
agent: discord.Member,
proposal: Proposal):
await self.commit_proposals_and_log(
agent, "submitted", proposal, link_to_commit=True
)
async def log_proposal_permadel(self,
agent: discord.Member,
proposal: Proposal):
await self.commit_proposals_and_log(
agent, "permanently deleted", proposal, link_to_proposal=False, link_to_commit=True
)
async def log_proposal_change_status(self,
agent: discord.Member,
proposal: Proposal):
if proposal.status == ProposalStatus.VOTING:
action = "reopened"
else:
action = proposal.status.value
await self.commit_proposals_and_log(
agent, action, proposal, link_to_commit=True
)
async def log_proposal_change_content(self,
agent: discord.Member,
proposal: Proposal):
await self.commit_proposals_and_log(
agent, "edited", proposal, link_to_commit=True
)
async def log_proposal_vote(self,
agent: discord.Member,
proposal: Proposal,
player: discord.Member,
old_vote_amount: Optional[int],
new_vote_amount: Optional[int]):
if old_vote_amount == new_vote_amount:
return
if new_vote_amount is None:
action = "removed their vote from"
elif old_vote_amount is not None:
action = "changed their vote on"
elif new_vote_amount == 0:
action = "abstained on"
elif new_vote_amount > 0:
action = "voted for"
elif new_vote_amount < 0:
action = "voted against"
else:
action = "WTFed"
if player != agent:
post = f" on behalf of {utils.discord.fake_mention(player)}"
else:
post = ''
if abs(old_vote_amount or 0) > 1 or abs(new_vote_amount or 0) > 1:
post += " ("
if old_vote_amount is not None:
post += f"was {old_vote_amount}"
if new_vote_amount:
post += "; "
if new_vote_amount is not None:
post += f"now {new_vote_amount}"
post += ")"
await self.commit_proposals_and_log(
agent, action, proposal, post=post
)
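# --- Illustrative sketch (not part of the original module) ---
# Vote amounts follow the sign convention in the Proposal docstring:
# positive = for, negative = against, zero = abstain. The tallying
# properties reduce a hypothetical votes mapping like this:
example_votes = {"alice": 2, "bob": -1, "carol": 0}                # made-up players
votes_for = sum(v for v in example_votes.values() if v > 0)        # 2
votes_against = -sum(v for v in example_votes.values() if v < 0)   # 1
votes_abstain = sum(v == 0 for v in example_votes.values())        # 1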
| 35.449153
| 97
| 0.573392
| 1,901
| 16,732
| 4.882167
| 0.137296
| 0.064648
| 0.046223
| 0.014869
| 0.346191
| 0.275401
| 0.225299
| 0.193837
| 0.169702
| 0.141041
| 0
| 0.003041
| 0.331819
| 16,732
| 471
| 98
| 35.524416
| 0.827102
| 0.055044
| 0
| 0.257895
| 0
| 0.002632
| 0.062342
| 0.015797
| 0
| 0
| 0
| 0
| 0.018421
| 1
| 0.044737
| false
| 0.005263
| 0.031579
| 0.028947
| 0.197368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0e74c23345f71b01c04f878f36260962612bba5
| 897
|
py
|
Python
|
vexmpp/features/stream_mgmt.py
|
nicfit/vexmpp
|
e67070d2822da8356345976fb15d365935b550a6
|
[
"MIT"
] | null | null | null |
vexmpp/features/stream_mgmt.py
|
nicfit/vexmpp
|
e67070d2822da8356345976fb15d365935b550a6
|
[
"MIT"
] | 349
|
2017-02-18T22:48:17.000Z
|
2021-12-13T19:50:23.000Z
|
vexmpp/features/stream_mgmt.py
|
nicfit/vexmpp
|
e67070d2822da8356345976fb15d365935b550a6
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from ..stanzas import Stanza
from ..errors import makeStanzaError
from ..protocols.stream_mgmt import NS_URI
async def handle(stream, feature_elem, sm_opts, timeout=None):
assert(feature_elem is not None)
nsmap = {"sm": NS_URI}
enable_elem = Stanza("enable", nsmap={None: NS_URI})
if sm_opts and sm_opts.resume:
enable_elem.set("resume", "true")
stream.send(enable_elem)
resp = await stream.wait([("/sm:enabled", nsmap),
("/sm:failed", nsmap)], timeout=timeout)
if resp.name == "{%s}failed" % NS_URI:
raise makeStanzaError(resp.xml)
sm_opts.sm_id = resp.get("id")
sm_opts.resume = bool(resp.get("resume") and
resp.get("resume") in ("1", "true"))
sm_opts.resume_location = resp.get("location")
sm_opts.max_resume_time = resp.get("max")
return True
| 32.035714
| 70
| 0.625418
| 123
| 897
| 4.390244
| 0.422764
| 0.077778
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002886
| 0.227425
| 897
| 27
| 71
| 33.222222
| 0.776335
| 0.023411
| 0
| 0
| 0
| 0
| 0.090389
| 0
| 0
| 0
| 0
| 0
| 0.05
| 1
| 0
| false
| 0
| 0.15
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0e808e3235cc1782c3a0aac8f2ccd3eaf6e8e7d
| 705
|
py
|
Python
|
tests/feature_extraction/pattern/test_pattern.py
|
fidsusj/HateSpeechDetection
|
1306a8a901aed856e51ee8fe16158ff267fb5405
|
[
"BSD-3-Clause"
] | null | null | null |
tests/feature_extraction/pattern/test_pattern.py
|
fidsusj/HateSpeechDetection
|
1306a8a901aed856e51ee8fe16158ff267fb5405
|
[
"BSD-3-Clause"
] | 17
|
2020-11-08T16:55:54.000Z
|
2021-05-28T05:58:17.000Z
|
tests/feature_extraction/pattern/test_pattern.py
|
fidsusj/HateSpeechDetection
|
1306a8a901aed856e51ee8fe16158ff267fb5405
|
[
"BSD-3-Clause"
] | 2
|
2020-12-18T10:42:58.000Z
|
2021-05-24T19:32:57.000Z
|
from unittest import TestCase
import pandas as pd
from feature_extraction.pattern.pattern import Pattern
from preprocessing.corpus import build_corpus
class Test_Pattern(TestCase):
raw_data = {
"class": [0, 0, 1, 0],
"content": [
"John hates bitches",
"John hates hookers",
"John loves turtles",
"Bitch ass nigga",
],
}
def test_pattern_count(self):
df = build_corpus(pd.DataFrame(data=self.raw_data))
pattern_extractor = Pattern(min_pattern_size=2, max_pattern_size=2, threshold=2)
pattern_extractor.extract_features(df)
self.assertEqual([2, 2, 2, 1], df["pattern_count"].tolist())
| 27.115385
| 88
| 0.639716
| 87
| 705
| 5
| 0.505747
| 0.050575
| 0.055172
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020952
| 0.255319
| 705
| 25
| 89
| 28.2
| 0.807619
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 1
| 0.052632
| false
| 0
| 0.210526
| 0
| 0.368421
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0e812a6800aac72cae877576878f53d8cd3bd64
| 11,525
|
py
|
Python
|
main.py
|
C3ald/Token-API
|
5bb34ac1276b23a6f3c780c8d7011d621f02ab90
|
[
"MIT"
] | 4
|
2021-12-20T22:51:20.000Z
|
2021-12-30T17:55:34.000Z
|
main.py
|
C3ald/Token-API
|
5bb34ac1276b23a6f3c780c8d7011d621f02ab90
|
[
"MIT"
] | 14
|
2021-12-08T18:30:00.000Z
|
2022-01-06T05:27:08.000Z
|
main.py
|
C3ald/Token-API
|
5bb34ac1276b23a6f3c780c8d7011d621f02ab90
|
[
"MIT"
] | null | null | null |
from starlette.responses import Response
from passlib.hash import pbkdf2_sha256
from starlette.websockets import WebSocketDisconnect
from blockchain import Blockchain
# from wallet import Wallet
from fastapi import FastAPI, WebSocket
import uvicorn
import socket
import requests as r
from pydantic import BaseModel
from fastapi.templating import Jinja2Templates
import json
import asyncio
# from Utilities.algorithims import Algs
import time as t
import random
import base64
from sys import getsizeof
# from Utilities.cryptography_testing import Make_Keys
# from Utilities.cryptography_testing import primary_addresses
# from Utilities.cryptography_testing import Check_Wallet_Balance
# from Utilities.cryptography_testing import Ring_CT
# from Utilities.cryptography_testing import Decoy_addresses
from Utilities.cryptography_testing import *
from fastapi_signals import *
ring_ct = Ring_CT()
checkbalance = Check_Wallet_Balance()
create_keys = Make_Keys()
primary_addr = primary_addresses()
decoy_addresses = Decoy_addresses()
#imported templates
#from fastapi.staticfiles import StaticFiles #imported staticfiles
# {
# "node": [
# "http://127.0.0.1:8000", "http://127.0.0.1:8001"
# ]
#}
tags_metadata = [
{'name': 'information',
'description': 'This will allow you to get info about the blockchain'},
{'name': 'wallet',
'description': 'this will allow you to access your wallet and make wallets'},
{'name': 'transaction',
'description': 'transactions'},
{'name': 'mining',
'description': 'mining'},
{'name': 'nodes',
'description': 'adding nodes and replacing the chain'},
{'name': 'contracts',
'description': 'smart contracts on the blockchain'}]
# CONSTANTS
SERVER_NAME = 'Token Network'
SERVER_HOST = '0.0.0.0'
SERVER_PORT = 8000
SERVER_RELOAD = False
DESCRIPTION = "Welcome to The Token Network, a blockchain network with a cryptocurrency called Token, it's like Dogecoin and Bitcoin but faster than Bitcoin and harder to mine than Dogecoin, welcome to the Future of the world."
algs = Algs()
S = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
hostname = socket.gethostname()
IP = socket.gethostbyname(hostname)
# wallet = Wallet()
class Url(BaseModel):
node: str
# class Phrase(BaseModel):
# phrase: str
app = FastAPI(title=SERVER_NAME, openapi_tags=tags_metadata, description=DESCRIPTION)
templates = Jinja2Templates(directory="templates/")
blockchain = Blockchain()
class Transaction(BaseModel):
sender_public_send_key: str
sender_private_send_key: str
sender_view_key: str
receiver: str
amount: float
class AddTransaction(BaseModel):
sender_public_send_key: str
sender_private_send_key: str
sender_view_key: str
receiver: str
transactionID: str
timestamp: str
amount: float
transactiontype: str
class Contract(BaseModel):
sender_public_send_key: str
sender_private_send_key: str
sender_view_key: str
receiver: str
contractbinary: bytes
class Walletkey(BaseModel):
publickey: str
privatekey: str
class Wallet_public(BaseModel):
viewkey: str
class Passphrase(BaseModel):
passphrase: str
class Blockchain(BaseModel):
block: dict
class Recover(BaseModel):
passphrase: str
class Mining(BaseModel):
address: str
class EncryptedTransaction(BaseModel):
sender_publickey: bytes
receiver: bytes
amount: float
@app.get('/')
async def index():
""" returns index page """
return "see /docs for the api"
@app.post('/add_contract', tags=['contracts'])
async def addContract(contractTransaction: Contract):
""" Use this to add smart contracts """
senderPublicKey = contractTransaction.sender_public_send_key
senderPrivateKey = contractTransaction.sender_private_send_key
receiver = contractTransaction.receiver
senderViewKey = contractTransaction.sender_view_key
contractdata = contractTransaction.contractbinary
contract = blockchain.add_smartContract(senderprivatekey= senderPrivateKey,
sendersendpublickey= senderPublicKey,
senderviewkey= senderViewKey,
receiver= receiver,
compiledcontract=contractdata)
return {'message': contract}
@app.get("/get_the_chain", tags=['information'])
async def get_the_chain():
""" Use this to get the whole blockchain """
# update = blockchain.replace_chain()
response = {"blockchain": blockchain.chain, "length": len(blockchain.chain)}
return response
@app.post("/mining", tags=['mining'])
async def mine(keys:Mining):
""" This allows you to mine blocks """
# get previous block
prev_block = blockchain.get_prev_block()
# previous proof
prev_proof = prev_block['proof']
# proof
proof = blockchain.proof_of_work(previous_proof=prev_proof)
# previous hash
prev_hash = blockchain.hash(block=prev_block)
# add data
amount = algs.amount_change(chain=blockchain.chain)
# stealth_key = pbkdf2_sha256.hash(str(keys.publickey))
# decoy = decoy_addresses.decoy_transactions(amount)
# create block
message = blockchain.create_block(proof=proof, previous_hash=prev_hash, forger=keys.address)
#returns the last block in the chain
return {'message': message}
# else:
# return "no wallet detected"
@app.get("/status", tags=['information'])
async def is_valid():
""" Checks to see if chain is valid """
is_valid = blockchain.is_chain_valid(chain=blockchain.chain)
if is_valid:
response = {"message": "Not compromised"}
else:
response = {"message": "Blockchain has been compromised"}
return response
@app.post("/add_transaction/", tags=['transaction'])
async def add_transaction(transaction: AddTransaction):
""" Allows transactions to be added to the chain from nodes"""
senderpublicsendkey = transaction.sender_public_send_key
senderprivatesendkey = transaction.sender_private_send_key
senderviewkey = transaction.sender_view_key
receiver = transaction.receiver
amount = transaction.amount
transactionid = transaction.transactionID
new_transaction = blockchain.add_transaction(
senderprivatekey=senderprivatesendkey,
sendersendpublickey=senderpublicsendkey,
senderviewkey=senderviewkey,
receiver=receiver,
amount=amount,
transactionID=transactionid
)
result = 'transaction has been added and is awaiting verification'
return result
@app.post('/add_unconfirmed_transaction', tags=['transaction'])
async def add_unconfirmed_transaction(transaction: Transaction):
""" broadcasts transactions to all nodes to be verified by miners"""
senderpublicsendkey = transaction.sender_public_send_key
senderprivatesendkey = transaction.sender_private_send_key
senderviewkey = transaction.sender_view_key
receiver = transaction.receiver
amount = transaction.amount
new_transaction = blockchain.add_unconfirmed_transaction(senderprivatekey=senderprivatesendkey,
sendersendpublickey=senderpublicsendkey,
receiver=receiver,
senderviewkey=senderviewkey,
amount=amount)
blockchain.broadcast_transaction(transaction=new_transaction)
result = 'transaction has been added and is awaiting verification'
return result
""" Wallets should be made offline. """
@app.post("/add_node/", tags=['nodes'])
async def add_node(url:Url):
""" This is used to add nodes """
item = url.node
blockchain.add_node(item)
# transaction = blockchain.add_transaction(sender='Network', receiver=wallets.publickey, amount=30)
result = item
return result
@app.post('/add_one_node/', tags=['nodes'])
async def add_one_node(url:Url):
""" adds one node """
item = url.node
blockchain.update_nodes(node=item)
return item
@app.get("/replace_chain", tags=['nodes'])
async def replace_chain():
""" replaces the current chain with the most recent and longest chain """
blockchain.replace_chain()
blockchain.is_chain_valid(chain=blockchain.chain)
return{'message': 'chain has been updated and is valid',
'longest chain': blockchain.chain}
@app.websocket('/dashboard')
async def dashboard(websocket: WebSocket):
""" P2p Dashboard """
await websocket.accept()
# block = blockchain.chain
# websocket.send_json(block)
while True:
block = blockchain.chain
await websocket.send_text(f'Message: {block}')
await asyncio.sleep(10)
@app.websocket("/ws")
async def dashboard_endpoint(websocket: WebSocket):
""" This shows real time data for nodes"""
await websocket.accept()
message = None
while True:
try:
if message != blockchain.chain:
message = blockchain.chain
await websocket.send_json(message)
print(message)
t.sleep(0.2)
else:
pass
except Exception as e:
pass
break
print('client disconnected')
@app.websocket("/nodes")
async def nodes_endpoint(websocket: WebSocket):
""" This shows real time data of each node, this should be used for detecting new nodes in the network or helping with automating adding nodes"""
await websocket.accept()
message = None
while True:
try:
if message != blockchain.nodes:
message = blockchain.nodes
await websocket.send_json(message)
print(message)
t.sleep(0.2)
else:
pass
except Exception as e:
pass
break
print('client disconnected')
@app.post('/check_balance', tags=['wallet'])
async def check_balance(wallet:Wallet_public):
""" Checks the balance of a wallet with the view key """
#this route checks the balance of a publickey
# wallets.checkbalance(viewkey=wallet.viewkey, chain=blockchain.chain)
# return {"publickey":wallet.viewkey,
# "balance": wallets.balance}
balance = checkbalance.balance_check(wallet.viewkey, blockchain=blockchain.chain)
return {'Address': balance['receive address'], 'balance': f'{balance["balance"]}Tokens'}
@app.post('/insert_block', tags=['nodes'])
async def insert_chain(chain:Blockchain):
""" replace the chain if all nodes are down or if node has a
firewall preventing get requests from web servers """
updated_chain = blockchain.update_chain(new_chain=chain.block)
return updated_chain
# if is_valid == True:
# blockchain.chain
# return chain
# else:
# return "Invalid chain"
# @app.post('/recover_wallet', tags=['wallet'])
# async def recover_wallet(recover:Recover):
# """ recover wallet with passphrase and publickey """
# is_valid = wallets.recover_wallet_with_passphrase(recover.passphrase)
# if is_valid == True:
# return {'message': 'Wallet recovery is successful!', 'private key': wallets.privatekey, 'public key': wallets.publickey, 'passphrase': recover.passphrase}
# else:
# return 'invalid publickey or passphrase!'
if __name__ == '__main__':
# hostname = socket.gethostname()
# IP = socket.gethostbyname(hostname)
# blockchain.replace_chain()
uvicorn.run('main:app', host=SERVER_HOST, port=SERVER_PORT, reload=SERVER_RELOAD)
# ran = run
# while run == ran:
# update = blockchain.replace_chain()
# t.sleep(60.0)
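# --- Hedged usage sketch (not part of the original service) ---
# A minimal client for the /check_balance route defined above. The host/port, the
# `viewkey` field name on Wallet_public, and the example key value are assumptions.
import requests

resp = requests.post(
    "http://127.0.0.1:8000/check_balance",
    json={"viewkey": "example-view-key"},
)
print(resp.json())  # expected shape: {'Address': ..., 'balance': '...Tokens'}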
| 29.551282
| 227
| 0.702907
| 1,316
| 11,525
| 6.019757
| 0.215805
| 0.016158
| 0.018935
| 0.024236
| 0.278591
| 0.214214
| 0.179626
| 0.15539
| 0.15539
| 0.15539
| 0
| 0.005647
| 0.200954
| 11,525
| 390
| 228
| 29.551282
| 0.854599
| 0.168156
| 0
| 0.325991
| 0
| 0.004405
| 0.14289
| 0.006233
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.035242
| 0.079295
| 0
| 0.303965
| 0.017621
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0e9d30679706cc8bbbaa272614c4af5c8ce41cd
| 7,096
|
py
|
Python
|
app/user/views.py
|
cosmos-sajal/magic-link
|
346e828673f298bae9ec3075db8d5e837e4b7aaf
|
[
"MIT"
] | 2
|
2020-10-19T07:35:59.000Z
|
2020-10-24T17:43:41.000Z
|
app/user/views.py
|
cosmos-sajal/magic-link
|
346e828673f298bae9ec3075db8d5e837e4b7aaf
|
[
"MIT"
] | null | null | null |
app/user/views.py
|
cosmos-sajal/magic-link
|
346e828673f298bae9ec3075db8d5e837e4b7aaf
|
[
"MIT"
] | null | null | null |
import json
from django.views import View
from django.shortcuts import redirect, render
from django.core.exceptions import ObjectDoesNotExist
from django.contrib import messages
from django.http.response import HttpResponseRedirect
from rest_framework.renderers import TemplateHTMLRenderer
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from user.services.user_service import UserService, TokenService
from user.services.cookies_service import CookiesService
from user.forms.user_forms import LoginForm, MagicLinkForm, RegisterUserForm
from helpers.cache_adapter import CacheAdapter
from user.serializers import RegisterUserSerializer, LoginUserSerializer, \
GenerateMagicLinkSerializer
from user.services.magic_link_service import MagicLinkService
from worker.send_email import send_email
class RegisterUserView(View):
form_class = RegisterUserForm
template_name = 'user/user_register_form.html'
def __create_user(self, data):
email = data['email']
password = data['password']
username = data['username']
user_service = UserService()
user_service.create_user(
email=email,
password=password,
username=username
)
def get(self, request, **kwargs):
form = self.form_class()
return render(request, self.template_name, context={'form': form})
def post(self, request, *args, **kwargs):
form = self.form_class(request.POST)
if form.is_valid():
self.__create_user(form.cleaned_data)
messages.success(request, 'User registered')
return HttpResponseRedirect("/api/v1/user/login/")
else:
messages.error(
request, 'User registration failed!')
return render(request, self.template_name, context={'form': form})
class GenerateMagicLinkView(View):
form_class = MagicLinkForm
template_name = 'user/magic_link_form.html'
def get(self, request, **kwargs):
form = self.form_class()
return render(request, self.template_name, context={'form': form})
def post(self, request, *args, **kwargs):
form = self.form_class(request.POST)
if form.is_valid():
email = form.cleaned_data['email']
user_service = UserService()
user = user_service.get_user(email=email)
magic_link_service = MagicLinkService()
res = magic_link_service.generate_magic_link(
request,
user,
"/api/v1/user/details/"
)
if not res['is_success']:
messages.error(
request, 'Link generation failed!')
return render(request, self.template_name, context={'form': form})
send_email.delay(res['email'], res['content'])
messages.success(
request, 'Magic Link sent to your email!')
else:
messages.error(
request, 'Link generation failed!')
return render(request, self.template_name, context={'form': form})
class RedirectMagicLinkView(APIView):
"""
Redirect the user to the redirect link
corresponding to the magic link token key
"""
def __get_token(self, user_id):
"""
Return token for the user
Args:
user (User)
"""
try:
token = Token.objects.get(user_id=user_id)
return token.key
except ObjectDoesNotExist:
token = Token.objects.create(user_id=user_id)
return token.key
def get(self, request, token):
"""
GET API -> /api/v1/user/magic_link/sign_in/<token>/
"""
service = MagicLinkService()
key = service.get_cache_key(token)
cache_adapter = CacheAdapter()
value = cache_adapter.get(key)
if value is None:
redirect_url = service.get_default_redirect_url()
return HttpResponseRedirect(redirect_url)
value = json.loads(value)
user_id = value['user_id']
redirect_link = value['redirect_link']
token = self.__get_token(user_id)
response = service.set_cookies_in_response(
request,
redirect(redirect_link),
token
)
cache_adapter.delete(key)
return response
class LoginView(View):
form_class = LoginForm
template_name = 'user/login_form.html'
def get(self, request, **kwargs):
form = self.form_class()
return render(request, self.template_name, context={'form': form})
def post(self, request, *args, **kwargs):
form = self.form_class(request.POST)
if form.is_valid():
messages.success(request, 'User logged in')
token_service = TokenService(form.cleaned_data['email'])
token = token_service.get_token()
cookies_service = CookiesService()
response = cookies_service.set_cookies_in_response(
request,
redirect("/api/v1/user/details/"),
token
)
return response
else:
messages.error(
request, 'User login failed!')
return render(request, self.template_name, context={'form': form})
class UserDetailView(APIView):
"""
Returns the user details
"""
renderer_classes = [TemplateHTMLRenderer]
template_name = 'user/user_details.html'
def __get_user_from_token(self, token):
"""
Returns user from token
Args:
token (str)
"""
if token is None:
return None
try:
token = Token.objects.get(key=token)
return token.user
except ObjectDoesNotExist:
return None
def get(self, request):
"""
GET API -> /api/v1/user/details/
"""
token = request.COOKIES.get('token', None)
user = self.__get_user_from_token(token)
if user is None:
return Response({
'is_success': False,
'message': 'No token or incorrect token provided.'
})
return Response({
'is_success': True,
'username': user.username,
'email': user.email
})
class LogoutView(APIView):
"""
Logs out a user by deleting the token
from cookies
"""
def get(self, request):
"""
GET API -> /api/v1/user/logout/
"""
token = request.COOKIES.get('token', None)
cookies_service = CookiesService()
token_service = TokenService(token=token)
token_service.delete_token()
response = cookies_service.delete_cookies_in_response(
HttpResponseRedirect("/api/v1/user/login/")
)
return response
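# --- Hedged wiring sketch (not part of the original file) ---
# How these views might be mapped in a urls.py, inferred from the redirect targets and
# docstrings above; the project's real URL configuration is not shown here, so the
# register and magic-link prefixes are assumptions.
from django.urls import path

urlpatterns = [
    path('api/v1/user/register/', RegisterUserView.as_view()),
    path('api/v1/user/login/', LoginView.as_view()),
    path('api/v1/user/magic_link/', GenerateMagicLinkView.as_view()),
    path('api/v1/user/magic_link/sign_in/<str:token>/', RedirectMagicLinkView.as_view()),
    path('api/v1/user/details/', UserDetailView.as_view()),
    path('api/v1/user/logout/', LogoutView.as_view()),
]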
| 28.612903
| 82
| 0.604284
| 742
| 7,096
| 5.609164
| 0.179245
| 0.025949
| 0.031956
| 0.038683
| 0.306343
| 0.255166
| 0.240269
| 0.207593
| 0.207593
| 0.192215
| 0
| 0.001415
| 0.302847
| 7,096
| 247
| 83
| 28.728745
| 0.839903
| 0.05186
| 0
| 0.379747
| 0
| 0
| 0.07829
| 0.017926
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075949
| false
| 0.012658
| 0.113924
| 0
| 0.398734
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0f002a005c22f45ad7c152982173fb33768f83a
| 1,822
|
py
|
Python
|
src/aves/features/sparse.py
|
sergioangulo/aves
|
43a14ec9c82929136a39590b15fe7f92182aae20
|
[
"CC-BY-3.0"
] | 34
|
2020-10-23T08:57:03.000Z
|
2022-03-23T17:07:20.000Z
|
src/aves/features/sparse.py
|
sergioangulo/aves
|
43a14ec9c82929136a39590b15fe7f92182aae20
|
[
"CC-BY-3.0"
] | 3
|
2021-12-02T22:42:25.000Z
|
2021-12-10T02:37:01.000Z
|
src/aves/features/sparse.py
|
sergioangulo/aves
|
43a14ec9c82929136a39590b15fe7f92182aae20
|
[
"CC-BY-3.0"
] | 11
|
2021-03-25T02:40:34.000Z
|
2022-01-03T22:41:29.000Z
|
from scipy.sparse import dok_matrix
import pandas as pd
from cytoolz import itemmap
def long_dataframe_to_sparse_matrix(
df, index, vars, values, id_to_row=None, var_to_column=None
):
if id_to_row is None:
unique_index_values = df[index].unique()
id_to_row = dict(zip(unique_index_values, range(len(unique_index_values))))
n_rows = len(id_to_row)
if var_to_column is None:
unique_vars = df[vars].unique()
var_to_column = dict(zip(unique_vars, range(len(unique_vars))))
n_cols = len(var_to_column)
dtm = dok_matrix((n_rows, n_cols), dtype=df[values].dtype)
for i, tup in enumerate(df.itertuples()):
elem_row, elem_col, elem_val = (
getattr(tup, index),
getattr(tup, vars),
getattr(tup, values),
)
if elem_row in id_to_row:
row_id = id_to_row[elem_row]
else:
continue
if elem_col in var_to_column:
col_id = var_to_column[elem_col]
else:
continue
dtm[row_id, col_id] = elem_val
return dtm.tocsr(), id_to_row, var_to_column
def sparse_matrix_to_long_dataframe(
matrix,
index_name="index",
var_name="column",
value_name="value",
index_map=None,
var_map=None,
reverse_maps=False,
):
matrix = matrix.todok()
df = pd.DataFrame.from_records(
list(map(lambda x: (x[0][0], x[0][1], x[1]), matrix.items()))
)
df.columns = [index_name, var_name, value_name]
if index_map:
if reverse_maps:
index_map = itemmap(reversed, index_map)
df[index_name] = df[index_name].map(index_map)
if var_map:
if reverse_maps:
var_map = itemmap(reversed, var_map)
df[var_name] = df[var_name].map(var_map)
return df
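# --- Hedged usage sketch (not part of the original module) ---
# Round-trip example for the two helpers above; the tiny DataFrame and its column
# names are invented for illustration.
import pandas as pd

example = pd.DataFrame(
    {"doc": ["a", "a", "b"], "term": ["x", "y", "x"], "count": [1, 2, 3]}
)
matrix, row_map, col_map = long_dataframe_to_sparse_matrix(
    example, index="doc", vars="term", values="count"
)
restored = sparse_matrix_to_long_dataframe(
    matrix,
    index_name="doc",
    var_name="term",
    value_name="count",
    index_map=row_map,
    var_map=col_map,
    reverse_maps=True,  # map matrix indices back to the original labels
)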
| 25.305556
| 83
| 0.625137
| 271
| 1,822
| 3.881919
| 0.243542
| 0.026616
| 0.046578
| 0.030418
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003754
| 0.268935
| 1,822
| 71
| 84
| 25.661972
| 0.786036
| 0
| 0
| 0.148148
| 0
| 0
| 0.008782
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0
| 0.055556
| 0
| 0.12963
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0f14602b75f864977df1e3824cf1f1c1da55ef2
| 25,329
|
py
|
Python
|
wltp/autograph.py
|
ankostis/wltp
|
c95462cadbcab32d4fc94f8ea8bf9d85a0a3763e
|
[
"Apache-2.0"
] | null | null | null |
wltp/autograph.py
|
ankostis/wltp
|
c95462cadbcab32d4fc94f8ea8bf9d85a0a3763e
|
[
"Apache-2.0"
] | null | null | null |
wltp/autograph.py
|
ankostis/wltp
|
c95462cadbcab32d4fc94f8ea8bf9d85a0a3763e
|
[
"Apache-2.0"
] | 1
|
2015-02-20T11:47:33.000Z
|
2015-02-20T11:47:33.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019-2020 European Commission (JRC);
# Licensed under the EUPL (the 'Licence');
# You may not use this work except in compliance with the Licence.
# You may obtain a copy of the Licence at: http://ec.europa.eu/idabc/eupl
"""
Harvest functions & annotate their :term:`dependencies <dependency>` to build :term:`pipeline`\\s.
>>> from wltp.autograph import *
>>> __name__ = "wltp.autograph"
"""
import functools as fnt
import inspect
import logging
import re
import sys
from collections import ChainMap
from inspect import Parameter
from pathlib import Path
from types import ModuleType
from typing import (
Any,
Callable,
Collection,
Iterable,
List,
Mapping,
Pattern,
Set,
Tuple,
Union,
cast,
)
from boltons.iterutils import first
from boltons.setutils import IndexedSet as iset
from graphtik import keyword, optional, sfx, sfxed
from graphtik.base import Operation, func_name
from graphtik.fnop import FnOp, reparse_operation_data
from graphtik.modifier import is_sfx
from .utils import Literal, Token, asdict, aslist, astuple
try:
from re import Pattern as RegexPattern
except ImportError:
# PY3.6
from typing import Pattern as RegexPattern
log = logging.getLogger(__name__)
_my_project_dir = Path(__file__).parent
_FnKey = Union[Union[str, Pattern], Iterable[Union[str, Pattern]]]
def camel_2_snake_case(word):
"""
>>> camel_2_snake_case("HTTPResponseCodeXYZ")
'http_response_code_xyz'
From https://stackoverflow.com/a/1176023/548792
"""
return re.sub(r"((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))", r"_\1", word).lower()
def is_regular_class(name, item):
return inspect.isclass(item) and name[0].isupper()
def _is_in_my_project(item) -> bool:
"""UNUSED"""
in_my_project = False
try:
path = inspect.getfile(item)
except TypeError:
pass # raised for builtins e.g.`sys`
else:
try:
Path(path).relative_to(_my_project_dir)
in_my_project = True
except ValueError:
pass # raised when unrelated
return in_my_project
class Prefkey:
"""Index into dicts with a key or a joined(prefix+key), where prefix: tuple"""
sep = "."
def __init__(self, sep=None):
if sep is not None:
self.sep = sep
def _join_path_names(self, *names):
return self.sep.join(str(i) for i in names)
def _prefkey(self, d, key: _FnKey, default: Union[Callable, Any] = None):
if isinstance(key, tuple):
long_key = self.sep.join(key)
if long_key in d:
return d[long_key]
if key[-1] in d:
return d[key[-1]]
if key in d:
return d[key]
if callable(default):
return default()
return default
class FnHarvester(Prefkey):
"""
Collect public ops, routines, classes & their methods, partials into :attr:`collected`.
:param collected:
a list of 2-tuples::
(name_path, item_path)
where the 2 paths correspond to the same items;
the last path element is always a callable, and
the previous items may be modules and/or classes,
in case non-modules are given directly in :meth:`harvest()`::
[module, [class, ...] callable
E.g. the path of a class constructor is ``(module_name, class_name)``.
For :term:`operation`\\s, the name-part is ``None``.
:param excludes:
names to exclude; they can.be.prefixed or not
:param base_modules:
skip function/classes not in these modules; if not given, include all items.
If string, they are searched in :data:`sys.modules`.
:param predicate:
any user callable accepting a single argument returning falsy to exclude
the visited item
:param include_methods:
Whether to collect methods from classes
**Example:**
>>> from wltp import cycler, downscale, engine, vehicle, vmax
>>> modules = ('os', 'sys')
>>> funcs = FnHarvester(
... base_modules=modules,
... include_methods=False,
... ).harvest()
>>> len(funcs) > 50
True
>>> funcs
[(('os', 'PathLike'),
...
Use this pattern when iterating, to account for any :term:`operation` instances:
>>> funcs = [
... (name, fn if isinstance(fn, Operation) else fn)
... for name, fn
... in funcs
... ]
"""
collected: List[Tuple[Tuple[str, ...], Tuple[Callable, ...]]]
include_methods: bool = True
def __init__(
self,
*,
excludes: Iterable[_FnKey] = None,
base_modules: Iterable[Union[ModuleType, str]] = None,
predicate: Callable[[Any], bool] = None,
include_methods=False,
sep=None,
):
super().__init__(sep)
if include_methods is not None:
self.include_methods = bool(include_methods)
self._seen: Set[int] = set()
self.excludes = set(excludes or ())
self.base_modules = iset(
sys.modules[m] if isinstance(m, str) else m for m in (base_modules or ())
)
self.predicate = predicate
self.collected = []
def is_harvestable(self, name_path, item):
"""Exclude already-seen, private, user-excluded objects(by name or path). """
name = name_path[-1]
if (
name.startswith("_")
or id(item) in self._seen
or name in self.excludes
or self._join_path_names(*name_path) in self.excludes
):
return False
self._seen.add(id(item))
return (
(callable(item) or is_regular_class(name, item) or inspect.ismodule(item))
and (not self.base_modules or inspect.getmodule(item) in self.base_modules)
and (not self.predicate or self.predicate(item))
)
def _collect(self, name_path, item_path):
"""Obey decorated `name`"""
fn = item_path[-1]
decors = get_autograph_decors(fn)
if decors and "name" in decors:
name_path = name_path[:-1] + (decors["name"],)
self.collected.append((name_path, item_path))
def _harvest(self, name_path, item_path):
"""Recursively collect modules, routines & classes,."""
name = name_path[-1]
item = item_path[-1]
if not self.is_harvestable(name_path, item):
pass
elif isinstance(item, Operation):
self._collect(None, item_path)
elif inspect.ismodule(item):
for mb_name, member in inspect.getmembers(item):
# Reset path on modules
self._harvest((item.__name__, mb_name), (item, member))
elif inspect.isroutine(item):
self._collect(name_path, item_path)
elif is_regular_class(name, item):
self._collect(name_path, item_path)
if self.include_methods:
# TIP: scavenge ideas from :class:`doctest.DocTestFinder`
for mb_name, member in inspect.getmembers(item, predicate=callable):
self._harvest(name_path + (mb_name,), item_path + (member,))
else:
pass # partial?
def harvest(self, *items: Any, base_modules=...) -> List[Tuple[str, Callable]]:
"""
Collect any callable `items` and children, respecting `base_modules`, `excludes` etc.
:param items:
module fqdn (if already imported), items with ``__name__``, like
modules, classes, functions, or partials (without ``__name__``).
If nothing is given, :attr:`base_modules` is used in its place.
.. Note::
This parameter works differently from :attr:`base_modules`, that is,
harvesting is not limited to those modules only, recursing to
any imported ones from `items`.
:return:
the :attr:`collected`
"""
old_base_modules = self.base_modules
try:
if base_modules is not ...:
self.base_modules = base_modules
if not items:
items = self.base_modules # type: ignore
for bi in items:
if isinstance(bi, str):
bi, name_path = sys.modules[bi], bi
else:
name_path = tuple(
func_name(bi, mod=0, fqdn=0, human=0, partials=1).split(".")
)
self._harvest(name_path, (bi,))
return self.collected
finally:
self.base_modules = old_base_modules
def paths(self):
"""returns the paths only (no callables), sorted"""
return list(zip(*self.collected))[0]
_unset = Token("unset") # TODO: replace `_unset` with ...
def autographed(
fn=_unset,
*,
name=None,
needs=_unset,
provides=_unset,
renames=_unset,
returns_dict=_unset,
aliases=_unset,
inp_sideffects=_unset,
out_sideffects=_unset,
domain: Union[str, int, Collection] = None,
**kws,
):
"""
Decorator adding ``_autograph`` func-attribute with overrides for :class:`Autograph`.
:param name:
the name of the operation.
- If the same `name` has already been defined for the same `domain`,
it is overwritten; otherwise, a new decoration is appended, so that
:meth:`.Autograph.yield_wrapped_ops()` will produce more than one operations.
- if not given, it will be derived from the `fn` at wrap-time.
:param domain:
one or more list-ified domains to assign decors into
(instead of the "default" domain);
it allows to reuse the same function to build different operation,
when later wrapped into an operation by :class:`.Autograph`.
:param renames:
mappings to rename both any matching the final `needs` & `provides`
:param inp_sideffects:
appended into `needs`; if a tuple, makes it a :class:`.sfxed`
:param out_sideffects:
appended into `provides`; if a tuple, makes it a :class:`.sfxed`
:param kws:
the rest arguments of :class:`graphtik.operation`, such as::
endured, parallel, marshalled, node_props
The rest arguments (e.g. `needs`, etc) are coming from :class:`graphtik.operation`.
"""
kws.update(
{
k: v
for k, v in locals().items()
if v is not _unset and k not in "kws fn name domain".split()
}
)
def decorator(fn):
if hasattr(fn, "_autograph"):
autographs = fn._autograph
if domain in autographs:
autographs[domain][name] = kws
else:
autographs[domain] = {name: kws}
else:
decors = {domain: {name: kws}}
try:
fn._autograph = decors
except TypeError as ex:
# Built-in?
log.debug(
"Wrapped as partial %s to attach `autographed` attribute.", fn
)
fn = fnt.wraps(fn)(fnt.partial(fn))
fn._autograph = decors
return fn
if fn is _unset:
return decorator
return decorator(fn)
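# --- Hedged usage sketch (not part of the original module) ---
# How the decorator above might be applied; the function name, its arguments and the
# "demo" domain are invented for illustration only.
@autographed(name="calc_gear_ratio", provides="gear_ratio", domain="demo")
def calc_gear_ratio(n_rated, n_idle=800):
    return n_rated / n_idle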
def get_autograph_decors(
fn, default=None, domain: Union[str, int, Collection] = None
) -> dict:
"""
Get the 1st match in `domain` of the `fn` :func:`autographed` special attribute.
:param default:
return this if `fn` non-autographed, or domain don't match
:param domain:
list-ified if a single str
:return:
the decors that will override :class:`Autograph` attributes, as found
from the given `fn`, and for the 1st matching domain in `domain`::
<fn>():
_autograph (function-attribute)
<domain> (dict)
<name> (dict)
<decors> (dict)
"""
for dmn in astuple(domain, "domain"):
if hasattr(fn, "_autograph"):
if dmn in fn._autograph:
return fn._autograph[dmn]
return default
class Autograph(Prefkey):
"""
Make a graphtik operation by inspecting a function
The params below (except `full_path_names`) are merged in this order
(1st takes precedence):
1. dict from overrides keyed by `name`
2. decorated with :func:`autographed`
3. inspected from the callable
**Example:**
>>> def calc_sum_ab(a, b=0):
... return a + b
>>> aug = Autograph(out_patterns=['calc_', 'upd_'], renames={"a": "A"})
>>> aug.wrap_funcs([calc_sum_ab])
[FnOp(name='calc_sum_ab',
needs=['A', 'b'(?)],
provides=['sum_ab'],
fn='calc_sum_ab')]
"""
def __init__(
self,
out_patterns: _FnKey = None,
overrides: Mapping[_FnKey, Mapping] = None,
renames: Mapping = None,
full_path_names: bool = False,
domain: Union[str, int, Collection] = None,
sep=None,
):
super().__init__(sep)
#: Autodeduce `provides` by parsing function-names against a collection
#: of these items, and decide `provides` by the 1st one matching
#: (unless `provides` are specified in the `overrides`):
#:
#: - regex: may contain 1 or 2 groups:
#:
#: - 1 group: the name of a single `provides`
#: - 2 groups: 2nd is the name of a single :term:`sideffected` dependency,
#: the 1st is the sideffect acting upon the former;
#:
#: - str: matched as a prefix of the function-name, which is trimmed
#: by the first one matching to derive a single `provides`;
#:
#: Note that any `out_sideffects` in overrides, alone, do not block the rule above.
self.out_patterns = out_patterns and aslist(out_patterns, "out_patterns")
#: a mapping of ``fn-keys --> dicts`` with keys::
#:
#: name, needs, provides, renames, inp_sideffects, out_sideffects
#:
#: An `fn-key` may be a string-tuple of names like::
#:
#: [module, [class, ...] callable
self.overrides = overrides and asdict(overrides, "overrides")
#: global ``from --> to`` renamings applied both onto `needs` & `provides`.
#: They are applied after merging has been completed, so they can rename
#: even "inspected" names.
self.renames = renames and asdict(renames, "renames")
#: Whether operation-nodes would be named after the fully qualified name
#: (separated with `.` by default)
self.full_path_names = full_path_names
#: the :func:`.autographed` domains to search when wrapping functions, in-order;
#: if undefined, only the default domain (``None``) is included,
#: otherwise, the default, ``None``, must be appended explicitly
#: (usually at the end).
#: List-ified if a single str, :func:`autographed` decors for the 1st one
#: matching are used;
self.domain: Collection = (None,) if domain is None else domain
def _from_overrides(self, key):
return self.overrides and self._prefkey(self.overrides, key) or {}
def _match_fn_name_pattern(
self, fn_name, pattern
) -> Union[str, Tuple[str, str], None]:
"""return matched group or groups, callable results or after matched prefix string"""
if isinstance(pattern, RegexPattern):
m = pattern.search(fn_name)
groups = m and m.groups()
if groups:
if len(groups) == 1:
return groups[0]
if len(groups) > 2:
raise ValueError(
f"The `out_pattern` {pattern} matched on '{fn_name}' >2 groups: {groups}"
)
return sfxed(*reversed(groups))
elif callable(pattern):
return pattern(fn_name)
elif fn_name.startswith(pattern):
return fn_name[len(pattern) :]
def _deduce_provides_from_fn_name(self, fn_name):
## Trim prefix from function-name to derive a singular "provides".
provides = first(
self._match_fn_name_pattern(fn_name, p) for p in self.out_patterns
)
return provides
def _apply_renames(
self,
rename_maps: Iterable[Union[Mapping, Literal[_unset]]],
word_lists: Iterable,
):
"""
Rename words in all `word_lists` matching keys in `rename_maps`.
"""
rename_maps = [d for d in rename_maps if d and d is not _unset]
renames = ChainMap(*rename_maps)
if renames:
word_lists = tuple([renames.get(w, w) for w in wl] for wl in word_lists)
return word_lists
def _collect_rest_op_args(self, decors: dict):
"""Collect the rest operation arguments from `autographed` decoration."""
# NOTE: append more arguments as graphtik lib evolves.
rest_op_args = (
"returns_dict aliases endured parallel marshalled node_props".split()
)
return {k: v for k, v in decors.items() if k in rest_op_args}
def yield_wrapped_ops(
self,
fn: Union[
Callable,
Tuple[Union[str, Collection[str]], Union[Callable, Collection[Callable]]],
],
exclude=(),
domain: Union[str, int, Collection] = None,
) -> Iterable[FnOp]:
"""
Convert a (possibly **@autographed**) function into an graphtik **FnOperations**,
respecting any configured overrides
:param fn:
either a callable, or a 2-tuple(`name-path`, `fn-path`) for::
[module[, class, ...]] callable
- If `fn` is an operation, yielded as is (found also in 2-tuple).
- Both tuple elements may be singulars, and are auto-tuple-zed.
- The `name-path` may (or may not) correspond to the given `fn-path`,
and is used to derive the operation-name; if not given, the function
name is inspected.
- The last elements of the `name-path` are overridden by names in decorations;
if the decor-name is the "default" (`None`), the `name-path` becomes
the op-name.
- The `name-path` is not used when matching overrides.
:param exclude:
a list of decor-names to exclude, as stored in decors.
Ignored if `fn` already an operation.
:param domain:
if given, overrides :attr:`domain` for :func:`.autographed` decorators
to search.
List-ified if a single str, :func:`autographed` decors for the 1st one
matching are used.
:return:
one or more :class:`FnOp` instances (if more than one name is defined
when the given function was :func:`autographed`).
Overrides order: my-args, self.overrides, autograph-decorator, inspection
See also: David Brubeck Quartet, "40 days"
"""
if isinstance(fn, tuple):
name_path, fn_path = fn
else:
name_path, fn_path = (), fn
fun_path = cast(Tuple[Callable, ...], astuple(fn_path, None))
fun = fun_path[-1]
if isinstance(fun, Operation):
## pass-through operations
yield fun
return
def param_to_modifier(name: str, param: inspect.Parameter) -> str:
return (
optional(name)
# is optional?
if param.default is not inspect._empty # type: ignore
else keyword(name)
if param.kind == Parameter.KEYWORD_ONLY
else name
)
given_name_path = astuple(name_path, None)
decors_by_name = get_autograph_decors(fun, {}, domain or self.domain)
for decor_name, decors in decors_by_name.items() or ((None, {}),):
if given_name_path and not decor_name:
name_path = decor_path = given_name_path
else: # Name in decors was "default"(None).
name_path = decor_path = astuple(
(decor_name if decor_name else func_name(fun, fqdn=1)).split("."),
None,
)
assert decor_path, locals()
if given_name_path:
# Overlay `decor_path` over `named_path`, right-aligned.
name_path = tuple(*name_path[: -len(decor_path)], *decor_path)
fn_name = str(name_path[-1])
if fn_name in exclude:
continue
overrides = self._from_overrides(decor_path)
op_data = (
ChainMap(overrides, decors)
if (overrides and decors)
else overrides
if overrides
else decors
)
if op_data:
log.debug("Autograph overrides for %r: %s", name_path, op_data)
op_props = "needs provides renames, inp_sideffects out_sideffects".split()
needs, provides, override_renames, inp_sideffects, out_sideffects = (
op_data.get(a, _unset) for a in op_props
)
sig = None
if needs is _unset:
sig = inspect.signature(fun)
needs = [
param_to_modifier(name, param)
for name, param in sig.parameters.items()
if name != "self" and param.kind is not Parameter.VAR_KEYWORD
]
## Insert object as 1st need for object-methods.
#
if len(fun_path) > 1:
clazz = fun_path[-2]
# TODO: respect autograph decorator for object-names.
class_name = name_path[-2] if len(name_path) > 1 else clazz.__name__
if is_regular_class(class_name, clazz):
log.debug("Object-method %s.%s", class_name, fn_name)
needs.insert(0, camel_2_snake_case(class_name))
needs = aslist(needs, "needs")
if ... in needs:
if sig is None:
sig = inspect.signature(fun)
needs = [
arg_name if n is ... else n
for n, arg_name in zip(needs, sig.parameters)
]
if provides is _unset:
if is_regular_class(fn_name, fun):
## Convert class-name into object variable.
provides = camel_2_snake_case(fn_name)
elif self.out_patterns:
provides = self._deduce_provides_from_fn_name(fn_name) or _unset
if provides is _unset:
provides = ()
provides = aslist(provides, "provides")
needs, provides = self._apply_renames(
(override_renames, self.renames), (needs, provides)
)
if inp_sideffects is not _unset:
needs.extend(
(i if is_sfx(i) else sfxed(*i) if isinstance(i, tuple) else sfx(i))
for i in aslist(inp_sideffects, "inp_sideffects")
)
if out_sideffects is not _unset:
provides.extend(
(i if is_sfx(i) else sfxed(*i) if isinstance(i, tuple) else sfx(i))
for i in aslist(out_sideffects, "out_sideffects")
)
if self.full_path_names:
fn_name = self._join_path_names(*name_path)
op_kws = self._collect_rest_op_args(decors)
yield FnOp(fn=fun, name=fn_name, needs=needs, provides=provides, **op_kws)
def wrap_funcs(
self,
funcs: Collection[
Union[
Callable,
Tuple[
Union[str, Collection[str]], Union[Callable, Collection[Callable]]
],
]
],
exclude=(),
domain: Union[str, int, Collection] = None,
) -> List[FnOp]:
"""
Convert a (possibly **@autographed**) function into one (or more) :term:`operation`\\s.
:param fn:
a list of funcs (or 2-tuples (name-path, fn-path)
.. seealso:: :meth:`yield_wrapped_ops()` for the rest arguments.
"""
return [
op
for fn_or_paths in funcs
for op in self.yield_wrapped_ops(
fn_or_paths, exclude=exclude, domain=domain
)
]
"""
Example code hidden from Sphinx:
>>> from graphtik import compose
>>> aug = Autograph(['calc_', 'upd_'], {
... 'calc_p_available':{'provides': 'p_avail'},
... 'calc_p_resist': {'provides': 'p_resist'},
... 'calc_inertial_power': {'provides': 'p_inert'},
... })
>>> ops = aug.wrap_funcs(funcs.items())
>>> netop = compose('wltp', *(op for op in ops if op.provides))
"""
| 34.182186
| 98
| 0.569189
| 3,012
| 25,329
| 4.641766
| 0.183599
| 0.022316
| 0.006866
| 0.006866
| 0.112081
| 0.074387
| 0.061369
| 0.044203
| 0.038767
| 0.034332
| 0
| 0.004822
| 0.328675
| 25,329
| 740
| 99
| 34.228378
| 0.817396
| 0.354179
| 0
| 0.171875
| 0
| 0
| 0.030952
| 0.002668
| 0
| 0
| 0
| 0.002703
| 0.002604
| 1
| 0.0625
| false
| 0.010417
| 0.052083
| 0.010417
| 0.205729
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0f233170bb0ccf428c1b03e76710d47e94cea40
| 2,637
|
py
|
Python
|
gen_embeddings.py
|
dominiccarrano/backdoor-nn-geometry
|
d1fa0754f1d57a9b303e2eb71edf0787a86529c8
|
[
"MIT"
] | 1
|
2021-05-28T14:57:57.000Z
|
2021-05-28T14:57:57.000Z
|
gen_embeddings.py
|
dominiccarrano/backdoor-nn-geometry
|
d1fa0754f1d57a9b303e2eb71edf0787a86529c8
|
[
"MIT"
] | null | null | null |
gen_embeddings.py
|
dominiccarrano/backdoor-nn-geometry
|
d1fa0754f1d57a9b303e2eb71edf0787a86529c8
|
[
"MIT"
] | null | null | null |
import pandas as pd
import os
import torch
import numpy as np
import argparse
from trojai_utils import *
def batch_embeddings(reviews, N, batch_size, tokenizer, embedding, cls_first, embedding_dim=768):
embeddings = torch.zeros((N, 1, embedding_dim))
for i in range(N // batch_size):
review_batch = reviews[i*batch_size:(i+1)*batch_size]
embedding_batch = get_embeddings(tokenizer, embedding, review_batch, cls_token_is_first=cls_first)
embeddings[i*batch_size:(i+1)*batch_size, :, :] = embedding_batch
return embeddings
# Get args
parser = argparse.ArgumentParser(description="Generate embeddings")
parser.add_argument('--embedding-type', type=str,
help='Model architecture (one of "BERT", "DistilBERT", "GPT-2")')
parser.add_argument('--n', type=int, default=1000,
help='Number of embeddings of each sentiment to generate')
parser.add_argument('--batch-size', type=int, default=50,
help='Size of batches to feed into the language model for embedding generation')
args = parser.parse_args()
# Load in the data
base_huggingface_path = "your path with the huggingface transformer files"
base_data_path = "your file path with the reviews datasets"
sentiment_data = pd.read_csv(os.path.join(base_data_path, "train_datasets.csv"))
# Split by sentiment
pos_data = sentiment_data[sentiment_data.sentiment==True].sample(args.n)
neg_data = sentiment_data[sentiment_data.sentiment==False].sample(args.n)
# Get random samples
pos_reviews = list(np.asarray(pos_data.reviewText, dtype=str))
pos_labels = torch.ones(args.n)
neg_reviews = list(np.asarray(neg_data.reviewText, dtype=str))
neg_labels = torch.zeros(args.n)
# Make embeddings
cls_first = (args.embedding_type == "DistilBERT") or (args.embedding_type == "BERT")
tokenizer, embedding = get_LM(args.embedding_type, base_huggingface_path)
pos_embeddings = batch_embeddings(pos_reviews, args.n, args.batch_size, tokenizer, embedding, cls_first)
neg_embeddings = batch_embeddings(neg_reviews, args.n, args.batch_size, tokenizer, embedding, cls_first)
# Save results
base_embedding_path = "your path to save embeddings to"
torch.save(pos_embeddings, os.path.join(base_embedding_path, args.embedding_type, "pos_embeddings{}.pt".format(args.n)))
torch.save(neg_embeddings, os.path.join(base_embedding_path, args.embedding_type, "neg_embeddings{}.pt".format(args.n)))
torch.save(pos_labels, os.path.join(base_embedding_path, args.embedding_type, "pos_labels{}.pt".format(args.n)))
torch.save(neg_labels, os.path.join(base_embedding_path, args.embedding_type, "neg_labels{}.pt".format(args.n)))
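# --- Hedged follow-up sketch (not part of the original script) ---
# Reloading the tensors saved above for later experiments; assumes the same
# base_embedding_path, embedding type and --n that were used when generating them.
def load_embeddings(path, embedding_type, n):
    pos = torch.load(os.path.join(path, embedding_type, "pos_embeddings{}.pt".format(n)))
    neg = torch.load(os.path.join(path, embedding_type, "neg_embeddings{}.pt".format(n)))
    return pos, neg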
| 49.754717
| 120
| 0.759954
| 387
| 2,637
| 4.96124
| 0.271318
| 0.026042
| 0.061979
| 0.036458
| 0.323958
| 0.310938
| 0.252083
| 0.204167
| 0.204167
| 0.167708
| 0
| 0.005603
| 0.120212
| 2,637
| 53
| 121
| 49.754717
| 0.821983
| 0.035267
| 0
| 0
| 0
| 0
| 0.176517
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025641
| false
| 0
| 0.153846
| 0
| 0.205128
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0f527a740c29092c88c485c40c531a07e3a243b
| 1,584
|
py
|
Python
|
example/image-classification/test_score.py
|
Abusnina/mxnet
|
7f8d94a24bf64fe0f24712a7952a09725c2df9bd
|
[
"Apache-2.0"
] | 399
|
2017-05-30T05:12:48.000Z
|
2022-01-29T05:53:08.000Z
|
smd_hpi/examples/binary-imagenet1k/test_score.py
|
yanghaojin/BMXNet
|
102f8d0ed59529bbd162c37bf07ae58ad6c4caa1
|
[
"Apache-2.0"
] | 58
|
2017-05-30T23:25:32.000Z
|
2019-11-18T09:30:54.000Z
|
smd_hpi/examples/binary-imagenet1k/test_score.py
|
yanghaojin/BMXNet
|
102f8d0ed59529bbd162c37bf07ae58ad6c4caa1
|
[
"Apache-2.0"
] | 107
|
2017-05-30T05:53:22.000Z
|
2021-06-24T02:43:31.000Z
|
"""
test pretrained models
"""
from __future__ import print_function
import mxnet as mx
from common import find_mxnet, modelzoo
from common.util import download_file, get_gpus
from score import score
def download_data():
download_file('http://data.mxnet.io/data/val-5k-256.rec', 'data/val-5k-256.rec')
def test_imagenet1k_resnet(**kwargs):
models = ['imagenet1k-resnet-34',
'imagenet1k-resnet-50',
'imagenet1k-resnet-101',
'imagenet1k-resnet-152']
accs = [.72, .75, .765, .76]
for (m, g) in zip(models, accs):
acc = mx.metric.create('acc')
(speed,) = score(model=m, data_val='data/val-5k-256.rec',
rgb_mean='0,0,0', metrics=acc, **kwargs)
r = acc.get()[1]
print('testing %s, acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
def test_imagenet1k_inception_bn(**kwargs):
acc = mx.metric.create('acc')
m = 'imagenet1k-inception-bn'
g = 0.72
(speed,) = score(model=m,
data_val='data/val-5k-256.rec',
rgb_mean='123.68,116.779,103.939', metrics=acc, **kwargs)
r = acc.get()[1]
print('Tested %s acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
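# --- Hedged variant sketch (not part of the original tests) ---
# The same scoring pattern applied to one more pretrained model; the model name and the
# expected accuracy band are assumptions, not values from the original suite.
def test_imagenet1k_resnet18(**kwargs):
    acc = mx.metric.create('acc')
    m, g = 'imagenet1k-resnet-18', 0.68
    (speed,) = score(model=m, data_val='data/val-5k-256.rec',
                     rgb_mean='0,0,0', metrics=acc, **kwargs)
    r = acc.get()[1]
    print('testing %s, acc = %f, speed = %f img/sec' % (m, r, speed))
    assert r > g and r < g + .1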
if __name__ == '__main__':
gpus = get_gpus()
assert len(gpus) > 0
batch_size = 16 * len(gpus)
gpus = ','.join([str(i) for i in gpus])
download_data()
test_imagenet1k_resnet(gpus=gpus, batch_size=batch_size)
test_imagenet1k_inception_bn(gpus=gpus, batch_size=batch_size)
| 33.702128
| 84
| 0.599747
| 233
| 1,584
| 3.909871
| 0.351931
| 0.046103
| 0.039517
| 0.052689
| 0.379802
| 0.302964
| 0.245884
| 0.245884
| 0.182217
| 0.182217
| 0
| 0.061718
| 0.243056
| 1,584
| 46
| 85
| 34.434783
| 0.698082
| 0.013889
| 0
| 0.157895
| 0
| 0
| 0.207207
| 0.055985
| 0
| 0
| 0
| 0
| 0.078947
| 1
| 0.078947
| false
| 0
| 0.131579
| 0
| 0.210526
| 0.078947
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0f98a3a2c3fd048d824ad74c18868bd24ec85c0
| 1,686
|
py
|
Python
|
inference.py
|
KirtoXX/Object_track
|
92b7d3308ab12d9211b04d18f825bf9a488c46a2
|
[
"Apache-2.0"
] | null | null | null |
inference.py
|
KirtoXX/Object_track
|
92b7d3308ab12d9211b04d18f825bf9a488c46a2
|
[
"Apache-2.0"
] | null | null | null |
inference.py
|
KirtoXX/Object_track
|
92b7d3308ab12d9211b04d18f825bf9a488c46a2
|
[
"Apache-2.0"
] | null | null | null |
from keras import layers
import tensorflow as tf
from Resnet import ResNet50
import keras
from keras.models import Input,Model
def inference(image_pre,image_now,location_tensor,shape):
# extract high level features with a shared vision backbone
input = Input(shape=shape)
vision_model = keras.applications.MobileNet(include_top=False,
weights='imagenet',
input_tensor=input,
input_shape=[224,224,3])
vision_model.trainable = False
feature1 = vision_model(image_pre)
feature2 = vision_model(image_now)
#reshape tensor to vector
flatten = layers.Flatten()
feature1 = flatten(feature1)
feature2 = flatten(feature2)
#get high level feature
fc_unit = 512
fc1 = layers.Dense(units=fc_unit,name='fc1',activation='relu')
reshape = layers.Reshape((1,fc_unit))
bn1 = layers.BatchNormalization(name='bn1')
#fc1 block
feature1 = fc1(feature1)
feature2 = fc1(feature2)
feature1 = bn1(feature1)
feature2 = bn1(feature2)
feature1 = layers.Activation('relu')(feature1)
feature2 = layers.Activation('relu')(feature2)
feature1 = reshape(feature1)
feature2 = reshape(feature2)
# build features into (sample, time_step, input_dim)
out = layers.concatenate([feature1,feature2],axis=1)
out = layers.GRU(units=128,name='GRU')(out)
out = layers.concatenate([out,location_tensor],axis=1)
out = layers.Dense(units=64, name='fc2')(out)
out = layers.Activation('relu')(out)
out = layers.Dense(units=4,name='fc3')(out)
out = layers.Activation('sigmoid')(out)
return out
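# --- Hedged wiring sketch (not part of the original file) ---
# How inference() above could be assembled into a trainable Keras model; the input
# shapes (224x224x3 images, a 4-value location vector) follow the values hard-coded in
# the function, but the variable names and the loss choice here are assumptions.
image_pre = Input(shape=(224, 224, 3))
image_now = Input(shape=(224, 224, 3))
location = Input(shape=(4,))
prediction = inference(image_pre, image_now, location, shape=(224, 224, 3))
model = Model(inputs=[image_pre, image_now, location], outputs=prediction)
model.compile(optimizer='adam', loss='mse')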
| 31.811321
| 72
| 0.645907
| 199
| 1,686
| 5.38191
| 0.361809
| 0.058824
| 0.044818
| 0.026144
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043922
| 0.243772
| 1,686
| 52
| 73
| 32.423077
| 0.796078
| 0.072954
| 0
| 0
| 0
| 0
| 0.029525
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027027
| false
| 0
| 0.135135
| 0
| 0.189189
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0f9bc7af6fe73617d028c362d0710385e92b83d
| 1,807
|
py
|
Python
|
src/visualizations/Visualize.py
|
chpatola/election_nlp
|
6463edb2eacca09ff828029c69d11be7985ceeb0
|
[
"MIT"
] | 1
|
2020-04-11T12:00:09.000Z
|
2020-04-11T12:00:09.000Z
|
src/visualizations/Visualize.py
|
chpatola/election_nlp
|
6463edb2eacca09ff828029c69d11be7985ceeb0
|
[
"MIT"
] | null | null | null |
src/visualizations/Visualize.py
|
chpatola/election_nlp
|
6463edb2eacca09ff828029c69d11be7985ceeb0
|
[
"MIT"
] | null | null | null |
"""Visualizations for NLP analysis"""
import pandas as pd
import numpy as np
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt
import seaborn as sns
def _plot_classification_report(val_y,predictions):
report = classification_report(val_y, predictions, output_dict=True)
report_df = pd.DataFrame(report).transpose().round(4)
fig, ax = plt.subplots()
ax.axis('off')
ax.axis('tight')
ax.table(cellText=report_df.values,
colLabels=report_df.columns,
rowLabels=report_df.index,
loc='center',
bbox=[0.2, 0.2, 0.8, 0.8])
fig.tight_layout()
return fig
def cm_analysis(y_true, y_pred, filename, labels, ymap=None, figsize=(15,50)):
if ymap is not None:
y_pred = [ymap[yi] for yi in y_pred]
y_true = [ymap[yi] for yi in y_true]
labels = [ymap[yi] for yi in labels]
cm = confusion_matrix(y_true, y_pred, labels=labels)
cm_sum = np.sum(cm, axis=1, keepdims=True)
cm_perc = cm / cm_sum.astype(float) * 100
annot = np.empty_like(cm).astype(str)
nrows, ncols = cm.shape
for i in range(nrows):
for j in range(ncols):
c = cm[i, j]
p = cm_perc[i, j]
if i == j:
s = cm_sum[i]
annot[i, j] = '%.1f%%\n%d/%d' % (p, c, s)
elif c == 0:
annot[i, j] = ''
else:
annot[i, j] = '%.1f%%\n%d' % (p, c)
cm = pd.DataFrame(cm, index=labels, columns=labels)
cm.index.name = 'Actual'
cm.columns.name = 'Predicted'
fig, ax = plt.subplots(figsize=figsize)
sns.heatmap(cm, annot=annot, fmt='', ax=ax,vmin=0, vmax=60)
plt.show()
plt.savefig(filename,bbox_inches='tight')
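# --- Hedged usage sketch (not part of the original module) ---
# Small example of cm_analysis() above; the labels and predictions are invented
# placeholders, not values from the project's data.
y_true = ["KOK", "SDP", "VIHR", "KOK"]
y_pred = ["KOK", "KOK", "VIHR", "KOK"]
cm_analysis(y_true, y_pred, "confusion_matrix.png",
            labels=["KOK", "SDP", "VIHR"], figsize=(6, 6))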
| 35.431373
| 78
| 0.598229
| 271
| 1,807
| 3.874539
| 0.391144
| 0.011429
| 0.025714
| 0.031429
| 0.126667
| 0.047619
| 0
| 0
| 0
| 0
| 0
| 0.017332
| 0.265634
| 1,807
| 51
| 79
| 35.431373
| 0.773926
| 0.017156
| 0
| 0
| 0
| 0
| 0.032185
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042553
| false
| 0
| 0.12766
| 0
| 0.191489
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0fc29ac4209ca758dbc0af3c328c5e20828a2e9
| 9,189
|
py
|
Python
|
cvprac_abstraction/cvpChangeControl.py
|
titom73/arista-cvp-scripts
|
64f7ffa28d2483b3dd357e9b6c671725a51661b4
|
[
"BSD-3-Clause"
] | 2
|
2019-08-20T07:35:08.000Z
|
2019-10-01T00:52:14.000Z
|
cvprac_abstraction/cvpChangeControl.py
|
inetsix/arista-cvp-scripts
|
64f7ffa28d2483b3dd357e9b6c671725a51661b4
|
[
"BSD-3-Clause"
] | 2
|
2019-05-07T14:36:38.000Z
|
2019-07-26T05:56:51.000Z
|
cvprac_abstraction/cvpChangeControl.py
|
titom73/arista-cvp-scripts
|
64f7ffa28d2483b3dd357e9b6c671725a51661b4
|
[
"BSD-3-Clause"
] | 1
|
2021-05-08T20:15:36.000Z
|
2021-05-08T20:15:36.000Z
|
import logging
from datetime import datetime
from datetime import timedelta
from cvprac.cvp_client_errors import CvpApiError
class CvpChangeControl(object):
"""Change-control class to provide generic method for CVP CC mechanism.
Change Control structure is based on:
- A name to identify change
- A list of tasks already created on CVP and on pending state
- An optional scheduling. If no schedule is defined,
then the task will be run 3 minutes after creation of the CC
**List of public available methods:**
Methods
-------
add_task()
Append a task to self._list_changes
get_tasks()
Return list of available tasks for this CC
get_list_changes()
Return list of tasks attached to this CC
create()
Create change-control on CVP server
Example
-------
>>> from cvprac_abstraction import CVP
>>> from cvprac_abstraction import connect_to_cvp
>>> from cvprac_abstraction.cvpConfiglet import CvpChangeControl
>>>
>>> parameters['cvp'] = '127.0.0.1'
>>> parameters['username'] = 'arista'
>>> parameters['password'] = 'arista'
>>>
>>> client = connect_to_cvp(parameters)
>>>
>>> change_control = CvpChangeControl(cvp_server=client, name='MyChangeControl')
>>> result = change_control.create(tz=timezone,
country='FR',
schedule=True,
schedule_at='2019-03-01-12h00',
snap_template="snapshotTemplate_9_4694793526491",
change_type='Custom', stop_on_error="true")
>>>
Warnings
--------
- Change Control execution is not running snapshot before and after with cvprac 1.0.1
"""
def __init__(self, cvp_server, name='Automated_Change_Control'):
"""Class Constructor.
Build class content with the following activities:
- save cvp_server information
- save name for CC
- instantiate list for tasks
- Collect tasks available from CVP
Parameters
----------
cvp_server : CvpClient
CVP Server information
name : str
Optional - Name of the Change Control.
Default is ``Automated_Change_Control``
"""
logging.debug('create instance of CvpChangeControl')
self._cvp_server = cvp_server
self._name = name
# List of available tasks from server
self._available = list()
# List to save tasks to run with their order
# Ex: [{'taskId': '100', 'taskOrder': 1},
# {'taskId': '101', 'taskOrder': 1},
# {'taskId': '102', 'taskOrder': 2}]
self._list_changes = list()
self._retrieve_tasks()
def _retrieve_tasks(self):
"""Extract tasks from CVP Server.
Connect to CVP server and collect tasks in pending state
These tasks are saved in self._available structure dedicated
to pending tasks.
"""
logging.debug('getting list of available task for change control')
self._available = self._cvp_server.api.change_control_available_tasks()
def add_task(self, task):
"""Add a tasks to available list.
This task attach this new tasks to the pending tasks list.
Parameters
----------
task : str
TaskID from CVP server
"""
self._available.append(task)
def get_tasks(self, refresh=False):
"""Provide list of all available tasks.
Return list of all tasks getting from CVP and/or attached
with add_task method.
Parameters
----------
refresh : bool
Optional - Make a call to CVP to get latest list of tasks
Returns
-------
list
List of available tasks found in this CC
"""
logging.debug('extracting list of available tasks out of our instance')
if refresh:
logging.debug('refreshing list of tasks available for change control') # noqa E501
self._retrieve_tasks()
return self._available
def _build_change_dictionnary(self, order_mode='linear'):
"""Build ordered list to schedule changes.
CVP Change Control expect a list with an order to run tasks.
By default, all tasks are executed at the same time.
But using order_mode set to incremental every task will
be scheduled sequentially in this change-control
Parameters
----------
order_mode : str
Optional - Method to build task list.
Shall be ``linear`` or ``incremental``.
Note
----
Only linear has been tested.
"""
logging.info('Building a dictionary of changes')
change_position = 1
for task in self._available:
change = dict()
change['taskId'] = task['workOrderId']
change['taskOrder'] = (change_position)
logging.debug(' > Adding task %s to position %s',
change['taskId'],
change['taskOrder'])
self._list_changes.append(change)
if order_mode == 'incremental':
change_position += 1
def get_list_changes(self, mode='linear'):
"""Return list of tasks and their execution order.
Parameters
----------
mode : str
Information about tasks scheduling.
Shall be ``linear`` or ``incremental``.
Note
----
Only linear has been tested.
Returns
-------
list
List of changes and their order
"""
if len(self._list_changes) == 0:
self._build_change_dictionnary(order_mode=mode)
return self._list_changes
# TODO: manage way to retrieve Template ID
def create(self, mode='linear',
country='France',
tz='Europe/Paris',
schedule=False,
schedule_at='',
snap_template='1708dd89-ff4b-4d1e-b09e-ee490b3e27f0',
change_type='Custom',
stop_on_error="true"):
"""Create a change-control.
Parameters
----------
mode : str
Optional - method to order tasks (default : linear)
country : str
Optional - Country requested by CVP API (default:France)
tz : str
Optional - Timezone required by CVP (default: Europe/Paris)
schedule : bool
Optional - Enable CC scheduling (default: False)
schedule_at : str
Optional - Time to execute CC if scheduled
snap_template : str
Optional - Snapshot template ID to run before / after tasks
change_type : str
Optional - CVP definition for CC. Might be Custom or Rollback.
(default: Custom)
stop_on_error : str
Optional - boolean string to stop CVP on errors
Returns
-------
dict
CVP creation result (None if error occurs)
"""
# If scheduling is not enabled, then we create the change control
# to be run now+3 minutes by default
if schedule is False:
schedule_at = (datetime.now() + timedelta(seconds=180)).strftime("%Y-%m-%d %H:%M") # noqa E501
logging.debug('configure execution time in +3 minutes (%s)',
schedule_at)
# If the list of changes to apply has not been built already,
# then we do it before creating change request
if len(self._list_changes) == 0:
self._build_change_dictionnary(order_mode=mode)
logging.debug('Tasks to attach to current change-control:')
for entry in self._list_changes:
logging.debug(' * Found task %s w/ position %s',
entry['taskId'],
entry['taskOrder'])
# FIXME: change-control does not set snapshot ID correctly and this one is not run before and after change
# Fix implemented in develop version :
# https://github.com/aristanetworks/cvprac/blob/develop/cvprac/cvp_api.py#L1633
# pip install pip install git+https://github.com/aristanetworks/cvprac.git@develop
# Should solve problem
try:
creation_request = self._cvp_server.api.create_change_control(name=self._name, # noqa E501
change_control_tasks=self._list_changes,
timezone=tz,
country_id=country,
date_time=schedule_at,
snapshot_template_key=snap_template,
change_control_type=change_type,
stop_on_error=stop_on_error)
return creation_request
except CvpApiError as err:
logging.error('Cannot create change-control - error message is %s',
format(err))
return None
| 34.939163
| 114
| 0.575144
| 1,018
| 9,189
| 5.064833
| 0.260314
| 0.052948
| 0.023274
| 0.009891
| 0.077967
| 0.055857
| 0.055857
| 0.043832
| 0.043832
| 0.043832
| 0
| 0.014533
| 0.34106
| 9,189
| 262
| 115
| 35.072519
| 0.836994
| 0.518228
| 0
| 0.082192
| 0
| 0
| 0.173011
| 0.017045
| 0
| 0
| 0
| 0.007634
| 0
| 1
| 0.09589
| false
| 0
| 0.054795
| 0
| 0.219178
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
d0fd59da3a0fd9fe3acc37b2d63f3055243a7e1f
| 1,289
|
py
|
Python
|
app/controllers/stores/update.py
|
Brunoro811/api_dangels
|
21c064eaa4f5009412dddc9676044d6cc08a5b65
|
[
"MIT"
] | null | null | null |
app/controllers/stores/update.py
|
Brunoro811/api_dangels
|
21c064eaa4f5009412dddc9676044d6cc08a5b65
|
[
"MIT"
] | null | null | null |
app/controllers/stores/update.py
|
Brunoro811/api_dangels
|
21c064eaa4f5009412dddc9676044d6cc08a5b65
|
[
"MIT"
] | null | null | null |
from flask import current_app, request
from http import HTTPStatus
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm import Session
from app.models.stores.store_model import StoreModel
from app.decorators import verify_payload, validator
@validator(zip_code="zip_code")
@verify_payload(
fields_and_types={
"name_store": str,
"street": str,
"number": int,
"zip_code": str,
"other_information": str,
},
optional=[
"name_store",
"street",
"number",
"zip_code",
"other_information",
],
)
def update_store(data: dict, id: int):
session: Session = current_app.db.session
try:
storie = StoreModel.query.get(id)
if not (storie):
raise NoResultFound
data: dict = request.get_json()
for key, value in data.items():
if key == "name_store":
value = value.title()
else:
value = value.capitalize()
setattr(storie, key, value)
session.add(storie)
session.commit()
return "", HTTPStatus.NO_CONTENT
except NoResultFound:
return {"error": "Not found store."}, HTTPStatus.BAD_REQUEST
except Exception as e:
raise e
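# --- Hedged example payload (not part of the original controller) ---
# Field names come from the verify_payload decorator above; the concrete values, and
# the route this view is bound to elsewhere in the app, are assumptions.
example_payload = {
    "name_store": "dangels centro",
    "street": "rua da bahia",
    "number": 100,
    "zip_code": "30160-011",
    "other_information": "second floor",
}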
| 25.78
| 68
| 0.59969
| 143
| 1,289
| 5.265734
| 0.48951
| 0.037185
| 0.045153
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.299457
| 1,289
| 49
| 69
| 26.306122
| 0.833887
| 0
| 0
| 0
| 0
| 0
| 0.103181
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| false
| 0
| 0.139535
| 0
| 0.209302
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
1905b552f6b906092520144e21a33c6cfbd7fe0b
| 883
|
py
|
Python
|
src/save_docs.py
|
j-c-m-code/gutenbergsearch
|
b08f69d1d35fcca57e8ad0fcceaab614b9104abc
|
[
"MIT"
] | null | null | null |
src/save_docs.py
|
j-c-m-code/gutenbergsearch
|
b08f69d1d35fcca57e8ad0fcceaab614b9104abc
|
[
"MIT"
] | null | null | null |
src/save_docs.py
|
j-c-m-code/gutenbergsearch
|
b08f69d1d35fcca57e8ad0fcceaab614b9104abc
|
[
"MIT"
] | null | null | null |
"""
Processes a folder of .txt files to Spacy docs then saves the docs
"""
# first import standard modules
import glob
import os
from pathlib import Path
# then import third-party modules
import spacy
# finally import my own code (PEP-8 convention)
from askdir import whichdir
nlp = spacy.load("en_core_web_lg")
source_directory = whichdir()
os.chdir(source_directory)
filelist = glob.glob("*")
output_directory = whichdir()
for filename in filelist:
with open(filename, "r", encoding="utf-8") as f:
novel = f.read()
# the novel is too long for the default, so increase allocated memory
nlp.max_length = len(novel) + 100
# Process a text
doc = nlp(novel)
short_name = Path(filename).stem
# r for raw string--no escape characters
# f for format string--allow me to pass in variable
doc.to_disk(rf"{output_directory}\{short_name}")
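# --- Hedged follow-up sketch (not part of the original script) ---
# Reloading one of the serialized docs; reuses the nlp pipeline, output_directory and
# short_name from the loop above (so it assumes the loop ran at least once).
from spacy.tokens import Doc

reloaded = Doc(nlp.vocab).from_disk(rf"{output_directory}\{short_name}")
print(len(reloaded), "tokens reloaded")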
| 23.864865
| 73
| 0.711212
| 134
| 883
| 4.604478
| 0.634328
| 0.042139
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007032
| 0.19479
| 883
| 36
| 74
| 24.527778
| 0.860759
| 0.392978
| 0
| 0
| 0
| 0
| 0.099617
| 0.059387
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.294118
| 0
| 0.294118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
1908a3c6547cb1830569167b36fc11ceff479110
| 652
|
py
|
Python
|
bosm2015/pcradmin_old/urls.py
|
dvm-bitspilani/BITS-BOSM-2015
|
df3e69ee6ee9b179a2d6cd6cad61423c177dbe0a
|
[
"MIT"
] | 1
|
2015-09-15T17:19:30.000Z
|
2015-09-15T17:19:30.000Z
|
bosm2015/pcradmin_old/urls.py
|
DVM-BITS-Pilani/BITS-BOSM-2015
|
df3e69ee6ee9b179a2d6cd6cad61423c177dbe0a
|
[
"MIT"
] | null | null | null |
bosm2015/pcradmin_old/urls.py
|
DVM-BITS-Pilani/BITS-BOSM-2015
|
df3e69ee6ee9b179a2d6cd6cad61423c177dbe0a
|
[
"MIT"
] | 1
|
2016-03-28T19:44:41.000Z
|
2016-03-28T19:44:41.000Z
|
from pcradmin import views
from django.conf.urls import url, include
urlpatterns = [
url(r'^(?P<pagename>\w+)/', views.index),
#url(r'^sendmail$', views.sendmail),
#url(r'^sentmail$', views.sentmail),
url(r'^changelimit$', views.change_team_limits),
url(r'^change_team_limit$', views.change_team_limit_list),
url(r'^limit_changed$', views.change_limits),
url(r'^changesportslimit$', views.change_sports_limits),
url(r'^sports_limits_changed$', views.save_sports_limits),
url(r'^setstatus', views.set_status),
url(r'^showstatus', views.save_status),
url(r'^emailsend', views.send_mail),
url(r'^compose', views.compose),
]
| 38.352941
| 62
| 0.71319
| 92
| 652
| 4.858696
| 0.380435
| 0.107383
| 0.089485
| 0.071588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104294
| 652
| 16
| 63
| 40.75
| 0.765411
| 0.107362
| 0
| 0
| 0
| 0
| 0.253448
| 0.039655
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
190c0898a136d9b08e445150bbf358f595547ad3
| 8,349
|
py
|
Python
|
tcsv.py
|
eadanfahey/transform-csv
|
40d3aaf34b286fe9d6262fe69c7245e3a44a5b41
|
[
"MIT"
] | null | null | null |
tcsv.py
|
eadanfahey/transform-csv
|
40d3aaf34b286fe9d6262fe69c7245e3a44a5b41
|
[
"MIT"
] | null | null | null |
tcsv.py
|
eadanfahey/transform-csv
|
40d3aaf34b286fe9d6262fe69c7245e3a44a5b41
|
[
"MIT"
] | null | null | null |
import csv
class ConstraintError(Exception):
def __init__(self, column, value, fn_name, rownumber):
self.column = column
self.value = value
self.fn_name = fn_name
self.rown = rownumber
def __str__(self):
message = "{} value {} does not satisfy the constraint {} on row {}"
return message.format(self.column, self.value, self.fn_name, self.rown)
class TransformError(Exception):
def __init__(self, row, err):
self.row = row
self.err = err
def __str__(self):
return "on csv row {} with error: {}".format(self.row, self.err)
class TransformCSV(object):
def __init__(self, input_file, skiprows=0):
self.rownumber = 1
self.ifile = open(input_file)
self.names = None
self.reader = None
self.idx = None
self._create_reader(skiprows)
self.mutation_fns = []
self.constraint_fns = []
self.select_fn = lambda row: row
def __iter__(self):
return self
def __next__(self):
row = next(self.reader)
self.rownumber += 1
try:
mutated = row[:]
for fn in self.mutation_fns:
mutated = fn(mutated)  # apply each mutation to the accumulated result, not the raw row
mutated_cp = mutated[:]
for cfn in self.constraint_fns:
cfn(mutated_cp)
return self.select_fn(mutated)
except Exception as e:
raise TransformError(self.rownumber, e)
def close(self):
self.ifile.close()
def _create_reader(self, skiprows):
"""
Create a csv reader object from the input csv file.
"""
# with open(self.input_file) as f:
reader = csv.reader(self.ifile)
for _ in range(skiprows):
next(reader)
self.rownumber += 1
names = next(reader)
self.reader = reader
self.names = names
self.idx = dict(zip(names, range(len(names))))
def rename(self, name_map):
"""
Change the column names.
Args:
name_map: A dictionary mapping the current names to new names.
Returns:
None
"""
new_names = []
for name in self.names:
new_name = name_map.get(name)
if new_name is None:
new_names.append(name)
else:
new_names.append(new_name)
self.names = new_names
self.idx = dict(zip(self.names, range(len(self.names))))
def add(self, name, val):
"""
Add a column to the csv containing a constant value.
TODO: replace this method with add_column
Args:
name: The name of the new column.
val: The value to place in each row of the new column.
Returns:
None
"""
def f(row):
row.append(val)
return row
self.mutation_fns.append(f)
self.names.append(name)
self.idx[name] = len(self.names) - 1
def add_column(self, name, fn, col):
"""
Add a column to the csv with the new value produced by a
user defined function that can access all entries on the same row.
TODO: Perhaps use inspect.signature to verify that the number of
arguments that `fn` takes is the same as the number of columns
passed. But this doesn't work for some built-in functions, e.g. int.
Args:
name: The name of the new column.
fn: The function to apply to the row.
col: The columns that are arguments to the function.
Returns:
None
"""
if isinstance(col, str):
columns = [col]
elif isinstance(col, list) or isinstance(col, tuple):
columns = col
else:
raise TypeError('The parameter col must be of type str, list or tuple')
# check that the column names are valid.
for c in columns:
try:
self.idx[c]
except KeyError:
raise KeyError("The column '{}' does not exist".format(c))
def add_column_fn(row):
vals = [row[self.idx[c]] for c in columns]
new_val = fn(*vals)
row.append(new_val)
return row
self.mutation_fns.append(add_column_fn)
self.names.append(name)
self.idx[name] = len(self.names) - 1
def mutate(self, fn, col=None):
"""
Mutate a column by applying a function to it.
Args:
fn: The function to apply. Takes a string or numeric argument and
returns a string or numeric argument.
col: The name of the column to be mutated. Can be of three forms:
1) None (default): the function is applied to all columns.
2) list/tuple of column names to apply the function to.
3) A single column name to apply the function to.
Returns:
None
Raises:
TypeError: The parameter `col` is the wrong type.
KeyError: When trying to mutate a column that doesn't exist.
"""
if col is None:
columns = self.names
elif isinstance(col, str):
columns = [col]
elif isinstance(col, list) or isinstance(col, tuple):
columns = col
else:
raise TypeError("col must be of type None, str, list or tuple")
# check that the column names are valid.
for c in columns:
try:
self.idx[c]
except KeyError:
raise KeyError("The column '{}' does not exist".format(c))
def mutate_fn(row):
for c in columns:
row[self.idx[c]] = fn(row[self.idx[c]])
return row
self.mutation_fns.append(mutate_fn)
def constraint(self, fn, col):
"""
Check that a column satisfies a constraint.
Args:
fn: A function of a single argument that returns True if the
column value satisfies the constraint, or False otherwise.
col: The name of the column to check.
Returns:
None
Raises:
ConstraintError: If fn returns False.
TypeError: If col is not the correct type.
KeyError: If a column name does not exist.
"""
if col is None:
columns = self.names
elif isinstance(col, str):
columns = [col]
elif isinstance(col, list) or isinstance(col, tuple):
columns = col
else:
raise TypeError("col must be of type None, str, list or tuple")
# check that the column names are valid.
for c in columns:
try:
self.idx[c]
except KeyError:
raise KeyError("The column '{}' does not exist".format(c))
def constraint_fn(row):
for c in columns:
val = row[self.idx[c]]
if not fn(val):
raise ConstraintError(c, val, fn.__name__, self.rownumber)
self.constraint_fns.append(constraint_fn)
def select(self, columns):
"""
Select only the supplied columns.
Args:
columns: A list of column names to select.
Returns:
None
Raises:
KeyError: If a column does not exist.
"""
for c in columns:
try:
self.idx[c]
except KeyError:
raise KeyError("The column '{}' does not exist".format(c))
def select_fn(row):
return [row[self.idx[col]] for col in columns]
self.names = columns
self.select_fn = select_fn
def write(self, filename):
"""
Write the csv to file. This will exhaust the iterator.
Args:
filename: the name of the csv file.
Returns:
None
Raises:
FileNotFoundError: the file could not be created.
"""
with open(filename, 'w') as f:
writer = csv.writer(f)
writer.writerow(self.names)
while True:
try:
writer.writerow(self.__next__())
except StopIteration:
break
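# Hedged usage sketch: a minimal pipeline assuming a hypothetical "people.csv"
# with "name" and "age" columns; file names and column names are illustrative.
if __name__ == "__main__":
    t = TransformCSV("people.csv")                      # header row is consumed on construction
    t.rename({"name": "full_name"})                     # rename a column
    t.mutate(str.strip, col="full_name")                # clean one column
    t.mutate(int, col="age")                            # cast another column
    t.constraint(lambda v: v >= 0, col="age")           # violations surface as TransformError
    t.add_column("is_adult", lambda a: a >= 18, "age")  # derived column computed from "age"
    t.select(["full_name", "age", "is_adult"])          # keep only these columns
    t.write("people_clean.csv")                         # exhausts the iterator and writes the result
    t.close()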
| 31.387218
| 83
| 0.542819
| 1,031
| 8,349
| 4.313288
| 0.176528
| 0.028334
| 0.014392
| 0.020463
| 0.315494
| 0.274792
| 0.251855
| 0.22667
| 0.213627
| 0.213627
| 0
| 0.001727
| 0.375734
| 8,349
| 265
| 84
| 31.50566
| 0.851497
| 0.277279
| 0
| 0.378378
| 0
| 0
| 0.063735
| 0
| 0
| 0
| 0
| 0.007547
| 0
| 1
| 0.141892
| false
| 0
| 0.006757
| 0.02027
| 0.222973
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
190dc436e49d1655496d4e4796285c2ff4464f81
| 13,074
|
py
|
Python
|
selim/datasets/lidc.py
|
tilacyn/dsb2018_topcoders
|
e0f95ef70bc062d4dea321d2aa73231a9538cd63
|
[
"MIT"
] | null | null | null |
selim/datasets/lidc.py
|
tilacyn/dsb2018_topcoders
|
e0f95ef70bc062d4dea321d2aa73231a9538cd63
|
[
"MIT"
] | null | null | null |
selim/datasets/lidc.py
|
tilacyn/dsb2018_topcoders
|
e0f95ef70bc062d4dea321d2aa73231a9538cd63
|
[
"MIT"
] | null | null | null |
import numpy as np
from tensorflow.keras.preprocessing.image import Iterator
import time
import os
import xml.etree.ElementTree as ET
import cv2
import pydicom as dicom
from os.path import join as opjoin
import json
from tqdm import tqdm
def make_mask(image, image_id, nodules):
height, width = image.shape
# print(image.shape)
filled_mask = np.full((height, width), 0, np.uint8)
contoured_mask = np.full((height, width), 0, np.uint8)
# todo OR for all masks
for nodule in nodules:
for roi in nodule['roi']:
if roi['sop_uid'] == image_id:
edge_map = roi['xy']
cv2.fillPoly(filled_mask, np.int32([np.array(edge_map)]), 255)
# cv2.polylines(contoured_mask, np.int32([np.array(edge_map)]), color=255, isClosed=False)
# mask = np.swapaxes(np.array([contoured_mask, filled_mask]), 0, 2)
# cv2.imwrite('kek0.jpg', image)
# cv2.imwrite('kek1.jpg', filled_mask)
return np.reshape(filled_mask, (height, width, 1)) / 255
def get_files_with_nodules(nodules, root):
files = os.listdir(root)
image_ids_with_nodules = set()
for nodule in nodules:
for roi in nodule['roi']:
image_ids_with_nodules.add(roi['sop_uid'])
result = []
for file in files:
if not file.endswith('dcm'):
continue
_, ds = imread(opjoin(root, file))
if ds.SOPInstanceUID in image_ids_with_nodules:
result.append(opjoin(root, file))
return result
def test(a, b):
root = '/Users/mkryuchkov/lung-ds/3000566-03192'
nodules = parseXML('/Users/mkryuchkov/lung-ds/3000566-03192')
image = cv2.imread('/Users/mkryuchkov/lung-ds/000001.jpg')
for im_name in os.listdir(root):
if not im_name.endswith('dcm'):
continue
image, dcm_ds = imread(root + '/' + im_name)
print(dcm_ds.SliceLocation)
if dcm_ds.SliceLocation == a:
print(im_name)
return make_mask(image, dcm_ds.SOPInstanceUID, nodules)  # make_mask takes three arguments
# break
# print(dcm_ds.get('UID'))
# return make_mask(image, image_id, nodules)
def imread(image_path):
ds = dicom.dcmread(image_path)
img = ds.pixel_array
img_2d = img.astype(float)
img_2d_scaled = (np.maximum(img_2d, 0) / img_2d.max()) * 255.0
img_2d_scaled = np.uint8(img_2d_scaled)
image = img_2d_scaled
return image, ds
def parseXML(scan_path):
'''
parse xml file
args:
xml file path
output:
nodule list
[{nodule_id, roi:[{z, sop_uid, xy:[[x1,y1],[x2,y2],...]}]}]
'''
file_list = os.listdir(scan_path)
xml_file = None
for file in file_list:
if '.' in file and file.split('.')[1] == 'xml':
xml_file = file
break
prefix = "{http://www.nih.gov}"
if xml_file is None:
print('SCAN PATH: {}'.format(scan_path))
tree = ET.parse(scan_path + '/' + xml_file)
root = tree.getroot()
readingSession_list = root.findall(prefix + "readingSession")
nodules = []
for session in readingSession_list:
# print(session)
unblinded_list = session.findall(prefix + "unblindedReadNodule")
for unblinded in unblinded_list:
nodule_id = unblinded.find(prefix + "noduleID").text
edgeMap_num = len(unblinded.findall(prefix + "roi/" + prefix + "edgeMap"))
if edgeMap_num >= 1:
# it's segmentation label
nodule_info = {}
nodule_info['nodule_id'] = nodule_id
nodule_info['roi'] = []
roi_list = unblinded.findall(prefix + "roi")
for roi in roi_list:
roi_info = {}
# roi_info['z'] = float(roi.find(prefix + "imageZposition").text)
roi_info['sop_uid'] = roi.find(prefix + "imageSOP_UID").text
roi_info['xy'] = []
edgeMap_list = roi.findall(prefix + "edgeMap")
for edgeMap in edgeMap_list:
x = float(edgeMap.find(prefix + "xCoord").text)
y = float(edgeMap.find(prefix + "yCoord").text)
xy = [x, y]
roi_info['xy'].append(xy)
nodule_info['roi'].append(roi_info)
nodules.append(nodule_info)
return nodules
class LIDCDatasetIterator(Iterator):
def __init__(self, image_dir, batch_size, val_len, test_len=0, data_shape=(64, 64), grid_size=1):
seed = np.uint32(time.time() * 1000)
self.image_dir = image_dir
self.image_ids = self.create_image_ids()
n = len(self.image_ids)
self.val_len = val_len
self.train_index_list = np.arange(n)
np.random.shuffle(self.train_index_list)
self.val_index_list = self.train_index_list[:val_len]
self.test_index_list = self.train_index_list[val_len:(val_len + test_len)]
self.train_index_list = self.train_index_list[val_len + test_len:]
self.val_i = 0
self.train_i = 0
self.grid_size = grid_size
self.data_shape = data_shape
print("total len: {}".format(n))
print("train index array: {}".format(len(self.train_index_list)))
print("val index array: {}".format(len(self.val_index_list)))
super().__init__(n, batch_size, False, seed)
def train_generator(self):
def index_inc_function():
prev = self.train_i
self.train_i += self.batch_size // 2
if self.train_i >= len(self.train_index_list):
np.random.shuffle(self.train_index_list)
prev = 0
self.train_i = self.batch_size // 2
return prev, self.train_i
return self.generator(index_inc_function, self.train_index_list)
def val_generator(self):
def index_inc_function():
prev = self.val_i
self.val_i += self.batch_size // 2
if self.val_i >= len(self.val_index_list):
np.random.shuffle(self.val_index_list)
prev = 0
self.val_i = self.batch_size // 2
return prev, self.val_i
return self.generator(index_inc_function, self.val_index_list)
def generator(self, index_inc_function, index_list):
def gen():
while 1:
batch_x = []
batch_y = []
index, next_index = index_inc_function()
index_array = index_list[index: next_index]
for image_index in index_array:
file_name, parent_name = self.image_ids[image_index]
image, dcm_ds = imread(file_name)
image = self.pad_if_need(image)
nodules = parseXML(parent_name)
mask = make_mask(image, dcm_ds.SOPInstanceUID, nodules)
image_parts, mask_parts = self.split(image, mask)
for i in range(2):
image = image_parts[i]
image = self.preprocess_x(image)
mask = mask_parts[i]
mask = self.preprocess_y(mask)
batch_x.append(image)
batch_y.append(mask)
batch_x = np.array(batch_x, dtype=np.uint8)
batch_y = np.array(batch_y, dtype=np.uint8)
yield batch_x, batch_y
return gen
def pad_if_need(self, image):
h, w = image.shape
if 2022 == h or 2022 == w:
hpad = (2048 - h) // 2
wpad = (2048 - w) // 2
image = np.pad(image, ((hpad, hpad), (wpad, wpad)), constant_values=0)
return image
def preprocess_x(self, image):
image = np.reshape(image, (image.shape[0], image.shape[1], 1))
image = np.repeat(image, 3, axis=2)
image = cv2.resize(image, self.data_shape)
return image
def preprocess_y(self, mask):
mask = cv2.resize(mask, self.data_shape)
mask = np.reshape(mask, (self.data_shape[0], self.data_shape[1], 1))
return mask
def split(self, image, mask):
if self.grid_size == 1:
return [image, image], [mask, mask]
h, w = image.shape
gs = h // self.grid_size
image_parts = image.reshape(h // gs, gs, -1, gs).swapaxes(1, 2).reshape(-1, gs, gs)
mask_parts = mask.reshape(h // gs, gs, -1, gs).swapaxes(1, 2).reshape(-1, gs, gs)
max_part_idx = np.argmax([np.count_nonzero(part > 0) for part in mask_parts])
max_mask = mask_parts[max_part_idx]
random_idx = np.random.randint(self.grid_size * self.grid_size)
return [image_parts[max_part_idx], image_parts[random_idx]], [max_mask, mask_parts[random_idx]]  # pair each image part with its mask part
def create_image_ids(self):
with open("index.json", "r") as read_file:
dcms = json.load(read_file)
image_ids = {}
# print('total training ds len: {}'.format(len(dcms)))
for i, dcm in enumerate(dcms):
image_ids[i] = dcm, '/'.join(dcm.split('/')[:-1])
return image_ids
def create_index(image_dir):
dcms = []
for root, folders, files in tqdm(os.walk(image_dir)):
xml_file = None
for file in files:
if 'xml' in file:
xml_file = file
break
if xml_file is None:
continue
print('extending with {}'.format(root))
dcms.extend(get_files_with_nodules(parseXML(root), root))
print('total training ds len: {}'.format(len(dcms)))
with open("index.json", "w") as write_file:
json.dump(dcms, write_file)
class LIDCTestDatasetIterator(LIDCDatasetIterator):
def __init__(self, image_dir, batch_size, test_index_list, val_len, data_shape=(64, 64), grid_size=1):
super().__init__(image_dir, batch_size, 0, data_shape=data_shape, grid_size=grid_size)
self.test_index_list = test_index_list
self.test_i = 0
self.all_images = []
self.create_negative()
self.batch_size //= 2
def create_negative(self):
for root, folders, files in tqdm(os.walk(self.image_dir)):
xml_file = None
for file in files:
if 'xml' in file:
xml_file = file
break
if xml_file is None:
continue
else:
extension = [(dcm, root) for dcm in files if dcm.endswith('dcm')]
self.all_images.extend(extension)
if len(self.all_images) > 10000:
break
def split_for_test(self, image, mask):
if self.grid_size == 1:
return [image, image], [mask, mask]
h, w = image.shape
gs = h // self.grid_size
image_parts = image.reshape(h // gs, gs, -1, gs).swapaxes(1, 2).reshape(-1, gs, gs)
mask_parts = mask.reshape(h // gs, gs, -1, gs).swapaxes(1, 2).reshape(-1, gs, gs)
return image_parts, mask_parts
def test_generator(self):
def index_inc_function():
prev = self.test_i
self.test_i += self.batch_size
if self.test_i >= len(self.test_index_list):
np.random.shuffle(self.test_index_list)
prev = 0
self.test_i = self.batch_size
return prev, self.test_i
index_list = self.test_index_list
def gen():
while 1:
batch_x = []
batch_y = []
index, next_index = index_inc_function()
index_array = index_list[index: next_index]
print(index_array)
new_index_array = []
for i in index_array:
new_index_array.append(i)
new_index_array.append(-1)
index_array = new_index_array
for image_index in index_array:
if image_index == -1:
ii = np.random.randint(1e4)
file_name, parent_name = self.all_images[ii]
file_name = opjoin(parent_name, file_name)
else:
file_name, parent_name = self.image_ids[image_index]
image, dcm_ds = imread(file_name)
image = self.pad_if_need(image)
nodules = parseXML(parent_name)
mask = make_mask(image, dcm_ds.SOPInstanceUID, nodules)
image_parts, mask_parts = self.split_for_test(image, mask)
image_parts = [self.preprocess_x(image_part) for image_part in image_parts]
mask_parts = [self.preprocess_y(mask_part) for mask_part in mask_parts]
image = self.preprocess_x(image)
mask = self.preprocess_y(mask)
batch_x.append((image, image_parts))
batch_y.append((mask, mask_parts))
yield batch_x, batch_y
return gen
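# Hedged usage sketch: the dataset root below is a placeholder path, and
# create_index() must have written index.json before the iterator is built.
if __name__ == "__main__":
    lidc_root = "/path/to/LIDC-IDRI"                  # hypothetical dataset root
    create_index(lidc_root)                           # writes index.json consumed by create_image_ids()
    it = LIDCDatasetIterator(lidc_root, batch_size=8, val_len=100, data_shape=(64, 64))
    train_gen = it.train_generator()()                # train_generator() returns a generator factory
    batch_x, batch_y = next(train_gen)
    print(batch_x.shape, batch_y.shape)               # (batch_size, 64, 64, 3) images, (batch_size, 64, 64, 1) masks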
| 38.795252
| 106
| 0.567998
| 1,689
| 13,074
| 4.164002
| 0.138544
| 0.033272
| 0.019906
| 0.025594
| 0.41419
| 0.370112
| 0.307977
| 0.253804
| 0.175743
| 0.154415
| 0
| 0.019066
| 0.322013
| 13,074
| 336
| 107
| 38.910714
| 0.774368
| 0.046581
| 0
| 0.299639
| 0
| 0
| 0.033988
| 0.009182
| 0
| 0
| 0
| 0.002976
| 0
| 1
| 0.086643
| false
| 0
| 0.036101
| 0
| 0.202166
| 0.032491
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
190de2ec2acd9e5640757238ffbce83a69af9dc2
| 2,058
|
py
|
Python
|
hexagon/__main__.py
|
redbeestudios/hexagon
|
dc906ae31a14eb750a3f9bde8dd0633d8e1af486
|
[
"Apache-2.0"
] | 8
|
2021-06-27T21:46:04.000Z
|
2022-02-26T18:03:10.000Z
|
hexagon/__main__.py
|
redbeestudios/hexagon
|
dc906ae31a14eb750a3f9bde8dd0633d8e1af486
|
[
"Apache-2.0"
] | 31
|
2021-06-24T14:35:38.000Z
|
2022-02-17T03:01:23.000Z
|
hexagon/__main__.py
|
redbeestudios/hexagon
|
dc906ae31a14eb750a3f9bde8dd0633d8e1af486
|
[
"Apache-2.0"
] | 1
|
2021-08-16T16:15:16.000Z
|
2021-08-16T16:15:16.000Z
|
from hexagon.support.hooks import HexagonHooks
from hexagon.support.execute.tool import select_and_execute_tool
from hexagon.support.update.cli import check_for_cli_updates
import sys
from hexagon.support.args import fill_args
from hexagon.domain import cli, tools, envs
from hexagon.support.help import print_help
from hexagon.support.tracer import tracer
from hexagon.support.printer import log
from hexagon.support.update.hexagon import check_for_hexagon_updates
from hexagon.support.storage import (
HexagonStorageKeys,
store_user_data,
)
from hexagon.plugins import collect_plugins
def main():
_, _tool, _env = fill_args(sys.argv, 3)
if _tool == "-h" or _tool == "--help":
return print_help(cli, tools, envs)
collect_plugins()
HexagonHooks.start.run()
log.start(f"[bold]{cli.name}")
log.gap()
check_for_hexagon_updates()
if cli.name == "Hexagon":
log.info(
"This looks like your first time running Hexagon.",
'You should probably run "Install CLI".',
gap_end=1,
)
else:
check_for_cli_updates()
try:
result = select_and_execute_tool(tools, _tool, _env, sys.argv[3:])
log.gap()
if result:
for item in result:
log.info(item)
log.finish()
if tracer.has_traced():
log.extra(
"[cyan dim]To run again do:[/cyan dim]",
f"[cyan] {cli.command} {tracer.command()}[/cyan]",
)
command_as_aliases = tracer.command_as_aliases(tools, envs)
if command_as_aliases:
log.extra(
"[cyan dim] or:[/cyan dim]",
f"[cyan] {cli.command} {command_as_aliases}[/cyan]",
)
store_user_data(
HexagonStorageKeys.last_command.value,
f"{cli.command} {tracer.command()}",
)
except KeyboardInterrupt:
sys.exit(1)
HexagonHooks.end.run()
if __name__ == "__main__":
main()
| 27.078947
| 76
| 0.614189
| 250
| 2,058
| 4.84
| 0.336
| 0.1
| 0.133884
| 0.033058
| 0.036364
| 0.036364
| 0
| 0
| 0
| 0
| 0
| 0.002703
| 0.280855
| 2,058
| 75
| 77
| 27.44
| 0.814865
| 0
| 0
| 0.067797
| 0
| 0
| 0.156463
| 0.025267
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016949
| false
| 0
| 0.20339
| 0
| 0.237288
| 0.050847
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef6ed1166f6e406d8fb8cc64a8cdbbcd50db4769
| 7,103
|
py
|
Python
|
store/main.py
|
Soemonewho2/pi-ware
|
86d2cd84ca85e36cbcdbc7511f6a4565b18e81d9
|
[
"MIT"
] | null | null | null |
store/main.py
|
Soemonewho2/pi-ware
|
86d2cd84ca85e36cbcdbc7511f6a4565b18e81d9
|
[
"MIT"
] | null | null | null |
store/main.py
|
Soemonewho2/pi-ware
|
86d2cd84ca85e36cbcdbc7511f6a4565b18e81d9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Pi-Ware main UI
from tkinter import *
from tkinter.ttk import *
import tkinter as tk
import os
import webbrowser
from functools import partial
import getpass
#Set global var username
global username
username = getpass.getuser()
#Set global install/uninstall scripts
global install_script
global uninstall_script
#Import custom pi-ware functions
#import function
import classes
window = tk.Tk()
#Functions
def show_desc(apt,*args):
mainwinx = str(window.winfo_x())
mainwiny = str(window.winfo_y())
item = tree.selection()[0]
app = tree.item(item,"text")
global install_script, uninstall_script, desc_win
desc_win = tk.Toplevel(window)
p2 = PhotoImage(file = f'/home/{username}/pi-ware/apps/{app}/icon.png')
# Icon set for program window
desc_win.iconphoto(False, p2)
window.resizable(0, 0)
desc_win.title(f"{app}")
print("320x500+" + mainwinx + "+" + mainwiny)
desc_win.geometry("320x500+" + mainwinx + "+" + mainwiny)
window.withdraw()
desc = open(f"/home/{username}/pi-ware/apps/{app}/description.txt", "r")
desc_contents = desc.read()
text_box = Text(desc_win, height=12, width=40)
text_box.pack()
text_box.insert('end', desc_contents)
text_box.config(state='disabled')
#Disabled for now.
#app_desc = tk.Label(desc_win, text=desc_contents, font="Arial 9")
#app_desc.pack()
#Check if website file exist
filepath = f"/home/{username}/pi-ware/apps/{app}/website"
try:
file_tst = open(filepath)
file_tst.close()
except FileNotFoundError:
Web = "False"
else:
Web = "True"
#Add website from file
if Web == "True":
websiteurlfile = open(f'/home/{username}/pi-ware/apps/{app}/website', 'r')
websiteurl = websiteurlfile.readlines()
# Strips the newline character
for line in websiteurl:
#print("{}".format(line.strip()))
Website = classes.HyperLink(desc_win, f"""{line}""");
Website.pack()
install = tk.Button(desc_win, text="INSTALL", font="Arial 11 bold", width=200, bg="darkblue", fg="white", command=install_app)
install.pack()
uninstall = tk.Button(desc_win, text="UNINSTALL", font="Arial 11 bold", width=200, bg="red", fg="white", command=uninstall_app)
uninstall.pack()
ucommand = f"""bash /home/{username}/pi-ware/func/term/uninst '{app}' 'Uninstalling {app}'"""
command = f"""bash /home/{username}/pi-ware/func/term/inst '{app}' 'Installing {app}'"""
install_script = "'%s'" % command
uninstall_script = "'%s'" % ucommand
back_to_menu_button = tk.Button(desc_win, text="BACK", font="Arial 11 bold", width=200, height=2, bg="green", fg="white", command=back_to_menu)
back_to_menu_button.pack(side = "bottom")
desc_win.protocol("WM_DELETE_WINDOW",back_to_menu)
def back_to_menu(window, parent, app=None):
parent.destroy()
window.deiconify()
def install_app():
global install_script
if IsDev == "True":
print(f"bash /home/{username}/pi-ware/func/term/term-run {install_script}")
os.system(f"bash /home/{username}/pi-ware/func/term/term-run {install_script}")
def uninstall_app():
global uninstall_script
if IsDev == "True":
print(f"bash /home/{username}/pi-ware/func/term/term-run {uninstall_script}")
os.system(f"bash /home/{username}/pi-ware/func/term/term-run {uninstall_script}")
def back_to_menu():
window.deiconify()
desc_win.destroy()
window.title("Pi-Ware")
#window.eval('tk::PlaceWindow . center')
def quit():
window.destroy()
#Check if dev files exist
filepath = f"/home/{username}/pi-ware/.dev"
try:
file_tst = open(filepath)
file_tst.close()
except FileNotFoundError:
IsDev = "False"
else:
IsDev = "True"
#Set window icon
p1 = PhotoImage(file = f'/home/{username}/pi-ware/icons/logo.png')
window.iconphoto(False, p1)
#Main
window.resizable(0, 0)
window.geometry("330x500")
window.eval('tk::PlaceWindow . center')
window.title("Pi-Ware")
# Window tabs
tab_control = Notebook(window)
apps_tab = Frame(tab_control)
news_tab = Frame(tab_control)
credits_tab = Frame(tab_control)
DEV_tab = Frame(tab_control)
tab_control.add(apps_tab, text="Apps")
tab_control.add(news_tab, text="News")
tab_control.add(credits_tab, text="Credits")
#Show dev tab if dev files are found
if IsDev == "True":
tab_control.add(DEV_tab, text="Dev")
tab_control.pack(expand=0, fill="both")
#Show DEV stuff
PiWareVersionFile = open(f"/home/{username}/.local/share/pi-ware/version", "r")
PiWareVersioncontent = PiWareVersionFile.read()
files = folders = 0
for _, dirnames, filenames in os.walk(f"/home/{username}/pi-ware/apps"):
files += len(filenames)
folders += len(dirnames)
InstallibleApps = "{:,} installible Apps".format(folders)
PiWareVersion = tk.Label(DEV_tab, text=f"Pi-Ware Version:\n{PiWareVersioncontent}", font="Arial 11 bold")
PiWareInstallableApps = tk.Label(DEV_tab, text=f"{InstallibleApps}", font="Arial 11 bold")
PiWareVersion.pack()
PiWareInstallableApps.pack()
#Show latest news message
NewsMessagefile = open(f"/home/{username}/pi-ware/func/info/latestnewsmessage", "r")
NewsMessagecontent = NewsMessagefile.read()
NewsMessage = tk.Label(news_tab, text=f"Latest news:\n{NewsMessagecontent}", font="Arial 11 bold")
NewsMessage.pack()
#Show info message
InfoMessagefile = open(f"/home/{username}/pi-ware/func/info/infomessage", "r")
InfoMessagecontent = InfoMessagefile.read()
InfoMessage = tk.Label(credits_tab, text=f"{InfoMessagecontent}", font="Arial 11 bold")
InfoMessage.pack()
#Show commit links
commitmessage = tk.Label(credits_tab, text=f"To see commits, please go to the link below.", font="Arial 11 bold")
commitmessage.pack()
commit = classes.HyperLink(credits_tab, f"""https://github.com/piware14/pi-ware/graphs/contributors""");
commit.pack()
#Add pi-ware website
piwarewebsite = tk.Label(credits_tab, text=f"To vist the pi-ware website, click the link below.", font="Arial 11 bold")
piwarewebsite.pack()
Website = classes.HyperLink(credits_tab, f"""https://pi-ware.ml""");
Website.pack()
tree = Treeview(apps_tab)
tree.pack(expand=YES, fill=BOTH)
tree.column("#0", minwidth=0, width=330, stretch=NO)
s = Style()
s.configure('Treeview', rowheight=35)
ap = next(os.walk(f"/home/{username}/pi-ware/apps"))[1]
applist = sorted(ap)
print("Current apps:\n")
for app in applist:
print(app)
appb = ""
for a in app:
if(a == " "):
appb += "_"
else:
appb += a
tree.bind("<<TreeviewSelect>>", partial(show_desc,app))
exec(appb + """_button = PhotoImage(file=f'/home/{username}/pi-ware/apps/{app}/icon.png')""")
exec("""tree.insert('', 'end', text=f"{app}",image=""" + appb + """_button)""")
ScrollForMore = tk.Label(apps_tab, text="Scroll down for more apps.", font="Arial 11 bold")
ScrollForMore.pack()
quitbutton = tk.Button(window, text="Quit", font="Arial 11 bold", width=200, bg="grey", fg="white", command=quit)
quitbutton.pack(side="bottom")
window.mainloop()
| 33.504717
| 147
| 0.68464
| 983
| 7,103
| 4.851475
| 0.255341
| 0.033969
| 0.049906
| 0.064164
| 0.283288
| 0.242818
| 0.212414
| 0.158943
| 0.099811
| 0.099811
| 0
| 0.013331
| 0.155146
| 7,103
| 211
| 148
| 33.663507
| 0.78137
| 0.085316
| 0
| 0.169935
| 0
| 0.019608
| 0.270325
| 0.127512
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039216
| false
| 0.013072
| 0.052288
| 0
| 0.091503
| 0.03268
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef703db82c659a484347e75656e30bf7c5cabb9f
| 854
|
py
|
Python
|
data/transcoder_evaluation_gfg/python/TILING_WITH_DOMINOES.py
|
mxl1n/CodeGen
|
e5101dd5c5e9c3720c70c80f78b18f13e118335a
|
[
"MIT"
] | 241
|
2021-07-20T08:35:20.000Z
|
2022-03-31T02:39:08.000Z
|
data/transcoder_evaluation_gfg/python/TILING_WITH_DOMINOES.py
|
mxl1n/CodeGen
|
e5101dd5c5e9c3720c70c80f78b18f13e118335a
|
[
"MIT"
] | 49
|
2021-07-22T23:18:42.000Z
|
2022-03-24T09:15:26.000Z
|
data/transcoder_evaluation_gfg/python/TILING_WITH_DOMINOES.py
|
mxl1n/CodeGen
|
e5101dd5c5e9c3720c70c80f78b18f13e118335a
|
[
"MIT"
] | 71
|
2021-07-21T05:17:52.000Z
|
2022-03-29T23:49:28.000Z
|
# Copyright (c) 2019-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
def f_gold ( n ) :
A = [ 0 ] * ( n + 1 )
B = [ 0 ] * ( n + 1 )
A [ 0 ] = 1
A [ 1 ] = 0
B [ 0 ] = 0
B [ 1 ] = 1
for i in range ( 2 , n + 1 ) :
A [ i ] = A [ i - 2 ] + 2 * B [ i - 1 ]
B [ i ] = A [ i - 1 ] + B [ i - 2 ]
return A [ n ]
#TOFILL
if __name__ == '__main__':
param = [
(29,),
(13,),
(25,),
(65,),
(27,),
(42,),
(19,),
(50,),
(59,),
(13,)
]
n_success = 0
for i, parameters_set in enumerate(param):
if f_filled(*parameters_set) == f_gold(*parameters_set):
n_success+=1
print("#Results: %i, %i" % (n_success, len(param)))
| 21.897436
| 64
| 0.456674
| 126
| 854
| 2.960317
| 0.468254
| 0.016086
| 0.016086
| 0.021448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0.367682
| 854
| 39
| 65
| 21.897436
| 0.607407
| 0.216628
| 0
| 0
| 0
| 0
| 0.036254
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0
| 0
| 0.068966
| 0.034483
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef70dbcac1c09bceb1d774bc2a9dbf1cb0f819da
| 3,342
|
py
|
Python
|
make3d.py
|
BritishMuseumDH/scaffold3D
|
314ee4ca5f52304c89fac71b8f293774341d6278
|
[
"CC0-1.0"
] | 4
|
2017-03-30T09:41:21.000Z
|
2021-10-01T09:18:02.000Z
|
make3d.py
|
BritishMuseumDH/scaffold3D
|
314ee4ca5f52304c89fac71b8f293774341d6278
|
[
"CC0-1.0"
] | null | null | null |
make3d.py
|
BritishMuseumDH/scaffold3D
|
314ee4ca5f52304c89fac71b8f293774341d6278
|
[
"CC0-1.0"
] | 3
|
2018-01-30T09:18:34.000Z
|
2019-06-16T17:55:24.000Z
|
import os
import shutil
from textwrap import dedent
import argparse
import subprocess
parser = argparse.ArgumentParser(description='This is a script to create 3D model folder structure')
parser.add_argument('-p', '--project', help='3D project name', required=True)
parser.add_argument('-wd', '--wd', help='Working directory', required=True)
args = parser.parse_args()
os.chdir(args.wd)
root_dir = os.path.join(args.wd, args.project)
if os.path.exists(root_dir) and os.listdir(root_dir):
# If the path already exists and it is not empty, raise an error
err_msg = '''
{directory} already exists and it is not empty.
Please try a different project name or root directory.
'''.format(directory=root_dir)
raise IOError(000, dedent(err_msg))
else:
os.mkdir(root_dir) # Create the root directory
dirnames = ('images', 'masks', 'models')
# Create all the other directories
for item in dirnames:
path3D = os.path.join(args.wd, args.project, item)
os.mkdir(path3D)
def write_readme(project, root_dir):
readme_path = os.path.join(root_dir, "README.md")
readme_content = get_readme_text(project)
with open(readme_path, 'w') as readme_file:
readme_file.write(readme_content)
def get_readme_text(project):
readme_text = """
[](http://creativecommons.org/licenses/by-sa/4.0/)
[](http://orcid.org/0000-0002-0246-2335)
# {project}
3D data for recreation of a British Museum object.
# LICENSE
The contents of this repository are licensed under CC-BY-NC-SA
# Credits
Photographs and models by {author} <{author_email}>, Digital Humanities Lead, British Museum
Copyright Trustees of the British Museum
""".format(
project=project,
license=license,
author=get_user_name_from_git() or "My Name",
author_email=get_user_email_from_git() or "My email.")
return dedent(readme_text)
def get_user_name_from_git():
try:
git_process = subprocess.Popen(['git', 'config', 'user.name'], stdout=subprocess.PIPE
, stderr=subprocess.PIPE)
user_name, err = git_process.communicate()
return user_name.rstrip().decode()
except OSError:
return None
def get_user_email_from_git():
try:
git_process = subprocess.Popen(['git', 'config', 'user.email'], stdout=subprocess.PIPE
, stderr=subprocess.PIPE)
user_email, err = git_process.communicate()
return user_email.rstrip().decode()
except OSError:
return None
def write_license(root_dir):
license_path = os.path.join(root_dir, "LICENSE.md")
shutil.copy(os.path.join(os.path.dirname(os.path.realpath(__file__)),'scaffold3D/templates/LICENSE.md'), license_path)
return None
def write_ignore(root_dir):
ignore_path = os.path.join(root_dir, ".gitignore")
shutil.copy(os.path.join(os.path.dirname(os.path.realpath(__file__)),'scaffold3D/templates/.gitignore'), ignore_path)
return None
write_readme(args.project, root_dir)
write_license(root_dir)
write_ignore(root_dir)
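# Hedged usage note (paths and project name are illustrative):
#     python make3d.py --project MyObject --wd /path/to/workspace
# creates /path/to/workspace/MyObject with images/, masks/ and models/
# subdirectories plus README.md, LICENSE.md and .gitignore.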
| 35.178947
| 153
| 0.680132
| 461
| 3,342
| 4.767896
| 0.334056
| 0.044586
| 0.031847
| 0.019108
| 0.338944
| 0.291629
| 0.232029
| 0.10737
| 0.10737
| 0.10737
| 0
| 0.019381
| 0.197187
| 3,342
| 94
| 154
| 35.553191
| 0.799851
| 0.036206
| 0
| 0.138889
| 0
| 0.027778
| 0.319242
| 0.019273
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.069444
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef7b9c110a5e75cb118c0870480aa130248a1ef2
| 1,432
|
py
|
Python
|
piWriters/graphiteSender.py
|
shackledtodesk/piWeather
|
e0b4b4ded7ebd01fe7844807de6949a83aa3913f
|
[
"Apache-2.0"
] | null | null | null |
piWriters/graphiteSender.py
|
shackledtodesk/piWeather
|
e0b4b4ded7ebd01fe7844807de6949a83aa3913f
|
[
"Apache-2.0"
] | null | null | null |
piWriters/graphiteSender.py
|
shackledtodesk/piWeather
|
e0b4b4ded7ebd01fe7844807de6949a83aa3913f
|
[
"Apache-2.0"
] | null | null | null |
## Send data to a Graphite/Carbon Server
import traceback
import sys, time, socket, datetime
from datetime import datetime
class piSender:
carbon_server = '127.0.0.1'
carbon_port = 2003
station = "pi2wu"
def __init__(self, config):
if config.has_option('graphite','server'):
self.carbon_server = config.get('graphite','server')
if config.has_option('graphite','port'):
self.carbon_port = int(config.get('graphite','port'))  # connect() needs an integer port
if config.has_option('general','station'):
self.station = config.get('general','station')
self.sock = socket.socket()
try:
self.sock.connect( (self.carbon_server, self.carbon_port) )
except socket.error:
raise SystemExit("Could not connect to carbon server.")
def genReq(self, inTime, data):
epoch_time = time.mktime(datetime.strptime(inTime, "%Y-%m-%dT%H:%M:%S.%fZ").timetuple())
lines = []
for name, value in data.items():
lines.append("%s.%s %s %d" %
(self.station, name, value, epoch_time))
message = '\n'.join(lines) + '\n'
return message
def sendReq(self, req):
try:
self.sock.sendall(req.encode())  # the socket expects bytes
except Exception:
traceback.print_exc()
e = sys.exc_info()[0]
return e
else:
return "ok"
| 31.130435
| 96
| 0.561453
| 168
| 1,432
| 4.684524
| 0.446429
| 0.076239
| 0.041931
| 0.064803
| 0.063532
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012097
| 0.307263
| 1,432
| 45
| 97
| 31.822222
| 0.78125
| 0.025838
| 0
| 0.055556
| 0
| 0
| 0.119971
| 0.015086
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.083333
| 0
| 0.361111
| 0.027778
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef7c2ae59e8e02d4a104708b8f76dca033259df6
| 479
|
py
|
Python
|
src/main/python/bots/b_jira.py
|
jceaser/gcmd_bot
|
2b2ae0631d69d9f95a3a23b04e12a4467a116ffa
|
[
"MIT"
] | null | null | null |
src/main/python/bots/b_jira.py
|
jceaser/gcmd_bot
|
2b2ae0631d69d9f95a3a23b04e12a4467a116ffa
|
[
"MIT"
] | null | null | null |
src/main/python/bots/b_jira.py
|
jceaser/gcmd_bot
|
2b2ae0631d69d9f95a3a23b04e12a4467a116ffa
|
[
"MIT"
] | null | null | null |
from b_bot import BBot
from rand_str import *
class BJira(BBot):
def __init__(self):
BBot.__init__(self)
self.responses = RandomString(
[
"Looks like you were talking about ticket"
, "You might find that ticket at"
, "Try"
])
def action(self, cmd, id, found):
url = "https://bugs.earthdata.nasa.gov/browse/%s" % found.group(1)
return "%s %s" % (self.responses.pick(), url)
| 28.176471
| 74
| 0.553236
| 59
| 479
| 4.322034
| 0.711864
| 0.062745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003096
| 0.325679
| 479
| 17
| 75
| 28.176471
| 0.786378
| 0
| 0
| 0
| 0
| 0
| 0.246347
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef7c66788463fc4b72dcc5b29d43203643002b12
| 2,295
|
py
|
Python
|
preprocessors/neg_sample_from_run.py
|
felipemoraes/pyNeuIR
|
5256857387c8fe57d28167e42077ad1dcade1983
|
[
"MIT"
] | 4
|
2019-11-09T19:46:44.000Z
|
2022-01-03T07:58:20.000Z
|
preprocessors/neg_sample_from_run.py
|
felipemoraes/pyNeuIR
|
5256857387c8fe57d28167e42077ad1dcade1983
|
[
"MIT"
] | null | null | null |
preprocessors/neg_sample_from_run.py
|
felipemoraes/pyNeuIR
|
5256857387c8fe57d28167e42077ad1dcade1983
|
[
"MIT"
] | 3
|
2019-06-18T12:31:49.000Z
|
2020-11-22T08:35:07.000Z
|
"""Samples negative pairs from run."""
import argparse
from utils import load_qrels, load_run
import numpy as np
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-run')
parser.add_argument('-qrel')
parser.add_argument('-p', type=int)
parser.add_argument('-n', type=int)
parser.add_argument('-top', type=int)
parser.add_argument('-o')
args = parser.parse_args()
qrels, _ = load_qrels(args.qrel)
n = args.n
p = args.p
top = args.top
f = open(args.o, "w")
np.random.seed(230)
# Here documents with the lowest label (e.g, 0) should be ranked lower
previous_qid = "-"
c = 0
for line in open(args.run):
qid, _, doc, _, score, _ = line.strip().split()
if qid not in qrels:
continue
# Get relevants for query
rels = set()
if qid != previous_qid and previous_qid != "-" :
c += 1
for label in qrels[qid]:
if label != "0":
for doc in qrels[qid][label]:
rels.add(doc)
# Get top 100 non rel docs
top_nonrels = [doc for doc in sorted(results, key=results.get, reverse=True) if doc not in rels][:top]
rels = list(rels)
if len(top_nonrels) == 0:
results = {doc: float(score)}
break
if len(rels) > p:
rels = np.random.choice(rels, p, replace=False)
for rel_doc in rels:
if len(top_nonrels) < n:
sample_neg_docs = top_nonrels
r = n - len(sample_neg_docs)
for i in range(r):
sample_neg_docs.append(np.random.choice(top_nonrels, 1)[0])
else:
sample_neg_docs = np.random.choice(top_nonrels, n, replace=False)
sample_neg_docs = " ".join(sample_neg_docs)
f.write("{} {} {}\n".format(qid, rel_doc, sample_neg_docs))
results = {doc: float(score)}
elif previous_qid == "-":
results = {doc: float(score)}
else:
results[doc] = float(score)
previous_qid = qid
print(c)
f.close()
if __name__ == "__main__":
main()
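# Hedged usage note (file names are illustrative; the run and qrel files are in TREC format):
#     python neg_sample_from_run.py -run bm25.run -qrel qrels.txt -p 5 -n 10 -top 100 -o train_triples.txt
# Each output line has the form "<qid> <relevant_doc> <neg_doc_1> ... <neg_doc_n>".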
| 28.333333
| 114
| 0.525054
| 290
| 2,295
| 3.986207
| 0.337931
| 0.054498
| 0.07872
| 0.069204
| 0.136678
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008802
| 0.356427
| 2,295
| 80
| 115
| 28.6875
| 0.773866
| 0.065795
| 0
| 0.086207
| 0
| 0
| 0.020131
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017241
| false
| 0
| 0.051724
| 0
| 0.068966
| 0.017241
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef7cc313a84b2a9b9ea62469241644f5f1b9560b
| 1,123
|
py
|
Python
|
Search_Algorithms/testing/python_scripts/GridNet.py
|
JAOP1/GO
|
48c0275fd37bb552c0db4b968391a5a95ed6c860
|
[
"MIT"
] | null | null | null |
Search_Algorithms/testing/python_scripts/GridNet.py
|
JAOP1/GO
|
48c0275fd37bb552c0db4b968391a5a95ed6c860
|
[
"MIT"
] | null | null | null |
Search_Algorithms/testing/python_scripts/GridNet.py
|
JAOP1/GO
|
48c0275fd37bb552c0db4b968391a5a95ed6c860
|
[
"MIT"
] | 2
|
2019-12-12T18:55:35.000Z
|
2019-12-12T19:03:35.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
# As it stands right now, this only works for a 5x5 graph.
class NNGrid(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(in_channels = 3, out_channels= 5, kernel_size= 3, padding= 1)
self.conv2 = nn.Conv2d(in_channels = 5, out_channels= 5, kernel_size= 3, padding= 1)
self.conv3 = nn.Conv2d(in_channels = 5, out_channels= 5, kernel_size= 2, padding= 1)
self.fc1 = nn.Linear(180,90)
self.fc2 = nn.Linear(90,45)
self.fc3 = nn.Linear(45,1)
self.drop1 = nn.Dropout()
self.drop2 = nn.Dropout()
def forward(self, x):
#print(x.shape)
x = F.relu(self.conv1(x))
#print(x.shape)
x = F.relu(self.conv2(x))
#print(x.shape)
x = F.relu(self.conv3(x))
#print(x.shape)
x = x.view(-1,180)
#print(x.shape)
x = self.drop1(F.relu(self.fc1(x)))
#print(x.shape)
x = self.drop2(F.relu(self.fc2(x)))
#print(x.shape)
return torch.tanh(self.fc3(x))
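# Hedged shape check: with a 3-channel 5x5 board the three convolutions yield a
# 5x6x6 tensor, i.e. 180 features, matching fc1's input size. Illustrative only.
if __name__ == "__main__":
    net = NNGrid()
    dummy = torch.randn(4, 3, 5, 5)   # batch of four boards, 3 feature planes each
    out = net(dummy)
    print(out.shape)                  # expected: torch.Size([4, 1])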
| 31.194444
| 92
| 0.577026
| 173
| 1,123
| 3.647399
| 0.317919
| 0.066561
| 0.122029
| 0.114105
| 0.383518
| 0.310618
| 0.310618
| 0.310618
| 0.206022
| 0.129952
| 0
| 0.060197
| 0.275156
| 1,123
| 35
| 93
| 32.085714
| 0.714988
| 0.139804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.136364
| 0
| 0.318182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef7d0ee9d64040c9087075b823e521c746835c31
| 3,436
|
py
|
Python
|
instances/game_instances.py
|
Napam/MayhemPacman
|
cbcb3b4a2c83ed920e32748a8aaadb29b19ab5bf
|
[
"MIT"
] | 1
|
2021-04-07T12:54:13.000Z
|
2021-04-07T12:54:13.000Z
|
instances/game_instances.py
|
Napam/MayhemPacman
|
cbcb3b4a2c83ed920e32748a8aaadb29b19ab5bf
|
[
"MIT"
] | null | null | null |
instances/game_instances.py
|
Napam/MayhemPacman
|
cbcb3b4a2c83ed920e32748a8aaadb29b19ab5bf
|
[
"MIT"
] | null | null | null |
'''
Module containing the in-game mayhem instances
such as the ship, planets, asteroid objects etc etc...
Written by Naphat Amundsen
'''
import numpy as np
import pygame as pg
import configparser
import sys
import os
sys.path.insert(0,'..')
from classes import spaceship
from classes import planet
from classes import maps
from classes import interface
import user_settings as user_cng
from instances import instance_config as icng
pg.font.init()
w_shape = user_cng.w_shape
w_norm = np.linalg.norm(w_shape)
COLORS = pg.colordict.THECOLORS
# The initial values of the objects
# are mostly just educated guesses
game_map = maps.game_map(
map_shape=(icng.map_shape)
)
minimap = maps.minimap(
gmap=game_map,
w_shape=w_shape,
w_norm=w_norm)
ship = spaceship.spaceship(
pos=(200,200),
init_dir=icng.RIGHT
)
sun = planet.planet(
pos=game_map.center,
init_vel=None,
init_dir=None,
rforce=None
)
earth = planet.rotating_planet(
pos=(game_map.shape[0]/2, 800),
init_vel=[-3,0],
init_dir=[1,0],
r_force=25000,
omega=0.25
)
venus = planet.rotating_planet(
pos=(game_map.shape[0]/2, 2000),
init_vel=[-5,0],
init_dir=[1,0],
r_force=40000,
omega=0.25
)
asteroids = [
planet.rotating_planet(
pos=(3000, 1000),
init_vel=[-8,2],
init_dir=[1,0],
r_force=150000,
omega=0.25
),
planet.rotating_planet(
pos=(1200, 1000),
init_vel=[10,1],
init_dir=[1,0],
r_force=390000,
omega=0.25
),
planet.rotating_planet(
pos=(500, 2000),
init_vel=[2,10],
init_dir=[1,0],
r_force=540000,
omega=0.25
),
planet.rotating_planet(
pos=(6500, 6000),
init_vel=[5,-15],
init_dir=[1,0],
r_force=1500000,
omega=0.5
),
planet.rotating_planet(
pos=(6000, 6000),
init_vel=[-15,1],
init_dir=[1,0],
r_force=1000000,
omega=0.5
),
planet.rotating_planet(
pos=(6000, 500),
init_vel=[-8,-2],
init_dir=[1,0],
r_force=600000,
omega=0.25
),
planet.rotating_planet(
pos=(5000, 2000),
init_vel=[-2,-8],
init_dir=[1,0],
r_force=200000,
omega=0.25
),
planet.rotating_planet(
pos=(game_map.shape[0]/2, 800),
init_vel=[15,0],
init_dir=[1,0],
r_force=590000,
omega=0.25
),
planet.rotating_planet(
pos=(5000, game_map.shape[1]/2),
init_vel=[0,10],
init_dir=[1,0],
r_force=150000,
omega=0.25
),
]
# For convenience
planets = [earth, venus]
all_celestials = planets + asteroids
minimap_colors = [
COLORS['white'],
COLORS['orange'],
COLORS['blue'],
COLORS['green']
]
minimap_sizes = [
1,
int(500/5000*minimap.shape[0]),
int(250/5000*minimap.shape[0]),
1
]
'''Minimap stuff for LAN-mayhem'''
minimap_colors_online = [
COLORS['white'],
COLORS['orange'],
COLORS['blue'],
COLORS['green'],
COLORS['red'],
]
minimap_sizes_online = [
1,
int(500/5000*minimap.shape[0]),
int(250/5000*minimap.shape[0]),
1,
3
]
| 20.093567
| 55
| 0.556752
| 457
| 3,436
| 4.019694
| 0.260394
| 0.049537
| 0.11976
| 0.137725
| 0.416984
| 0.416984
| 0.400653
| 0.296135
| 0.173108
| 0.152967
| 0
| 0.112374
| 0.308498
| 3,436
| 170
| 56
| 20.211765
| 0.660774
| 0.062282
| 0
| 0.407143
| 0
| 0
| 0.014965
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.078571
| 0
| 0.078571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef7dd46d9034574570b5f449e1ddf8eb84731597
| 286
|
py
|
Python
|
test.py
|
RoyLQ/Advanced-_TCGAIntegrator
|
4767ab74b14e9d7e65e2c1ffe656619ef414148b
|
[
"MIT"
] | 2
|
2021-09-14T05:53:16.000Z
|
2021-12-01T23:59:18.000Z
|
test.py
|
RoyLQ/Advanced-_TCGAIntegrator
|
4767ab74b14e9d7e65e2c1ffe656619ef414148b
|
[
"MIT"
] | null | null | null |
test.py
|
RoyLQ/Advanced-_TCGAIntegrator
|
4767ab74b14e9d7e65e2c1ffe656619ef414148b
|
[
"MIT"
] | null | null | null |
import sys
import os
simp_path = 'TCGAIntegrator'
abs_path = os.path.abspath(simp_path)
sys.path.append(abs_path)
from TCGAIntegrator import TCGAData as TCGAData
def main():
df = TCGAData.loadData("LGG",mode="Hybird")
print(df.shape)
if __name__ == '__main__':
main()
| 15.888889
| 47
| 0.716783
| 40
| 286
| 4.825
| 0.575
| 0.082902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160839
| 286
| 17
| 48
| 16.823529
| 0.804167
| 0
| 0
| 0
| 0
| 0
| 0.108392
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.272727
| 0
| 0.363636
| 0.090909
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef7f8e86f21851da0cc13ef9dc3a597eb38daaa9
| 1,649
|
py
|
Python
|
synergy/conf/global_context.py
|
mushkevych/scheduler
|
8228cde0f027c0025852cb63a6698cdd320838f1
|
[
"BSD-3-Clause"
] | 15
|
2015-02-01T09:20:23.000Z
|
2021-04-27T08:46:45.000Z
|
synergy/conf/global_context.py
|
mushkevych/scheduler
|
8228cde0f027c0025852cb63a6698cdd320838f1
|
[
"BSD-3-Clause"
] | 26
|
2015-01-12T22:28:40.000Z
|
2021-07-05T01:22:17.000Z
|
synergy/conf/global_context.py
|
mushkevych/scheduler
|
8228cde0f027c0025852cb63a6698cdd320838f1
|
[
"BSD-3-Clause"
] | 2
|
2016-07-21T03:02:46.000Z
|
2019-10-03T23:59:23.000Z
|
from synergy.db.model.queue_context_entry import queue_context_entry
from synergy.scheduler.scheduler_constants import PROCESS_GC, TOKEN_GC, PROCESS_MX, TOKEN_WERKZEUG, EXCHANGE_UTILS, \
PROCESS_SCHEDULER, TOKEN_SCHEDULER, QUEUE_UOW_STATUS, QUEUE_JOB_STATUS, PROCESS_LAUNCH_PY, TOKEN_LAUNCH_PY, \
ROUTING_IRRELEVANT
from synergy.supervisor.supervisor_constants import PROCESS_SUPERVISOR, TOKEN_SUPERVISOR
from synergy.db.model.daemon_process_entry import daemon_context_entry
process_context = {
PROCESS_LAUNCH_PY: daemon_context_entry(
process_name=PROCESS_LAUNCH_PY,
classname='',
token=TOKEN_LAUNCH_PY,
routing=ROUTING_IRRELEVANT,
exchange=EXCHANGE_UTILS),
PROCESS_MX: daemon_context_entry(
process_name=PROCESS_MX,
token=TOKEN_WERKZEUG,
classname=''),
PROCESS_GC: daemon_context_entry(
process_name=PROCESS_GC,
token=TOKEN_GC,
classname=''),
PROCESS_SCHEDULER: daemon_context_entry(
process_name=PROCESS_SCHEDULER,
classname='synergy.scheduler.synergy_scheduler.Scheduler.start',
token=TOKEN_SCHEDULER,
queue='',
routing='',
exchange=''),
PROCESS_SUPERVISOR: daemon_context_entry(
process_name=PROCESS_SUPERVISOR,
classname='synergy.supervisor.synergy_supervisor.Supervisor.start',
token=TOKEN_SUPERVISOR),
}
mq_queue_context = {
QUEUE_UOW_STATUS: queue_context_entry(exchange=EXCHANGE_UTILS, queue_name=QUEUE_UOW_STATUS),
QUEUE_JOB_STATUS: queue_context_entry(exchange=EXCHANGE_UTILS, queue_name=QUEUE_JOB_STATUS),
}
timetable_context = {
}
| 35.085106
| 117
| 0.753184
| 190
| 1,649
| 6.089474
| 0.163158
| 0.103717
| 0.093345
| 0.129646
| 0.294728
| 0.294728
| 0.100259
| 0.100259
| 0.100259
| 0.100259
| 0
| 0
| 0.170406
| 1,649
| 46
| 118
| 35.847826
| 0.84576
| 0
| 0
| 0.051282
| 0
| 0
| 0.063675
| 0.063675
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.102564
| 0
| 0.102564
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef8179e868198d6a8e03937bb76a29cb988fcda9
| 6,164
|
py
|
Python
|
67-2.py
|
paqul/ALX
|
0f397b53f8208df62ed3bc1f63f27a087799eb32
|
[
"MIT"
] | null | null | null |
67-2.py
|
paqul/ALX
|
0f397b53f8208df62ed3bc1f63f27a087799eb32
|
[
"MIT"
] | null | null | null |
67-2.py
|
paqul/ALX
|
0f397b53f8208df62ed3bc1f63f27a087799eb32
|
[
"MIT"
] | null | null | null |
from datetime import date as d
#---------------------------HOTEL---------------------------#
def exit_function():
print("Do zobaczenia!")
exit()
def forumarz_rezerwacji(var1, var2, var3, var4, var5, var6, var7, var8, ile_dni, ile_osob, sniadanie, imie):
print("\n"*3)
print("================================")
print("| FORMULARZ REJESTRACYJNY |")
print("|------------------------------|")
print("| Data przybycia: %i%i-%i%i-%i%i%i%i |" % (var7, var8, var5, var6, var1, var2, var3, var4))
print("| Ilość dni pobytu: %3i |" % ile_dni)
print("| Ilość osób: %3i |" % ile_osob)
print("| Śniadania w każdy dzień: %s |" % sniadanie)
print("| Rezerwujący: %15s |" % imie)
print("|------------------------------|")
print("| ŻYCZYMY MIŁEGO POBYTU |")
print("| W NASZYM HOTELU |")
print("================================")
exit_function()
def termin_przybycia():
print("Podaj date przybycia do hotelu w formacie (rrrr-mm-dd) np. 2018-12-07")
while True:
data_przybycia=input("Data przybycia: ")
tab = []
for var in data_przybycia:
try:
var = int(var)
tab.append(var)
except:
myslnik = var
if len(tab) == 8:
dzien = int(("%i" + "%i") % (tab[6], tab[7]))
miesiac = int(("%i" + "%i") % (tab[4], tab[5]))
if dzien > 31 or miesiac > 12:
print("Popełniłeś błąd przy wpisywaniu daty - proszę sprawź to i wpisz jeszcze raz poprawnie!")
termin_przybycia()
else:
try:
if data_przybycia == ("%i%i%i%i-%i%i-%i%i"% (tab[0], tab[1], tab[2], tab[3], tab[4], tab[5], tab[6], tab[7])):
if data_przybycia > str(d.today()):
print("Podana data: %s - została zaakceptowana" % data_przybycia)
return tab[0], tab[1], tab[2], tab[3], tab[4], tab[5], tab[6], tab[7]
else:
print("Podana data jest z przeszłości lub teraźniejszości, podaj ją jeszcze raz najbliższy możliwy termin rezerwacji to jutro!")
except IndexError:
print("Podałeś datę przybycia w niewłaściwym formacie")
else:
print("Podales date w niewłaściwym formacie - spróboj jeszcze raz")
def ilosc_dni():
while True:
try:
ile_dni = int(input("Podaj liczbę dni, przez jaką zostaniesz w hotelu: "))
if ile_dni > 731:
print("Jeżeli chcesz zostać w hotelu powyżej dwóch lat to powinieneś to osobiście ustalić z włąścicielem bezpośrednio podczas pobytu")
print("Wpisz np 14 dni - przyjedź na 2 tygodnie i resztę swojego pobytu ustal z właścicielem")
elif ile_dni <= 0:
print("Dalsze wypełnianie formularza nie ma sensu, skoro nawet 1 dnia nie zostaniesz w hotelu")
while True:
dec = input("Czy chcesz zacząc wypełniać formularz od początku: (t/n)")
dec = dec.lower()
if dec == "t":
main()
elif dec == "n":
exit_function()
else:
print("Wpisałeś nie poprawnie")
else:
return ile_dni
except ValueError:
print("Podaj prosze ilość dni (nie używaj ułamków)")
def ilosc_osob():
while True:
try:
ile_osob = int(input("Podaj ile osob z tobą przyjedzie?, Jeżeli będziesz sam to wpisz \"0\""))
ile_osob += 1
if ile_osob > 100:
print("Jeżeli chcesz zabrać ze sobą regiment wojska to proszę o bezpośredni kontakt z hotelem")
else:
return ile_osob
except ValueError:
print("Podaj prosze ilość osób które przybędą razem z tobą (nie używaj ułamków)")
def sniadanie():
while True:
sniadanie = input("Czy chcesz zamówić dodatkowo śniadanie w hotelu na każdy dzień pobytu: (T/N)")
sniadanie = sniadanie.lower()
if sniadanie == "t":
return "tak"
elif sniadanie == "n":
return "nie"
else:
print("Wpisałeś nie poprawnie")
def imie_fun():
imie = input("Podaj swoje imie: ")
imie_OK = imie # quick workaround :)
imie = list(imie)
counter_error = 0
for sprawdz in imie:
if sprawdz.isdigit() == True or sprawdz in "~!@#$%^&*()_+-={}[]'\/,.<>":
counter_error += 1
if counter_error >= 1:
print("Błąd! - Podaj imie bez cyfr oraz znaków innych niż litery w imieniu!")
return imie_fun()
else:
return imie_OK.capitalize()
def rezerwacja():
print("Prosze o podanie natepujacych informacji w celu rezerwacji w naszym hotelu")
[var1, var2, var3, var4, var5, var6, var7, var8] = termin_przybycia()
days = ilosc_dni()
people = ilosc_osob()
breakfest = sniadanie()
name = imie_fun()
return forumarz_rezerwacji(var1, var2, var3, var4, var5, var6, var7, var8, days, people, breakfest, name)
def opishotelu():
print("Nasz hotel wogole jest super i inny \"marketingowy belkot\", \ntak aby zachecic Cie do rezerwacji i wydania pieniedzy na hotel!")
while True:
decyzja=input("Czy napewno chcesz wyjsc z aplikacji? (t/n)")
if decyzja == "t":
exit_function()
elif decyzja == "n":
main()
else:
print("Wpisz \"t\" lub \"n\"")
def main():
print("Witaj w aplikacji Hotelowej!\nCzy chcesz zarezerwować sobie miejsce w Hotelu? (t/n)", end="")
while True:
decyzja=input()
if decyzja == "t":
rezerwacja()
elif decyzja == "n":
opishotelu()
else:
print("Wpisz \"t\" lub \"n\"", end="")
main()
| 39.512821
| 157
| 0.514114
| 691
| 6,164
| 4.529667
| 0.353111
| 0.010224
| 0.011502
| 0.01278
| 0.130351
| 0.111821
| 0.076038
| 0.076038
| 0.065815
| 0.065815
| 0
| 0.021314
| 0.345393
| 6,164
| 155
| 158
| 39.767742
| 0.754399
| 0.013628
| 0
| 0.293233
| 0
| 0.015038
| 0.35461
| 0.026005
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075188
| false
| 0
| 0.007519
| 0
| 0.142857
| 0.255639
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef841a52c1f626cc7c84690f06d3bbb17715d9c8
| 3,733
|
py
|
Python
|
GreedyGRASP/Solver_Greedy.py
|
HamidL/AMMM_Project
|
7679d1c336578464317b8326311c1ab4b69cbf11
|
[
"MIT"
] | null | null | null |
GreedyGRASP/Solver_Greedy.py
|
HamidL/AMMM_Project
|
7679d1c336578464317b8326311c1ab4b69cbf11
|
[
"MIT"
] | null | null | null |
GreedyGRASP/Solver_Greedy.py
|
HamidL/AMMM_Project
|
7679d1c336578464317b8326311c1ab4b69cbf11
|
[
"MIT"
] | null | null | null |
from GreedyGRASP.Solver import Solver
from GreedyGRASP.Solution import Solution
from GreedyGRASP.LocalSearch import LocalSearch
# Inherits from a parent abstract solver.
class Solver_Greedy(Solver):
def greedyFunctionCost(self, solution, remainCap, busesAssignments):
for busAssi in busesAssignments:
bus = solution.getBuses()[busAssi.bus]
service = solution.getServices()[busAssi.service]
if (remainCap <= bus.getCapacity()):
cost = busAssi.cost + busAssi.cost*(bus.getCapacity()-remainCap)/bus.getCapacity()
else:
cost = busAssi.cost + (busAssi.cost + service.getMinutes()*solution.inputData.CBM) * remainCap / bus.getCapacity()
busAssi.greedyCost = cost
return busesAssignments
def greedyConstruction(self, config, problem):
# get an empty solution for the problem
solution = Solution.createEmptySolution(config, problem)
# get tasks and sort them by their total required resources in descending order
services = problem.getServices()
sortedServices = sorted(services,
key=lambda service: (service.getPassengers(), service.getNumOverlappingServices()),
reverse=True)
elapsedEvalTime = 0
evaluatedCandidates = 0
# for each task taken in sorted order
for service in sortedServices:
serviceId = service.getId()
busesAssignments, driversAssignments = solution.findFeasibleAssignments(serviceId)
remainCap = service.getPassengers()
selBuses = []
while (remainCap > 0 and len(busesAssignments) > 0):
busesAssignments = self.greedyFunctionCost(solution, remainCap, busesAssignments)
busesAssignments = sorted(busesAssignments, key=lambda busAssi: busAssi.greedyCost)
candidate = busesAssignments[0]
if (candidate is None):
solution.makeInfeasible()
break
selBuses.append(candidate)
busesAssignments.remove(candidate)
remainCap -= problem.getBuses()[candidate.bus].getCapacity()
if (remainCap > 0):
solution.makeInfeasible()
break
sortedDriversAssignments = sorted(driversAssignments, key=lambda driverAssi: driverAssi.cost)
if (len(sortedDriversAssignments) < len(selBuses)):
solution.makeInfeasible()
break
for i in range(0,len(selBuses)):
solution.assign(sortedDriversAssignments[i], selBuses[i])
return(solution, elapsedEvalTime, evaluatedCandidates)
def solve(self, config, problem):
self.startTimeMeasure()
self.writeLogLine(float('infinity'), 0)
solution, elapsedEvalTime, evaluatedCandidates = self.greedyConstruction(config, problem)
self.writeLogLine((solution.cost), 1)
localSearch = LocalSearch(config)
solution = localSearch.run(solution)
self.writeLogLine(solution.cost, 1)
avg_evalTimePerCandidate = 0.0
if (evaluatedCandidates != 0):
avg_evalTimePerCandidate = 1000.0 * elapsedEvalTime / float(evaluatedCandidates)
print ('')
print ('Greedy Candidate Evaluation Performance:')
print (' Num. Candidates Eval.', evaluatedCandidates)
print (' Total Eval. Time ', elapsedEvalTime, 's')
print (' Avg. Time / Candidate', avg_evalTimePerCandidate, 'ms')
localSearch.printPerformance()
return(solution)
| 41.477778
| 130
| 0.625234
| 310
| 3,733
| 7.516129
| 0.341935
| 0.030043
| 0.025751
| 0.018884
| 0.04721
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006808
| 0.291722
| 3,733
| 89
| 131
| 41.94382
| 0.874433
| 0.051165
| 0
| 0.092308
| 0
| 0
| 0.033937
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046154
| false
| 0.030769
| 0.046154
| 0
| 0.123077
| 0.092308
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef854c6d7447ee5fbf75a72fb0ffd6549ac302f6
| 5,654
|
py
|
Python
|
statslib/_lib/gmodel.py
|
ashubertt/statslib
|
5a35c0d10c3ca44c2d48f329c4f3790c91c385ac
|
[
"Apache-2.0"
] | null | null | null |
statslib/_lib/gmodel.py
|
ashubertt/statslib
|
5a35c0d10c3ca44c2d48f329c4f3790c91c385ac
|
[
"Apache-2.0"
] | 1
|
2021-04-06T10:55:34.000Z
|
2021-04-06T10:55:34.000Z
|
statslib/_lib/gmodel.py
|
ashubertt/statslib
|
5a35c0d10c3ca44c2d48f329c4f3790c91c385ac
|
[
"Apache-2.0"
] | null | null | null |
import inspect
import math as _math
from copy import deepcopy
import matplotlib.pyplot as _plt
import numpy as np
import pandas as pd
import statsmodels.api as _sm
from statslib._lib.gcalib import CalibType
class GeneralModel:
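# Wraps a calibration config (gc) and a design matrix (DM); fit() dispatches on gc.calib_type (CalibType.sm or CalibType.sk), forecast() produces predictions and residuals.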
def __init__(self, gc, DM):
self.gc = deepcopy(gc)
self.DM = deepcopy(DM)
self.calibrator = None
self.fitted = None
self.v_hat = None
self.y0 = None
self.y_hat = None
self.residuals = None
def exog(self, idx):
return self.DM.gX.iloc[idx] if self.DM.gX is not None else None
def endog(self, idx):
return self.DM.dm.v.iloc[idx]
def fit(self, idx, **kwargs):
if self.gc.calib_type is CalibType.sm:
self.calibrator = self.gc.cf(endog=self.endog(idx),
exog=self.exog(idx),
**self.gc.kwargs)
self.fitted = self.calibrator.fit(**kwargs)
if self.gc.calib_type is CalibType.sk:
self.calibrator = self.gc.cf(**self.gc.kwargs)
self.fitted = self.calibrator.fit(self.exog(idx), self.endog(idx))
self.y0 = self.DM.dm.y.iloc[idx].tail(self.DM.f.n)
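# Predict over the rows in idx, invert the transform back to the original scale, and compute standardized residuals when possible.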
def forecast(self, idx):
def sumofsq(x, axis=0):
"""Helper function to calculate sum of squares along first axis"""
return np.sum(x ** 2, axis=axis)
self.forecast_index = idx
if 'start' in inspect.signature(self.fitted.predict).parameters:
self.v_hat = self.fitted.predict(
self.exog(idx).index.min(),
self.exog(idx).index.max(),
exog=self.exog(idx))
else:
if self.gc.calib_type is CalibType.sm:
self.v_hat = self.fitted.predict(exog=self.exog(idx))
if self.gc.calib_type is CalibType.sk:
self.v_hat = self.fitted.predict(self.exog(idx))
self.v_hat = pd.Series(self.v_hat, index=self.exog(idx).index).rename('v_hat')
self.y_hat = self.DM.f.inv(self.v_hat, y0=self.y0, idx=self.v_hat.index)
try:
self.residuals = self.DM.dm.loc[self.v_hat.index]['v'].values - self.v_hat.values
sigma2 = 1.0 / self.fitted.nobs * sumofsq(self.residuals)
self.std_residuals = self.residuals / np.sqrt(sigma2)
self.residuals = pd.Series(self.residuals, index=self.v_hat.index)  # wrap the raw residuals; the standardized ones are stored separately below
self.std_residuals = pd.Series(self.std_residuals, index=self.v_hat.index)
except Exception:
pass
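# Diagnostic plots: standardized residuals over time, histogram with KDE vs. N(0,1), QQ plot, correlogram, residuals vs. fitted values, and residuals vs. each explanatory variable.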
def plot_diagnostics(self, figsize=(15, 15), drop_names=None):
import math
if drop_names is None:
drop_names = list()
std_resid = self.std_residuals
if std_resid is not None:
fig, axs = _plt.subplots(3, 2, figsize=figsize)
from statslib.utils.plots import get_standard_colors
clrs = get_standard_colors()
std_resid.plot(ax=axs[0, 0], color=clrs[1])
axs[0, 0].hlines(0, self.v_hat.index.min(), self.v_hat.index.max())
axs[0, 0].set_title('Standardized residuals')
axs[0, 1].hist(std_resid.values, density=True)
from scipy.stats import gaussian_kde, norm
kde = gaussian_kde(std_resid)
xlim = (-1.96 * 2, 1.96 * 2)
x = np.linspace(xlim[0], xlim[1])
axs[0, 1].plot(x, kde(x), label="KernelDensityEstimator")
axs[0, 1].plot(x, norm.pdf(x), label="N(0,1)")
axs[0, 1].set_xlim(xlim)
axs[0, 1].legend()
axs[0, 1].set_title("Histogram plus estimated density")
_sm.graphics.qqplot(std_resid.values, line='q', fit=True, ax=axs[1, 0])
axs[1, 0].set_title('Normal QQ Plot')
_sm.graphics.tsa.plot_acf(std_resid, ax=axs[1, 1])
axs[1, 1].set_title('Correlogram')
axs[2, 0].scatter(self.fitted.fittedvalues, self.residuals.values, color='red')
axs[2, 0].hlines(0, min(self.fitted.fittedvalues), max(self.fitted.fittedvalues), color='blue')
axs[2, 0].set_xlabel('fitted')
axs[2, 0].set_ylabel('resid')
axs[2, 0].set_title('Fitted values vs. Residuals')
axs[2, 1].scatter(range(len(self.std_residuals)), self.std_residuals.values, color='red')
axs[2, 1].hlines(0, 0, len(self.std_residuals.values), color='blue')
axs[2, 1].set_xlabel('index')
axs[2, 1].set_ylabel('std_resid')
axs[2, 1].set_title('Index plot of standardized residuals')
_plt.tight_layout()
_plt.show()
print(" ")
L = 2
K = _math.ceil(len([k for k in self.DM.exog_names if k not in drop_names]) / L)
i = j = 0
fig, axs = _plt.subplots(K, L, figsize=(15, 15))
for curve in self.DM.exog_names:
if curve not in drop_names:
x_vals = self.DM.dm_ext[curve].iloc[self.forecast_index].values.tolist()
axs[i, j].scatter(x_vals, self.std_residuals.values)
min_max_x = [x for x in x_vals if not math.isnan(x)]
axs[i, j].hlines(0, min(min_max_x), max(min_max_x), color='blue')
axs[i, j].set_xlabel(curve)
axs[i, j].set_ylabel('std_res')
j += 1
if j % L == 0:
i += 1
j = 0
_plt.suptitle('Standardized Residuals vs. Explanatory Variable')
_plt.tight_layout(pad=3)
_plt.show()
| 40.385714
| 107
| 0.561726
| 802
| 5,654
| 3.836658
| 0.214464
| 0.0195
| 0.036399
| 0.029574
| 0.213845
| 0.165421
| 0.128697
| 0.128697
| 0.099448
| 0.033149
| 0
| 0.02328
| 0.308631
| 5,654
| 139
| 108
| 40.676259
| 0.763878
| 0.010612
| 0
| 0.052632
| 0
| 0
| 0.050116
| 0.003938
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061404
| false
| 0.008772
| 0.096491
| 0.017544
| 0.192982
| 0.008772
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef86d428f2e17ef9b526fc491dcb0a17513a95ba
| 1,581
|
py
|
Python
|
app.py
|
Chen-Junbao/MalwareClassification
|
a2ef045c1e5f1f57ff183bfc6577275b14bf84d2
|
[
"MIT"
] | 4
|
2020-06-17T03:14:47.000Z
|
2022-03-29T12:15:33.000Z
|
app.py
|
Chen-Junbao/MalwareClassification
|
a2ef045c1e5f1f57ff183bfc6577275b14bf84d2
|
[
"MIT"
] | 1
|
2020-12-20T03:14:33.000Z
|
2021-02-01T17:13:44.000Z
|
app.py
|
Chen-Junbao/MalwareClassification
|
a2ef045c1e5f1f57ff183bfc6577275b14bf84d2
|
[
"MIT"
] | 1
|
2021-03-07T15:43:20.000Z
|
2021-03-07T15:43:20.000Z
|
import os
from flask import Flask, render_template, request, jsonify
from display.predict import predict_file
app = Flask(__name__, static_folder="./display/static", template_folder="./display/templates")
@app.route('/')
def en():
return render_template('index_EN.html')
@app.route('/chs')
def chs():
return render_template('index_CHS.html')
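# Upload endpoint: saves the posted file and runs the matching classifier based on its extension (asm, bytes or bmp).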
@app.route('/uploader', methods=['POST', 'GET'])
def predict_image():
if request.method == 'POST':
# get uploaded file
file = request.files['file']
# save uploaded file to "files" directory
file.save("./display/files/" + file.filename)
file_type = file.filename.split('.')[-1]
ans = {}
if file_type == "asm":
probability = predict_file("./display/files/" + file.filename, "asm")
ans = {
'fileName': file.filename,
'probabilityLD': probability
}
elif file_type == "bytes":
probability = predict_file("./display/files/" + file.filename, "bytes")
ans = {
'fileName': file.filename,
'probabilityResNet': probability
}
elif file_type == "bmp":
probability = predict_file("./display/files/" + file.filename, "bmp")
ans = {
'fileName': file.filename,
'probabilityResNet': probability
}
return jsonify(ans)
if __name__ == '__main__':
if not os.path.exists('./display/files'):
os.mkdir('./display/files')
app.run(debug=True)
| 29.277778
| 94
| 0.571157
| 162
| 1,581
| 5.401235
| 0.339506
| 0.109714
| 0.073143
| 0.109714
| 0.274286
| 0.274286
| 0.157714
| 0
| 0
| 0
| 0
| 0.000883
| 0.283365
| 1,581
| 53
| 95
| 29.830189
| 0.771403
| 0.036053
| 0
| 0.2
| 0
| 0
| 0.188692
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075
| false
| 0
| 0.075
| 0.05
| 0.225
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef873aee93350e545e2097a1a737710da0346193
| 886
|
py
|
Python
|
test_linprog_curvefit.py
|
drofp/linprog_curvefit
|
96ba704edae7cea42d768d7cc6d4036da2ba313a
|
[
"Apache-2.0"
] | null | null | null |
test_linprog_curvefit.py
|
drofp/linprog_curvefit
|
96ba704edae7cea42d768d7cc6d4036da2ba313a
|
[
"Apache-2.0"
] | 3
|
2019-11-22T08:04:18.000Z
|
2019-11-26T06:55:36.000Z
|
test_linprog_curvefit.py
|
drofp/linprog_curvefit
|
96ba704edae7cea42d768d7cc6d4036da2ba313a
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from ortools.linear_solver import pywraplp
class TestLinprogCurvefit(unittest.TestCase):
def setUp(self):
linprog_curvefit = __import__('linprog_curvefit')
self.generate_variables = linprog_curvefit._generate_variables
self.ErrorDefinition = linprog_curvefit.ErrorDefinition
def test_generate_variables_2PointsLinearCorrectVarCnt(self):
points = ((0, 0), (1.5, 3))
coeff_ranges = ((-10, 10), (-10, 10))
solver = pywraplp.Solver(
'polynomial_solver', pywraplp.Solver.GLOP_LINEAR_PROGRAMMING)
err_def = self.ErrorDefinition.SUM_ABS_DEV
expected_num_of_vars = 6
variables = self.generate_variables(
solver, points=points, coeff_ranges=coeff_ranges, err_max=10000,
error_def=err_def)
self.assertEqual(len(variables), expected_num_of_vars)
| 40.272727
| 80
| 0.700903
| 98
| 886
| 5.989796
| 0.459184
| 0.102215
| 0.07155
| 0.057922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028694
| 0.213318
| 886
| 21
| 81
| 42.190476
| 0.813486
| 0
| 0
| 0
| 0
| 0
| 0.037246
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 1
| 0.111111
| false
| 0
| 0.166667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef87a851b0ff397ab056489c49ee4d54f1a8b8b0
| 14,278
|
py
|
Python
|
uno_ct_v3.py
|
simple-circuit/Component-Curve-Tracer
|
3842f1b0054230325f55296cbc88628b3f88fa88
|
[
"MIT"
] | 1
|
2021-08-04T03:08:07.000Z
|
2021-08-04T03:08:07.000Z
|
uno_ct_v3.py
|
simple-circuit/Component-Curve-Tracer
|
3842f1b0054230325f55296cbc88628b3f88fa88
|
[
"MIT"
] | null | null | null |
uno_ct_v3.py
|
simple-circuit/Component-Curve-Tracer
|
3842f1b0054230325f55296cbc88628b3f88fa88
|
[
"MIT"
] | 1
|
2021-08-29T14:05:42.000Z
|
2021-08-29T14:05:42.000Z
|
# Uno PWM bipolar curve tracer app by simple-circuit 12-22-19
# rev 3 1-13-20
from tkinter import *
from tkinter import ttk
from tkinter import filedialog
from tkinter import font
import numpy as np
import serial
root = Tk()
default_font = font.nametofont("TkDefaultFont")
default_font.configure(size=9)
canvas = Canvas(root)
root.geometry("720x540")
root.title('Uno Curve Tracer in Python')
canvas.grid(column=0, row=0, sticky=(N,W,E,S))
root.grid_columnconfigure(0, weight=1)
root.grid_rowconfigure(0, weight=1)
xtvar = BooleanVar()
xtvar.set(False)
contvar = BooleanVar()
contvar.set(False)
crampvar = BooleanVar()
crampvar.set(False)
#ser = serial.Serial('/dev/ttyACM0', baudrate=115200, timeout = 1)
ser = serial.Serial('COM10', baudrate=115200, timeout = 1)
def globalVar():
global x
global y
global tms
global mkv,mki,mkt
global dv,di,dt
x = np.zeros((10,256))
y = np.zeros((10,256))
tms = np.zeros((10,256))
mkv = np.zeros((1))
mki = np.zeros((1))
mkt = np.zeros((1))
dv = np.zeros((1))
di = np.zeros((1))
dt = np.zeros((1))
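# Redraw the selected trace(s) and cursor either as a V-I curve or as a time plot, then refresh the voltage/current/time readouts.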
def plotxy(xtra = 0):
if xtra == 0:
canvas.delete('currentline')
canvas.delete('currentcursor')
a = int(curvar.get())
n = int(trcvar.get())
m = int(mtrcvar.get())
if m == 1:
m=n+1
n2 = n
else:
n2 = 0
if xtvar.get() == False:
if xtra == 0:
for j in range(n2,m):
for i in range(255):
canvas.create_line((256+x[j][i]*22.07, 256-y[j][i]*51.2, 256+x[j][i+1]*22.07, 256-y[j][i+1]*51.2), fill='lime', width=2, tags='currentline')
canvas.create_oval(253+x[n][a]*22.07, 253-y[n][a]*51.2, 259+x[n][a]*22.07, 259-y[n][a]*51.2, fill='', outline='white', width=1, tags='currentcursor')
else:
if xtra == 0:
for j in range(n2,m):
for i in range(255):
canvas.create_line((i*2, 256-y[j][i]*51.2, (i+1)*2, 256-y[j][i+1]*51.2), fill='blue', width=2, tags='currentline')
for i in range(255):
canvas.create_line((i*2, 256-x[j][i]*22.07, (i+1)*2, 256-x[j][i+1]*22.07), fill='orange', width=2, tags='currentline')
canvas.create_oval(a*2-3, 253-x[n][a]*22.07, a*2 + 3, 259-x[n][a]*22.07, fill='', outline='white', width=1, tags='currentcursor')
canvas.create_oval(a*2-3, 253-y[n][a]*51.2, a*2 + 3, 259-y[n][a]*51.2, fill='', outline='white', width=1, tags='currentcursor')
label1.config(text = str(format(x[n][a],'0.3f')+'V'))
label2.config(text = str(format(y[n][a],'0.3f')+'mA'))
label3.config(text = str(format(tms[n][a],'3.2f')+'ms'))
runDelta()
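# Request a sweep from the board over serial ('cct' = sine, 'mea' = ramp) and read back 256 voltage/current samples.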
def sweep(c = b'cct'):
m = int(trcvar.get())
if c == b'cct':
tms[m][:] = np.linspace(0,30.95,256,endpoint=True)
else:
tms[m][:] = np.linspace(0,240.0,256,endpoint=True)
ser.write(b'@' + c + b'0000')
for i in range(256):
line = ser.readline()
x[m][i] = int(line[line.find(c)+3:line.find(c)+7])
x[m][i] = (x[m][i]-512)*0.02266
y[m][i] = int(line[line.find(c)+7:])
y[m][i] = -(y[m][i]-509.7)*0.00977 + x[m][i]*0.0055
plotxy()
def runSine(event):
sweep(b'cct')
def runCont():
if contvar.get() == True:
sweep(b'cct')
if (crampvar.get() == True) & (contvar.get() == False):
if stepn.get() >= 0:
trcvar.set(stepn.get())
m = float(startvar.get()) + stepn.get() * float(stepvar.get())
if m > 5.0:
m = 5.0
mi = int(m*51)
ms = '@dac' + str(mi) + '\r'
ser.write(bytes(ms, 'utf-8'))
ser.readline()
stepn.set(stepn.get() + 1)
if stepn.get() > 4:
stepn.set(-1)
crampvar.set(False)
sweep(b'mea')
root.after(500, runCont)
def runRamp(event):
sweep(b'mea')
def runMouse(event):
xx = canvas.canvasx(event.x)
xx = int(xx/2)
if xx <= 255:
curvar.set(xx)
cursor.invoke("buttondown")
def runMag(xtra = 0):
m = float(sinmag.get())
if m>11.5:
m = 11.5
sinmag.invoke("buttonup")
if m<2.4:
m=2.3
sinmag.invoke("buttonup")
mi = int(m*127/11.5)
ms = '@mag' + str(mi) + '\r'
ser.write(bytes(ms, 'utf-8'))
magvar.set(m)
def runPos(xtra = 0):
m = float(posmag.get())
if m>11.5:
m = 11.5
posmag.invoke("buttonup")
if m<-11.5:
m=-11.6
posmag.invoke("buttonup")
mi = int(m*127/11.5) + 128
ms = '@pos' + str(mi) + '\r'
ser.write(bytes(ms, 'utf-8'))
posvar.set(m)
def runNeg(xtra = 0):
m = float(negmag.get())
if m>11.5:
m = 11.5
negmag.invoke("buttonup")
if m<-11.5:
m=-11.6
negmag.invoke("buttonup")
mi = int(m*127/11.5) + 128
ms = '@neg' + str(mi) + '\r'
ser.write(bytes(ms, 'utf-8'))
negvar.set(m)
def runFreq(xtra = 0):
mi = int(freq.get())
if mi>50:
mi = 50
if mi<4:
mi=4
ms = '@frq' + str(mi) + '\r'
ser.write(bytes(ms, 'utf-8'))
labelfreq.config(text = str(format(963.234/mi,'3.1f'))+'Hz')
def runStart(xtra = 0):
m = float(startvar.get())
mi = int(m*51)
ms = '@dac' + str(mi) + '\r'
ser.write(bytes(ms, 'utf-8'))
def runAdc(xtra = 0):
c = b'ad'
mi = int(adcvar.get())
ms = '@adc' + str(mi) + '\r'
ser.write(bytes(ms, 'utf-8'))
line = ser.readline()
chval = int(line[line.find(c)+4:])
labeladc.config(text = str(format(chval*5.0/1024,'1.3f'))+' V')
def runSteps(xtra = 0):
stepn.set(0)
trcvar.set(0)
mtrcvar.set(5)
crampvar.set(True)
contvar.set(False)
def runMark(c = b'cct'):
n = int(trcvar.get())
a = int(curvar.get())
mkv[0] = x[n][a]
mki[0] = y[n][a]
mkt[0] = tms[n][a]
labelm1.config(text = str(format(mkv[0],'0.3f'))+'V')
labelm2.config(text = str(format(mki[0],'0.3f'))+'mA')
labelm3.config(text = str(format(mkt[0],'3.2f'))+'ms')
runDelta()
def runDelta():
n = int(trcvar.get())
a = int(curvar.get())
mi = int(freq.get())
dv[0] = x[n][a] - mkv
di[0] = y[n][a] - mki
dt[0] = tms[n][a] - mkt
try:
f = abs(1000/dt[0])
except:
f = 10000
labeld1.config(text = str(format(dv[0],'0.3f'))+'V')
labeld2.config(text = str(format(di[0],'0.3f'))+'mA')
labeld3.config(text = str(format(dt[0],'3.2f'))+'ms ' + str(format(f,'3.1f'))+' Hz')
try:
r = abs(dv[0]/di[0]) * 1000
c = (max(y[n][:]) - min(y[n][:]))/(max(x[n][:]) - min(x[n][:])) / (2 * np.pi * 0.963234/mi)
labelr.config(text = 'R = ' + str(format(r,'6.0f'))+' ohms')
labelc.config(text = 'C = ' + str(format(c,'2.2f'))+' uF')
except:
labelr.config(text = 'R = inf ohms')
def runXt():
plotxy(0)
def runPlot(xtra = 0):
plotxy(1)
def runSave(extra = 0):
with filedialog.asksaveasfile() as f:
for i in range(256):
for j in range(10):
f.write(str(format(x[j][i],'1.3f')+' '))
f.write(str(format(y[j][i],'1.3f')+' '))
f.write(str(format(tms[j][i],'1.6f')+' '))
f.write('\n')
f.close()
def runLoad(extra = 0):
with filedialog.askopenfile() as f:
for j in range(256):
s = f.readline()
fs = s.split(' ')
for k in range(10):
x[k][j] = float(fs[k*3])
y[k][j] = float(fs[k*3+1])
tms[k][j] = float(fs[k*3+2])
f.close()
plotxy(0)
def endserial():
ser.close()
root.destroy()
globalVar()
stepn = IntVar()
stepn.set(-1)
canvas.create_rectangle((0,0,512,512),fill='green')
for i in range(11):
canvas.create_line((51.2*i, 0, 51.2*i, 512), fill='black', width=1)
canvas.create_line((0,51.2*i, 512, 51.2*i), fill='black', width=1)
for i in range(50):
canvas.create_line((10.24*i, 254, 10.24*i, 258), fill='green', width=1)
canvas.create_line((254,10.24*i, 258, 10.24*i), fill='green', width=1)
canvas.create_line((612,10,612,480), fill='grey', width=3)
canvas.create_line((520,10,605,10), fill='grey', width=3)
canvas.create_line((520,115,605,115), fill='grey', width=3)
canvas.create_line((520,220,605,220), fill='grey', width=3)
canvas.create_line((520,247,605,247), fill='grey', width=3)
canvas.create_line((520,335,605,335), fill='grey', width=3)
canvas.create_line((520,430,605,430), fill='grey', width=3)
canvas.create_line((620,10,705,10), fill='grey', width=3)
canvas.create_line((620,75,705,75), fill='grey', width=3)
canvas.create_line((620,170,705,170), fill='grey', width=3)
canvas.create_line((620,275,705,275), fill='grey', width=3)
canvas.create_line((620,370,705,370), fill='grey', width=3)
canvas.create_line((520,515,705,515), fill='grey', width=3)
trcvar = IntVar(value=0) # initial value
trc = Spinbox(canvas, from_= 0, to = 4, increment = 1, width = 1, command = plotxy, textvariable=trcvar)
trc.place(x = 620, y = 20)
trcvar.set(0)
labeltrc = Label(canvas)
labeltrc.place(x = 655, y = 20)
labeltrc.config(text = 'Trace')
mtrcvar = IntVar(value=1) # initial value
mtrc = Spinbox(canvas, from_= 1, to = 5, increment = 1, width = 1, command = plotxy, textvariable=mtrcvar)
mtrc.place(x = 620, y = 45)
mtrcvar.set(1)
labelmtrc = Label(canvas)
labelmtrc.place(x = 655, y = 45)
labelmtrc.config(text = 'Multiple')
trcsave = ttk.Button(canvas, text="Save", command = runSave)
trcsave.place(x = 620, y = 90)
trcload = ttk.Button(canvas, text="Load", command = runLoad)
trcload.place(x = 620, y = 130)
startvar = DoubleVar(value=0.6) # initial value
startval = Spinbox(canvas, from_= 0.0, to = 5.0, increment = 0.02, width = 4, command = runStart, textvariable=startvar)
startval.place(x = 620, y = 185)
startvar.set(0.6)
labelstart = Label(canvas)
labelstart.place(x = 680, y = 185)
labelstart.config(text = 'Start')
stepvar = DoubleVar(value=0.88) # initial value
stepval = Spinbox(canvas, from_= 0.0, to = 5.0, increment = 0.02, width = 4, textvariable=stepvar)
stepval.place(x = 620, y = 205)
stepvar.set(0.88)
labelstep = Label(canvas)
labelstep.place(x = 680, y = 205)
labelstep.config(text = 'Step')
trcload = ttk.Button(canvas, text="Run Steps", command = runSteps)
trcload.place(x = 620, y = 235)
adcvar = IntVar(value=0) # initial value
adcval = Spinbox(canvas, from_= 0, to = 5, increment = 1, width = 2, textvariable=adcvar)
adcval.place(x = 620, y = 325)
adcvar.set(0)
labeladc = Label(canvas)
labeladc.place(x = 660, y = 325)
labeladc.config(text = '0.000V')
adcread = ttk.Button(canvas, text="Read ADC", command = runAdc)
adcread.place(x = 620, y = 295)
cts = ttk.Button(canvas, text="Sine")
cts.place(x = 520, y = 20)
cts.bind("<Button-1>", runSine)
magvar = DoubleVar(value=11.5) # initial value
sinmag = Spinbox(canvas, from_= 2.4, to = 11.5, increment = 0.1, width = 4, command = runMag, textvariable=magvar)
sinmag.place(x = 520, y = 50)
magvar.set(11.5)
sinmag.bind("<Return>", runMag)
labelmag = Label(canvas)
labelmag.place(x = 575, y = 50)
labelmag.config(text = 'Vp')
freqvar = DoubleVar(value=16) # initial value
freq = Spinbox(canvas, from_= 4, to =50, increment = 1.0, width = 2, command = runFreq, textvariable=freqvar)
freq.place(x = 520, y = 75)
freqvar.set(16)
labelfreq = Label(canvas)
labelfreq.place(x = 555, y = 75)
labelfreq.config(text = '60.2 Hz')
cnt = ttk.Checkbutton(canvas, text="Cont. Sine", variable=contvar, onvalue=True)
cnt.place(x = 520, y = 95)
ctr = ttk.Button(canvas, text="Ramp")
ctr.place(x = 520, y = 125)
ctr.bind("<Button-1>", runRamp)
posvar = DoubleVar(value=11.5) # initial value
posmag = Spinbox(canvas, from_= -11.5, to = 11.5, increment = 0.1, width = 4, command = runPos, textvariable=posvar)
posmag.place(x = 520, y = 155)
posvar.set(11.5)
posmag.bind("<Return>", runPos)
labelpos = Label(canvas)
labelpos.place(x = 572, y = 155)
labelpos.config(text = 'Vmax')
negvar = DoubleVar(value=-11.5) # initial value
negmag = Spinbox(canvas, from_= -11.5, to = 11.5, increment = 0.1, width = 4, command = runNeg, textvariable=negvar)
negmag.place(x = 520, y = 180)
negvar.set(-11.5)
negmag.bind("<Return>", runNeg)
labelneg = Label(canvas)
labelneg.place(x = 572, y = 180)
labelneg.config(text = 'Vmin')
cramp = ttk.Checkbutton(canvas, text="Cont. Ramp", variable=crampvar, onvalue=True)
cramp.place(x = 520, y = 200)
xt = ttk.Checkbutton(canvas, text="X-t Plot", variable=xtvar, command=runXt, onvalue=True)
xt.place(x = 520, y = 225)
curvar = IntVar(value= 0)
cursor = Spinbox(canvas, from_= 0, to = 255, width = 3, command = runPlot, textvariable = curvar)
cursor.place(x = 520, y = 255)
cursor.bind("<Return>", runPlot)
labelcur = Label(canvas)
labelcur.place(x = 565, y = 255)
labelcur.config(text = 'Cursor')
label1 = Label(canvas)
label1.place(x = 520, y = 275)
label1.config(text = 'V')
label2 = Label(canvas)
label2.place(x = 520, y = 295)
label2.config(text = 'mA')
label3 = Label(canvas)
label3.place(x = 520, y = 315)
label3.config(text = 'ms')
mrk = ttk.Button(canvas, text="Mark")
mrk.place(x = 520, y = 345, height = 22)
mrk.bind("<Button-1>", runMark)
labelm1 = Label(canvas)
labelm1.place(x = 520, y = 370)
labelm1.config(text = 'V')
labelm2 = Label(canvas)
labelm2.place(x = 520, y = 390)
labelm2.config(text = 'mA')
labelm3 = Label(canvas)
labelm3.place(x = 520, y = 410)
labelm3.config(text = 'ms')
labeldt = Label(canvas)
labeldt.place(x = 540, y = 433)
labeldt.config(text = 'Delta')
labeld1 = Label(canvas)
labeld1.place(x = 520, y = 450)
labeld1.config(text = 'V')
labeld2 = Label(canvas)
labeld2.place(x = 520, y = 470)
labeld2.config(text = 'mA')
labeld3 = Label(canvas)
labeld3.place(x = 520, y = 490)
labeld3.config(text = 'ms')
labelr = Label(canvas)
labelr.place(x = 520, y = 520)
labelr.config(text = 'ohms')
labelc = Label(canvas)
labelc.place(x = 640, y = 520)
labelc.config(text = 'C = uF')
canvas.bind("<Button-1>", runMouse)
plotxy()
root.after(0, runCont)
root.wm_protocol ("WM_DELETE_WINDOW", endserial)
root.mainloop()
| 29.745833
| 172
| 0.583975
| 2,281
| 14,278
| 3.63744
| 0.162648
| 0.030372
| 0.022779
| 0.02531
| 0.280463
| 0.227914
| 0.203447
| 0.164758
| 0.091358
| 0.067012
| 0
| 0.097384
| 0.218238
| 14,278
| 479
| 173
| 29.807933
| 0.645942
| 0.01849
| 0
| 0.193384
| 0
| 0
| 0.05571
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053435
| false
| 0
| 0.015267
| 0
| 0.068702
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef911bdd33ff81cae4898bfd37e8a89b765f201c
| 2,565
|
py
|
Python
|
src/medical_test_service/medical_test.py
|
phamnam-mta/know-life
|
f7c226c41e315f21b5d7fe2ccbc9ec4f9961ed1d
|
[
"MIT"
] | null | null | null |
src/medical_test_service/medical_test.py
|
phamnam-mta/know-life
|
f7c226c41e315f21b5d7fe2ccbc9ec4f9961ed1d
|
[
"MIT"
] | null | null | null |
src/medical_test_service/medical_test.py
|
phamnam-mta/know-life
|
f7c226c41e315f21b5d7fe2ccbc9ec4f9961ed1d
|
[
"MIT"
] | null | null | null |
import logging
from typing import Text, List
from src.utils.io import read_json
from src.utils.fuzzy import is_relevant_string
from src.utils.common import is_float
from src.utils.constants import (
MEDICAL_TEST_PATH,
QUANTITATIVE_PATH,
POSITIVE_TEXT,
TestResult
)
logger = logging.getLogger(__name__)
class MedicalTest():
def __init__(self, medical_test_path=MEDICAL_TEST_PATH, quantitative_path= QUANTITATIVE_PATH) -> None:
self.medical_test = read_json(medical_test_path)
self.quantitative = read_json(quantitative_path)
logger.info("Medical test loaded")
def get_suggestions(self, indicators: List):
suggestions = []
count = 0
for i in indicators:
count += 1
sg = {
"id": count,
"input": i,
}
references = []
for m in self.medical_test:
if is_relevant_string(i["test_name"], m["name"], method=['exact','fuzzy'], score=90):
sg["name"] = m["name"]
sg["overview"] = m["overview"]
if m["references"]:
references.extend(m["references"])
for q in self.quantitative:
if q["medical_test"]["id"] == m["id"]:
if q["test_result"] == TestResult.positive.value and is_relevant_string(str(i["result"]), POSITIVE_TEXT, score=90, remove_accent=True):
sg["note"] = q["note"]
sg["cause"] = q["cause"]
sg["recommend"] = q["recommend"]
if m["references"]:
references.extend(m["references"])
break
elif is_float(str(i["result"])):
test_result = float(str(i["result"]))
if test_result >= q["min_value"] and test_result <= q["max_value"]:
sg["note"] = q["note"]
sg["cause"] = q["cause"]
sg["recommend"] = q["recommend"]
if m["references"]:
references.extend(m["references"])
break
break
sg["references"] = list(dict.fromkeys(references))
suggestions.append(sg)
return suggestions
| 43.474576
| 163
| 0.474854
| 246
| 2,565
| 4.768293
| 0.304878
| 0.075021
| 0.040921
| 0.058824
| 0.240409
| 0.187553
| 0.187553
| 0.153453
| 0.153453
| 0.153453
| 0
| 0.003992
| 0.414035
| 2,565
| 59
| 164
| 43.474576
| 0.776447
| 0
| 0
| 0.272727
| 0
| 0
| 0.10834
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036364
| false
| 0
| 0.109091
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef933f2244982928a2ce88206760be93146f1a77
| 1,064
|
py
|
Python
|
scam.py
|
TheToddLuci0/Tarkov-Scammer
|
5fced3952c6cec72fe3eb85384bc11f65ee6af9c
|
[
"BSD-3-Clause"
] | 2
|
2021-02-09T19:13:14.000Z
|
2021-02-23T08:41:14.000Z
|
scam.py
|
TheToddLuci0/Tarkov-Scammer
|
5fced3952c6cec72fe3eb85384bc11f65ee6af9c
|
[
"BSD-3-Clause"
] | null | null | null |
scam.py
|
TheToddLuci0/Tarkov-Scammer
|
5fced3952c6cec72fe3eb85384bc11f65ee6af9c
|
[
"BSD-3-Clause"
] | null | null | null |
import requests
import sys
from time import sleep
from tabulate import tabulate
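# Fetch all items from the tarkov-market API and tabulate those whose trader price exceeds the flea-market price, sorted by profit.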
def get_scams(api_key):
scams = []
headers = {"x-api-key": api_key}
r = requests.get('https://tarkov-market.com/api/v1/items/all', headers=headers)
for i in r.json():
r2 = requests.get('https://tarkov-market.com/api/v1/item?uid='+i['uid'], headers=headers)
while r2.status_code == 429:
print("Got rate limited, sleeping")
sleep(15)
r2 = requests.get('https://tarkov-market.com/api/v1/item?uid=' + i['uid'], headers=headers)
data = r2.json()[0]
if data['traderPrice'] > data["price"]:
scams.append([i['name'], data['price'], data['traderName'], data['traderPrice']-data['price']])
scams.sort(key=lambda x: x[3])
print(tabulate(scams, headers=["Item", "Market Price", "Trader", "Profit"]))
if __name__=='__main__':
try:
with open('.secret', 'r') as f:
secret = f.read().strip()
except OSError:  # no .secret file available; fall back to the API key passed on the command line
secret = sys.argv[1]
get_scams(secret)
| 34.322581
| 107
| 0.599624
| 144
| 1,064
| 4.340278
| 0.451389
| 0.0288
| 0.0768
| 0.1056
| 0.352
| 0.2592
| 0.2592
| 0.2592
| 0.2016
| 0.2016
| 0
| 0.018072
| 0.219925
| 1,064
| 30
| 108
| 35.466667
| 0.73494
| 0
| 0
| 0.076923
| 0
| 0
| 0.246241
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.153846
| 0
| 0.192308
| 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef978c724ad463ecd7562dae7e149d5ae0ce4282
| 677
|
py
|
Python
|
mapclientplugins/scaffoldfiniteelementmeshfitterstep/model/imageplanemodel.py
|
mahyar-osn/mapclientplugins.scaffoldfiniteelementmeshfitterstep
|
b35f6c0b2e264e2913d0a1c432bf89c7b329bf52
|
[
"Apache-2.0"
] | null | null | null |
mapclientplugins/scaffoldfiniteelementmeshfitterstep/model/imageplanemodel.py
|
mahyar-osn/mapclientplugins.scaffoldfiniteelementmeshfitterstep
|
b35f6c0b2e264e2913d0a1c432bf89c7b329bf52
|
[
"Apache-2.0"
] | null | null | null |
mapclientplugins/scaffoldfiniteelementmeshfitterstep/model/imageplanemodel.py
|
mahyar-osn/mapclientplugins.scaffoldfiniteelementmeshfitterstep
|
b35f6c0b2e264e2913d0a1c432bf89c7b329bf52
|
[
"Apache-2.0"
] | null | null | null |
from opencmiss.utils.maths.algorithms import calculate_line_plane_intersection
class ImagePlaneModel(object):
def __init__(self, master_model):
self._master_model = master_model
self._region = None
self._frames_per_second = -1
self._images_file_name_listing = []
self._image_dimensions = [-1, -1]
self._duration_field = None
self._image_based_material = None
self._scaled_coordinate_field = None
self._time_sequence = []
def set_image_information(self, frames_per_second, image_dimensions):
self._frames_per_second = frames_per_second
self._image_dimensions = image_dimensions
| 33.85
| 78
| 0.713442
| 80
| 677
| 5.4875
| 0.5
| 0.072893
| 0.136674
| 0.129841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00566
| 0.217134
| 677
| 19
| 79
| 35.631579
| 0.822642
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0.066667
| 0
| 0.266667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef9836ec7a3a89d88130ef5b51f413cd84a57435
| 2,152
|
py
|
Python
|
test/test/host_test_default.py
|
noralsydmp/mbed-os-tools
|
5a14958aa49eb5764afba8e1dc3208cae2955cd7
|
[
"Apache-2.0"
] | 29
|
2018-11-30T19:45:22.000Z
|
2022-03-29T17:02:16.000Z
|
test/test/host_test_default.py
|
noralsydmp/mbed-os-tools
|
5a14958aa49eb5764afba8e1dc3208cae2955cd7
|
[
"Apache-2.0"
] | 160
|
2018-11-30T21:55:52.000Z
|
2022-01-18T10:58:09.000Z
|
test/test/host_test_default.py
|
noralsydmp/mbed-os-tools
|
5a14958aa49eb5764afba8e1dc3208cae2955cd7
|
[
"Apache-2.0"
] | 73
|
2018-11-30T21:34:41.000Z
|
2021-10-02T05:51:40.000Z
|
# Copyright (c) 2018, Arm Limited and affiliates.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from mbed_os_tools.test.host_tests_runner.host_test_default import DefaultTestSelector
class HostTestDefaultTestCase(unittest.TestCase):
def test_os_info(self):
expected = {
"grm_module" : "module_name",
"grm_host" : "10.2.123.43",
"grm_port" : "3334",
}
# Case that includes an IP address but no protocol
arg = [expected["grm_module"], expected["grm_host"], expected["grm_port"]]
result = DefaultTestSelector._parse_grm(":".join(arg))
self.assertEqual(result, expected)
# Case that includes an IP address but no protocol nor a port
expected["grm_port"] = None
arg = [expected["grm_module"], expected["grm_host"]]
result = DefaultTestSelector._parse_grm(":".join(arg))
self.assertEqual(result, expected)
# Case that includes an IP address and a protocol
expected["grm_host"] = "https://10.2.123.43"
expected["grm_port"] = "443"
arg = [expected["grm_module"], expected["grm_host"], expected["grm_port"]]
result = DefaultTestSelector._parse_grm(":".join(arg))
self.assertEqual(result, expected)
# Case that includes an IP address and a protocol, but no port
expected["grm_port"] = None
arg = [expected["grm_module"], expected["grm_host"]]
result = DefaultTestSelector._parse_grm(":".join(arg))
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
| 39.127273
| 86
| 0.676115
| 279
| 2,152
| 5.064516
| 0.405018
| 0.116773
| 0.060156
| 0.050955
| 0.462845
| 0.462845
| 0.462845
| 0.462845
| 0.462845
| 0.425336
| 0
| 0.019492
| 0.21329
| 2,152
| 54
| 87
| 39.851852
| 0.815121
| 0.383829
| 0
| 0.518519
| 0
| 0
| 0.157613
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 1
| 0.037037
| false
| 0
| 0.074074
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef9847d747aab77361f5e75e1a5b9c126c9e90f9
| 3,359
|
py
|
Python
|
lib/surface/debug/logpoints/delete.py
|
bopopescu/SDK
|
e6d9aaee2456f706d1d86e8ec2a41d146e33550d
|
[
"Apache-2.0"
] | null | null | null |
lib/surface/debug/logpoints/delete.py
|
bopopescu/SDK
|
e6d9aaee2456f706d1d86e8ec2a41d146e33550d
|
[
"Apache-2.0"
] | null | null | null |
lib/surface/debug/logpoints/delete.py
|
bopopescu/SDK
|
e6d9aaee2456f706d1d86e8ec2a41d146e33550d
|
[
"Apache-2.0"
] | 1
|
2020-07-25T12:23:41.000Z
|
2020-07-25T12:23:41.000Z
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Delete command for gcloud debug logpoints command group."""
from googlecloudsdk.api_lib.debug import debug
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
class Delete(base.DeleteCommand):
"""Delete debug logpoints.
This command deletes logpoints from a Cloud Debugger debug target.
"""
@staticmethod
def Args(parser):
parser.add_argument(
'id_or_location_regexp', metavar='(ID|LOCATION-REGEXP)', nargs='+',
help="""\
A logpoint ID or a regular expression to match against logpoint
locations. The logpoint with the given ID, or all logpoints whose
locations (file:line) contain the regular expression, will be
deleted.
""")
parser.add_argument(
'--all-users', action='store_true', default=False,
help="""\
If set, any location regexp will match logpoints from all users,
rather than only logpoints created by the current user. This flag is
not required when specifying the exact ID of a logpoint created by
another user.
""")
parser.add_argument(
'--include-inactive', action='store_true', default=False,
help="""\
If set, any location regexp will also match inactive logpoints,
rather than only logpoints which have not expired. This flag is
not required when specifying the exact ID of an inactive logpoint.
""")
def Run(self, args):
"""Run the delete command."""
project_id = properties.VALUES.core.project.Get(required=True)
debugger = debug.Debugger(project_id)
debuggee = debugger.FindDebuggee(args.target)
logpoints = debuggee.ListMatchingBreakpoints(
args.id_or_location_regexp, include_all_users=args.all_users,
include_inactive=args.include_inactive,
restrict_to_type=debugger.LOGPOINT_TYPE)
for s in logpoints:
debuggee.DeleteBreakpoint(s.id)
return logpoints
def Collection(self):
return 'debug.logpoints'
def Format(self, args):
"""Format for printing the results of the Run() method.
Args:
args: The arguments that command was run with.
Returns:
A format string
"""
fields = ['id']
if args.all_users:
fields.append('userEmail:label=USER')
fields.append('location')
fields.append('logLevel:label=LEVEL')
fields.append('short_status():label="STATUS BEFORE DELETION"')
return 'table({0})'.format(','.join(fields))
def Epilog(self, resources_were_displayed):
if resources_were_displayed:
log.status.write('Deleted Logpoints')
else:
log.status.write('No logpoints matched the requested values')
| 36.51087
| 80
| 0.693659
| 432
| 3,359
| 5.328704
| 0.421296
| 0.026064
| 0.022155
| 0.013901
| 0.090356
| 0.090356
| 0.090356
| 0.090356
| 0.090356
| 0.090356
| 0
| 0.003429
| 0.218517
| 3,359
| 91
| 81
| 36.912088
| 0.873524
| 0.261387
| 0
| 0.160714
| 0
| 0
| 0.430636
| 0.020231
| 0
| 0
| 0
| 0
| 0
| 1
| 0.089286
| false
| 0
| 0.071429
| 0.017857
| 0.232143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef99c583c045deb51df0a2fd8b0f81216762f3eb
| 3,844
|
py
|
Python
|
day-07/solution.py
|
wangjoshuah/Advent-Of-Code-2018
|
6bda7956bb7c6f9a54feffb19147961b56dc5d81
|
[
"MIT"
] | null | null | null |
day-07/solution.py
|
wangjoshuah/Advent-Of-Code-2018
|
6bda7956bb7c6f9a54feffb19147961b56dc5d81
|
[
"MIT"
] | null | null | null |
day-07/solution.py
|
wangjoshuah/Advent-Of-Code-2018
|
6bda7956bb7c6f9a54feffb19147961b56dc5d81
|
[
"MIT"
] | null | null | null |
# directed graph problem or breadth first search variant
from collections import defaultdict
import re
input_file = open("input.txt", "r")
input_lines = input_file.readlines()
letter_value = {
'A': 1,
'B': 2,
'C': 3,
'D': 4,
'E': 5,
'F': 6,
'G': 7,
'H': 8,
'I': 9,
'J': 10,
'K': 11,
'L': 12,
'M': 13,
'N': 14,
'O': 15,
'P': 16,
'Q': 17,
'R': 18,
'S': 19,
'T': 20,
'U': 21,
'V': 22,
'W': 23,
'X': 24,
'Y': 25,
'Z': 26
}
# construct graph of nodes and edges
# Read nodes and edges from input with regex
def construct_graph(lines):
# Graph is a Dictionary of String to Set
# { Node : Set of children }
edges = defaultdict(set)
nodes = set()
pattern = r"Step (.?) must be finished before step (.?) can begin\."
for line in lines:
matches = re.search(pattern, line)
edges[matches.group(1)].add(matches.group(2))
nodes.add(matches.group(1))
nodes.add(matches.group(2))
return nodes, edges
# A set of possible nodes to work on (starts with C)
# pick the first alphabetical node and remove it and its edges
def find_all_nodes_to_work(nodes, edges):
possible_nodes_to_work = nodes.copy()
for parent, children in edges.items():
for child in children:
if child in possible_nodes_to_work:
possible_nodes_to_work.remove(child)
sorted_work = list(possible_nodes_to_work)
sorted_work.sort()
return sorted_work
def find_next_node_to_work(nodes, edges):
return find_all_nodes_to_work(nodes, edges)[0]
def work_nodes(nodes, edges):
work_order = ""
while len(nodes) > 0:
node_to_work = find_next_node_to_work(nodes, edges)
nodes.remove(node_to_work)
if node_to_work in edges:
del edges[node_to_work]
work_order += node_to_work
return work_order
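# A worker holds one node at a time and counts down (letter value + base time) seconds until the node is finished.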
class Worker:
def __init__(self, base_time) -> None:
super().__init__()
self.current_node = None
self.seconds_left = 0
self.free = True
self.base_time = base_time
def assign(self, node):
self.current_node = node
self.seconds_left = letter_value[node] + self.base_time
self.free = False
def tick(self):
self.seconds_left -= 1
if self.seconds_left <= 0:
finished_node = self.current_node
self.current_node = None
self.free = True
self.seconds_left = 0
return finished_node
def work_nodes_in_parallel(nodes, edges, workers, base_time):
timer = 0
worker_pool = list()
nodes_worked = set()
for i in range(workers):
worker_pool.append(Worker(base_time))
while len(nodes) > 0:
print(f"Second {timer}")
nodes_to_work = find_all_nodes_to_work(nodes, edges)
for node in nodes_to_work:
if node not in nodes_worked:
for worker in worker_pool:
if worker.free and node not in nodes_worked:
worker.assign(node)
nodes_worked.add(node)
for worker in worker_pool:
print(f"Worker is working on {worker.current_node} with {worker.seconds_left} seconds left.")
finished_node = worker.tick()
if finished_node is not None:
# if a worker finishes a node
nodes.remove(finished_node)
nodes_worked.remove(finished_node)
if finished_node in edges:
del edges[finished_node]
timer += 1
return timer
# Part 1
# print(work_nodes(construct_graph(input_lines)))
# Part 2
nodes, edges = construct_graph(input_lines)
total_time = work_nodes_in_parallel(nodes, edges, 5, 60)
print(f"It took {total_time} seconds")
| 27.070423
| 105
| 0.601977
| 537
| 3,844
| 4.096834
| 0.301676
| 0.046364
| 0.05
| 0.043182
| 0.148182
| 0.09
| 0.063636
| 0
| 0
| 0
| 0
| 0.022551
| 0.296306
| 3,844
| 141
| 106
| 27.262411
| 0.790758
| 0.104058
| 0
| 0.091743
| 0
| 0
| 0.062955
| 0.012241
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073395
| false
| 0
| 0.018349
| 0.009174
| 0.155963
| 0.027523
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef99d022220363214630da6ad916a3a41900d8d7
| 2,862
|
py
|
Python
|
src/infi/pypi_manager/scripts/compare_pypi_repos.py
|
Infinidat/infi.pypi_manager
|
7b5774b395ef47a23be2957a091b607b35a049f2
|
[
"BSD-3-Clause"
] | null | null | null |
src/infi/pypi_manager/scripts/compare_pypi_repos.py
|
Infinidat/infi.pypi_manager
|
7b5774b395ef47a23be2957a091b607b35a049f2
|
[
"BSD-3-Clause"
] | 1
|
2020-11-05T10:04:45.000Z
|
2020-11-05T11:03:25.000Z
|
src/infi/pypi_manager/scripts/compare_pypi_repos.py
|
Infinidat/infi.pypi_manager
|
7b5774b395ef47a23be2957a091b607b35a049f2
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import print_function
from .. import PyPI, DjangoPyPI, PackageNotFound
from prettytable import PrettyTable
from pkg_resources import parse_version, resource_filename
import requests
import re
try:
from urlparse import unquote
except ImportError:
# Python 3
from urllib.parse import unquote
def get_versions_from_reference(reference_repo):
reference_pypi_html = requests.get("{}/pypi".format(reference_repo.server)).text
search_result = re.findall("""href=["'](?:/pypi/)?([^/]+)/([^/]+)/["']""", reference_pypi_html)
return dict((k, unquote(v)) for k, v in search_result)
def get_skipped_packages():
with open(resource_filename(__name__, "skipped_packages.txt"), "r") as fd:
return [line.split("#")[0].strip() for line in fd.readlines()]
def get_major(version_string):
return int(version_string.split('.')[0])
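# Compare versions between the two repos and print tables of upgradable, downgradable and skipped packages, plus the mirror commands to run.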
def compare_pypi_repos(reference_repo, other_repo):
upgrade_packages = []
upgrade_table = PrettyTable(["Package", reference_repo.server, other_repo.server, 'Major'])
downgrade_table = PrettyTable(["Package", reference_repo.server, other_repo.server])
skipped_table = PrettyTable(["Package", reference_repo.server, other_repo.server])
skipped_packages = get_skipped_packages()
reference_repo_versions = get_versions_from_reference(reference_repo)
packages_to_check = list(reference_repo_versions.keys())
for name in sorted(packages_to_check):
try:
reference_repo_version = reference_repo_versions[name]
other_repo_version = other_repo.get_latest_version(name)
except PackageNotFound:
continue
if other_repo_version != reference_repo_version:
if name in skipped_packages or any(x in other_repo_version for x in ['a', 'b', 'dev', 'post', 'rc']):
skipped_table.add_row([name, reference_repo_version, other_repo_version])
elif parse_version(reference_repo_version) < parse_version(other_repo_version):
major_change = get_major(reference_repo_version) != get_major(other_repo_version)
upgrade_table.add_row([name, reference_repo_version, other_repo_version, 'Yes' if major_change else ''])
upgrade_packages.append((name, other_repo_version))
else:
downgrade_table.add_row([name, reference_repo_version, other_repo_version])
print("Upgradable Packages:")
print(upgrade_table)
print()
print("Downgradable Packages:")
print(downgrade_table)
print()
print("Skipped Packages:")
print(skipped_table)
print()
print("Upgrade commands:")
for name, version in upgrade_packages:
print("mirror_package %s %s" % (name, version))
def main():
import sys
local = DjangoPyPI(sys.argv[-1])
pypi = PyPI()
compare_pypi_repos(local, pypi)
| 40.885714
| 120
| 0.70615
| 353
| 2,862
| 5.393768
| 0.286119
| 0.116071
| 0.07563
| 0.042017
| 0.216387
| 0.216387
| 0.177521
| 0.177521
| 0.177521
| 0.147584
| 0
| 0.001712
| 0.183438
| 2,862
| 69
| 121
| 41.478261
| 0.813008
| 0.002795
| 0
| 0.084746
| 0
| 0
| 0.07223
| 0.014025
| 0
| 0
| 0
| 0
| 0
| 1
| 0.084746
| false
| 0
| 0.169492
| 0.016949
| 0.305085
| 0.20339
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef9afc42c7347b259e757e59b46b756f7ac092fc
| 6,954
|
py
|
Python
|
src/GNLSE_specific.py
|
Computational-Nonlinear-Optics-ORC/Compare-CNLSE
|
9b56cedbca2a06af3baa9f64e46ebfd4263f86c2
|
[
"MIT"
] | null | null | null |
src/GNLSE_specific.py
|
Computational-Nonlinear-Optics-ORC/Compare-CNLSE
|
9b56cedbca2a06af3baa9f64e46ebfd4263f86c2
|
[
"MIT"
] | null | null | null |
src/GNLSE_specific.py
|
Computational-Nonlinear-Optics-ORC/Compare-CNLSE
|
9b56cedbca2a06af3baa9f64e46ebfd4263f86c2
|
[
"MIT"
] | 3
|
2018-06-04T18:43:03.000Z
|
2021-11-24T07:57:03.000Z
|
import numpy as np
from scipy.interpolate import InterpolatedUnivariateSpline
from scipy.fftpack import fft
from combined_functions import check_ft_grid
from scipy.constants import pi, c, hbar
from numpy.fft import fftshift
from scipy.io import loadmat
from time import time
import sys
import matplotlib.pyplot as plt
from scipy.integrate import simps
def fv_creator(fp, df, F, int_fwm):
"""
Creates a frequency grid such that the estimated MI-FWM bands
fall on the grid, and extends it so that FFT boundary
problems are avoided.
Inputs::
lamp: wavelength of the pump (float)
lamda_c: zero dispersion wavelength (ZDW) (float)
int_fwm: class that holds nt (number of points in each band)
betas: Taylor coefficients of beta around the ZDW (Array)
M : The M coefficient (or 1/A_eff) (float)
P_p: pump power
Df_band: band frequency bandwidth in Thz, (float)
Output::
fv: Frequency vector of bands (Array of shape [nt])
"""
f_centrals = [fp + i * F for i in range(-1, 2)]
fv1 = np.linspace(f_centrals[0], f_centrals[1],
int_fwm.nt//4 - 1, endpoint=False)
df = fv1[1] - fv1[0]
fv2 = np.linspace(f_centrals[1], f_centrals[2], int_fwm.nt//4)
try:
assert df == fv2[1] - fv2[0]
except AssertionError:
print(df, fv2[1] - fv2[0])
fv0, fv3 = np.zeros(int_fwm.nt//4 + 1), np.zeros(int_fwm.nt//4)
fv0[-1] = fv1[0] - df
fv3[0] = fv2[-1] + df
for i in range(1, len(fv3)):
fv3[i] = fv3[i - 1] + df
for i in range(len(fv0) - 2, -1, -1):
fv0[i] = fv0[i + 1] - df
assert not(np.any(fv0 == fv1))
assert not(np.any(fv1 == fv2))
assert not(np.any(fv2 == fv3))
fv = np.concatenate((fv0, fv1, fv2, fv3))
for i in range(3):
assert f_centrals[i] in fv
check_ft_grid(fv, df)
p_pos = np.where(np.abs(fv - fp) == np.min(np.abs(fv - fp)))[0]
return fv, p_pos, f_centrals
class raman_object(object):
"""
Warning: hf comes back normalised but ht does not!!!
"""
def __init__(self, b=None):
self.how = b
self.hf = None
self.ht = None
def raman_load(self, t, dt):
if self.how == 'analytic':
t11 = 12.2e-3 # [ps]
t2 = 32e-3 # [ps]
# analytical response
ht = (t11**2 + t2**2)/(t11*t2**2) * \
np.exp(-t/t2*(t >= 0))*np.sin(t/t11)*(t >= 0)
self.ht = ht # * dt
ht_norm = ht / simps(ht, t)
# Fourier transform of the analytic nonlinear response
self.hf = fft(ht_norm)
elif self.how == 'load':
# loads the measured response (Stolen et al. JOSAB 1989)
mat = loadmat('loading_data/silicaRaman.mat')
ht = mat['ht']
t1 = mat['t1']
htmeas_func = InterpolatedUnivariateSpline(t1*1e-3, ht)
ht = htmeas_func(t)
ht *= (t > 0)*(t < 1) # only measured between +/- 1 ps)
self.ht = ht / simps(ht, t)
ht_norm = ht / simps(ht, t)
# Fourier transform of the measured nonlinear response
self.hf = fft(ht_norm)
else:
sys.exit("No raman response on the GNLSE")
return None
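# Holds the time/frequency grids and derived constants (time step, shock time, angular frequencies) for the simulation window.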
class sim_window(object):
def __init__(self, fv, lamda, F, lamda_c, int_fwm, where):
self.fv = fv
self.type = 'GNLSE'
self.lamda = lamda
self.fp = 1e-12*c/self.lamda
self.fmed = 0.5*(fv[-1] + fv[0])*1e12 # [Hz]
self.deltaf = np.max(self.fv) - np.min(self.fv) # [THz]
self.df = self.deltaf/int_fwm.nt # [THz]
self.T = 1 / self.df # Time window (period)[ps]
self.woffset = 2*pi*(self.fmed - c/lamda)*1e-12 # [rad/ps]
self.w0 = 2*pi*self.fmed # central angular frequency [rad/s]
self.tsh = (1/self.w0)*1e12 # shock time [ps]
self.dt = self.T/int_fwm.nt # timestep (dt) [ps]
# time vector [ps]
self.t = (range(int_fwm.nt)-np.ones(int_fwm.nt)*int_fwm.nt/2)*self.dt
self.w = fftshift(2*pi * (self.fv - 1e-12*self.fmed))
self.t_band = self.t
self.lv = 1e-3*c/self.fv
self.zv = int_fwm.dzstep*np.asarray(range(0, 2))
self.p_pos = where
self.F = F
self.f_centrals = np.array(
[1e-12 * c/lamda + i * F for i in range(-1, 2)])
self.w_tiled = fftshift(
2*pi * (self.fv - self.f_centrals[1])) # w of self-step
class Loss(object):
def __init__(self, int_fwm, sim_wind, amax=None, apart_div=8):
"""
Initialise the Loss class. Takes in the general parameters and
the frequency window, and from these determines where
the loss becomes frequency dependent, with the default value
being an 8th of the difference between the max and min frequencies.
"""
self.alpha = int_fwm.alphadB/4.343
if amax is None:
self.amax = self.alpha
else:
self.amax = amax/4.343
self.flims_large = (np.min(sim_wind.fv), np.max(sim_wind.fv))
try:
self.begin = apart_div[0]
self.end = apart_div[1]
except TypeError:
self.apart = np.abs(self.flims_large[1] - self.flims_large[0])
self.apart /= apart_div
self.begin = self.flims_large[0] + self.apart
self.end = self.flims_large[1] - self.apart
def atten_func_full(self, fv):
aten = []
a_s = ((self.amax - self.alpha) / (self.flims_large[0] - self.begin),
(self.amax - self.alpha) / (self.flims_large[1] - self.end))
b_s = (-a_s[0] * self.begin, -a_s[1] * self.end)
for f in fv:
if f <= self.begin:
aten.append(a_s[0] * f + b_s[0])
elif f >= self.end:
aten.append(a_s[1] * f + b_s[1])
else:
aten.append(0)
return np.asanyarray(aten) + self.alpha
def plot(self, fv):
fig = plt.figure()
y = self.atten_func_full(fv)
plt.plot(fv, y)
plt.xlabel("Frequency (Thz)")
plt.ylabel("Attenuation (cm -1 )")
plt.savefig(
"loss_function_fibre.png", bbox_inches='tight')
plt.close(fig)
class Noise(object):
def __init__(self, int_fwm, sim_wind):
self.pquant = np.sum(
hbar*(sim_wind.w*1e12 + sim_wind.w0)/(sim_wind.T*1e-12))
self.pquant = (self.pquant/2)**0.5
return None
def noise_func(self, int_fwm):
seed = np.random.seed(int(time()*np.random.rand()))
noise = self.pquant * (np.random.randn(int_fwm.nt) +
1j*np.random.randn(int_fwm.nt))
return noise
def noise_func_freq(self, int_fwm, sim_wind):
noise = self.noise_func(int_fwm)
noise_freq = fftshift(fft(noise))
return noise_freq
| 33.757282
| 77
| 0.560397
| 1,049
| 6,954
| 3.608198
| 0.255481
| 0.033289
| 0.02325
| 0.014531
| 0.149802
| 0.116248
| 0.076618
| 0.043329
| 0.019551
| 0.019551
| 0
| 0.035759
| 0.308312
| 6,954
| 205
| 78
| 33.921951
| 0.751143
| 0.178746
| 0
| 0.077465
| 0
| 0
| 0.025507
| 0.009161
| 0
| 0
| 0
| 0
| 0.042254
| 1
| 0.070423
| false
| 0
| 0.077465
| 0
| 0.21831
| 0.007042
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef9be069a058d33204131a55950dcf855daf7d54
| 1,164
|
py
|
Python
|
example.py
|
jasonkatz/py-graphql-client
|
9f938f3d379a8f4d8810961c87baf25dbe35889d
|
[
"BSD-3-Clause"
] | 38
|
2019-03-22T16:27:08.000Z
|
2022-03-30T11:07:55.000Z
|
example.py
|
anthonyhiga/py-graphql-client
|
9c59b32bae5c5c6a12634b2bd6353f76328aa31a
|
[
"BSD-3-Clause"
] | 31
|
2019-03-25T20:28:40.000Z
|
2022-01-26T21:22:47.000Z
|
example.py
|
anthonyhiga/py-graphql-client
|
9c59b32bae5c5c6a12634b2bd6353f76328aa31a
|
[
"BSD-3-Clause"
] | 11
|
2019-03-25T18:54:32.000Z
|
2021-09-11T17:00:27.000Z
|
import time
from graphql_client import GraphQLClient
# some sample GraphQL server which supports websocket transport and subscription
client = GraphQLClient('ws://localhost:9001')
# Simple Query Example
# query example with GraphQL variables
query = """
query getUser($userId: Int!) {
users (id: $userId) {
id
username
}
}
"""
# This is a blocking call, you receive response in the `res` variable
print('Making a query first')
res = client.query(query, variables={'userId': 2})
print('query result', res)
# Subscription Example
subscription_query = """
subscription getUser {
users (id: 2) {
id
username
}
}
"""
# Our callback function, which will be called and passed data every time new data is available
def my_callback(op_id, data):
print(f"Got data for Operation ID: {op_id}. Data: {data}")
print('Making a graphql subscription now...')
sub_id = client.subscribe(subscription_query, callback=my_callback)
print('Created subscription and waiting. Callback function is called whenever there is new data')
# do some operation while the subscription is running...
time.sleep(10)
client.stop_subscribe(sub_id)
client.close()
| 23.755102
| 97
| 0.734536
| 158
| 1,164
| 5.348101
| 0.493671
| 0.028402
| 0.028402
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008214
| 0.16323
| 1,164
| 48
| 98
| 24.25
| 0.859343
| 0.320447
| 0
| 0.2
| 0
| 0
| 0.484036
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0
| 0.066667
| 0
| 0.1
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef9c140412569fc3198bcf6324071fb38dea2030
| 2,465
|
py
|
Python
|
Scopus2Histcite.py
|
hengxyz/Scopus4HistCite
|
87395afe5d8a520b9c32a0efeed2288225430244
|
[
"Apache-2.0"
] | 2
|
2020-07-09T13:10:44.000Z
|
2020-07-10T13:00:52.000Z
|
Scopus2Histcite.py
|
hengxyz/Scopus4HistCite
|
87395afe5d8a520b9c32a0efeed2288225430244
|
[
"Apache-2.0"
] | null | null | null |
Scopus2Histcite.py
|
hengxyz/Scopus4HistCite
|
87395afe5d8a520b9c32a0efeed2288225430244
|
[
"Apache-2.0"
] | null | null | null |
# coding:utf-8
import os
import sys
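# Convert a Scopus RIS export (Scopus.ris or a file given on the command line) into a Web of Science-style text file that HistCite can read.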
def Scopus2HistCite():
try:
wrt_lines = []
if len(sys.argv) >= 2 and os.path.isfile(sys.argv[1]):
print("You are going to convert {}".format(sys.argv[1]))
Scopus_file = sys.argv[1]
elif os.path.isfile("./Scopus.ris"):
print("You are going to convert ./Scopus.ris")
Scopus_file = './Scopus.ris'
else:
raise Exception("No file spcified")
auth_started = False
ref_started = False
LT = [
'TI', # title
'T2', # jounal
'AU', # author
'VL', # volumn
'IS', # issue
'SP', # start page
'EP', # end page
'PY', # public year
'DO', # maybe doi? not important
]
wrt_lines.append('FN Thomson Reuters Web of Knowledge™')
wrt_lines.append('VR 1.0')
with open(Scopus_file, 'rb') as Scopus:
for each in Scopus.readlines():
line = each.strip()
line = line.decode().replace(' - ', ' ')
mark = line[:2]
if ref_started:
if mark == 'ER':
wrt_lines.append('ER')
wrt_lines.append('')
auth_started = False
ref_started = False
else:
wrt_lines.append(line)
elif line[:14] == 'N1 References:':
ref_started = True
line = line.replace(line[:14], 'CR')
wrt_lines.append(line)
elif mark in LT:
if mark == 'TI':
wrt_lines.append('PT J')
else:
line = line.replace('T2 ', 'SO ').replace('SP ', 'BP ')
if not auth_started and mark == 'AU':
auth_started = True
else:
line = line.replace('AU ', '')
wrt_lines.append(line)
with open("./savedres.txt", "w", encoding = "utf-8") as f:
for line in wrt_lines:
print(line)
f.write(line)
f.write("\n")
except Exception as e:
raise e
if __name__ == '__main__':
Scopus2HistCite()
| 35.724638
| 80
| 0.416633
| 246
| 2,465
| 4.065041
| 0.430894
| 0.08
| 0.112
| 0.054
| 0.156
| 0.112
| 0
| 0
| 0
| 0
| 0
| 0.013761
| 0.469371
| 2,465
| 69
| 81
| 35.724638
| 0.749235
| 0.041379
| 0
| 0.171875
| 0
| 0
| 0.106783
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015625
| false
| 0
| 0.03125
| 0
| 0.046875
| 0.046875
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef9d373a85947b14498743498aaf4ab814a074db
| 2,449
|
py
|
Python
|
mel_scale.py
|
zjlww/dsp
|
d7bcbf49bc8693560f3203c55b73956cc61dcd50
|
[
"MIT"
] | 9
|
2021-07-22T19:59:34.000Z
|
2021-12-16T06:37:27.000Z
|
mel_scale.py
|
zjlww/dsp
|
d7bcbf49bc8693560f3203c55b73956cc61dcd50
|
[
"MIT"
] | null | null | null |
mel_scale.py
|
zjlww/dsp
|
d7bcbf49bc8693560f3203c55b73956cc61dcd50
|
[
"MIT"
] | 2
|
2021-07-26T07:14:58.000Z
|
2021-12-16T06:37:30.000Z
|
"""
Mel-scale definition.
"""
import torch
from torch import Tensor
from typing import Union
import numpy as np
from math import log
import librosa
from librosa.filters import mel as mel_fn
def hz_to_mel(
frequencies: Union[float, int, Tensor, np.ndarray],
htk=False) -> Union[float, int, Tensor, np.ndarray]:
"""Convert Hz to Mels.
Extending librosa.hz_to_mel to accepting Tensor.
"""
if not isinstance(frequencies, Tensor):
return librosa.hz_to_mel(frequencies)
if htk:
return 2595.0 * torch.log10(1.0 + frequencies / 700.0)
f_min = 0.0
f_sp = 200.0 / 3
mels = (frequencies - f_min) / f_sp
min_log_hz = 1000.0 # beginning of log region (Hz)
min_log_mel = (min_log_hz - f_min) / f_sp # same (Mels)
logstep = log(6.4) / 27.0 # step size for log region
log_t = frequencies >= min_log_hz
mels[log_t] = min_log_mel + torch.log(frequencies[log_t] / min_log_hz) / \
logstep
return mels
def mel_to_hz(
mels: Union[int, float, Tensor, np.ndarray],
htk=False) -> Union[int, float, Tensor, np.ndarray]:
"""Convert mel bin numbers to frequencies."""
if not isinstance(mels, Tensor):
return librosa.mel_to_hz(mels, htk=htk)
if htk:
return 700.0 * (10.0 ** (mels / 2595.0) - 1.0)
f_min = 0.0
f_sp = 200.0 / 3
freqs = f_min + f_sp * mels
min_log_hz = 1000.0 # beginning of log region (Hz)
min_log_mel = (min_log_hz - f_min) / f_sp # same (Mels)
logstep = log(6.4) / 27.0 # step size for log region
log_t = mels >= min_log_mel
freqs[log_t] = min_log_hz * \
torch.exp(logstep * (mels[log_t] - min_log_mel))
return freqs
def linear_mel_matrix(
sampling_rate: int, fft_size: int, mel_size: int,
mel_min_f0: Union[int, float],
mel_max_f0: Union[int, float],
device: torch.device
) -> Tensor:
"""
Args:
sampling_rate: Sampling rate in Hertz.
fft_size: FFT size, must be an even number.
mel_size: Number of mel-filter banks.
mel_min_f0: Lowest frequency in the mel spectrogram.
mel_max_f0: Highest frequency in the mel spectrogram.
device: Target device of the transformation matrix.
Returns:
basis: [mel_size, fft_size // 2 + 1].
"""
basis = torch.FloatTensor(
mel_fn(sampling_rate, fft_size, mel_size, mel_min_f0, mel_max_f0)
).transpose(-1, -2)
return basis.to(device)
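# A minimal round-trip sketch (hedged: illustrative values only; assumes torch is available):
if __name__ == "__main__":
    freqs = torch.tensor([440.0, 1000.0, 4000.0])
    mels = hz_to_mel(freqs)          # Slaney-style scale (htk=False)
    recovered = mel_to_hz(mels)      # inverse mapping
    print(torch.allclose(freqs, recovered, atol=1e-3))  # expected: True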
| 31.805195
| 78
| 0.642303
| 386
| 2,449
| 3.870466
| 0.238342
| 0.048193
| 0.037483
| 0.018742
| 0.323963
| 0.270415
| 0.161981
| 0.161981
| 0.161981
| 0.161981
| 0
| 0.038587
| 0.248673
| 2,449
| 77
| 79
| 31.805195
| 0.77337
| 0.255206
| 0
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.137255
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef9edac80f3106bed3243580dd908ece6900cb29
| 379
|
py
|
Python
|
order/urls.py
|
xxcfun/trip-api
|
a51c8b6033ba2a70cf0e400180f31809f4ce476a
|
[
"Apache-2.0"
] | 1
|
2021-06-18T03:03:40.000Z
|
2021-06-18T03:03:40.000Z
|
order/urls.py
|
xxcfun/trip-api
|
a51c8b6033ba2a70cf0e400180f31809f4ce476a
|
[
"Apache-2.0"
] | null | null | null |
order/urls.py
|
xxcfun/trip-api
|
a51c8b6033ba2a70cf0e400180f31809f4ce476a
|
[
"Apache-2.0"
] | null | null | null |
from django.urls import path
from order import views
urlpatterns = [
    # Order submission endpoint
path('ticket/submit/', views.TicketOrderSubmitView.as_view(), name='ticket_submit'),
    # Order detail (pay, cancel, delete)
path('order/detail/<int:sn>/', views.OrderDetail.as_view(), name='order_detail'),
    # Order list
path('order/list/', views.OrderListView.as_view(), name='order_list')
]
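# A minimal sketch of resolving one of these routes by name (hedged: assumes this URLconf is
# included in the project without an extra namespace; the sn value is illustrative):
#
#   from django.urls import reverse
#   reverse('order_detail', kwargs={'sn': 1})   # -> '<include() prefix>/order/detail/1/'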
| 29.153846
| 89
| 0.664908
| 48
| 379
| 5.125
| 0.541667
| 0.073171
| 0.121951
| 0.121951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171504
| 379
| 12
| 90
| 31.583333
| 0.783439
| 0.068602
| 0
| 0
| 0
| 0
| 0.243323
| 0.065282
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.285714
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ef9f39f03563135dc82bcc1a0e27d1ea6a62e525
| 349
|
py
|
Python
|
api/models.py
|
yaroshyk/todo
|
828d5afc9abd85cd7f8f25e4d01f90c765231357
|
[
"MIT"
] | 3
|
2021-05-30T19:04:37.000Z
|
2021-08-30T14:16:57.000Z
|
api/models.py
|
yaroshyk/todo
|
828d5afc9abd85cd7f8f25e4d01f90c765231357
|
[
"MIT"
] | null | null | null |
api/models.py
|
yaroshyk/todo
|
828d5afc9abd85cd7f8f25e4d01f90c765231357
|
[
"MIT"
] | null | null | null |
from django.db import models
class Todo(models.Model):
title = models.CharField(max_length=100)
details = models.TextField()
date = models.DateTimeField(auto_now_add=True)
group = models.TextField(default='home')
user_id = models.IntegerField()
objects = models.Manager()
def __str__(self):
return self.title
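# A minimal usage sketch (hedged: assumes a configured Django project with this app migrated;
# the field values are illustrative):
#
#   todo = Todo.objects.create(title="Buy milk", details="2 liters", user_id=1)
#   str(todo)   # -> "Buy milk", via __str__ above; group defaults to 'home'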
| 23.266667
| 50
| 0.696275
| 43
| 349
| 5.465116
| 0.767442
| 0.12766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010676
| 0.194842
| 349
| 14
| 51
| 24.928571
| 0.825623
| 0
| 0
| 0
| 0
| 0
| 0.011461
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.1
| 0.1
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
efa05bae4ae4b077bd16954d59ff3b20aac6edc2
| 17,709
|
py
|
Python
|
src/upper/utils.py
|
USArmyResearchLab/ARL-UPPER
|
2f79f25338f18655b2a19c8afe3fed267cc0f198
|
[
"Apache-2.0"
] | 4
|
2020-09-14T06:13:04.000Z
|
2020-11-21T07:10:36.000Z
|
src/upper/utils.py
|
USArmyResearchLab/ARL-UPPER
|
2f79f25338f18655b2a19c8afe3fed267cc0f198
|
[
"Apache-2.0"
] | null | null | null |
src/upper/utils.py
|
USArmyResearchLab/ARL-UPPER
|
2f79f25338f18655b2a19c8afe3fed267cc0f198
|
[
"Apache-2.0"
] | 2
|
2020-03-15T17:59:26.000Z
|
2020-09-14T06:13:05.000Z
|
from typing import Tuple
from rdkit import Chem
from rdkit.Chem import Draw
import re
import itertools
import numpy as np
import networkx as nx
import logging
import collections
def FindBreakingBonds(cnids: list, bids: list, bts: list, atomic_nums: list) -> list:
"""Returns bond ids to be broken. Check for double/triple bonds;
if exists, check if heteroatom; if heteroatom, bonds of that C atom
are not broken."""
x1s = []
rmflag = None
for (i, bt) in enumerate(bts):
for (j, x) in enumerate(bt):
if x == Chem.rdchem.BondType.DOUBLE or x == Chem.rdchem.BondType.TRIPLE:
if atomic_nums[cnids[i][j]] != 6 and atomic_nums[cnids[i][j]] != 1:
rmflag = True
break
if not rmflag:
x1s.append(i)
rmflag = None
return [bids[x1] for x1 in x1s]
def FragNeighborBreakingBondTypes(
neighbor_ids: list, fnids: list, faids: list, bond_type_matrix: list
) -> list:
"""Determine broken bond types between fragments and fragment neighbors."""
# neighbor ids of fragment neighbors
nids_of_fnids = [[neighbor_ids[x] for x in y] for y in fnids]
# atom ids 'bonded' to fragment neighbor
int_ = [
[Intersection(x, faids[i])[0] for x in y] for (i, y) in enumerate(nids_of_fnids)
]
return [
[bond_type_matrix[x[i]][y[i]] for (i, z) in enumerate(x)]
for (x, y) in zip(fnids, int_)
]
def EditFragNeighborIds(fnids: list, bbtps: list) -> list:
"""Remove fragment neighbor ids that are doubly/triply bonded to fragment."""
# not double/triple bonds
n23bonds = [
[
(x != Chem.rdchem.BondType.DOUBLE and x != Chem.rdchem.BondType.TRIPLE)
for x in y
]
for y in bbtps
]
# return new fragment neighbor ids
return [
[x for (j, x) in enumerate(y) if n23bonds[i][j]] for (i, y) in enumerate(fnids)
]
def num_atom_rings_1bond(atom_rings: tuple, bond_rings: tuple, num_atoms: int) -> list:
"""Number of rings each atoms is in. Only rings sharing at most
1 bond with neighboring rings are considered."""
# atom ids of rings that share at most 1 bond with neighboring rings
atom_rings_1bond = [
atom_rings[i]
for (i, y) in enumerate(bond_rings)
if not any(
IntersectionBoolean(x, y, 2)
for x in [z for (j, z) in enumerate(bond_rings) if i != j]
)
]
return [sum(i in x for x in atom_rings_1bond) for i in range(num_atoms)]
def UniqueElements(x: list) -> list:
"""Returns unique elements of a list (not order preserving)."""
keys = {}
for e in x:
keys[e] = 1
return list(keys.keys())
def NeighborIDs(neighbor_ids: list, atomic_nums: list, y: list) -> list:
"""Find neighbor ids of a list of atoms (Hs not included)."""
# neighbor ids
z = [neighbor_ids[x] for x in y]
# remove Hs
return [[x for x in y if atomic_nums[x] != 1] for y in z]
def GetFragments(
smiles: str,
mol: Chem.rdchem.Mol,
neighbor_ids: list,
atomic_nums: list,
bond_id_matrix: list,
bond_type_matrix: list,
) -> Tuple[list, list, list]:
"""Fragment the molecule with isolated carbons method, see
Lian and Yalkowsky, JOURNAL OF PHARMACEUTICAL SCIENCES 103:2710-2723."""
# carbons
cids = [i for (i, x) in enumerate(atomic_nums) if x == 6]
# carbon neighbor ids
cnids = NeighborIDs(neighbor_ids, atomic_nums, cids)
# bond ids
bids = [
[bond_id_matrix[cid][cnid] for cnid in cnids]
for (cid, cnids) in zip(cids, cnids)
]
# bond types
bts = [
[bond_type_matrix[cid][cnid] for cnid in cnids]
for (cid, cnids) in zip(cids, cnids)
]
# broken bond ids
bbids = FindBreakingBonds(cnids, bids, bts, atomic_nums)
# break bonds, get fragments
try:
fmol = Chem.FragmentOnBonds(
mol, UniqueElements(list(itertools.chain.from_iterable(bbids)))
)
    except Exception:
fmol = mol
logging.info("fragmentation exception: %s" % (smiles))
# draw fragments, debugging only, expensive
# Draw.MolToFile(fmol,'fmol.png')
# fragment atom ids
faids = [list(x) for x in Chem.rdmolops.GetMolFrags(fmol)]
# fragment smiles
fsmiles = [Chem.rdmolfiles.MolFragmentToSmiles(fmol, frag) for frag in faids]
# fragment smarts
fsmarts = [Chem.rdmolfiles.MolFragmentToSmarts(fmol, frag) for frag in faids]
return faids, fsmiles, fsmarts
def FragNeighborID(fsmile: str) -> list:
"""End atoms bonded to a fragment."""
    fnid = re.compile(r"(%s|%s)" % (r"\d+(?=\*)", r"\*[^\]]")).findall(fsmile)
fnid = fnid if fnid else ["-1"]
return [int(x) if "*" not in x else 0 for x in fnid]
def FragNeighborIDs(fsmiles: list) -> list:
"""End atoms bonded to fragments."""
fnids = list(map(FragNeighborID, fsmiles))
return [x if (-1 not in x) else [] for x in fnids]
def BondedFragNeighborIDs(true_faids: list, fnids: list) -> list:
"""Neighbor fragment ids (not atom ids)."""
return [[k for (k, x) in enumerate(true_faids) for j in i if j in x] for i in fnids]
def NumHybridizationType(htype: Chem.rdchem.HybridizationType, fnhybrds: list) -> list:
"""Number of specified hybridization type for each fragment."""
return [sum(x == htype for x in fnhybrd) for fnhybrd in fnhybrds]
def Intersection(x: list, y: list) -> list:
"""Elements that match between two lists."""
return list(set(x) & set(y))
def IntersectionBoolean(x: list, y: list, z: int) -> bool:
"""Returns whether or not two lists overlap with at least z common elements."""
return len(set(x) & set(y)) >= z
def FindIdsWithHtype(
fids: list, fnids: list, fnhybrds: list, htype: Chem.rdchem.HybridizationType
) -> list:
"""Find fragment neighbor ids with htype."""
fnhybrds_in_fids = [fnhybrds[x] for x in fids]
fnids_in_fids = [fnids[x] for x in fids]
hids = []
x1 = 0
for x in fnhybrds_in_fids:
x2 = 0
for y in x:
if y == htype:
hids.append(fnids_in_fids[x1][x2])
x2 += 1
x1 += 1
return hids
def AromaticRings(atom_ids_in_rings: list, bond_type_matrix: list) -> list:
"""Return if bonds in rings are aromatic."""
# atom ids in rings
atom_ids_in_rings = [np.array(x) for x in atom_ids_in_rings]
return [
[
(bond_type_matrix[int(x)][int(y)] == Chem.rdchem.BondType.AROMATIC)
for (x, y) in zip(z, z.take(range(1, len(z) + 1), mode="wrap"))
]
for z in atom_ids_in_rings
]
def TrueFragAtomIDs(num_atoms: int, faids: list) -> list:
"""Remove dummy atom ids from fragments."""
return [[x for x in y if x < num_atoms] for y in faids]
def FindCentralCarbonsOfBiphenyl(
biphenyl_substructs: list,
neighbor_ids: list,
atomic_nums: list,
bond_matrix: list,
bond_type_matrix: list,
) -> list:
"""Find central carbons of biphenyl substructures."""
# find one of the central carbons in biphenyl substructures
cc = []
for z in biphenyl_substructs:
for (x, y) in zip(z, z.take(range(1, len(z) + 1), mode="wrap")):
if not bond_matrix[int(x)][int(y)]:
cc.append(int(y))
break
# find carbon that is singly bonded - other central carbon
ccs = []
for (i, y) in enumerate(NeighborIDs(neighbor_ids, atomic_nums, cc)):
for x in y:
if bond_type_matrix[cc[i]][x] == Chem.rdchem.BondType.SINGLE:
ccs.append([cc[i], x])
break
return ccs
def Flatten(x: list) -> list:
"""Flatten a list."""
return list(itertools.chain.from_iterable(x))
def RemoveElements(x: list, y: list) -> list:
"""Remove elements (y) from a list (x)."""
for e in y:
x.remove(e)
return x
def Graph(x: tuple) -> nx.classes.graph.Graph:
"""Make graph structure from atom ids. Used to find independent ring systems."""
# initialize graph
graph = nx.Graph()
# add nodes and edges
for part in x:
graph.add_nodes_from(part)
graph.add_edges_from(zip(part[:-1], part[1:]))
return graph
def NumIndRings(x: tuple) -> int:
"""Number of independent single, fused, or conjugated rings."""
return len(list(nx.connected_components(Graph(x))))
def ReduceFsmarts(fsmarts: list) -> list:
"""Rewrite fragment smarts."""
return [re.sub(r"\d+\#", "#", x) for x in fsmarts]
def EndLabels(fnbbtps: list) -> list:
"""End label of group.
- : bonded to one neighbor and btype = single
= : one neighbor is bonded with btype = double
tri- : one neighbor is bonded with btype = triple
allenic : allenic atom, two neighbors are bonded with btype = double"""
l = ["" for x in fnbbtps]
for (i, x) in enumerate(fnbbtps):
if len(x) == 1 and x.count(Chem.rdchem.BondType.SINGLE) == 1:
l[i] = "-"
continue
if x.count(Chem.rdchem.BondType.DOUBLE) == 1:
l[i] = "="
continue
if x.count(Chem.rdchem.BondType.TRIPLE) == 1:
l[i] = "tri-"
continue
if x.count(Chem.rdchem.BondType.DOUBLE) == 2:
l[i] = "allenic-"
return l
def FragAtomBondTypeWithSp2(
fnhybrds: list,
fnids: list,
neighbor_ids: list,
atomic_nums: list,
faids: list,
bond_type_matrix: list,
) -> list:
"""Bond type between fragment atom and neighboring sp2 atom."""
# fragment ids bonded to one sp2 atom
fids = [
i
for i, x in enumerate(
NumHybridizationType(Chem.rdchem.HybridizationType.SP2, fnhybrds)
)
if x == 1
]
# atom id in fragments corresponding to the sp2 atom
sp2ids = FindIdsWithHtype(fids, fnids, fnhybrds, Chem.rdchem.HybridizationType.SP2)
# neighbor atom ids of sp2 atoms
sp2nids = NeighborIDs(neighbor_ids, atomic_nums, sp2ids)
# intersection between sp2nids and atom ids in fragments with one sp2 atom
faid = list(
itertools.chain.from_iterable(
[Intersection(x, y) for (x, y) in zip([faids[x] for x in fids], sp2nids)]
)
)
# bond type fragment atom and sp2 atom
bts = [bond_type_matrix[x][y] for (x, y) in zip(sp2ids, faid)]
# generate list with bond types for each fragment, zero for fragments without one sp2 atom
afbts = [0] * len(fnhybrds)
for (x, y) in zip(fids, bts):
afbts[x] = y
return afbts
symm_rules: dict = {
2: {
1: {
Chem.rdchem.HybridizationType.SP: 2,
Chem.rdchem.HybridizationType.SP2: 2,
Chem.rdchem.HybridizationType.SP3: 2,
},
2: {
Chem.rdchem.HybridizationType.SP: 1,
Chem.rdchem.HybridizationType.SP2: 1,
Chem.rdchem.HybridizationType.SP3: 1,
},
},
3: {
1: {Chem.rdchem.HybridizationType.SP2: 6, Chem.rdchem.HybridizationType.SP3: 3},
2: {Chem.rdchem.HybridizationType.SP2: 2, Chem.rdchem.HybridizationType.SP3: 1},
3: {Chem.rdchem.HybridizationType.SP2: 1, Chem.rdchem.HybridizationType.SP3: 1},
},
4: {
1: {Chem.rdchem.HybridizationType.SP3: 12},
2: {Chem.rdchem.HybridizationType.SP3: 0},
3: {Chem.rdchem.HybridizationType.SP3: 1},
4: {Chem.rdchem.HybridizationType.SP3: 1},
},
}
def Symm(
smiles: str,
num_attached_atoms: int,
num_attached_types: int,
center_hybrid: Chem.rdchem.HybridizationType,
count_rankings: collections.Counter,
) -> int:
"""Molecular symmetry."""
try:
symm = symm_rules[num_attached_atoms][num_attached_types][center_hybrid]
    except Exception:
logging.warning("symmetry exception: {}".format(smiles))
symm = np.nan
# special case
if symm == 0:
vals = list(count_rankings.values())
symm = 3 if (vals == [1, 3] or vals == [3, 1]) else 2
return symm
def DataReduction(y: dict, group_labels: list) -> None:
"""Remove superfluous data for single molecule."""
for l in group_labels:
y[l] = list(itertools.compress(zip(y["fsmarts"], range(y["num_frags"])), y[l]))
def NFragBadIndices(d: np.ndarray, group_labels: list, smiles: list) -> None:
"""Indices of compounds that do not have consistent number of fragments."""
def NFragCheck(y: dict) -> bool:
"""Check number of fragments and group contributions are consistent."""
num_frags = 0
for l in group_labels:
num_frags += len(y[l])
return num_frags != y["num_frags"]
x = list(map(NFragCheck, d))
indices = list(itertools.compress(range(len(x)), x))
logging.info(
"indices of molecules with inconsistent number of fragments:\n{}".format(
indices
)
)
logging.info("and their smiles:\n{}".format([smiles[x] for x in indices]))
def UniqueGroups(d: np.ndarray, num_mol: int, group_labels: list) -> list:
"""Unique fragments for each environmental group."""
# fragments for each group
groups = [[d[i][j] for i in range(num_mol)] for j in group_labels]
# eliminate fragment ids
groups = [[x[0] for x in Flatten(y)] for y in groups]
return [UniqueElements(x) for x in groups]
def UniqueLabelIndices(flabels: list) -> list:
"""Indices of unique fingerprint labels."""
sort_ = [sorted(x) for x in flabels]
tuple_ = [tuple(x) for x in sort_]
unique_labels = [list(x) for x in sorted(set(tuple_), key=tuple_.index)]
return [[i for (i, x) in enumerate(sort_) if x == y] for y in unique_labels]
def UniqueLabels(flabels: list, indices: list) -> list:
"""Unique fingerprint labels."""
return [flabels[x[0]] for x in indices]
def UniqueFingerprint(indices: list, fingerprint: np.ndarray) -> np.ndarray:
"""Reduce fingerprint according to unique labels."""
fp = np.zeros((fingerprint.shape[0], len(indices)))
for (j, x) in enumerate(indices):
fp[:, j] = np.sum(fingerprint[:, x], axis=1)
return fp
def UniqueLabelsAndFingerprint(
flabels: list, fingerprint: np.ndarray
) -> Tuple[list, np.ndarray]:
"""Reduced labels and fingerprint."""
uli = UniqueLabelIndices(flabels)
ul = UniqueLabels(flabels, uli)
fp = UniqueFingerprint(uli, fingerprint)
return ul, fp
def CountGroups(fingerprint_groups: list, group_labels: list, d: dict) -> list:
"""Count groups for fingerprint."""
return [
[[x[0] for x in d[y]].count(z) for z in fingerprint_groups[i]]
for (i, y) in enumerate(group_labels)
]
def Concat(x: list, y: list) -> list:
"""Concatenate groups and singles in fingerprint."""
return x + y
def MakeFingerprint(
fingerprint_groups: list, labels: dict, d: np.ndarray, num_mol: int
) -> np.ndarray:
"""Make fingerprint."""
# count groups and make fingerprint
fp_groups = [
Flatten(CountGroups(fingerprint_groups, labels["groups"], d[:, 0][i]))
for i in range(num_mol)
]
# reduce singles to requested
fp_singles = [[d[:, 1][i][j] for j in labels["singles"]] for i in range(num_mol)]
# concat groups and singles
return np.array(list(map(Concat, fp_groups, fp_singles)))
def ReduceMultiCount(d: dict) -> None:
"""Ensure each fragment belongs to one environmental group.
Falsify Y, Z when YZ true
Falsify YY, Z when YYZ true
Falsify YYY, Z when YYYZ true
Falsify RG when AR true
..."""
def TrueIndices(group: str) -> list:
"""Return True indices."""
x = d[group]
return list(itertools.compress(range(len(x)), x))
def ReplaceTrue(replace_group: list, actual_group: list) -> None:
"""Replace True elements with False to avoid overcounting fragment contribution."""
replace_indices = list(map(TrueIndices, replace_group))
actual_indices = list(map(TrueIndices, actual_group))
for actual_index in actual_indices:
for (group, replace_index) in zip(replace_group, replace_indices):
int_ = Intersection(replace_index, actual_index)
for x in int_:
d[group][x] = False
replace_groups = [
["Y", "Z"],
["YY", "Z"],
["YYY", "Z"],
["RG"],
["X", "Y", "YY", "YYY", "YYYY", "YYYYY", "Z", "ZZ", "YZ", "YYZ"],
["RG", "AR"],
["AR", "BR2", "BR3", "FU"],
["RG", "AR"],
]
actual_groups = [
["YZ"],
["YYZ"],
["YYYZ"],
["AR"],
["RG", "AR"],
["BR2", "BR3"],
["BIP"],
["FU"],
]
list(map(ReplaceTrue, replace_groups, actual_groups))
def RewriteFsmarts(d: dict) -> None:
"""Rewrite fsmarts to 'fsmiles unique' fsmarts."""
def FsmartsDict(d: dict) -> dict:
"""Dict of original fsmarts to 'fsmiles unique' fsmarts."""
# unique smarts in dataset, mols
fsmarts = UniqueElements(Flatten([x[0]["fsmarts"] for x in d]))
fmols = [Chem.MolFromSmarts(x) for x in fsmarts]
# smiles, not necessarily unique
fsmiles = [Chem.MolToSmiles(x) for x in fmols]
# dict: original fsmarts to 'fsmiles unique' fsmarts
dict_ = collections.defaultdict(lambda: len(dict_))
fsmarts_dict = {}
for (i, x) in enumerate(fsmarts):
fsmarts_dict[x] = fsmarts[dict_[fsmiles[i]]]
return fsmarts_dict
fsmarts_dict = FsmartsDict(d)
# rewrite fsmarts
for (i, y) in enumerate(d):
d[i][0]["fsmarts"] = [fsmarts_dict[x] for x in y[0]["fsmarts"]]
| 28.65534
| 94
| 0.611666
| 2,390
| 17,709
| 4.457741
| 0.156904
| 0.011827
| 0.018585
| 0.012484
| 0.239441
| 0.135818
| 0.088981
| 0.057443
| 0.046555
| 0.046555
| 0
| 0.01064
| 0.262296
| 17,709
| 617
| 95
| 28.701783
| 0.804884
| 0.205884
| 0
| 0.112994
| 0
| 0
| 0.023607
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.115819
| false
| 0
| 0.025424
| 0
| 0.242938
| 0.031073
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
efa2c84741d3637cb65c8fc32a0abc9a577fb053
| 3,317
|
py
|
Python
|
025_reverse-nodes-in-k-group.py
|
tasselcui/leetcode
|
5c32446b8b5bf3711cf28e465f448c6a0980f259
|
[
"MIT"
] | null | null | null |
025_reverse-nodes-in-k-group.py
|
tasselcui/leetcode
|
5c32446b8b5bf3711cf28e465f448c6a0980f259
|
[
"MIT"
] | null | null | null |
025_reverse-nodes-in-k-group.py
|
tasselcui/leetcode
|
5c32446b8b5bf3711cf28e465f448c6a0980f259
|
[
"MIT"
] | null | null | null |
# =============================================================================
# # -*- coding: utf-8 -*-
# """
# Created on Sun Aug 5 08:07:19 2018
#
# @author: lenovo
# """
# 25. Reverse Nodes in k-Group
# Given a linked list, reverse the nodes of a linked list k at a time and return its modified list.
#
# k is a positive integer and is less than or equal to the length of the linked list. If the number of nodes is not a multiple of k then left-out nodes in the end should remain as it is.
#
# Example:
#
# Given this linked list: 1->2->3->4->5
#
# For k = 2, you should return: 2->1->4->3->5
#
# For k = 3, you should return: 3->2->1->4->5
#
# Note:
#
# Only constant extra memory is allowed.
# You may not alter the values in the list's nodes; only the nodes themselves may be changed.
# =============================================================================
# =============================================================================
# difficulty: hard
# acceptance: 32.7%
# contributor: LeetCode
# =============================================================================
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
def reverseKGroup(self, head, k):
"""
:type head: ListNode
:type k: int
:rtype: ListNode
"""
def reverseList(head, k):
pre = None
cur = head
while cur and k:
temp = cur.next
cur.next = pre
pre = cur
cur = temp
k -= 1
return (cur, pre)
p1 = head
length = 0
while p1:
length += 1
p1 = p1.next
if length < k:
return head
step = length // k
p = head
res = None
pre = None
while p and step:
nextp, newhead = reverseList(p, k)
if not res:
res = newhead
if pre:
pre.next = newhead
pre = p
p = nextp
step -= 1
pre.next = p
return res
# reverseList(head, k)
# =============================================================================
# def reverseList(head, k):
# pre = None
# cur = head
# while cur and k:
# temp = cur.next
# cur.next = pre
# pre = cur
# cur = temp
# k -= 1
# return (cur, pre)
# =============================================================================
#------------------------------------------------------------------------------
# note: below is the test code
a = ListNode(1)
b = ListNode(2)
c = ListNode(3)
d = ListNode(4)
a.next = b
b.next = c
c.next = d
test = a
S = Solution()
result = S.reverseKGroup(test, 3)
#result = a
while result:
print(result.val)
result = result.next
#------------------------------------------------------------------------------
# note: below is the submission detail
# =============================================================================
# Submission Detail
# 81 / 81 test cases passed.
# Status: Accepted
# Runtime: 56 ms
# Submitted: 0 minutes ago
# beats 93.99% python3 submissions
# =============================================================================
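# A further illustrative check (hedged: reuses the ListNode/Solution classes defined above),
# reversing 1->2->3->4->5 in groups of k = 2; the leftover tail node 5 stays in place.
nodes = [ListNode(i) for i in range(1, 6)]
for left, right in zip(nodes, nodes[1:]):
    left.next = right
out = Solution().reverseKGroup(nodes[0], 2)
vals = []
while out:
    vals.append(out.val)
    out = out.next
print(vals)  # expected: [2, 1, 4, 3, 5]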
| 28.843478
| 186
| 0.403075
| 356
| 3,317
| 3.744382
| 0.376404
| 0.030008
| 0.036009
| 0.028507
| 0.139535
| 0.139535
| 0.139535
| 0.139535
| 0.139535
| 0.139535
| 0
| 0.025274
| 0.284293
| 3,317
| 114
| 187
| 29.096491
| 0.536226
| 0.60205
| 0
| 0.039216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0
| 0
| 0.156863
| 0.019608
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
efa5e69113b0347792c870829c3f62690cf050bb
| 2,708
|
py
|
Python
|
perma_web/perma/tests/test_views_common.py
|
leppert/perma
|
adb0cec29679c3d161d72330e19114f89f8c42ac
|
[
"MIT",
"Unlicense"
] | null | null | null |
perma_web/perma/tests/test_views_common.py
|
leppert/perma
|
adb0cec29679c3d161d72330e19114f89f8c42ac
|
[
"MIT",
"Unlicense"
] | null | null | null |
perma_web/perma/tests/test_views_common.py
|
leppert/perma
|
adb0cec29679c3d161d72330e19114f89f8c42ac
|
[
"MIT",
"Unlicense"
] | null | null | null |
from django.conf import settings
from django.core import mail
from django.core.urlresolvers import reverse
from perma.urls import urlpatterns
from .utils import PermaTestCase
class CommonViewsTestCase(PermaTestCase):
def test_public_views(self):
# test static template views
for urlpattern in urlpatterns:
if urlpattern.callback.func_name == 'DirectTemplateView':
resp = self.get(urlpattern.name)
def test_misformatted_nonexistent_links_404(self):
response = self.client.get(reverse('single_linky', kwargs={'guid': 'JJ99--JJJJ'}))
self.assertEqual(response.status_code, 404)
response = self.client.get(reverse('single_linky', kwargs={'guid': '988-JJJJ=JJJJ'}))
self.assertEqual(response.status_code, 404)
def test_properly_formatted_nonexistent_links_404(self):
response = self.client.get(reverse('single_linky', kwargs={'guid': 'JJ99-JJJJ'}))
self.assertEqual(response.status_code, 404)
# Test the original ID style. We shouldn't get a redirect.
response = self.client.get(reverse('single_linky', kwargs={'guid': '0J6pkzDeQwT'}))
self.assertEqual(response.status_code, 404)
def test_contact(self):
# Does our contact form behave reasonably?
        # The form should be fine with all fields
message_body = 'Just some message here'
from_email = 'example@example.com'
self.submit_form('contact', data={
'email': from_email,
'message': message_body},
success_url=reverse('contact_thanks'))
# check contents of sent email
message = mail.outbox[0]
self.assertIn(message_body, message.body)
self.assertEqual(message.subject, 'New message from Perma contact form')
self.assertEqual(message.from_email, settings.DEFAULT_FROM_EMAIL)
self.assertEqual(message.recipients(), [settings.DEFAULT_FROM_EMAIL])
self.assertDictEqual(message.extra_headers, {'Reply-To': from_email})
# We should fail if we don't get a from email
response = self.client.post(reverse('contact'), data={
'email': '',
'message': message_body})
self.assertEqual(response.request['PATH_INFO'], reverse('contact'))
# We need at least a message. We should get the contact page back
# instead of the thanks page.
response = self.client.post(reverse('contact'), data={
'email': from_email,
'message': ''})
self.assertEqual(response.request['PATH_INFO'], reverse('contact'))
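# A hedged note on running this test case locally (standard Django tooling; the dotted path is
# inferred from this file's location):
#
#   python manage.py test perma.tests.test_views_common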
| 43.677419
| 93
| 0.642171
| 309
| 2,708
| 5.498382
| 0.368932
| 0.079459
| 0.063567
| 0.049441
| 0.422602
| 0.389641
| 0.361389
| 0.359035
| 0.194232
| 0.136551
| 0
| 0.013807
| 0.251108
| 2,708
| 62
| 94
| 43.677419
| 0.823965
| 0.121492
| 0
| 0.243902
| 0
| 0
| 0.13159
| 0
| 0
| 0
| 0
| 0
| 0.268293
| 1
| 0.097561
| false
| 0
| 0.121951
| 0
| 0.243902
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
efa68642041c99f789a40f12b356c9ba93e64adc
| 1,708
|
py
|
Python
|
GeneratorInterface/Pythia8Interface/python/Py8PtLxyGun_4tau_cfi.py
|
menglu21/cmssw
|
c3d6cb102c0aaddf652805743370c28044d53da6
|
[
"Apache-2.0"
] | null | null | null |
GeneratorInterface/Pythia8Interface/python/Py8PtLxyGun_4tau_cfi.py
|
menglu21/cmssw
|
c3d6cb102c0aaddf652805743370c28044d53da6
|
[
"Apache-2.0"
] | null | null | null |
GeneratorInterface/Pythia8Interface/python/Py8PtLxyGun_4tau_cfi.py
|
menglu21/cmssw
|
c3d6cb102c0aaddf652805743370c28044d53da6
|
[
"Apache-2.0"
] | null | null | null |
import FWCore.ParameterSet.Config as cms
#Note: distances in mm instead of in cm usually used in CMS
generator = cms.EDFilter("Pythia8PtAndLxyGun",
maxEventsToPrint = cms.untracked.int32(1),
pythiaPylistVerbosity = cms.untracked.int32(1),
pythiaHepMCVerbosity = cms.untracked.bool(True),
PGunParameters = cms.PSet(
ParticleID = cms.vint32(-15, -15),
        AddAntiParticle = cms.bool(True), # the antiparticle has opposite momentum and a production point symmetric about (0,0,0) relative to the corresponding particle
MinPt = cms.double(15.00),
MaxPt = cms.double(300.00),
MinEta = cms.double(-2.5),
MaxEta = cms.double(2.5),
MinPhi = cms.double(-3.14159265359),
MaxPhi = cms.double(3.14159265359),
LxyMin = cms.double(0.0),
        LxyMax = cms.double(550.0), # most taus are generated within the TOB (55 cm)
LzMax = cms.double(300.0),
dxyMax = cms.double(30.0),
dzMax = cms.double(120.0),
ConeRadius = cms.double(1000.0),
ConeH = cms.double(3000.0),
DistanceToAPEX = cms.double(850.0),
        LxyBackFraction = cms.double(0.0), # fraction of particles going back towards the center in the transverse plane; numbers outside the [0,1] range are set to 0 or 1
        LzOppositeFraction = cms.double(0.0), # fraction of particles whose direction along the beam line, with respect to the center, is opposite to their direction in the transverse plane; numbers outside the [0,1] range are set to 0 or 1
),
Verbosity = cms.untracked.int32(0), ## set to 1 (or greater) for printouts
psethack = cms.string('displaced taus'),
firstRun = cms.untracked.uint32(1),
PythiaParameters = cms.PSet(parameterSets = cms.vstring())
)
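# A minimal sketch of using this fragment in a full configuration (hedged: the process name and
# surrounding driver are assumptions, not part of this cfi file):
#
#   import FWCore.ParameterSet.Config as cms
#   process = cms.Process("GEN")
#   process.load("GeneratorInterface.Pythia8Interface.Py8PtLxyGun_4tau_cfi")
#   # the EDFilter defined above is then available as process.generator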
| 47.444444
| 203
| 0.668618
| 231
| 1,708
| 4.943723
| 0.502165
| 0.126095
| 0.044658
| 0.028897
| 0.124343
| 0.124343
| 0.124343
| 0.124343
| 0.063047
| 0.063047
| 0
| 0.078534
| 0.217213
| 1,708
| 35
| 204
| 48.8
| 0.775617
| 0.305621
| 0
| 0
| 0
| 0
| 0.027142
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.033333
| 0
| 0.033333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
efabd851d1c220194dc0597eebe6be9a8b117165
| 5,492
|
py
|
Python
|
questions/models.py
|
stkrizh/otus-django-hasker
|
9692b8060a789b0b66b4cf3591a78e32c8a10380
|
[
"MIT"
] | null | null | null |
questions/models.py
|
stkrizh/otus-django-hasker
|
9692b8060a789b0b66b4cf3591a78e32c8a10380
|
[
"MIT"
] | 10
|
2020-06-05T22:56:30.000Z
|
2022-02-10T08:54:18.000Z
|
questions/models.py
|
stkrizh/otus-django-hasker
|
9692b8060a789b0b66b4cf3591a78e32c8a10380
|
[
"MIT"
] | null | null | null |
import logging
from typing import List, Optional
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
VOTE_UP = 1
VOTE_DOWN = -1
VOTE_CHOICES = ((VOTE_UP, "Vote Up"), (VOTE_DOWN, "Vote Down"))
User = get_user_model()
logger = logging.getLogger(__name__)
class AbstractPost(models.Model):
""" Abstract model that defines common fields and methods
for Question / Answer models.
"""
vote_class: Optional[models.Model] = None
author = models.ForeignKey(
User,
on_delete=models.CASCADE,
related_name="%(class)ss",
related_query_name="%(class)s",
)
content = models.TextField(blank=False)
posted = models.DateTimeField(auto_now_add=True)
rating = models.IntegerField(default=0)
number_of_votes = models.IntegerField(default=0)
class Meta:
abstract = True
ordering = ["-posted"]
def vote(self, user, value: int) -> int:
""" Add vote from `user` and return new rating.
"""
assert self.vote_class is not None
assert value in (VOTE_DOWN, VOTE_UP), value
try:
current = self.vote_class.objects.get(user=user, to=self)
except ObjectDoesNotExist:
self.vote_class.objects.create(user=user, to=self, value=value)
logger.debug(
f"New vote ({value}) by {user} hase been created for {self}"
)
return self.rating + value
if current.value == value:
return self.rating
self.vote_class.objects.filter(to=self, user=user).delete()
logger.debug(f"Vote by {user} has been deleted for {self}")
return self.rating + value
class AnswerVote(models.Model):
timestamp = models.DateTimeField(auto_now=True)
to = models.ForeignKey(
"Answer",
on_delete=models.CASCADE,
related_name="votes",
related_query_name="votes",
)
user = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
related_name="%(class)ss",
related_query_name="%(class)s",
)
value = models.SmallIntegerField(choices=VOTE_CHOICES)
class Meta:
ordering = ["-timestamp"]
unique_together = ["to", "user"]
def __str__(self):
return f"{self.user.username} {self.value:+d}"
class Answer(AbstractPost):
vote_class = AnswerVote
is_accepted = models.BooleanField(default=False)
question = models.ForeignKey(
"Question",
on_delete=models.CASCADE,
related_name="answers",
related_query_name="answer",
)
def __str__(self):
return f"{self.question.title} - {self.content[:50]} ..."
def mark(self):
""" Mark the answer as accepted.
"""
self.question.answers.update(is_accepted=False)
self.is_accepted = True
self.save(update_fields=["is_accepted"])
logger.debug(
f"Answer ({self.pk}) by {self.author} has been marked "
f"for question ({self.question.pk})."
)
def unmark(self):
""" Unmark acceptance from the answer.
"""
self.is_accepted = False
self.save(update_fields=["is_accepted"])
logger.debug(
f"Answer ({self.pk}) by {self.author} has been unmarked "
f"for question ({self.question.pk})."
)
class QuestionVote(models.Model):
timestamp = models.DateTimeField(auto_now=True)
to = models.ForeignKey(
"Question",
on_delete=models.CASCADE,
related_name="votes",
related_query_name="votes",
)
user = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
related_name="%(class)ss",
related_query_name="%(class)s",
)
value = models.SmallIntegerField(choices=VOTE_CHOICES)
class Meta:
ordering = ["-timestamp"]
unique_together = ["to", "user"]
def __str__(self):
return f"{self.user.username} {self.value:+d}"
class Question(AbstractPost):
vote_class = QuestionVote
number_of_answers = models.IntegerField(default=0)
tags = models.ManyToManyField("Tag")
title = models.CharField(
blank=False, max_length=settings.QUESTIONS_MAX_TITLE_LEN
)
def __str__(self):
return self.title
@classmethod
def trending(cls, count: int = 5) -> models.QuerySet:
""" Returns a query set of trending questions.
"""
return cls.objects.order_by("-number_of_votes")[:count]
def add_tags(self, tags: List[str], user) -> None:
if self.pk is None:
raise ValueError("Instance should be saved.")
for raw_tag in tags:
try:
tag = Tag.objects.get(name=raw_tag)
except ObjectDoesNotExist:
tag = Tag.objects.create(added=user, name=raw_tag)
self.tags.add(tag)
logger.debug(f"Tags ({tags}) have been added to question {self.pk}")
class Tag(models.Model):
added = models.DateTimeField(auto_now_add=True)
added_by = models.ForeignKey(
User,
blank=True,
null=True,
on_delete=models.SET_NULL,
related_name="added_tags",
related_query_name="added_tag",
)
name = models.CharField(
blank=False, max_length=settings.QUESTIONS_MAX_TAG_LEN
)
def __str__(self):
return self.name
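# A minimal usage sketch of the voting API above (hedged: assumes a configured Django project
# with these models migrated; the fetched objects are hypothetical):
#
#   question = Question.objects.first()
#   voter = User.objects.first()
#   question.rating = question.vote(voter, VOTE_UP)   # vote() returns the new rating
#   question.save(update_fields=["rating"])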
| 27.878173
| 76
| 0.619993
| 652
| 5,492
| 5.052147
| 0.223926
| 0.019126
| 0.029751
| 0.038251
| 0.403764
| 0.403764
| 0.330601
| 0.330601
| 0.330601
| 0.278385
| 0
| 0.001982
| 0.264931
| 5,492
| 196
| 77
| 28.020408
| 0.813971
| 0.047524
| 0
| 0.393103
| 0
| 0
| 0.134118
| 0.012157
| 0
| 0
| 0
| 0
| 0.013793
| 1
| 0.068966
| false
| 0
| 0.041379
| 0.034483
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
efac998014549cc9e8410daf8e8486e66ec92ef3
| 1,430
|
py
|
Python
|
backend/routers/bookmarks.py
|
heshikirihasebe/fastapi-instagram-clone
|
7bc265a62160171c5c5c1b2f18b3c86833cb64e7
|
[
"MIT"
] | 1
|
2022-02-08T19:35:22.000Z
|
2022-02-08T19:35:22.000Z
|
backend/routers/bookmarks.py
|
heshikirihasebe/fastapi-instagram-clone
|
7bc265a62160171c5c5c1b2f18b3c86833cb64e7
|
[
"MIT"
] | null | null | null |
backend/routers/bookmarks.py
|
heshikirihasebe/fastapi-instagram-clone
|
7bc265a62160171c5c5c1b2f18b3c86833cb64e7
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from fastapi import APIRouter, Request
from ..classes.jwt_authenticator import JWTAuthenticator
from ..repositories import bookmark_repository
from ..schemas.bookmark_schema import RequestSchema, ResponseSchema
router = APIRouter(
prefix='/bookmarks',
tags=['bookmarks'],
)
# Index
@router.get('/')
async def index():
pass
# Store a new bookmark, or update if exists
@router.post('/', response_model=ResponseSchema, status_code=200)
async def store(request: Request, req: RequestSchema):
authenticator = JWTAuthenticator()
# get a user from http headers
auth = await authenticator.get_current_user(request)
# check the record
bookmark = await bookmark_repository.select_one(user_id=auth['id'], post_id=req.post_id)
if bookmark is None:
# insert a new record
await bookmark_repository.insert(user_id=auth['id'], post_id=req.post_id)
is_bookmarked = True
else:
if bookmark.deleted_at is not None:
# bookmark
await bookmark_repository.update(user_id=auth['id'], post_id=req.post_id, deleted_at=None)
is_bookmarked = True
else:
# unbookmark
await bookmark_repository.update(user_id=auth['id'], post_id=req.post_id, deleted_at=datetime.now())
is_bookmarked = False
response = ResponseSchema(is_bookmarked=is_bookmarked)
return response
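# A minimal sketch of mounting this router (hedged: the application module below is an
# assumption, not part of this file):
#
#   from fastapi import FastAPI
#   app = FastAPI()
#   app.include_router(router)   # serves GET /bookmarks/ and POST /bookmarks/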
| 31.086957
| 112
| 0.706294
| 178
| 1,430
| 5.494382
| 0.365169
| 0.04908
| 0.09407
| 0.04908
| 0.188139
| 0.188139
| 0.188139
| 0.188139
| 0.188139
| 0.132924
| 0
| 0.002632
| 0.202797
| 1,430
| 45
| 113
| 31.777778
| 0.855263
| 0.093007
| 0
| 0.137931
| 0
| 0
| 0.022533
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.034483
| 0.172414
| 0
| 0.206897
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
efaec7b2aeea24ccd064fbf8dcfa28faac52b446
| 1,635
|
py
|
Python
|
analysis/scripts/project_functions_Tom.py
|
data301-2020-winter2/course-project-group_1052
|
3733aacac0812811752d77e5f3d822ef5251c17b
|
[
"MIT"
] | null | null | null |
analysis/scripts/project_functions_Tom.py
|
data301-2020-winter2/course-project-group_1052
|
3733aacac0812811752d77e5f3d822ef5251c17b
|
[
"MIT"
] | 1
|
2021-03-24T17:16:52.000Z
|
2021-03-24T17:16:52.000Z
|
analysis/scripts/project_functions_Tom.py
|
data301-2020-winter2/course-project-group_1052
|
3733aacac0812811752d77e5f3d822ef5251c17b
|
[
"MIT"
] | null | null | null |
import pandas as pd
def load_and_process(path):
df1 = pd.read_csv(path)
df2 = (
df1.drop(columns=['songName', 'ogLyric', 'kbLyric'])
.rename(columns={'badword':'badWord'})
.sort_values("ogArtist", ascending = True)
.sort_values("year", ascending = True)
)
return df2
def badword_count(dataframe):
df1 = (pd.DataFrame(dataframe['badWord']
.value_counts())
.reset_index()
.rename(columns={'index':'badWord','badWord':'frequency'})
)
return df1
def unique_word_count(dataframe):
df1 = (dataframe.groupby('ogArtist')['badWord']
.nunique()
.sort_values(ascending = False))
return df1
def words_per_year(dataframe):
df1 = dataframe.drop(columns = ['ogArtist', 'songName', 'category', 'ogLyric', 'kbLyric', 'count'])
df2 = df1.loc[(df1['badWord'] == 'fuck') | (df1['badWord'] == 'shit') |(df1['badWord'] == 'damn') |(df1['badWord'] == 'man') |(df1['badWord'] == 'kiss')]
df3 = df2.value_counts()
df4 = df3.reset_index()
df5 = df4.rename(columns={0:'count'})
return df5
def words_per_year_T4(dataframe):
df1 = dataframe.drop(columns = ['ogArtist', 'category', 'isCensored', 'isPresent', 'count'])
df2 = df1.loc[(df1['badWord'] == 'fuck') | (df1['badWord'] == 'shit') |(df1['badWord'] == 'damn') |(df1['badWord'] == 'man') |(df1['badWord'] == 'kiss')]
df3 = df2.value_counts()
df4 = df3.reset_index()
df5 = df4.rename(columns={0:'count'})
return df5
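# A minimal illustrative check of the counting helpers (hedged: toy data, not the project CSV;
# exact column labels from value_counts() may vary with the pandas version):
if __name__ == "__main__":
    toy = pd.DataFrame({
        "ogArtist": ["A", "A", "B"],
        "badWord": ["damn", "damn", "shit"],
    })
    print(badword_count(toy))       # frequency of each bad word
    print(unique_word_count(toy))   # distinct bad words per artist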
| 30.277778
| 157
| 0.55107
| 173
| 1,635
| 5.092486
| 0.317919
| 0.113507
| 0.07151
| 0.034052
| 0.431328
| 0.431328
| 0.340522
| 0.340522
| 0.340522
| 0.340522
| 0
| 0.033856
| 0.259327
| 1,635
| 54
| 158
| 30.277778
| 0.693642
| 0
| 0
| 0.333333
| 0
| 0
| 0.182763
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.138889
| false
| 0
| 0.027778
| 0
| 0.305556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|