| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
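The rows below follow this schema: per-row repo metadata, the file's source in `content`, and the quality-signal values in the column order above. As a quick orientation, a minimal sketch of loading such a dump with pandas and filtering on its quality signals; the parquet filename and the thresholds are illustrative assumptions, not part of the dataset:

```python
import pandas as pd

# Placeholder filename; substitute an actual shard of this dataset.
df = pd.read_parquet("data.parquet")

# Keep Python files that are not dominated by duplicated 10-grams and that
# have a reasonable share of alphanumeric characters (thresholds illustrative).
kept = df[
    (df["lang"] == "Python")
    & (df["qsc_code_frac_chars_dupe_10grams_quality_signal"] < 0.2)
    & (df["alphanum_fraction"] > 0.5)
]
print(kept[["max_stars_repo_name", "size", "avg_line_length"]].head())
```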
95887e566eb9b0860bede603c8c4d3bf2e059af1 | 5,634 | py | Python | main.py | TrueMLGPro/MultiDownloader | 8ef6cdccbe253fe79cf3cec9ed83fd40c3f834bc | ["Apache-2.0"] | 3 | 2021-02-05T09:33:39.000Z | 2021-07-25T18:39:43.000Z | main.py | TrueMLGPro/MultiDownloader | 8ef6cdccbe253fe79cf3cec9ed83fd40c3f834bc | ["Apache-2.0"] | null | null | null | main.py | TrueMLGPro/MultiDownloader | 8ef6cdccbe253fe79cf3cec9ed83fd40c3f834bc | ["Apache-2.0"] | 1 | 2022-02-28T21:41:12.000Z | 2022-02-28T21:41:12.000Z |
```python
# Copyright 2020 TrueMLGPro
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
import pyfiglet
import subprocess
import sys
parser = argparse.ArgumentParser(add_help=False)
group_download = parser.add_argument_group('Download Tools')
group_download.add_argument('URL', metavar='url', help='a url to download', nargs='?')
group_download.add_argument('-c', '--curl', dest='curl', action='store_true', help='Uses curl for download')
group_download.add_argument('-w', '--wget', dest='wget', action='store_true', help='Uses wget for download')
group_download.add_argument('-H', '--httrack', dest='httrack', action='store_true', help='Uses httrack for mirroring')
group_download_args = parser.add_argument_group('Download Arguments')
group_download_args.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='Makes output more detailed')
group_download_args.add_argument('-d', '--depth', dest='depth', help='Defines depth of mirror (httrack only)')
group_download_args.add_argument('-eD', '--ext-depth', dest='ext_depth', help='Defines depth of mirror for external links (httrack only)')
group_download_args.add_argument('-cN', '--conn-num', dest='conn_num', help='Defines a number of active connections during mirroring (httrack only)')
group_files = parser.add_argument_group('Files')
group_files.add_argument('-f', '--filename', dest='filename', help='Sets filename (or path) for file which is being downloaded')
group_misc = parser.add_argument_group('Misc')
group_misc.add_argument('-u', '--update', dest='update', action='store_true', help='Updates MultiDownloader')
group_misc.add_argument('-h', '--help', action='help', help='Shows this help message and exits')
args = parser.parse_args()
def banner():
    banner_figlet = pyfiglet.figlet_format("MultiDownloader", font="small")
    print(banner_figlet + "Made by TrueMLGPro | v1.0")

def menu():
    print("\n" + "1. Download using curl" + "\n"
          + "2. Download using wget" + "\n"
          + "3. Mirror website using httrack" + "\n"
          + "4. Update MultiDownloader" + "\n"
          + "5. Exit" + "\n"
          + "6. Get args")

def main():
    if len(sys.argv) <= 1:
        banner()
        menu()
        while True:
            choice = input("[>>] ")
            if choice == "1":
                print("[i] Using curl to download...")
                curl_download(input("[+] Enter URL: "),
                              input("[+] Enter filename: "),
                              input("[+] Verbose? (y/n): "))
                menu()
            elif choice == "2":
                print("[i] Using wget to download...")
                wget_download(input("[+] Enter URL: "),
                              input("[+] Enter filename: "),
                              input("[+] Verbose? (y/n): "))
                menu()
            elif choice == "3":
                print("[i] Using httrack to mirror...")
                httrack_download(input("[+] Enter URL: "),
                                 input("[+] Enter project path for mirror: "),
                                 input("[+] Enter depth level: "),
                                 input("[+] Enter external links depth level: "),
                                 input("[+] Enter number of connections: "),
                                 input("[+] Verbose? (y/n): "))
            elif choice == "4":
                print("[i] Getting latest updates for MultiDownloader..." + "\n")
                subprocess.call('sh scripts/update.sh', shell=True)
                menu()
            elif choice == "5":
                print("[!] Exiting...")
                sys.exit()
            elif choice == "6":
                print(args)
            else:
                # input() always returns a string, so the original check
                # `type(choice) != int` was always true; any unrecognized
                # entry is simply an invalid choice.
                print("[!!!] Invalid choice. Exiting...")
                sys.exit()

def curl_download(url, filename, verbose=None):
    print("[i] Downloading using curl - " + url + " with filename: " + filename)
    # curl's -O flag ignores a custom name; -o <file> saves to the given filename
    if verbose == "y":
        subprocess.call(f"curl -L -o {filename} -v {url}", shell=True)
    else:
        subprocess.call(f"curl -L -o {filename} {url}", shell=True)

def wget_download(url, filename, verbose=None):
    print("[i] Downloading using wget - " + url + " with filename: " + filename + "\n" + "Verbose: " + str(verbose))
    if verbose == "y":
        subprocess.call(f"wget -O {filename} -v {url}", shell=True)
    else:
        subprocess.call(f"wget -O {filename} {url}", shell=True)

def httrack_download(url, path, mirror_depth, ext_links_depth, conn_num, verbose=None):
    print("[i] Cloning using httrack - " + url + " on path: " + path)
    subprocess.call(f"httrack {url} -O {path} -r{mirror_depth} -%e{ext_links_depth} -c{conn_num}", shell=True)

def launch_updater():
    print("[i] Getting latest updates for MultiDownloader..." + "\n")
    subprocess.call('sh scripts/update.sh', shell=True)

# argparse stores a boolean for -v; map it to the "y"/None convention the
# helpers expect so that --verbose actually takes effect
if args.curl:
    curl_download(args.URL, args.filename, "y" if args.verbose else None)
if args.wget:
    wget_download(args.URL, args.filename, "y" if args.verbose else None)
if args.httrack:
    httrack_download(args.URL, args.filename, args.depth, args.ext_depth, args.conn_num, "y" if args.verbose else None)
if args.update:
    launch_updater()

try:
    main()
except KeyboardInterrupt:
    print("[!] Exiting...")
    sys.exit()
```
| 39.398601 | 149 | 0.671814 | 773 | 5,634 | 4.796895 | 0.244502 | 0.044498 | 0.028317 | 0.030744 | 0.385922 | 0.33603 | 0.276699 | 0.23247 | 0.209817 | 0.162082 | 0 | 0.004824 | 0.15371 | 5,634 | 143 | 150 | 39.398601 | 0.772861 | 0.097089 | 0 | 0.298246 | 0 | 0.008772 | 0.350995 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.061404 | false | 0 | 0.04386 | 0 | 0.105263 | 0.122807 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
958a38d4edf87c352270fdf92a3b1727c3d068e0 | 1,129 | py | Python | forge/kubernetes.py | Acidburn0zzz/forge | c53d99f49abe61a2657a1a41232211bb48ee182d | ["Apache-2.0"] | 1 | 2017-11-15T15:04:44.000Z | 2017-11-15T15:04:44.000Z | forge/kubernetes.py | Acidburn0zzz/forge | c53d99f49abe61a2657a1a41232211bb48ee182d | ["Apache-2.0"] | 2 | 2021-03-20T05:32:38.000Z | 2021-03-26T00:39:11.000Z | forge/kubernetes.py | Acidburn0zzz/forge | c53d99f49abe61a2657a1a41232211bb48ee182d | ["Apache-2.0"] | null | null | null |
```python
import os, glob
from tasks import task, TaskError, get, sh, SHResult
def is_yaml_empty(dir):
    for name in glob.glob("%s/*.yaml" % dir):
        with open(name) as f:
            if f.read().strip():
                return False
    return True

class Kubernetes(object):

    def __init__(self, namespace=None, context=None, dry_run=False):
        self.namespace = namespace or os.environ.get("K8S_NAMESPACE", None)
        self.context = context
        self.dry_run = dry_run

    @task()
    def resources(self, yaml_dir):
        if is_yaml_empty(yaml_dir):
            return []
        cmd = "kubectl", "apply", "--dry-run", "-f", yaml_dir, "-o", "name"
        if self.namespace:
            cmd += "--namespace", self.namespace
        return sh(*cmd).output.split()

    @task()
    def apply(self, yaml_dir):
        if is_yaml_empty(yaml_dir):
            return SHResult("", 0, "")
        cmd = "kubectl", "apply", "-f", yaml_dir
        if self.namespace:
            cmd += "--namespace", self.namespace
        if self.dry_run:
            cmd += "--dry-run",
        result = sh(*cmd)
        return result
```
| 29.710526 | 75 | 0.558902 | 143 | 1,129 | 4.265734 | 0.356643 | 0.080328 | 0.054098 | 0.042623 | 0.252459 | 0.252459 | 0.252459 | 0.121311 | 0.121311 | 0.121311 | 0 | 0.002528 | 0.29938 | 1,129 | 37 | 76 | 30.513514 | 0.768647 | 0 | 0 | 0.25 | 0 | 0 | 0.085031 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.0625 | 0 | 0.40625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
958c59599470ad36c300e0c6dec5381bb27923b6 | 1,952 | py | Python | demucs/ema.py | sparshpriyadarshi/demucs | 7c7f65401db654d750df2b6f4d5b82a0101500b1 | ["MIT"] | 1 | 2022-02-14T05:52:53.000Z | 2022-02-14T05:52:53.000Z | demucs/ema.py | sparshpriyadarshi/demucs | 7c7f65401db654d750df2b6f4d5b82a0101500b1 | ["MIT"] | null | null | null | demucs/ema.py | sparshpriyadarshi/demucs | 7c7f65401db654d750df2b6f4d5b82a0101500b1 | ["MIT"] | null | null | null |
```python
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# Inspired from https://github.com/rwightman/pytorch-image-models
from contextlib import contextmanager
import torch
from .states import swap_state
class ModelEMA:
"""
Perform EMA on a model. You can switch to the EMA weights temporarily
with the `swap` method.
ema = ModelEMA(model)
with ema.swap():
# compute valid metrics with averaged model.
"""
def __init__(self, model, decay=0.9999, unbias=True, device='cpu'):
self.decay = decay
self.model = model
self.state = {}
self.count = 0
self.device = device
self.unbias = unbias
self._init()
def _init(self):
for key, val in self.model.state_dict().items():
if val.dtype != torch.float32:
continue
device = self.device or val.device
if key not in self.state:
self.state[key] = val.detach().to(device, copy=True)
def update(self):
if self.unbias:
self.count = self.count * self.decay + 1
w = 1 / self.count
else:
w = 1 - self.decay
for key, val in self.model.state_dict().items():
if val.dtype != torch.float32:
continue
device = self.device or val.device
self.state[key].mul_(1 - w)
self.state[key].add_(val.detach().to(device), alpha=w)
@contextmanager
def swap(self):
with swap_state(self.model, self.state):
yield
def state_dict(self):
return {'state': self.state, 'count': self.count}
def load_state_dict(self, state):
self.count = state['count']
for k, v in state['state'].items():
self.state[k].copy_(v)
```
| 29.134328 | 73 | 0.585553 | 255 | 1,952 | 4.419608 | 0.376471 | 0.071872 | 0.034605 | 0.031943 | 0.161491 | 0.161491 | 0.161491 | 0.161491 | 0.161491 | 0.161491 | 0 | 0.01034 | 0.306352 | 1,952 | 66 | 74 | 29.575758 | 0.822009 | 0.227971 | 0 | 0.195122 | 0 | 0 | 0.015646 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.146341 | false | 0 | 0.073171 | 0.02439 | 0.268293 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
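The `ModelEMA` class in the row above keeps an exponential moving average of a model's float32 weights. A minimal usage sketch, assuming the demucs package (and hence its `swap_state` dependency) is installed; the toy model, data, and loop are placeholders, not part of the original file:

```python
import torch
from demucs.ema import ModelEMA  # assumes the package above is installed

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
ema = ModelEMA(model, decay=0.999)

for _ in range(100):
    x, y = torch.randn(8, 4), torch.randn(8, 2)
    loss = torch.nn.functional.mse_loss(model(x), y)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    ema.update()  # fold the freshly updated weights into the running average

with ema.swap():  # temporarily evaluate with the averaged weights
    print(model(torch.randn(1, 4)))
```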
958e7f740b7a101b6adbafb3854a0ff8c7e6558c | 12,328 | py | Python | gws.py | intelligence-csd-auth-gr/greek-words-evolution | ab1ee717f7567ffa8171e64f835932af7502955d | ["MIT"] | 9 | 2020-07-12T13:45:24.000Z | 2021-12-05T16:08:58.000Z | word_embeddings/we.py | emiltj/NLP_exam_2021 | 9342e8dc9ad684927bbfa5eb6c125dd53c14cccb | ["MIT"] | 2 | 2021-03-30T14:35:26.000Z | 2022-03-12T00:40:17.000Z | word_embeddings/we.py | emiltj/NLP_exam_2021 | 9342e8dc9ad684927bbfa5eb6c125dd53c14cccb | ["MIT"] | 2 | 2021-04-23T13:07:55.000Z | 2021-12-16T14:06:51.000Z |
```python
import warnings
import argparse
import os
import logging
import lib.metadata as metadata
import lib.model as model
import lib.text as text
import lib.website as website
warnings.filterwarnings('ignore')
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################
DATA_FOLDER = os.path.join(os.path.curdir, 'data')
MODELS_FOLDER = os.path.join(os.path.curdir, 'output', 'models')
SCRAPPED_PDF_FOLDER = os.path.join(os.path.curdir, 'data', 'scrap', 'pdf')
FASTTEXT_PATH = os.path.join(os.path.curdir, 'fastText', 'fasttext')
SCRAPPED_TEXT_FOLDER = os.path.join(os.path.curdir, 'data', 'scrap', 'text')
PRODUCED_TEXTS_FOLDER = os.path.join(os.path.curdir, 'output', 'texts')
LIB_FOLDER = os.path.join(os.path.curdir, 'lib')
MODEL_FILE_EXTENSION = '.model'
TEXT_FILE_EXTENSION = '.txt'
PDF_FILE_EXTENSION = '.pdf'
POST_URLS_FILENAME = 'post_urls.pickle'
METADATA_FILENAME = 'raw_metadata.csv'
CORPORA = [
    {
        'name': 'openbook',
        'textFilesFolder': os.path.join(DATA_FOLDER, 'corpora', 'openbook', 'text', 'parsable'),
        'metadataFilename': os.path.join(DATA_FOLDER, 'corpora', 'openbook', 'metadata.tsv')
    },
    {
        'name': 'project_gutenberg',
        'textFilesFolder': os.path.join(DATA_FOLDER, 'corpora', 'project_gutenberg', 'text', 'parsable'),
        'metadataFilename': os.path.join(DATA_FOLDER, 'corpora', 'project_gutenberg', 'metadata.tsv')
    },
]
COMBINED_TEXTS_FILENAME = 'corpus_combined.txt'
COMBINED_MODEL_FILENAME = os.path.join(MODELS_FOLDER, 'corpus_combined_model.bin')
NEIGHBORS_COUNT = 20
#####################################
# Set up required folders and perform any other preliminary tasks
#####################################
if not os.path.exists(SCRAPPED_PDF_FOLDER):
    os.makedirs(SCRAPPED_PDF_FOLDER)
if not os.path.exists(SCRAPPED_TEXT_FOLDER):
    os.makedirs(SCRAPPED_TEXT_FOLDER)
########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################
def websiteParser(args):
    if args.action == 'fetchLinks':
        logger.info('Selected action: Fetch website links')
        links = website.fetchLinks(args.target)
        print(links)
    elif args.action == 'fetchMetadata':
        logger.info('Selected action: Fetch website metadata')
        metadata = website.fetchMetadata(args.target, PDF_FILE_EXTENSION, METADATA_FILENAME)
        print(metadata)
    elif args.action == 'fetchFiles':
        logger.info('Selected action: Fetch website files')
        website.fetchFiles(args.target, PDF_FILE_EXTENSION, METADATA_FILENAME, SCRAPPED_PDF_FOLDER)

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################
def metadataParser(args):
    if args.action == 'printStandard':
        combinedMetadata = metadata.getCombined(CORPORA, args.corpus, False)
        print(combinedMetadata)
    elif args.action == 'printEnhanced' or args.action == 'exportEnhanced':
        combinedMetadata = metadata.getCombined(CORPORA, args.corpus, True)
        if args.action == 'printEnhanced':
            print(combinedMetadata)
        if args.action == 'exportEnhanced':
            text.exportMetadata(combinedMetadata)

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################
def textParser(args):
    combinedMetadata = metadata.getCombined(CORPORA, args.corpus, True)
    if args.action == 'exportByPeriod':
        logger.info('Selected action: Export combined text by period')
        text.exportTextByPeriod(combinedMetadata, args.fromYear, args.toYear, args.splitYearsInterval)
    elif args.action == 'extractFromPDF':
        logger.info('Selected action: Extract text from PDF')
        text.extractTextFromPdf(combinedMetadata, SCRAPPED_PDF_FOLDER, PDF_FILE_EXTENSION, SCRAPPED_TEXT_FOLDER,
                                TEXT_FILE_EXTENSION)

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################
def modelParser(args):
    if args.action == 'create':
        logger.info('Selected action: Create models')
        model.createModelsFromTextFiles(args.textsFolder, TEXT_FILE_EXTENSION, MODELS_FOLDER, MODEL_FILE_EXTENSION)
    elif args.action == 'getNN':
        logger.info('Selected action: Retrieve Nearest Neighbours')
        modelFilename = args.period + MODEL_FILE_EXTENSION
        nearestNeighbours = model.getNeighboursForWord(text.preProcessText(args.word), modelFilename, MODELS_FOLDER,
                                                       FASTTEXT_PATH, NEIGHBORS_COUNT)
        print(nearestNeighbours)
    elif args.action == 'getCD':
        logger.info('Selected action: Get cosine distance')
        model.exportByDistance(args.action, MODEL_FILE_EXTENSION, MODELS_FOLDER, args.fromYear, args.toYear,
                               NEIGHBORS_COUNT, FASTTEXT_PATH)
    elif args.action == 'getCS':
        logger.info('Selected action: Get cosine similarity')
        model.exportByDistance(args.action, MODEL_FILE_EXTENSION, MODELS_FOLDER, args.fromYear, args.toYear,
                               NEIGHBORS_COUNT, FASTTEXT_PATH)
########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################
parser = argparse.ArgumentParser()
parser.add_argument('--version', action='version', version='1.0.0')
subparsers = parser.add_subparsers()
########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################
parser_website = subparsers.add_parser('website')
parser_website.add_argument('--target', default='openbook', choices=['openbook'],
                            help='Target website to scrap data from')
parser_website.add_argument('--action', default='fetchFiles', choices=['fetchLinks', 'fetchMetadata', 'fetchFiles'],
                            help='The action to execute on the selected website')
parser_website.set_defaults(func=websiteParser)
########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################
parser_metadata = subparsers.add_parser('metadata')
parser_metadata.add_argument('--corpus', default='all', choices=['all', 'openbook', 'project_gutenberg'],
                             help='The name of the target corpus to work with')
parser_metadata.add_argument('--action', default='printStandard',
                             choices=['printStandard', 'printEnhanced', 'exportEnhanced'],
                             help='Action to perform against the metadata of the selected text corpus')
parser_metadata.add_argument('--fromYear', default=1800, type=int, help='The target starting year to extract data from')
parser_metadata.add_argument('--toYear', default=1900, type=int, help='The target ending year to extract data from')
parser_metadata.add_argument('--splitYearsInterval', default=10, type=int,
                             help='The interval to split the years with and export the extracted data')
parser_metadata.set_defaults(func=metadataParser)
########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################
parser_text = subparsers.add_parser('text')
parser_text.add_argument('--corpus', default='all', choices=['all', 'openbook', 'project_gutenberg'],
                         help='The name of the target corpus to work with')
parser_text.add_argument('--action', default='exportByPeriod', choices=['exportByPeriod', 'extractFromPDF'],
                         help='Action to perform against the selected text corpus')
parser_text.add_argument('--fromYear', default=1800, type=int, help='The target starting year to extract data from')
parser_text.add_argument('--toYear', default=1900, type=int, help='The target ending year to extract data from')
parser_text.add_argument('--splitYearsInterval', default=10, type=int,
                         help='The interval to split the years with and export the extracted data')
parser_text.set_defaults(func=textParser)
########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################
parser_model = subparsers.add_parser('model')
parser_model.add_argument('--action', default='getNN', choices=['create', 'getNN', 'getCS', 'getCD'],
                          help='Action to perform against the selected model')
parser_model.add_argument('--word', help='Target word to get nearest neighbours for')
parser_model.add_argument('--period', help='The target period to load the model from')
parser_model.add_argument('--textsFolder', default='./output/texts',
                          help='The target folder that contains the texts files')
parser_model.add_argument('--fromYear', default='1800', help='the target starting year to create the model for')
parser_model.add_argument('--toYear', default='1900', help='the target ending year to create the model for')
parser_model.set_defaults(func=modelParser)
########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################
if __name__ == '__main__':
    args = parser.parse_args()
    args.func(args)
```
| 56.036364 | 120 | 0.455224 | 936 | 12,328 | 5.83547 | 0.17735 | 0.023068 | 0.02197 | 0.039546 | 0.431527 | 0.398572 | 0.33138 | 0.283962 | 0.242402 | 0.204321 | 0 | 0.003117 | 0.141304 | 12,328 | 219 | 121 | 56.292237 | 0.512847 | 0.111291 | 0 | 0.089552 | 0 | 0 | 0.270628 | 0.003038 | 0 | 0 | 0 | 0 | 0 | 1 | 0.029851 | false | 0 | 0.059701 | 0 | 0.089552 | 0.067164 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
958ef26cd63d83883ded41820724c2716c93e70b | 2,716 | py | Python | ssepaperless/Organizer/views.py | michaelkressaty/ssepaperless | d536f9106fd499e664d3c03fb6331b4feb1cc4ca | ["BSD-3-Clause"] | null | null | null | ssepaperless/Organizer/views.py | michaelkressaty/ssepaperless | d536f9106fd499e664d3c03fb6331b4feb1cc4ca | ["BSD-3-Clause"] | null | null | null | ssepaperless/Organizer/views.py | michaelkressaty/ssepaperless | d536f9106fd499e664d3c03fb6331b4feb1cc4ca | ["BSD-3-Clause"] | null | null | null |
```python
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponse
from django.template import RequestContext, loader
from Organizer.models import Department
from Organizer.models import Advisor
from Organizer.models import Student
from Organizer.models import Course
from Organizer.models import Degree
from Organizer.models import Certificate
from Organizer.models import Degree_Core_Course_Structure
from Organizer.models import Degree_Elective_Course_Structure
from Organizer.models import Certificate_Course_Structure
def index(request):
    department_list = Department.objects.all()
    template = loader.get_template('Organizer/index.html')
    context = RequestContext(request, {
        'department_list': department_list
    })
    return HttpResponse(template.render(context))

def index2(request, department_id):
    department = get_object_or_404(Department, pk=department_id)
    return render(request, 'Organizer/index2.html', {'department': department})

def advisorinfo(request, department_id, advisor_id):
    department = get_object_or_404(Department, pk=department_id)
    advisor = get_object_or_404(Advisor, pk=advisor_id)
    return render(request, 'Organizer/advisorinfo.html', {'department': department, 'advisor': advisor})

def detail(request, department_id, advisor_id):
    department = get_object_or_404(Department, pk=department_id)
    advisor = get_object_or_404(Advisor, pk=advisor_id)
    return render(request, 'Organizer/detail.html', {'department': department, 'advisor': advisor})

def advisordegree(request, department_id, advisor_id):
    department = get_object_or_404(Department, pk=department_id)
    advisor = get_object_or_404(Advisor, pk=advisor_id)
    return render(request, 'Organizer/advisordegree.html', {'department': department, 'advisor': advisor})

def degree(request, department_id, degree_id):
    department = get_object_or_404(Department, pk=department_id)
    degree = get_object_or_404(Degree, pk=degree_id)
    return render(request, 'Organizer/degree.html', {'department': department, 'degree': degree})

def coursedegree(request, degree_id, degree_core_course_structure_id):
    core_course_structure = get_object_or_404(Degree_Core_Course_Structure, pk=degree_core_course_structure_id)
    return render(request, 'Organizer/coursedegree.html', {'core_course_structure': core_course_structure})

def certificate(request, department_id, certificate_id):
    department = get_object_or_404(Department, pk=department_id)
    certificate = get_object_or_404(Certificate, pk=certificate_id)
    return render(request, 'Organizer/certificate.html', {'department': department, 'certificate': certificate})
# Create your views here.
```
| 48.5 | 111 | 0.796024 | 340 | 2,716 | 6.091176 | 0.132353 | 0.056494 | 0.069049 | 0.08788 | 0.534524 | 0.377595 | 0.279575 | 0.279575 | 0.279575 | 0.279575 | 0 | 0.016991 | 0.111561 | 2,716 | 55 | 112 | 49.381818 | 0.841276 | 0.008468 | 0 | 0.2 | 0 | 0 | 0.120401 | 0.070977 | 0 | 0 | 0 | 0 | 0 | 1 | 0.177778 | false | 0 | 0.266667 | 0 | 0.622222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
95908c4c021ce144e1c7f298836a5c4a2cc424d8 | 462 | py | Python | project/3/cal.py | Aries-Dawn/Cpp-Program-Design | 9d4fc9a902fff2f76e41314f5d6c52871d30a511 | ["MIT"] | null | null | null | project/3/cal.py | Aries-Dawn/Cpp-Program-Design | 9d4fc9a902fff2f76e41314f5d6c52871d30a511 | ["MIT"] | null | null | null | project/3/cal.py | Aries-Dawn/Cpp-Program-Design | 9d4fc9a902fff2f76e41314f5d6c52871d30a511 | ["MIT"] | null | null | null |
```python
import numpy as np
matrixA = np.loadtxt('./mat-A-32.txt')
matrixB = np.loadtxt('./mat-B-32.txt')
checking = np.loadtxt('./out32.txt')
result = np.dot(matrixA, matrixB)
diff = result - checking
print(checking)
print(result)
print(diff)
# np.absolute returns a new array; the original call discarded the result,
# so take the absolute value inline when reporting the largest error
print(np.max(np.absolute(diff)))
[rows, cols] = diff.shape
with open('./out2048-diff.txt', 'w') as f:
    for i in range(rows):
        for j in range(cols):
            f.write("%.6f " % diff[i, j])
        f.write('\n')
```
| 23.1 | 42 | 0.623377 | 75 | 462 | 3.84 | 0.48 | 0.09375 | 0.083333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028947 | 0.177489 | 462 | 19 | 43 | 24.315789 | 0.728947 | 0 | 0 | 0 | 0 | 0 | 0.140693 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.058824 | 0 | 0.058824 | 0.235294 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
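The `cal.py` row above verifies a matrix product element by element; the same pass/fail verdict fits in one NumPy call. A minimal sketch, using the script's file names, with illustrative tolerances:

```python
import numpy as np

a = np.loadtxt('./mat-A-32.txt')
b = np.loadtxt('./mat-B-32.txt')
reference = np.loadtxt('./out32.txt')

# True when every element of a @ b matches the reference within tolerance.
print(np.allclose(a @ b, reference, rtol=1e-5, atol=1e-8))
```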
9594993f4525fce4f5b648804a7994f70f4ed262 | 4,773 | py | Python | ci/check-documentation.py | FredrikBlomgren/aff3ct | fa616bd923b2dcf03a4cf119cceca51cf810d483 | ["MIT"] | 315 | 2016-06-21T13:32:14.000Z | 2022-03-28T09:33:59.000Z | ci/check-documentation.py | a-panella/aff3ct | 61509eb756ae3725b8a67c2d26a5af5ba95186fb | ["MIT"] | 153 | 2017-01-17T03:51:06.000Z | 2022-03-24T15:39:26.000Z | ci/check-documentation.py | a-panella/aff3ct | 61509eb756ae3725b8a67c2d26a5af5ba95186fb | ["MIT"] | 119 | 2017-01-04T14:31:58.000Z | 2022-03-21T08:34:16.000Z |
```python
#!/usr/bin/env python3
import argparse
import sys
import re
import signal  # needed by run_aff3ct's KeyboardInterrupt handler; was missing
import subprocess
import os
import glob
import copy

import aff3ct_help_parser as ahp
# read all the lines from the given file and set them in a list of string lines with striped \n \r
def readFileInTable(filename):
    aFile = open(filename, "r")
    lines = []
    for line in aFile:
        line = re.sub('\r', '', line.rstrip('\n'))
        if len(line) > 0:
            lines.append(line)
    aFile.close()
    return lines

def get_keys(filename):
    lines = readFileInTable(filename)
    list_keys = []
    for l in lines:
        if l.startswith(".. |"):
            start_pos = 4
            end_pos = l.find("|", start_pos)
            list_keys.append(l[start_pos:end_pos])
    return list_keys

def run_aff3ct(args_list):
    try:
        processAFFECT = subprocess.Popen(args_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdoutAFFECT, stderrAFFECT) = processAFFECT.communicate()
    except KeyboardInterrupt:
        os.kill(processAFFECT.pid, signal.SIGINT)
        (stdoutAFFECT, stderrAFFECT) = processAFFECT.communicate()
    err = stderrAFFECT.decode(encoding='UTF-8')
    std = stdoutAFFECT.decode(encoding='UTF-8').split("\n")
    return std, err

def aff3ct_helpmap_to_keys_list(help_map, aff3ct_keys):  # fill aff3ct_keys from help_map
    # ahp.print_help_map(help_map)
    for m in help_map:  # module
        for a in help_map[m]:  # argument
            if type(help_map[m][a]) is dict:
                key = help_map[m][a]["key"]
                if key != "":
                    try:
                        aff3ct_keys.index(key)
                    except Exception as e:
                        aff3ct_keys.append(key)
            else:
                pass

def get_aff3ct_help_keys(aff3ct_path):
    # get the available codes and simulation types
    args_list = [aff3ct_path, "-h"]
    std, err = run_aff3ct(args_list)
    helpMap = ahp.help_to_map(std)
    codesList = helpMap["Simulation"]["--sim-cde-type, -C"]["limits"][1:-1].split("|")
    simList = helpMap["Simulation"]["--sim-type"]["limits"][1:-1].split("|")
    # try to run all codes and simus to get their helps
    aff3ct_keys = []
    for c in codesList:
        for s in simList:
            args_list = [aff3ct_path, "-C", c, "-H", "-k", "--sim-type", s, "-p", "8"]
            std, err = run_aff3ct(args_list)
            helpMap = ahp.help_to_map(std)
            aff3ct_helpmap_to_keys_list(helpMap, aff3ct_keys)
    return aff3ct_keys

def get_doc_keys(doc_path):
    doc_keys = []
    for filename in glob.iglob(doc_path + '**/*.rst', recursive=True):
        pattern = re.compile(r"\|(factory::[^ ]*)\|")
        for i, line in enumerate(open(filename)):
            for match in re.finditer(pattern, line):
                doc_keys.append(match.group(1))
    # remove duplicates
    doc_keys = list(set(doc_keys))
    return doc_keys

def display_keys(keys):
    for e in keys:
        print(" - [" + e + "]")
    if len(keys) == 0:
        print(" The keys list is empty.")

def check_keys(keys_file, aff3ct_path, doc_path):
    list_keys = get_keys(keys_file)
    aff3ct_keys = get_aff3ct_help_keys(aff3ct_path)
    doc_keys = get_doc_keys(doc_path)
    list_keys.sort()
    aff3ct_keys.sort()
    doc_keys.sort()
    aff3ct_keys_save = copy.deepcopy(aff3ct_keys)
    not_in_aff3ct_keys = []
    for k in list_keys:
        try:
            idx = aff3ct_keys.index(k)
            del aff3ct_keys[idx]
        except Exception as e:
            not_in_aff3ct_keys.append(k)
    not_in_doc_keys = []
    for k in aff3ct_keys_save:
        try:
            idx = doc_keys.index(k)
            del doc_keys[idx]
        except Exception as e:
            not_in_doc_keys.append(k)
    # manages special key exceptions
    exceptions_not_in_doc_keys = ["factory::Frozenbits_generator::p+pb-path"]
    exceptions_doc_keys = ["factory::BFER::p+mpi-comm-freq", "factory::Launcher::except-a2l"]
    for e in exceptions_not_in_doc_keys:
        if e in not_in_doc_keys:
            not_in_doc_keys.remove(e)
    for e in exceptions_doc_keys:
        if e in doc_keys:
            doc_keys.remove(e)
    print("Keys used in the AFF3CT help but not defined in the strings database (undocumented keys):")
    display_keys(aff3ct_keys)
    print()
    print("Keys used in the AFF3CT doc but not used in the AFF3CT help:")
    display_keys(doc_keys)
    print()
    print("Keys used in the AFF3CT help but not used in the AFF3CT doc:")
    display_keys(not_in_doc_keys)
    print()
    print("Keys defined in the strings database but not used in the AFF3CT help or in the AFF3CT doc:")
    display_keys(not_in_aff3ct_keys)
    print()
    nDiff = len(aff3ct_keys) + len(doc_keys) + len(not_in_doc_keys)
    return nDiff

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--keys', action='store', dest='keys_file', type=str, default='doc/strings.rst')
    parser.add_argument('--aff3ct', action='store', dest='aff3ct_path', type=str, default='build/bin/aff3ct')
    parser.add_argument('--doc', action='store', dest='doc_path', type=str, default='doc/source/user/simulation/parameters/')
    args = parser.parse_args()

    nDiff = check_keys(args.keys_file, args.aff3ct_path, args.doc_path)
    sys.exit(nDiff)
```
| 27.431034 | 128 | 0.707521 | 750 | 4,773 | 4.286667 | 0.24 | 0.054432 | 0.025194 | 0.02986 | 0.216174 | 0.133126 | 0.115086 | 0.099533 | 0.049145 | 0.027994 | 0 | 0.013951 | 0.159019 | 4,773 | 174 | 129 | 27.431034 | 0.786996 | 0.070605 | 0 | 0.134921 | 0 | 0 | 0.159024 | 0.030946 | 0 | 0 | 0 | 0 | 0 | 1 | 0.063492 | false | 0.007937 | 0.063492 | 0 | 0.174603 | 0.079365 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
9595a509a88acc24d2199e14d5a84b03b3fb5415 | 677 | py | Python | todoster/list_projects.py | SophieAu/todoster | 6f69f7b254683d63f60f934eafa8971e78df7eb2 | ["MIT"] | 5 | 2020-08-05T21:02:35.000Z | 2021-11-11T14:31:35.000Z | todoster/list_projects.py | SophieAu/todoster | 6f69f7b254683d63f60f934eafa8971e78df7eb2 | ["MIT"] | 1 | 2020-09-24T04:41:20.000Z | 2020-09-28T04:37:50.000Z | todoster/list_projects.py | SophieAu/todoster | 6f69f7b254683d63f60f934eafa8971e78df7eb2 | ["MIT"] | 1 | 2021-08-09T19:23:24.000Z | 2021-08-09T19:23:24.000Z |
```python
from todoster.file_operations import load_projects
from todoster.output_formatter import format_string
def list_projects(arguments):
    projects = load_projects()
    if not arguments.show_all_projects:
        projects = list(filter(lambda x: x["active"], projects))

    print()
    project_counter = 1
    for project in projects:
        counter = format_string(str(project_counter).rjust(3), dim=True)
        title = format_string(project["title"], dim=(not project["active"]))
        shortcode = format_string("#" + project["shortcode"], color=project["color"])
        print(counter + " " + title + " (" + shortcode + ")")
        project_counter += 1
    print()
```
| 33.85 | 85 | 0.669129 | 78 | 677 | 5.628205 | 0.461538 | 0.109339 | 0.068337 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005566 | 0.20384 | 677 | 19 | 86 | 35.631579 | 0.808905 | 0 | 0 | 0.133333 | 0 | 0 | 0.053176 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.133333 | 0 | 0.2 | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
95988a5a0c747ad5cc792f45a029f70fc328bc8e | 621 | py | Python | src/game_test.py | TomNo/tictactoe-mcts | 5d5db97f54fe5a3bf7c9afaaa4d74984fdb30ec4 | ["MIT"] | null | null | null | src/game_test.py | TomNo/tictactoe-mcts | 5d5db97f54fe5a3bf7c9afaaa4d74984fdb30ec4 | ["MIT"] | null | null | null | src/game_test.py | TomNo/tictactoe-mcts | 5d5db97f54fe5a3bf7c9afaaa4d74984fdb30ec4 | ["MIT"] | null | null | null |
```python
#!/usr/bin/env python
__author__ = 'Tomas Novacik'
import unittest2
from game import Game
from board import Board, PlayerType, Move
class GameTest(unittest2.TestCase):
    def test_winning_move(self):
        game = Game()
        game.start()
        # set winning status to board
        board = Board()
        [board.place_move(Move(0, i, PlayerType.CIRCLE)) for i in range(4)]
        winning_move = 0, 4
        game._board = board
        game.move(*winning_move)
        self.assertTrue(game.is_finished)

    def test_clone(self):
        game = Game()
        game.start()
        game.clone()
# eof
```
| 18.264706 | 75 | 0.613527 | 79 | 621 | 4.670886 | 0.481013 | 0.086721 | 0.081301 | 0.086721 | 0.113821 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013483 | 0.283414 | 621 | 33 | 76 | 18.818182 | 0.81573 | 0.083736 | 0 | 0.222222 | 0 | 0 | 0.022968 | 0 | 0 | 0 | 0 | 0 | 0.055556 | 1 | 0.111111 | false | 0 | 0.166667 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
95993548b5a77661a71dcd96b3ee1f6f35d686ce | 1,911 | py | Python | skills_taxonomy_v2/pipeline/skills_extraction/get_sentence_embeddings_utils.py | india-kerle/skills-taxonomy-v2 | a71366dfea3c35580dbafddba9470f83795805ae | ["MIT"] | 3 | 2021-11-21T17:21:12.000Z | 2021-12-10T21:19:57.000Z | skills_taxonomy_v2/pipeline/skills_extraction/get_sentence_embeddings_utils.py | india-kerle/skills-taxonomy-v2 | a71366dfea3c35580dbafddba9470f83795805ae | ["MIT"] | 16 | 2021-10-06T11:20:35.000Z | 2022-02-02T11:44:28.000Z | skills_taxonomy_v2/pipeline/skills_extraction/get_sentence_embeddings_utils.py | india-kerle/skills-taxonomy-v2 | a71366dfea3c35580dbafddba9470f83795805ae | ["MIT"] | 1 | 2021-10-04T12:27:20.000Z | 2021-10-04T12:27:20.000Z |
```python
"""
Functions to mask sentences of undesirable words (stopwords, punctuation etc).
Used in get_sentence_embeddings.py to process sentences before finding embeddings.
"""
import re
from skills_taxonomy_v2.pipeline.skills_extraction.cleaning_sentences import (
    separate_camel_case,
)

def is_token_word(token, token_len_threshold, stopwords, custom_stopwords):
    """
    Returns true if the token:
    - Doesn't contain 'www'
    - Isn't too long (if it is it is usually garbage)
    - Isn't a proper noun/number/quite a few other word types
    - Isn't a word with numbers in (these are always garbage)
    """
    return (
        ("www" not in token.text)
        and (len(token) < token_len_threshold)
        and (
            token.pos_
            not in [
                "PROPN",
                "NUM",
                "SPACE",
                "X",
                "PUNCT",
                "ADP",
                "AUX",
                "CONJ",
                "DET",
                "PART",
                "PRON",
                "SCONJ",
            ]
        )
        and (not re.search(r"\d", token.text))  # raw string avoids an invalid-escape warning
        and (not token.text.lower() in stopwords + custom_stopwords)
        and (not token.lemma_.lower() in stopwords + custom_stopwords)
    )

def process_sentence_mask(
    sentence, nlp, bert_vectorizer, token_len_threshold, stopwords, custom_stopwords
):
    """
    Mask sentence of stopwords etc, then get sentence embedding
    """
    sentence = separate_camel_case(sentence)
    doc = nlp(sentence)
    masked_sentence = ""
    for i, token in enumerate(doc):
        if is_token_word(token, token_len_threshold, stopwords, custom_stopwords):
            masked_sentence += " " + token.text
        else:
            masked_sentence += " [MASK]"
    return masked_sentence
```
| 29.4 | 85 | 0.553114 | 206 | 1,911 | 4.956311 | 0.470874 | 0.073457 | 0.117532 | 0.064643 | 0.212537 | 0.151812 | 0.111655 | 0.111655 | 0.111655 | 0.111655 | 0 | 0.000814 | 0.357405 | 1,911 | 64 | 86 | 29.859375 | 0.830619 | 0.26269 | 0 | 0 | 0 | 0 | 0.04384 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04878 | false | 0 | 0.04878 | 0 | 0.146341 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
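A minimal usage sketch for `process_sentence_mask` from the row above, assuming the `skills_taxonomy_v2` package and a spaCy English model are installed; `None` is passed for `bert_vectorizer` because the function accepts but never uses it, and the sentence and stopword lists are illustrative:

```python
import spacy

nlp = spacy.load("en_core_web_sm")  # assumes this spaCy model is installed

masked = process_sentence_mask(
    "Experience with PythonProgramming and 5 years of SQL",
    nlp,
    None,  # bert_vectorizer is accepted but unused by the function
    token_len_threshold=20,
    stopwords=["and", "of", "with"],
    custom_stopwords=["experience", "years"],
)
print(masked)  # tokens failing is_token_word come back as " [MASK]"
```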
959a854d76fcee93383a4561465ab39d08da02e1 | 1,000 | py | Python | migrations/versions/033809bcaf32_destinations.py | RagtagOpen/carpools | 56b8f6491a2d347b637b345fbad7bc744130ec7f | ["Apache-2.0"] | 11 | 2017-08-23T17:41:43.000Z | 2018-10-24T03:00:38.000Z | migrations/versions/033809bcaf32_destinations.py | RagtagOpen/carpools | 56b8f6491a2d347b637b345fbad7bc744130ec7f | ["Apache-2.0"] | 480 | 2017-07-14T00:29:11.000Z | 2020-01-06T19:04:51.000Z | migrations/versions/033809bcaf32_destinations.py | RagtagOpen/carpools | 56b8f6491a2d347b637b345fbad7bc744130ec7f | ["Apache-2.0"] | 22 | 2017-07-07T00:07:32.000Z | 2020-02-27T19:43:14.000Z |
```python
"""destinations
Revision ID: 033809bcaf32
Revises: 4a77b8fb792a
Create Date: 2017-08-24 05:56:45.166590
"""
from alembic import op
import sqlalchemy as sa
import geoalchemy2
# revision identifiers, used by Alembic.
revision = '033809bcaf32'
down_revision = '4a77b8fb792a'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('destinations',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('point', geoalchemy2.types.Geometry(geometry_type='POINT'), nullable=True),
        sa.Column('name', sa.String(length=80), nullable=True),
        sa.Column('address', sa.String(length=300), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###

def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('destinations')
    # ### end Alembic commands ###
```
| 27.027027 | 89 | 0.698 | 122 | 1,000 | 5.663934 | 0.516393 | 0.057887 | 0.081042 | 0.086831 | 0.127352 | 0.127352 | 0.127352 | 0.127352 | 0 | 0 | 0 | 0.067536 | 0.156 | 1,000 | 36 | 90 | 27.777778 | 0.751185 | 0.294 | 0 | 0 | 0 | 0 | 0.124066 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.166667 | 0 | 0.277778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
959ac1baff7cea9daabf593760b72f74cd08cb19 | 778 | py | Python | porcupine/plugins/gotoline.py | rscales02/porcupine | 91b3c90d19d2291c0a60ddb9dffac931147cde3c | ["MIT"] | null | null | null | porcupine/plugins/gotoline.py | rscales02/porcupine | 91b3c90d19d2291c0a60ddb9dffac931147cde3c | ["MIT"] | null | null | null | porcupine/plugins/gotoline.py | rscales02/porcupine | 91b3c90d19d2291c0a60ddb9dffac931147cde3c | ["MIT"] | null | null | null |
```python
from tkinter import simpledialog
from porcupine import actions, get_tab_manager, tabs
def gotoline():
    tab = get_tab_manager().select()

    # simpledialog isn't ttk yet, but it's not a huge problem imo
    lineno = simpledialog.askinteger(
        "Go to Line", "Type a line number and press Enter:")
    if lineno is not None:  # not cancelled
        # there's no need to do a bounds check because tk ignores out-of-bounds
        # text indexes
        column = tab.textwidget.index('insert').split('.')[1]
        tab.textwidget.mark_set('insert', '%d.%s' % (lineno, column))
        tab.textwidget.see('insert')
        tab.on_focus()

def setup():
    actions.add_command("Edit/Go to Line", gotoline, '<Control-l>',
                        tabtypes=[tabs.FileTab])
```
| 31.12 | 79 | 0.638817 | 107 | 778 | 4.579439 | 0.672897 | 0.079592 | 0.053061 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001695 | 0.241645 | 778 | 24 | 80 | 32.416667 | 0.828814 | 0.200514 | 0 | 0 | 0 | 0 | 0.153971 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.142857 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
959aea6673bc315fd2a49870629b49b87e1b393a | 4,634 | py | Python | preprocessing.py | JackAndCole/Detection-of-sleep-apnea-from-single-lead-ECG-signal-using-a-time-window-artificial-neural-network | 692bb7d969b7eb4a0ad9b221660901a863bc76e2 | ["Apache-2.0"] | 7 | 2020-01-22T03:23:39.000Z | 2021-12-26T05:02:10.000Z | preprocessing.py | JackAndCole/Detection-of-sleep-apnea-from-single-lead-ECG-signal-using-a-time-window-artificial-neural-network | 692bb7d969b7eb4a0ad9b221660901a863bc76e2 | ["Apache-2.0"] | null | null | null | preprocessing.py | JackAndCole/Detection-of-sleep-apnea-from-single-lead-ECG-signal-using-a-time-window-artificial-neural-network | 692bb7d969b7eb4a0ad9b221660901a863bc76e2 | ["Apache-2.0"] | 1 | 2020-05-29T06:32:24.000Z | 2020-05-29T06:32:24.000Z |
```python
import os
import pickle
import sys
import warnings
from collections import OrderedDict
import biosppy.signals.tools as st
import numpy as np
import wfdb
from biosppy.signals.ecg import correct_rpeaks, hamilton_segmenter
from hrv.classical import frequency_domain, time_domain
from scipy.signal import medfilt
from tqdm import tqdm
warnings.filterwarnings(action="ignore")
base_dir = "dataset"
fs = 100 # ECG sample frequency
hr_min = 20
hr_max = 300
def feature_extraction(recording, signal, labels):
    data = []
    for i in tqdm(range(len(labels)), desc=recording, file=sys.stdout):
        segment = signal[i * fs * 60:(i + 1) * fs * 60]
        segment, _, _ = st.filter_signal(segment, ftype='FIR', band='bandpass', order=int(0.3 * fs),
                                         frequency=[3, 45], sampling_rate=fs)
        # Finding R peaks
        rpeaks, = hamilton_segmenter(segment, sampling_rate=fs)
        rpeaks, = correct_rpeaks(segment, rpeaks, sampling_rate=fs, tol=0.1)
        # Extracting feature
        label = 0 if labels[i] == "N" else 1
        if 40 <= len(rpeaks) <= 200:  # Remove abnormal R peaks
            rri_tm, rri = rpeaks[1:] / float(fs), np.diff(rpeaks, axis=-1) / float(fs)
            rri = medfilt(rri, kernel_size=3)
            edr_tm, edr = rpeaks / float(fs), segment[rpeaks]
            # Remove physiologically impossible HR signal
            if np.all(np.logical_and(60 / rri >= hr_min, 60 / rri <= hr_max)):
                rri_time_features, rri_frequency_features = time_domain(rri * 1000), frequency_domain(rri, rri_tm)
                edr_frequency_features = frequency_domain(edr, edr_tm)
                # 6 + 6 + 6 + 1 = 19
                data.append([
                    rri_time_features["rmssd"], rri_time_features["sdnn"], rri_time_features["nn50"],
                    rri_time_features["pnn50"], rri_time_features["mrri"], rri_time_features["mhr"],
                    rri_frequency_features["vlf"] / rri_frequency_features["total_power"],
                    rri_frequency_features["lf"] / rri_frequency_features["total_power"],
                    rri_frequency_features["hf"] / rri_frequency_features["total_power"],
                    rri_frequency_features["lf_hf"], rri_frequency_features["lfnu"], rri_frequency_features["hfnu"],
                    edr_frequency_features["vlf"] / edr_frequency_features["total_power"],
                    edr_frequency_features["lf"] / edr_frequency_features["total_power"],
                    edr_frequency_features["hf"] / edr_frequency_features["total_power"],
                    edr_frequency_features["lf_hf"], edr_frequency_features["lfnu"], edr_frequency_features["hfnu"],
                    label
                ])
            else:
                data.append([np.nan] * 18 + [label])
        else:
            data.append([np.nan] * 18 + [label])
    data = np.array(data, dtype="float")
    return data

if __name__ == "__main__":
    apnea_ecg = OrderedDict()

    # train data
    recordings = [
        "a01", "a02", "a03", "a04", "a05", "a06", "a07", "a08", "a09", "a10",
        "a11", "a12", "a13", "a14", "a15", "a16", "a17", "a18", "a19", "a20",
        "b01", "b02", "b03", "b04", "b05",
        "c01", "c02", "c03", "c04", "c05", "c06", "c07", "c08", "c09", "c10"
    ]
    for recording in recordings:
        signal = wfdb.rdrecord(os.path.join(base_dir, recording), channels=[0]).p_signal[:, 0]
        labels = wfdb.rdann(os.path.join(base_dir, recording), extension="apn").symbol
        apnea_ecg[recording] = feature_extraction(recording, signal, labels)
    print()

    # test data
    recordings = [
        "x01", "x02", "x03", "x04", "x05", "x06", "x07", "x08", "x09", "x10",
        "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
        "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x29", "x30",
        "x31", "x32", "x33", "x34", "x35"
    ]
    answers = {}
    filename = os.path.join(base_dir, "event-2-answers")
    with open(filename, "r") as f:
        for answer in f.read().split("\n\n"):
            answers[answer[:3]] = list("".join(answer.split()[2::2]))
    for recording in recordings:
        signal = wfdb.rdrecord(os.path.join(base_dir, recording), channels=[0]).p_signal[:, 0]
        labels = answers[recording]
        apnea_ecg[recording] = feature_extraction(recording, signal, labels)

    with open(os.path.join(base_dir, "apnea-ecg.pkl"), "wb") as f:
        pickle.dump(apnea_ecg, f, protocol=2)
    print("ok")
```
| 44.990291 | 120 | 0.579197 | 565 | 4,634 | 4.555752 | 0.39646 | 0.13209 | 0.0777 | 0.062937 | 0.293318 | 0.265346 | 0.255245 | 0.255245 | 0.151515 | 0.070707 | 0 | 0.05938 | 0.26953 | 4,634 | 102 | 121 | 45.431373 | 0.701034 | 0.035175 | 0 | 0.144578 | 0 | 0 | 0.096101 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.012048 | false | 0.012048 | 0.144578 | 0 | 0.168675 | 0.024096 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
959b3935838082e9b39f90f0dbe7ce84722264d7 | 3,904 | py | Python | tiddlywebplugins/tiddlyspace/openid.py | FND/tiddlyspace | 7b26e5b4e0b0a817b3ea0a357613c59705d016d4 | ["BSD-3-Clause"] | 2 | 2015-12-15T00:40:36.000Z | 2019-04-22T16:54:41.000Z | tiddlywebplugins/tiddlyspace/openid.py | jdlrobson/tiddlyspace | 70f500687fcd26e3fa4ef144297a05203ccf0f35 | ["BSD-3-Clause"] | null | null | null | tiddlywebplugins/tiddlyspace/openid.py | jdlrobson/tiddlyspace | 70f500687fcd26e3fa4ef144297a05203ccf0f35 | ["BSD-3-Clause"] | null | null | null |
```python
"""
Subclass of tiddlywebplugins.openid2 to support
tiddlyweb_secondary_user cookie.
"""
import urlparse
from tiddlyweb.web.util import server_host_url, make_cookie
from tiddlywebplugins.openid2 import Challenger as OpenID
FRAGMENT_PREFIX = 'auth:OpenID:'
class Challenger(OpenID):
def __init__(self):
self.name = __name__
def _domain_path(self, environ):
return "." + environ['tiddlyweb.config']['server_host']['host']
def _success(self, environ, start_response, info):
"""
After successful validation of an openid generate
and send a cookie with the value of that openid.
If this is a normal auth scenario make the name
of the cookie the normal 'tiddlyweb_user'. If this
is auth addition, where a fragment of 'auth:OpenID' is
set, then name the cookie 'tiddlyweb_secondary_user'.
"""
usersign = info.getDisplayIdentifier()
if info.endpoint.canonicalID:
usersign = info.endpoint.canonicalID
# canonicolize usersign to tiddlyweb form
if usersign.startswith('http'):
usersign = usersign.split('://', 1)[1]
usersign = usersign.rstrip('/')
redirect = environ['tiddlyweb.query'].get(
'tiddlyweb_redirect', ['/'])[0]
uri = urlparse.urljoin(server_host_url(environ), redirect)
cookie_name = 'tiddlyweb_user'
cookie_age = environ['tiddlyweb.config'].get('cookie_age', None)
try:
fragment = uri.rsplit('#', 1)[1]
except (ValueError, IndexError):
fragment = None
secondary_cookie_name = 'tiddlyweb_secondary_user'
secondary_cookie_age = None
secondary_cookie_only = False
if fragment:
openid = fragment[len(FRAGMENT_PREFIX):]
uri = uri.replace(FRAGMENT_PREFIX + openid,
FRAGMENT_PREFIX + usersign)
secondary_cookie_only = True
secret = environ['tiddlyweb.config']['secret']
cookie_header_string = make_cookie(cookie_name, usersign,
mac_key=secret, path=self._cookie_path(environ),
expires=cookie_age)
secondary_cookie_header_string = make_cookie(
secondary_cookie_name, usersign,
mac_key=secret, path=self._cookie_path(environ),
expires=cookie_age, domain=self._domain_path(environ))
headers = [('Location', uri.encode('utf-8')),
('Content-Type', 'text/plain'),
('Set-Cookie', secondary_cookie_header_string)]
if not secondary_cookie_only:
headers.append(('Set-Cookie', cookie_header_string))
start_response('303 See Other', headers)
return [uri]
def _render_form(self, environ, start_response, openid='',
message='', form=''):
redirect = environ['tiddlyweb.query'].get(
'tiddlyweb_redirect', ['/'])[0]
start_response('200 OK', [(
'Content-Type', 'text/html')])
environ['tiddlyweb.title'] = 'OpenID Login'
return ["""
<div id='content'>
<div class='message'>%s</div>
<pre>
<form action="" method="POST">
OpenID: <input name="openid" size="60" value="%s"/>
<input type="hidden" name="tiddlyweb_redirect" value="%s" />
<input type="hidden" id="csrf_token" name="csrf_token" />
<input type="submit" value="submit" />
</form>
<script type="text/javascript"
src="%s/bags/tiddlyspace/tiddlers/TiddlySpaceCSRF"></script>
<script type="text/javascript">
var csrfToken = window.getCSRFToken(),
el = null;
if (csrfToken) {
el = document.getElementById('csrf_token');
el.value = csrfToken;
}
</script>
</pre>
</div>""" % (message, openid, redirect,
environ['tiddlyweb.config']['server_prefix'])]
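A minimal standalone sketch of the fragment-rewrite step in Challenger._success above; FRAGMENT_PREFIX comes from this file, while the URI and identifiers are illustrative:
# Illustrative values; only the string handling mirrors the handler.
FRAGMENT_PREFIX = 'auth:OpenID:'
uri = 'http://example.com/space#auth:OpenID:old.example.org'
usersign = 'new.example.org'
fragment = uri.rsplit('#', 1)[1]
if fragment.startswith(FRAGMENT_PREFIX):
    openid = fragment[len(FRAGMENT_PREFIX):]
    uri = uri.replace(FRAGMENT_PREFIX + openid, FRAGMENT_PREFIX + usersign)
print(uri)  # http://example.com/space#auth:OpenID:new.example.org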
| 36.148148
| 72
| 0.615523
| 421
| 3,904
| 5.517815
| 0.344418
| 0.051657
| 0.037882
| 0.024107
| 0.146362
| 0.104176
| 0.104176
| 0.104176
| 0.061128
| 0.061128
| 0
| 0.005891
| 0.260758
| 3,904
| 107
| 73
| 36.485981
| 0.79903
| 0.109631
| 0
| 0.076923
| 0
| 0
| 0.313069
| 0.069897
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051282
| false
| 0
| 0.038462
| 0.012821
| 0.141026
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
959bcca51833c2423f463ff10fb943bd7f71b93f
| 9,047
|
py
|
Python
|
pyacoustics/morph/intensity_morph.py
|
UNIST-Interactions/pyAcoustics
|
f22d19d258b4e359fec365b30f11af261dee1b5c
|
[
"MIT"
] | 72
|
2015-12-10T20:00:04.000Z
|
2022-03-31T05:42:17.000Z
|
pyacoustics/morph/intensity_morph.py
|
alivalehi/pyAcoustics
|
ab446681d7a2267063afb6a386334dcaefd0d93b
|
[
"MIT"
] | 5
|
2017-08-08T05:13:15.000Z
|
2020-11-26T00:58:04.000Z
|
pyacoustics/morph/intensity_morph.py
|
alivalehi/pyAcoustics
|
ab446681d7a2267063afb6a386334dcaefd0d93b
|
[
"MIT"
] | 16
|
2016-05-09T07:36:15.000Z
|
2021-08-30T14:23:25.000Z
|
'''
Created on Apr 2, 2015
@author: tmahrt
'''
import os
from os.path import join
import math
import copy
from pyacoustics.morph.morph_utils import common
from pyacoustics.morph.morph_utils import plot_morphed_data
from pyacoustics.utilities import utils
from pyacoustics.utilities import sequences
from pyacoustics.signals import audio_scripts
from pyacoustics.utilities import my_math
def intensityMorph(fromWavFN, toWavFN, fromWavTGFN, toWavTGFN, tierName,
numSteps, coreChunkSize, plotFlag):
fromDataTupleList = common.getIntervals(fromWavTGFN, tierName)
toDataTupleList = common.getIntervals(toWavTGFN, tierName)
outputName = os.path.splitext(fromWavFN)[0] + "_int_" + tierName
_intensityMorph(fromWavFN, toWavFN,
fromDataTupleList, toDataTupleList,
numSteps, coreChunkSize, plotFlag, outputName)
def _intensityMorph(fromWavFN, toWavFN, fromDataTupleList,
toDataTupleList, numSteps, coreChunkSize, plotFlag,
outputName=None):
if outputName is None:
outputName = os.path.splitext(fromWavFN)[0] + "_int"
outputDir = join(os.path.split(fromWavFN)[0], "output")
utils.makeDir(outputDir)
# Determine the multiplication values to be used in normalization
# - this extracts one value per chunk
expectedLength = 0
normFactorList = []
truncatedToList = []
chunkSizeList = []
fromDataList = []
fromParams = audio_scripts.getParams(fromWavFN)
toParams = audio_scripts.getParams(toWavFN)
    for fromTuple, toTuple in zip(fromDataTupleList, toDataTupleList):
        fromStart, fromEnd = fromTuple[:2]
        toStart, toEnd = toTuple[:2]
        expectedLength += (fromEnd - fromStart) * fromParams[2]
        # Assumption: fromWavObj/toWavObj are WavObj instances opened from
        # fromWavFN/toWavFN elsewhere; extractSubsegment() mirrors its use
        # further down in this function.
        fromSubWav = fromWavObj.extractSubsegment(fromStart, fromEnd)
        toSubWav = toWavObj.extractSubsegment(toStart, toEnd)
        fromDataList.extend(fromSubWav.rawDataList)
normFactorListTmp, a = getRelativeNormalizedFactors(fromSubWav,
toSubWav,
coreChunkSize)
tmpChunkList = [tmpChunkSize
for value, tmpChunkSize in normFactorListTmp]
chunkSizeList.append(sum(tmpChunkList))
normFactorList.extend(normFactorListTmp)
truncatedToList.extend(a)
interpolatedResults = []
normFactorGen = [sequences.interp(1.0, factor[0], numSteps)
for factor in normFactorList]
tmpChunkSizeList = [factor[1] for factor in normFactorList]
for i in xrange(numSteps):
outputFN = "%s_s%d_%d_%d.wav" % (outputName,
coreChunkSize,
numSteps - 1, i)
tmpNormFactorList = [next(normFactorGen[j])
for j in xrange(len(normFactorGen))]
# Skip the first value (same as the input value)
if i == 0:
continue
tmpInputList = zip(tmpNormFactorList, tmpChunkSizeList)
normalizationTuple = expandNormalizationFactors(tmpInputList)
expandedNormFactorList = normalizationTuple[0]
# It happened once that the expanded factor list was off by one value
# -- I could not determine why, so this is just a cheap hack
if len(expandedNormFactorList) == (expectedLength - 1):
expandedNormFactorList.append(expandedNormFactorList[-1])
# print("Diff: ", expectedLength, len(expandedNormFactorList))
assert(expectedLength == len(expandedNormFactorList))
newWavObj = copy.deepcopy(fromWavObj)
newRawDataList = []
# Apply the normalization and reinsert the data back
# into the original file
offset = 0
for fromTuple, chunkSize in zip(fromDataTupleList, chunkSizeList):
fromStart, fromEnd = fromTuple[:2]
fromSubWav = fromWavObj.extractSubsegment(fromStart, fromEnd)
assert(len(fromSubWav.rawDataList) ==
len(expandedNormFactorList[offset:offset + chunkSize]))
tmpList = [fromSubWav.rawDataList,
expandedNormFactorList[offset:offset + chunkSize]]
subRawDataList = [value * normFactor for value, normFactor in
utils.safeZip(tmpList, enforceLength=True)]
newRawDataList.extend(subRawDataList)
offset += chunkSize
newWavObj = audio.WavObj(newRawDataList, fromWavObj.samplingRate)
newWavObj.save(join(outputDir, outputFN))
interpolatedResults.append(newWavObj.rawDataList)
plotFN = "%s_s%d_%d.png" % (outputFN, coreChunkSize, numSteps)
if plotFlag:
        plot_morphed_data.plotIntensity(fromDataList,
truncatedToList,
interpolatedResults,
expandedNormFactorList,
os.path.join(outputDir, plotFN))
def getNormalizationFactor(lst, refLst=None):
'''
'''
# Get the source values that we will be normalizing
lst = list(set(lst))
if 0 in lst:
lst.pop(lst.index(0))
actMaxV = float(max(lst))
actMinV = float(min(lst))
# Get the reference values
if refLst is None:
refMaxV = 32767.0
refMinV = -32767.0
else:
refLst = list(set(refLst))
if 0 in refLst:
refLst.pop(refLst.index(0))
refMaxV = float(max(refLst))
refMinV = float(min(refLst))
actualFactor = min(refMaxV / actMaxV, abs(refMinV) / abs(actMinV))
# print("Normalization factor: ", actualFactor)
return actualFactor
def getRelativeNormalizedFactors(fromDataList, toDataList, chunkSize):
'''
Determines the factors to be used to normalize sourceWav from targetWav
This can be used to relatively normalize the source based on the target
on an iterative basis (small chunks are normalized rather than the entire
wav.
'''
# Sample proportionately from the targetWav
# - if the two lists are the same length, there is no change
# - if /target/ is shorter, it will be lengthened with some repeated values
# - if /target/ is longer, it will be shortened with some values dropped
    tmpIndexList = sequences.interp(0, len(toDataList) - 1,
                                    len(fromDataList))
newTargetRawDataList = [toDataList[int(round(i))]
for i in tmpIndexList]
assert(len(fromDataList) == len(newTargetRawDataList))
fromGen = sequences.subsequenceGenerator(fromDataList,
chunkSize,
sequences.sampleMiddle,
sequences.DO_SAMPLE_GATED)
toGen = sequences.subsequenceGenerator(newTargetRawDataList,
chunkSize,
sequences.sampleMiddle,
sequences.DO_SAMPLE_GATED)
normFactorList = []
i = 0
for fromTuple, toTuple in zip(fromGen, toGen):
fromDataChunk = fromTuple[0]
toDataChunk = toTuple[0]
distToNextControlPoint = fromTuple[2]
normFactor = getNormalizationFactor(fromDataChunk, toDataChunk)
normFactorList.append((normFactor, distToNextControlPoint))
# i += 1
# if i >= 38:
# print("hello")
# print(len(sourceWav.rawDataList), allChunks)
# assert(len(sourceWav.rawDataList) == allChunks)
return normFactorList, newTargetRawDataList
def expandNormalizationFactors(normFactorList):
'''
Expands the normFactorList from being chunk-based to sample-based
E.g. A wav with 1000 samples may be represented by a factorList of 5 chunks
(5 factor values). This function will expand that to 1000.
'''
i = 0
normFactorsFull = []
controlPoints = []
while i < len(normFactorList) - 1:
startVal, chunkSize = normFactorList[i]
endVal = normFactorList[i + 1][0]
normFactorsFull.extend(my_math.linspace(startVal, endVal, chunkSize))
controlPoints.append(startVal)
controlPoints.extend(my_math.linspace(startVal, startVal,
chunkSize - 1))
i += 1
# We have no more data, so just repeat the final norm factor at the tail
# of the file
value, finalChunkSize = normFactorList[i]
controlPoints.append(value)
    controlPoints.extend(my_math.linspace(value, value,
                                          finalChunkSize - 1))
normFactorsFull.extend(my_math.linspace(value, value, finalChunkSize))
print('Norm factors full: %d' % len(normFactorsFull))
return normFactorsFull, controlPoints
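A standalone sketch of the chunk-to-sample expansion idea in expandNormalizationFactors, assuming my_math.linspace behaves like numpy.linspace (an assumption; the pyacoustics helper is not shown here):
import numpy as np

factors = [(1.0, 4), (2.0, 4), (0.5, 4)]  # (normFactor, chunkSize) pairs
expanded = []
for (startVal, chunkSize), (endVal, _) in zip(factors, factors[1:]):
    # interpolate from one chunk's factor to the next, one value per sample
    expanded.extend(np.linspace(startVal, endVal, chunkSize))
value, finalChunkSize = factors[-1]
expanded.extend([value] * finalChunkSize)  # repeat the final factor at the tail
print(len(expanded))  # 12 samples for 3 chunks of size 4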
| 36.776423
| 79
| 0.606831
| 812
| 9,047
| 6.729064
| 0.330049
| 0.016471
| 0.008785
| 0.014641
| 0.123719
| 0.100659
| 0.087482
| 0.036969
| 0.036969
| 0
| 0
| 0.010543
| 0.318559
| 9,047
| 245
| 80
| 36.926531
| 0.87575
| 0.167127
| 0
| 0.09589
| 0
| 0
| 0.008734
| 0
| 0
| 0
| 0
| 0
| 0.020548
| 1
| 0.034247
| false
| 0
| 0.068493
| 0
| 0.123288
| 0.006849
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95a2f6f31ddcda8bf982507b3035c6d82bfe1d80
| 723
|
py
|
Python
|
selfdrive/visiond/tensorflow_autodetect.py
|
jeroenbbb/openpilot
|
4a2ff784f85ac87a4aa9ba8a345c2403102f960a
|
[
"MIT"
] | 4
|
2019-05-29T19:44:56.000Z
|
2021-09-10T18:36:57.000Z
|
selfdrive/visiond/tensorflow_autodetect.py
|
jeroenbbb/openpilot
|
4a2ff784f85ac87a4aa9ba8a345c2403102f960a
|
[
"MIT"
] | null | null | null |
selfdrive/visiond/tensorflow_autodetect.py
|
jeroenbbb/openpilot
|
4a2ff784f85ac87a4aa9ba8a345c2403102f960a
|
[
"MIT"
] | 5
|
2019-08-09T07:49:28.000Z
|
2020-10-11T03:19:04.000Z
|
import os
from setuptools import setup
version = os.getenv('VERSION', '1.10.1')
setup(
name='tensorflow-autodetect',
version=version,
url='https://github.com/commaai/tensorflow-autodetect',
author='comma.ai',
author_email='',
license='MIT',
long_description='Auto-detect tensorflow or tensorflow-gpu package based on nvidia driver being installed',
keywords='tensorflow tensorflow-gpu',
install_requires=[
('tensorflow-gpu' if os.path.exists('/proc/driver/nvidia/version') else 'tensorflow') + '==' + version,
],
classifiers=[
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
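The dependency-selection trick above, in isolation: the presence of the nvidia driver's proc entry decides between the GPU and CPU package at install time. A small sketch:
import os

version = '1.10.1'
has_nvidia = os.path.exists('/proc/driver/nvidia/version')
package = ('tensorflow-gpu' if has_nvidia else 'tensorflow') + '==' + version
print(package)  # tensorflow==1.10.1 on a machine without the nvidia driver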
| 30.125
| 111
| 0.656985
| 79
| 723
| 5.974684
| 0.658228
| 0.082627
| 0.105932
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010327
| 0.196404
| 723
| 23
| 112
| 31.434783
| 0.802065
| 0
| 0
| 0.095238
| 0
| 0
| 0.49101
| 0.06639
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.095238
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95a49255a761f17a3cc35cbf97bc73b1442eaf32
| 7,563
|
py
|
Python
|
plex_import_watched_history.py
|
chazlarson/plex-watched-tools
|
ef3e34e733ec9555353d695ced582395bdc73480
|
[
"MIT"
] | null | null | null |
plex_import_watched_history.py
|
chazlarson/plex-watched-tools
|
ef3e34e733ec9555353d695ced582395bdc73480
|
[
"MIT"
] | null | null | null |
plex_import_watched_history.py
|
chazlarson/plex-watched-tools
|
ef3e34e733ec9555353d695ced582395bdc73480
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# python3 -m pip install --force -U --user PlexAPI
import json
import time
import logging
import plexapi
import plexapi.video
import plexapi.myplex
import plexapi.utils
import plexapi.server
import plexapi.library
import plexapi.exceptions
PLEX_URL = ""
PLEX_TOKEN = ""
WATCHED_HISTORY = ""
LOG_FILE = ""
BATCH_SIZE = 10000
PLEX_REQUESTS_SLEEP = 0
CHECK_USERS = [
]
LOG_FORMAT = \
"[%(name)s][%(process)05d][%(asctime)s][%(levelname)-8s][%(funcName)-15s]" \
" %(message)s"
LOG_DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
LOG_LEVEL = logging.INFO
plexapi.server.TIMEOUT = 3600
plexapi.server.X_PLEX_CONTAINER_SIZE = 2500
_SHOW_GUID_RATING_KEY_MAPPING = {}
_MOVIE_GUID_RATING_KEY_MAPPING = {}
_EPISODE_GUID_RATING_KEY_MAPPING = {}
logger = logging.getLogger("PlexWatchedHistoryImporter")
def _get_config_str(key):
return plexapi.CONFIG.get(key, default="", cast=str).strip("'").strip('"').strip()
def _load_config():
global PLEX_URL, PLEX_TOKEN, WATCHED_HISTORY, CHECK_USERS, LOG_FILE, LOG_LEVEL
if PLEX_URL == "":
PLEX_URL = _get_config_str("sync.dst_url")
if PLEX_TOKEN == "":
PLEX_TOKEN = _get_config_str("sync.dst_token")
if WATCHED_HISTORY == "":
WATCHED_HISTORY = _get_config_str("sync.watched_history")
if len(CHECK_USERS) == 0:
config_check_users = _get_config_str("sync.check_users").split(",")
CHECK_USERS = [user.strip() for user in config_check_users if user]
if LOG_FILE == "":
LOG_FILE = _get_config_str("sync.import_log_file")
debug = plexapi.utils.cast(bool, _get_config_str("sync.debug").lower())
if debug:
LOG_LEVEL = logging.DEBUG
def _setup_logger():
logging.Formatter.converter = time.gmtime
logging.raiseExceptions = False
logger.setLevel(logging.DEBUG)
logger.handlers = []
logger.propagate = False
detailed_formatter = logging.Formatter(fmt=LOG_FORMAT,
datefmt=LOG_DATE_FORMAT)
file_handler = logging.FileHandler(filename=LOG_FILE, mode="a+")
file_handler.setFormatter(detailed_formatter)
file_handler.setLevel(LOG_LEVEL)
logger.addHandler(file_handler)
def _get_rating_keys(server, rating_key_guid_mapping, guid):
if guid not in rating_key_guid_mapping:
items = server.library.search(guid=guid)
rating_key_guid_mapping[guid] = [item.ratingKey for item in items]
return rating_key_guid_mapping[guid]
def _set_movie_section_watched_history(server, movie_history):
for movie_guid, movie_item_history in movie_history.items():
rating_keys = _get_rating_keys(server, _MOVIE_GUID_RATING_KEY_MAPPING, movie_guid)
for rating_key in rating_keys:
item = server.fetchItem(rating_key)
if movie_item_history['watched'] and not item.isWatched:
logger.debug(f"Watching Movie: {item.title}")
item.markWatched()
if movie_item_history['viewCount'] > item.viewCount:
for _ in range(movie_item_history['viewCount'] - item.viewCount):
logger.debug(f"Watching Movie: {item.title}")
item.markWatched()
if movie_item_history['viewOffset'] != 0:
logger.debug(f"Updating Movie Timeline: {item.title}: {movie_item_history['viewOffset']}")
item.updateTimeline(movie_item_history['viewOffset'])
if movie_item_history['userRating'] != "":
logger.debug(f"Rating Movie: {item.title}: {movie_item_history['userRating']}")
item.rate(movie_item_history['userRating'])
def _set_show_section_watched_history(server, show_history):
for show_guid, show_item_history in show_history.items():
rating_keys = _get_rating_keys(server, _SHOW_GUID_RATING_KEY_MAPPING, show_guid)
for rating_key in rating_keys:
item = server.fetchItem(rating_key)
if show_item_history['watched'] and not item.isWatched:
logger.debug(f"Watching Show: {item.title}")
item.markWatched()
if show_item_history['userRating'] != "":
logger.debug(f"Rating Show: {item.title}: {show_item_history['userRating']}")
item.rate(show_item_history['userRating'])
for episode_guid, episode_item_history in show_item_history['episodes'].items():
rating_keys = _get_rating_keys(server, _EPISODE_GUID_RATING_KEY_MAPPING, episode_guid)
for rating_key in rating_keys:
item = server.fetchItem(rating_key)
if episode_item_history['watched'] and not item.isWatched:
logger.debug(f"Watching Episode: {item.title}")
item.markWatched()
if episode_item_history['viewCount'] > item.viewCount:
for _ in range(episode_item_history['viewCount'] - item.viewCount):
logger.debug(f"Watching Episode: {item.title}")
item.markWatched()
if episode_item_history['viewOffset'] != 0:
logger.debug(f"Updating Episode Timeline: {item.title}: {episode_item_history['viewOffset']}")
item.updateTimeline(episode_item_history['viewOffset'])
if episode_item_history['userRating'] != "":
logger.debug(f"Rating Episode: {item.title}: {episode_item_history['userRating']}")
item.rate(episode_item_history['userRating'])
def _set_user_server_watched_history(server, watched_history):
_set_movie_section_watched_history(server, watched_history['movie'])
_set_show_section_watched_history(server, watched_history['show'])
def main():
_load_config()
_setup_logger()
plex_server = plexapi.server.PlexServer(PLEX_URL, PLEX_TOKEN, timeout=300)
plex_account = plex_server.myPlexAccount()
with open(WATCHED_HISTORY, "r") as watched_history_file:
watched_history = json.load(watched_history_file)
logger.info(f"Starting Import")
plex_users = plex_account.users()
# Owner will be processed separately
logger.info(f"Total Users: {len(plex_users) + 1}")
if not (len(CHECK_USERS) > 0 and plex_account.username not in CHECK_USERS and
plex_account.email not in CHECK_USERS):
logger.info(f"Processing Owner: {plex_account.username}")
user_history = watched_history[plex_account.username]
_set_user_server_watched_history(plex_server, user_history)
for user_index, user in enumerate(plex_users):
if (len(CHECK_USERS) > 0 and user.username not in CHECK_USERS and
user.email not in CHECK_USERS):
continue
if user.username not in watched_history:
logger.warning(f"Missing User from Watched History: {user.username}")
continue
logger.info(f"Processing User: {user.username}")
user_server_token = user.get_token(plex_server.machineIdentifier)
try:
user_server = plexapi.server.PlexServer(PLEX_URL, user_server_token, timeout=300)
except plexapi.exceptions.Unauthorized:
# This should only happen when no libraries are shared
logger.warning(f"Skipped User with No Libraries Shared: {user.username}")
continue
user_history = watched_history[user.username]
_set_user_server_watched_history(user_server, user_history)
logger.info(f"Completed Import")
if __name__ == "__main__":
main()
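For reference, the shape of the watched-history JSON the importer expects, inferred from the keys read above; the GUIDs and values are illustrative, not taken from a real export:
watched_history_example = {
    "some_username": {
        "movie": {
            "plex://movie/abc": {
                "watched": True,
                "viewCount": 2,
                "viewOffset": 0,
                "userRating": "",
            },
        },
        "show": {
            "plex://show/def": {
                "watched": False,
                "userRating": "",
                "episodes": {
                    "plex://episode/ghi": {
                        "watched": True,
                        "viewCount": 1,
                        "viewOffset": 0,
                        "userRating": "",
                    },
                },
            },
        },
    },
}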
| 36.713592
| 114
| 0.672352
| 927
| 7,563
| 5.160734
| 0.18986
| 0.059783
| 0.033445
| 0.025084
| 0.449415
| 0.323161
| 0.210284
| 0.178721
| 0.135242
| 0.121446
| 0
| 0.005783
| 0.222663
| 7,563
| 205
| 115
| 36.892683
| 0.807961
| 0.0238
| 0
| 0.123288
| 0
| 0.006849
| 0.153294
| 0.039848
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054795
| false
| 0
| 0.089041
| 0.006849
| 0.157534
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95aa9b2ab7c302c981b157247e84659b7c3d8105
| 709
|
py
|
Python
|
test/test_integration.py
|
gaborfodor/wave-bird-recognition
|
6feafdbae82746e3e7b0f6588a9158aa8336309a
|
[
"MIT"
] | 17
|
2021-06-02T12:26:30.000Z
|
2022-03-27T18:35:02.000Z
|
test/test_integration.py
|
gaborfodor/wave-bird-recognition
|
6feafdbae82746e3e7b0f6588a9158aa8336309a
|
[
"MIT"
] | null | null | null |
test/test_integration.py
|
gaborfodor/wave-bird-recognition
|
6feafdbae82746e3e7b0f6588a9158aa8336309a
|
[
"MIT"
] | 3
|
2021-06-02T12:26:51.000Z
|
2021-06-06T05:56:45.000Z
|
from birds.display_utils import geo_plot
from birds.pann import load_pretrained_model, read_audio_fast, get_model_predictions_for_clip, BIRDS
def test_prediction_works():
test_bird = 'comrav'
model = load_pretrained_model()
y = read_audio_fast(f'./data/audio/{test_bird}.mp3')
predictions = get_model_predictions_for_clip(y, model)
class_probs = predictions[BIRDS].sum().reset_index()
class_probs.columns = ['ebird', 'p']
class_probs = class_probs.sort_values(by='p')
top_ebird = class_probs.ebird.values[-1]
assert top_ebird == test_bird
def test_map():
html = geo_plot('norcar', 10, 10)
with open('./temp/test_map.html', 'w') as f:
f.write(html)
| 27.269231
| 100
| 0.71086
| 105
| 709
| 4.466667
| 0.495238
| 0.10661
| 0.081023
| 0.093817
| 0.110874
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010135
| 0.165021
| 709
| 25
| 101
| 28.36
| 0.782095
| 0
| 0
| 0
| 0
| 0
| 0.09591
| 0.039492
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.125
| false
| 0
| 0.125
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95ae2e3a04b5bb9553c2d275221aaaba3d17f40e
| 1,236
|
py
|
Python
|
0205.Isomorphic Strings/solution.py
|
zhlinh/leetcode
|
6dfa0a4df9ec07b2c746a13c8257780880ea04af
|
[
"Apache-2.0"
] | null | null | null |
0205.Isomorphic Strings/solution.py
|
zhlinh/leetcode
|
6dfa0a4df9ec07b2c746a13c8257780880ea04af
|
[
"Apache-2.0"
] | null | null | null |
0205.Isomorphic Strings/solution.py
|
zhlinh/leetcode
|
6dfa0a4df9ec07b2c746a13c8257780880ea04af
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
*****************************************
Author: zhlinh
Email: zhlinhng@gmail.com
Version: 0.0.1
Created Time: 2016-03-24
Last_modify: 2016-03-24
******************************************
'''
'''
Given two strings s and t, determine if they are isomorphic.
Two strings are isomorphic if the characters in s can be replaced to get t.
All occurrences of a character must be replaced with another character
while preserving the order of characters.
No two characters may map to the same character
but a character may map to itself.
For example,
Given "egg", "add", return true.
Given "foo", "bar", return false.
Given "paper", "title", return true.
Note:
You may assume both s and t have the same length.
'''
class Solution(object):
def isIsomorphic(self, s, t):
"""
:type s: str
:type t: str
:rtype: bool
"""
if len(s) != len(t):
return False
m1 = [0] * 256
m2 = [0] * 256
for i in range(len(s)):
if m1[ord(s[i])] != m2[ord(t[i])]:
return False
m1[ord(s[i])] = i + 1
m2[ord(t[i])] = i + 1
return True
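Quick checks against the examples in the problem statement above:
sol = Solution()
assert sol.isIsomorphic("egg", "add")        # True
assert not sol.isIsomorphic("foo", "bar")    # False
assert sol.isIsomorphic("paper", "title")    # True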
| 24.235294
| 75
| 0.536408
| 175
| 1,236
| 3.782857
| 0.531429
| 0.045317
| 0.024169
| 0.021148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040678
| 0.283981
| 1,236
| 50
| 76
| 24.72
| 0.707345
| 0.242718
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0
| 0
| 0.416667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95b233e62bad224b765ef9f8b1c2e67cce2b24ad
| 1,659
|
py
|
Python
|
YOLOv2.py
|
scain40/OpenCVCVImageComparisson
|
368d901233111606fb2f0ecbce4447dd9c149fd0
|
[
"MIT"
] | null | null | null |
YOLOv2.py
|
scain40/OpenCVCVImageComparisson
|
368d901233111606fb2f0ecbce4447dd9c149fd0
|
[
"MIT"
] | null | null | null |
YOLOv2.py
|
scain40/OpenCVCVImageComparisson
|
368d901233111606fb2f0ecbce4447dd9c149fd0
|
[
"MIT"
] | null | null | null |
import numpy as np
import cv2 as cv
import os
import sys
class ObjectDetector:
"""
Object Detector is the class model for using YOLOv2 and gathering results
"""
def __init__(self):
self.network_loading()
def network_loading(self):
        # Load the trained darknet model's labels and network definition
        self.LABELS = open(os.path.join(os.getcwd(), "coco.names")).read().strip().split("\n")
        self.readingNetwork = cv.dnn.readNetFromDarknet(
            os.path.join(os.getcwd(), "yolov3.cfg"),
            os.path.join(os.getcwd(), "yolov3.weights"))
def read_image(self, image_name):
# Reading in a specific image from the files that exist in the input folder
working_image = cv.imread(image_name)
self.labelNames = self.readingNetwork.getLayerNames()
self.labelNames = [self.labelNames[i[0] - 1] for i in self.readingNetwork.getUnconnectedOutLayers()]
imageInputBlob = cv.dnn.blobFromImage(working_image, 1 / 255.0, (416, 416), swapRB=True, crop=False)
self.readingNetwork.setInput(imageInputBlob)
layerOutputs = self.readingNetwork.forward(self.labelNames)
return self.processReading(layerOutputs)
def processReading(self, processingResults):
        # Takes in the results from a reading and processes them to check for valid objects
classIDs = []
for objects in processingResults:
# loop over each of the detections
for detection in objects:
scores = detection[5:]
classID = np.argmax(scores)
confidence = scores[classID]
if confidence > 0.9:
# Appending the names of all the objects to be sorted later
classIDs.append(self.LABELS[classID])
        # Just returning class names, as they're the only thing relevant to OSR later
return classIDs
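A hypothetical usage sketch; it assumes coco.names, yolov3.cfg and yolov3.weights sit in the working directory and that 'street.jpg' is an image to score:
if __name__ == '__main__':
    detector = ObjectDetector()
    labels = detector.read_image('street.jpg')
    print(labels)  # e.g. ['person', 'car'] for detections above 0.9 confidence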
| 36.866667
| 112
| 0.722122
| 219
| 1,659
| 5.420091
| 0.515982
| 0.075821
| 0.023589
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014771
| 0.183846
| 1,659
| 44
| 113
| 37.704545
| 0.861891
| 0.261001
| 0
| 0
| 0
| 0
| 0.033448
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0
| 0.392857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95b40e4094e935db9b4e39bc3de9c67b55114bbe
| 484
|
py
|
Python
|
app/run.py
|
dudikbender/geocoder
|
af8c0839d3d73c7825a0488763d053b5e6bc8257
|
[
"Unlicense"
] | null | null | null |
app/run.py
|
dudikbender/geocoder
|
af8c0839d3d73c7825a0488763d053b5e6bc8257
|
[
"Unlicense"
] | null | null | null |
app/run.py
|
dudikbender/geocoder
|
af8c0839d3d73c7825a0488763d053b5e6bc8257
|
[
"Unlicense"
] | null | null | null |
from utils.db import connection, print_version
import pandas as pd
def add_table(csv_file, table_name, engine):
df = pd.read_csv(csv_file)
    df = df.drop(columns='Unnamed: 0')
df.to_sql(name=table_name, con=engine, index=False, if_exists='replace')
table = 'data/tables/postcode_coordinates.csv'
add_table(table, 'Postcode_coordinates', connection)
cur = connection.cursor()
cur.execute('''SELECT *
FROM Postcode_coordinates''')
data = cur.fetchmany(5)
print(data)
| 25.473684
| 76
| 0.727273
| 70
| 484
| 4.842857
| 0.571429
| 0.168142
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004831
| 0.144628
| 484
| 19
| 77
| 25.473684
| 0.81401
| 0
| 0
| 0
| 0
| 0
| 0.245361
| 0.074227
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.153846
| 0
| 0.230769
| 0.153846
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95b771302ac3436f68366f36390ccc4ddba021fd
| 2,206
|
py
|
Python
|
validator_rewards/validator_rewards.py
|
harmony-one/monitor-ops
|
0a379655ff26bff5821cd7cb6f619a15a308441b
|
[
"MIT"
] | 1
|
2020-04-11T16:46:56.000Z
|
2020-04-11T16:46:56.000Z
|
validator_rewards/validator_rewards.py
|
harmony-one/monitor-ops
|
0a379655ff26bff5821cd7cb6f619a15a308441b
|
[
"MIT"
] | 3
|
2020-04-13T10:42:59.000Z
|
2020-07-10T06:26:23.000Z
|
validator_rewards/validator_rewards.py
|
harmony-one/monitor-ops
|
0a379655ff26bff5821cd7cb6f619a15a308441b
|
[
"MIT"
] | 2
|
2020-04-22T10:36:25.000Z
|
2020-05-20T15:58:02.000Z
|
import argparse
import json
import requests
from collections import Counter
from pyhmy import (
    get_all_validator_addresses,
    get_validator_information
)
def get_block_by_num(block_num, endpoint):
params = [
str(hex(block_num)),
False,
]
payload = {
"id": "1",
"jsonrpc": "2.0",
"method": "hmy_getBlockByNumber",
"params": params
}
headers = {
'Content-Type': 'application/json'
}
timeout = 5
try:
resp = requests.request('POST', endpoint, headers=headers, data=json.dumps(payload),
timeout=timeout, allow_redirects=True)
return json.loads(resp.content)
except Exception as e:
v_print(f'{e.__class__}: {e}')
return None
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--start", required=True, type=int, help="First block")
parser.add_argument("--end", required=True, type=int, help="Last block")
parser.add_argument("--endpoint", default="http://localhost:9500", help="Endpoint to query")
parser.add_argument("--verbose", action='store_true', help="Verbose print for debug")
args = parser.parse_args()
if args.verbose:
def v_print(*args, **kwargs):
print(*args, **kwargs)
else:
def v_print(*args, **kwargs):
return
block_timestamps = []
block_tx = []
block_stx = []
for block_num in range(args.start, args.end):
v_print(f'Block {block_num}/{args.end}', end="\r")
reply = get_block_by_num(block_num, args.endpoint)
try:
block_timestamps.append(int(reply['result']['timestamp'], 0))
block_tx.append(len(reply['result']['transactions']))
block_stx.append(len(reply['result']['stakingTransactions']))
except Exception as e:
v_print(f'{e.__class__}: {e}')
pass
block_times = [y - x for x, y in zip(block_timestamps, block_timestamps[1:])]
avg = sum(block_times) / len(block_times)
print(f'Average Block Time: {avg}')
unique_times = Counter(block_times)
print(f'Unique block times: {unique_times.most_common()}')
# offset = [0].extend(block_times)
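For clarity, the block-number encoding used in get_block_by_num: the JSON-RPC endpoint expects the number as a hex string, so block 1234 travels as '0x4d2':
block_num = 1234
params = [str(hex(block_num)), False]
print(params)  # ['0x4d2', False]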
| 31.514286
| 96
| 0.609248
| 268
| 2,206
| 4.787313
| 0.41791
| 0.046765
| 0.053001
| 0.020265
| 0.14809
| 0.082619
| 0.049883
| 0.049883
| 0.049883
| 0.049883
| 0
| 0.006623
| 0.247053
| 2,206
| 69
| 97
| 31.971014
| 0.765804
| 0.014506
| 0
| 0.135593
| 0
| 0
| 0.186464
| 0.02302
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050847
| false
| 0.016949
| 0.050847
| 0.016949
| 0.152542
| 0.152542
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95bbb3583a2750d5735e9244fe93a6a446fb803f
| 8,314
|
py
|
Python
|
dataset/data_load.py
|
clovaai/symmetrical-synthesis
|
207953b1ae3d2e0a96fb676db3669bdc88cc18e8
|
[
"MIT"
] | 76
|
2020-02-08T03:15:54.000Z
|
2022-03-04T16:14:52.000Z
|
dataset/data_load.py
|
clovaai/symmetrical-synthesis
|
207953b1ae3d2e0a96fb676db3669bdc88cc18e8
|
[
"MIT"
] | 5
|
2020-02-07T14:00:58.000Z
|
2021-05-31T01:37:55.000Z
|
dataset/data_load.py
|
clovaai/symmetrical-synthesis
|
207953b1ae3d2e0a96fb676db3669bdc88cc18e8
|
[
"MIT"
] | 13
|
2020-02-10T02:56:51.000Z
|
2021-05-28T06:56:30.000Z
|
'''
symmetrical-synthesis
Copyright (c) 2020-present NAVER Corp.
MIT license
'''
import os
import time
import glob
import cv2
import random
import numpy as np
import tensorflow as tf
import random
try:
import data_util
except ImportError:
from dataset import data_util
tf.app.flags.DEFINE_boolean('random_resize', False, 'True or False')
tf.app.flags.DEFINE_boolean('past_dataset', False, 'True or False')
tf.app.flags.DEFINE_string('google_path', None, '')
tf.app.flags.DEFINE_integer('min_train3', 2, '')
tf.app.flags.DEFINE_string('match_info', None, '')
tf.app.flags.DEFINE_float('match_prob', 0.0, '')
tf.app.flags.DEFINE_boolean('mnist_mode', False, '')
FLAGS = tf.app.flags.FLAGS
'''
image_path = '/where/your/images/*.jpg'
'''
def load_image(im_fn, input_size=224):
org_image = cv2.imread(im_fn, cv2.IMREAD_IGNORE_ORIENTATION | cv2.IMREAD_COLOR)[:,:,::-1] # rgb converted
'''
if FLAGS.random_resize:
resize_table = [0.5, 1.0, 1.5, 2.0]
selected_scale = np.random.choice(resize_table, 1)[0]
shrinked_hr_size = int(hr_size / selected_scale)
h, w, _ = high_image.shape
if h <= shrinked_hr_size or w <= shrinked_hr_size:
high_image = cv2.resize(high_image, (hr_size, hr_size))
else:
h_edge = h - shrinked_hr_size
w_edge = w - shrinked_hr_size
h_start = np.random.randint(low=0, high=h_edge, size=1)[0]
w_start = np.random.randint(low=0, high=w_edge, size=1)[0]
high_image_crop = high_image[h_start:h_start+hr_size, w_start:w_start+hr_size, :]
high_image = cv2.resize(high_image_crop, (hr_size, hr_size))
'''
h, w, _ = org_image.shape
min_len = np.min([h, w])
# center crop margin, we follow the method, which was introduced in DELF paper.
if FLAGS.mnist_mode:
crop_image = org_image.copy()
else:
try:
cc_margin = np.random.randint(low=1, high=int(min_len * 0.05), size=1)[0]
crop_image = org_image[cc_margin:-cc_margin, cc_margin:-cc_margin, :].copy()
except:
crop_image = org_image.copy()
new_input_size = int(input_size * 1.125)
crop_image = cv2.resize(crop_image, (new_input_size, new_input_size), interpolation=cv2.INTER_AREA)
# random crop range
    h_edge = new_input_size - input_size  # input_size is 224
    w_edge = new_input_size - input_size
h_start = np.random.randint(low=0, high=h_edge, size=1)[0]
w_start = np.random.randint(low=0, high=w_edge, size=1)[0]
return_image = crop_image[h_start:h_start+input_size, w_start:w_start+input_size,:]
# flip lr
if random.randint(0, 1):
return_image = return_image[:,::-1,:]
#print('return', return_image.shape)
return return_image #high_image, low_image
def get_images_dict(image_folder):
'''
image_folder = '/data/IR/DB/sid_images'
folder structure
sid_images - sid0 - image00.png, image01.png, ...
- sid1 - ...
- sid2 - ...
'''
if FLAGS.match_info is not None:
match_dict = {}
f_match = open(FLAGS.match_info, 'r')
match_lines = f_match.readlines()
cnt = 0
for match_line in match_lines:
ver1_cls, ver2_cls, prob = match_line.split()
prob = float(prob)
if prob >= FLAGS.match_prob:
match_dict[ver2_cls] = 1
possible_image_type = ['jpg', 'JPG', 'png', 'JPEG', 'jpeg']
sid_list = glob.glob(os.path.join(image_folder, '*'))
images_dict = {}
images_list = []
images_cnt = 0
sid_idx = 0
for sid_folder in sid_list:
ext_folder = sid_folder
#ext_folder = os.path.join(sid_folder, 'exterior')
images_path = [image_path for image_paths in [glob.glob(os.path.join(ext_folder, '*.%s' % ext)) for ext in possible_image_type] for image_path in image_paths]
n_instance = 2
if len(images_path) < n_instance:
continue
for image_path in images_path:
images_list.append([image_path, sid_idx])
images_dict[sid_idx] = images_path
images_cnt += len(images_path)
sid_idx += 1
#print(images_dict)
stat_db = {}
stat_db['num_sid'] = len(images_dict)
stat_db['images_cnt'] = images_cnt
return images_dict, stat_db, images_list
def get_record(image_folder, input_size, batch_size):
images_dict, stat_db, images_list = get_images_dict(image_folder)
print('place total sids: %d, total images: %d' % (stat_db['num_sid'], stat_db['images_cnt']))
if FLAGS.google_path is not None:
images_dict_google, stat_db_google, images_list_google = get_images_dict(FLAGS.google_path)
print('google total sids: %d, total images: %d' % (stat_db_google['num_sid'], stat_db_google['images_cnt']))
#time.sleep(3)
n_instance = 2
b_replace = False
real_batch_size = batch_size // n_instance
while True:
try:
gt_labels = np.random.choice(len(images_dict), real_batch_size, replace=b_replace)
anchor_images = []
pos_images = []
for n in range(n_instance - 1):
pos_images.append([])
for label in gt_labels:
tmp_image_list = images_dict[label]
image_index = np.random.choice(len(tmp_image_list), n_instance, replace=False)
anchor_image = load_image(tmp_image_list[image_index[0]], input_size)
anchor_images.append(anchor_image)
for n, ind in enumerate(image_index[1:]):
pos_image = load_image(tmp_image_list[ind], input_size)
pos_images[n].append(pos_image)
#print(len(gt_labels))
if n_instance == 2:
pos_images = pos_images[0]
elif n_instance == 1:
pos_images = pos_images
else:
pos_images = np.concatenate(pos_images, axis=0)
yield anchor_images, pos_images, gt_labels #im_fn, gt_label
except Exception as e:
print(e)
continue
def generator(image_folder, input_size=224, batch_size=32):
for anchor_images, pos_images, gt_labels in get_record(image_folder, input_size, batch_size):
yield anchor_images, pos_images, gt_labels
def get_generator(image_folder, **kwargs):
return generator(image_folder, **kwargs)
## image_path = '/where/is/your/images/'
def get_batch(image_path, num_workers, **kwargs):
try:
generator = get_generator(image_path, **kwargs)
enqueuer = data_util.GeneratorEnqueuer(generator, use_multiprocessing=True)
enqueuer.start(max_queue_size=24, workers=num_workers)
        generator_output = None
while True:
while enqueuer.is_running():
if not enqueuer.queue.empty():
generator_output = enqueuer.queue.get()
break
else:
time.sleep(0.001)
yield generator_output
generator_output = None
finally:
if enqueuer is not None:
enqueuer.stop()
if __name__ == '__main__':
image_path = '/data/IR/DB/data_refinement/place_exterior'
num_workers = 4
batch_size = 128
input_size = 224
data_generator = get_batch(image_path=image_path,
num_workers=num_workers,
batch_size=batch_size,
input_size=224)
_ = 0
while True:
_ += 1
#break
start_time = time.time()
data = next(data_generator)
anchor_images = np.asarray(data[0])
pos_images = np.asarray(data[1])
gts = np.asarray(data[2])
print('%d done!!! %f' % (_, time.time() - start_time), anchor_images.shape, pos_images.shape, gts.shape)
#for sub_idx, (loaded_image, gt) in enumerate(zip(loaded_images, gts)):
# save_path = '/data/IR/DB/naver_place/test/%03d_%03d_gt_%d_image.jpg' % (_, sub_idx, gt)
# cv2.imwrite(save_path, loaded_image[:,:,::-1])
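A toy sketch of the crop pipeline in load_image, using numpy only (the cv2.resize step is noted but stubbed out); sizes follow the code above:
import numpy as np

input_size = 224
img = np.zeros((300, 400, 3), dtype=np.uint8)  # stand-in for org_image
h, w, _ = img.shape
min_len = min(h, w)
cc_margin = np.random.randint(1, int(min_len * 0.05))  # center-crop margin
crop = img[cc_margin:-cc_margin, cc_margin:-cc_margin, :]
new_input_size = int(input_size * 1.125)  # 252
# cv2.resize(crop, (new_input_size, new_input_size)) would go here
h_start = np.random.randint(0, new_input_size - input_size)
w_start = np.random.randint(0, new_input_size - input_size)
print(cc_margin, new_input_size, h_start, w_start)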
| 35.228814
| 170
| 0.615227
| 1,139
| 8,314
| 4.183494
| 0.195786
| 0.039664
| 0.016789
| 0.023505
| 0.227912
| 0.149633
| 0.104512
| 0.090241
| 0.035257
| 0.035257
| 0
| 0.020354
| 0.273154
| 8,314
| 235
| 171
| 35.378723
| 0.768162
| 0.100674
| 0
| 0.134228
| 0
| 0
| 0.047886
| 0.006365
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040268
| false
| 0
| 0.073826
| 0.006711
| 0.134228
| 0.026846
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95bc1cbdca2faf1169e04427ea20b03a36f4f201
| 1,678
|
py
|
Python
|
python_parikshith21/Day39.py
|
01coders/50-Days-Of-Code
|
98928cf0e186ee295bc90a4da0aa9554e2918659
|
[
"MIT"
] | null | null | null |
python_parikshith21/Day39.py
|
01coders/50-Days-Of-Code
|
98928cf0e186ee295bc90a4da0aa9554e2918659
|
[
"MIT"
] | null | null | null |
python_parikshith21/Day39.py
|
01coders/50-Days-Of-Code
|
98928cf0e186ee295bc90a4da0aa9554e2918659
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 17 20:55:53 2019
@author: Parikshith.H
"""
import sqlite3
conn=sqlite3.connect('music.sqlite')
cur=conn.cursor()
cur.execute('DROP TABLE IF EXISTS Tracks')
cur.execute('CREATE TABLE Tracks(title TEXT,plays INTEGER)')
cur.execute('''INSERT INTO Tracks(title,plays) VALUES ('Thunder2',100)''')
cur.execute('''INSERT INTO Tracks VALUES ('Thunder3',100)''')
cur.execute('INSERT INTO Tracks(title,plays) VALUES (?,?)',('Thunderstuck',200))
cur.execute('INSERT INTO Tracks(title,plays) VALUES (?,?)',('Dangerous',20))
cur.execute('INSERT INTO Tracks(title,plays) VALUES (?,?)',('Myway',150))
cur.execute('INSERT INTO Tracks(title,plays) VALUES (?,?)',('Newway',30))
cur.execute('SELECT * FROM Tracks')
for row in cur:
print(row)
print('****************************')
cur.execute('''UPDATE Tracks SET plays=50 WHERE title='Myway' ''')
cur.execute('SELECT * FROM Tracks')
for row in cur:
print(row)
print('****************************')
cur.execute('''DELETE FROM Tracks WHERE plays<100 ''')
cur.execute('SELECT * FROM Tracks')
for row in cur:
print(row)
conn.commit()
cur.close()
conn.close()
# =============================================================================
# #output:
# ('Thunder2', 100)
# ('Thunder3', 100)
# ('Thunderstuck', 200)
# ('Dangerous', 20)
# ('Myway', 150)
# ('Newway', 30)
# ****************************
# ('Thunder2', 100)
# ('Thunder3', 100)
# ('Thunderstuck', 200)
# ('Dangerous', 20)
# ('Myway', 50)
# ('Newway', 30)
# ****************************
# ('Thunder2', 100)
# ('Thunder3', 100)
# ('Thunderstuck', 200)
# =============================================================================
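The repeated parameterized INSERTs above can also be batched with executemany; a small self-contained sketch using an in-memory database:
import sqlite3

conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('CREATE TABLE Tracks(title TEXT, plays INTEGER)')
rows = [('Thunderstuck', 200), ('Dangerous', 20), ('Myway', 150)]
cur.executemany('INSERT INTO Tracks(title, plays) VALUES (?, ?)', rows)
conn.commit()
print(cur.execute('SELECT * FROM Tracks').fetchall())
conn.close()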
| 28.440678
| 80
| 0.544696
| 187
| 1,678
| 4.887701
| 0.320856
| 0.142232
| 0.105033
| 0.131291
| 0.619256
| 0.619256
| 0.584245
| 0.584245
| 0.296499
| 0.180525
| 0
| 0.0563
| 0.110846
| 1,678
| 59
| 81
| 28.440678
| 0.5563
| 0.340882
| 0
| 0.423077
| 0
| 0
| 0.544527
| 0.051948
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.038462
| 0
| 0.038462
| 0.192308
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95c0ec3bbf5dfcbc14218087f1c41fdd10c1b36f
| 5,135
|
py
|
Python
|
spacy/tests/website/test_home.py
|
moyogo/spacy
|
ddf5c5bb61864320189ebc70dac3bc10e4ecde82
|
[
"MIT"
] | null | null | null |
spacy/tests/website/test_home.py
|
moyogo/spacy
|
ddf5c5bb61864320189ebc70dac3bc10e4ecde82
|
[
"MIT"
] | null | null | null |
spacy/tests/website/test_home.py
|
moyogo/spacy
|
ddf5c5bb61864320189ebc70dac3bc10e4ecde82
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
import pytest
import spacy
import os
try:
xrange
except NameError:
xrange = range
@pytest.fixture()
def token(doc):
return doc[0]
@pytest.mark.models
def test_load_resources_and_process_text():
from spacy.en import English
nlp = English()
doc = nlp(u'Hello, world. Here are two sentences.')
@pytest.mark.models
def test_get_tokens_and_sentences(doc):
token = doc[0]
sentence = next(doc.sents)
assert token is sentence[0]
assert sentence.text == 'Hello, world.'
@pytest.mark.models
def test_use_integer_ids_for_any_strings(nlp, token):
hello_id = nlp.vocab.strings['Hello']
hello_str = nlp.vocab.strings[hello_id]
assert token.orth == hello_id == 3125
assert token.orth_ == hello_str == 'Hello'
def test_get_and_set_string_views_and_flags(nlp, token):
assert token.shape_ == 'Xxxxx'
for lexeme in nlp.vocab:
if lexeme.is_alpha:
lexeme.shape_ = 'W'
elif lexeme.is_digit:
lexeme.shape_ = 'D'
elif lexeme.is_punct:
lexeme.shape_ = 'P'
else:
lexeme.shape_ = 'M'
assert token.shape_ == 'W'
def test_export_to_numpy_arrays(nlp, doc):
from spacy.attrs import ORTH, LIKE_URL, IS_OOV
attr_ids = [ORTH, LIKE_URL, IS_OOV]
doc_array = doc.to_array(attr_ids)
assert doc_array.shape == (len(doc), len(attr_ids))
assert doc[0].orth == doc_array[0, 0]
assert doc[1].orth == doc_array[1, 0]
assert doc[0].like_url == doc_array[0, 1]
assert list(doc_array[:, 1]) == [t.like_url for t in doc]
@pytest.mark.models
def test_word_vectors(nlp):
doc = nlp("Apples and oranges are similar. Boots and hippos aren't.")
apples = doc[0]
oranges = doc[2]
boots = doc[6]
hippos = doc[8]
assert apples.similarity(oranges) > boots.similarity(hippos)
@pytest.mark.models
def test_part_of_speech_tags(nlp):
from spacy.parts_of_speech import ADV
def is_adverb(token):
return token.pos == spacy.parts_of_speech.ADV
# These are data-specific, so no constants are provided. You have to look
# up the IDs from the StringStore.
NNS = nlp.vocab.strings['NNS']
NNPS = nlp.vocab.strings['NNPS']
def is_plural_noun(token):
return token.tag == NNS or token.tag == NNPS
def print_coarse_pos(token):
print(token.pos_)
def print_fine_pos(token):
print(token.tag_)
@pytest.mark.models
def test_syntactic_dependencies():
def dependency_labels_to_root(token):
'''Walk up the syntactic tree, collecting the arc labels.'''
dep_labels = []
while token.head is not token:
dep_labels.append(token.dep)
token = token.head
return dep_labels
@pytest.mark.models
def test_named_entities():
def iter_products(docs):
for doc in docs:
for ent in doc.ents:
if ent.label_ == 'PRODUCT':
yield ent
def word_is_in_entity(word):
return word.ent_type != 0
    def count_parent_verb_by_person(docs):
        from collections import defaultdict
        from spacy.parts_of_speech import VERB
        counts = defaultdict(lambda: defaultdict(int))
        for doc in docs:
            for ent in doc.ents:
                if ent.label_ == 'PERSON' and ent.root.head.pos == VERB:
                    counts[ent.orth_][ent.root.head.lemma_] += 1
        return counts
def test_calculate_inline_mark_up_on_original_string():
def put_spans_around_tokens(doc, get_classes):
'''Given some function to compute class names, put each token in a
span element, with the appropriate classes computed.
All whitespace is preserved, outside of the spans. (Yes, I know HTML
won't display it. But the point is no information is lost, so you can
calculate what you need, e.g. <br /> tags, <p> tags, etc.)
'''
output = []
template = '<span classes="{classes}">{word}</span>{space}'
for token in doc:
if token.is_space:
output.append(token.orth_)
else:
output.append(
template.format(
classes=' '.join(get_classes(token)),
word=token.orth_,
space=token.whitespace_))
string = ''.join(output)
string = string.replace('\n', '')
string = string.replace('\t', ' ')
return string
@pytest.mark.models
def test_efficient_binary_serialization(doc):
from spacy.tokens.doc import Doc
byte_string = doc.to_bytes()
open('moby_dick.bin', 'wb').write(byte_string)
nlp = spacy.en.English()
for byte_string in Doc.read_bytes(open('moby_dick.bin', 'rb')):
doc = Doc(nlp.vocab)
doc.from_bytes(byte_string)
@pytest.mark.models
def test_multithreading(nlp):
texts = [u'One document.', u'...', u'Lots of documents']
# .pipe streams input, and produces streaming output
iter_texts = (texts[i % 3] for i in xrange(100000000))
for i, doc in enumerate(nlp.pipe(iter_texts, batch_size=50, n_threads=4)):
assert doc.is_parsed
if i == 100:
break
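What put_spans_around_tokens emits per token, sketched without loading spaCy; the template string is taken from the test above, the class name is illustrative:
template = '<span classes="{classes}">{word}</span>{space}'
print(template.format(classes='pos-NOUN', word='Hello', space=' '))
# <span classes="pos-NOUN">Hello</span>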
| 28.370166
| 78
| 0.631353
| 718
| 5,135
| 4.32312
| 0.346797
| 0.027062
| 0.046392
| 0.05509
| 0.11759
| 0.042526
| 0.02384
| 0.02384
| 0.02384
| 0.02384
| 0
| 0.01026
| 0.259786
| 5,135
| 180
| 79
| 28.527778
| 0.806367
| 0.102629
| 0
| 0.117188
| 0
| 0
| 0.057984
| 0.008785
| 0
| 0
| 0
| 0
| 0.101563
| 1
| 0.171875
| false
| 0
| 0.0625
| 0.03125
| 0.289063
| 0.03125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95c1db49e8979342f440e2ee5e1a48186d51308c
| 936
|
py
|
Python
|
parsers/download_data.py
|
bioinf-mcb/polish-microbiome-project
|
0fc15b1a5afe4edf63b6be6b945ac4053e3a24f9
|
[
"BSD-3-Clause"
] | null | null | null |
parsers/download_data.py
|
bioinf-mcb/polish-microbiome-project
|
0fc15b1a5afe4edf63b6be6b945ac4053e3a24f9
|
[
"BSD-3-Clause"
] | null | null | null |
parsers/download_data.py
|
bioinf-mcb/polish-microbiome-project
|
0fc15b1a5afe4edf63b6be6b945ac4053e3a24f9
|
[
"BSD-3-Clause"
] | null | null | null |
#%%
import json
import requests
from io import StringIO
import pandas as pd
# %%
with open("../db_pass", "r") as f:
token = json.load(f)['token']
# %%
data = {
'token': token,
'content': 'record',
'format': 'csv',
'type': 'flat',
'csvDelimiter': '',
'rawOrLabel': 'raw',
'rawOrLabelHeaders': 'raw',
'exportCheckboxLabel': 'false',
'exportSurveyFields': 'false',
'exportDataAccessGroups': 'false',
'returnFormat': 'csv',
'fields': 'patient_id,age,bmi,covid_test_date,date_of_test,weight,height,admission_date,final_date,death,sex'
}
r = requests.post('http://192.168.45.244/api/',data=data)
print('HTTP Status: ' + str(r.status_code))
data = StringIO(r.text)
# %%
df = pd.read_csv(data)
df = df[df["height"].apply(lambda x: not pd.isna(x))]
df = df.dropna(axis=1, how='all')
df["bmi"] = df["bmi"].apply(lambda x: round(x, 1))
df.to_csv("metadata.csv", index=False)
print(df)
# %%
| 23.4
| 113
| 0.63141
| 129
| 936
| 4.496124
| 0.589147
| 0.02069
| 0.041379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016477
| 0.157051
| 936
| 39
| 114
| 24
| 0.718631
| 0.014957
| 0
| 0
| 0
| 0.034483
| 0.386041
| 0.129771
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.034483
| 0.137931
| 0
| 0.137931
| 0.068966
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95c256321ed64a1e2f22ab370936dbb097ea26b8
| 2,622
|
py
|
Python
|
preprocess/sequence_stats.py
|
ashish-roopan/fsgan
|
1582e112d0f59cd32920ac5953baec783e088cad
|
[
"CC0-1.0"
] | 599
|
2020-04-14T19:28:58.000Z
|
2022-03-26T11:29:37.000Z
|
preprocess/sequence_stats.py
|
ashish-roopan/fsgan
|
1582e112d0f59cd32920ac5953baec783e088cad
|
[
"CC0-1.0"
] | 157
|
2020-04-14T21:13:43.000Z
|
2022-02-07T06:30:16.000Z
|
preprocess/sequence_stats.py
|
ashish-roopan/fsgan
|
1582e112d0f59cd32920ac5953baec783e088cad
|
[
"CC0-1.0"
] | 150
|
2020-04-14T20:40:41.000Z
|
2022-03-30T10:50:21.000Z
|
"""
Sequence statistics: Count, length, bounding boxes size.
"""
import os
from glob import glob
import pickle
from tqdm import tqdm
def extract_stats(cache_path):
# Load sequences from file
with open(cache_path, "rb") as fp: # Unpickling
seq_list = pickle.load(fp)
if len(seq_list) == 0:
return 0, 0., 0.
# For each sequence
len_sum, size_sum = 0., 0.
for seq in seq_list:
len_sum += len(seq)
size_sum += seq.size_avg
return len(seq_list), len_sum / len(seq_list), size_sum / len(seq_list)
def main(in_dir, out_path=None, postfix='_dsfd_seq.pkl'):
out_path = os.path.join(in_dir, 'sequence_stats.txt') if out_path is None else out_path
# Validation
if not os.path.isdir(in_dir):
        raise RuntimeError('Input directory does not exist: ' + in_dir)
# Parse file paths
input_query = os.path.join(in_dir, '*' + postfix)
file_paths = sorted(glob(input_query))
# For each file in the input directory with the specified postfix
pbar = tqdm(file_paths, unit='files')
count_sum, len_sum, size_sum = 0., 0., 0.
vid_count = 0
for i, file_path in enumerate(pbar):
curr_count, curr_mean_len, curr_mean_size = extract_stats(file_path)
if curr_count == 0:
continue
count_sum += curr_count
len_sum += curr_mean_len
size_sum += curr_mean_size
vid_count += 1
pbar.set_description('mean_count = %.1f, mean_len = %.1f, mean_size = %.1f, valid_vids = %d / %d' %
(count_sum / vid_count, len_sum / vid_count, size_sum / vid_count, vid_count, i + 1))
# Write result to file
if out_path is not None:
with open(out_path, "w") as f:
f.write('mean_count = %.1f\n' % (count_sum / vid_count))
f.write('mean_len = %.1f\n' % (len_sum / vid_count))
f.write('mean_size = %.1f\n' % (size_sum / vid_count))
f.write('valid videos = %d / %d\n' % (vid_count, len(file_paths)))
if __name__ == "__main__":
# Parse program arguments
import argparse
parser = argparse.ArgumentParser('detections2sequences')
parser.add_argument('input', metavar='DIR',
help='input directory')
parser.add_argument('-o', '--output', default=None, metavar='PATH',
help='output directory')
parser.add_argument('-p', '--postfix', metavar='POSTFIX', default='_dsfd_seq.pkl',
help='the files postfix to search the input directory for')
args = parser.parse_args()
main(args.input, args.output, args.postfix)
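Example invocations, matching the argparse setup above (paths illustrative):
#   python sequence_stats.py /path/to/cache_dir -o stats.txt -p _dsfd_seq.pkl
# or programmatically:
#   main('/path/to/cache_dir', out_path='stats.txt', postfix='_dsfd_seq.pkl')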
| 35.432432
| 114
| 0.622426
| 375
| 2,622
| 4.104
| 0.274667
| 0.051982
| 0.042885
| 0.023392
| 0.102014
| 0.071475
| 0
| 0
| 0
| 0
| 0
| 0.010256
| 0.256293
| 2,622
| 73
| 115
| 35.917808
| 0.778974
| 0.094584
| 0
| 0
| 0
| 0.02
| 0.161933
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.1
| 0
| 0.18
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95c5e262b4da5f7adb2dec6d61c74e3194680b9a
| 7,735
|
py
|
Python
|
tests/test_dossier.py
|
openkamer/tk-api-python
|
907b98ccc7602ad7e3e74f1e06f9544fbe66aba3
|
[
"MIT"
] | 9
|
2017-11-16T12:39:11.000Z
|
2021-10-16T19:30:52.000Z
|
tests/test_dossier.py
|
openkamer/tk-api-python
|
907b98ccc7602ad7e3e74f1e06f9544fbe66aba3
|
[
"MIT"
] | 1
|
2017-11-16T14:20:20.000Z
|
2017-11-20T18:49:14.000Z
|
tests/test_dossier.py
|
openkamer/tk-api-python
|
907b98ccc7602ad7e3e74f1e06f9544fbe66aba3
|
[
"MIT"
] | 3
|
2018-09-10T18:57:39.000Z
|
2020-06-09T14:13:10.000Z
|
import datetime
from tkapi.util import queries
from tkapi.zaak import Zaak, ZaakSoort
from tkapi.dossier import Dossier, DossierWetsvoorstel
from tkapi.document import Document
from .core import TKApiTestCase
class TestDossier(TKApiTestCase):
def test_get_dossiers(self):
dossiers = self.api.get_dossiers(filter=None, max_items=10)
self.assertEqual(10, len(dossiers))
def test_get_dossier_by_nummer(self):
nummer = 34435
filter = Dossier.create_filter()
filter.filter_nummer(nummer)
dossiers = self.api.get_dossiers(filter=filter)
self.assertEqual(len(dossiers), 1)
dossiers[0].print_json()
def test_dossier_filter(self):
self.check_dossier_filter('2016Z16486', 34537)
self.check_dossier_filter('2016Z24906', 34640)
def check_dossier_filter(self, zaak_nr, expected_dossier_nummer):
dossier_filter = Dossier.create_filter()
dossier_filter.filter_zaak(zaak_nr)
dossiers = self.api.get_dossiers(filter=dossier_filter)
# for dossier in dossiers:
# dossier.print_json()
self.assertEqual(len(dossiers), 1)
# print(dossiers[0].nummer)
self.assertEqual(dossiers[0].nummer, expected_dossier_nummer)
class TestDossiersForZaken(TKApiTestCase):
start_datetime = datetime.datetime(year=2016, month=1, day=1)
end_datetime = datetime.datetime(year=2016, month=1, day=14)
def test_get_dossiers(self):
zaak_filter = Zaak.create_filter()
zaak_filter.filter_date_range(
TestDossiersForZaken.start_datetime,
TestDossiersForZaken.end_datetime
)
zaak_filter.filter_soort(ZaakSoort.WETGEVING)
zaken = self.api.get_zaken(zaak_filter)
print('Wetgeving zaken found: ' + str(len(zaken)))
dossier_filter = Dossier.create_filter()
zaak_nummers = [zaak.nummer for zaak in zaken]
print(zaak_nummers)
dossier_filter.filter_zaken(zaak_nummers)
dossiers = self.api.get_dossiers(filter=dossier_filter)
dossier_zaak_nummers = set()
for dossier in dossiers:
print('dossier.nummer: ', str(dossier.nummer))
for zaak in dossier.zaken:
dossier_zaak_nummers.add(zaak.nummer)
print('dossier_zaak_nummers', dossier_zaak_nummers)
for zaak in zaken:
if zaak.nummer not in dossier_zaak_nummers:
print(zaak.nummer)
# zaak.print_json()
# self.assertTrue(zaak_nr in dossier_zaak_nummers)
# print(zaken)
for zaak_nummer in zaak_nummers:
self.assertTrue(zaak_nummer in dossier_zaak_nummers)
class TestDossierAfgesloten(TKApiTestCase):
start_datetime = datetime.datetime(year=2015, month=1, day=1)
end_datetime = datetime.datetime.now()
def test_filter_afgesloten(self):
dossier_filter = Dossier.create_filter()
dossier_filter.filter_afgesloten(True)
dossiers = self.api.get_dossiers(filter=dossier_filter)
# There are currently no afgesloten dossiers, this will hopefully change in the future
self.assertEqual(len(dossiers), 0)
class TestDossierFilter(TKApiTestCase):
def test_filter_dossier_nummer(self):
nummer = 33885
dossier = queries.get_dossier(nummer)
self.assertEqual(nummer, dossier.nummer)
def test_filter_dossier_nummer_toevoeging(self):
nummer = 35300
toevoeging = 'XVI'
dossier = queries.get_dossier(nummer, toevoeging=toevoeging)
self.assertEqual(nummer, dossier.nummer)
self.assertEqual(toevoeging, dossier.toevoeging)
def test_get_document_actors(self):
# nummer = 35234
nummer = 33885
dossier = queries.get_dossier(nummer)
for zaak in dossier.zaken:
print('==========')
print(zaak.soort, zaak.onderwerp, zaak.volgnummer)
for actor in zaak.actors:
print(actor.naam, actor.persoon.achternaam if actor.persoon else None, actor.fractie, actor.commissie)
for doc in zaak.documenten:
print(doc.soort, doc.onderwerp, doc.titel, doc.volgnummer)
for actor in doc.actors:
print(actor.naam)
class TestWetsvoorstelDossier(TKApiTestCase):
def test_get_wetsvoorstellen_dossiers(self):
max_items = 200
wetsvoorstellen = self.api.get_items(DossierWetsvoorstel, max_items=max_items)
self.assertEqual(max_items, len(wetsvoorstellen))
def test_get_begroting_dossiers(self):
filter = Zaak.create_filter()
filter.filter_date_range(datetime.date(year=2019, month=6, day=1), datetime.date.today())
filter.filter_soort(ZaakSoort.BEGROTING, is_or=True)
zaken = self.api.get_zaken(filter=filter)
for zaak in zaken:
dossier_id = str(zaak.dossier.nummer)
print(dossier_id)
def test_get_dossiers_via_documenten(self):
pd_filter = Document.create_filter()
# NOTE: this date filter does not seem to work in combination with the soort filter.
# start_datetime = datetime.datetime(year=2016, month=1, day=1)
# end_datetime = datetime.datetime(year=2016, month=2, day=1)
# pd_filter.filter_date_range(start_datetime, end_datetime)
pd_filter.filter_soort('Voorstel van wet', is_or=True)
pd_filter.filter_soort('Voorstel van wet (initiatiefvoorstel)', is_or=True)
pds = self.api.get_documenten(pd_filter)
dossier_nrs = []
pds_no_dossier_nr = []
for pd in pds[:10]:
print(pd.dossier_nummers)
if pd.dossier_nummers:
dossier_nrs += pd.dossier_nummers
else:
pds_no_dossier_nr.append(pd)
for pd in pds_no_dossier_nr:
print(pd.dossier_nummers)
print(pd.onderwerp)
dossier_nrs = sorted(set(dossier_nrs))
print(dossier_nrs)
for dossier_nr in dossier_nrs:
print(dossier_nr)
print(len(dossier_nrs))
# def test_get_dossiers(self):
# zaak_filter = Zaak.create_filter()
# start_datetime = datetime.datetime(year=2005, month=1, day=1)
# end_datetime = datetime.datetime.now()
# zaak_filter.filter_date_range(start_datetime, end_datetime)
# zaak_filter.filter_soort('Wetgeving')
# zaken = self.api.get_zaken(zaak_filter)
# print('Wetgeving zaken found: ' + str(len(zaken)))
# zaak_nummers = [zaak.nummer for zaak in zaken]
# print(zaak_nummers)
# dossiers = []
# nrs_batch = set()
# for zaak_nr in zaak_nummers:
# nrs_batch.add(zaak_nr)
# if len(nrs_batch) < 10:
# continue
# dossier_filter = Dossier.create_filter()
# dossier_filter.filter_zaken(nrs_batch)
# nrs_batch = set()
# dossiers_for_zaak = self.api.get_dossiers(filter=dossier_filter)
# if dossiers_for_zaak:
# dossiers += dossiers_for_zaak
# print('Dossier found for zaak: ' + str(zaak_nr))
# else:
# print('WARNING: No dossier found for zaak: ' + str(zaak_nr))
# dossier_nummers = []
# for dossier in dossiers:
# print('\n=======')
# print(dossier.nummer)
# print(dossier.afgesloten)
# print(dossier.organisatie)
# print(dossier.titel)
# dossier_nummers.append(dossier.nummer)
# # dossier.print_json()
# dossier_nrs = sorted(set(dossier_nummers))
# print(dossier_nrs)
# print(len(dossier_nrs))
| 39.065657
| 118
| 0.648869
| 909
| 7,735
| 5.293729
| 0.147415
| 0.044888
| 0.02286
| 0.022444
| 0.405653
| 0.310266
| 0.265586
| 0.204073
| 0.124688
| 0.108063
| 0
| 0.019686
| 0.257919
| 7,735
| 197
| 119
| 39.263959
| 0.818641
| 0.249644
| 0
| 0.184874
| 0
| 0
| 0.025204
| 0
| 0
| 0
| 0
| 0
| 0.084034
| 1
| 0.10084
| false
| 0
| 0.05042
| 0
| 0.226891
| 0.151261
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
hexsha: 95c7b536f4cc90da867d02e9f53e889cad554b21 | size: 27,649 | ext: py | lang: Python
path: Manuscript files/modflow_reference/auxfile_hexaplot.py | repo: MaxRamgraber/Simple-AEM-Toolbox | head: 27751103f5e504dd675ba6225f2aee9f85d7c85d | licenses: ["MIT"]
stars: 3 (2021-06-16T12:27:22.000Z to 2022-01-04T11:21:35.000Z) | issues: null | forks: 3 (2021-06-17T11:20:20.000Z to 2022-01-12T09:56:56.000Z)
"""
This library contains several functions designed to help with the illustration of hexagonal grids
Functions:
plot_hexagons : plots a specified data vector over a 2-D hexagon grid.
create_alpha_mask : creates an alpha shape (a concave hull), which is required for plotting contours; without it, the contour function extrapolates outside of the model area.
plot_scattered_contour : plots contour lines over an irregular grid, such as a hexagonal one.
plot_hexagons_3d : plots a 2-dimensional hexagon grid with specified z-dimensions
"""
def plot_hexagons (data, hexagon_grid_cores, hexagon_radius, hexagon_orientation = 0, colormap = 'steel', color = None, vmin = None, vmax = None, vincr = None, xlabel = None, ylabel = None, clabel = None, hide_colorbar = False, **kwargs):
"""
Call to plot a specified vector (positions relative to node IDs) in a hexagonal grid
@params:
data - Required : vector of values for hexagonal plot, positions corresponding to cell IDs (counting from zero)
hexagon_grid_cores - Required : tessellated polygons over area of interest
hexagon_radius - Required : radius of hexagons used for tessellation
    hexagon_orientation - Optional : orientation of hexagon in clockwise degrees [0 = flat top]
    colormap - Optional : specify a colormap as string
    color - Optional : fixed face color for all hexagons, overriding the colormap
    vmin - Optional : externally specified min value for colorbar
    vmax - Optional : externally specified max value for colorbar
    vincr - Optional : specified value increment for colorbar
    xlabel - Optional : string for xlabel
    ylabel - Optional : string for ylabel
    clabel - Optional : string for colorbar label
    hide_colorbar - Optional : suppress the colorbar if True, default is False
    **kwargs - Optional : keyword arguments for matplotlib.patches.RegularPolygon
"""
    import matplotlib
    import matplotlib.pyplot   # 'import matplotlib' alone does not expose pyplot
    import matplotlib.patches
    import numpy as np
    import math
#--------------------------------------------------------------------------
# Prepare data for plotting
#--------------------------------------------------------------------------
    # If not specified, define range of values
    if vmin is None or vmax is None:
        vmin = np.min(data)
        vmax = np.max(data)
    vrange = vmax - vmin
    if vincr is None:
        vincr = vrange / 100
    # Snap value range to multiples of vincr
    vmin = int(vmin / vincr) * vincr        # minimum value for colorbar
    vmax = (int(vmax / vincr) + 1) * vincr  # maximum value for colorbar
    vrange = vmax - vmin  # recompute so the colour scaling always matches the snapped range
if color is None:
# Retrieve colormap
if colormap == 'steel':
# Create colormap 'steel'
from matplotlib.colors import LinearSegmentedColormap
cmap_steel = [(0.007843137,0.305882353,0.443137255), (0.301960784,0.592156863,0.784313725),(0.623529412,0.776470588,0.882352941)]
cm = LinearSegmentedColormap.from_list('steel', cmap_steel, N=100)
cmaps = cm
else:
cmaps = colormap
# Correct orientation
orientation = math.radians(-hexagon_orientation+30)
    # hexagon_radius is the apothem (centre to edge midpoint); convert to corner distance
    edgepoint_distance = hexagon_radius/np.cos(np.deg2rad(30))
# Retrieve colormap information
if color is None:
cmap = matplotlib.cm.get_cmap(cmaps)
#--------------------------------------------------------------------------
# Start plotting
#--------------------------------------------------------------------------
# Create empty figure
ax1 = matplotlib.pyplot.gca()
# Plot hexagons
for hex in range(len(hexagon_grid_cores[:,0])):
# Retrieve color value
if color is None:
rgba = cmap((data[hex]-vmin)/(vrange))
rgba = matplotlib.colors.rgb2hex(rgba)
else:
rgba = color
# Add the patch
ax1.add_patch(
matplotlib.patches.RegularPolygon(
(hexagon_grid_cores[hex,0], hexagon_grid_cores[hex,1]), # x and y
6, # edges
edgepoint_distance,
orientation=orientation,
facecolor = rgba,
**kwargs)
)
# Determine meaningful colorbar steps
if color is None:
colorbar_increment = vincr
colorbar_min = int(vmin/colorbar_increment)*colorbar_increment # minimum value for colorbar
colorbar_max = (int(vmax/colorbar_increment)+1)*colorbar_increment # maximum value for colorbar
colorbar_increment_numbers = int((colorbar_max-colorbar_min)/colorbar_increment+1)
colorbar_steps = []
for num in range(colorbar_increment_numbers):
colorbar_steps = colorbar_steps + [colorbar_min+num*colorbar_increment]
# Recompute the ax.dataLim
ax1.relim()
# Update ax.viewLim using the new dataLim
ax1.autoscale_view()
# Create colorbar
if hide_colorbar == False and color is None:
norm = matplotlib.colors.Normalize(vmin=vmin,vmax=vmax)
sm = matplotlib.pyplot.cm.ScalarMappable(cmap=cmap, norm=norm)
sm.set_array([])
cbar = matplotlib.pyplot.colorbar(sm)
    # Label plot
    if xlabel is not None:
        matplotlib.pyplot.xlabel(xlabel)
    if ylabel is not None:
        matplotlib.pyplot.ylabel(ylabel)
    if clabel is not None and not hide_colorbar and color is None:
        cbar.set_label(clabel, rotation=270, labelpad=20)
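# --- Usage sketch (illustrative addition, not part of the original library) ---
# Plots three hexagons with arbitrary data values; all names are local to the example.
def _example_plot_hexagons():
    import numpy as np
    import matplotlib.pyplot as plt
    cores = np.array([[0.0, 0.0],
                      [1.5, 0.0],
                      [0.75, 1.3]])   # hexagon centre coordinates, shape [n x 2]
    data = np.array([1.0, 2.0, 3.0])  # one value per hexagon
    plt.figure()
    plot_hexagons(data, cores, hexagon_radius=0.75,
                  xlabel='x [m]', ylabel='y [m]', clabel='value')
    plt.show()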
def create_alpha_mask(points, distance_limit, resolution_x = 1000, resolution_y = 1000, visualization = True):
"""
Creates interpolation grid, then masks over the alpha shape spanned up by points and defined by distance_limit.
@params:
points - Required : points spanning up alpha shape
distance_limit - Required : distance threshold for removing Delaunay simplices
resolution_x - Optional : resolution for grid in x, default is 1000
resolution_y - Optional : resolution for grid in y, default is 1000
    visualization - Optional : boolean for visualizing the result, default is True
Returns:
grid_mask : An array containing 1 for cells inside, and 0 for cells outside
"""
import numpy as np
from scipy.spatial import Delaunay
from matplotlib.collections import LineCollection
import matplotlib.path as mplPath
#----------------------------------------------------------------------
# Create Grid
#----------------------------------------------------------------------
# Create meshgrid
xi = np.transpose(np.linspace(min(points[:,0]), max(points[:,0]), resolution_x))
yi = np.transpose(np.linspace(min(points[:,1]), max(points[:,1]), resolution_y))
X, Y = np.meshgrid(xi, yi)
# Reshape into vector
gridpoints_x = np.reshape(X, resolution_x*resolution_y)
gridpoints_y = np.reshape(Y, resolution_x*resolution_y)
# Combine into gridpoints array
gridpoints = np.transpose(np.asarray((gridpoints_x, gridpoints_y)))
#----------------------------------------------------------------------
# Create Alpha Shape
#----------------------------------------------------------------------
# Start Delaunay triangulation
tri = Delaunay(points)
# Auxiliary function for plotting, if required
if visualization == True:
import matplotlib.pyplot as plt
edges = set()
edge_points = []
def add_edge(i, j):
"""Add a line between the i-th and j-th points, if not in the list already"""
if (i, j) in edges or (j, i) in edges:
# already added
return
edges.add( (i, j) )
edge_points.append(points[ [i, j] ])
    # Remove simplices with any side longer than distance_limit; work on a copy of
    # the simplex array, since newer SciPy versions disallow mutating Delaunay attributes
    simplices = tri.simplices
    simplex_flag = np.zeros(len(simplices[:, 0]))  # flags bad simplices
    counter = 0
    for ia, ib, ic in simplices:
        # ia, ib, ic = indices of corner points of the triangle
        if np.sqrt((points[ia,0]-points[ib,0])**2+(points[ia,1]-points[ib,1])**2) < distance_limit and \
           np.sqrt((points[ia,0]-points[ic,0])**2+(points[ia,1]-points[ic,1])**2) < distance_limit and \
           np.sqrt((points[ib,0]-points[ic,0])**2+(points[ib,1]-points[ic,1])**2) < distance_limit:
            # all sides below the threshold, keep this simplex
            simplex_flag[counter] = 0
        else:
            # simplex has at least one side larger than threshold, flag it
            simplex_flag[counter] = 1
        counter += 1
    simplices = simplices[simplex_flag == 0, :]  # remove bad simplices
    # Visualize, if requested
    if visualization == True:
        # Mark all remaining simplices
        for ia, ib, ic in simplices:
            add_edge(ia, ib)
            add_edge(ib, ic)
            add_edge(ic, ia)
        # Draw them
        lines = LineCollection(edge_points)
        plt.figure()
        plt.gca().add_collection(lines)
        plt.plot(points[:,0], points[:,1], 'o')
#----------------------------------------------------------------------
# Mask over Alpha Shape
#----------------------------------------------------------------------
# Prepare point flag
    flag_gridpoints = np.zeros(len(gridpoints[:, 0]), dtype=int)  # np.int was removed in NumPy 1.24
# Evaluate gridpoints
    for sim in range(len(simplices[:, 0])):
        # Print progress bar
        cv = sim
        mv = len(simplices[:, 0]) - 1
        print('\r%s |%s| %s%% %s' % ('Masking: ', '\033[33m'+'█' * int(50 * cv // mv) + '-' * (50 - int(50 * cv // mv))+'\033[0m', '{0:.1f}'.format(100 * cv / mv), ' Complete'), end = '\r')
        # Create simplex path
        bbPath = mplPath.Path(np.array([points[simplices[sim,0],:],
                                        points[simplices[sim,1],:],
                                        points[simplices[sim,2],:],
                                        points[simplices[sim,0],:]]))
# Flag points that are inside this simplex
for gridpts in range(len(gridpoints[:,0])):
if flag_gridpoints[gridpts] == 0: # only process points not already allocated
if bbPath.contains_point((gridpoints[gridpts,0],gridpoints[gridpts,1])) == True:
flag_gridpoints[gridpts] = 1
# Plot, if required
if visualization == True:
plt.scatter(gridpoints[flag_gridpoints == 1,0], gridpoints[flag_gridpoints == 1,1],color = 'g')
plt.scatter(gridpoints[flag_gridpoints == 0,0], gridpoints[flag_gridpoints == 0,1],color = 'r')
    # Reshape flag_gridpoints into a 2D array (returned directly; no global needed)
    grid_mask = np.reshape(flag_gridpoints, (resolution_y, resolution_x))
# Return result
return grid_mask
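# --- Usage sketch (illustrative addition): mask a coarse grid over a ring of points.
def _example_alpha_mask():
    import numpy as np
    theta = np.linspace(0, 2 * np.pi, 40, endpoint=False)
    points = np.column_stack((np.cos(theta), np.sin(theta)))  # points on the unit circle
    mask = create_alpha_mask(points, distance_limit=0.5,
                             resolution_x=100, resolution_y=100,
                             visualization=False)
    print(mask.sum(), 'of', mask.size, 'grid cells lie inside the alpha shape')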
def plot_scattered_contour(x, y, data, resolution_x=1000, resolution_y=1000,
grid_mask = None, vmin = None, vmax = None, vincr = None, suppress_clabel = False,
**kwargs):
"""
Call to plot contour of scattered data
@params:
x - Required : x-coordinate
y - Required : y-coordinate
data - Required : data for the contours
resolution_x - Optional : resolution of auxiliary grid in x
resolution_y - Optional : resolution of auxiliary grid in y
grid_mask - Optional : mask array of dimension [resolution_y,resolution_x]
vmin - Optional : min value for contour
vmax - Optional : max value for contour
vincr - Optional : increment for contour
    suppress_clabel - Optional : flag whether contours should be labelled, False by default
    **kwargs - Optional : keyword arguments for matplotlib.pyplot.contour
"""
    import numpy as np
    import matplotlib.pyplot    # needed explicitly for matplotlib.pyplot.contour below
    import scipy.interpolate    # griddata lives in the scipy.interpolate submodule
#--------------------------------------------------------------------------
# Integrity checks
#--------------------------------------------------------------------------
    # Check that grid_mask, if given, matches the meshgrid dimensions
    if grid_mask is not None:
        if len(grid_mask[:,0]) != resolution_y or len(grid_mask[0,:]) != resolution_x:
            raise Exception('Grid mask dimensions must match resolution in x and y!')
    # Check if one of the cells has dried; this algorithm can't handle that yet
    if (vmin if vmin is not None else np.min(data)) < -1000:
        print('\033[31m'+'WARNING:'+'\033[0m'+' Dried cells detected. Contour not printed.')
        return
    # Extract vmin and vmax, if not specified
    if vmin is None or vmax is None:
        vmin = np.min(data)
        vmax = np.max(data)
    # Set vincr, if not specified
    if vincr is None:
        vincr = (vmax - vmin) / 10
    # Snap value range to multiples of vincr
    vmin = int(vmin / vincr) * vincr        # minimum contour level
    vmax = (int(vmax / vincr) + 1) * vincr  # maximum contour level
#--------------------------------------------------------------------------
# Prepare data for plotting
#--------------------------------------------------------------------------
# Convert source material into required format
source = np.transpose(np.asarray([x,y]))
# Create and convert target material
xi = np.transpose(np.linspace(min(x), max(x), resolution_x))
yi = np.transpose(np.linspace(min(y), max(y), resolution_y))
X, Y = np.meshgrid(xi, yi)
target = np.transpose(np.asarray([X,Y]))
# Interpolate and transpose
Z = scipy.interpolate.griddata(source, data, target)
Z = np.transpose(Z)
    # Mask values, if grid_mask was specified
    if grid_mask is not None:
        Z[grid_mask == 0] = float('NaN')
    # Define contour levels
    levels = np.arange(vmin, vmax, vincr)
#--------------------------------------------------------------------------
    # Plot the contours
#--------------------------------------------------------------------------
CS = matplotlib.pyplot.contour(xi,yi,Z,levels=levels,**kwargs)
if not suppress_clabel:
matplotlib.pyplot.clabel(CS, inline=1, inline_spacing = 0)
return
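# --- Usage sketch (illustrative addition): masked contours over scattered data.
# Synthetic coordinates and values; resolutions kept small so the example runs quickly.
def _example_masked_contour():
    import numpy as np
    import matplotlib.pyplot as plt
    rng = np.random.default_rng(0)
    points = rng.uniform(0, 10, size=(200, 2))           # scattered (x, y) locations
    data = np.sin(points[:, 0]) + np.cos(points[:, 1])   # synthetic field values
    mask = create_alpha_mask(points, distance_limit=3.0,
                             resolution_x=100, resolution_y=100,
                             visualization=False)
    plt.figure()
    plot_scattered_contour(points[:, 0], points[:, 1], data,
                           resolution_x=100, resolution_y=100, grid_mask=mask)
    plt.show()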
def plot_hexagons_3d(grid, zdim, hexagon_radius, hexagon_orientation = 0, xlabel = 'x', ylabel = 'y', zlabel = 'z', clabel = 'depth', depth_colormap = 'steel', alpha = 1, **kwargs):
"""
Call to tessellate a given polygon with hexagons
@params:
grid - Required : x-y-coordinates of center of hexagons, array of form [nx2]
zdim - Required : bottom and top elevation of hexagon cells, array of form [nx2]
hexagon_radius - Required : radius of hexagons used for tessellation
    hexagon_orientation - Optional : orientation of hexagon in clockwise degrees [0 = flat top], default is 0
xlabel - Optional : label for x-axis
ylabel - Optional : label for y-axis
zlabel - Optional : label for z-axis
clabel - Optional : label for colorbar
depth_colormap - Optional : string of colormap, if requested
alpha - Optional : alpha value for transparency of polygons, default is 1
**kwargs - Optional : keyword arguments for Poly3DCollection
"""
# PLOT 3D
    import numpy as np
    import matplotlib           # needed at function scope for matplotlib.cm and matplotlib.colors below
    import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.mplot3d.art3d import Poly3DCollection, Line3DCollection
import math
if depth_colormap == 'steel':
# Create colormap 'steel'
from matplotlib.colors import LinearSegmentedColormap
cmap_steel = [(0.007843137,0.305882353,0.443137255), (0.301960784,0.592156863,0.784313725),(0.623529412,0.776470588,0.882352941)]
cm = LinearSegmentedColormap.from_list('steel', cmap_steel, N=100)
cmaps = cm
else:
cmaps = depth_colormap
# Initialize figure
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
    # hexagon_radius is the apothem (centre to edge midpoint); convert to corner distance
    edgepoint_distance = hexagon_radius/np.cos(np.deg2rad(30))
# Determine depth range, if colorbar is requested
vmin = np.min(zdim[:,1]-zdim[:,0])
vmax = np.max(zdim[:,1]-zdim[:,0])
c_range = vmax-vmin
# Plot hexagons
for hex in range(len(grid[:,0])):
# Reset coordinate variables
x = []
y = []
# Read top and bottom elevation
zbot = zdim[hex,0]
ztop = zdim[hex,1]
# Pre-allocate memory for coordinate matrix
Z = np.zeros((12,3))
# Determine cell color, if requested
if depth_colormap != 'None':
            # Retrieve colormap information (matplotlib is imported at the top of this function)
            cmap = matplotlib.cm.get_cmap(cmaps)
rgba = cmap((ztop-zbot-vmin)/c_range) #cmap((zbot-vmin)/(vmax-vmin))
rgba = list(rgba)
rgba[3] = alpha
# rgba = matplotlib.colors.rgb2hex(rgba)
# Plot grid
counter = 0
for angle in range(0-hexagon_orientation, 420-hexagon_orientation, 60):
# Coordinates of edge point
x = np.append(x,grid[hex,0]+math.cos(math.radians(angle)) * edgepoint_distance)
y = np.append(y,grid[hex,1]+math.sin(math.radians(angle)) * edgepoint_distance)
# Write into coordinate matrix
if counter < 6:
Z[counter,0] = grid[hex,0]+math.cos(math.radians(angle)) * edgepoint_distance
Z[counter,1] = grid[hex,1]+math.sin(math.radians(angle)) * edgepoint_distance
Z[counter,2] = ztop
Z[6+counter,0] = grid[hex,0]+math.cos(math.radians(angle)) * edgepoint_distance
Z[6+counter,1] = grid[hex,1]+math.sin(math.radians(angle)) * edgepoint_distance
Z[6+counter,2] = zbot
counter += 1
# Vertices of hexagon sides
verts = [[Z[0],Z[1],Z[7],Z[6]],
[Z[1],Z[2],Z[8],Z[7]],
[Z[2],Z[3],Z[9],Z[8]],
[Z[3],Z[4],Z[10],Z[9]],
[Z[4],Z[5],Z[11],Z[10]],
[Z[5],Z[0],Z[6],Z[11]]]
        # Plot hexagon sides; apply the face color only when a colormap is active
        face = Poly3DCollection(verts, **kwargs)
        if depth_colormap != 'None':
            face.set_facecolor(rgba)
        ax.add_collection3d(face)
# Vertices of hexagon top
verts = [[Z[0],Z[1],Z[2],Z[3],Z[4],Z[5]]]
        # Plot hexagon top; apply the face color only when a colormap is active
        face = Poly3DCollection(verts, **kwargs)
        if depth_colormap != 'None':
            face.set_facecolor(rgba)
        ax.add_collection3d(face)
# Vertices of hexagon bot
verts = [[Z[6],Z[7],Z[8],Z[9],Z[10],Z[11]]]
        # Plot hexagon bottom; apply the face color only when a colormap is active
        face = Poly3DCollection(verts, **kwargs)
        if depth_colormap != 'None':
            face.set_facecolor(rgba)
        ax.add_collection3d(face)
# Determine meaningful colorbar steps, if colorbar was requested
if depth_colormap != 'None':
colorbar_increment = 0.1
colorbar_min = int(vmin/colorbar_increment)*colorbar_increment # minimum value for colorbar
colorbar_max = (int(vmax/colorbar_increment)+1)*colorbar_increment # maximum value for colorbar
colorbar_increment_numbers = int((colorbar_max-colorbar_min)/colorbar_increment+1)
colorbar_steps = []
for num in range(colorbar_increment_numbers):
colorbar_steps = colorbar_steps + [colorbar_min+num*colorbar_increment]
# Create colorbar
norm = matplotlib.colors.Normalize(vmin=vmin,vmax=vmax)
sm = matplotlib.pyplot.cm.ScalarMappable(cmap=cmap, norm=norm)
sm.set_array([])
cbar = matplotlib.pyplot.colorbar(sm)
cbar.set_label(clabel, rotation=270, labelpad=20)
# Label axes
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
ax.set_zlabel(zlabel)
# Equal aspect scaling doesn't work yet, manual workaround
# Designate array of edges
xyzlims = np.zeros((3,2))
xyzlims[0,0] = np.min(grid[:,0])
xyzlims[0,1] = np.max(grid[:,0])
xyzlims[1,0] = np.min(grid[:,1])
xyzlims[1,1] = np.max(grid[:,1])
xyzlims[2,0] = np.min(zdim)
xyzlims[2,1] = np.max(zdim)
# Determine maximal range
maxrange = np.max([xyzlims[0,1]-xyzlims[0,0],xyzlims[1,1]-xyzlims[1,0],xyzlims[2,1]-xyzlims[2,0]])
# Determine difference to maximal range
xdif = maxrange - (xyzlims[0,1]-xyzlims[0,0])
ydif = maxrange - (xyzlims[1,1]-xyzlims[1,0])
zdif = maxrange - (xyzlims[2,1]-xyzlims[2,0])
# Set axis limits -> equal aspect
ax.set_xlim3d(xyzlims[0,0]-xdif/2,xyzlims[0,1]+xdif/2)
ax.set_ylim3d(xyzlims[1,0]-ydif/2,xyzlims[1,1]+ydif/2)
ax.set_zlim3d(xyzlims[2,0]-zdif/2,xyzlims[2,1]+zdif/2)
# Show result
plt.show()
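# --- Usage sketch (illustrative addition): three hexagonal prisms of varying depth.
def _example_plot_hexagons_3d():
    import numpy as np
    grid = np.array([[0.0, 0.0],
                     [1.5, 0.0],
                     [0.75, 1.3]])   # hexagon centres, shape [n x 2]
    zdim = np.array([[0.0, 1.0],
                     [0.0, 2.0],
                     [0.5, 1.5]])    # bottom and top elevation per cell, shape [n x 2]
    plot_hexagons_3d(grid, zdim, hexagon_radius=0.75)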
def vulture_plot(incr = 1, elev = 40., fps = 50):
"""
Creates a short animated .gif providing a flight around the 3-D model, requiring an open, compatible 3D figure
@params:
incr - Optional : degree increment for rotation frames; defines temporal resolution of .gif (default = 1)
elev - Optional : elevation angle for camera (default = 40)
fps - Optional : frames per second for resulting .gif; defines speed of .gif display (default 50)
"""
# Import libraries
import imageio
import os
import matplotlib.pyplot as plt
# Retrieve axis
ax = plt.gca()
# Rotate, save and compile vulture plot
images = []
for cv in range(0,360,incr):
# Rotate image
        ax.view_init(elev=elev, azim=cv)  # use the requested elevation instead of a hardcoded 40
plt.show()
# Save it as temporary file
plt.savefig("dummy.png")
# Append it to saved movie
images.append(imageio.imread("dummy.png"))
# Remove temporary file
os.remove("dummy.png")
# Print progress bar
mv = 359 # max value
        print('\r%s |%s| %s%% %s' % ('Printing: ', '\033[33m'+'█' * int(50 * cv // mv) + '-' * (50 - int(50 * cv // mv))+'\033[0m', '{0:.1f}'.format(100 * cv / mv), ' Complete'), end = '\r')
# Compile .gif
imageio.mimsave('output_quick.gif', images,fps=fps)
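# --- Usage note (illustrative addition): vulture_plot operates on the currently
# active 3D axes, so call it right after building a figure, e.g.:
#     plot_hexagons_3d(grid, zdim, hexagon_radius=0.75)
#     vulture_plot(incr=10, elev=30., fps=25)
# Larger incr values mean fewer frames and a smaller output_quick.gif.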
def visualize_genealogy(genealogy,weights = None, rejuvenation = None,colormap = 'jet'):
"""
Creates an inline figure visualizing the particle genealogy over one resampling step.
@params:
genealogy - Required : vector describing genealogy of resampled particles, referring to indices
weights - Optional : weight of particles prior to resampling
rejuvenation - Optional : vector of booleans describing whether particles were rejuvenated
colormap - Optional : colormap string for visualization
"""
import numpy as np
from IPython import get_ipython
import matplotlib
import matplotlib.pyplot as plt
# Determine number of particles
n_particles = len(genealogy)
    # Assign optional variables, if not provided
    if weights is None:
        weights = np.ones(n_particles)
    # if rejuvenation is None:
    #     rejuvenation = np.ones(n_particles, dtype=bool)
# Switch to inline printing
get_ipython().run_line_magic('matplotlib', 'inline')
# Create dummy features for the legend
full_line = plt.Line2D([], [], color='black',label='inherited')
dashed_line = plt.Line2D([], [], linestyle = '--', color='black',label='rejuvenated')
particle = plt.Line2D([], [], linestyle = 'None', marker ='.', color='black',label='particle')
# Plot legend
plt.legend(handles=[dashed_line,full_line,particle],bbox_to_anchor=(0., -0.05, 1., .102), loc=3,
ncol=3, mode="expand", borderaxespad=0.)
# Determine colormap for particles
cmap = matplotlib.cm.get_cmap(colormap)
# Extract particle colors
rgba = [None] * n_particles
for n in range(n_particles):
rgba[n] = matplotlib.colors.rgb2hex(cmap(n/(n_particles-1)))
# Create plot
for n in range(n_particles):
plt.plot([genealogy[n],n],[1,2],'--',c=rgba[genealogy[n]])
# Draw genealogy of current particle
# if rejuvenation[n] == False:
# plt.plot([genealogy[n],n],[1,2],c=rgba[genealogy[n]])
# else:
# plt.plot([genealogy[n],n],[1,2],c='w')
# plt.plot([genealogy[n],n],[1,2],'--',c=rgba[genealogy[n]])
# Scatter previous and current particle index
if weights[n] == 0: # Particle weight is zero - print as greyscale
plt.scatter(n,1,s = weights[n]/np.max(weights)*55+5,c='xkcd:medium grey')
else:
plt.scatter(n,1,s = weights[n]/np.max(weights)*55+5,c=rgba[n])
plt.scatter(n,2,s=20,c=rgba[n])
# Deactivate axes
plt.axis('off')
# Show, and revert to automatic printing
plt.show()
get_ipython().run_line_magic('matplotlib', 'qt5')
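# --- Usage sketch (illustrative addition): five particles where child n inherits
# from index genealogy[n]. Requires IPython, since the function toggles matplotlib magics.
# import numpy as np
# genealogy = np.array([0, 0, 1, 3, 4])
# weights = np.array([0.4, 0.0, 0.3, 0.2, 0.1])   # pre-resampling weights
# visualize_genealogy(genealogy, weights)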
hexsha: 95c8f1ad4e81caf4b83710c865b7efb620f7466e | size: 58,889 | ext: py | lang: Python
path: tests/python/self_concepts_test.py | repo: JulianAL-01/self-concepts | head: d4a5ebfdadc472535777349602c775a67aaa3823 | licenses: ["MIT"]
stars: 14 (2020-07-21T21:09:25.000Z to 2022-01-30T11:00:35.000Z) | issues: 2 (2020-07-28T14:46:11.000Z to 2020-07-28T14:52:23.000Z) | forks: 5 (2020-07-28T13:50:20.000Z to 2021-07-12T22:56:11.000Z)
'''
self_concepts_test
This module serves as the unit test for self_concepts
'''
import argparse, sys
sys.path.append('../../source/python')
from self_concepts import Concept
from self_concepts import Property
from self_concepts import Relationship
from self_concepts import Ontology
from self_concepts import Blackboard
from self_concepts import Agent
from self_concepts import SelfException
# Helper functions in support of concise and verbose reporting
def parseArguments():
'''Collect and return the test's arguments.'''
parser = argparse.ArgumentParser(description='Test ')
parser.add_argument('-c',
'--concise',
action='store_true',
help='test self_concept with concise results')
return parser.parse_args()
def reportHeader(message):
'''Print a report header.'''
if arguments.concise != True:
print(message)
else:
print('#', end='')
def reportSection(message):
'''Print a section header.'''
if arguments.concise != True:
print(' ' + message)
else:
print('*', end='')
def reportDetail(message):
'''Print a report detail.'''
if arguments.concise != True:
print(' ' + message)
else:
print('.', end='')
def reportDetailFailure(message):
'''Print a report failure.'''
if arguments.concise != True:
print('!!!!!!! ' + message)
else:
print('!')
exit()
def reportConceptName(concept: 'Concept'):
'''Print the name of the concept.'''
reportDetail(' Function applied to ' + concept.__class__.__name__ + ' (' + concept.name + ')')
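# Note (illustrative addition): with '-c' the reporters above compress output to one
# '#' per test header, '*' per section and '.' per passing detail, stopping at the
# first '!'; without '-c' the same events print as full sentences.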
# Various functions, classes, and instances used for testing
class AnotherConcept(Concept): pass
CONCEPT_NAME_1 = 'A well-formed concept'
CONCEPT_NAME_2 = 'A well-formed concept'
CONCEPT_NAME_3 = 'Another well-formed concept'
CONCEPT_NAME_4 = 'A well-formed concept'
c1 = Concept(CONCEPT_NAME_1)
c2 = Concept(CONCEPT_NAME_2)
c3 = AnotherConcept(CONCEPT_NAME_3)
c4 = Concept(CONCEPT_NAME_4)
class AnotherProperty(Property): pass
class YetAnotherProperty(AnotherProperty): pass
PROPERTY_NAME_1 = 'A well-formed property'
PROPERTY_NAME_2 = 'A well-formed property'
PROPERTY_NAME_3 = 'Another well-formed property'
PROPERTY_NAME_4 = 'A well-formed property'
PROPERTY_VALUE_1 = 42
PROPERTY_VALUE_2 = 'A value'
PROPERTY_VALUE_3 = c1
PROPERTY_VALUE_4 = 'A value'
p1 = Property(PROPERTY_NAME_1, PROPERTY_VALUE_1)
p2 = Property(PROPERTY_NAME_2, PROPERTY_VALUE_2)
p3 = AnotherProperty(PROPERTY_NAME_3, PROPERTY_VALUE_3)
p4 = Property(PROPERTY_NAME_4, PROPERTY_VALUE_4)
class AnotherRelationship(Relationship): pass
RELATIONSHIP_NAME_1 = 'A well-formed relationship'
RELATIONSHIP_NAME_2 = 'A well-formed relationship'
RELATIONSHIP_NAME_3 = 'Another well-formed relationship'
RELATIONSHIP_NAME_4 = 'A well-formed relationship'
r1 = Relationship(RELATIONSHIP_NAME_1, c1, c2)
r2 = Relationship(RELATIONSHIP_NAME_2, c2, c3)
r3 = AnotherRelationship(RELATIONSHIP_NAME_3, c3, c1)
r4 = Relationship(RELATIONSHIP_NAME_4, c1, c4)
ONTOLOGY_NAME_1 = 'A well-formed ontology'
o1 = Ontology(ONTOLOGY_NAME_1)
BLACKBOARD_NAME_1 = 'A well-formed blackboard'
b1 = Blackboard(BLACKBOARD_NAME_1)
class AnotherAgent(Agent):
def activity(self,
parameters: 'Concept' = None):
super().activity(parameters)
if parameters == None:
reportDetail(' Activity ('
+ self.name
+ ')')
else:
reportDetail(' Activity ('
+ self.name
+ ') with parameters ('
+ parameters.name
+ ')')
def start(self,
parameters: 'Concept' = None):
super().start(parameters)
if parameters == None:
reportDetail(' Start ('
+ self.name
+ ')')
else:
reportDetail(' Start ('
+ self.name
+ ') with parameters ('
+ parameters.name
+ ')')
def stop(self,
parameters: 'Concept' = None):
super().stop(parameters)
if parameters == None:
reportDetail(' Stop ('
+ self.name
+ ')')
else:
reportDetail(' Stop ('
+ self.name
+ ') with parameters ('
+ parameters.name
+ ')')
def pause(self,
parameters: 'Concept' = None):
super().pause(parameters)
if parameters == None:
reportDetail(' Pause ('
+ self.name
+ ')')
else:
reportDetail(' Pause ('
+ self.name
+ ') with parameters ('
+ parameters.name
+ ')')
def isAlive(self) -> bool:
state = super().isAlive()
reportDetail(' isAlive ('
+ self.name
+ ')')
return True
def status(self) -> Concept:
state = super().status()
reportDetail(' Status ('
+ self.name
+ ')')
return Concept('Status')
def signal(self,
source: 'Concept',
message: 'Concept',
parameters: 'Concept' = None):
super().signal(source, message, parameters)
reportDetail(' Signal to '
+ self.__class__.__name__
+ ' ('
+ self.name
+ ') by '
+ source.__class__.__name__
+ ' ('
+ source.name
+ ') regarding '
+ message.__class__.__name__
+ ' ('
+ message.name
+ ')')
def connect(self,
channel: 'Relationship',
parameters: 'Concept' = None):
super().connect(channel, parameters)
if parameters == None:
reportDetail(' Connect ('
+ self.name
+ ') to a channel ('
+ channel.name
+ ')')
else:
reportDetail(' Connect ('
+ self.name
+ ') with parameters ('
+ parameters.name
+ ') to a channel ('
+ channel.name
+ ')')
AGENT_NAME_1 = 'A well-formed agent'
AGENT_NAME_2 = 'Another well-formed agent'
AGENT_NAME_3 = 'Yet another well-formed agent'
a1 = AnotherAgent(AGENT_NAME_1)
a2 = AnotherAgent(AGENT_NAME_2)
a3 = AnotherAgent(AGENT_NAME_3)
# Concept unit test
def testConcept():
reportHeader('Concept')
reportSection('attributes')
if c1.name == CONCEPT_NAME_1:
        reportDetail('Correctly set and retrieved name')
    else:
        reportDetailFailure('Name was not set or retrieved')
try:
s = c1.properties
reportDetailFailure('Properties were directly accessed')
except SelfException:
reportDetail('Correctly denied direct access to properties')
try:
c1.properties = set()
reportDetailFailure('Properties were directly assigned')
except SelfException:
reportDetail('Correctly denied direct assignment to properties')
reportSection('addProperty')
c1.addProperty(p1)
if c1.propertyExists(p1):
reportDetail('Correctly added property')
else:
        reportDetailFailure('Property was not added')
try:
c1.addProperty(p1)
reportDetailFailure('Property already exists')
except SelfException:
reportDetail('Correctly denied adding property that already exists')
try:
c1.addProperty('An ill-formed property')
reportDetailFailure('Property is ill-formed')
except SelfException:
reportDetail('Correctly denied adding ill-formed property')
reportSection('removeProperty')
c1.removeProperty(p1)
if not c1.propertyExists(p1):
reportDetail('Correctly removed property')
else:
        reportDetailFailure('Property was not removed')
try:
c1.removeProperty(p2)
reportDetailFailure('Property exists')
except SelfException:
reportDetail('Correctly denied removing property that does not exist')
try:
c1.removeProperty('An ill-formed property')
reportDetailFailure('Property is ill-formed')
except SelfException:
reportDetail('Correctly denied removing ill-formed property')
reportSection('removeAllProperties')
c1.addProperty(p1)
c1.addProperty(p2)
c1.removeAllProperties()
if c1.numberOfProperties() == 0:
reportDetail('Correctly removed all properties')
else:
reportDetailFailure('Properties were not removed')
reportSection('propertyExists')
c1.addProperty(p1)
if c1.propertyExists(p1):
reportDetail('Correctly checked that property exists')
else:
reportDetailFailure('Property does not exist')
if not c1.propertyExists(p2):
reportDetail('Correctly checked that property does not exist')
else:
reportDetailFailure('Property exists')
try:
c1.propertyExists('An ill-formed property')
reportDetailFailure('Property is ill-formed')
except SelfException:
reportDetail('Correctly denied checking existence of ill-formed property')
reportSection('numberOfProperties')
c1.addProperty(p2)
if c1.numberOfProperties() == 2:
reportDetail('Correctly reported number of properties')
else:
reportDetailFailure('Number of properties is wrong')
reportSection('iterateOverProperties')
c1.iterateOverProperties(reportConceptName)
reportDetail('Correctly iterated over properties')
c1.iterateOverProperties(reportConceptName, PROPERTY_NAME_1)
reportDetail('Correctly iterated over properties with given name')
c1.iterateOverProperties(reportConceptName, None, AnotherProperty)
reportDetail('Correctly iterated over properties with given property class')
c1.iterateOverProperties(reportConceptName, PROPERTY_NAME_2, Property)
reportDetail('Correctly iterated over properties with given name and property class')
try:
c1.iterateOverProperties(reportConceptName, None, SelfException)
reportDetailFailure('Property class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed property class')
try:
c1.iterateOverProperties(reportConceptName, None, 'An ill-formed property class')
reportDetailFailure('Property class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed property class')
# Property unit test
def testProperty():
reportHeader('Property')
reportSection('attributes')
if p3.name == PROPERTY_NAME_3:
        reportDetail('Correctly set and retrieved name')
    else:
        reportDetailFailure('Name was not set or retrieved')
if p3.value == c1:
reportDetail('Correctly set and retrieved value')
else:
reportDetailFailure('Value was not set or retrieved')
# Relationship unit test
def testRelationship():
reportHeader('Relationship')
reportSection('constructor')
try:
r0 = Relationship('A well-formed relationship', c1, c2)
reportDetail('Correctly constructed relationship')
except SelfException:
reportDetailFailure('Relationship was not constructed')
try:
r0 = Relationship('A well-formed relationship', Concept, Concept)
reportDetail('Correctly constructed relationship')
except SelfException:
reportDetailFailure('Relationship was not constructed')
try:
r0 = Relationship('An ill-formed relationship', 'An ill-formed edge', c2)
reportDetailFailure('Edge is ill-formed')
except SelfException:
reportDetail('Correctly denied constructing relationship with ill-formed edge')
try:
r0 = Relationship('An ill-formed relationship', c1, 'An ill-formed edge')
reportDetailFailure('Edge is ill-formed')
except SelfException:
reportDetail('Correctly denied constructing relationship with ill-formed edge')
reportSection('attributes')
    r1.name = RELATIONSHIP_NAME_1
    if r1.name == RELATIONSHIP_NAME_1:
        reportDetail('Correctly set and retrieved name')
else:
reportDetailFailure('Name was not set or retrieved')
r1.edge1 = c1
if r1.edge1 == c1:
reportDetail('Correctly set and retrieved edge')
else:
reportDetailFailure('Edge was not set or retrieved')
try:
r1.edge1 = 'An ill-formed edge'
reportDetailFailure('Edge is ill-formed')
except SelfException:
reportDetail('Correctly denied assigning ill-formed edge')
try:
r1.edge2 = 'An ill-formed edge'
reportDetailFailure('Edge is ill-formed')
except SelfException:
reportDetail('Correctly denied assigning ill-formed edge')
try:
s = r1.edge1Properties
reportDetailFailure('Edge properties were directly accessed')
except SelfException:
reportDetail('Correctly denied direct access to edge properties')
try:
r1.edge1Properties = set()
reportDetailFailure('Edge properties were directly assigned')
except SelfException:
reportDetail('Correctly denied direct assignment to edge properties')
try:
s = r1.edge2Properties
reportDetailFailure('Edge properties were directly accessed')
except SelfException:
reportDetail('Correctly denied direct access to edge properties')
try:
r1.edge2Properties = set()
reportDetailFailure('Edge properties were directly assigned')
except SelfException:
reportDetail('Correctly denied direct assignment to edge properties')
reportSection('addEdgeProperty')
r1.addEdgeProperty(Relationship.EDGE1, p1)
if r1.edgePropertyExists(Relationship.EDGE1, p1):
reportDetail('Correctly added edge property')
else:
        reportDetailFailure('Edge property was not added')
try:
r1.addEdgeProperty(Relationship.EDGE1, p1)
reportDetailFailure('Edge property already exists')
except SelfException:
reportDetail('Correctly denied adding edge property that already exists')
try:
r1.addEdgeProperty(Relationship.EDGE1, 'An ill-formed property')
reportDetailFailure('Edge property is ill-formed')
except SelfException:
reportDetail('Correctly denied adding ill-formed edge property')
r1.addEdgeProperty(Relationship.EDGE2, p1)
if r1.edgePropertyExists(Relationship.EDGE2, p1):
reportDetail('Correctly added edge property')
else:
        reportDetailFailure('Edge property was not added')
try:
r1.addEdgeProperty(Relationship.EDGE2, p1)
reportDetailFailure('Edge property already exists')
except SelfException:
reportDetail('Correctly denied adding edge property that already exists')
try:
r1.addEdgeProperty(Relationship.EDGE2, 'An ill-formed property')
reportDetailFailure('Edge property is ill-formed')
except SelfException:
reportDetail('Correctly denied adding ill-formed edge property')
reportSection('removeEdgeProperty')
r1.removeEdgeProperty(Relationship.EDGE1, p1)
if not r1.edgePropertyExists(Relationship.EDGE1, p1):
reportDetail('Correctly removed edge property')
else:
        reportDetailFailure('Edge property was not removed')
try:
r1.removeEdgeProperty(Relationship.EDGE1, p2)
        reportDetailFailure('Edge property exists')
except SelfException:
reportDetail('Correctly denied removing edge property that does not exist')
try:
r1.removeEdgeProperty(Relationship.EDGE1, 'An ill-formed property')
reportDetailFailure('Edge property is ill-formed')
except SelfException:
reportDetail('Correctly denied removing ill-formed edge property')
r1.removeEdgeProperty(Relationship.EDGE2, p1)
if not r1.edgePropertyExists(Relationship.EDGE2, p1):
reportDetail('Correctly removed edge property')
else:
        reportDetailFailure('Edge property was not removed')
try:
r1.removeEdgeProperty(Relationship.EDGE2, p2)
reportDetailFailure('Edge property exists')
except SelfException:
reportDetail('Correctly denied removing edge property that does not exist')
try:
r1.removeEdgeProperty(Relationship.EDGE2, 'An ill-formed property')
reportDetailFailure('Edge property is ill-formed')
except SelfException:
reportDetail('Correctly denied removing ill-formed edge property')
reportSection('removeAllEdgeProperties')
r1.addEdgeProperty(Relationship.EDGE1, p1)
r1.addEdgeProperty(Relationship.EDGE1, p2)
r1.removeAllEdgeProperties(Relationship.EDGE1)
if r1.numberOfEdgeProperties(Relationship.EDGE1) == 0:
reportDetail('Correctly removed all edge properties')
else:
reportDetailFailure('Edge properties were not removed')
r1.addEdgeProperty(Relationship.EDGE2, p1)
r1.addEdgeProperty(Relationship.EDGE2, p2)
r1.removeAllEdgeProperties(Relationship.EDGE2)
if r1.numberOfEdgeProperties(Relationship.EDGE2) == 0:
reportDetail('Correctly removed all edge properties')
else:
reportDetailFailure('Edge properties were not removed')
reportSection('edgePropertyExists')
r1.addEdgeProperty(Relationship.EDGE1, p1)
r1.addEdgeProperty(Relationship.EDGE2, p1)
if r1.edgePropertyExists(Relationship.EDGE1, p1):
reportDetail('Correctly checked that edge property exists')
else:
reportDetailFailure('Edge property does not exist')
if not r1.edgePropertyExists(Relationship.EDGE1, p2):
reportDetail('Correctly checked that edge property does not exist')
else:
reportDetailFailure('Edge property exists')
try:
r1.edgePropertyExists(Relationship.EDGE1, 'An ill-formed property')
reportDetailFailure('Edge property is ill-formed')
except SelfException:
reportDetail('Correctly denied checking existence of ill-formed edge property')
if r1.edgePropertyExists(Relationship.EDGE2, p1):
reportDetail('Correctly checked that edge property exists')
else:
reportDetailFailure('Edge property does not exist')
if not r1.edgePropertyExists(Relationship.EDGE2, p2):
reportDetail('Correctly checked that edge property does not exist')
else:
reportDetailFailure('Edge property exists')
try:
r1.edgePropertyExists(Relationship.EDGE2, 'An ill-formed property')
reportDetailFailure('Edge property is ill-formed')
except SelfException:
reportDetail('Correctly denied checking existence of ill-formed edge property')
reportSection('numberOfEdgeProperties')
r1.addEdgeProperty(Relationship.EDGE1, p2)
r1.addEdgeProperty(Relationship.EDGE2, p2)
if r1.numberOfEdgeProperties(Relationship.EDGE1) == 2:
reportDetail('Correctly reported number of edge properties')
else:
reportDetailFailure('Number of edge properties is wrong')
if r1.numberOfEdgeProperties(Relationship.EDGE2) == 2:
reportDetail('Correctly reported number of edge properties')
else:
reportDetailFailure('Number of edge properties is wrong')
reportSection('iterateOverEdgeProperties')
r1.iterateOverEdgeProperties(Relationship.EDGE1, reportConceptName)
reportDetail('Correctly iterated over edge properties')
r1.iterateOverEdgeProperties(Relationship.EDGE1, reportConceptName, PROPERTY_NAME_1)
reportDetail('Correctly iterated over edge properties with given name')
r1.iterateOverEdgeProperties(Relationship.EDGE1, reportConceptName, None, AnotherProperty)
reportDetail('Correctly iterated over edge properties with given property class')
r1.iterateOverEdgeProperties(Relationship.EDGE1, reportConceptName, PROPERTY_NAME_2, Property)
reportDetail('Correctly iterated over edge properties with given name and property class')
try:
r1.iterateOverEdgeProperties(Relationship.EDGE1, reportConceptName, None, SelfException)
reportDetailFailure('Property class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed property class')
try:
r1.iterateOverEdgeProperties(Relationship.EDGE1, reportConceptName, None, 'An ill-formed property class')
reportDetailFailure('Edge property class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed edge property class')
r1.iterateOverEdgeProperties(Relationship.EDGE2, reportConceptName)
reportDetail('Correctly iterated over edge properties')
r1.iterateOverEdgeProperties(Relationship.EDGE2, reportConceptName, PROPERTY_NAME_1)
reportDetail('Correctly iterated over edge properties with given name')
r1.iterateOverEdgeProperties(Relationship.EDGE2, reportConceptName, None, AnotherProperty)
reportDetail('Correctly iterated over edge properties with given property class')
r1.iterateOverEdgeProperties(Relationship.EDGE2, reportConceptName, PROPERTY_NAME_2, Property)
reportDetail('Correctly iterated over edge properties with given name and property class')
try:
r1.iterateOverEdgeProperties(Relationship.EDGE2, reportConceptName, None, SelfException)
reportDetailFailure('Property class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed property class')
try:
r1.iterateOverEdgeProperties(Relationship.EDGE2, reportConceptName, None, 'An ill-formed property class')
reportDetailFailure('Edge property class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed edge property class')
# Ontology unit test
def testOntology():
reportHeader('Ontology')
reportSection('attributes')
if o1.name == ONTOLOGY_NAME_1:
        reportDetail('Correctly set and retrieved name')
    else:
        reportDetailFailure('Name was not set or retrieved')
try:
s = o1.concepts
reportDetailFailure('Concepts were directly accessed')
except SelfException:
reportDetail('Correctly denied direct access to concepts')
try:
o1.concepts = set()
reportDetailFailure('Concepts were directly assigned')
except SelfException:
reportDetail('Correctly denied direct assignment to concepts')
try:
s = o1.relationships
reportDetailFailure('Relationships were directly accessed')
except SelfException:
reportDetail('Correctly denied direct access to relationships')
try:
o1.relationships = set()
reportDetailFailure('Relationships were directly assigned')
except SelfException:
reportDetail('Correctly denied direct assignment to relationships')
reportSection('addConcept')
o1.addConcept(c1)
if o1.conceptExists(c1):
reportDetail('Correctly added concept')
else:
        reportDetailFailure('Concept was not added')
try:
o1.addConcept(c1)
reportDetailFailure('Concept already exists')
except SelfException:
reportDetail('Correctly denied adding concept that already exists')
try:
o1.addConcept('An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied adding ill-formed concept')
reportSection('removeConcept')
o1.removeConcept(c1)
if not o1.conceptExists(c1):
reportDetail('Correctly removed concept')
else:
        reportDetailFailure('Concept was not removed')
try:
o1.removeConcept(c2)
reportDetailFailure('Concept exists')
except SelfException:
reportDetail('Correctly denied removing concept that does not exist')
try:
o1.removeConcept('An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied removing an ill-formed concept')
o1.addConcept(c1)
o1.addConcept(c2)
o1.addRelationship(r1)
try:
o1.removeConcept(c1)
reportDetailFailure('Concept is bound')
except SelfException:
reportDetail('Correctly denied removing concept that is bound')
reportSection('removeAllConcepts')
o1.removeRelationship(r1)
o1.removeAllConcepts()
if o1.numberOfConcepts() == 0:
reportDetail('Correctly removed all concepts')
else:
reportDetailFailure('Concepts were not removed')
o1.addConcept(c1)
o1.addConcept(c2)
o1.addRelationship(r1)
try:
o1.removeAllConcepts()
reportDetailFailure('Concepts are bound')
except SelfException:
reportDetail('Correctly denied removing concepts that are bound')
o1.removeRelationship(r1)
o1.removeConcept(c2)
o1.removeConcept(c1)
reportSection('conceptExists')
o1.addConcept(c1)
if o1.conceptExists(c1):
reportDetail('Correctly checked that concept exists')
else:
reportDetailFailure('Concept does not exist')
if not o1.conceptExists(c2):
reportDetail('Correctly checked that concept does not exist')
else:
reportDetailFailure('Concept exists')
try:
o1.conceptExists('An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied checking existence of ill-formed concept')
reportSection('numberOfConcepts')
o1.addConcept(c2)
if o1.numberOfConcepts() == 2:
reportDetail('Correctly reported number of concepts')
else:
reportDetailFailure('Number of concepts is wrong')
reportSection('iterateOverConcepts')
o1.addConcept(c3)
o1.iterateOverConcepts(reportConceptName)
reportDetail('Correctly iterated over concepts')
o1.iterateOverConcepts(reportConceptName, CONCEPT_NAME_1)
reportDetail('Correctly iterated over concepts with given name')
o1.iterateOverConcepts(reportConceptName, None, AnotherConcept)
reportDetail('Correctly iterated over concepts with given concept class')
o1.iterateOverConcepts(reportConceptName, CONCEPT_NAME_2, Concept)
reportDetail('Correctly iterated over concepts with given name and concept class')
try:
o1.iterateOverConcepts(reportConceptName, None, SelfException)
reportDetailFailure('Concept class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed concept class')
try:
o1.iterateOverConcepts(reportConceptName, None, 'An ill-formed concept class')
reportDetailFailure('Concept class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed concept class')
reportSection('addRelationship')
o1.addRelationship(r1)
o1.addRelationship(r2)
o1.addRelationship(r3)
if o1.numberOfRelationships() == 3:
reportDetail('Correctly added relationship')
else:
reportDetailFailure('Relationship was not added')
try:
o1.addRelationship(r1)
reportDetailFailure('Relationship already exists')
except SelfException:
        reportDetail('Correctly denied adding relationship that already exists')
try:
o1.addRelationship('An ill-formed relationship')
reportDetailFailure('Relationship is ill-formed')
except SelfException:
reportDetail('Correctly denied adding ill-formed relationship')
try:
o1.addRelationship(r4)
        reportDetailFailure('Relationship is not closed')
except SelfException:
reportDetail('Correctly denied adding relationship that is not closed')
reportSection('removeRelationship')
o1.removeRelationship(r3)
if not o1.relationshipExists(r3):
        reportDetail('Correctly removed relationship')
else:
reportDetailFailure('Relationship was not removed')
try:
o1.removeRelationship(r3)
reportDetailFailure('Relationship exists')
except SelfException:
        reportDetail('Correctly denied removing relationship that does not exist')
try:
o1.removeRelationship('An ill-formed relationship')
reportDetailFailure('Relationship is ill-formed')
except SelfException:
reportDetail('Correctly denied removing ill-formed relationship')
reportSection('removeAllRelationships')
o1.removeAllRelationships()
if o1.numberOfRelationships() == 0:
reportDetail('Correctly removed all relationships')
else:
reportDetailFailure('Relationships were not removed')
reportSection('relationshipExists')
o1.addRelationship(r1)
if o1.relationshipExists(r1):
reportDetail('Correctly checked that relationship exists')
else:
reportDetailFailure('Relationship does not exist')
if not o1.relationshipExists(r3):
reportDetail('Correctly checked that relationship does not exist')
else:
reportDetailFailure('Relationship exists')
try:
o1.relationshipExists('An ill-formed relationship')
reportDetailFailure('Relationship is ill-formed')
except SelfException:
        reportDetail('Correctly denied checking existence of ill-formed relationship')
    reportSection('numberOfRelationships')
o1.addRelationship(r2)
if o1.numberOfRelationships() == 2:
reportDetail('Correctly reported number of relationships')
else:
reportDetailFailure('Number of relationships is wrong')
reportSection('iterateOverRelationships')
o1.addRelationship(r3)
o1.iterateOverRelationships(reportConceptName)
reportDetail('Correctly iterated over relationships')
o1.iterateOverRelationships(reportConceptName, RELATIONSHIP_NAME_1)
reportDetail('Correctly iterated over relationships with given name')
o1.iterateOverRelationships(reportConceptName, None, AnotherRelationship)
reportDetail('Correctly iterated over relationships with given relationship class')
o1.iterateOverRelationships(reportConceptName, RELATIONSHIP_NAME_2, Relationship)
    reportDetail('Correctly iterated over relationships with given name and relationship class')
try:
o1.iterateOverRelationships(reportConceptName, None, SelfException)
reportDetailFailure('Relationship class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed relationship class')
try:
o1.iterateOverRelationships(reportConceptName, None, 'An ill-formed relationship class')
reportDetailFailure('Relationship class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed relationship class')
reportSection('conceptIsBound')
if o1.conceptIsBound(c1):
reportDetail('Correctly checked that concept is bound')
else:
reportDetailFailure('Concept is not bound')
if not o1.conceptIsBound(c4):
reportDetail('Correctly checked that concept is not bound')
else:
reportDetailFailure('Concept is bound')
try:
o1.conceptIsBound('An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied checking if an ill-formed concept is bound')
reportSection('numberOfUnboundConcepts')
o1.addConcept(c4)
if o1.numberOfUnboundConcepts() == 1:
reportDetail('Correctly reported number of unbound concepts')
else:
reportDetailFailure('Number of unbound concepts is wrong')
reportSection('numberOfBoundConcepts')
if o1.numberOfBoundConcepts() == 3:
reportDetail('Correctly reported number of bound concepts')
else:
reportDetailFailure('Number of bound concepts is wrong')
reportSection('iterateOverUnboundConcepts')
o1.iterateOverUnboundConcepts(reportConceptName)
reportDetail('Correctly iterated over unbound concepts')
o1.iterateOverUnboundConcepts(reportConceptName, CONCEPT_NAME_1)
reportDetail('Correctly iterated over unbound concepts with given name')
o1.iterateOverUnboundConcepts(reportConceptName, None, AnotherConcept)
reportDetail('Correctly iterated over unbound concepts with given concept class')
o1.iterateOverUnboundConcepts(reportConceptName, CONCEPT_NAME_2, Concept)
reportDetail('Correctly iterated over unbound concepts with given name and concept class')
try:
o1.iterateOverUnboundConcepts(reportConceptName, None, SelfException)
reportDetailFailure('Concept class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed concept class')
try:
o1.iterateOverUnboundConcepts(reportConceptName, None, 'An ill-formed concept class')
reportDetailFailure('Concept class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed concept class')
reportSection('iterateOverBoundConcepts')
o1.iterateOverBoundConcepts(reportConceptName)
reportDetail('Correctly iterated over bound concepts')
o1.iterateOverBoundConcepts(reportConceptName, CONCEPT_NAME_1)
reportDetail('Correctly iterated over bound concepts with given name')
o1.iterateOverBoundConcepts(reportConceptName, None, AnotherConcept)
reportDetail('Correctly iterated over bound concepts with given concept class')
o1.iterateOverBoundConcepts(reportConceptName, CONCEPT_NAME_2, Concept)
reportDetail('Correctly iterated over bound concepts with given name and concept class')
try:
o1.iterateOverBoundConcepts(reportConceptName, None, SelfException)
reportDetailFailure('Concept class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed concept class')
try:
o1.iterateOverBoundConcepts(reportConceptName, None, 'An ill-formed concept class')
reportDetailFailure('Concept class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed concept class')
# Blackboard unit test
def testBlackboard():
reportHeader('Blackboard')
reportSection('attributes')
if b1.name == BLACKBOARD_NAME_1:
reportDetail('Correctly set and retrieved name')
else:
reportDetailFailure('Name was not set or retrieved')
try:
s = b1.concepts
reportDetailFailure('Concepts were directly accessed')
except SelfException:
reportDetail('Correctly denied direct access to concepts')
try:
b1.concepts = set()
reportDetailFailure('Concepts were directly assigned')
except SelfException:
reportDetail('Correctly denied direct assignment to concepts')
try:
s = b1.conceptClasses
        reportDetailFailure('Concept classes were directly accessed')
except SelfException:
reportDetail('Correctly denied direct access to concept classes')
try:
b1.conceptClasses = set()
reportDetailFailure('Concept classes were directly assigned')
except SelfException:
reportDetail('Correctly denied direct assignment to concept classes')
try:
s = b1.publications
reportDetailFailure('Publications were directly accessed')
except SelfException:
reportDetail('Correctly denied direct access to publications')
try:
b1.publications = set()
reportDetailFailure('Publications were directly assigned')
except SelfException:
reportDetail('Correctly denied direct assignment to publications')
try:
s = b1.conceptSubscriptions
reportDetailFailure('Subscriptions were directly accessed')
except SelfException:
        reportDetail('Correctly denied direct access to subscriptions')
try:
b1.conceptSubscriptions = set()
reportDetailFailure('Subscriptions were directly assigned')
except SelfException:
reportDetail('Correctly denied direct assignment to subscriptions')
try:
s = b1.classSubscriptions
reportDetailFailure('Class subscriptions were directly accessed')
except SelfException:
reportDetail('Correctly denied direct access to class subscriptions')
try:
b1.classSubscriptions = set()
reportDetailFailure('Class subscriptions were directly assigned')
except SelfException:
reportDetail('Correctly denied direct assignment to class subscriptions')
reportSection('publishConcept')
b1.publishConcept(a1, c1)
if b1.conceptExists(c1):
reportDetail('Correctly published concept')
else:
reportDetailFailure('Concept was not published')
b1.subscribeToConceptClass(a2, AnotherConcept)
b1.publishConcept(a1, c3)
if len(b1.subscribers(c3)) == 1:
reportDetail('Correctly subscribed to concept class instance')
else:
reportDetailFailure('Subscription failed')
try:
b1.publishConcept(a1, c1)
reportDetailFailure('Concept already exists')
except SelfException:
reportDetail('Correctly denied adding concept that already exists')
try:
b1.publishConcept('An ill-formed agent', c1)
reportDetailFailure('Agent is ill-formed')
except SelfException:
reportDetail('Correctly denied publishing ill-formed agent')
try:
b1.publishConcept(a1, 'An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied publishing ill-formed concept')
reportSection('unpublishConcept')
b1.unpublishConcept(c1)
b1.unpublishConcept(c3)
if not b1.conceptExists(c3):
reportDetail('Correctly unpublished concept')
else:
reportDetailFailure('Concept was not unpublished')
b1.publishConcept(a1, c1)
b1.publishConcept(a2, c2)
b1.publishConcept(a1, c3)
b1.unpublishConcept()
if b1.numberOfConcepts() == 0:
reportDetail('Correctly unpublished all concepts')
else:
reportDetailFailure('Concepts were not unpublished')
try:
b1.unpublishConcept(c3)
reportDetailFailure('Concept exists')
except SelfException:
reportDetail('Correctly denied unpublishing concept that does not exist')
try:
b1.unpublishConcept('An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied unpublishing ill-formed concept')
reportSection('publisher')
b1.publishConcept(a1, c1)
if b1.publisher(c1) == a1:
reportDetail('Correctly returned publisher')
else:
reportDetailFailure('Publisher was not returned')
try:
b1.publisher(c2)
reportDetailFailure('Concept does not exist')
except SelfException:
reportDetail('Correctly denied returning publisher of concept that does not exist')
try:
b1.publisher('An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied returning publisher of ill-formed concept')
reportSection('signalPublisher')
b1.signalPublisher(Concept('A well-formed source'), Concept('A well-formed message'), c1)
reportDetail('Correctly signaled publisher')
b1.signalPublisher(Concept('A well-formed source'), Concept('A well-formed message'))
reportDetail('Correctly signaled publishers')
try:
b1.signalPublisher(Concept('A well-formed source'), Concept('A well-formed message'), c2)
reportDetailFailure('Concept does not exist')
except SelfException:
reportDetail('Correctly denied signaling a publisher of concept that does not exist')
try:
b1.signalPublisher(Concept('A well-formed source'), Concept('A well-formed message'), 'An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied signaling publisher of ill-formed concept')
try:
b1.signalPublisher('An ill-formed source', Concept('A well-formed message'), c1)
reportDetailFailure('Source is ill-formed')
except SelfException:
reportDetail('Correctly denied signaling publisher of ill-formed source')
try:
b1.signalPublisher(Concept('A well-formed source'), 'An ill-formed message', c1)
reportDetailFailure('Message is ill-formed')
except SelfException:
reportDetail('Correctly denied signaling publisher of ill-formed message')
reportSection('conceptExists')
if b1.conceptExists(c1):
reportDetail('Correctly checked that concept exists')
else:
reportDetailFailure('Concept does not exist')
if not b1.conceptExists(c2):
reportDetail('Correctly checked that concept does not exist')
else:
reportDetailFailure('Concept exists')
try:
b1.conceptExists('An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied checking of ill-formed concept')
reportSection('numberOfConcepts')
b1.publishConcept(a2, c3)
if b1.numberOfConcepts() == 2:
reportDetail('Correctly reported number of concepts')
else:
reportDetailFailure('Number of concepts is wrong')
reportSection('iterateOverConcepts')
b1.iterateOverConcepts(reportConceptName)
reportDetail('Correctly iterated over concepts')
b1.iterateOverConcepts(reportConceptName, CONCEPT_NAME_1)
reportDetail('Correctly iterated over concepts with given name')
b1.iterateOverConcepts(reportConceptName, None, AnotherConcept)
reportDetail('Correctly iterated over concepts with given concept class')
b1.iterateOverConcepts(reportConceptName, CONCEPT_NAME_2, Concept)
reportDetail('Correctly iterated over concepts with given name and concept class')
try:
b1.iterateOverConcepts(reportConceptName, None, SelfException)
reportDetailFailure('Concept class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed concept class')
try:
b1.iterateOverConcepts(reportConceptName, None, 'An ill-formed concept class')
reportDetailFailure('Concept class is ill-formed')
except SelfException:
reportDetail('Correctly denied iterating over ill-formed concept class')
reportSection('subscribeToConcept')
b1.subscribeToConcept(a3, c3)
if len(b1.subscribers(c3)) == 2:
reportDetail('Correctly subscribed to concept')
else:
reportDetailFailure('Concept was not subscribed')
try:
b1.subscribeToConcept(a3, c3)
reportDetailFailure('Concept is already subscribed')
except SelfException:
reportDetail('Correctly denied subscribing to concept more than once')
try:
b1.subscribeToConcept(a3, c4)
reportDetailFailure('Concept does not exist')
except SelfException:
reportDetail('Correctly denied subscribing to concept that does not exist')
try:
b1.subscribeToConcept('An ill-formed agent', c3)
reportDetailFailure('Agent is ill-formed')
except SelfException:
reportDetail('Correctly denied subscribing by ill-formed agent')
try:
b1.subscribeToConcept(a2, 'An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied subscribing to ill-formed concept')
reportSection('unsubscribeFromConcept')
b1.unsubscribeFromConcept()
if len(b1.subscribers()) == 0:
reportDetail('Correctly unsubscribed from all concepts by all agents')
else:
reportDetailFailure('Concepts were not unsubscribed')
b1.subscribeToConcept(a1, c1)
b1.subscribeToConcept(a1, c3)
b1.subscribeToConcept(a2, c1)
b1.subscribeToConcept(a2, c3)
b1.unsubscribeFromConcept(a1)
if (len(b1.subscribers(c1)) == 1 and len(b1.subscribers(c3)) == 1):
reportDetail('Correctly unsubscribed from all concepts by agent')
else:
reportDetailFailure('Concepts were not unsubscribed')
b1.subscribeToConcept(a1, c1)
b1.unsubscribeFromConcept(None, c1)
if (len(b1.subscribers(c1)) == 0 and len(b1.subscribers(c3)) == 1):
reportDetail('Correctly unsubscribed from concept by all agents')
else:
reportDetailFailure('Concepts were not unsubscribed')
b1.unsubscribeFromConcept(a2, c3)
if len(b1.subscribers(c3)) == 0:
reportDetail('Correctly unsubscribed from concept by agent')
else:
reportDetailFailure('Concept was not unsubscribed')
try:
b1.unsubscribeFromConcept(None, c2)
reportDetailFailure('Concept does not exist')
except SelfException:
reportDetail('Correctly denied unsubscribing from concept that does not exist')
try:
b1.unsubscribeFromConcept('An ill-formed agent', c1)
reportDetailFailure('Agent is ill-formed')
except SelfException:
reportDetail('Correctly denied unsubscribing by ill-formed agent')
try:
b1.unsubscribeFromConcept(a1, 'An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied unsubscribing from ill-formed concept')
reportSection('subscribers')
b1.subscribeToConcept(a1, c1)
b1.subscribeToConcept(a2, c1)
if len(b1.subscribers(c1)) == 2:
reportDetail('Correctly returned subscribers')
else:
reportDetailFailure('Subscribers were not returned')
if len(b1.subscribers(c3)) == 0:
reportDetail('Correctly returned subscribers')
else:
reportDetailFailure('Subscribers were not returned')
if len(b1.subscribers()) == 2:
reportDetail('Correctly returned subscribers')
else:
reportDetailFailure('Subscribers were not returned')
try:
b1.subscribers(c2)
reportDetailFailure('Concept does not exist')
except SelfException:
reportDetail('Correctly denied returning subscribers from concept that does not exist')
try:
b1.subscribers('An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied returning subscribers from ill-formed concept')
reportSection('signalSubscribers')
b1.signalSubscribers(Concept('A well-formed source'), Concept('A well-formed message'), c1)
reportDetail('Correctly signaled subscribers')
b1.signalSubscribers(Concept('A well-formed source'), Concept('A well-formed message'))
reportDetail('Correctly signaled subscribers')
try:
b1.signalSubscribers(Concept('A well-formed source'), Concept('A well-formed message'), c2)
reportDetailFailure('Concept does not exist')
except SelfException:
reportDetail('Correctly denied signaling subscribers of concept that does not exist')
try:
b1.signalSubscribers(Concept('A well-formed source'), Concept('A well-formed message'), 'An ill-formed concept')
reportDetailFailure('Concept is ill-formed')
except SelfException:
reportDetail('Correctly denied signaling subscribers of ill-formed concept')
try:
b1.signalSubscribers('An ill-formed source', Concept('A well-formed message'), c1)
reportDetailFailure('Source is ill-formed')
except SelfException:
reportDetail('Correctly denied signaling subscribers of ill-formed source')
try:
b1.signalSubscribers(Concept('A well-formed source'), 'An ill-formed message', c1)
reportDetailFailure('Message is ill-formed')
except SelfException:
reportDetail('Correctly denied signaling subscribers of ill-formed message')
reportSection('subscribeToConceptClass')
b1.unsubscribeFromConceptClass()
b1.subscribeToConceptClass(a1, Concept)
b1.subscribeToConceptClass(a2, Concept)
b1.subscribeToConceptClass(a3, AnotherConcept)
if len(b1.classSubscribers()) == 3:
reportDetail('Correctly subscribed to concept class')
else:
reportDetailFailure('Concept class was not subscribed')
if len(b1.classSubscribers(Concept)) == 2:
reportDetail('Correctly subscribed to concept class')
else:
reportDetailFailure('Concept class was not subscribed')
if len(b1.classSubscribers(AnotherConcept)) == 1:
reportDetail('Correctly subscribed to concept class')
else:
reportDetailFailure('Concept class was not subscribed')
try:
b1.subscribeToConceptClass(a1, Concept)
reportDetailFailure('Concept class is already subscribed')
except SelfException:
reportDetail('Correctly denied subscribing to concept class more than once')
try:
b1.subscribeToConceptClass('An ill-formed agent', c3)
reportDetailFailure('Agent is ill-formed')
except SelfException:
reportDetail('Correctly denied subscribing by ill-formed agent')
try:
b1.subscribeToConceptClass(a2, 'An ill-formed concept')
reportDetailFailure('Concept class is ill-formed')
except SelfException:
reportDetail('Correctly denied subscribing to ill-formed concept class')
reportSection('unsubscribeFromConceptClass')
b1.unsubscribeFromConceptClass()
if len(b1.classSubscribers()) == 0:
reportDetail('Correctly unsubscribed from all concept classes by all agents')
else:
reportDetailFailure('Concept classes were not unsubscribed')
b1.subscribeToConceptClass(a1, Concept)
b1.subscribeToConceptClass(a1, AnotherConcept)
b1.subscribeToConceptClass(a2, Concept)
b1.subscribeToConceptClass(a3, AnotherConcept)
b1.unsubscribeFromConceptClass(a1)
if (len(b1.classSubscribers(Concept)) == 1 and len(b1.classSubscribers(AnotherConcept)) == 1):
reportDetail('Correctly unsubscribed from all concept classes by agent')
else:
reportDetailFailure('Concept classes were not unsubscribed')
b1.subscribeToConceptClass(a1, Concept)
b1.unsubscribeFromConceptClass(None, Concept)
if len(b1.classSubscribers(AnotherConcept)) == 1:
reportDetail('Correctly unsubscribed from concept class by all agents')
else:
reportDetailFailure('Concept class was not unsubscribed')
b1.unsubscribeFromConceptClass(a3, AnotherConcept)
if len(b1.classSubscribers()) == 0:
reportDetail('Correctly unsubscribed from concept class by agent')
else:
reportDetailFailure('Concept class was not unsubscribed')
try:
b1.unsubscribeFromConceptClass(None, c2)
reportDetailFailure('Concept class does not exist')
except SelfException:
reportDetail('Correctly denied unsubscribing from concept class that does not exist')
try:
b1.unsubscribeFromConceptClass('An ill-formed agent', c1)
reportDetailFailure('Agent is ill-formed')
except SelfException:
reportDetail('Correctly denied unsubscribing by ill-formed agent')
try:
b1.unsubscribeFromConceptClass(a1, 'An ill-formed concept class')
reportDetailFailure('Concept class is ill-formed')
except SelfException:
reportDetail('Correctly denied unsubscribing from ill-formed concept class')
reportSection('classSubscribers')
b1.subscribeToConceptClass(a1, Concept)
b1.subscribeToConceptClass(a2, Concept)
if len(b1.classSubscribers(Concept)) == 2:
reportDetail('Correctly returned subscribers')
else:
reportDetailFailure('Subscribers were not returned')
if len(b1.classSubscribers()) == 2:
reportDetail('Correctly returned subscribers')
else:
reportDetailFailure('Subscribers were not returned')
try:
b1.classSubscribers(AnotherConcept)
reportDetailFailure('Concept class does not exist')
except SelfException:
reportDetail('Correctly denied returning subscribers from concept class that does not exist')
try:
b1.classSubscribers('An ill-formed concept class')
reportDetailFailure('Concept class is ill-formed')
except SelfException:
reportDetail('Correctly denied returning subscribers from ill-formed concept class')
reportSection('signalClassSubscribers')
b1.subscribeToConceptClass(a3, AnotherConcept)
b1.signalClassSubscribers(Concept('A well-formed source'), Concept('A well-formed message'), Concept)
reportDetail('Correctly signaled subscribers')
b1.signalClassSubscribers(Concept('A well-formed source'), Concept('A well-formed message'))
reportDetail('Correctly signaled subscribers')
try:
b1.signalClassSubscribers(Concept('A well-formed source'), Concept('A well-formed message'), SelfException)
reportDetailFailure('Concept class does not exist')
except SelfException:
reportDetail('Correctly denied signaling subscribers of concept class that does not exist')
try:
b1.signalClassSubscribers(Concept('A well-formed source'),
Concept('A well-formed message'),
'An ill-formed concept class')
reportDetailFailure('Concept class is ill-formed')
except SelfException:
reportDetail('Correctly denied signaling subscribers of ill-formed concept class')
try:
b1.signalClassSubscribers('An ill-formed source', Concept('A well-formed message'), Concept)
reportDetailFailure('Source is ill-formed')
except SelfException:
reportDetail('Correctly denied signaling subscribers of ill-formed source')
try:
b1.signalClassSubscribers(Concept('A well-formed source'), 'An ill-formed message', Concept)
reportDetailFailure('Message is ill-formed')
except SelfException:
reportDetail('Correctly denied signaling subscribers of ill-formed message')
# Agent unit test
def testAgent():
reportHeader('Agent')
reportSection('activity')
a1.activity()
reportDetail('Correctly carried out the activity')
a1.activity(Concept('A well-formed parameter'))
reportDetail('Correctly carried out the activity')
try:
a1.activity('An ill-formed parameter')
reportDetailFailure('Parameters are ill-formed')
except SelfException:
reportDetail('Correctly denied carrying out activity with ill-formed parameters')
reportSection('start')
a1.start()
reportDetail('Correctly started the agent activity')
a1.start(Concept('A well-formed parameter'))
reportDetail('Correctly started the agent activity')
try:
a1.start('An ill-formed parameter')
reportDetailFailure('Parameters are ill-formed')
except SelfException:
reportDetail('Correctly denied starting activity with ill-formed parameters')
reportSection('stop')
a1.stop()
reportDetail('Correctly stopped the agent activity')
a1.stop(Concept('A well-formed parameter'))
reportDetail('Correctly stopped the agent activity')
try:
a1.stop('An ill-formed parameter')
reportDetailFailure('Parameters are ill-formed')
except SelfException:
reportDetail('Correctly denied stopping activity with ill-formed parameters')
reportSection('pause')
a1.pause()
reportDetail('Correctly paused the agent activity')
a1.pause(Concept('A well-formed parameter'))
reportDetail('Correctly paused the agent activity')
try:
a1.pause('An ill-formed parameter')
reportDetailFailure('Parameters are ill-formed')
except SelfException:
reportDetail('Correctly denied pausing activity with ill-formed parameters')
reportSection('isAlive')
if a1.isAlive():
reportDetail('Correctly checked that agent is alive')
else:
reportDetailFailure('Agent is not alive')
reportSection('status')
if a1.status().name == 'Status':
reportDetail('Correctly checked agent status')
else:
reportDetailFailure('Agent status is wrong')
reportSection('signal')
a1.signal(Concept('A well-defined source'), Concept('A well-defined message'))
reportDetail('Correctly signaled the agent')
a1.signal(Concept('A well-defined source'), Concept('A well-defined message'), Concept('A well-defined parameter'))
reportDetail('Correctly signaled the agent')
try:
a1.signal('An ill-defined source', Concept('A well-defined message'), Concept('A well-defined parameter'))
reportDetailFailure('Source is ill-defined')
except SelfException:
reportDetail('Correctly denied signaling with ill-defined source')
try:
a1.signal(Concept('A well-defined source'), 'An ill-defined message', Concept('A well-defined parameter'))
reportDetailFailure('Message is ill-defined')
except SelfException:
reportDetail('Correctly denied signaling with ill-defined message')
try:
a1.signal(Concept('A well-defined source'), Concept('A well-defined message'), 'An ill-defined parameter')
reportDetailFailure('Parameters are ill-defined')
except SelfException:
reportDetail('Correctly denied signaling with ill-defined parameters')
reportSection('connect')
a1.connect(Relationship('A well-defined relationship', a1, a2))
reportDetail('Correctly connected the agent')
a1.connect(Relationship('A well-defined relationship', a1, a2), Concept('A well-formed parameter'))
reportDetail('Correctly connected the agent')
try:
a1.connect('An ill-formed relationship', Concept('A well-formed parameter'))
reportDetailFailure('Channel is ill-formed')
except SelfException:
reportDetail('Correctly denied connecting with ill-formed channel')
try:
a1.connect(Relationship('A well-formed relationship', a1, a2), 'An ill-formed parameter')
reportDetailFailure('Parameters are ill-formed')
except SelfException:
reportDetail('Correctly denied connecting with ill-formed parameters')
# Test all of Self's foundational classes
arguments = parseArguments()
testConcept()
testProperty()
testRelationship()
testOntology()
testBlackboard()
testAgent()
# Clean up the output stream if reporting concisely
if arguments.concise:
print()
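# A minimal sketch (an assumption, not part of this suite) of the reporting
# helpers the tests above rely on; the real implementations may differ:
# def reportHeader(name): print('=== %s ===' % name)
# def reportSection(name): print('--- %s ---' % name)
# def reportDetail(message): print('pass: %s' % message)
# def reportDetailFailure(message): print('FAIL: %s' % message)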
| 40.252221
| 120
| 0.69558
| 5,816
| 58,889
| 7.013067
| 0.050894
| 0.120477
| 0.075463
| 0.109836
| 0.763533
| 0.690252
| 0.633348
| 0.586079
| 0.536334
| 0.499387
| 0
| 0.015023
| 0.218954
| 58,889
| 1,462
| 121
| 40.279754
| 0.871769
| 0.009611
| 0
| 0.564652
| 0
| 0
| 0.354245
| 0.005697
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015302
| false
| 0.00306
| 0.006121
| 0
| 0.027544
| 0.006886
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95c9bf8a576fcba5f592caf1b205652fbf6c6df7
| 1,042
|
py
|
Python
|
100-200q/123.py
|
rampup01/Leetcode
|
8450a95a966ef83b24ffe6450f06ce8de92b3efb
|
[
"MIT"
] | 990
|
2018-06-05T11:49:22.000Z
|
2022-03-31T08:59:17.000Z
|
100-200q/123.py
|
rampup01/Leetcode
|
8450a95a966ef83b24ffe6450f06ce8de92b3efb
|
[
"MIT"
] | 1
|
2021-11-01T01:29:38.000Z
|
2021-11-01T01:29:38.000Z
|
100-200q/123.py
|
rampup01/Leetcode
|
8450a95a966ef83b24ffe6450f06ce8de92b3efb
|
[
"MIT"
] | 482
|
2018-06-12T22:16:53.000Z
|
2022-03-29T00:23:29.000Z
|
'''
Say you have an array for which the ith element is the price of a given stock on day i.
Design an algorithm to find the maximum profit. You may complete at most two transactions.
Note: You may not engage in multiple transactions at the same time (i.e., you must sell the stock before you buy again).
Example 1:
Input: [3,3,5,0,0,3,1,4]
Output: 6
Explanation: Buy on day 4 (price = 0) and sell on day 6 (price = 3), profit = 3-0 = 3.
Then buy on day 7 (price = 1) and sell on day 8 (price = 4), profit = 4-1 = 3.
'''
class Solution(object):
def maxProfit(self, prices):
"""
:type prices: List[int]
:rtype: int
"""
if len(prices) < 2:
return 0
dp = [[0 for _ in range(len(prices))] for _ in range(3)]
for i in range(1,3):
maxDiff = -prices[0]
for j in range(1,len(prices)):
dp[i][j] = max(dp[i][j-1], prices[j] + maxDiff)
maxDiff = max(maxDiff, dp[i-1][j] - prices[j])
return dp[2][len(prices)-1]
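# A quick usage sketch (not part of the original solution): the example from
# the docstring should yield a maximum profit of 6 (two trades of 3 each).
if __name__ == '__main__':
    assert Solution().maxProfit([3, 3, 5, 0, 0, 3, 1, 4]) == 6
    assert Solution().maxProfit([5]) == 0  # fewer than 2 prices -> no profit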
| 30.647059
| 121
| 0.579655
| 179
| 1,042
| 3.363128
| 0.435754
| 0.041528
| 0.026578
| 0.039867
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 0.291747
| 1,042
| 33
| 122
| 31.575758
| 0.766938
| 0.541267
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0
| 0
| 0.363636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95ca4ff47bbf69d356929cfddbfe83070e5ea793
| 2,077
|
py
|
Python
|
lambdas/verify_admin.py
|
charvi-a/320-S20-Track1
|
ac97504fc1fdedb1c311773b015570eeea8a8663
|
[
"BSD-3-Clause"
] | 9
|
2019-12-30T16:32:22.000Z
|
2020-03-03T20:14:47.000Z
|
lambdas/verify_admin.py
|
charvi-a/320-S20-Track1
|
ac97504fc1fdedb1c311773b015570eeea8a8663
|
[
"BSD-3-Clause"
] | 283
|
2020-02-03T15:16:03.000Z
|
2020-05-05T03:18:59.000Z
|
lambdas/verify_admin.py
|
charvi-a/320-S20-Track1
|
ac97504fc1fdedb1c311773b015570eeea8a8663
|
[
"BSD-3-Clause"
] | 3
|
2020-04-16T15:23:29.000Z
|
2020-05-12T00:38:41.000Z
|
import json
from package.query_db import query
from package.dictionary_to_list import dictionary_to_list
from package.lambda_exception import LambdaException
from boto3 import client as boto3_client
def verify_admin(event, context):
user_id = event.get('user_id')
if user_id is None: #Making sure user_id was passed
raise LambdaException("400: user_id was not given")
user_id = int(user_id)
user_id_dic = {}
user_id_dic['user_id'] = user_id
sql_parameters = dictionary_to_list(user_id_dic)
sql_select = """SELECT users.id FROM users WHERE users.id = :user_id""" #This query is ensuring that the user exists
response = query(sql_select, sql_parameters)
if response['records'] == []: #Returning error if user does not exist
raise LambdaException("404: user does not exist")
sql_select = """SELECT users.id FROM users WHERE users.id = :user_id and is_admin = true""" #This query is ensuring user is not already an admin
response = query(sql_select, sql_parameters)
if response['records'] != []: #Returning error if user is already an admin
raise LambdaException("405: user is already an admin")
else:
sql_update = """UPDATE users SET is_admin = true WHERE users.id = :user_id"""
response = query(sql_update, sql_parameters)
sql_insert = """INSERT INTO admins(admin_id, user_id, is_pending) VALUES(:user_id, :user_id, false)
"""
response = query(sql_insert, sql_parameters)
# send approval email
lambda_client = boto3_client('lambda')
email_event = {
"user_id": user_id,
"approved_role": "admin"
}
try:
response = lambda_client.invoke(FunctionName="approval_email",
InvocationType='Event',
Payload=json.dumps(email_event))
except Exception as e:
raise LambdaException("404: Unable to send approval email " + str(e))
return {
"statusCode": 200
}
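# Hypothetical invocation sketch (not in the original module); running it for
# real requires the RDS-backed query() helper and AWS credentials:
#   event = {'user_id': '42'}
#   verify_admin(event, None)  # promotes user 42 and fires the "approval_email" lambda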
| 42.387755
| 148
| 0.639384
| 263
| 2,077
| 4.840304
| 0.319392
| 0.084839
| 0.050275
| 0.037706
| 0.25923
| 0.190102
| 0.190102
| 0.190102
| 0.190102
| 0.190102
| 0
| 0.011913
| 0.272508
| 2,077
| 48
| 149
| 43.270833
| 0.830576
| 0.108329
| 0
| 0.05
| 0
| 0
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025
| false
| 0
| 0.125
| 0
| 0.15
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95cadfb3b8d6c3a18abd5334655fd77acc7c9759
| 4,821
|
py
|
Python
|
run.py
|
Galaxy-SynBioCAD/rp2paths
|
f87ea0f64556be44af1ae717cd4246159253d029
|
[
"MIT"
] | null | null | null |
run.py
|
Galaxy-SynBioCAD/rp2paths
|
f87ea0f64556be44af1ae717cd4246159253d029
|
[
"MIT"
] | null | null | null |
run.py
|
Galaxy-SynBioCAD/rp2paths
|
f87ea0f64556be44af1ae717cd4246159253d029
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""
Created on September 21 2019
@author: Melchior du Lac
@description: Wrap rp2paths into a docker
"""
import argparse
import tempfile
import os
import logging
import shutil
import docker
import glob
def main(rp_pathways, rp2paths_pathways, rp2paths_compounds, timeout=30, max_steps=0, max_paths=150, unfold_compounds=False):
"""Call the docker to run rp2paths
:param rp_pathways: The path to the results RetroPath2.0 scope file
:param rp2paths_pathways: The path to the results rp2paths out_paths file
:param rp2paths_compounds: The path to the results rp2paths compounds file
:param timeout: The timeout of the function in minutes (Default: 30)
:param max_steps: The maximal number of steps WARNING: not used (Default: 0, ie. infinite)
:param max_paths: The maximal number of pathways to return WARNING: not used (Default: 150)
:param unfold_compounds: not sure WARNING: not used (Default: False)
:type rp_pathways: str
:type rp2paths_pathways: str
:type rp2paths_compounds: str
:type timeout: int
:type max_steps: int
:type max_paths: int
:type unfold_compounds: bool
:rtype: None
:return: None
"""
docker_client = docker.from_env()
image_str = 'brsynth/rp2paths-standalone'
try:
image = docker_client.images.get(image_str)
except docker.errors.ImageNotFound:
logging.warning('Could not find the image, trying to pull it')
try:
docker_client.images.pull(image_str)
image = docker_client.images.get(image_str)
except docker.errors.ImageNotFound:
logging.error('Cannot pull image: '+str(image_str))
exit(1)
with tempfile.TemporaryDirectory() as tmpOutputFolder:
if os.path.exists(rp_pathways):
shutil.copy(rp_pathways, tmpOutputFolder+'/rp_pathways.csv')
command = ['python',
'/home/tool_rp2paths.py',
'-rp_pathways',
'/home/tmp_output/rp_pathways.csv',
'-rp2paths_compounds',
'/home/tmp_output/rp2paths_compounds.csv',
'-rp2paths_pathways',
'/home/tmp_output/rp2paths_pathways.csv',
'-timeout',
str(timeout),
'-max_steps',
str(max_steps),
'-max_paths',
str(max_paths),
'-unfold_compounds',
str(unfold_compounds)]
container = docker_client.containers.run(image_str,
command,
detach=True,
stderr=True,
volumes={tmpOutputFolder+'/': {'bind': '/home/tmp_output', 'mode': 'rw'}})
container.wait()
err = container.logs(stdout=False, stderr=True)
err_str = err.decode('utf-8')
if 'ERROR' in err_str:
print(err_str)
elif 'WARNING' in err_str:
print(err_str)
if not os.path.exists(tmpOutputFolder+'/rp2paths_compounds.csv') or not os.path.exists(tmpOutputFolder+'/rp2paths_pathways.csv'):
print('ERROR: Cannot find the output file: '+str(tmpOutputFolder+'/rp2paths_compounds.csv'))
print('ERROR: Cannot find the output file: '+str(tmpOutputFolder+'/rp2paths_pathways.csv'))
else:
shutil.copy(tmpOutputFolder+'/rp2paths_pathways.csv', rp2paths_pathways)
shutil.copy(tmpOutputFolder+'/rp2paths_compounds.csv', rp2paths_compounds)
container.remove()
else:
logging.error('Cannot find one or more of the input files: '+str(rp_pathways))
exit(1)
if __name__ == "__main__":
parser = argparse.ArgumentParser('Enumerate the individual pathways from the results of Retropath2')
parser.add_argument('-rp_pathways', type=str)
parser.add_argument('-rp2paths_pathways', type=str)
parser.add_argument('-rp2paths_compounds', type=str)
parser.add_argument('-max_steps', type=int, default=0)
parser.add_argument('-timeout', type=int, default=30)
parser.add_argument('-max_paths', type=int, default=150)
parser.add_argument('-unfold_compounds', type=str, default='False')
params = parser.parse_args()
if params.timeout < 0:
logging.error('Timeout cannot be < 0: '+str(params.timeout))
exit(1)
main(params.rp_pathways, params.rp2paths_pathways, params.rp2paths_compounds, params.timeout, params.max_steps, params.max_paths, params.unfold_compounds)
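# Example command line (hypothetical file names; assumes a running docker daemon):
#   python run.py -rp_pathways rp2_scope.csv \
#                 -rp2paths_pathways out_paths.csv \
#                 -rp2paths_compounds compounds.csv \
#                 -timeout 30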
| 43.827273
| 158
| 0.611077
| 543
| 4,821
| 5.254144
| 0.263352
| 0.071504
| 0.04171
| 0.012618
| 0.201542
| 0.19313
| 0.121977
| 0.093936
| 0.093936
| 0.093936
| 0
| 0.018118
| 0.290189
| 4,821
| 109
| 159
| 44.229358
| 0.815605
| 0.186476
| 0
| 0.173333
| 0
| 0
| 0.214639
| 0.076322
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013333
| false
| 0
| 0.093333
| 0
| 0.106667
| 0.053333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95cb8a34cde724ada03c12bdaeb21669317ed997
| 402
|
py
|
Python
|
verilator/scripts/concat_up5k.py
|
micro-FPGA/engine-V
|
00a8f924e10fc69874d9c179f788bf037fe9c407
|
[
"Apache-2.0"
] | 44
|
2018-11-19T16:49:10.000Z
|
2021-12-05T10:16:24.000Z
|
verilator/scripts/concat_up5k.py
|
micro-FPGA/engine-V
|
00a8f924e10fc69874d9c179f788bf037fe9c407
|
[
"Apache-2.0"
] | null | null | null |
verilator/scripts/concat_up5k.py
|
micro-FPGA/engine-V
|
00a8f924e10fc69874d9c179f788bf037fe9c407
|
[
"Apache-2.0"
] | 5
|
2018-12-05T23:43:21.000Z
|
2020-09-03T04:36:34.000Z
|
spiFile = open('spiflash.bin','wb')
# 128KB is reserved for bitstream
bitFile = open('../bitstream/mf8a18_rv32i.bin','rb')
bitData = bitFile.read(0x20000)
riscvFile = open('riscv.bin','rb')
riscvData = riscvFile.read(32768)
spiFile.write(bitData)
spiFile.seek(0x20000)
spiFile.write(riscvData)
nullData = bytearray([0])
spiFile.seek(0x27fff)
spiFile.write(nullData)
riscvFile.close()
spiFile.close()
bitFile.close()
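# Sanity-check sketch (not in the original script): the resulting image should
# be exactly 0x28000 bytes, 128 KB of bitstream followed by 32 KB of firmware.
import os
assert os.path.getsize('spiflash.bin') == 0x28000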
| 17.478261
| 52
| 0.748756
| 52
| 402
| 5.769231
| 0.538462
| 0.12
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07989
| 0.097015
| 402
| 22
| 53
| 18.272727
| 0.746556
| 0.077114
| 0
| 0
| 0
| 0
| 0.152589
| 0.079019
| 0
| 0
| 0.057221
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95cda288d497faae566e114db4bdc1e1b83b2b52
| 753
|
py
|
Python
|
pyvista_gui/options.py
|
akaszynski/pyvista-gui
|
4ed7e3a52026dfeab4e82a300b92a92f43060dda
|
[
"MIT"
] | 6
|
2019-11-20T20:08:42.000Z
|
2022-02-24T12:24:20.000Z
|
pyvista_gui/options.py
|
akaszynski/pyvista-gui
|
4ed7e3a52026dfeab4e82a300b92a92f43060dda
|
[
"MIT"
] | 6
|
2020-01-27T16:15:11.000Z
|
2021-04-12T11:42:11.000Z
|
pyvista_gui/options.py
|
akaszynski/pyvista-gui
|
4ed7e3a52026dfeab4e82a300b92a92f43060dda
|
[
"MIT"
] | null | null | null |
"""Options for saving user prefences, etc.
"""
import json
import os
import pyvista
class RcParams(dict):
"""Internally used class to manage the rcParams"""
filename = os.path.join(pyvista.USER_DATA_PATH, 'rcParams.json')
def save(self):
with open(self.filename, 'w') as f:
json.dump(self, f)
return
def load(self):
with open(self.filename, 'r') as f:
data = json.load(f)
self.update(data)
def __setitem__(self, key, value):
dict.__setitem__(self, key, value)
self.save()
# The options
rcParams = RcParams(
dark_mode=False,
)
# Load user preferences from the last session; if none exist, save the defaults
try:
rcParams.load()
except (OSError, ValueError):
rcParams.save()
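# Usage sketch (illustrative, not part of the module): because __setitem__
# calls save(), item assignment persists immediately to rcParams.json:
#   from pyvista_gui.options import rcParams
#   rcParams['dark_mode'] = True   # written straight to disk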
| 19.815789
| 68
| 0.629482
| 100
| 753
| 4.63
| 0.5
| 0.056156
| 0.051836
| 0.069114
| 0.103672
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25498
| 753
| 37
| 69
| 20.351351
| 0.825312
| 0.217795
| 0
| 0
| 0
| 0
| 0.026042
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.130435
| false
| 0
| 0.130435
| 0
| 0.391304
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95ce4cab43e2034234aed87a60cc3f00447f9524
| 4,445
|
py
|
Python
|
2020/aoc/__init__.py
|
amochtar/adventofcode
|
292e7f00a1e19d2149d00246b0a77fedfcd3bd08
|
[
"MIT"
] | 1
|
2019-12-27T22:36:30.000Z
|
2019-12-27T22:36:30.000Z
|
2020/aoc/__init__.py
|
amochtar/adventofcode
|
292e7f00a1e19d2149d00246b0a77fedfcd3bd08
|
[
"MIT"
] | null | null | null |
2020/aoc/__init__.py
|
amochtar/adventofcode
|
292e7f00a1e19d2149d00246b0a77fedfcd3bd08
|
[
"MIT"
] | null | null | null |
import itertools
import re
import math
from typing import List, Tuple
def ints(text: str) -> Tuple[int, ...]:
"Return a tuple of all ints in a string"
return tuple(map(int, re.findall(r'-?\b\d+\b', text)))
def powerset(iterable):
"powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"
s = list(iterable)
return itertools.chain.from_iterable(itertools.combinations(s, r) for r in range(len(s)+1))
def manhattan(p: Tuple[int, ...], q=itertools.repeat(0)) -> int:
"Return the manhattan distance between 2 (multi-dimensional) points"
return sum([abs(a-b) for a, b in zip(p, q)])
def king_distance(p: Tuple[int, ...], q=itertools.repeat(0)) -> int:
"Return the number of chess King moves between two points"
return max(abs(a - b) for a, b in zip(p, q))
def neighbors4(p: Tuple[int, int]) -> List[Tuple[int, int]]:
"Return the 4 neighboring cells for a given position"
x, y = p
return [
(x, y-1),
(x, y+1),
(x-1, y),
(x+1, y)
]
def neighbors8(p: Tuple[int, int]) -> List[Tuple[int, int]]:
"Return the 8 neighboring cells for a given position"
x, y = p
return [
(x-1, y-1),
(x, y-1),
(x+1, y-1),
(x-1, y),
(x+1, y),
(x-1, y+1),
(x, y+1),
(x+1, y+1)
]
def neighbors_cube(p: Tuple[int, int, int]) -> List[Tuple[int, int, int]]:
"Return the 26 neighboring cells for a given position in a 3d cube"
x, y, z = p
n = []
for i in range(-1, 2):
for j in range(-1, 2):
for k in range(-1, 2):
if (i, j, k) != (0, 0, 0):
n.append((x+i, y+j, z+k))
return n
def neighbors_cube4(p: Tuple[int, int, int, int]) -> List[Tuple[int, int, int, int]]:
"Return the 80 neighboring cells for a given position in a 4-d cube"
x, y, z, w = p
n = []
for i in range(-1, 2):
for j in range(-1, 2):
for k in range(-1, 2):
for l in range(-1, 2):
if (i, j, k, l) != (0, 0, 0, 0):
n.append((x+i, y+j, z+k, w+l))
return n
moves = {
'n': lambda p: (p[0], p[1]-1),
's': lambda p: (p[0], p[1]+1),
'e': lambda p: (p[0]+1, p[1]),
'w': lambda p: (p[0]-1, p[1]),
}
left_turn = {
'n': 'w',
's': 'e',
'e': 'n',
'w': 's',
}
right_turn = {
'n': 'e',
's': 'w',
'e': 's',
'w': 'n',
}
opposite = {
'n': 's',
's': 'n',
'e': 'w',
'w': 'e',
}
facing_dir = {
'n': (0, -1),
's': (0, 1),
'e': (1, 0),
'w': (-1, 0),
}
origin = (0, 0)
hex_origin = (0, 0, 0)
hex_moves = {
'ne': lambda p: (p[0]+1, p[1], p[2]-1),
'nw': lambda p: (p[0], p[1]+1, p[2]-1),
'se': lambda p: (p[0], p[1]-1, p[2]+1),
'sw': lambda p: (p[0]-1, p[1], p[2]+1),
'w': lambda p: (p[0]-1, p[1]+1, p[2]),
'e': lambda p: (p[0]+1, p[1]-1, p[2]),
}
def hex_neighbors(p: Tuple[int, int, int]) -> List[Tuple[int, int, int]]:
return [move(p) for move in hex_moves.values()]
def add_pos(a: Tuple[int, int], b: Tuple[int, int], factor: int = 1) -> Tuple[int, int]:
"Adds two position tuples"
return (a[0]+b[0]*factor, a[1]+b[1]*factor)
def sub_pos(a: Tuple[int, int], b: Tuple[int, int]) -> Tuple[int, int]:
"Subtracts the position tuple b from a"
return (a[0]-b[0], a[1]-b[1])
def mult_pos(a: Tuple[int, int], factor: int) -> Tuple[int, int]:
"Multiplies a position tuple with a given factor"
return (a[0]*factor, a[1]*factor)
def rot_left(pos: Tuple[int, int], rel: Tuple[int, int] = origin) -> Tuple[int, int]:
"Rotates a position 90 degrees left (counter clock-wise) relative to the given location (default origin)"
rel_pos = sub_pos(pos, rel)
new_pos = (rel_pos[1], -rel_pos[0])
return add_pos(new_pos, rel)
def rot_right(pos: Tuple[int, int], rel: Tuple[int, int] = origin) -> Tuple[int, int]:
"Rotates a position 90 degrees right (clock-wise) relative to the given location (default origin)"
rel_pos = sub_pos(pos, rel)
new_pos = (-rel_pos[1], rel_pos[0])
return add_pos(new_pos, rel)
def min_max(lst: List[Tuple[int, ...]]) -> Tuple[int, ...]:
"Returns the min and max values for every index in the given list of tuples"
return tuple((min(e), max(e)) for e in zip(*lst))
def mod1(a: int, b: int) -> int:
"Returns 1-based modulo"
return 1 + (a-1) % b
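# A few illustrative checks (not part of the original module):
assert ints('x=3, y=-4') == (3, -4)
assert manhattan((3, 4)) == 7          # distance from the origin by default
assert king_distance((3, 4)) == 4
assert len(neighbors8((0, 0))) == 8
assert min_max([(1, 5), (2, 3)]) == ((1, 2), (3, 5))
assert mod1(12, 12) == 12              # 1-based wrap-around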
| 26.939394
| 109
| 0.526659
| 787
| 4,445
| 2.935197
| 0.162643
| 0.085714
| 0.114286
| 0.038961
| 0.551948
| 0.511688
| 0.511688
| 0.501299
| 0.437229
| 0.407359
| 0
| 0.044383
| 0.265017
| 4,445
| 164
| 110
| 27.103659
| 0.662687
| 0.19685
| 0
| 0.188976
| 0
| 0.015748
| 0.205174
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125984
| false
| 0
| 0.031496
| 0.007874
| 0.283465
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95ce971f5a305cd3a19578c204fef92020757f3c
| 4,431
|
py
|
Python
|
pi_source_code.py
|
cjkuhlmann/CCHack2019
|
fb6eb505ac350c2dda0c36e1f33254fbeef049bf
|
[
"MIT"
] | null | null | null |
pi_source_code.py
|
cjkuhlmann/CCHack2019
|
fb6eb505ac350c2dda0c36e1f33254fbeef049bf
|
[
"MIT"
] | null | null | null |
pi_source_code.py
|
cjkuhlmann/CCHack2019
|
fb6eb505ac350c2dda0c36e1f33254fbeef049bf
|
[
"MIT"
] | null | null | null |
import math
import time
from max30105 import MAX30105, HeartRate
import smbus
from bme280 import BME280
import socket
#from matplotlib import pyplot as plt
class DataPoint():
def __init__(self,value,time):
self.time_stamp = time
self.value = value
class Device():
def __init__(self):
self.humidity = []
self.temperature = []
self.smoke_level = []
self.mean_size = 100
self.identifier = "0,0"
def setup_network(self):
self.network = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
connected = False
while not connected:
try:
self.network.connect(("192.168.88.167",25565))
connected = True
except OSError:
pass
def upload_data(self):
network_string = (#str(round(self.calculate_humidity_trend(),5)) + "," +
str(round(self.humidity[-1].value,5)) + "," +
#str(round(self.calculate_temperature_trend(),5)) + "," +
str(round(self.temperature[-1].value,5)) + "," +
#str(round(self.calculate_smoke_level_trend(),5)) + "," +
str(round(self.smoke_level[-1].value,5)) + "," +
str(round(self.pressure.value,5)) + "," +
str(self.identifier))
network_string = network_string.encode()
self.network.sendall(network_string)
def update(self):
dev.get_smoke_data()
dev.get_humi_temp_data()
def setup_particle_sensor(self):
self.MAX30105 = MAX30105()
self.MAX30105.setup(leds_enable=3)
self.MAX30105.set_led_pulse_amplitude(1,0.0)
self.MAX30105.set_led_pulse_amplitude(2,0.0)
self.MAX30105.set_led_pulse_amplitude(3,12.5)
self.MAX30105.set_slot_mode(1,"red")
self.MAX30105.set_slot_mode(2,"ir")
self.MAX30105.set_slot_mode(3,"green")
self.MAX30105.set_slot_mode(4,"off")
self.hr = HeartRate(self.MAX30105)
def setup_temp_humi_sensor(self):
bus = smbus.SMBus(1)
self.bme280 = BME280(i2c_dev=bus)
def setup_sensors(self):
self.setup_particle_sensor()
self.setup_temp_humi_sensor()
def get_smoke_data(self):
data = []
for i in range(self.mean_size*3+1):
samples = self.MAX30105.get_samples()
if samples is not None:
for sample in samples:
r = samples[2] & 0xff
d = self.hr.low_pass_fir(r)
data.append(d)
mean = sum(data)/(self.mean_size*3)
self.smoke_level.append(DataPoint(mean, time.time()))
def get_humi_temp_data(self):
temp_data = []
humi_data = []
pres_data = []
for i in range(self.mean_size):
temp_data.append(self.bme280.get_temperature())
humi_data.append(self.bme280.get_humidity())
pres_data.append(self.bme280.get_pressure())
mean_temp = sum(temp_data)/self.mean_size
mean_humi = sum(humi_data)/self.mean_size
mean_pres = sum(pres_data)/self.mean_size
self.humidity.append(DataPoint(mean_humi,time.time()))
self.temperature.append(DataPoint(mean_temp,time.time()))
self.pressure = DataPoint(mean_pres,time.time())
"""def calculate_humidity_trend(self):
return self.lin_reg(self.humidity)
def calculate_temperature_trend(self):
return self.lin_reg(self.temperature)
def calculate_smoke_level_trend(self):
return self.lin_reg(self.smoke_level)
def lin_reg(self,data_set):
x = 0
Sxy = 0
Sx = 0
Sx2 = 0
Sy = 0
Sy2 = 0
sample_size = len(data_set)
for y in data_set:
y=y.value
x += 1
Sxy += x * y
Sx += x
Sx2 += x**2
Sy += y
Sy2 += y**2
lin_reg = ((sample_size*Sxy)-(Sx*Sy))/((sample_size*Sx2)-(Sx)**2)
return lin_reg"""
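# Cross-check of the least-squares slope formula used above (a sketch, not in
# the original file): slope = (n*Sxy - Sx*Sy) / (n*Sx2 - Sx**2).
# For the points (1,2), (2,4), (3,6): Sxy=28, Sx=6, Sy=12, Sx2=14, n=3,
# giving (3*28 - 6*12) / (3*14 - 36) = 12/6 = 2.0, the expected slope.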
dev = Device()
dev.setup_sensors()
dev.setup_network()
for i in range(2):
dev.update()
while True:
try:
dev.update()
dev.upload_data()
print("sending_data")
except:
dev.setup_network()
| 28.403846
| 83
| 0.558565
| 545
| 4,431
| 4.321101
| 0.220183
| 0.056051
| 0.035669
| 0.033121
| 0.242463
| 0.134183
| 0.112527
| 0.051805
| 0
| 0
| 0
| 0.053828
| 0.324983
| 4,431
| 155
| 84
| 28.587097
| 0.733534
| 0.045362
| 0
| 0.086957
| 0
| 0
| 0.01315
| 0
| 0
| 0
| 0.001144
| 0
| 0
| 1
| 0.108696
| false
| 0.01087
| 0.065217
| 0
| 0.195652
| 0.01087
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95cead6bce011703374b48a18d5379f241d0c282
| 1,417
|
py
|
Python
|
butter/mas/clients/client_factory.py
|
bennymeg/Butter.MAS.PythonAPI
|
9641293436d989ae9c5324c2b8129f232822b248
|
[
"Apache-2.0"
] | 2
|
2019-08-22T08:57:42.000Z
|
2019-11-28T14:01:49.000Z
|
butter/mas/clients/client_factory.py
|
bennymeg/Butter.MAS.PythonAPI
|
9641293436d989ae9c5324c2b8129f232822b248
|
[
"Apache-2.0"
] | null | null | null |
butter/mas/clients/client_factory.py
|
bennymeg/Butter.MAS.PythonAPI
|
9641293436d989ae9c5324c2b8129f232822b248
|
[
"Apache-2.0"
] | null | null | null |
from .client_http import HttpClient
from .client_tcp import TcpClient
from .client_udp import UdpClient
from .client import Client
class ClientFactory:
""" Client factory for different types of protocols """
def getClient(self, ip, port=None, protocol="http") -> Client:
"""Creates new client
Args:
ip (str): robot IP
port (int, optional): robot port. Defaults to None.
protocol (str, optional): communication protocol. Defaults to "http".
Returns:
Client: requested client
"""
if protocol == "http":
return HttpClient(ip) if port is None else HttpClient(ip, port)
elif protocol == "tcp":
return TcpClient(ip) if port is None else TcpClient(ip, port)
elif protocol == "udp":
return UdpClient(ip) if port is None else UdpClient(ip, port)
else:
return None
def getClientClass(self, protocol="http"):
"""Get client class
Args:
protocol (str, optional): communication protocol. Defaults to "http".
Returns:
Client: client class
"""
if protocol == "http":
return HttpClient
elif protocol == "tcp":
return TcpClient
elif protocol == "udp":
return UdpClient
else:
return None
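# Usage sketch (hypothetical address; the clients are assumed to accept an IP
# and an optional port, as the factory suggests):
#   factory = ClientFactory()
#   client = factory.getClient('192.168.0.20', protocol='tcp')  # a TcpClient
#   klass = factory.getClientClass('udp')                       # the UdpClient class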
| 30.148936
| 81
| 0.56669
| 149
| 1,417
| 5.369128
| 0.288591
| 0.0375
| 0.03
| 0.0375
| 0.46
| 0.235
| 0.1675
| 0.1675
| 0.1675
| 0.1675
| 0
| 0
| 0.348624
| 1,417
| 46
| 82
| 30.804348
| 0.866739
| 0.281581
| 0
| 0.434783
| 0
| 0
| 0.032147
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.173913
| 0
| 0.652174
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95ceaebae16674be2fef2960c47326152d1eb461
| 1,569
|
py
|
Python
|
scrapytest/spiders/ScrapyDemo5.py
|
liang1024/Scrapy
|
bfa7ea5b2174bf91c49f4da9dadc5471acc43092
|
[
"Apache-2.0"
] | null | null | null |
scrapytest/spiders/ScrapyDemo5.py
|
liang1024/Scrapy
|
bfa7ea5b2174bf91c49f4da9dadc5471acc43092
|
[
"Apache-2.0"
] | null | null | null |
scrapytest/spiders/ScrapyDemo5.py
|
liang1024/Scrapy
|
bfa7ea5b2174bf91c49f4da9dadc5471acc43092
|
[
"Apache-2.0"
] | null | null | null |
import scrapy
'''
Now that you know how to extract data from pages, let's see how to follow links from them.
The first thing is to extract the link to the page we want to follow. Inspecting our page, we can see there is a link to the next page with the following markup:
<ul class="pager">
<li class="next">
<a href="/page/2/">Next <span aria-hidden="true">→</span></a>
</li>
</ul>
We can try extracting it in the shell:
>>> response.css('li.next a').extract_first()
'<a href="/page/2/">Next <span aria-hidden="true">→</span></a>'
This gets the anchor element, but we want the attribute href. For that, Scrapy supports a CSS extension that lets you select the attribute contents, like this:
>>> response.css('li.next a::attr(href)').extract_first()
'/page/2/'
Let's see our spider, now modified to recursively follow the link to the next page and extract data from it:
'''
class QuotesSpider(scrapy.Spider):
name = "demo5"
start_urls = [
'http://quotes.toscrape.com/page/1/',
]
def parse(self, response):
for quote in response.css('div.quote'):
yield {
'text': quote.css('span.text::text').extract_first(),
'author': quote.css('small.author::text').extract_first(),
'tags': quote.css('div.tags a.tag::text').extract(),
}
next_page = response.css('li.next a::attr(href)').extract_first()
if next_page is not None:
next_page = response.urljoin(next_page)
yield scrapy.Request(next_page, callback=self.parse)
'''
Now, after extracting the data, the parse() method looks for the link to the next page, builds a full absolute URL using the urljoin() method (since links can be relative), and yields a new request to the next page, registering itself as the callback to handle the data extraction there, so the crawl keeps going through all the pages.
What you see here is Scrapy's mechanism for following links: when you yield a request in a callback method, Scrapy will schedule that request to be sent and register a callback method to run when it finishes.
Using this, you can build complex crawlers that follow links according to rules you define, and extract different kinds of data depending on the page being visited.
In our example, it creates a kind of loop, following all the links to the next page until it cannot find one, which is handy for crawling blogs, forums and other sites with pagination.
'''
'''
To run the project:
scrapy crawl demo5
'''
| 24.138462
| 116
| 0.66348
| 177
| 1,569
| 5.824859
| 0.519774
| 0.058196
| 0.037827
| 0.049467
| 0.153249
| 0.13579
| 0.13579
| 0.13579
| 0.13579
| 0
| 0
| 0.004623
| 0.172721
| 1,569
| 65
| 117
| 24.138462
| 0.788906
| 0
| 0
| 0.111111
| 0
| 0
| 0.189679
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95cf9c3a1a9e3db6fb75803b4f3891c4c503d528
| 15,563
|
py
|
Python
|
digits/model/forms.py
|
Linda-liugongzi/DIGITS-digits-py3
|
6df5eb6972574a628b9544934518ec8dfa9c7439
|
[
"BSD-3-Clause"
] | null | null | null |
digits/model/forms.py
|
Linda-liugongzi/DIGITS-digits-py3
|
6df5eb6972574a628b9544934518ec8dfa9c7439
|
[
"BSD-3-Clause"
] | null | null | null |
digits/model/forms.py
|
Linda-liugongzi/DIGITS-digits-py3
|
6df5eb6972574a628b9544934518ec8dfa9c7439
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2014-2017, NVIDIA CORPORATION. All rights reserved.
import os
import flask
from flask_wtf import FlaskForm
import wtforms
from wtforms import validators
from digits.config import config_value
from digits.device_query import get_device, get_nvml_info
from digits import utils
from digits.utils import sizeof_fmt
from digits.utils.forms import validate_required_iff
from digits import frameworks
from flask_babel import lazy_gettext as _
class ModelForm(FlaskForm):
# Methods
def selection_exists_in_choices(form, field):
found = False
for choice in field.choices:
if choice[0] == field.data:
found = True
if not found:
raise validators.ValidationError(_("Selected job doesn't exist. Maybe it was deleted by another user."))
def validate_NetParameter(form, field):
fw = frameworks.get_framework_by_id(form['framework'].data)
try:
# below function raises a BadNetworkException in case of validation error
fw.validate_network(field.data)
except frameworks.errors.BadNetworkError as e:
raise validators.ValidationError(_('Bad network: %(message)s', message=e.message))
def validate_file_exists(form, field):
from_client = bool(form.python_layer_from_client.data)
filename = ''
if not from_client and field.type == 'StringField':
filename = field.data
if filename == '':
return
if not os.path.isfile(filename):
raise validators.ValidationError(_('Server side file, %(filename)s, does not exist.', filename=filename))
def validate_py_ext(form, field):
from_client = bool(form.python_layer_from_client.data)
filename = ''
if from_client and field.type == 'FileField':
filename = flask.request.files[field.name].filename
elif not from_client and field.type == 'StringField':
filename = field.data
if filename == '':
return
(root, ext) = os.path.splitext(filename)
if ext != '.py' and ext != '.pyc':
raise validators.ValidationError(_('Python file, %(filename)s, needs .py or .pyc extension.',
filename=filename))
# Fields
# The options for this get set in the view (since they are dynamic)
dataset = utils.forms.SelectField(
_('Select Dataset'),
choices=[],
tooltip=_("Choose the dataset to use for this model.")
)
python_layer_from_client = utils.forms.BooleanField(
_('Use client-side file'),
default=False,
)
python_layer_client_file = utils.forms.FileField(
_('Client-side file'),
validators=[
validate_py_ext
],
tooltip=_("Choose a Python file on the client containing layer definitions.")
)
python_layer_server_file = utils.forms.StringField(
_('Server-side file'),
validators=[
validate_file_exists,
validate_py_ext
],
tooltip=_("Choose a Python file on the server containing layer definitions.")
)
train_epochs = utils.forms.IntegerField(
_('Training epochs'),
validators=[
validators.NumberRange(min=1)
],
default=30,
tooltip=_("How many passes through the training data?")
)
snapshot_interval = utils.forms.FloatField(
_('Snapshot interval (in epochs)'),
default=1,
validators=[
validators.NumberRange(min=0),
],
tooltip=_("How many epochs of training between taking a snapshot?")
)
val_interval = utils.forms.FloatField(
_('Validation interval (in epochs)'),
default=1,
validators=[
validators.NumberRange(min=0)
],
tooltip=_("How many epochs of training between running through one pass of the validation data?")
)
traces_interval = utils.forms.IntegerField(
_('Tracing Interval (in steps)'),
validators=[
validators.NumberRange(min=0)
],
default=0,
tooltip=_("Generation of a timeline trace every few steps")
)
random_seed = utils.forms.IntegerField(
_('Random seed'),
validators=[
validators.NumberRange(min=0),
validators.Optional(),
],
tooltip=_('If you provide a random seed, then back-to-back runs with '
'the same model and dataset should give identical results.')
)
batch_size = utils.forms.MultiIntegerField(
_('Batch size'),
default=100,
validators=[
utils.forms.MultiNumberRange(min=1),
utils.forms.MultiOptional(),
],
tooltip=_("How many images to process at once. If blank, values are used from the network definition.")
)
batch_accumulation = utils.forms.IntegerField(
_('Batch Accumulation'),
validators=[
validators.NumberRange(min=1),
validators.Optional(),
],
tooltip=_("Accumulate gradients over multiple batches (useful when you "
"need a bigger batch size for training but it doesn't fit in memory).")
)
# Solver types
solver_type = utils.forms.SelectField(
_('Solver type'),
choices=[
('SGD', _('SGD (Stochastic Gradient Descent)')),
('MOMENTUM', _('Momentum')),
('NESTEROV', _("NAG (Nesterov's accelerated gradient)")),
('ADAGRAD', _('AdaGrad (Adaptive Gradient)')),
('ADAGRADDA', _('AdaGradDA (AdaGrad Dual Averaging)')),
('ADADELTA', _('AdaDelta')),
('ADAM', _('Adam (Adaptive Moment Estimation)')),
('RMSPROP', _('RMSprop')),
('FTRL', _('FTRL (Follow-The-Regularized-Leader)')),
],
default='SGD',
tooltip=_("What type of solver will be used?"),
)
def validate_solver_type(form, field):
fw = frameworks.get_framework_by_id(form.framework)
if fw is not None:
if not fw.supports_solver_type(field.data):
raise validators.ValidationError(
_('Solver type not supported by this framework'))
# Additional settings specific to selected solver
rms_decay = utils.forms.FloatField(
_('RMS decay value'),
default=0.99,
validators=[
validators.NumberRange(min=0),
],
tooltip=_("If the gradient updates results in oscillations the gradient is reduced "
"by times 1-rms_decay. Otherwise it will be increased by rms_decay.")
)
# Learning rate
learning_rate = utils.forms.MultiFloatField(
_('Base Learning Rate'),
default=0.01,
validators=[
utils.forms.MultiNumberRange(min=0),
],
tooltip=_("Affects how quickly the network learns. If you are getting "
"NaN for your loss, you probably need to lower this value.")
)
lr_policy = wtforms.SelectField(
_('Policy'),
choices=[
('fixed', _('Fixed')),
('step', _('Step Down')),
('multistep', _('Step Down (arbitrary steps)')),
('exp', _('Exponential Decay')),
('inv', _('Inverse Decay')),
('poly', _('Polynomial Decay')),
('sigmoid', _('Sigmoid Decay')),
],
default='step'
)
lr_step_size = wtforms.FloatField(_('Step Size'), default=33)
lr_step_gamma = wtforms.FloatField(_('Gamma'), default=0.1)
lr_multistep_values = wtforms.StringField(_('Step Values'), default="50,85")
def validate_lr_multistep_values(form, field):
if form.lr_policy.data == 'multistep':
for value in field.data.split(','):
try:
float(value)
except ValueError:
raise validators.ValidationError(_('invalid value'))
lr_multistep_gamma = wtforms.FloatField(_('Gamma'), default=0.5)
lr_exp_gamma = wtforms.FloatField(_('Gamma'), default=0.95)
lr_inv_gamma = wtforms.FloatField(_('Gamma'), default=0.1)
lr_inv_power = wtforms.FloatField(_('Power'), default=0.5)
lr_poly_power = wtforms.FloatField(_('Power'), default=3)
lr_sigmoid_step = wtforms.FloatField(_('Step'), default=50)
lr_sigmoid_gamma = wtforms.FloatField(_('Gamma'), default=0.1)
# Network
# Use a SelectField instead of a HiddenField so that the default value
# is used when nothing is provided (through the REST API)
method = wtforms.SelectField(
_('Network type'),
choices=[
('standard', _('Standard network')),
('previous', _('Previous network')),
('pretrained', _('Pretrained network')),
('custom', _('Custom network')),
],
default='standard',
)
# framework - hidden field, set by Javascript to the selected framework ID
framework = wtforms.HiddenField(
_('framework'),
validators=[
validators.AnyOf(
[fw.get_id() for fw in frameworks.get_frameworks()],
message=_('The framework you choose is not currently supported.')
)
],
default=frameworks.get_frameworks()[0].get_id()
)
# The options for this get set in the view (since they are dependent on the data type)
standard_networks = wtforms.RadioField(
_('Standard Networks'),
validators=[
validate_required_iff(method='standard'),
],
)
previous_networks = wtforms.RadioField(
_('Previous Networks'),
choices=[],
validators=[
validate_required_iff(method='previous'),
selection_exists_in_choices,
],
)
pretrained_networks = wtforms.RadioField(
_('Pretrained Networks'),
choices=[],
validators=[
validate_required_iff(method='pretrained'),
selection_exists_in_choices,
],
)
custom_network = utils.forms.TextAreaField(
_('Custom Network'),
validators=[
validate_required_iff(method='custom'),
validate_NetParameter,
],
)
custom_network_snapshot = utils.forms.TextField(
_('Pretrained model(s)'),
tooltip=_("Paths to pretrained model files, separated by '%(pathsep)s'. "
"Only edit this field if you understand how fine-tuning "
"works in caffe or torch.", pathsep=os.path.pathsep)
)
def validate_custom_network_snapshot(form, field):
pass
# if form.method.data == 'custom':
# for filename in field.data.strip().split(os.path.pathsep):
# if filename and not os.path.lexists(filename):
# raise validators.ValidationError('File "%s" does not exist' % filename)
# Select one of several GPUs
select_gpu = wtforms.RadioField(
_('Select which GPU you would like to use'),
choices=[('next', 'Next available')] + [(
index,
'#%s - %s (%s memory)' % (
index,
get_device(index).name,
sizeof_fmt(
get_nvml_info(index)['memory']['total']
if get_nvml_info(index) and 'memory' in get_nvml_info(index)
else get_device(index).totalGlobalMem)
),
) for index in config_value('gpu_list').split(',') if index],
default='next',
)
# Select N of several GPUs
select_gpus = utils.forms.SelectMultipleField(
_('Select which GPU[s] you would like to use'),
choices=[(
index,
'#%s - %s (%s memory)' % (
index,
get_device(index).name,
sizeof_fmt(
get_nvml_info(index)['memory']['total']
if get_nvml_info(index) and 'memory' in get_nvml_info(index)
else get_device(index).totalGlobalMem)
),
) for index in config_value('gpu_list').split(',') if index],
tooltip=_("The job won't start until all of the chosen GPUs are available.")
)
# XXX For testing
# The Flask test framework can't handle SelectMultipleFields correctly
select_gpus_list = wtforms.StringField(_('Select which GPU[s] you would like to use (comma separated)'))
def validate_select_gpus(form, field):
if form.select_gpus_list.data:
field.data = form.select_gpus_list.data.split(',')
# Use next available N GPUs
select_gpu_count = wtforms.IntegerField(_('Use this many GPUs (next available)'),
validators=[
validators.NumberRange(min=1, max=len(
config_value('gpu_list').split(',')))
],
default=1,
)
def validate_select_gpu_count(form, field):
if field.data is None:
if form.select_gpus.data:
# Make this field optional
field.errors[:] = []
raise validators.StopValidation()
model_name = utils.forms.StringField(_('Model Name'),
validators=[
validators.DataRequired()
],
tooltip=_("An identifier, later used to refer to this model in the Application.")
)
group_name = utils.forms.StringField(_('Group Name'),
tooltip=_("An optional group name for organization on the main page.")
)
# allows shuffling data during training (for frameworks that support this, as indicated by
# their Framework.can_shuffle_data() method)
shuffle = utils.forms.BooleanField(_('Shuffle Train Data'),
default=True,
tooltip=_('For every epoch, shuffle the data before training.')
)
steps = utils.forms.IntegerField("Total training steps",
default=4000,
validators=[
validators.NumberRange(min=1)
],
tooltip="Total number of training steps (iterations) for this run")
iter_store_step = utils.forms.IntegerField("Snapshot step interval",
default=1000,
validators=[
validators.NumberRange(min=1)
],
tooltip="How many steps to wait between saving snapshots")
train_batch_size = utils.forms.IntegerField("Batch size",
default=100,
validators=[
validators.NumberRange(min=1)
],
tooltip="How many images to process at once; defaults to 100")
# bottleneck_dir = utils.forms.StringField("Bottleneck directory",
# tooltip="Compute each image's bottleneck value and store it under this directory")
| 36.791962
| 122
| 0.556512
| 1,545
| 15,563
| 5.440777
| 0.259547
| 0.03331
| 0.040566
| 0.044492
| 0.258506
| 0.196407
| 0.164406
| 0.148227
| 0.139186
| 0.131573
| 0
| 0.007402
| 0.340294
| 15,563
| 422
| 123
| 36.879147
| 0.811337
| 0.083531
| 0
| 0.307463
| 0
| 0
| 0.219826
| 0.002178
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026866
| false
| 0.008955
| 0.035821
| 0
| 0.197015
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95d02019dda244ece2c09a15f8673c55536ad4de
| 1,155
|
py
|
Python
|
004 Sons/afinacao.py
|
yamadathamine/300ideiasparaprogramarPython
|
331a063bbf8bcd117ae5a34324b8176a6014fc98
|
[
"MIT"
] | null | null | null |
004 Sons/afinacao.py
|
yamadathamine/300ideiasparaprogramarPython
|
331a063bbf8bcd117ae5a34324b8176a6014fc98
|
[
"MIT"
] | 4
|
2020-06-09T19:10:04.000Z
|
2020-06-17T18:23:47.000Z
|
004 Sons/afinacao.py
|
yamadathamine/300ideiasparaprogramarPython
|
331a063bbf8bcd117ae5a34324b8176a6014fc98
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
# using python 3
# Tuning - Alberto plays the guitar and is a programmer.
# Needing to tune his guitar, and with no tuning fork at hand,
# he decided to write a program to help him.
# What he wanted was the note A sounding nonstop until he managed to tune the
# corresponding guitar string; the remaining strings he could tune against the first one.
# Write a program that plays the note A (440 Hz) through the computer's speaker
# and only stops when some key is pressed.
import numpy as np
import simpleaudio as sa
frequency = 440 # Our played note will be 440 Hz
fs = 44100 # 44100 samples per second
seconds = 3 # Note duration of 3 seconds
# Generate array with seconds*sample_rate steps, ranging between 0 and seconds
t = np.linspace(0, seconds, seconds * fs, False)
# Generate a 440 Hz sine wave
note = np.sin(frequency * t * 2 * np.pi)
# Ensure that highest value is in 16-bit range
audio = note * (2**15 - 1) / np.max(np.abs(note))
# Convert to 16-bit data
audio = audio.astype(np.int16)
# Start playback
play_obj = sa.play_buffer(audio, 1, 2, fs)
# Wait for playback to finish before exiting
play_obj.wait_done()
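A note on behavior: the snippet above plays the tone once for a fixed three seconds, while the problem statement asks for the note to repeat until a key is pressed. A minimal sketch of that loop, assuming the same simpleaudio setup; the stop_on_keypress helper is illustrative (it waits for Enter on stdin rather than any key) and is not part of the original:
import threading
import numpy as np
import simpleaudio as sa

def stop_on_keypress(stop_event):
    input()  # blocks until the user presses Enter
    stop_event.set()

fs = 44100
t = np.linspace(0, 1, fs, False)            # one second of samples
note = np.sin(440 * t * 2 * np.pi)          # 440 Hz sine; 440 full cycles, so loops are phase-continuous
audio = (note * (2**15 - 1) / np.max(np.abs(note))).astype(np.int16)

stop = threading.Event()
threading.Thread(target=stop_on_keypress, args=(stop,), daemon=True).start()
while not stop.is_set():
    sa.play_buffer(audio, 1, 2, fs).wait_done()  # replay the one-second tone until signaled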
| 35
| 88
| 0.735931
| 199
| 1,155
| 4.246231
| 0.653266
| 0.017751
| 0.016568
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043944
| 0.192208
| 1,155
| 33
| 89
| 35
| 0.861736
| 0.684848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.181818
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95d7f54672f221417081565b033268249f18412b
| 835
| py
| Python
| tests/test_modules/test_builtin/test_grouppart.py
| MattTaylorDLS/pymalcolm
| 995a8e4729bd745f8f617969111cc5a34ce1ac14
| ["Apache-2.0"] | null | null | null
| tests/test_modules/test_builtin/test_grouppart.py
| MattTaylorDLS/pymalcolm
| 995a8e4729bd745f8f617969111cc5a34ce1ac14
| ["Apache-2.0"] | null | null | null
| tests/test_modules/test_builtin/test_grouppart.py
| MattTaylorDLS/pymalcolm
| 995a8e4729bd745f8f617969111cc5a34ce1ac14
| ["Apache-2.0"] | null | null | null |
import unittest
from malcolm.core import call_with_params
from malcolm.modules.builtin.parts import GroupPart
class TestGroupPart(unittest.TestCase):
def setUp(self):
self.o = call_with_params(
GroupPart, name="things", description="A group of things")
self.setter = list(self.o.create_attribute_models())[0][2]
def test_init(self):
assert self.o.name == "things"
assert self.o.attr.value == "expanded"
assert self.o.attr.meta.description == "A group of things"
assert self.o.attr.meta.tags == ("widget:group", "config")
def test_setter(self):
assert self.o.attr.value == "expanded"
self.setter("collapsed")
assert self.o.attr.value == "collapsed"
with self.assertRaises(ValueError):
self.setter("anything else")
| 32.115385
| 70
| 0.653892
| 107
| 835
| 5.028037
| 0.439252
| 0.074349
| 0.122677
| 0.139405
| 0.315985
| 0.104089
| 0
| 0
| 0
| 0
| 0
| 0.003077
| 0.221557
| 835
| 25
| 71
| 33.4
| 0.824615
| 0
| 0
| 0.105263
| 0
| 0
| 0.132934
| 0
| 0
| 0
| 0
| 0
| 0.368421
| 1
| 0.157895
| false
| 0
| 0.157895
| 0
| 0.368421
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95d8eae1e421c5a5d85e31ca5953813a5295d371
| 512
| py
| Python
| ok2_backend/common/utils.py
| Mipsters/ok2-backend
| 50ddbb44262749d731f4e923add205541254223d
| ["MIT"] | 1
| 2020-02-10T17:53:58.000Z
| 2020-02-10T17:53:58.000Z
| ok2_backend/common/utils.py
| Mipsters/ok2-backend
| 50ddbb44262749d731f4e923add205541254223d
| ["MIT"] | 6
| 2020-01-06T19:37:12.000Z
| 2021-09-22T18:03:31.000Z
| ok2_backend/common/utils.py
| Mipsters/ok2-backend
| 50ddbb44262749d731f4e923add205541254223d
| ["MIT"] | 5
| 2019-11-18T17:39:29.000Z
| 2020-07-31T16:00:21.000Z |
import os
from jose import jwt
from datetime import datetime, timedelta
JWT_SECRET = 'secret'
JWT_ALGORITHM = 'HS256'
JWT_EXP_DELTA_SECONDS = 31556952 # year
def get_token(request):
# decode with the same secret and algorithm used by create_token below;
# the original read os.environ['JWT_SECRET'], which need not match JWT_SECRET above
return jwt.decode(request.headers.get('Authorization'), JWT_SECRET, algorithms=[JWT_ALGORITHM])
def create_token(user_id):
payload = {
'user_id': user_id,
'exp': datetime.utcnow() + timedelta(seconds=JWT_EXP_DELTA_SECONDS)
}
jwt_token = jwt.encode(payload, JWT_SECRET, JWT_ALGORITHM)
return jwt_token
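A quick round-trip check of the two helpers above; the FakeRequest shim is illustrative and stands in for whatever request object the real handlers receive:
class FakeRequest:
    # minimal stand-in exposing the headers mapping get_token expects
    def __init__(self, token):
        self.headers = {'Authorization': token}

token = create_token(user_id=42)
payload = get_token(FakeRequest(token))
assert payload['user_id'] == 42  # 'exp' is also present and verified by jose on decode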
| 23.272727
| 85
| 0.722656
| 69
| 512
| 5.101449
| 0.434783
| 0.076705
| 0.102273
| 0.102273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026005
| 0.173828
| 512
| 21
| 86
| 24.380952
| 0.806147
| 0.007813
| 0
| 0
| 0
| 0
| 0.086957
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0.2
| 0.066667
| 0.466667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95e0d6973a04cf649a738acb651bea0fa6b7dfcd
| 996
| py
| Python
| Inflearn_SungKim/3.MultiVariableLinearRegression/multi-variableLinearregression.py
| shinhaha/tensorflow
| 4647017a727985d64c5b0addee92f0ec516952c1
| ["MIT"] | null | null | null
| Inflearn_SungKim/3.MultiVariableLinearRegression/multi-variableLinearregression.py
| shinhaha/tensorflow
| 4647017a727985d64c5b0addee92f0ec516952c1
| ["MIT"] | null | null | null
| Inflearn_SungKim/3.MultiVariableLinearRegression/multi-variableLinearregression.py
| shinhaha/tensorflow
| 4647017a727985d64c5b0addee92f0ec516952c1
| ["MIT"] | null | null | null |
import tensorflow as tf
x1_data=[73.,93.,89.,96.,73.]
x2_data=[80.,88.,91.,98.,66.]
x3_data=[75.,93.,90.,100.,70.]
y_data=[152.,185.,180.,196.,142.]
x1=tf.placeholder(tf.float32)
x2=tf.placeholder(tf.float32)
x3=tf.placeholder(tf.float32)
Y=tf.placeholder(tf.float32)
w1=tf.Variable(tf.random_normal([1]),name='weight1')
w2=tf.Variable(tf.random_normal([1]),name='weight2')
w3=tf.Variable(tf.random_normal([1]),name='weight3')  # was name='weight1', a copy-paste slip
b=tf.Variable(tf.random_normal([1]),name='bias')
hypothesis=x1*w1+x2*w2+x3*w3+b
cost=tf.reduce_mean(tf.square(hypothesis-Y))
#minimize
optimizer=tf.train.GradientDescentOptimizer(learning_rate=1e-5)
train=optimizer.minimize(cost)
#launch graph
sess=tf.Session()
#initialize
sess.run(tf.global_variables_initializer())
for step in range(2001):
cost_val,hy_val,_=sess.run([cost,hypothesis,train],
feed_dict={x1:x1_data,x2:x2_data,x3:x3_data,Y:y_data})
if step%10==0:
print(step,"Cost:",cost_val,"\nPrediction:\n",hy_val)
| 32.129032
| 85
| 0.712851
| 170
| 996
| 4.047059
| 0.458824
| 0.075581
| 0.087209
| 0.127907
| 0.188953
| 0.188953
| 0.188953
| 0.104651
| 0
| 0
| 0
| 0.09989
| 0.085341
| 996
| 31
| 86
| 32.129032
| 0.655324
| 0.03012
| 0
| 0
| 0
| 0
| 0.046681
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.041667
| 0.041667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95e18e6281085104769aa15c1a8ef9828b449526
| 1,759
| py
| Python
| train_model.py
| sanjjayrj/Chatbot-NLTK
| 2000a3c640d6624984ca4ad2457557e937d4ae05
| ["MIT"] | 3
| 2020-11-17T12:14:37.000Z
| 2021-08-14T05:46:38.000Z
| train_model.py
| sanjjayrj/Chatbot-NLTK
| 2000a3c640d6624984ca4ad2457557e937d4ae05
| ["MIT"] | null | null | null
| train_model.py
| sanjjayrj/Chatbot-NLTK
| 2000a3c640d6624984ca4ad2457557e937d4ae05
| ["MIT"] | null | null | null |
import pandas as pd
import nltk
import re
from nltk.stem import wordnet
from nltk import pos_tag
from nltk import word_tokenize
from datetime import datetime
data = pd.read_csv('traindata.csv', encoding='utf-8')
train_counter = 0
def text_normalize(text):
global train_counter
if train_counter % 10000 == 0:
print(str(train_counter) + " sets lemmatized..., "+"Time now: " + str(datetime.now()))
train_counter += 1
text = str(text).lower()
spl_char_text = re.sub(r'[^ a-z]', '', text)
tokens = nltk.word_tokenize(spl_char_text)
lema = wordnet.WordNetLemmatizer()
tags_list = pos_tag(tokens, tagset = None)
lema_words = []
for token, pos_token in tags_list:
if pos_token.startswith('V'):
pos_value = 'v'
elif pos_token.startswith('J'):
pos_value = 'a'
elif pos_token.startswith('R'):
pos_value = 'r'
else:
pos_value = 'n'
lema_token = lema.lemmatize(token, pos_value)
lema_words.append(lema_token)
return " ".join(lema_words)
if __name__ == '__main__':
print("Time now: " + str(datetime.now()))
print(data.info())
print("\nData Imported...")
print("----------------------------------------------------------------------------------------------------------")
data['lemmatized text'] = data['Content'].apply(text_normalize)
print("Training Data Lemmatized..., Time now: " + str(datetime.now()))
data.to_csv('traindata.csv', encoding='utf-8', index = False)
print(data['lemmatized text'])
print(type(data['lemmatized text']))
print("\nTraining data...")
print("----------------------------------------------------------------------------------------------------------")
| 37.425532
| 119
| 0.557703
| 204
| 1,759
| 4.607843
| 0.387255
| 0.06383
| 0.031915
| 0.057447
| 0.145745
| 0.123404
| 0
| 0
| 0
| 0
| 0
| 0.007052
| 0.19386
| 1,759
| 47
| 120
| 37.425532
| 0.655853
| 0
| 0
| 0.045455
| 0
| 0
| 0.249432
| 0.120455
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022727
| false
| 0
| 0.181818
| 0
| 0.227273
| 0.227273
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95e2b38a9c011b08bb379e05752137d534a0a8a9
| 1,848
| py
| Python
| tensor_twister/server.py
| iamorphen/tensor_twister
| d7936efa50cf0f7f3950ff4cbb0dd3fbac310ca9
| ["MIT"] | null | null | null
| tensor_twister/server.py
| iamorphen/tensor_twister
| d7936efa50cf0f7f3950ff4cbb0dd3fbac310ca9
| ["MIT"] | null | null | null
| tensor_twister/server.py
| iamorphen/tensor_twister
| d7936efa50cf0f7f3950ff4cbb0dd3fbac310ca9
| ["MIT"] | null | null | null |
import io
import logging
import queue
from collections import namedtuple
import torch
import zmq
from tensor_twister.status_codes import StatusCode
UnpackedMessage = namedtuple("UnpackedMessage", ["tensor", "name", "ip"])
def serve(host: str, port: int):
"""
Listen for incoming tensor data from clients. Print comparisons between
pairs of tensor data.
Args:
host (str): The hostname to listen on; for example "localhost"
port (int): The port to listen on; for example 5555
"""
logger = logging.getLogger(__name__)
logger.debug("libzmq version: %s", zmq.zmq_version())
logger.debug(" pyzmq version: %s", zmq.__version__)
tensor_queue = queue.Queue()
context = zmq.Context()
socket = context.socket(zmq.REP)
server_uri = f"tcp://{host}:{port}"
logger.info("Attempting to listen on %s.", server_uri)
socket.bind(server_uri)
logger.info("Listening on %s.", server_uri)
while True:
# Get the next message, blocking.
message = socket.recv_pyobj()
try:
tensor = torch.load(message.tensor)
except Exception:
socket.send_pyobj(StatusCode.TensorLoadFailure)
continue
tensor_queue.put(UnpackedMessage(tensor, message.name, message.ip))
socket.send_pyobj(StatusCode.OK)
# If the queue has at least 2 messages, compare the first 2.
if tensor_queue.qsize() >= 2:
m1 = tensor_queue.get()
m2 = tensor_queue.get()
print(f"{m1.name}@{m1.ip}: tensor min: {m1.tensor.min()}; max: {m1.tensor.max()}; mean: {m1.tensor.mean()}")
print(f"{m2.name}@{m2.ip}: tensor min: {m2.tensor.min()}; max: {m2.tensor.max()}; mean: {m2.tensor.mean()}")
print(f"t1 and t2 are {'' if (m1.tensor == m2.tensor).all() else 'not'} equal")
| 33
| 120
| 0.635281
| 242
| 1,848
| 4.756198
| 0.404959
| 0.047785
| 0.026064
| 0.022589
| 0.034752
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016186
| 0.231061
| 1,848
| 55
| 121
| 33.6
| 0.793807
| 0.170455
| 0
| 0
| 0
| 0.088235
| 0.26
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029412
| false
| 0
| 0.205882
| 0
| 0.235294
| 0.088235
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95e39518b618f5551cfe1c882c8f307a7a86e276
| 6,744
| py
| Python
| optunity/solvers/CMAES.py
| xrounder/optunity
| 019182ca83fe2002083cc1ac938510cb967fd2c9
| ["BSD-3-Clause"] | 401
| 2015-01-08T00:56:20.000Z
| 2022-03-19T09:07:12.000Z
| optunity/solvers/CMAES.py
| xrounder/optunity
| 019182ca83fe2002083cc1ac938510cb967fd2c9
| ["BSD-3-Clause"] | 67
| 2015-01-08T09:13:20.000Z
| 2022-01-05T23:26:36.000Z
| optunity/solvers/CMAES.py
| xrounder/optunity
| 019182ca83fe2002083cc1ac938510cb967fd2c9
| ["BSD-3-Clause"] | 94
| 2015-02-04T08:35:56.000Z
| 2021-10-03T12:40:35.000Z |
#! /usr/bin/env python
# Copyright (c) 2014 KU Leuven, ESAT-STADIUS
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither name of copyright holders nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import math
import functools
from .solver_registry import register_solver
from .util import Solver, _copydoc
from . import util
_numpy_available = True
try:
import numpy as np
except ImportError:
_numpy_available = False
_deap_available = True
try:
import deap
import deap.creator
import deap.base
import deap.tools
import deap.cma
import deap.algorithms
except ImportError:
_deap_available = False
except TypeError:
# this can happen because DEAP is in Python 2
# install needs to take proper care of converting
# 2 to 3 when necessary
_deap_available = False
class CMA_ES(Solver):
"""
.. include:: /global.rst
Please refer to |cmaes| for details about this algorithm.
This solver uses an implementation available in the DEAP library [DEAP2012]_.
.. warning:: This solver has dependencies on DEAP_ and NumPy_
and will be unavailable if these are not met.
.. _DEAP: https://code.google.com/p/deap/
.. _NumPy: http://www.numpy.org
"""
def __init__(self, num_generations, sigma=1.0, Lambda=None, **kwargs):
"""blah
.. warning:: |warning-unconstrained|
"""
if not _deap_available:
raise ImportError('This solver requires DEAP but it is missing.')
if not _numpy_available:
raise ImportError('This solver requires NumPy but it is missing.')
self._num_generations = num_generations
self._start = kwargs
self._sigma = sigma
self._lambda = Lambda
@staticmethod
def suggest_from_seed(num_evals, **kwargs):
"""Verify that we can effectively make a solver.
The doctest has to be skipped from automated builds, because DEAP may not be available
and yet we want documentation to be generated.
>>> s = CMA_ES.suggest_from_seed(30, x=1.0, y=-1.0, z=2.0)
>>> solver = CMA_ES(**s) #doctest:+SKIP
"""
fertility = 4 + 3 * math.log(len(kwargs))
d = dict(kwargs)
d['num_generations'] = int(math.ceil(float(num_evals) / fertility))
# num_gen is overestimated
# this will require slightly more function evaluations than permitted by num_evals
return d
@property
def num_generations(self):
return self._num_generations
@property
def start(self):
"""Returns the starting point for CMA-ES."""
return self._start
@property
def lambda_(self):
return self._lambda
@property
def sigma(self):
return self._sigma
@_copydoc(Solver.optimize)
def optimize(self, f, maximize=True, pmap=map):
toolbox = deap.base.Toolbox()
if maximize:
fit = 1.0
else:
fit = -1.0
deap.creator.create("FitnessMax", deap.base.Fitness,
weights=(fit,))
Fit = deap.creator.FitnessMax
deap.creator.create("Individual", list,
fitness=Fit)
Individual = deap.creator.Individual
if self.lambda_:
strategy = deap.cma.Strategy(centroid=list(self.start.values()),
sigma=self.sigma, lambda_=self.lambda_)
else:
strategy = deap.cma.Strategy(centroid=list(self.start.values()),
sigma=self.sigma)
toolbox.register("generate", strategy.generate, Individual)
toolbox.register("update", strategy.update)
@functools.wraps(f)
def evaluate(individual):
return (util.score(f(**dict([(k, v)
for k, v in zip(self.start.keys(),
individual)]))),)
toolbox.register("evaluate", evaluate)
toolbox.register("map", pmap)
hof = deap.tools.HallOfFame(1)
deap.algorithms.eaGenerateUpdate(toolbox=toolbox,
ngen=self._num_generations,
halloffame=hof, verbose=False)
return dict([(k, v)
for k, v in zip(self.start.keys(), hof[0])]), None
# CMA_ES solver requires deap > 1.0.1
# http://deap.readthedocs.org/en/latest/examples/cmaes.html
if _deap_available and _numpy_available:
CMA_ES = register_solver('cma-es', 'covariance matrix adaptation evolutionary strategy',
['CMA-ES: covariance matrix adaptation evolutionary strategy',
' ',
'This method requires the following parameters:',
'- num_generations :: number of generations to use',
'- sigma :: (optional) initial covariance, default 1',
'- Lambda :: (optional) measure of reproducibility',
'- starting point: through kwargs'
' ',
'This method is described in detail in:',
'Hansen and Ostermeier, 2001. Completely Derandomized Self-Adaptation in Evolution Strategies. Evolutionary Computation'
])(CMA_ES)
| 36.852459
| 144
| 0.631821
| 806
| 6,744
| 5.203474
| 0.387097
| 0.01073
| 0.017167
| 0.010968
| 0.133047
| 0.121602
| 0.101097
| 0.076776
| 0.076776
| 0.076776
| 0
| 0.008307
| 0.286032
| 6,744
| 182
| 145
| 37.054945
| 0.862721
| 0.375297
| 0
| 0.164948
| 0
| 0
| 0.159567
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082474
| false
| 0
| 0.164948
| 0.041237
| 0.329897
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95e555ee7266bd7c5e0f103c5c42eba12b36c67d
| 622
| py
| Python
| DailyCoding/11.py
| jason71319jason/Interview-solved
| 42ca93a68475952753d185c325cb55c79e2e55e1
| ["MIT"] | 46
| 2019-10-14T01:21:35.000Z
| 2022-01-08T23:55:15.000Z
| DailyCoding/11.py
| jason71319jason/Interview-solved
| 42ca93a68475952753d185c325cb55c79e2e55e1
| ["MIT"] | 53
| 2019-10-03T17:16:43.000Z
| 2020-12-08T12:48:19.000Z
| DailyCoding/11.py
| jason71319jason/Interview-solved
| 42ca93a68475952753d185c325cb55c79e2e55e1
| ["MIT"] | 96
| 2019-10-03T18:12:10.000Z
| 2021-03-14T19:41:06.000Z |
"""
This problem was asked by Twitter.
Implement an autocomplete system. That is, given a query string s and a set of all possible query strings, return all strings in the set that have s as a prefix.
For example, given the query string de and the set of strings [dog, deer, deal], return [deer, deal].
Hint: Try preprocessing the dictionary into a more efficient data structure to speed up queries.
"""
def autocomplete_bruteforce(words, s):
result = []
for word in words:
# match on prefix, not substring: 'de' should match 'deer' but not e.g. 'code'
if word.startswith(s):
result.append(word)
return result
print(autocomplete_bruteforce(['dog','deer','deal'], 'de'))
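The docstring's hint about preprocessing can be realized with a trie: build it once, then each query walks down by the prefix and collects every word in that subtree. A sketch (the class and function names are my own):
class TrieNode:
    def __init__(self):
        self.children = {}
        self.is_word = False

def build_trie(words):
    root = TrieNode()
    for word in words:
        node = root
        for ch in word:
            node = node.children.setdefault(ch, TrieNode())
        node.is_word = True
    return root

def autocomplete_trie(root, s):
    # walk down to the node for prefix s, then collect every word below it
    node = root
    for ch in s:
        if ch not in node.children:
            return []
        node = node.children[ch]
    result = []
    def collect(n, prefix):
        if n.is_word:
            result.append(prefix)
        for ch, child in n.children.items():
            collect(child, prefix + ch)
    collect(node, s)
    return result

print(autocomplete_trie(build_trie(['dog', 'deer', 'deal']), 'de'))  # ['deer', 'deal'] (order may vary)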
| 28.272727
| 161
| 0.705788
| 96
| 622
| 4.552083
| 0.583333
| 0.05492
| 0.050343
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210611
| 622
| 22
| 162
| 28.272727
| 0.89002
| 0.639871
| 0
| 0
| 0
| 0
| 0.060465
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0
| 0
| 0.285714
| 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95e79ef92334e9854cdc295c02dc16e232f812ed
| 4,974
| py
| Python
| pyblnet/blnet_parser.py
| henfri/pyblnet
| 0a3a59ea39ab569d4b59be5a918736dc238bcf13
| ["MIT"] | 3
| 2019-03-11T12:38:43.000Z
| 2022-02-18T21:40:54.000Z
| pyblnet/blnet_parser.py
| henfri/pyblnet
| 0a3a59ea39ab569d4b59be5a918736dc238bcf13
| ["MIT"] | 26
| 2018-10-15T10:57:21.000Z
| 2021-03-23T18:35:06.000Z
| pyblnet/blnet_parser.py
| henfri/pyblnet
| 0a3a59ea39ab569d4b59be5a918736dc238bcf13
| ["MIT"] | 7
| 2018-10-03T09:39:30.000Z
| 2020-03-12T19:44:44.000Z |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on 09.08.2018
This is basically a python port of of a script by berwinter
https://github.com/berwinter/uvr1611/blob/master/lib/backend/blnet-connection.inc.php
author: Niels
"""
import struct
from datetime import datetime
# Parser constant
# 1 bit
DIGITAL_ON = 1
DIGITAL_OFF = 0
# 8 bit
SPEED_ACTIVE = 0x80
SPEED_MASK = 0x1F
# 16 bit
INT16_POSITIVE_MASK = 0xFFFF
SIGN_BIT = 0x8000
POSITIVE_VALUE_MASK = 0x0FFF
TYPE_MASK = 0x7000
TYPE_NONE = 0x0000
TYPE_DIGITAL = 0x1000
TYPE_TEMP = 0x2000
TYPE_VOLUME = 0x3000
TYPE_RADIATION = 0x4000
TYPE_RAS = 0x7000
RAS_POSITIVE_MASK = 0x01FF
# 32 bit
INT32_MASK = 0xFFFFFFFF
INT32_SIGN = 0x80000000
class BLNETParser:
def __init__(self, data):
"""
parse a binary string containing a dataset
Provides access to the values of a dataset as object properties
@param data: byte string
"""
# check if dataset contains time information
# (fetched from bootloader storage)
if len(data) == 61:
(_, seconds, minutes, hours, days, months, years) = struct.unpack(
'<55sBBBBBB', data)
self.date = datetime(2000 + years, months, days, hours, minutes,
seconds)
# Only parse preceding data
data = data[:55]
power = [0, 0]
kWh = [0, 0]
MWh = [0, 0]
(_, digital, speed, active, power[0], kWh[0], MWh[0], power[1], kWh[1],
MWh[1]) = struct.unpack('<32sH4sBLHHLHH', data)
analog = struct.unpack(
'<{}{}'.format('H' * 16, 'x' * (len(data) - 32)), data)
self.analog = {}
for channel in range(0, 16):
self.analog[channel + 1] = round(
self._convert_analog(analog[channel]), 3)
self.digital = {}
for channel in range(0, 16):
self.digital[channel + 1] = self._convert_digital(digital, channel)
self.speed = {}
for channel in range(0, 4):
self.speed[channel + 1] = round(
self._convert_speed(speed[channel]), 3)
self.energy = {}
for channel in range(0, 2):
self.energy[channel + 1] = round(
self._convert_energy(MWh[channel], kWh[channel], active,
channel), 3)
self.power = {}
for channel in range(0, 2):
self.power[channel + 1] = round(
self._convert_power(power[channel], active, channel), 3)
def to_dict(self):
"""
Turn parsed data into parser object
@return dict
"""
return self.__dict__
def _convert_analog(self, value):
"""
Convert int to correct float
@param value: short unsigned int that was returned by blnet
@return float with correct sensor value
"""
mask = value & TYPE_MASK
if mask == TYPE_TEMP:
return self._calculate_value(value, 0.1)
elif mask == TYPE_VOLUME:
return self._calculate_value(value, 4)
elif mask == TYPE_DIGITAL:
if value & SIGN_BIT:
return 1
else:
return 0
elif mask == TYPE_RAS:
return self._calculate_value(value, 0.1, RAS_POSITIVE_MASK)
elif mask in [TYPE_RADIATION, TYPE_NONE] or True:
return self._calculate_value(value)
def _convert_digital(self, value, position):
"""
Check if bit at given position is set (=1)
"""
if value & (0x1 << (position)):
return DIGITAL_ON
else:
return DIGITAL_OFF
def _convert_speed(self, value):
"""
Check if speed is activated and return its value
"""
if value & SPEED_ACTIVE:
return None
else:
return value & SPEED_MASK
def _convert_energy(self, mwh, kwh, active, position):
"""
Check if heat meter is activated on a given position
@return its energy
"""
if active & position:
kwh = self._calculate_value(kwh, 0.1, INT16_POSITIVE_MASK)
return mwh * 1000 + kwh
else:
return None
def _convert_power(self, value, active, position):
"""
checks if heat meter is activated at given position
@return its power
"""
if active & position:
return self._calculate_value(value, 1 / 2560, INT32_MASK,
INT32_SIGN)
else:
return None
def _calculate_value(self,
value,
multiplier=1,
positive_mask=POSITIVE_VALUE_MASK,
signbit=SIGN_BIT):
result = value & positive_mask
if value & signbit:
result = -((result ^ positive_mask) + 1)
return result * multiplier
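To see the masking logic in isolation, here is a standalone sketch of _calculate_value applied to hand-built 16-bit words (the raw values are constructed for illustration, not captured from a device):
# reproduce the parser's sign handling outside the class
POSITIVE_VALUE_MASK = 0x0FFF
SIGN_BIT = 0x8000
TYPE_TEMP = 0x2000

def calculate_value(value, multiplier=1, positive_mask=POSITIVE_VALUE_MASK, signbit=SIGN_BIT):
    result = value & positive_mask
    if value & signbit:
        result = -((result ^ positive_mask) + 1)  # undo two's complement within the mask
    return result * multiplier

print(calculate_value(TYPE_TEMP | 100, 0.1))                # 10.0, i.e. +10.0 degrees
print(calculate_value(SIGN_BIT | TYPE_TEMP | 0x0FFB, 0.1))  # -0.5, two's complement of -5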
| 29.784431
| 85
| 0.559912
| 581
| 4,974
| 4.640275
| 0.299484
| 0.031157
| 0.040059
| 0.031528
| 0.148739
| 0.057864
| 0.057864
| 0
| 0
| 0
| 0
| 0.051432
| 0.347205
| 4,974
| 166
| 86
| 29.963855
| 0.778873
| 0.183957
| 0
| 0.137255
| 0
| 0
| 0.008105
| 0
| 0
| 0
| 0.025359
| 0
| 0
| 1
| 0.078431
| false
| 0
| 0.019608
| 0
| 0.264706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95e8a73a4c141ad9d18c2ea514ffb13b8b700b03
| 3,568
| py
| Python
| app/routers/nodes.py
| yamatteo/vue-fastapi-boilerplate
| 5fa3de29a6e7ec4a8df9b3a4073f462307f62cb6
| ["MIT"] | 2
| 2020-03-11T02:58:44.000Z
| 2020-03-27T16:00:25.000Z
| app/routers/nodes.py
| yamatteo/vue-fastapi-boilerplate
| 5fa3de29a6e7ec4a8df9b3a4073f462307f62cb6
| ["MIT"] | 7
| 2021-03-10T07:59:29.000Z
| 2022-02-26T23:46:17.000Z
| app/routers/nodes.py
| yamatteo/vue-fastapi-boilerplate
| 5fa3de29a6e7ec4a8df9b3a4073f462307f62cb6
| ["MIT"] | 1
| 2020-03-11T02:58:48.000Z
| 2020-03-11T02:58:48.000Z |
from typing import Optional
from typing import List
from fastapi import APIRouter, Depends, Body
from models import User, Content, Node, Group, ExternalContent
from routers import get_current_user, admin_only
from schemas import NodeAdd, NodeEdit, NodeFind
#
router = APIRouter()
@router.post("/push_content")
async def push_content(node_id: str = Body(..., embed=True), content_id: str = Body(..., embed=True),
admin: User = Depends(admin_only)):
assert admin is not None
node = await Node.find_one_and_add_to_set(
find={"id": node_id},
data={"contents": Content.ref(content_id)}
)
return node.export()
@router.post("/pull_content")
async def pull_content(node_id: str = Body(..., embed=True), content_id: str = Body(..., embed=True),
admin: User = Depends(admin_only)):
assert admin is not None
node = await Node.find_one_and_pull(
find={"id": node_id},
data={"contents": Content.ref(content_id)}
)
return node.export()
@router.post("/push_external_content")
async def push_external_content(node_id: str = Body(..., embed=True), external_content_id: str = Body(..., embed=True),
admin: User = Depends(admin_only)):
assert admin is not None
node = await Node.find_one_and_add_to_set(
find={"id": node_id},
data={"external_contents": ExternalContent.ref(external_content_id)}
)
return node.export()
@router.post("/pull_external_content")
async def pull_external_content(node_id: str = Body(..., embed=True), external_content_id: str = Body(..., embed=True),
admin: User = Depends(admin_only)):
assert admin is not None
node = await Node.find_one_and_pull(
find={"id": node_id},
data={"external_contents": ExternalContent.ref(external_content_id)}
)
return node.export()
@router.get("/current")
async def current_nodes(current_user: User = Depends(get_current_user)):
groups = await Group.find({"members": current_user})
nodes_ids = [node.id for group in groups for node in group.nodes]
return [node.export() for node in await Node.find({"id": {"$in": nodes_ids}})]
@router.post("/browse", dependencies=[Depends(admin_only)])
async def browse_nodes(find: NodeFind) -> List[Node]:
return await Node.find(find=find.dict(exclude_unset=True))
@router.post("/read", dependencies=[Depends(admin_only)])
async def read_node(find: NodeFind, with_contents: bool = Body(False), with_other_contents: bool = Body(False)):
node = await Node.find_one(find=find.dict(exclude_unset=True))
node_export = node.dict()
if with_contents:
node_export["contents"] = await Content.find({"id": {"$in": [ content.id for content in node.contents ]}})
if with_other_contents:
node_export["other_contents"] = await Content.find({"id": {"$nin": [ content.id for content in node.contents ]}})
return node_export
@router.post("/edit", dependencies=[Depends(admin_only)])
async def edit_node(find: NodeFind, data: NodeEdit):
print("find", find)
print("data", data)
return await Node.find_one_and_set(find=find.dict(exclude_unset=True), data=data.dict(exclude_unset=True))
@router.post("/add", dependencies=[Depends(admin_only)])
async def add_node(data: NodeAdd):
return await Node.insert_one(data=data.dict(exclude_unset=True))
@router.post("/delete", dependencies=[Depends(admin_only)])
async def delete_node(find: NodeFind):
return await Node.delete_one(find=find.dict(exclude_unset=True))
| 37.166667
| 121
| 0.690583
| 492
| 3,568
| 4.810976
| 0.148374
| 0.037178
| 0.060837
| 0.047317
| 0.606675
| 0.558513
| 0.454584
| 0.400507
| 0.363329
| 0.363329
| 0
| 0
| 0.169843
| 3,568
| 95
| 122
| 37.557895
| 0.799122
| 0
| 0
| 0.338028
| 0
| 0
| 0.060835
| 0.012335
| 0
| 0
| 0
| 0
| 0.056338
| 1
| 0
| false
| 0
| 0.084507
| 0
| 0.225352
| 0.028169
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95ea2d544465e77e80dcc38902724b81ddc4c5b9
| 2,427
| py
| Python
| Algebra/vector.py
| jonasjungaker/VectorsAlgebra
| 1b064b4328b7eb6a3c7a1c50b29e6df042309ca5
| ["MIT"] | null | null | null
| Algebra/vector.py
| jonasjungaker/VectorsAlgebra
| 1b064b4328b7eb6a3c7a1c50b29e6df042309ca5
| ["MIT"] | null | null | null
| Algebra/vector.py
| jonasjungaker/VectorsAlgebra
| 1b064b4328b7eb6a3c7a1c50b29e6df042309ca5
| ["MIT"] | null | null | null |
class vector:
def __init__(self, *vals):
self.x = list(vals)
for val in vals:
float(val)
self.dimension = len(self.x)
def __getitem__(self, key):
return self.x[key]
def __setitem__(self, key, value):
self.x[key] = value
return self
def __add__(self, other):
if isinstance(other, (int, float)): # was type(other) == type(int), which never matched; this also covers floats
for i in range(self.dimension):
self[i] += other
return self
self._checkDimension(other)
newx = []
for i in range(self.dimension):
newx.append(self[i] + other[i])
return vector(*newx)
def __eq__(self, other):
if self.dimension != other.dimension:
return False
for i in range(self.dimension):
if self[i] != other[i]:
return False
return True
def __mul__(self, other):
if isinstance(other, (int, float)): # was type(other) == type(int), which never matched
x = []
for i in range(self.dimension):
x.append(self[i] * other)
return vector(*x)
self._checkDimension(other)
value = 0
for i in range(self.dimension):
value += self[i] * other[i]
return value
def __rmul__(self, other):
return self * other
def __matmul__(self, other):
if self.dimension != 3 or other.dimension != 3: # the chained != skipped the check whenever the dimensions matched
raise TypeError("Vector dimensions must be 3")
v = vector(0, 0, 0)
v[0] = (self[1] * other[2]) - (self[2] * other[1])
v[1] = (self[2] * other[0]) - (self[0] * other[2])
v[2] = (self[0] * other[1]) - (self[1] * other[0])
return v
def __sub__(self, other):
return self + ( - other)
def __neg__(self):
v = []
for i in range(self.dimension): # was range(self), which raises TypeError
v.append( - self[i])
return vector(*v)
def __abs__(self):
value = self.magnitude()
return value**0.5
def _checkDimension(self, other):
if self.dimension != other.dimension:
raise TypeError("Vector dimensions must agree")
def magnitude(self):
# Returns the value of the sum of all values of the vector squared
powerMagnitude = 0
for a in self.x:
powerMagnitude += a*a
return powerMagnitude
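A short exercise of the class (after the scalar-check fixes above), showing addition, dot product, cross product, and magnitude:
a = vector(1, 2, 3)
b = vector(4, 5, 6)

print((a + b).x)  # [5, 7, 9]
print(a * b)      # 32, the dot product
print((a @ b).x)  # [-3, 6, -3], the cross product
print(abs(a))     # ~3.742, sqrt(1 + 4 + 9)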
| 29.240964
| 87
| 0.510507
| 292
| 2,427
| 4.082192
| 0.232877
| 0.098154
| 0.030201
| 0.055369
| 0.399329
| 0.286913
| 0.14094
| 0
| 0
| 0
| 0
| 0.015852
| 0.376185
| 2,427
| 82
| 88
| 29.597561
| 0.771466
| 0.046148
| 0
| 0.223881
| 0
| 0
| 0.024664
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.19403
| false
| 0
| 0
| 0.044776
| 0.432836
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95ed4a727fcf9707dcfd7fa3fc1e4e7848fbb44c
| 992
| py
| Python
| neodroidagent/common/session_factory/vertical/procedures/training/sampling/rollout.py
| gitter-badger/agent
| 3f53eaa7ebdee3ab423c7b58785d584fe1a6ae11
| ["Apache-2.0"] | 8
| 2017-09-13T08:28:44.000Z
| 2022-01-21T15:59:19.000Z
| neodroidagent/common/session_factory/vertical/procedures/training/sampling/rollout.py
| gitter-badger/agent
| 3f53eaa7ebdee3ab423c7b58785d584fe1a6ae11
| ["Apache-2.0"] | 4
| 2019-03-22T13:49:16.000Z
| 2019-03-25T13:49:39.000Z
| neodroidagent/common/session_factory/vertical/procedures/training/sampling/rollout.py
| gitter-badger/agent
| 3f53eaa7ebdee3ab423c7b58785d584fe1a6ae11
| ["Apache-2.0"] | 3
| 2017-09-13T08:31:38.000Z
| 2021-11-09T11:22:27.000Z |
from itertools import count
from tqdm import tqdm
from neodroid.environments.droid_environment import VectorUnityEnvironment
def run(self, environment: VectorUnityEnvironment, render: bool = True) -> None:
state = environment.reset().observables
F = count(1)
F = tqdm(F, leave=False, disable=not render)
for frame_i in F:
F.set_description(f"Frame {frame_i}")
action, *_ = self.sample(state, deterministic=True)
state, signal, terminated, info = environment.react(action, render=render)
if terminated.all():
state = environment.reset().observables
def infer(self, env, render=True):
for episode_i in count(1):
print(f"Episode {episode_i}")
state = env.reset()
for frame_i in count(1):
action, *_ = self.sample(state)
state, signal, terminated, info = env.act(action)
if render:
env.render()
if terminated:
break
| 26.810811
| 82
| 0.626008
| 117
| 992
| 5.230769
| 0.401709
| 0.029412
| 0.068627
| 0.104575
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004155
| 0.272177
| 992
| 36
| 83
| 27.555556
| 0.84349
| 0
| 0
| 0.083333
| 0
| 0
| 0.034274
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.125
| 0
| 0.208333
| 0.041667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95eef20a68a045c35b991c4b9eef565e70a03766
| 17,995
| py
| Python
| sysevr/slicer/mapping.py
| Saleh-Ibtasham/VulScrape
| 738d17e9dd7e5edc2341d106361651fd28f99c61
| ["PostgreSQL", "Unlicense", "MIT"] | 1
| 2021-04-12T12:59:33.000Z
| 2021-04-12T12:59:33.000Z
| sysevr/slicer/mapping.py
| Jokers-grin/VulScrape
| 738d17e9dd7e5edc2341d106361651fd28f99c61
| ["PostgreSQL", "Unlicense", "MIT"] | null | null | null
| sysevr/slicer/mapping.py
| Jokers-grin/VulScrape
| 738d17e9dd7e5edc2341d106361651fd28f99c61
| ["PostgreSQL", "Unlicense", "MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import re
import copy
import os
import string
import xlrd
import pickle
from .get_tokens import *
keywords_0 = ('auto', 'typedef', 'const', 'extern', 'register', 'static', 'volatile', 'continue', 'break',  # 'typedf' corrected to 'typedef'
'default', 'return', 'goto', 'else', 'case')
keywords_1 = ('catch', 'sizeof', 'if', 'switch', 'while', 'for')
keywords_2 = ('memcpy', 'wmemcpy', '_memccpy', 'memmove', 'wmemmove', 'memset', 'wmemset', 'memcmp', 'wmemcmp', 'memchr',
'wmemchr', 'strncpy', 'lstrcpyn', 'wcsncpy', 'strncat', 'bcopy', 'cin', 'strcpy', 'lstrcpy', 'wcscpy', '_tcscpy',
'_mbscpy', 'CopyMemory', 'strcat', 'lstrcat', 'fgets', 'main', '_main', '_tmain', 'Winmain', 'AfxWinMain', 'getchar',
'getc', 'getch', 'getche', 'kbhit', 'stdin', 'm_lpCmdLine', 'getdlgtext', 'getpass', 'istream.get', 'istream.getline',
'istream.peek', 'istream.putback', 'streambuf.sbumpc', 'streambuf.sgetc', 'streambuf.sgetn', 'streambuf.snextc', 'streambuf.sputbackc',
'SendMessage', 'SendMessageCallback', 'SendNotifyMessage', 'PostMessage', 'PostThreadMessage', 'recv', 'recvfrom', 'Receive',
'ReceiveFrom', 'ReceiveFromEx', 'CEdit.GetLine', 'CHtmlEditCtrl.GetDHtmlDocument', 'CListBox.GetText', 'CListCtrl.GetItemText',
'CRichEditCtrl.GetLine', 'GetDlgItemText', 'CCheckListBox.GetCheck', 'DISP_FUNCTION', 'DISP_PROPERTY_EX', 'getenv', 'getenv_s', '_wgetenv',
'_wgetenv_s', 'snprintf', 'vsnprintf', 'scanf', 'sscanf', 'catgets', 'gets', 'fscanf', 'vscanf', 'vfscanf', 'printf', 'vprintf', 'CString.Format',
'CString.FormatV', 'CString.FormatMessage', 'CStringT.Format', 'CStringT.FormatV', 'CStringT.FormatMessage', 'CStringT.FormatMessageV',
'vsprintf', 'asprintf', 'vasprintf', 'fprintf', 'sprintf', 'syslog', 'swscanf', 'sscanf_s', 'swscanf_s', 'swprintf', 'malloc',
'readlink', 'lstrlen', 'strchr', 'strcmp', 'strcoll', 'strcspn', 'strerror', 'strlen', 'strpbrk', 'strrchr', 'strspn', 'strstr',
'strtok', 'strxfrm', 'kfree', '_alloca')
keywords_3 = ('_strncpy*', '_tcsncpy*', '_mbsnbcpy*', '_wcsncpy*', '_strncat*', '_mbsncat*', 'wcsncat*', 'CEdit.Get*', 'CRichEditCtrl.Get*',
'CComboBox.Get*', 'GetWindowText*', 'istream.read*', 'Socket.Receive*', 'DDX_*', '_snprintf*', '_snwprintf*')
keywords_5 = ('*malloc',)
xread = xlrd.open_workbook('./sysevr/ml_models/function.xls')
keywords_4 = []
for sheet in xread.sheets():
col = sheet.col_values(0)[1:]
keywords_4 += col
#print keywords_4
typewords_0 = ('short', 'int', 'long', 'float', 'double', 'char', 'unsigned', 'signed', 'void', 'wchar_t', 'size_t', 'bool')  # 'doubule' corrected to 'double'
typewords_1 = ('struct', 'union', 'enum')
typewords_2 = ('new', 'delete')
operators = ('+', '-', '*', '/', '=', '%', '?', ':', '!=', '==', '<<', '&&', '||', '+=', '-=', '++', '--', '>>', '|=')
function = '^[_a-zA-Z][_a-zA-Z0-9]*$'
variable = '^[_a-zA-Z][_a-zA-Z0-9(->)?(\.)?]*$'
number = '[0-9]+'
stringConst = '(^\'[\s|\S]*\'$)|(^"[\s|\S]*"$)'
constValue = ['NULL', 'false', 'true']
phla = '[^a-zA-Z0-9_]'
space = '\s'
spa = ''
def isinKeyword_3(token):
for key in keywords_3:
if len(token) < len(key)-1:
return False
if key[:-1] == token[:len(key)-1]:
return True
else:
return False
def isinKeyword_5(token):
for key in keywords_5:
if len(token) < len(key)-1:
return False
if token.find(key[1:]) != -1:
if "_" in token:
return False
else:
return True
else:
return False
def isphor(s, liter):
m = re.search(liter, s)
if m is not None:
return True
else:
return False
def var(s):
m = re.match(function, s)
if m is not None:
return True
else:
return False
def CreateVariable(string, token):
length = len(string)
stack1 = []
s = ''
i = 0
while (i < length):
if var(string[i]):
#if i + 1 < length and (string[i + 1] == '->' or string[i + 1] == '.'):
# stack1.append(string[i])
# stack1.append(string[i + 1])
# i = i + 2
#else:
while stack1 != []:
s = stack1.pop() + s
s = s + string[i]
token.append(s)
s = ''
i = i + 1
else:
token.append(string[i])
i = i + 1
def mapping(list_sentence):
list_code = []
list_func = []
for code in list_sentence:
#print code
_string = ''
for c in code:
_string = _string + ' ' + c
_string = _string[1:]
list_code.append(_string)
#print list_code
_func_dict = {}
_variable_dict = {}
index = 0
while index < len(list_code):
string = []
token = []
j = 0
str1 = copy.copy(list_code[index])
i = 0
tag = 0
strtemp = ''
while i < len(str1):
if tag == 0:
if isphor(str1[i], space):
if i > 0:
string.append(str1[j:i])
j = i + 1
else:
j = i + 1
i = i + 1
elif i + 1 == len(str1):
string.append(str1[j:i + 1])
break
elif isphor(str1[i], phla):
# the twelve near-identical two-character operator branches collapsed into one membership test
if i + 1 < len(str1) and str1[i] + str1[i + 1] in ('->', '<<', '>>', '&&', '||', '|=', '==', '!=', '++', '--', '+=', '-='):
string.append(str1[i] + str1[i + 1])
j = i + 2
i = i + 2
elif str1[i] == '"':
strtemp = strtemp + str1[i]
i = i + 1
tag = 1
elif str1[i] == '\'':
strtemp = strtemp + str1[i]
i = i + 1
tag = 2
else:
string.append(str1[i])
j = i + 1
i += 1
else:
i += 1
elif tag == 1:
if str1[i] != '"':
strtemp = strtemp + str1[i]
i = i + 1
else:
strtemp = strtemp + str1[i]
string.append(strtemp)
strtemp = ''
tag = 0
j = i + 1
i += 1
elif tag == 2:
if str1[i] != '\'':
strtemp = strtemp + str1[i]
i = i + 1
else:
strtemp = strtemp + str1[i]
string.append(strtemp)
strtemp = ''
tag = 0
j = i + 1
i += 1
count = 0
for sub in string:
if sub == spa:
count += 1
for i in range(count):
string.remove('')
CreateVariable(string, token)
j = 0
while j < len(token):
if token[j] in constValue:
token[j] = token[j]
j += 1
elif j < len(token) and isphor(token[j], variable):
if (token[j] in keywords_0) or (token[j] in typewords_0) or (token[j] in typewords_1 or token[j] in typewords_2):
j += 1
elif j - 1 >= 0 and j + 1 < len(token) and token[j-1] == 'new' and token[j + 1] == '[':
j = j + 2
elif j + 1 < len(token) and token[j + 1] == '(':
#print(token[j])
if token[j] in keywords_1:
j = j + 2
elif token[j] in keywords_2:
#print('3', token[j])
j = j + 2
elif isinKeyword_3(token[j]):
#print('4', token[j])
j = j + 2
elif token[j] in keywords_4:
#print('5', token[j])
j = j + 2
elif isinKeyword_5(token[j]):
#print('6', token[j])
j = j + 2
else:
#print('7',token[j])
if "good" in token[j] or "bad" in token[j]:
list_func.append(str(token[j]))
if token[j] in _func_dict.keys():
token[j] = _func_dict[token[j]]
else:
list_values = _func_dict.values()
if len(list_values) == 0:
_func_dict[token[j]] = 'func_0'
token[j] = _func_dict[token[j]]
else:
if token[j] in _func_dict.keys():
token[j] = _func_dict[token[j]]
else:
list_num = []
for value in list_values:
list_num.append(int(value.split('_')[-1]))
_max = max(list_num)
_func_dict[token[j]] = 'func_' + str(_max+1)
token[j] = _func_dict[token[j]]
j = j + 2
elif j + 1 < len(token) and (not isphor(token[j + 1], variable)):
if token[j + 1] == '*':
if j + 2 < len(token) and token[j + 2] == 'const':
j = j + 3
elif j - 1 >= 0 and token[j - 1] == 'const':
j = j + 2
elif j - 1 > 0 and (token[j - 1] in operators):
list_values = _variable_dict.values()
if len(list_values) == 0:
_variable_dict[token[j]] = 'variable_0'
token[j] = _variable_dict[token[j]]
else:
if token[j] in _variable_dict.keys():
token[j] = _variable_dict[token[j]]
else:
list_num = []
for value in list_values:
list_num.append(int(value.split('_')[-1]))
_max = max(list_num)
_variable_dict[token[j]] = 'variable_' + str(_max+1)
token[j] = _variable_dict[token[j]]
j = j + 2
elif j + 2 < len(token) and token[j + 2] == ')':
j = j + 2
elif j - 2 > 0 and (token[j - 1] == '(' and token[j - 2] in operators):
list_values = _variable_dict.values()
if len(list_values) == 0:
_variable_dict[token[j]] = 'variable_0'
token[j] = _variable_dict[token[j]]
else:
if token[j] in _variable_dict.keys():
token[j] = _variable_dict[token[j]]
else:
list_num = []
for value in list_values:
list_num.append(int(value.split('_')[-1]))
_max = max(list_num)
_variable_dict[token[j]] = 'variable_' + str(_max+1)
token[j] = _variable_dict[token[j]]
j = j + 2
else:
list_values = _variable_dict.values()
if len(list_values) == 0:
_variable_dict[token[j]] = 'variable_0'
token[j] = _variable_dict[token[j]]
else:
if token[j] in _variable_dict.keys():
token[j] = _variable_dict[token[j]]
else:
list_num = []
for value in list_values:
list_num.append(int(value.split('_')[-1]))
_max = max(list_num)
_variable_dict[token[j]] = 'variable_' + str(_max+1)
token[j] = _variable_dict[token[j]]
j = j + 2
else:
list_values = _variable_dict.values()
if len(list_values) == 0:
_variable_dict[token[j]] = 'variable_0'
token[j] = _variable_dict[token[j]]
else:
if token[j] in _variable_dict.keys():
token[j] = _variable_dict[token[j]]
else:
list_num = []
for value in list_values:
list_num.append(int(value.split('_')[-1]))
_max = max(list_num)
_variable_dict[token[j]] = 'variable_' + str(_max+1)
token[j] = _variable_dict[token[j]]
j = j + 2
elif j + 1 == len(token):
list_values = _variable_dict.values()
if len(list_values) == 0:
_variable_dict[token[j]] = 'variable_0'
token[j] = _variable_dict[token[j]]
else:
if token[j] in _variable_dict.keys():
token[j] = _variable_dict[token[j]]
else:
list_num = []
for value in list_values:
list_num.append(int(value.split('_')[-1]))
_max = max(list_num)
_variable_dict[token[j]] = 'variable_' + str(_max+1)
token[j] = _variable_dict[token[j]]
break
else:
j += 1
elif j < len(token) and isphor(token[j], number):
j += 1
elif j < len(token) and isphor(token[j], stringConst):
j += 1
else:
j += 1
stemp = ''
i = 0
while i < len(token):
if i == len(token) - 1:
stemp = stemp + token[i]
else:
stemp = stemp + token[i] + ' '
i += 1
list_code[index] = stemp
index += 1
#print list_code
#print _variable_dict
return list_code, list_func
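The variable-renaming block above is repeated five times verbatim; it can be factored into a single helper. A sketch of the equivalent logic (the function name is my own):
def rename_symbol(token_j, mapping_dict, prefix):
    # map a source identifier to a stable placeholder like 'variable_0', 'func_1', ...
    if token_j not in mapping_dict:
        used = [int(v.split('_')[-1]) for v in mapping_dict.values()]
        next_id = max(used) + 1 if used else 0
        mapping_dict[token_j] = '{}_{}'.format(prefix, next_id)
    return mapping_dict[token_j]

# e.g. inside mapping(): token[j] = rename_symbol(token[j], _variable_dict, 'variable')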
| 38.866091
| 160
| 0.373159
| 1,759
| 17,995
| 3.681069
| 0.152359
| 0.087104
| 0.047876
| 0.069498
| 0.537297
| 0.503475
| 0.478919
| 0.462857
| 0.438147
| 0.425328
| 0
| 0.033092
| 0.492859
| 17,995
| 462
| 161
| 38.950216
| 0.676419
| 0.020228
| 0
| 0.581267
| 0
| 0.00551
| 0.103162
| 0.014137
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016529
| false
| 0.002755
| 0.019284
| 0
| 0.068871
| 0.008264
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95ef71f8c3f9102a164ab9d3fc0c343aa7cbaaa5
| 7,035
| py
| Python
| lib/datasets/vrd/to_pascal_format.py
| sx14/open-relation.pytorch
| 3fe52a0c6129a80abbc84df53903d13b7dea05d6
| ["MIT"] | 2
| 2019-04-21T01:45:01.000Z
| 2020-03-11T07:09:18.000Z
| lib/datasets/vrd/to_pascal_format.py
| sx14/open-relation.pytorch
| 3fe52a0c6129a80abbc84df53903d13b7dea05d6
| ["MIT"] | null | null | null
| lib/datasets/vrd/to_pascal_format.py
| sx14/open-relation.pytorch
| 3fe52a0c6129a80abbc84df53903d13b7dea05d6
| ["MIT"] | null | null | null |
import os
import shutil
import xml.dom.minidom
def output_pascal_format(mid_data, output_path):
# mid_data:
# filename
# width
# height
# depth
# objects
# -- xmin
# -- ymin
# -- xmax
# -- ymax
# -- name
# -- pose
# -- truncated
# -- difficult
additional_data = dict()
additional_data['folder'] = 'VOC2007'
additional_data['s_database'] = 'The VOC2007 Database'
additional_data['s_annotation'] = 'PASCAL VOC2007'
additional_data['s_image'] = 'flickr'
additional_data['s_flickrid'] = '123456789'
additional_data['o_flickrid'] = 'Tom'
additional_data['o_name'] = 'Tom'
additional_data['segmented'] = '0'
des_xml_dom = xml.dom.minidom.Document()
# annotation
des_root_node = des_xml_dom.createElement('annotation')
# folder
des_folder_node = des_xml_dom.createElement('folder')
des_folder = des_xml_dom.createTextNode(additional_data['folder'])
des_folder_node.appendChild(des_folder)
des_root_node.appendChild(des_folder_node)
# filename
des_filename_node = des_xml_dom.createElement('filename')
des_filename = des_xml_dom.createTextNode(mid_data['filename'])
des_filename_node.appendChild(des_filename)
des_root_node.appendChild(des_filename_node)
# source
des_dataset_name = des_xml_dom.createTextNode(additional_data['s_database'])
des_dataset_node = des_xml_dom.createElement('database')
des_dataset_node.appendChild(des_dataset_name)
des_annotation = des_xml_dom.createTextNode(additional_data['s_annotation'])
des_annotation_node = des_xml_dom.createElement('annotation')
des_annotation_node.appendChild(des_annotation)
des_image = des_xml_dom.createTextNode(additional_data['s_image'])
des_image_node = des_xml_dom.createElement('image')
des_image_node.appendChild(des_image)
des_flickrid = des_xml_dom.createTextNode(additional_data['s_flickrid'])
des_flickrid_node = des_xml_dom.createElement('flickrid')
des_flickrid_node.appendChild(des_flickrid)
des_source_node = des_xml_dom.createElement('source')
des_source_node.appendChild(des_dataset_node)
des_source_node.appendChild(des_annotation_node)
des_source_node.appendChild(des_image_node)
des_source_node.appendChild(des_flickrid_node)
des_root_node.appendChild(des_source_node)
# owner
des_owner_flickrid = des_xml_dom.createTextNode(additional_data['o_flickrid'])
des_owner_flickrid_node = des_xml_dom.createElement('flickrid')
des_owner_flickrid_node.appendChild(des_owner_flickrid)
des_owner_name = des_xml_dom.createTextNode(additional_data['o_name'])
des_owner_name_node = des_xml_dom.createElement('name')
des_owner_name_node.appendChild(des_owner_name)
des_owner_node = des_xml_dom.createElement('owner')
des_owner_node.appendChild(des_owner_flickrid_node)
des_owner_node.appendChild(des_owner_name_node)
des_root_node.appendChild(des_owner_node)
# size
des_size_node = des_xml_dom.createElement('size')
des_width_node = des_xml_dom.createElement('width')
des_height_node = des_xml_dom.createElement('height')
des_depth_node = des_xml_dom.createElement('depth')
des_width = des_xml_dom.createTextNode(str(mid_data['width']))
des_height = des_xml_dom.createTextNode(str(mid_data['height']))
des_depth = des_xml_dom.createTextNode(str(mid_data['depth']))
des_width_node.appendChild(des_width)
des_height_node.appendChild(des_height)
des_depth_node.appendChild(des_depth)
des_size_node.appendChild(des_width_node)
des_size_node.appendChild(des_height_node)
des_size_node.appendChild(des_depth_node)
des_root_node.appendChild(des_size_node)
# segmented
des_segmented = des_xml_dom.createTextNode(additional_data['segmented'])
des_segmented_node = des_xml_dom.createElement('segmented')
des_segmented_node.appendChild(des_segmented)
des_root_node.appendChild(des_segmented_node)
# object
org_objects = mid_data['objects']
for j in range(0, len(org_objects)):
org_object = org_objects[j]
des_object_node = des_xml_dom.createElement('object')
x_min = int(org_object['xmin'])
y_min = int(org_object['ymin'])
x_max = int(org_object['xmax'])
y_max = int(org_object['ymax'])
# prevent box scale out
# pixel coordinate is 1 based
if x_min <= 0:
org_object['xmin'] = '1'
if y_min <= 0:
org_object['ymin'] = '1'
if y_max > mid_data['height']:
org_object['ymax'] = mid_data['height']
if x_max > mid_data['width']:
org_object['xmax'] = mid_data['width']
# name
des_object_name = des_xml_dom.createTextNode(org_object['name'])
des_object_name_node = des_xml_dom.createElement('name')
des_object_name_node.appendChild(des_object_name)
des_object_node.appendChild(des_object_name_node)
# pose
des_pose = des_xml_dom.createTextNode(org_object['pose'])
des_pose_node = des_xml_dom.createElement('pose')
des_pose_node.appendChild(des_pose)
des_object_node.appendChild(des_pose_node)
# truncated
des_truncated = des_xml_dom.createTextNode(str(org_object['truncated']))
des_truncated_node = des_xml_dom.createElement('truncated')
des_truncated_node.appendChild(des_truncated)
des_object_node.appendChild(des_truncated_node)
# difficult
des_object_difficult = des_xml_dom.createTextNode(str(org_object['difficult']))
des_object_difficult_node = des_xml_dom.createElement('difficult')
des_object_difficult_node.appendChild(des_object_difficult)
des_object_node.appendChild(des_object_difficult_node)
# bndbox
des_xmin_node = des_xml_dom.createElement('xmin')
des_xmin = des_xml_dom.createTextNode(str(org_object['xmin']))
des_xmin_node.appendChild(des_xmin)
des_ymin_node = des_xml_dom.createElement('ymin')
des_ymin = des_xml_dom.createTextNode(str(org_object['ymin']))
des_ymin_node.appendChild(des_ymin)
des_xmax_node = des_xml_dom.createElement('xmax')
des_xmax = des_xml_dom.createTextNode(str(org_object['xmax']))
des_xmax_node.appendChild(des_xmax)
des_ymax_node = des_xml_dom.createElement('ymax')
des_ymax = des_xml_dom.createTextNode(str(org_object['ymax']))
des_ymax_node.appendChild(des_ymax)
des_object_box_node = des_xml_dom.createElement('bndbox')
des_object_box_node.appendChild(des_xmin_node)
des_object_box_node.appendChild(des_ymin_node)
des_object_box_node.appendChild(des_xmax_node)
des_object_box_node.appendChild(des_ymax_node)
des_object_node.appendChild(des_object_box_node)
des_root_node.appendChild(des_object_node)
with open(output_path, 'w') as des_file:
des_root_node.writexml(des_file, addindent='\t', newl='\n')
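Most of the function above repeats the same create-element, create-text-node, append triple; a small helper would shrink it considerably. A sketch (the helper name is my own):
def append_text_element(doc, parent, tag, text):
    # create <tag>text</tag> under parent and return the new element
    node = doc.createElement(tag)
    node.appendChild(doc.createTextNode(str(text)))
    parent.appendChild(node)
    return node

# e.g. the size block becomes:
# size_node = des_xml_dom.createElement('size')
# for tag in ('width', 'height', 'depth'):
#     append_text_element(des_xml_dom, size_node, tag, mid_data[tag])
# des_root_node.appendChild(size_node)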
| 45.980392
| 87
| 0.729495
| 930
| 7,035
| 5.055914
| 0.093548
| 0.062527
| 0.089962
| 0.071884
| 0.529562
| 0.31795
| 0.175883
| 0.034879
| 0
| 0
| 0
| 0.004804
| 0.171429
| 7,035
| 153
| 88
| 45.980392
| 0.801853
| 0.04037
| 0
| 0
| 0
| 0
| 0.074085
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008197
| false
| 0
| 0.02459
| 0
| 0.032787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 95f03d2ec095743360ac14d2a11b057617f86d87
| 4,880
| py
| Python
| stellar/cognition/planning.py
| strfx/stellar
| 41b190eed016d2d6ad8548490a0c9620a02d711e
| ["MIT"] | null | null | null
| stellar/cognition/planning.py
| strfx/stellar
| 41b190eed016d2d6ad8548490a0c9620a02d711e
| ["MIT"] | null | null | null
| stellar/cognition/planning.py
| strfx/stellar
| 41b190eed016d2d6ad8548490a0c9620a02d711e
| ["MIT"] | null | null | null |
"""
Contains path planning logic.
"""
import math
import numpy as np
from heapq import heappush, heappop
def heuristics(a, b):
"""Heuristics function using the Euclidian Distance."""
weight = 1.0
x1, y1 = a
x2, y2 = b
distance = np.sqrt(np.square(x2-x1) + np.square(y2-y1))
# distance = math.hypot(x1 - x2, y1 - y2)
return distance
def motion_model_4():
return [
[1, 0, 1],
[0, 1, 1],
[-1, 0, 1],
[0, -1, 1],
[-1, -1, 1],
[-1, 1, 1],
[1, -1, 1],
[1, 1, 1]
]
class AStarPlanner:
def __init__(self):
pass
def plan(self, occupancy_grid_map, start_node, goal_node):
"""Plans a path through the occupancy grid map.
Args:
occupancy_grid_map: The occupancy grid map.
start_node: Coordinates of the start node.
goal_node: Coordinates of the goal node.
Returns:
A list of coordinates of the planned path or None, if no path
could be constructed.
"""
# Node; Cost to Goal; Node cost, previous node
start_node_costs = 0
node_to_goal = heuristics(start_node, goal_node) + start_node_costs
frontier = [(node_to_goal, start_node_costs, start_node, None)]
visited = []
history = {}
possible_movements = motion_model_4()
# Safety guard (TODO: Remove after DEV)
i = 0
break_if_count_reached = 10000
while frontier and i < break_if_count_reached:  # 'or i >=' inverted the guard and defeated the safety limit
i += 1
element = heappop(frontier)
total_cost, cost, position, previous = element
# If we have already traversed this node (x,y), then skip it
if position in visited:
continue
# Mark this position as visited
visited.append(position)
history[position] = previous
# Have already reached our goal, we can abort.
if position == goal_node:
break
for dx, dy, dcost in possible_movements:
xn = position[0] + dx
yn = position[1] + dy
if xn < 0 or yn < 0:
continue
if (xn, yn) in visited:
continue
if yn >= occupancy_grid_map.shape[0] or xn >= occupancy_grid_map.shape[1]:
continue
# Check if that cell is free!
cell = occupancy_grid_map[yn][xn]
if cell <= 0:
potential_cost = 0 # abs(cell) # * 3
new_cost = cost + dcost + potential_cost
new_total_cost_to_goal = new_cost + \
heuristics((xn, yn), goal_node) + potential_cost
heappush(
frontier, (new_total_cost_to_goal, new_cost, (xn, yn), position))
path = []
while position:
path.append(position)
position = history[position]
return list(reversed(path))
def smoothen(self, occupancy_grid_map, path):
"""Smoothens the planned path.
Utilizes gradient descent to smoothen the path.
"""
from copy import deepcopy
# Create a deep copy of the path
smoothed_path = deepcopy(path)
weight_data = 0.01
weight_smooth = 0.8
tolerance = 0.0000001
smoothed_path = [list(elem) for elem in smoothed_path]
while True:
# Keep track of the total of changes made to check if we
# reached convergence
total_of_changes = 0
for i in range(len(path)):
# Do not smoothen start and endpoint
if i == 0 or i == (len(path) - 1):
continue
for dimension in range(len(path[i])):
previous = smoothed_path[i][dimension]
smoothed_path[i][dimension] = smoothed_path[i][dimension] + \
weight_data * (path[i][dimension] - smoothed_path[i][dimension]) + \
weight_smooth * \
(smoothed_path[i+1][dimension] + smoothed_path[i-1]
[dimension] - 2 * smoothed_path[i][dimension])
total_of_changes += abs(previous -
smoothed_path[i][dimension])
if total_of_changes < tolerance:
break
return smoothed_path
def get_nearest_point(robot, reference_trajectory):
# The original took an unused `aa` argument and referenced an undefined
# `edist` plus a module-level `reference_trajectory`; pass the trajectory
# in explicitly and reuse the Euclidean `heuristics` defined above.
r = (robot.x, robot.y)
a = [heuristics(k, r) for k in list(reference_trajectory)]
i = np.argmin(a)
p1 = reference_trajectory[i]
p2 = reference_trajectory[i + 5]  # 5-point look-ahead; can overrun near the trajectory end
aaa = np.arctan2(p2[1] - p1[1], p2[0] - p1[0])
print(f"l => {aaa:.4f}, {p1}, {p2}")
return reference_trajectory[i]
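A small end-to-end check of the planner on a toy occupancy grid; 0 marks free space and 1 an obstacle, and plan() treats cells <= 0 as free (the grid layout below is illustrative):
import numpy as np

grid = np.zeros((5, 5))
grid[1:4, 2] = 1  # a vertical wall with gaps at the top and bottom rows

planner = AStarPlanner()
path = planner.plan(grid, start_node=(0, 0), goal_node=(4, 4))
print(path)  # a list of (x, y) tuples from start to goal, routed around the wall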
| 28.87574
| 92
| 0.527664
| 582
| 4,880
| 4.269759
| 0.297251
| 0.012072
| 0.015694
| 0.017706
| 0.129577
| 0.075654
| 0.075654
| 0.051509
| 0.005634
| 0.005634
| 0
| 0.030093
| 0.380328
| 4,880
| 168
| 93
| 29.047619
| 0.791667
| 0.182992
| 0
| 0.072165
| 0
| 0
| 0.006724
| 0
| 0
| 0
| 0
| 0.005952
| 0
| 1
| 0.061856
| false
| 0.010309
| 0.041237
| 0.010309
| 0.164948
| 0.010309
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95f29beeb0a5add129f6eb5d02625efa724d1d4e
| 699
|
py
|
Python
|
core/migrations/0008_auto_20151203_1519.py
|
rafaelbantu/timtec
|
86c51b7440a044704ed33c3e752a6cf6b15ceae3
|
[
"BSD-3-Clause"
] | 21
|
2015-09-23T14:07:16.000Z
|
2022-02-18T01:35:18.000Z
|
core/migrations/0008_auto_20151203_1519.py
|
rafaelbantu/timtec
|
86c51b7440a044704ed33c3e752a6cf6b15ceae3
|
[
"BSD-3-Clause"
] | 178
|
2016-05-10T16:16:19.000Z
|
2021-12-15T20:21:21.000Z
|
core/migrations/0008_auto_20151203_1519.py
|
rafaelbantu/timtec
|
86c51b7440a044704ed33c3e752a6cf6b15ceae3
|
[
"BSD-3-Clause"
] | 18
|
2015-10-23T13:28:17.000Z
|
2021-09-22T13:08:28.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20151202_1434'),
]
operations = [
migrations.AddField(
model_name='certificationprocess',
name='active',
field=models.BooleanField(default=True, verbose_name='Active'),
),
migrations.AlterField(
model_name='certificationprocess',
name='course_certification',
field=models.ForeignKey(related_name='processes', verbose_name='Certificate', to='core.CourseCertification', null=True),
),
]
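# Usage note (not part of the original file): like any Django migration, this one
# is applied with the project's manage.py, e.g.:
#   python manage.py migrate core 0008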
| 27.96
| 132
| 0.635193
| 62
| 699
| 6.935484
| 0.66129
| 0.04186
| 0.134884
| 0.153488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032319
| 0.247496
| 699
| 24
| 133
| 29.125
| 0.785171
| 0.030043
| 0
| 0.222222
| 0
| 0
| 0.211538
| 0.069527
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95f38c9fc1b89ab08b48f547bb8603c9adde90bb
| 628
|
py
|
Python
|
hipshare/lib/util.py
|
erg0dic/hipshare
|
993f0edee7e9156b7154d578ef6a4e50cfcdd632
|
[
"BSD-2-Clause"
] | 1
|
2015-11-03T19:33:44.000Z
|
2015-11-03T19:33:44.000Z
|
hipshare/lib/util.py
|
erg0dic/hipshare
|
993f0edee7e9156b7154d578ef6a4e50cfcdd632
|
[
"BSD-2-Clause"
] | 1
|
2015-11-03T19:35:19.000Z
|
2015-11-03T19:35:19.000Z
|
hipshare/lib/util.py
|
erg0dic/hipshare
|
993f0edee7e9156b7154d578ef6a4e50cfcdd632
|
[
"BSD-2-Clause"
] | null | null | null |
import json
import logging
import sys
log = logging.getLogger(__name__)
def die(s):
log.error(s)
sys.exit(-1)
def load_json(path):
try:
fp = open(path)
except OSError as err:
die("Could not open {}: {}".format(path, str(err)))
try:
value = json.load(fp)
except ValueError as err:
die("Invalid JSON in {}: {}".format(path, str(err)))
return value
def load_jsons(*paths):
return [load_json(path) for path in paths]
def merge_dicts(a, b):
c = a.copy()
c.update(b)
return c
def usage():
log.error("usage: hipshare <strategy>")
sys.exit(-1)
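# Usage sketch (not part of the original file; the .json paths are hypothetical):
#   configs = load_jsons("defaults.json", "overrides.json")
#   settings = merge_dicts(*configs)
# Note that load_json() terminates the process via die() on a missing file or
# invalid JSON rather than raising.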
| 17.942857
| 60
| 0.598726
| 93
| 628
| 3.956989
| 0.473118
| 0.043478
| 0.043478
| 0.086957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004274
| 0.254777
| 628
| 34
| 61
| 18.470588
| 0.782051
| 0
| 0
| 0.153846
| 0
| 0
| 0.109873
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.192308
| false
| 0
| 0.115385
| 0.038462
| 0.423077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95f8d504586e0cc5968ca2a0c621d00c07ae2c40
| 2,078
|
py
|
Python
|
p2_mahjong/utils.py
|
yata0/Mahjong
|
764cd607df715b879f3f8a54b6def55e0b7d4706
|
[
"MIT"
] | null | null | null |
p2_mahjong/utils.py
|
yata0/Mahjong
|
764cd607df715b879f3f8a54b6def55e0b7d4706
|
[
"MIT"
] | null | null | null |
p2_mahjong/utils.py
|
yata0/Mahjong
|
764cd607df715b879f3f8a54b6def55e0b7d4706
|
[
"MIT"
] | null | null | null |
# coding=utf-8
import sys
import numpy as np
from p2_mahjong.card import MahjongCard as Card
log_head = "utils.py"
CARD_USED_TYPE = ['characters', 'green', 'red', 'white', 'east', 'west', 'north', 'south',
'spring', 'summer', 'autumn', 'winter', 'mei', 'lan', 'zhu', 'ju']
card_encoding_dict = {}
card_id = 0
DIC_CHOW = {}
character_list = []
wind_list = []
dragon_list = []
card_used = {}
for _type in ['bamboo', 'characters', 'dots']:
for _trait in ['1', '2', '3', '4', '5', '6', '7', '8', '9']:
card = _type+"-"+_trait
card_encoding_dict[card] = card_id
DIC_CHOW[card_id] = 1
if _type in ['characters']:
card_used[card_id] = 1
character_list.append(card_id)
card_id += 1
for _trait in ['green', 'red', 'white']:
card = 'dragons-'+_trait
card_encoding_dict[card] = card_id
if _trait in CARD_USED_TYPE:
card_used[card_id] = 1
dragon_list.append(card_id)
card_id += 1
for _trait in ['east', 'west', 'north', 'south']:
card = 'winds-'+_trait
card_encoding_dict[card] = card_id
if _trait in CARD_USED_TYPE:
card_used[card_id] = 1
wind_list.append(card_id)
card_id += 1
for _trait in ['spring', 'summer', 'autumn', 'winter', 'mei', 'lan', 'zhu', 'ju']:
card = 'flowers-'+_trait
card_encoding_dict[card] = card_id
if _trait in CARD_USED_TYPE:
card_used[card_id] = 1
card_id += 1
card_decoding_dict = {card_encoding_dict[key]: key for key in card_encoding_dict.keys()}
def init_deck(game_id=""):
func_head = "init_deck()" + game_id
deck = []
idx = 0
for card_id in card_decoding_dict:
for _ in range(4):
if card_id not in card_used:
continue
card = Card(runtime_id=idx, card_id=card_id)
card.type = card_decoding_dict[card_id].split("-")[0]
card.trait = card_decoding_dict[card_id].split("-")[1]
deck.append(card)
idx += 1
if card.type == "flowers":
break
return deck
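# Usage sketch (not part of the original file): build a deck and inspect it.
if __name__ == "__main__":
    deck = init_deck()
    print(len(deck))                    # used suits appear 4x, each flower only once
    print(deck[0].type, deck[0].trait)  # first used card, e.g. 'characters' '1'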
| 29.685714
| 90
| 0.590472
| 295
| 2,078
| 3.854237
| 0.267797
| 0.121372
| 0.055409
| 0.087951
| 0.420405
| 0.407212
| 0.359719
| 0.332454
| 0.332454
| 0.263852
| 0
| 0.016894
| 0.259384
| 2,078
| 69
| 91
| 30.115942
| 0.721897
| 0.005775
| 0
| 0.254237
| 0
| 0
| 0.112512
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016949
| false
| 0
| 0.050847
| 0
| 0.084746
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95fa5390eed432169e5e44214698604b6c85fcde
| 1,062
|
py
|
Python
|
Chapter 01/int_sqrt.py
|
bpbpublications/Python-Quick-Interview-Guide
|
ab4ff3e670b116a4db6b9e1f0ccba8424640704d
|
[
"MIT"
] | 1
|
2021-05-14T19:53:41.000Z
|
2021-05-14T19:53:41.000Z
|
Chapter 01/int_sqrt.py
|
bpbpublications/Python-Quick-Interview-Guide
|
ab4ff3e670b116a4db6b9e1f0ccba8424640704d
|
[
"MIT"
] | null | null | null |
Chapter 01/int_sqrt.py
|
bpbpublications/Python-Quick-Interview-Guide
|
ab4ff3e670b116a4db6b9e1f0ccba8424640704d
|
[
"MIT"
] | null | null | null |
class Solution:
def mySqrt(self, x: int) -> int:
# Base cases
if (x == 0 or x == 1):
return x
        # Starting from 1, try all numbers while
        # i*i remains less than x.
        i = 1
        while i * i < x:
            i += 1
return i if i*i == x else i-1
'''
class Solution:
def mySqrt(self,x) :
# Base cases
if (x == 0 or x == 1) :
return x
# Do Binary Search for integer square root
start = 1
end = x
while (start <= end) :
mid = (start + end) // 2
# If x is a perfect square
if (mid*mid == x) :
return mid
# when mid^2 is smaller than x, check if (mid+1)^2 >x
if (mid * mid < x) :
if (mid+1)*(mid+1) > x:return mid
start = mid + 1
else :
# If mid*mid is greater than x
end = mid-1
'''
sol = Solution()
for i in range(1, 10):
    print(i, sol.mySqrt(i))
| 24.136364
| 62
| 0.415254
| 147
| 1,062
| 3
| 0.340136
| 0.056689
| 0.054422
| 0.099773
| 0.231293
| 0.231293
| 0.108844
| 0.108844
| 0.108844
| 0.108844
| 0
| 0.035971
| 0.47646
| 1,062
| 43
| 63
| 24.697674
| 0.757194
| 0.073446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0
| 0
| 0.4
| 0.1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
95ff75475d347ef322808cfa526e253df07b5f81
| 13,517
|
py
|
Python
|
meg_runtime/ui/manager.py
|
MultimediaExtensibleGit/Runtime
|
ba2e469666163177034e44077b02378dfc6649c9
|
[
"MIT"
] | null | null | null |
meg_runtime/ui/manager.py
|
MultimediaExtensibleGit/Runtime
|
ba2e469666163177034e44077b02378dfc6649c9
|
[
"MIT"
] | 5
|
2020-03-24T19:59:38.000Z
|
2020-04-22T03:44:43.000Z
|
meg_runtime/ui/manager.py
|
MultimediaExtensibleGit/Runtime
|
ba2e469666163177034e44077b02378dfc6649c9
|
[
"MIT"
] | 2
|
2020-03-13T18:35:46.000Z
|
2020-04-11T20:19:20.000Z
|
"""MEG UI Manager
"""
import pkg_resources
from PyQt5 import QtCore, QtWidgets, QtGui, uic
from meg_runtime.config import Config
from meg_runtime.logger import Logger
from meg_runtime.app import App
class UIManager(QtWidgets.QMainWindow):
"""Main UI manager for the MEG system."""
UI_FILE = 'mainwindow.ui'
# The window class widgets
__widgets = None
def __init__(self, **kwargs):
"""UI manager constructor."""
# Load window resource if needed
if UIManager.__widgets is None:
# Load the resource setup from the package
UIManager.__widgets = uic.loadUiType(pkg_resources.resource_filename(__name__, UIManager.UI_FILE))
# Initialize the super class
super().__init__(**kwargs)
# Setup window resource
UIManager.__widgets[0]().setupUi(self)
# Set the window panel stack
self._panels = []
self._current_panel = None
self._current_popup = None
# Set handler for closing a panel
self._panel = self.findChild(QtWidgets.QTabWidget, 'panelwidget')
self._panel.tabCloseRequested.connect(self.remove_view_by_index)
self._panel.currentChanged.connect(self._show_view_by_index)
# Get status widget
self._statusbar = self.findChild(QtWidgets.QStatusBar, 'statusbar')
# Set handlers for main buttons
# TODO: Add more handlers for these
self._action_clone = self.findChild(QtWidgets.QAction, 'action_Clone')
self._action_clone.triggered.connect(App.open_clone_panel)
self._action_open = self.findChild(QtWidgets.QAction, 'action_Open')
self._action_open.triggered.connect(App.open_repo_panel)
self._action_quit = self.findChild(QtWidgets.QAction, 'action_Quit')
self._action_quit.triggered.connect(App.quit)
self._action_about = self.findChild(QtWidgets.QAction, 'action_About')
self._action_about.triggered.connect(App.open_about)
self._action_preferences = self.findChild(QtWidgets.QAction, 'action_Preferences')
self._action_preferences.triggered.connect(App.open_prefs_panel)
self._action_manage_plugins = self.findChild(QtWidgets.QAction, 'action_Manage_Plugins')
self._action_manage_plugins.triggered.connect(App.open_plugins_panel)
# Set the default title
self.set_title()
# Set the icon
icon_path = App.get_icon()
if icon_path is not None:
self.setWindowIcon(QtGui.QIcon(icon_path))
# Restore the state from the configuration if needed
window_state = Config.get('window/state', 'none')
state = self.windowState()
if window_state == 'maximized':
state &= ~(QtCore.Qt.WindowMinimized | QtCore.Qt.WindowFullScreen)
state |= QtCore.Qt.WindowMaximized
elif window_state == 'minimized':
state &= ~(QtCore.Qt.WindowMaximized | QtCore.Qt.WindowFullScreen)
state |= QtCore.Qt.WindowMinimized
elif window_state == 'fullscreen':
state &= ~(QtCore.Qt.WindowMinimized | QtCore.Qt.WindowMaximized)
state |= QtCore.Qt.WindowFullScreen
self.setWindowState(state)
# Restore the window geometry from the configuration if needed
geometry = Config.get('window/geometry', None)
if isinstance(geometry, list) and len(geometry) == 4:
self.setGeometry(geometry[0], geometry[1], geometry[2], geometry[3])
def closeEvent(self, event):
"""The window was closed"""
# Determine the window state
state = self.windowState()
window_state = 'none'
if state & QtCore.Qt.WindowFullScreen:
window_state = 'fullscreen'
elif state & QtCore.Qt.WindowMaximized:
window_state = 'maximized'
elif state & QtCore.Qt.WindowMinimized:
window_state = 'minimized'
else:
# Save the window geometry for normal state
geometry = self.geometry()
Config.set('window/geometry', [
geometry.x(),
geometry.y(),
geometry.width(),
geometry.height()
])
# Save the window state
Config.set('window/state', window_state)
# Save the configuration
Config.save()
# Continue to close the window
QtWidgets.QMainWindow.closeEvent(self, event)
def set_title(self, panel=None):
"""Update the window title from the current panel"""
# Set the new window title, if provided by the panel
if panel is not None and panel.get_title():
title = panel.get_title()
self.setWindowTitle(f'{App.get_name()} - {title}')
container = self.get_panel_container()
if container is not None:
index = container.indexOf(panel.get_widgets())
if index >= 0:
container.setTabText(index, title)
container.setTabIcon(index, panel.get_icon())
else:
self.setWindowTitle(f'{App.get_name()}')
def set_status(self, panel=None, timeout=0):
"""Update the window status from the current panel"""
self.set_status_text('' if panel is None else panel.get_status(), timeout)
def set_status_text(self, message, timeout=0):
"""Update the window status from the current panel"""
if self._statusbar is not None:
self._statusbar.showMessage('' if message is None else message, timeout)
def get_panel_container(self):
"""Get the panel container widget"""
return self._panel
def get_panels(self):
"""Get all the panels in the window panel stack"""
if not isinstance(self._panels, list):
self._panels = []
return self._panels
def get_panel(self, name):
"""Get a panel in the window panel stack by name"""
# Check panels by name
for panel in self.get_panels():
if panel.get_name() == name:
# Return the panel
return panel
# Panel not found
return None
def get_panel_by_index(self, index):
"""Get a panel in the window panel stack by index"""
# Get panel container
container = self.get_panel_container()
if container is not None:
# Get the widgets of the panel
widgets = container.widget(index)
if widgets is not None:
# Check the panels for matching widgets
for panel in self.get_panels():
if panel.get_widgets() == widgets:
# Found the panel
return panel
# Panel not found
return None
def get_current_panel(self):
"""Get the current panel in the window stack"""
return self._current_panel
def get_current_popup(self):
"""Get the current popup dialog"""
return self._current_popup
def push_view(self, panel):
"""Push a panel onto the stack being viewed."""
if panel is not None:
Logger.debug(f'MEG UI: Adding panel "{panel.get_name()}"')
# Hide the current panel
current_panel = self.get_current_panel()
if current_panel is not None:
current_panel.on_hide()
# Show the current panel
panel.on_show()
# Update the title for the panel
self.set_title(panel)
# Update the status for the panel
self.set_status(panel)
# Get the window central widget
container = self.get_panel_container()
if container is not None:
# Add the panel to the view stack
widgets = panel.get_widgets()
widgets.setParent(container)
title = panel.get_title()
index = container.addTab(widgets, 'Home' if not title else title)
# Remove the close button if not closable
tabbar = container.tabBar()
if not panel.get_is_closable():
tabbar.tabButton(index, QtWidgets.QTabBar.RightSide).deleteLater()
tabbar.setTabButton(index, QtWidgets.QTabBar.RightSide, None)
# Add the panel icon
tabbar.setTabIcon(index, panel.get_icon())
# Add the panel to the panel stack
self.get_panels().append(panel)
# Set the panel to the view
container.setCurrentIndex(index)
def set_view(self, panel):
"""Set the panel to be viewed in the stack or push the panel onto the stack being viewed."""
if panel is not None:
# Get the window central widget
container = self.get_panel_container()
if container is not None:
# Get the index of the panel
index = container.indexOf(panel.get_widgets())
if index >= 0:
# Set the new panel
container.setCurrentIndex(index)
# Do not continue since the panel was found do not push
Logger.debug(f'MEG UI: Setting panel "{panel.get_name()}"')
return
# Push the panel instead because it was not found
self.push_view(panel)
def popup_view(self, panel, resizable=False):
"""Popup a dialog containing a panel."""
if panel is None or self._current_popup is not None:
return QtWidgets.QDialog.Rejected
# Create a dialog window to popup
dialog = QtWidgets.QDialog(None, QtCore.Qt.WindowSystemMenuHint | QtCore.Qt.WindowTitleHint | QtCore.Qt.WindowCloseButtonHint)
dialog.setModal(True)
dialog.setSizeGripEnabled(resizable)
# Set the current popup
self._current_popup = dialog
# Set dialog layout
layout = QtWidgets.QGridLayout()
layout.setContentsMargins(0, 0, 0, 0)
dialog.setLayout(layout)
# Add the panel widgets to the popup
widgets = panel.get_widgets()
layout.addWidget(widgets)
widgets.setParent(dialog)
# Set the dialog icon
icon = panel.get_icon()
        dialog.setWindowIcon(icon if icon else QtGui.QIcon(App.get_icon()))
title = panel.get_title()
# Set the dialog title
dialog.setWindowTitle(title if title else App.get_name())
previous_panel = self._current_panel
# Hide the current panel
if previous_panel is not None:
previous_panel.on_hide()
# Make the panel the current
self._current_panel = panel
# Show the panel
panel.on_show()
# Show the dialog
if not resizable:
dialog.setFixedSize(dialog.size())
result = dialog.exec_()
# Hide the panel
panel.on_hide()
# Remove the popup
self._current_popup = None
# Restore the previous panel to current
self._current_panel = previous_panel
# Show the previous panel
if previous_panel is not None:
previous_panel.on_show()
return result
def remove_view(self, panel):
"""Remove a panel from the stack being viewed."""
# Check if the panel is closable
if panel is not None and panel.get_is_closable():
Logger.debug(f'MEG UI: Removing panel "{panel.get_name()}"')
# Close the panel
panel.on_hide()
panel.on_close()
# Remove the panel from the list
panels = self.get_panels()
if panel in panels:
panels.remove(panel)
if self._current_panel == panel:
self._current_panel = None
# Get the window central widget
container = self.get_panel_container()
if container:
# Get the index of this panel
index = container.indexOf(panel.get_widgets())
if index >= 0:
# Remove the panel from the view stack
container.removeTab(index)
panel.get_widgets().setParent(None)
def remove_view_by_index(self, index):
"""Remove a panel from the stack being viewed."""
# Get the panel by index
Logger.debug(f'MEG UI: Removing panel by index ({index})')
panel = self.get_panel_by_index(index)
if panel is not None and panel.get_is_closable():
# Remove the panel
self.remove_view(panel)
def _show_view_by_index(self, index):
"""Show the panel on click"""
# Get the panel by index
panel = self.get_panel_by_index(index)
if panel is not None:
# Get the current panel
current_panel = self.get_current_panel()
# Check if the panel is not the current panel
if current_panel != panel:
# Hide the current panel
if current_panel is not None:
current_panel.on_hide()
# Set the current panel
self._current_panel = panel
# Update the title
self.set_title(panel)
# Update the status
self.set_status(panel)
# Show the new panel
if panel is not None:
panel.on_show()
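# Usage sketch (not part of the original file; assumes the surrounding MEG app is
# importable so Config/App resolve, and that no Qt event loop is running yet):
# from PyQt5 import QtWidgets
# app = QtWidgets.QApplication([])
# window = UIManager()
# window.show()
# app.exec_()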
| 41.719136
| 134
| 0.600059
| 1,580
| 13,517
| 4.982278
| 0.13481
| 0.030488
| 0.021723
| 0.019563
| 0.322282
| 0.231453
| 0.19563
| 0.179497
| 0.167175
| 0.134909
| 0
| 0.001739
| 0.319154
| 13,517
| 323
| 135
| 41.848297
| 0.853635
| 0.203226
| 0
| 0.3125
| 0
| 0
| 0.042354
| 0.001981
| 0
| 0
| 0
| 0.003096
| 0
| 1
| 0.081731
| false
| 0
| 0.024038
| 0
| 0.173077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2501aa9e0452052b19ad9fe91a29c5a969b9d03e
| 1,935
|
py
|
Python
|
release/davis16/evaluate.py
|
MSiam/segment-any-moving
|
82cb782867d866d2f4eb68230edb75f613e15a02
|
[
"Apache-2.0"
] | 70
|
2019-09-16T17:55:55.000Z
|
2022-03-07T00:26:53.000Z
|
release/davis16/evaluate.py
|
MSiam/segment-any-moving
|
82cb782867d866d2f4eb68230edb75f613e15a02
|
[
"Apache-2.0"
] | 9
|
2019-09-30T09:15:11.000Z
|
2021-07-21T11:33:13.000Z
|
release/davis16/evaluate.py
|
MSiam/segment-any-moving
|
82cb782867d866d2f4eb68230edb75f613e15a02
|
[
"Apache-2.0"
] | 5
|
2019-09-25T05:14:37.000Z
|
2021-07-08T20:13:47.000Z
|
import argparse
import logging
import yaml
from pathlib import Path
from script_utils.common import common_setup
from release.davis16.compute_flow import link_splits
from release.helpers.misc import msg, subprocess_call
def check_tracks(track_output, splits):
for split in splits:
np_dir = track_output / split
if not np_dir.exists():
raise ValueError(f'Did not find tracks in {np_dir}; '
f'did you run release/davis17/track.py?')
def evaluate_proposed(config, output_stage):
if output_stage == 'detection':
input_dir = (Path(config['davis16']['output_dir']) / 'detections')
elif output_stage == 'tracking':
input_dir = (Path(config['davis16']['output_dir']) / 'tracks')
else:
raise ValueError(f'Unknown output stage: {output_stage}')
for split in config['davis16']['splits']:
masks_dir = input_dir / split / 'masks' / 'masks'
cmd = [
'python', 'davis/eval_fgbg.py',
'--masks-dir', masks_dir
]
msg(f'Evaluating {split}')
subprocess_call(cmd)
def main():
# Use first line of file docstring as description if it exists.
parser = argparse.ArgumentParser(
description=__doc__.split('\n')[0] if __doc__ else '',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('output_stage',
choices=['detection', 'tracking'],
default='detection')
parser.add_argument('--config', default=Path('./release/config.yaml'))
args = parser.parse_args()
logging.getLogger().setLevel(logging.INFO)
logging.basicConfig(format='%(asctime)s.%(msecs).03d: %(message)s',
datefmt='%H:%M:%S')
with open(args.config, 'r') as f:
        config = yaml.safe_load(f)  # safe_load avoids PyYAML's unsafe default loader
evaluate_proposed(config, args.output_stage)
if __name__ == "__main__":
main()
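# Usage sketch (not part of the original file): run from the repository root, e.g.
#   python release/davis16/evaluate.py tracking --config release/config.yaml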
| 31.209677
| 74
| 0.632041
| 230
| 1,935
| 5.108696
| 0.452174
| 0.065532
| 0.017021
| 0.030638
| 0.057872
| 0.057872
| 0.057872
| 0
| 0
| 0
| 0
| 0.008862
| 0.24186
| 1,935
| 61
| 75
| 31.721311
| 0.792093
| 0.031525
| 0
| 0
| 0
| 0
| 0.198718
| 0.037927
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.155556
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2503cb791f9ad674e778396da993788db1fa44bb
| 4,712
|
py
|
Python
|
qq/mention.py
|
foxwhite25/qq.py
|
92e744205e57b4c8922aa5843095ae900b3c1d84
|
[
"MIT"
] | 40
|
2021-12-07T02:18:14.000Z
|
2022-03-28T13:14:16.000Z
|
qq/mention.py
|
foxwhite25/qq.py
|
92e744205e57b4c8922aa5843095ae900b3c1d84
|
[
"MIT"
] | 2
|
2021-12-12T17:34:29.000Z
|
2021-12-17T04:43:03.000Z
|
qq/mention.py
|
foxwhite25/qq.py
|
92e744205e57b4c8922aa5843095ae900b3c1d84
|
[
"MIT"
] | 5
|
2021-12-10T11:17:41.000Z
|
2022-03-05T13:53:50.000Z
|
# The MIT License (MIT)
# Copyright (c) 2021-present foxwhite25
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from __future__ import annotations
from typing import Type, TypeVar, List, TYPE_CHECKING, Any, Union
__all__ = (
'AllowedMentions',
)
if TYPE_CHECKING:
from .types.message import AllowedMentions as AllowedMentionsPayload
from .member import Member
from .role import Role
class _FakeBool:
def __repr__(self):
return 'True'
def __eq__(self, other):
return other is True
def __bool__(self):
return True
default: Any = _FakeBool()
A = TypeVar('A', bound='AllowedMentions')
class AllowedMentions:
"""一个类,表示消息中允许提及的内容。
这个类可以在 :class:`Client` 初始化期间设置,以应用于每条发送的消息。
它也可以通过 :meth:`abc.Messageable.send` 在每条消息的基础上应用,以获得更细粒度的控制。
Attributes
------------
everyone: :class:`bool`
是否允许所有人和这里提到。 默认为 ``True``。
users: Union[:class:`bool`, List[:class:`Member`]]
控制被提及的用户。 如果为 ``True`` (默认值),则根据消息内容提及用户。
如果 ``False`` 则根本不会提及用户。 如果给出了 :class:`Member` 的列表,则只提及所提供的用户,前提是这些用户在消息内容中。
roles: Union[:class:`bool`, List[:class:`Role`]]
控制提到的用户组。 如果为 ``True`` (默认值),则根据消息内容提及用户组。 如果 ``False`` 则根本不提及用户组。
如果给出了 :class:`Role` 的列表,则只提及所提供的用户组,前提是这些用户组在消息内容中。
replied_user: :class:`bool`
是否提及正在回复的消息的作者。 默认为 ``True`` 。
"""
__slots__ = ('everyone', 'users', 'roles', 'replied_user')
def __init__(
self,
*,
everyone: bool = default,
users: Union[bool, List[Member]] = default,
roles: Union[bool, List[Role]] = default,
replied_user: bool = default,
):
self.everyone = everyone
self.users = users
self.roles = roles
self.replied_user = replied_user
@classmethod
def all(cls: Type[A]) -> A:
"""返回一个 :class:`AllowedMentions` 的工厂方法,其中所有字段都显式设置为 ``True``"""
return cls(everyone=True, users=True, roles=True, replied_user=True)
@classmethod
def none(cls: Type[A]) -> A:
"""一个工厂方法,返回一个 :class:`AllowedMentions`,所有字段都设置为 ``False``"""
return cls(everyone=False, users=False, roles=False, replied_user=False)
def to_dict(self) -> AllowedMentionsPayload:
parse = []
data = {}
if self.everyone:
parse.append('everyone')
if self.users == True:
parse.append('users')
elif self.users != False:
data['users'] = [x.id for x in self.users]
if self.roles == True:
parse.append('roles')
elif self.roles != False:
data['roles'] = [x.id for x in self.roles]
if self.replied_user:
data['replied_user'] = True
data['parse'] = parse
return data # type: ignore
def merge(self, other: AllowedMentions) -> AllowedMentions:
# Creates a new AllowedMentions by merging from another one.
# Merge is done by using the 'self' values unless explicitly
# overridden by the 'other' values.
everyone = self.everyone if other.everyone is default else other.everyone
users = self.users if other.users is default else other.users
roles = self.roles if other.roles is default else other.roles
replied_user = self.replied_user if other.replied_user is default else other.replied_user
return AllowedMentions(everyone=everyone, roles=roles, users=users, replied_user=replied_user)
def __repr__(self) -> str:
return (
f'{self.__class__.__name__}(everyone={self.everyone}, '
f'users={self.users}, roles={self.roles}, replied_user={self.replied_user})'
)
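# Usage sketch (not part of the original file): start from none(), re-enable only
# reply mentions, and serialise for a message payload.
# merged = AllowedMentions.none().merge(AllowedMentions(replied_user=True))
# merged.to_dict()   # -> {'replied_user': True, 'parse': []}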
| 35.164179
| 102
| 0.655136
| 578
| 4,712
| 5.233564
| 0.3391
| 0.061818
| 0.019835
| 0.023802
| 0.044298
| 0.029091
| 0
| 0
| 0
| 0
| 0
| 0.001674
| 0.239389
| 4,712
| 133
| 103
| 35.428571
| 0.842355
| 0.423599
| 0
| 0.030303
| 0
| 0
| 0.089832
| 0.03211
| 0
| 0
| 0
| 0
| 0
| 1
| 0.136364
| false
| 0
| 0.075758
| 0.060606
| 0.378788
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
250a44eb50bdd484b59b76e217165e7deeb8a326
| 9,686
|
py
|
Python
|
utils/iwr6843_utils/parse_tlv.py
|
ApocalyVec/mGesf
|
21e0bf37a9d11a3cdde86a8d54e2f6c6a2211ab5
|
[
"MIT"
] | 18
|
2020-06-02T11:21:47.000Z
|
2022-03-25T08:16:57.000Z
|
utils/iwr6843_utils/parse_tlv.py
|
ApocalyVec/mGesf
|
21e0bf37a9d11a3cdde86a8d54e2f6c6a2211ab5
|
[
"MIT"
] | 4
|
2020-06-20T13:53:44.000Z
|
2021-09-11T22:58:21.000Z
|
utils/iwr6843_utils/parse_tlv.py
|
ApocalyVec/mGesf
|
21e0bf37a9d11a3cdde86a8d54e2f6c6a2211ab5
|
[
"MIT"
] | 6
|
2020-04-23T21:30:17.000Z
|
2021-08-03T19:59:12.000Z
|
import struct
import sys
import math
import numpy as np
#
# TODO 1: (NOW FIXED) Find the first occurrence of magic and start from there
# TODO 2: Warn if we cannot parse a specific section and try to recover
# TODO 3: Remove error at end of file if we have only fragment of TLV
#
def tlvHeaderDecode(data):
tlvType, tlvLength = struct.unpack('2I', data)
return tlvType, tlvLength
def parseDetectedObjects(data, numObj, tlvLength):
detected_points = struct.unpack(str(numObj * 4) + 'f', data[:tlvLength])
detected_points = np.asarray(detected_points).reshape(numObj, 4)
return detected_points
def parseRangeProfile(data, tlvLength):
# an integer is 2 byte long
range_bins = tlvLength / 2
range_profile = struct.unpack(str(int(range_bins)) + 'H', data[:tlvLength])
return range_profile
def parseRDheatmap(data, tlvLength, range_bins, rm_clutter=True):
"""
range bins times doppler bins times 2, doppler bins = chirps/ frame divided by num of antennas TX (3)
#default chirps per frame is (128/3) = 42 * 2 * 256
the call to replace_left_right mirror-flips left and right after reshaping.
replace_left_right is equivalent to this line from mmWave.js in the visualizer code
# rangeDoppler = rangeDoppler.slice((rangeDoppler.length + 1) / 2).concat(
# rangeDoppler.slice(0, (rangeDoppler.length + 1) / 2));
:param range_bins:
:param data: the incoming byte stream to be interpreted as range-doppler heatmap/profile
:param tlvLength:
:return:
"""
doppler_bins = (tlvLength / 2) / range_bins
rd_heatmap = struct.unpack(str(int(range_bins * doppler_bins)) + 'H', data[:tlvLength])
rd_heatmap = np.reshape(rd_heatmap, (int(range_bins), int(doppler_bins)))
overall_mean = np.mean(rd_heatmap)
if rm_clutter:
rd_heatmap = np.array([row - np.mean(row) for row in rd_heatmap])
return replace_left_right(rd_heatmap)
def chg_val(val):
return val - 65536 if val > 32767 else val
def parseAziheatmap(data, tlvLength, range_bins):
"""
:param range_bins:
:param data: the incoming byte stream to be interpreted as range-doppler heatmap/profile
:param tlvLength:
:return:
"""
# range_bins = 256
azi_bins = (tlvLength / 2) / range_bins
azi_heatmap = struct.unpack(str(int(range_bins * azi_bins)) + 'H', data[:tlvLength])
# azi_heatmap = [chg_val(x) for x in azi_heatmap]
azi_heatmap = np.reshape(azi_heatmap, (int(range_bins), int(azi_bins)))
# use the default order of 3 Tx's and ordering is TX0, TX1, TX2
row_indices = [7, 5, 11, 9]
qrows = 4
qcols = range_bins
rowSizeBytes = 48
q = data[:tlvLength]
qq = []
for col in range(qcols):
real = []
img = []
for row in range(qrows):
index = col * rowSizeBytes + 4 * row_indices[row]
real.append(q[index + 1] * 256 + q[index])
img.append(q[index + 3] * 256 + q[index + 2])
real = [chg_val(x) for x in real]
img = [chg_val(x) for x in img]
# convert to complex numbers
data = np.array([real, img]).transpose()
data = np.pad(data, ((0, 60), (0, 0)), 'constant', constant_values=0)
data = data[..., 0] + 1j * data[..., 1]
transformed = np.fft.fft(data)
# take the magnitude
transformed = np.absolute(transformed)
qq.append(np.concatenate((transformed[int(len(transformed) / 2):], transformed[:int(len(transformed) / 2)])))
qq = np.array(qq)
return qq
def replace_left_right(a):
rtn = np.empty(shape=a.shape)
rtn[:, :int(rtn.shape[1] / 2)] = a[:, int(rtn.shape[1] / 2):]
rtn[:, int(rtn.shape[1] / 2):] = a[:, :int(rtn.shape[1] / 2)]
return rtn
def parseStats(data):
interProcess, transmitOut, frameMargin, chirpMargin, activeCPULoad, interCPULoad = struct.unpack('6I', data[:24])
return interProcess, transmitOut, frameMargin, chirpMargin, activeCPULoad, interCPULoad
# print("\tOutputMsgStats:\t%d " % (6))
# print("\t\tChirpMargin:\t%d " % (chirpMargin))
# print("\t\tFrameMargin:\t%d " % (frameMargin))
# print("\t\tInterCPULoad:\t%d " % (interCPULoad))
# print("\t\tActiveCPULoad:\t%d " % (activeCPULoad))
# print("\t\tTransmitOut:\t%d " % (transmitOut))
# print("\t\tInterprocess:\t%d " % (interProcess))
negative_rtn = False, None, None, None, None, None
class tlv_header_decoder():
def __init__(self):
pass
def decode_iwr_tlv(in_data):
"""
    Must disable range profile for the quick RD heatmap to work; this way the number of range bins will be
    calculated from the absent range profile. You can still get the range profile by inferring it from the RD heatmap.
    :param in_data:
    :return: if no detected point at this frame, the detected points will be an empty array
"""
magic = b'\x02\x01\x04\x03\x06\x05\x08\x07'
header_length = 36
offset = in_data.find(magic)
data = in_data[offset:]
if len(data) < header_length:
return negative_rtn
try:
data_magic, version, length, platform, frameNum, cpuCycles, numObj, numTLVs = struct.unpack('Q7I',
data[
:header_length])
except struct.error:
print("Improper TLV structure found: ", (data,))
return negative_rtn
# print("Packet ID:\t%d "%(frameNum))
# print("Version:\t%x "%(version))
# print("Data Len:\t\t%d", length)
# print("TLV:\t\t%d "%(numTLVs))
# print("Detect Obj:\t%d "%(numObj))
# print("Platform:\t%X "%(platform))
if version >= 50462726 and len(data) >= length:
# if version > 0x01000005 and len(data) >= length:
try:
sub_frame_num = struct.unpack('I', data[36:40])[0]
header_length = 40
# print("Subframe:\t%d "%(subFrameNum))
pending_bytes = length - header_length
data = data[header_length:]
detected_points = None
range_profile = None
rd_heatmap = None
azi_heatmap = None
range_bins = 8
statistics = None
for i in range(numTLVs):
tlvType, tlvLength = tlvHeaderDecode(data[:8])
data = data[8:]
if tlvType == 1:
# print('Outputting Points')
detected_points = parseDetectedObjects(data, numObj,
tlvLength) # if no detected points, tlvType won't have 1
elif tlvType == 2:
# the range bins is modified in the range profile is enabled
range_profile = parseRangeProfile(data, tlvLength)
elif tlvType == 4:
# resolving static azimuth heatmap
pass
elif tlvType == 5:
# try:
# assert range_bins
# except AssertionError:
# raise Exception('Must enable range-profile while enabling range-doppler-profile, in order to'
# 'interpret the number of range bins')
rd_heatmap = parseRDheatmap(data, tlvLength, range_bins)
elif tlvType == 6:
# TODO why is the states' TLV not present?
interProcess, transmitOut, frameMargin, chirpMargin, activeCPULoad, interCPULoad = parseStats(data)
pass
elif tlvType == 7:
pass
elif tlvType == 8:
# resolving static azimuth-elevation heatmap
try:
azi_heatmap = parseAziheatmap(data, tlvLength, range_bins)
                    except Exception:
print('bad azimuth')
azi_heatmap = None
pass
elif tlvType == 9: # only for AoP EV2
pass
else:
# print("Unidentified tlv type %d" % tlvType, '. Its len is ' + str(tlvLength))
n_offset = data.find(magic)
if n_offset != offset and n_offset != -1:
print('New magic found, discarding previous frame with unknown tlv')
data = data[n_offset:]
return True, data, detected_points, range_profile, rd_heatmap, azi_heatmap
data = data[tlvLength:]
pending_bytes -= (8 + tlvLength)
data = data[pending_bytes:] # data that are left
# infer range profile from heatmap is the former is not enabled
if range_profile is None and rd_heatmap is not None and len(rd_heatmap) > 0:
range_profile = rd_heatmap[:, 0]
return True, data, detected_points, range_profile, rd_heatmap, azi_heatmap
except struct.error as se:
print('Failed to parse tlv message, type = ' + str(tlvType) + ', error: ')
print(se)
pass
return negative_rtn
if __name__ == "__main__":
magic = b'\x02\x01\x04\x03\x06\x05\x08\x07'
fileName = 'D:/PycharmProjects/mmWave_gesture_iwr6843/test_data2.dat'
rawDataFile = open(fileName, "rb")
rawData = rawDataFile.read()
rawDataFile.close()
offset = rawData.find(magic)
rawData = rawData[offset:]
# for i in range(len(rawData/36))
#
# for length, frameNum in tlvHeader(rawData):
# print
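# Usage sketch (not part of the original file): decode the buffer read above and
# keep the unconsumed tail for the next call.
# ok, rest, points, range_profile, rd_heatmap, azi_heatmap = decode_iwr_tlv(rawData)
# if ok and points is not None:
#     print(points.shape)   # (numObj, 4) array of detected points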
| 39.056452
| 120
| 0.58218
| 1,171
| 9,686
| 4.708796
| 0.266439
| 0.035909
| 0.010881
| 0.015959
| 0.204933
| 0.14436
| 0.094305
| 0.081973
| 0.081973
| 0.081973
| 0
| 0.025237
| 0.312719
| 9,686
| 247
| 121
| 39.214575
| 0.803064
| 0.287735
| 0
| 0.134752
| 0
| 0
| 0.04371
| 0.01778
| 0
| 0
| 0
| 0.008097
| 0
| 1
| 0.070922
| false
| 0.049645
| 0.028369
| 0.007092
| 0.198582
| 0.035461
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
250eb809dd09ad7a9b6aa51c271e231f078546da
| 1,772
|
py
|
Python
|
bunq/sdk/util/util.py
|
mwiekens/sdk_python
|
9333636083bc63dca4353e8f497588f57617efec
|
[
"MIT"
] | null | null | null |
bunq/sdk/util/util.py
|
mwiekens/sdk_python
|
9333636083bc63dca4353e8f497588f57617efec
|
[
"MIT"
] | null | null | null |
bunq/sdk/util/util.py
|
mwiekens/sdk_python
|
9333636083bc63dca4353e8f497588f57617efec
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
import json
import socket
import requests
from bunq.sdk.context.api_context import ApiContext, ApiEnvironmentType
from bunq.sdk.exception.bunq_exception import BunqException
from bunq.sdk.http.api_client import ApiClient
from bunq.sdk.model.generated import endpoint
from bunq.sdk.model.generated.endpoint import SandboxUser
__UNIQUE_REQUEST_ID = "uniqueness-is-required"
__FIELD_API_KEY = "ApiKey"
__INDEX_FIRST = 0
__FIELD_RESPONSE = "Response"
__ENDPOINT_SANDBOX_USER = "sandbox-user"
_ERROR_COULD_NOT_CREATE_NEW_SANDBOX_USER = "Could not create new sandbox user."
def automatic_sandbox_install() -> ApiContext:
sandbox_user = __generate_new_sandbox_user()
return ApiContext.create(ApiEnvironmentType.SANDBOX,
sandbox_user.api_key,
socket.gethostname()
)
def __generate_new_sandbox_user() -> SandboxUser:
url = ApiEnvironmentType.SANDBOX.uri_base + __ENDPOINT_SANDBOX_USER
headers = {
ApiClient.HEADER_REQUEST_ID: __UNIQUE_REQUEST_ID,
ApiClient.HEADER_CACHE_CONTROL: ApiClient.CACHE_CONTROL_NONE,
ApiClient.HEADER_GEOLOCATION: ApiClient.GEOLOCATION_ZERO,
ApiClient.HEADER_LANGUAGE: ApiClient.LANGUAGE_EN_US,
ApiClient.HEADER_REGION: ApiClient.REGION_NL_NL,
}
response = requests.request(ApiClient.METHOD_POST, url, headers=headers)
    if response.status_code == ApiClient.STATUS_CODE_OK:
response_json = json.loads(response.text)
return endpoint.SandboxUser.from_json(
json.dumps(response_json[__FIELD_RESPONSE][__INDEX_FIRST][
__FIELD_API_KEY]))
raise BunqException(_ERROR_COULD_NOT_CREATE_NEW_SANDBOX_USER)
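# Usage sketch (not part of the original file; the config filename is hypothetical):
# api_context = automatic_sandbox_install()
# api_context.save("bunq-sandbox.conf")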
| 34.076923
| 79
| 0.744921
| 205
| 1,772
| 5.980488
| 0.356098
| 0.089723
| 0.044861
| 0.041599
| 0.117455
| 0.076672
| 0.053834
| 0
| 0
| 0
| 0
| 0.000696
| 0.189052
| 1,772
| 51
| 80
| 34.745098
| 0.85247
| 0
| 0
| 0
| 0
| 0
| 0.046275
| 0.012415
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054054
| false
| 0
| 0.243243
| 0
| 0.351351
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2511753f88ea48953fbf7d9fff0197ffc5356c2e
| 752
|
py
|
Python
|
students/models/exams.py
|
samitnuk/studentsdb
|
659c82f7bdc0d6a14074da14252384b9443e286c
|
[
"MIT"
] | null | null | null |
students/models/exams.py
|
samitnuk/studentsdb
|
659c82f7bdc0d6a14074da14252384b9443e286c
|
[
"MIT"
] | null | null | null |
students/models/exams.py
|
samitnuk/studentsdb
|
659c82f7bdc0d6a14074da14252384b9443e286c
|
[
"MIT"
] | null | null | null |
from django.db import models
class Exam(models.Model):
"""Exam Model"""
class Meta(object):
verbose_name = 'Іспит'
verbose_name_plural = 'Іспити'
title = models.CharField(
max_length=256,
blank=False,
verbose_name='Назва предмету')
datetime = models.DateTimeField(
blank=False,
verbose_name='Дата і час проведення')
teacher = models.CharField(
max_length=256,
blank=False,
verbose_name='ПІБ викладача')
exam_group = models.ForeignKey(
'Group',
verbose_name='Група',
blank=False,
null=True,
on_delete=models.PROTECT)
def __str__(self):
        return '%s (приймає %s)' % (self.title, self.teacher)
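# Usage sketch (not part of the original file; assumes a Group instance `group`):
# from django.utils import timezone
# exam = Exam.objects.create(title='Algebra', datetime=timezone.now(),
#                            teacher='J. Doe', exam_group=group)
# str(exam)   # 'Algebra (приймає J. Doe)'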
| 22.117647
| 60
| 0.599734
| 83
| 752
| 5.253012
| 0.566265
| 0.151376
| 0.116972
| 0.144495
| 0.220183
| 0.220183
| 0.220183
| 0.220183
| 0.220183
| 0
| 0
| 0.011257
| 0.291223
| 752
| 33
| 61
| 22.787879
| 0.806754
| 0.013298
| 0
| 0.25
| 0
| 0
| 0.112772
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.041667
| 0.041667
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
25132e1264d30cca913fe293f3805c8d79177d9b
| 2,201
|
py
|
Python
|
club_crm/api/clubtour.py
|
VivekChamp/clubcrm
|
82036360d867d3dc5406bc71445a98841b5bffbf
|
[
"MIT"
] | null | null | null |
club_crm/api/clubtour.py
|
VivekChamp/clubcrm
|
82036360d867d3dc5406bc71445a98841b5bffbf
|
[
"MIT"
] | null | null | null |
club_crm/api/clubtour.py
|
VivekChamp/clubcrm
|
82036360d867d3dc5406bc71445a98841b5bffbf
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
import frappe
from frappe import _
from datetime import datetime, timedelta, date, time
from frappe.utils import getdate, get_time, flt, now_datetime
from frappe.utils import escape_html
from frappe import throw, msgprint, _
@frappe.whitelist()
def get_schedule():
time_schedule = frappe.get_doc('Club Settings')
schedule = []
    # Loop variable renamed from `time` to `slot` to stop shadowing the imported time module
    for slot in time_schedule.club_tour_schedule:
        from_time_string = str(slot.from_time)
        from_time_datetime = datetime.strptime(from_time_string, "%H:%M:%S")
        from_time = datetime.strftime(from_time_datetime, "%I:%M %p")
        to_time_string = str(slot.to_time)
        to_time_datetime = datetime.strptime(to_time_string, "%H:%M:%S")
        to_time = datetime.strftime(to_time_datetime, "%I:%M %p")
name = _('{0} - {1}').format(from_time, to_time)
schedule.append({
"name" : name
})
frappe.response["message"] = {
"Preferred Time": schedule
}
@frappe.whitelist()
def get_status(client_id):
client = frappe.db.get("Client", {"email": frappe.session.user})
    doc = frappe.get_all('Club Tour', filters={'client_id': client.name, 'tour_status': "Pending"}, fields=["*"])
if doc:
frappe.response["message"] = {
"Status": 0,
"Status Message": "Pending"
}
else:
        doc = frappe.get_all('Club Tour', filters={'client_id': client.name, 'tour_status': "Scheduled"}, fields=["*"])
        if doc:
            doc_1 = doc[0]
frappe.response["message"] = {
"Status":1,
"Status Message": "Scheduled",
"From Time": doc_1.start_time,
"To Time": doc_1.end_time
}
@frappe.whitelist()
def create_clubtour(client_id,date,time):
client = frappe.db.get("Client", {"email": frappe.session.user})
doc = frappe.get_doc({
'doctype': 'Club Tour',
'client_id': client.name,
'preferred_date': date,
'preferred_time_between': time
})
doc.save()
frappe.response["message"] = {
"Status":1,
"Status Message": "Club Tour booking submitted"
}
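# Usage note (not part of the original file): as whitelisted methods, these are
# reachable over Frappe's REST bridge, e.g.:
#   GET  /api/method/club_crm.api.clubtour.get_schedule
#   POST /api/method/club_crm.api.clubtour.create_clubtour  (client_id, date, time)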
| 33.348485
| 116
| 0.606997
| 265
| 2,201
| 4.818868
| 0.260377
| 0.050117
| 0.031323
| 0.042287
| 0.278778
| 0.234926
| 0.234926
| 0.170713
| 0.170713
| 0.170713
| 0
| 0.005471
| 0.252612
| 2,201
| 65
| 117
| 33.861538
| 0.770821
| 0
| 0
| 0.258621
| 0
| 0
| 0.17174
| 0.009995
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051724
| false
| 0
| 0.12069
| 0
| 0.172414
| 0.017241
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2513a6b22c946cb8b820c0695cdd317c638f6bf0
| 647
|
py
|
Python
|
goalboost/model/__init__.py
|
JohnLockwood/Goalboost
|
1556a15f766ab762243e5d198b00ee7239b20411
|
[
"RSA-MD"
] | null | null | null |
goalboost/model/__init__.py
|
JohnLockwood/Goalboost
|
1556a15f766ab762243e5d198b00ee7239b20411
|
[
"RSA-MD"
] | 10
|
2021-07-30T14:39:05.000Z
|
2021-07-30T14:39:07.000Z
|
goalboost/model/__init__.py
|
JohnLockwood/Goalboost
|
1556a15f766ab762243e5d198b00ee7239b20411
|
[
"RSA-MD"
] | null | null | null |
'''
goalboost.model package
The goalboost model package consists of MongoEngine models along with
Marshmallow schemas. MongoEngine is our database ORM to MongoDB,
and Marshmallow is a serialization library that helps us validate, consume,
and expose these Orm objects for clients that need it at the API layer.
For MongoEngine, see http://mongoengine.org/
For Marshmallow and the MongoEngine integration piece, see:
https://marshmallow.readthedocs.org/en/latest/
https://github.com/touilleMan/marshmallow-mongoengine
'''
from flask_mongoengine import MongoEngine  # the flask.ext.* shims were removed from Flask
db = MongoEngine()
def init_db(app):
global db
db.init_app(app)
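# Usage sketch (not part of the original file; the settings dict is hypothetical):
# from flask import Flask
# app = Flask(__name__)
# app.config['MONGODB_SETTINGS'] = {'db': 'goalboost'}
# init_db(app)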
| 26.958333
| 75
| 0.789799
| 91
| 647
| 5.593407
| 0.637363
| 0.05501
| 0.082515
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139104
| 647
| 23
| 76
| 28.130435
| 0.913824
| 0.795981
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2513a8a764760e74306e494219df1291ea86952f
| 3,290
|
py
|
Python
|
examples/block_store/snapshots.py
|
IamFive/sdk-python
|
223b04f90477f7de0f00b3e652d8672ba73271c8
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
examples/block_store/snapshots.py
|
IamFive/sdk-python
|
223b04f90477f7de0f00b3e652d8672ba73271c8
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
examples/block_store/snapshots.py
|
IamFive/sdk-python
|
223b04f90477f7de0f00b3e652d8672ba73271c8
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Huawei Technologies Co.,Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import logging
def snapshots_detail(conn):
query = {
'limit': 10
}
details = list(conn.block_store.snapshots(**query))
logging.info(details)
def create_snapshot(conn):
attr = {
'name': 'snap-001',
'description': 'Daily backup',
'volume_id': '5aa119a8-d25b-45a7-8d1b-88e127885635',
'force': False,
'metadata': {}
}
snapshot = conn.block_store.create_snapshot(**attr)
logging.info(snapshot)
def rollback_snapshot(conn):
snapshot_id = 'snapshot-id'
volume_id = 'volume-id'
volume_name = 'volume-name'
snapshot_rollback = conn.block_store.rollback_snapshot(volume_id,
volume_name,
snapshot_id)
logging.info(snapshot_rollback)
def update_snapshot(conn):
snapshot_id = 'snapshot-id'
attrs = {
'name': 'name_xx3',
'description': 'hello'
}
snapshot = conn.block_store.update_snapshot(snapshot_id, **attrs)
logging.info(snapshot)
def create_snapshot_metadata(conn):
snapshot_id = 'snapshot-id'
metadata = {
'metadata': {
'key1': 'value1',
'key2': 'value2'
}
}
new_metadata = conn.block_store.create_snapshot_metadata(snapshot_id,
**metadata)
logging.info(new_metadata)
def update_snapshot_metadata(conn):
snapshot_id = 'snapshot-id'
metadata = {
'metadata': {
'key1': 'value1',
'key2': 'value2'
}
}
updated_metadata = conn.block_store.update_snapshot_metadata(snapshot_id, **metadata)
logging.info(updated_metadata)
def update_snapshot_metadata_with_key(conn):
snapshot_id = 'snapshot-id'
key = 'key1'
metadata = {
'meta': {
'key1': 'value1',
}
}
updated_metadata = conn.block_store.update_snapshot_metadata(snapshot_id,
key=key,
**metadata)
logging.info(updated_metadata)
def delete_snapshot_metadata(conn):
snapshot_id = 'snapshot-id'
key = 'key1'
conn.block_store.delete_snapshot_metadata(snapshot_id, key)
def get_snapshot_metadata(conn):
snapshot_id = 'snapshot-id'
metadata = conn.block_store.get_snapshot_metadata(snapshot_id)
logging.info(metadata)
def get_snapshot_metadata_with_key(conn):
key = 'key1'
snapshot_id = 'snapshot-id'
metadata = conn.block_store.get_snapshot_metadata(snapshot_id, key)
logging.info(metadata)
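# Usage sketch (not part of the original file): every helper expects an already
# authenticated SDK connection; how `conn` is built depends on the deployment.
# snapshots_detail(conn)   # log up to 10 snapshots
# create_snapshot(conn)    # create 'snap-001' on the hard-coded volume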
| 29.115044
| 89
| 0.614286
| 362
| 3,290
| 5.375691
| 0.312155
| 0.12333
| 0.071942
| 0.08222
| 0.452724
| 0.358171
| 0.29702
| 0.224049
| 0.213772
| 0.213772
| 0
| 0.021313
| 0.28693
| 3,290
| 112
| 90
| 29.375
| 0.808184
| 0.171733
| 0
| 0.367089
| 0
| 0
| 0.116519
| 0.013274
| 0
| 0
| 0
| 0
| 0
| 1
| 0.126582
| false
| 0
| 0.012658
| 0
| 0.139241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2515ebed6d44cdb6e775f2b149da71a36b8ce3fa
| 6,270
|
py
|
Python
|
lambda_upload.py
|
elbursto/aws_lambda_upload
|
62215a1efd7037cad2d099489c16fab905ccf2d3
|
[
"Apache-2.0"
] | null | null | null |
lambda_upload.py
|
elbursto/aws_lambda_upload
|
62215a1efd7037cad2d099489c16fab905ccf2d3
|
[
"Apache-2.0"
] | null | null | null |
lambda_upload.py
|
elbursto/aws_lambda_upload
|
62215a1efd7037cad2d099489c16fab905ccf2d3
|
[
"Apache-2.0"
] | null | null | null |
import boto3
from zipfile import ZipFile
import argparse
import json
import os
import shutil
class LambdaMaker(object):
def __init__(self, config_file, working_dir):
# const vars
self.creator='TomLambdaCreator_v1.0.0'
os.chdir(working_dir)
self.process_config_file(config_file)
def process_config_file(self, fname):
# read config file
with open(fname, 'r') as f:
self.contents = json.load(f)
f.close()
self.lambda_bucket = self.contents['S3Bucket']
self.key = self.contents['S3Key']
self.fname = self.contents['ZipLocalFname']
self.basename = self.contents['ZipBaseName']
self.buildDir = self.contents['BuildDir']
self.functionName=self.contents['FunctionName']
self.runTime=self.contents['Runtime']
self.iamRole=self.contents['Role']
self.handler=self.contents['Handler']
self.desc=self.contents['Description']
self.timeout=self.contents['Timeout']
self.memory=self.contents['MemorySize']
self.publish=self.contents['Publish']
self.vpnconfig = {}
self.vpnconfig['SubnetIds'] = self.contents['SubnetIds']
self.vpnconfig['SecurityGroupIds'] = self.contents['SecurityGroupIds']
self.targetArn = self.contents['DeadLetterTargetArn']
self.env = self.contents['EnvironmentVariables']
self.tracingConfig = self.contents['TracingConfigMode']
self.keyarn = self.contents['KeyArn']
def install_python_dependancies(self):
deps = self.contents['dependancies']
for dep in deps:
cmd = (("pip install {0} -t .").format(dep))
os.system(cmd)
def install_node_dependancies(self):
deps = self.contents['dependancies']
deplen = len(deps)
if deplen > 0:
os.mkdir("node_modules")
for dep in deps:
cmd = (("npm install -s {0}").format(dep))
print(cmd)
os.system(cmd)
def make_zip_file(self):
if (os.path.exists(self.buildDir)):
# remove old build director
shutil.rmtree(self.buildDir)
# make the build dir
os.mkdir(self.buildDir)
#copy the source file
source = self.contents['sourceFile']
shutil.copy(source, self.buildDir)
source_files = []
source_files.append(source)
os.chdir(self.buildDir)
if 'node' in self.runTime:
self.install_node_dependancies()
else:
self.install_python_dependancies()
shutil.make_archive(self.basename, "zip")
#with ZipFile(self.fname, 'w') as myzip:
# for zipit in source_files:
# print(("adding {0} to {1}").format(zipit, self.fname))
# myzip.write(zipit)
def push_function_code_to_s3(self):
self.make_zip_file()
client = boto3.client('s3')
response = client.put_object(
Bucket=self.lambda_bucket,
Body=open(self.fname, 'rb'),
Key=self.key)
metadata=response['ResponseMetadata']
print(("s3 code metadata = {0}").format(metadata))
self.s3version = response['VersionId']
print(('version = {0}').format(self.s3version))
# now that we pushed the code we can setup the S3
# info.
self.setup_function_vars()
print("pushed code to s3")
def setup_function_vars(self):
self.code = {}
self.code['S3Bucket'] = self.lambda_bucket
self.code['S3Key'] = self.key
self.code['S3ObjectVersion'] = self.s3version
self.desc="Get Location"
self.deadcfg = {}
self.deadcfg['TargetArn'] = self.targetArn
self.variables = {}
self.variables['Variables'] = self.env
self.tracingMode = {}
self.tracingMode['Mode'] = self.tracingConfig
# Active needs special permissions
#self.tracingConfig['Mode'] = 'Active'
self.tags = {}
self.tags['FunctionName'] = self.functionName
self.tags['RunTime'] = self.runTime
self.tags['Creator'] = self.creator
def make_new_function(self):
response = self.lambda_client.create_function(
FunctionName=self.functionName,
Runtime=self.runTime,
Role=self.iamRole,
Handler=self.handler,
Code=self.code,
Description=self.desc,
Timeout=self.timeout,
MemorySize=self.memory,
Publish=self.publish,
VpcConfig=self.vpnconfig,
DeadLetterConfig=self.deadcfg,
Environment=self.variables,
#KMSKeyArn=self.keyarn,
TracingConfig=self.tracingMode,
Tags=self.tags
)
print(("lambda create response = {0}").format(response))
def update_function_code(self):
response = self.lambda_client.update_function_code(
FunctionName=self.functionName,
S3Bucket=self.lambda_bucket,
S3Key=self.key,
S3ObjectVersion=self.s3version,
Publish=True,
DryRun=False)
print(("update_function_code response: {0}").format(response))
def push_code(self):
self.lambda_client = boto3.client('lambda')
newFunction = False
try:
response = self.lambda_client.get_function(
FunctionName=self.functionName)
#print(response['ResponseMetadata'])
except Exception:
newFunction = True
# push the new code to S3
self.push_function_code_to_s3()
if newFunction:
# new function so make it
            self.make_new_function()
else:
# function exists so just update code
self.update_function_code()
def main():
parser = argparse.ArgumentParser(description='aws lambda function creator')
parser.add_argument('-f', required=True, help='json file')
parser.add_argument('-w', required=True, help='working directory')
args = parser.parse_args()
config_file = args.f
wdir = args.w
    LambdaMaker(config_file, wdir).push_code()  # __init__ requires the working dir
if __name__ == "__main__":
main()
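# Usage sketch (not part of the original file; config.json and ./build are
# hypothetical paths):
#   python lambda_upload.py -f config.json -w ./build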
| 33
| 79
| 0.601435
| 672
| 6,270
| 5.49256
| 0.258929
| 0.074776
| 0.017339
| 0.019507
| 0.072067
| 0.023842
| 0
| 0
| 0
| 0
| 0
| 0.007583
| 0.284848
| 6,270
| 189
| 80
| 33.174603
| 0.815566
| 0.081978
| 0
| 0.070423
| 0
| 0
| 0.111402
| 0.00401
| 0
| 0
| 0
| 0
| 0
| 1
| 0.077465
| false
| 0
| 0.042254
| 0
| 0.126761
| 0.042254
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
25166ab3132cfb837c187df9b62bcf91450b7109
| 6,260
|
py
|
Python
|
official/vision/image_classification/callbacks.py
|
arayabrain/models
|
ceaa23c0ebecdb445d14f002cc66a39c50ac92e3
|
[
"Apache-2.0"
] | null | null | null |
official/vision/image_classification/callbacks.py
|
arayabrain/models
|
ceaa23c0ebecdb445d14f002cc66a39c50ac92e3
|
[
"Apache-2.0"
] | 3
|
2020-08-12T06:16:40.000Z
|
2020-08-17T05:44:26.000Z
|
official/vision/image_classification/callbacks.py
|
arayabrain/models
|
ceaa23c0ebecdb445d14f002cc66a39c50ac92e3
|
[
"Apache-2.0"
] | 1
|
2020-08-04T01:56:03.000Z
|
2020-08-04T01:56:03.000Z
|
# Lint as: python3
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Common modules for callbacks."""
from __future__ import absolute_import
from __future__ import division
# from __future__ import google_type_annotations
from __future__ import print_function
import functools
import os
from absl import logging
import numpy as np
import tensorflow as tf
from typing import Any, List, Optional, MutableMapping
from official.utils.misc import keras_utils
from official.vision.image_classification.pruning.pruning_base_configs import ModelPruningConfig
from tensorflow_model_optimization.python.core.keras import compat
from tensorflow_model_optimization.python.core.sparsity.keras.cprune_registry import ConstraintRegistry
def get_callbacks(model_checkpoint: bool = True,
include_tensorboard: bool = True,
time_history: bool = True,
track_lr: bool = True,
model_pruning_config: Optional[ModelPruningConfig] = None,
write_model_weights: bool = True,
batch_size: int = 0,
log_steps: int = 0,
                  model_dir: Optional[str] = None) -> List[tf.keras.callbacks.Callback]:
"""Get all callbacks."""
model_dir = model_dir or ''
callbacks = []
if model_checkpoint:
ckpt_full_path = os.path.join(model_dir, 'model.ckpt-{epoch:04d}')
callbacks.append(
tf.keras.callbacks.ModelCheckpoint(
ckpt_full_path, save_weights_only=True, verbose=1))
if include_tensorboard:
callbacks.append(
CustomTensorBoard(
log_dir=model_dir,
track_lr=track_lr,
model_pruning_config=model_pruning_config,
write_images=write_model_weights))
if time_history:
callbacks.append(
keras_utils.TimeHistory(
batch_size,
log_steps,
logdir=model_dir if include_tensorboard else None))
return callbacks
def get_scalar_from_tensor(t: tf.Tensor) -> int:
"""Utility function to convert a Tensor to a scalar."""
t = tf.keras.backend.get_value(t)
if callable(t):
return t()
else:
return t
class CustomTensorBoard(tf.keras.callbacks.TensorBoard):
"""A customized TensorBoard callback that tracks additional datapoints.
Metrics tracked:
- Global learning rate
Attributes:
log_dir: the path of the directory where to save the log files to be parsed
by TensorBoard.
track_lr: `bool`, whether or not to track the global learning rate.
**kwargs: Additional arguments for backwards compatibility. Possible key is
`period`.
"""
# TODO(b/146499062): track params, flops, log lr, l2 loss,
# classification loss
def __init__(self,
log_dir: str,
track_lr: bool = False,
model_pruning_config: Optional[ModelPruningConfig] = None,
**kwargs):
super(CustomTensorBoard, self).__init__(log_dir=log_dir, **kwargs)
self._track_lr = track_lr
self._model_pruning_config = model_pruning_config
def _collect_learning_rate(self, logs):
logs = logs or {}
lr_schedule = getattr(self.model.optimizer, "lr", None)
if isinstance(lr_schedule, tf.keras.optimizers.schedules.LearningRateSchedule):
logs["learning_rate"] = tf.keras.backend.get_value(
lr_schedule(self.model.optimizer.iterations)
)
if isinstance(logs["learning_rate"], functools.partial):
logs["learning_rate"] = logs["learning_rate"]()
return logs
def _log_metrics(self, logs, prefix, step):
if self._track_lr:
super()._log_metrics(self._collect_learning_rate(logs), prefix, step)
def _log_pruning_metrics(self, logs, prefix, step):
if compat.is_v1_apis():
# Safe to depend on this TF 1.X private API given
# there will be no more 1.X releases.
self._write_custom_summaries(step, logs)
else: # TF 2.X
log_dir = self.log_dir + '/metrics'
file_writer = tf.summary.create_file_writer(log_dir)
file_writer.set_as_default()
for name, value in logs.items():
tf.summary.scalar(name, value, step=step)
file_writer.flush()
def on_epoch_begin(self, epoch, logs=None):
if logs is not None:
super(CustomTensorBoard, self).on_epoch_begin(epoch, logs)
if self._model_pruning_config:
pruning_logs = {}
params = []
postfixes = []
for layer_pruning_config in self._model_pruning_config.pruning:
layer_name = layer_pruning_config.layer_name
layer = self.model.get_layer(layer_name)
for weight_pruning_config in layer_pruning_config.pruning:
weight_name = weight_pruning_config.weight_name
constraint_name = ConstraintRegistry.get_constraint_from_weight(weight_name)
constraint = getattr(layer, constraint_name)
params.append(constraint.mask)
params.append(constraint.threshold)
postfixes.append('/' + layer_name + '/' + weight_name)
params.append(self.model.optimizer.iterations)
values = tf.keras.backend.batch_get_value(params)
iteration = values[-1]
del values[-1]
del params[-1]
param_value_pairs = list(zip(params, values))
for (mask, mask_value), postfix in zip(param_value_pairs[::2], postfixes):
pruning_logs.update({
'mask_sparsity' + postfix: 1 - np.mean(mask_value)
})
for (threshold, threshold_value), postfix in zip(param_value_pairs[1::2], postfixes):
pruning_logs.update({'threshold' + postfix: threshold_value})
self._log_pruning_metrics(pruning_logs, '', iteration)
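# Usage sketch (assumes the official.* and tensorflow_model_optimization
# imports above resolve; the model and training data names are placeholders,
# not part of the original file):
#
#     model = tf.keras.Sequential([tf.keras.layers.Dense(10)])
#     model.compile(optimizer='sgd', loss='mse')
#     callbacks = get_callbacks(batch_size=32, log_steps=100,
#                               model_dir='/tmp/model_dir')
#     model.fit(x_train, y_train, callbacks=callbacks)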
| 36.184971
| 103
| 0.684824
| 786
| 6,260
| 5.21374
| 0.315522
| 0.04124
| 0.035139
| 0.016105
| 0.127867
| 0.0898
| 0.015617
| 0
| 0
| 0
| 0
| 0.007137
| 0.216613
| 6,260
| 172
| 104
| 36.395349
| 0.828507
| 0.216454
| 0
| 0.063063
| 0
| 0
| 0.022291
| 0.004541
| 0
| 0
| 0
| 0.005814
| 0
| 1
| 0.063063
| false
| 0
| 0.117117
| 0
| 0.225225
| 0.009009
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2519a94caf6b2f931b487b3397703da9ddf2b842
| 885
|
py
|
Python
|
EDyA_II/4_tree/python/4_default_parameter.py
|
jrg-sln/academy
|
498c11dcfeab78dbbbb77045a13d7d6675c0d150
|
[
"MIT"
] | null | null | null |
EDyA_II/4_tree/python/4_default_parameter.py
|
jrg-sln/academy
|
498c11dcfeab78dbbbb77045a13d7d6675c0d150
|
[
"MIT"
] | null | null | null |
EDyA_II/4_tree/python/4_default_parameter.py
|
jrg-sln/academy
|
498c11dcfeab78dbbbb77045a13d7d6675c0d150
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class Saucer(object):
    """
    Represents a dish of food.
    """
    def __init__(self, cadNombre, realPrecio, cadDescription=None,
                 cadImagen=None, boolVegetariano=False, entCoccion=1):
        self.nombre = cadNombre
        self.precio = realPrecio
        self.descripcion = cadDescription
        self.imagen = cadImagen
        self.esVegetariano = boolVegetariano
        self.coccion = entCoccion

    def __str__(self):
        return "{nombre}{esVeg}: {precio:.2f}{desc}".format(
            nombre=self.nombre,
            desc=' (' + self.descripcion + ')' if self.descripcion else '',
            precio=self.precio,
            esVeg='*' if self.esVegetariano else '')

burgerPython = Saucer("Hamburguesa de Python", 0.13,
                      cadDescription="Barely an eighth of a byte")
print(burgerPython)
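# Defaults in action (a small illustrative sketch; these extra dishes are not
# part of the original exercise): only the name and price are required, the
# remaining keyword parameters fall back to the defaults declared in __init__.
#
#     plain = Saucer("Ensalada", 5.50)
#     print(plain)     # -> Ensalada: 5.50
#     veggie = Saucer("Tofu", 7.25, boolVegetariano=True)
#     print(veggie)    # -> Tofu*: 7.25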
| 34.038462
| 75
| 0.59661
| 86
| 885
| 6.046512
| 0.569767
| 0.086538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009509
| 0.287006
| 885
| 26
| 76
| 34.038462
| 0.81458
| 0.059887
| 0
| 0
| 0
| 0
| 0.104039
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0.055556
| 0.222222
| 0.055556
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2519e01a81d1d3e2c4f4e4fede4c19c82e764391
| 9,768
|
py
|
Python
|
model/bdrar.py
|
Mhaiyang/iccv
|
04a8ee52c2323d7ff5cdf03c0be1466e8180d2eb
|
[
"MIT"
] | 2
|
2019-01-10T03:44:03.000Z
|
2019-05-24T08:50:14.000Z
|
model/bdrar.py
|
Mhaiyang/iccv
|
04a8ee52c2323d7ff5cdf03c0be1466e8180d2eb
|
[
"MIT"
] | null | null | null |
model/bdrar.py
|
Mhaiyang/iccv
|
04a8ee52c2323d7ff5cdf03c0be1466e8180d2eb
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn.functional as F
from torch import nn
from resnext.resnext101_regular import ResNeXt101
class _AttentionModule(nn.Module):
def __init__(self):
super(_AttentionModule, self).__init__()
self.block1 = nn.Sequential(
nn.Conv2d(64, 64, 1, bias=False), nn.BatchNorm2d(64), nn.ReLU(),
nn.Conv2d(64, 64, 3, dilation=2, padding=2, groups=32, bias=False), nn.BatchNorm2d(64), nn.ReLU(),
nn.Conv2d(64, 64, 1, bias=False), nn.BatchNorm2d(64)
)
self.block2 = nn.Sequential(
nn.Conv2d(64, 64, 1, bias=False), nn.BatchNorm2d(64), nn.ReLU(),
nn.Conv2d(64, 64, 3, dilation=3, padding=3, groups=32, bias=False), nn.BatchNorm2d(64), nn.ReLU(),
nn.Conv2d(64, 64, 1, bias=False), nn.BatchNorm2d(64)
)
self.block3 = nn.Sequential(
nn.Conv2d(64, 64, 1, bias=False), nn.BatchNorm2d(64), nn.ReLU(),
nn.Conv2d(64, 64, 3, dilation=4, padding=4, groups=32, bias=False), nn.BatchNorm2d(64), nn.ReLU(),
nn.Conv2d(64, 32, 1, bias=False), nn.BatchNorm2d(32)
)
self.down = nn.Sequential(
nn.Conv2d(64, 32, 1, bias=False), nn.BatchNorm2d(32)
)
def forward(self, x):
block1 = F.relu(self.block1(x) + x, True)
block2 = F.relu(self.block2(block1) + block1, True)
block3 = F.sigmoid(self.block3(block2) + self.down(block2))
return block3
class BDRAR(nn.Module):
def __init__(self):
super(BDRAR, self).__init__()
resnext = ResNeXt101()
self.layer0 = resnext.layer0
self.layer1 = resnext.layer1
self.layer2 = resnext.layer2
self.layer3 = resnext.layer3
self.layer4 = resnext.layer4
self.down4 = nn.Sequential(
nn.Conv2d(2048, 32, 1, bias=False), nn.BatchNorm2d(32), nn.ReLU()
)
self.down3 = nn.Sequential(
nn.Conv2d(1024, 32, 1, bias=False), nn.BatchNorm2d(32), nn.ReLU()
)
self.down2 = nn.Sequential(
nn.Conv2d(512, 32, 1, bias=False), nn.BatchNorm2d(32), nn.ReLU()
)
self.down1 = nn.Sequential(
nn.Conv2d(256, 32, 1, bias=False), nn.BatchNorm2d(32), nn.ReLU()
)
self.refine3_hl = nn.Sequential(
nn.Conv2d(64, 32, 1, bias=False), nn.BatchNorm2d(32), nn.ReLU(),
nn.Conv2d(32, 32, 3, padding=1, groups=32, bias=False), nn.BatchNorm2d(32), nn.ReLU(),
nn.Conv2d(32, 32, 1, bias=False), nn.BatchNorm2d(32)
)
self.refine2_hl = nn.Sequential(
nn.Conv2d(64, 32, 1, bias=False), nn.BatchNorm2d(32), nn.ReLU(),
nn.Conv2d(32, 32, 3, padding=1, groups=32, bias=False), nn.BatchNorm2d(32), nn.ReLU(),
nn.Conv2d(32, 32, 1, bias=False), nn.BatchNorm2d(32)
)
self.refine1_hl = nn.Sequential(
nn.Conv2d(64, 32, 1, bias=False), nn.BatchNorm2d(32), nn.ReLU(),
nn.Conv2d(32, 32, 3, padding=1, groups=32, bias=False), nn.BatchNorm2d(32), nn.ReLU(),
nn.Conv2d(32, 32, 1, bias=False), nn.BatchNorm2d(32)
)
self.attention3_hl = _AttentionModule()
self.attention2_hl = _AttentionModule()
self.attention1_hl = _AttentionModule()
self.refine2_lh = nn.Sequential(
nn.Conv2d(64, 32, 1, bias=False), nn.BatchNorm2d(32), nn.ReLU(),
nn.Conv2d(32, 32, 3, padding=1, groups=32, bias=False), nn.BatchNorm2d(32), nn.ReLU(),
nn.Conv2d(32, 32, 1, bias=False), nn.BatchNorm2d(32)
)
self.refine4_lh = nn.Sequential(
nn.Conv2d(64, 32, 1, bias=False), nn.BatchNorm2d(32), nn.ReLU(),
nn.Conv2d(32, 32, 3, padding=1, groups=32, bias=False), nn.BatchNorm2d(32), nn.ReLU(),
nn.Conv2d(32, 32, 1, bias=False), nn.BatchNorm2d(32)
)
self.refine3_lh = nn.Sequential(
nn.Conv2d(64, 32, 1, bias=False), nn.BatchNorm2d(32), nn.ReLU(),
nn.Conv2d(32, 32, 3, padding=1, groups=32, bias=False), nn.BatchNorm2d(32), nn.ReLU(),
nn.Conv2d(32, 32, 1, bias=False), nn.BatchNorm2d(32)
)
self.attention2_lh = _AttentionModule()
self.attention3_lh = _AttentionModule()
self.attention4_lh = _AttentionModule()
self.fuse_attention = nn.Sequential(
nn.Conv2d(64, 16, 3, padding=1, bias=False), nn.BatchNorm2d(16), nn.ReLU(),
nn.Conv2d(16, 2, 1)
)
self.predict = nn.Sequential(
nn.Conv2d(32, 8, 3, padding=1, bias=False), nn.BatchNorm2d(8), nn.ReLU(),
nn.Dropout(0.1), nn.Conv2d(8, 1, 1)
)
# for m in self.modules():
# if isinstance(m, nn.ReLU) or isinstance(m, nn.Dropout):
# m.inplace = True
for m in self.modules():
if isinstance(m, nn.ReLU):
m.inplace = True
def forward(self, x):
layer0 = self.layer0(x)
layer1 = self.layer1(layer0)
layer2 = self.layer2(layer1)
layer3 = self.layer3(layer2)
layer4 = self.layer4(layer3)
down4 = self.down4(layer4)
down3 = self.down3(layer3)
down2 = self.down2(layer2)
down1 = self.down1(layer1)
down4 = F.upsample(down4, size=down3.size()[2:], mode='bilinear')
refine3_hl_0 = F.relu(self.refine3_hl(torch.cat((down4, down3), 1)) + down4, True)
refine3_hl_0 = (1 + self.attention3_hl(torch.cat((down4, down3), 1))) * refine3_hl_0
refine3_hl_1 = F.relu(self.refine3_hl(torch.cat((refine3_hl_0, down3), 1)) + refine3_hl_0, True)
refine3_hl_1 = (1 + self.attention3_hl(torch.cat((refine3_hl_0, down3), 1))) * refine3_hl_1
refine3_hl_1 = F.upsample(refine3_hl_1, size=down2.size()[2:], mode='bilinear')
refine2_hl_0 = F.relu(self.refine2_hl(torch.cat((refine3_hl_1, down2), 1)) + refine3_hl_1, True)
refine2_hl_0 = (1 + self.attention2_hl(torch.cat((refine3_hl_1, down2), 1))) * refine2_hl_0
refine2_hl_1 = F.relu(self.refine2_hl(torch.cat((refine2_hl_0, down2), 1)) + refine2_hl_0, True)
refine2_hl_1 = (1 + self.attention2_hl(torch.cat((refine2_hl_0, down2), 1))) * refine2_hl_1
refine2_hl_1 = F.upsample(refine2_hl_1, size=down1.size()[2:], mode='bilinear')
refine1_hl_0 = F.relu(self.refine1_hl(torch.cat((refine2_hl_1, down1), 1)) + refine2_hl_1, True)
refine1_hl_0 = (1 + self.attention1_hl(torch.cat((refine2_hl_1, down1), 1))) * refine1_hl_0
refine1_hl_1 = F.relu(self.refine1_hl(torch.cat((refine1_hl_0, down1), 1)) + refine1_hl_0, True)
refine1_hl_1 = (1 + self.attention1_hl(torch.cat((refine1_hl_0, down1), 1))) * refine1_hl_1
down2 = F.upsample(down2, size=down1.size()[2:], mode='bilinear')
refine2_lh_0 = F.relu(self.refine2_lh(torch.cat((down1, down2), 1)) + down1, True)
refine2_lh_0 = (1 + self.attention2_lh(torch.cat((down1, down2), 1))) * refine2_lh_0
refine2_lh_1 = F.relu(self.refine2_lh(torch.cat((refine2_lh_0, down2), 1)) + refine2_lh_0, True)
refine2_lh_1 = (1 + self.attention2_lh(torch.cat((refine2_lh_0, down2), 1))) * refine2_lh_1
down3 = F.upsample(down3, size=down1.size()[2:], mode='bilinear')
refine3_lh_0 = F.relu(self.refine3_lh(torch.cat((refine2_lh_1, down3), 1)) + refine2_lh_1, True)
refine3_lh_0 = (1 + self.attention3_lh(torch.cat((refine2_lh_1, down3), 1))) * refine3_lh_0
refine3_lh_1 = F.relu(self.refine3_lh(torch.cat((refine3_lh_0, down3), 1)) + refine3_lh_0, True)
refine3_lh_1 = (1 + self.attention3_lh(torch.cat((refine3_lh_0, down3), 1))) * refine3_lh_1
down4 = F.upsample(down4, size=down1.size()[2:], mode='bilinear')
refine4_lh_0 = F.relu(self.refine4_lh(torch.cat((refine3_lh_1, down4), 1)) + refine3_lh_1, True)
refine4_lh_0 = (1 + self.attention4_lh(torch.cat((refine3_lh_1, down4), 1))) * refine4_lh_0
refine4_lh_1 = F.relu(self.refine4_lh(torch.cat((refine4_lh_0, down4), 1)) + refine4_lh_0, True)
refine4_lh_1 = (1 + self.attention4_lh(torch.cat((refine4_lh_0, down4), 1))) * refine4_lh_1
refine3_hl_1 = F.upsample(refine3_hl_1, size=down1.size()[2:], mode='bilinear')
predict4_hl = self.predict(down4)
predict3_hl = self.predict(refine3_hl_1)
predict2_hl = self.predict(refine2_hl_1)
predict1_hl = self.predict(refine1_hl_1)
predict1_lh = self.predict(down1)
predict2_lh = self.predict(refine2_lh_1)
predict3_lh = self.predict(refine3_lh_1)
predict4_lh = self.predict(refine4_lh_1)
fuse_attention = F.sigmoid(self.fuse_attention(torch.cat((refine1_hl_1, refine4_lh_1), 1)))
fuse_predict = torch.sum(fuse_attention * torch.cat((predict1_hl, predict4_lh), 1), 1, True)
predict4_hl = F.upsample(predict4_hl, size=x.size()[2:], mode='bilinear')
predict3_hl = F.upsample(predict3_hl, size=x.size()[2:], mode='bilinear')
predict2_hl = F.upsample(predict2_hl, size=x.size()[2:], mode='bilinear')
predict1_hl = F.upsample(predict1_hl, size=x.size()[2:], mode='bilinear')
predict1_lh = F.upsample(predict1_lh, size=x.size()[2:], mode='bilinear')
predict2_lh = F.upsample(predict2_lh, size=x.size()[2:], mode='bilinear')
predict3_lh = F.upsample(predict3_lh, size=x.size()[2:], mode='bilinear')
predict4_lh = F.upsample(predict4_lh, size=x.size()[2:], mode='bilinear')
fuse_predict = F.upsample(fuse_predict, size=x.size()[2:], mode='bilinear')
if self.training:
return fuse_predict, predict1_hl, predict2_hl, predict3_hl, predict4_hl, predict1_lh, predict2_lh, predict3_lh, predict4_lh
return F.sigmoid(fuse_predict)
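# Forward-pass sketch (assumes an older PyTorch where F.upsample/F.sigmoid are
# still available and ResNeXt101 can find its pretrained weights; the 416x416
# input size is illustrative only):
#
#     net = BDRAR().eval()
#     with torch.no_grad():
#         pred = net(torch.randn(1, 3, 416, 416))  # sigmoid map, (1, 1, 416, 416)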
| 51.141361
| 135
| 0.619676
| 1,427
| 9,768
| 4.056762
| 0.06377
| 0.04975
| 0.064605
| 0.129211
| 0.617205
| 0.575056
| 0.507342
| 0.429608
| 0.373121
| 0.373121
| 0
| 0.098968
| 0.226249
| 9,768
| 190
| 136
| 51.410526
| 0.666975
| 0.011159
| 0
| 0.180124
| 0
| 0
| 0.013257
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024845
| false
| 0
| 0.024845
| 0
| 0.080745
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
251a755eafd6983caca29826a579cc38212144dd
| 7,413
|
py
|
Python
|
pgeng/font.py
|
Bouncehball/pgeng
|
6f88991e16cfd744c8565b68b6348f313b4d75c0
|
[
"MIT"
] | null | null | null |
pgeng/font.py
|
Bouncehball/pgeng
|
6f88991e16cfd744c8565b68b6348f313b4d75c0
|
[
"MIT"
] | null | null | null |
pgeng/font.py
|
Bouncehball/pgeng
|
6f88991e16cfd744c8565b68b6348f313b4d75c0
|
[
"MIT"
] | null | null | null |
'Classes and functions for creating fonts and text buttons'
#IMPORTS
import pygame
from pathlib import Path
from .core import clip_surface, load_image
from .colour import palette_swap
#IMPORTS
#VARIABLES
__all__ = ['create_font', 'TextButton']
path = Path(__file__).resolve().parent
#VARIABLES
#CREATE_FONT
def create_font(colour):
'''A function to create small and large Font objects
colour will be the colour of the text
The first value in the returned tuple is the small font and the second value is the large font
Returns: tuple'''
if tuple(colour[:3]) == (0, 0, 0):
small_font_image = palette_swap(load_image(path.joinpath('font/small.png')), {(255, 0, 0): colour[:3], tuple(colour[:3]): (255, 255, 255)})
large_font_image = palette_swap(load_image(path.joinpath('font/large.png')), {(255, 0, 0): colour[:3], tuple(colour[:3]): (255, 255, 255)})
return Font(small_font_image, background_colour=255), Font(large_font_image, background_colour=255)
if tuple(colour[:3]) == (127, 127, 127):
small_font_image = palette_swap(load_image(path.joinpath('font/small.png')), {(255, 0, 0): colour[:3], tuple(colour[:3]): (128, 128, 128)})
large_font_image = palette_swap(load_image(path.joinpath('font/large.png')), {(255, 0, 0): colour[:3], tuple(colour[:3]): (128, 128, 128)})
return Font(small_font_image, 128), Font(large_font_image, 128)
small_font_image = palette_swap(load_image(path.joinpath('font/small.png')), {(255, 0, 0): colour[:3]})
large_font_image = palette_swap(load_image(path.joinpath('font/large.png')), {(255, 0, 0): colour[:3]})
return Font(small_font_image), Font(large_font_image)
#CREATE_FONT
#FONT
class Font:
'''A class to create a pixel art font
It will get all the letters out of the image and render them
The border between letters is usually (127, 127, 127) and the background is usually (0, 0, 0); change them if necessary
The font is made by DaFluffyPotato
Attributes:
character_height
characters
font_image
space_width'''
#__INIT__
def __init__(self, font_image, border_colour=127, background_colour=0):
'Initialising a font object'
self.font_image = font_image
self.font_image.set_colorkey((0, 0, 0) if not background_colour else [background_colour for i in range(3)])
self.characters = {}
current_width, character_count = 0, 0
character_order = ['A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z','a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z','.','-',',',':','+','\'','!','?','0','1','2','3','4','5','6','7','8','9','(',')','/','_','=','\\','[',']','*','"','<','>',';']
for x in range(self.font_image.get_width()):
colour = self.font_image.get_at((x, 0))
if colour[:3] == (border_colour, border_colour, border_colour): #IF THE TEXT COLOR IS (127, 127, 127), CHANGE BORDER_COLOR
character_image = clip_surface(self.font_image, (x - current_width, 0), (current_width, self.font_image.get_height())) #CLIP EVERY CHARACTER OUT OF THE FONT IMAGE
self.characters[character_order[character_count]] = character_image
character_count += 1
current_width = 0
else:
current_width += 1
self.space_width, self.character_height = self.characters['A'].get_size()
#__INIT__
#__REPR__
def __repr__(self):
'''Returns a string representation of the object
Returns: str'''
return 'pgeng.Font'
#__REPR__
#GET_SIZE
def get_size(self, text):
'''Get the size that a rendered string would use
It will return the width and height
Returns: tuple'''
if type(text) is not str:
raise TypeError('text has to be a string')
width, height = 0, 0
for character in text:
if character not in ('\n', ' ') and character in self.characters:
width += self.characters[character].get_width() + 1 #+ 1 FOR SPACING
elif character == ' ' or character not in ['\n']:
width += self.space_width + 1 #+ 1 FOR SPACING
else:
width = 0
height += self.character_height + 1 #+ 1 FOR SPACING
return width, height
#GET_SIZE
#RENDER
def render(self, surface, text, location, scroll=pygame.Vector2()):
'Render a string on a surface at a location'
if type(text) is not str:
raise TypeError('text has to be a string')
x_offset, y_offset = 0, 0
for character in text:
if character not in ('\n', ' ') and character in self.characters:
surface.blit(self.characters[character], (location[0] + x_offset - scroll[0], location[1] + y_offset - scroll[1]))
x_offset += self.characters[character].get_width() + 1 #+ 1 FOR SPACING
elif character == ' ' or character not in ['\n']:
x_offset += self.space_width + 1 #+ 1 FOR SPACING
else:
x_offset = 0
y_offset += self.character_height + 1 #+ 1 FOR SPACING
#RENDER
#FONT
#TEXTBUTTON
class TextButton:
'''A string of text that is also a button
The collide function is to collide with the mouse and clicks
It also needs a font size, which has to be either 'small' or 'large'
Use the location variable instead of the rect values
Attributes:
location
rect
size
test_font
text'''
#__INIT__
def __init__(self, text, location, font_size):
'Initialising a TextButton object'
if font_size != 'small' and font_size != 'large':
raise ValueError('font_size is not \'small\' or \'large\'')
if type(text) is not str:
raise TypeError('text is not a string')
self.text = text
self.location = pygame.Vector2(location)
self.test_font = Font(load_image(path.joinpath(f'font/{font_size}.png')))
self.size = self.test_font.get_size(text)
#__INIT__
#__REPR__
def __repr__(self):
'''Returns a string representation of the object
Returns: str'''
return f'pgeng.TextButton({tuple(self.location)})'
#__REPR__
#RECT
@property
def rect(self):
'''Returns the pygame.Rect object of the TextButton
Returns: pygame.Rect'''
self.location = pygame.Vector2(self.location)
return pygame.Rect(self.location, (self.size[0] - 1, self.size[1] + self.test_font.character_height)) #- 1 FOR THE EXTRA SPACING
#RECT
#SET_TEXT
def set_text(self, text):
'''Sets a new string as the text
All the variables will be updated, so the functions can be used the same'''
if type(text) is not str:
raise TypeError('text is not a string')
self.text = text
self.size = self.test_font.get_size(text)
#SET_TEXT
#COLLIDE
def collide(self, click, check_location=None):
'''Check for collision with the mouse location and, if click is True, for a click on the button
A custom location can be passed with check_location if pygame.mouse.get_pos() should not be used
The first value is True if the mouse has collided with the button, the second is True if the mouse clicked on it
Returns: tuple'''
check_location = pygame.mouse.get_pos() if check_location is None else check_location
if self.rect.collidepoint(check_location):
if click:
return True, True
return True, False
return False, False
#COLLIDE
#RENDER
def render(self, surface, font, scroll=pygame.Vector2()):
'Renders the text from the button'
if not isinstance(font, Font):
raise TypeError('font is not a Font object')
font.render(surface, self.text, self.location, scroll)
#RENDER
#TEXTBUTTON
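# Usage sketch (assumes the font images bundled under pgeng/font/ are present;
# the surface, colour, and text values are placeholders):
#
#     pygame.init()
#     screen = pygame.display.set_mode((320, 240))
#     small_font, large_font = create_font((255, 255, 255))
#     small_font.render(screen, 'Hello', (20, 60))
#     button = TextButton('Play', (20, 20), 'small')
#     hovered, clicked = button.collide(click=False)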
| 37.439394
| 356
| 0.673816
| 1,139
| 7,413
| 4.225637
| 0.170325
| 0.041139
| 0.018907
| 0.030542
| 0.323914
| 0.288386
| 0.288386
| 0.275504
| 0.250987
| 0.250987
| 0
| 0.029494
| 0.181303
| 7,413
| 198
| 357
| 37.439394
| 0.763552
| 0.288008
| 0
| 0.23
| 0
| 0.03
| 0.117191
| 0.007761
| 0
| 0
| 0
| 0
| 0
| 1
| 0.11
| false
| 0
| 0.04
| 0
| 0.27
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
251ac80cf768d166a984daeae7c4d2c5d7422487
| 1,814
|
py
|
Python
|
pyguetzli/pil_image.py
|
wanadev/pyguetzli
|
765cc89137e2f5fca80e5f894f4ec95c38995d96
|
[
"Apache-2.0"
] | 28
|
2017-05-03T17:48:21.000Z
|
2022-02-14T13:40:24.000Z
|
pyguetzli/pil_image.py
|
wanadev/pyguetzli
|
765cc89137e2f5fca80e5f894f4ec95c38995d96
|
[
"Apache-2.0"
] | 6
|
2017-08-21T07:52:18.000Z
|
2020-07-17T16:41:44.000Z
|
pyguetzli/pil_image.py
|
wanadev/pyguetzli
|
765cc89137e2f5fca80e5f894f4ec95c38995d96
|
[
"Apache-2.0"
] | 3
|
2018-03-13T23:33:10.000Z
|
2021-09-09T02:33:07.000Z
|
"""
This modules contain helper function to deal with PIL / Pillow Images.
.. note::
Please note that the ``[PIL]`` (pillow) extra dependency must be installed
to allow functions from this module to work.
"""
from . import guetzli
def _to_pil_rgb_image(image):
"""Returns an PIL Image converted to the RGB color space. If the image has
an alpha channel (transparency), it will be overlaid on a black background.
:param image: the PIL image to convert
:returns: The input image if it was already in RGB mode, or a new RGB image
if converted.
:raises ImportError: PIL / Pillow cannot be imported.
"""
if image.mode == "RGB":
return image
from PIL import Image
image.load()
rgb_image = Image.new("RGB", image.size, (0x00, 0x00, 0x00))
mask = None
if image.mode == "RGBA":
mask = image.split()[3] # bands: R=0, G=1, B=2, 1=3
rgb_image.paste(image, mask=mask)
return rgb_image
def process_pil_image(image, quality=guetzli.DEFAULT_JPEG_QUALITY):
"""Generates an optimized JPEG from a PIL image. If the image has an alpha
channel (transparency), it will be overlaid on a black background.
:param image: the PIL image
:param quality: the output JPEG quality (default 95)
:returns: Optimized JPEG bytes
:rtype: bytes
:raises ImportError: PIL / Pillow cannot be imported.
.. code:: python
import pyguetzli
from PIL import Image
image = Image.open("./test/image.jpg")
optimized_jpeg = pyguetzli.process_pil_image(image)
"""
image_rgb = _to_pil_rgb_image(image)
image_rgb_bytes = image_rgb.tobytes()
return guetzli.process_rgb_bytes(
image_rgb_bytes,
*image.size,
quality=quality
)
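# Behaviour sketch for the alpha handling (a minimal check written for this
# note, not taken from the original module):
#
#     from PIL import Image
#     rgba = Image.new("RGBA", (8, 8), (255, 0, 0, 128))
#     rgb = _to_pil_rgb_image(rgba)  # half-transparent red composited on black
#     assert rgb.mode == "RGB"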
| 26.676471
| 79
| 0.656009
| 254
| 1,814
| 4.582677
| 0.370079
| 0.085911
| 0.033505
| 0.022337
| 0.304124
| 0.233677
| 0.233677
| 0.161512
| 0.161512
| 0.161512
| 0
| 0.012602
| 0.25634
| 1,814
| 67
| 80
| 27.074627
| 0.850259
| 0.588754
| 0
| 0
| 0
| 0
| 0.015291
| 0
| 0
| 0
| 0.018349
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.1
| 0
| 0.35
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
251cba64cfe05ed7cdba8439be4d154984b803ea
| 12,053
|
py
|
Python
|
src/dip_main.py
|
BardiaMojra/dip
|
201bd14c13052b81967e051444f4e5c08c72631a
|
[
"MIT"
] | null | null | null |
src/dip_main.py
|
BardiaMojra/dip
|
201bd14c13052b81967e051444f4e5c08c72631a
|
[
"MIT"
] | null | null | null |
src/dip_main.py
|
BardiaMojra/dip
|
201bd14c13052b81967e051444f4e5c08c72631a
|
[
"MIT"
] | null | null | null |
''' dip
@author Bardia Mojra - 1000766739
@brief ee-5323 - project
@date 10/31/21
Code based on the YouTube tutorial and the pymotw.com socket-module documentation linked below.
@link https://www.youtube.com/watch?v=3QiPPX-KeSc
@link https://pymotw.com/2/socket/tcp.html
Python socket module documentation:
@link https://docs.python.org/3/library/socket.html
@link https://docs.python.org/3/howto/sockets.html
'''
import csv
import math
import numpy as np
import os
import pygame
import pyglet
from pyglet.window import key
import pymunk
import pymunk.constraints
import pymunk.pygame_util
import pandas as pd
import pyglet.gl as gl
''' custom libs
'''
import dvm
import tcm
''' NBUG
'''
from nbug import *
''' TEST CONFIG
'''
TEST_ID = 'Test 903'
SIM_DUR = 30.0 # in seconds
OUT_DIR = '../out/'
OUT_DATA = OUT_DIR+TEST_ID+'_data.csv'
CONF_DIR = '../config/'
# cart
m_c = 0.5
all_friction = 0.2
''' pendulum 1 '''
l_1 = 0.4 # 6, 5, 4, 7 -- 4 ->
m_1 = 0.2 # 2, 3, 4 -- 1 -> stable
m_1_moment = 0.01
m_1_radius = 0.05
''' pendulum 2 '''
l_2 = 0.7 # 6, 5, 7 -- 3 -> unstable
m_2 = 0.3 # 2, 3, 4 -- 2 -> unstable
m_2_moment = 0.001
m_2_radius = 0.05
# other config
output_labels=['t', 'x', 'dx', 'th_1', 'dth_1', 'th_2', 'dth_2']
# control config
# K gain matrix and Nbar found from modelling via Jupyter
# K = [16.91887353, 21.12423935, 137.96378003, -3.20040325, -259.72220049, -50.48383455]
# Nbar = 17.0
K = [51.43763708,
54.12690472,
157.5467596,
-21.67111679,
-429.11603909,
-88.73125241]
Nbar = 51.5
tConfig = tcm.test_configuration(TEST_ID=TEST_ID,
OUT_DIR=OUT_DIR,
OUT_DATA=OUT_DATA,
CONF_DIR=CONF_DIR,
SIM_DUR=SIM_DUR,
output_labels=output_labels,
all_friction=all_friction,
cart_mass=m_c,
pend_1_length=l_1,
pend_1_mass=m_1,
pend_1_moment=m_1_moment,
pend_2_length=l_2,
pend_2_mass=m_2,
pend_2_moment=m_2_moment,
K=K,
Nbar=Nbar)
# log test config
tcm.pkl(tConfig)
''' MOD CONFIG
'''
SCREEN_WIDTH = 700
SCREEN_HEIGHT = 500
# sim config
MAX_FORCE = 25
DT = 1 / 60.0
PPM = 200.0 # pixels per meter
END_ = 1000 # samples used for plotting and analysis
SHOW_ = True
cart_size = 0.3, 0.2
white_color = (0,0,0,0)
black_color = (255,255,255,255)
green_color = (0,135,0,255)
red_color = (135,0,0,255)
blue_color = (0,0,135,255)
''' main
'''
pygame.init()
# screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
surface = pygame.Surface((SCREEN_WIDTH, SCREEN_HEIGHT))
# clock = pygame.time.Clock()
window = pyglet.window.Window(SCREEN_WIDTH, SCREEN_HEIGHT, vsync=False, caption='Double Inverted Pendulum Simulation')
gl.glClearColor(255,255,255,255)
# setup the space
space = pymunk.Space()
# options = pymunk.pygame_util.DrawOptions(surface)
# space.debug_draw(options)
space.gravity = 0, -9.81
# space.debug_draw(options)
fil = pymunk.ShapeFilter(group=1)
# screen.fill(pygame.Color("white"))
# options = pymunk.pygame_util.DrawOptions(screen)
# space.debug_draw(options)
# ground
ground = pymunk.Segment(space.static_body, (-4, -0.1), (4, -0.1), 0.1)
# ground.color = pygame.Color("pink")
ground.friction = all_friction
ground.filter = fil
space.add(ground)
# space.debug_draw(options)
# cart
cart_moment = pymunk.moment_for_box(m_c, cart_size)
cart_body = pymunk.Body(mass=m_c, moment=cart_moment)
cart_body.position = 0.0, cart_size[1] / 2
cart_shape = pymunk.Poly.create_box(cart_body, cart_size)
cart_shape.color = black_color
# cart_shape.color = red_color
# cart_shape.fill_color = red_color
# cart_shape.color = black_color
cart_shape.friction = ground.friction
space.add(cart_body, cart_shape)
# space.debug_draw(options)
# pendulum 1
pend_1_body = pymunk.Body(mass=m_1, moment=m_1_moment)
pend_1_body.position = cart_body.position[0], cart_body.position[1] + cart_size[1] / 2 + l_1
pend_shape = pymunk.Circle(pend_1_body, m_1_radius)
pend_shape.filter = fil
space.add(pend_1_body, pend_shape)
# joint
joint = pymunk.constraints.PivotJoint(cart_body, pend_1_body, cart_body.position + (0, cart_size[1] / 2))
joint.collide_bodies = False
space.add(joint)
# pendulum 2
pend_2_body = pymunk.Body(mass=m_2, moment=m_2_moment)
pend_2_body.position = cart_body.position[0], cart_body.position[1] + cart_size[1] / 2 + (2 * l_2)
pend_shape2 = pymunk.Circle(pend_2_body, m_2_radius)
pend_shape2.filter = fil
space.add(pend_2_body, pend_shape2)
# joint 2
joint2 = pymunk.constraints.PivotJoint(pend_1_body, pend_2_body, cart_body.position + (0, cart_size[1] / 2 + l_2))
joint2.collide_bodies = False
space.add(joint2)
# space.debug_draw(options)
print(f"cart mass = {cart_body.mass:0.1f} kg")
print(f"pendulum 1 mass = {pend_1_body.mass:0.1f} kg, pendulum moment = {pend_1_body.moment:0.3f} kg*m^2")
print(f"pendulum 2 mass = {pend_2_body.mass:0.1f} kg, pendulum moment = {pend_2_body.moment:0.3f} kg*m^2")
force = 0.0
ref = 0.0
color = (200, 200, 200, 200)
label_x = pyglet.text.Label(text='', font_size=12, color=color, x=10, y=SCREEN_HEIGHT - 28)
label_th_1 = pyglet.text.Label(text='', font_size=12, color=color, x=10, y=SCREEN_HEIGHT - 58)
label_th_2 = pyglet.text.Label(text='', font_size=12, color=color, x=10, y=SCREEN_HEIGHT - 88)
label_force = pyglet.text.Label(text='', font_size=12, color=color, x=10, y=SCREEN_HEIGHT - 118)
labels = [label_x, label_th_1, label_th_2, label_force]
# data recorder so we can compare our results to our predictions
if os.path.exists(OUT_DATA):
os.remove(OUT_DATA)
with open(OUT_DATA, 'w') as f:
output_header = str()
for i, s in enumerate(output_labels):
if i == 0:
output_header = s
else:
output_header += ', '+s
output_header += '\n'
f.write(output_header)
f.close()
currtime = 0.0
record_data = True
def draw_body(offset, body):
for shape in body.shapes:
if isinstance(shape, pymunk.Circle):
vertices = []
num_points = 10
for ii in range(num_points):
angle = ii / num_points * 2 * math.pi
vertices.append(body.position + (shape.radius * math.cos(angle), shape.radius * math.sin(angle)))
points = []
for v in vertices:
points.append(int(v[0] * PPM) + offset[0])
points.append(int(v[1] * PPM) + offset[1])
data = ('v2i', tuple(points))
gl.glColor3b(255,255,255)
pyglet.graphics.draw(len(vertices), pyglet.gl.GL_LINE_LOOP, data)
elif isinstance(shape, pymunk.Poly):
# get vertices in world coordinates
vertices = [v.rotated(body.angle) + body.position for v in shape.get_vertices()]
# convert vertices to pixel coordinates
points = []
for v in vertices:
points.append(int(v[0] * PPM) + offset[0])
points.append(int(v[1] * PPM) + offset[1])
data = ('v2i', tuple(points))
gl.glColor3b(255,255,255)
pyglet.graphics.draw(len(vertices), pyglet.gl.GL_LINE_LOOP, data)
def draw_line_between(offset, pos1, pos2):
vertices = [pos1, pos2]
points = []
for v in vertices:
points.append(int(v[0] * PPM) + offset[0])
points.append(int(v[1] * PPM) + offset[1])
data = ('v2i', tuple(points))
gl.glColor3b(255,255,255)
pyglet.graphics.draw(len(vertices), pyglet.gl.GL_LINE_STRIP, data)
def draw_ground(offset):
vertices = [v + (0, ground.radius) for v in (ground.a, ground.b)]
# convert vertices to pixel coordinates
points = []
for v in vertices:
points.append(int(v[0] * PPM) + offset[0])
points.append(int(v[1] * PPM) + offset[1])
data = ('v2i', tuple(points))
pyglet.graphics.draw(len(vertices), pyglet.gl.GL_LINES, data)
@window.event
def on_draw():
window.clear()
# center view x around 0
offset = (250, 5)
draw_body(offset, cart_body)
draw_body(offset, pend_1_body)
draw_line_between(offset, cart_body.position + (0, cart_size[1] / 2), pend_1_body.position)
draw_body(offset, pend_2_body)
draw_line_between(offset, pend_1_body.position, pend_2_body.position)
draw_ground(offset)
for label in labels:
label.draw()
@window.event
def on_key_press(symbol, modifiers):
# Symbolic names:
if symbol == key.ESCAPE:
window.close()
def simulate(_):
global currtime
if currtime > SIM_DUR:
window.close()
# nprint('_',_)
# ensure we get a consistent simulation step - ignore the input dt value
dt = DT
# simulate the world
# NOTE: using substeps will mess up gains
space.step(dt)
# populate the current state
posx = cart_body.position[0]
velx = cart_body.velocity[0]
th_1 = pend_1_body.angle
th_1v = pend_1_body.angular_velocity
th_2 = pend_2_body.angle
th_2v = pend_2_body.angular_velocity
# dump our data so we can plot
if record_data:
with open(OUT_DATA, 'a+') as f:
f.write(f"{currtime:0.5f}, {posx:0.5f}, {velx:0.5f}, {th_1:0.5f}, {th_1v:0.5f}, {th_2:0.5f}, {th_2v:0.5f} \n")
f.close()
currtime += dt
# calculate our gain based on the current state
gain = K[0] * posx + K[1] * velx + K[2] * th_1 + K[3] * th_1v + K[4] * th_2 + K[5] * th_2v
# calculate the force required
global force
force = ref * Nbar - gain
# kill our motors if our angles get out of control
if math.fabs(pend_1_body.angle) > 1.0 or math.fabs(pend_2_body.angle) > 1.0:
force = 0.0
# cap our maximum force so it doesn't go crazy
if math.fabs(force) > MAX_FORCE:
force = math.copysign(MAX_FORCE, force)
# apply force to cart center of mass
cart_body.apply_force_at_local_point((force, 0.0), (0, 0))
def update_state_label(_):
'''
function to store the current state to draw on screen
'''
label_x.text = f'x: {cart_body.position[0]:0.3f} m'
label_th_1.text = f'theta_1: {pend_1_body.angle:0.3f} rad'
label_th_2.text = f'theta_2: {pend_2_body.angle:0.3f} rad'
label_force.text = f'force: {force:0.1f} N'
def update_reference(_, newref):
global ref
ref = newref
# callback for simulation
pyglet.clock.schedule_interval(simulate, DT)
pyglet.clock.schedule_interval(update_state_label, 0.25)
# schedule some small movements by updating our reference
pyglet.clock.schedule_once(update_reference, 2, 0.2)
pyglet.clock.schedule_once(update_reference, 7, 0.6)
pyglet.clock.schedule_once(update_reference, 12, 0.2)
pyglet.clock.schedule_once(update_reference, 17, 0.0)
pyglet.app.run()
f.close()
# data recorder so we can compare our results to our predictions
# f = open(OUT_DATA, 'r')
# ['t', 'x', 'dx', 'th_1', 'dth_1', 'th_2', 'dth_2', 'L1', 'L2']
# for i in test_IDs:
tConfig = tcm.unpkl(TEST_ID, CONF_DIR)
df = pd.read_csv(tConfig.out_data)
df = dvm.get_losses(df,
dataPath=tConfig.data_path,
lossPath=tConfig.loss_path)
# plot pose
# ['t', 'x', 'dx', 'th_1', 'dth_1', 'th_2', 'dth_2', 'L1', 'L2']
cols = [0, 1, 3, 5]
xy_df = df.iloc[:,cols].copy()
dvm.plot_df(xy_df,
plot_title='State Position',
labels=xy_df.columns,
test_id=tConfig.id,
out_dir=tConfig.out_dir,
end=END_,
show=SHOW_)
# plot vel
# ['t', 'x', 'dx', 'th_1', 'dth_1', 'th_2', 'dth_2', 'L1', 'L2']
cols = [0, 2, 4, 6]
xy_df = df.iloc[:,cols].copy()
dvm.plot_df(xy_df,
plot_title='State Velocity',
labels=xy_df.columns,
test_id=tConfig.id,
out_dir=tConfig.out_dir,
end=END_,
show=SHOW_)
# plot losses
# ['t', 'x', 'dx', 'th_1', 'dth_1', 'th_2', 'dth_2', 'L1', 'L2']
cols = [0, 7, 8]
xy_df = df.iloc[:,cols].copy()
dvm.plot_df(xy_df,
plot_title='State Losses',
labels=xy_df.columns,
test_id=tConfig.id,
out_dir=tConfig.out_dir,
end=END_,
show=SHOW_)
# print losses
dvm.print_losses(df)
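# State-feedback sketch (the same control law used in simulate() above; the
# state vector here is an illustrative sample, not simulation output):
#
#     state = [0.05, 0.0, 0.02, 0.0, -0.01, 0.0]  # [x, dx, th_1, dth_1, th_2, dth_2]
#     gain = sum(k * s for k, s in zip(K, state))
#     force_demo = 0.2 * Nbar - gain               # ref = 0.2 m, as scheduled above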
| 31.064433
| 118
| 0.652867
| 1,937
| 12,053
| 3.867321
| 0.206505
| 0.012014
| 0.018022
| 0.018155
| 0.34428
| 0.28087
| 0.254172
| 0.240822
| 0.213056
| 0.204779
| 0
| 0.064702
| 0.208828
| 12,053
| 387
| 119
| 31.144703
| 0.720847
| 0.207085
| 0
| 0.213178
| 0
| 0.011628
| 0.064751
| 0.02069
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031008
| false
| 0
| 0.05814
| 0
| 0.089147
| 0.015504
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
251d295ac1daf4f6c0aa7d07697c6e03ea7c9186
| 1,128
|
py
|
Python
|
generator/apigen/CommandParser.py
|
grbd/GBD.Build.BlackJack
|
3e8d027625b7528af3674a373fd9931e3feaaab4
|
[
"Apache-2.0"
] | 1
|
2017-05-26T00:18:26.000Z
|
2017-05-26T00:18:26.000Z
|
generator/apigen/CommandParser.py
|
grbd/GBD.Build.BlackJack
|
3e8d027625b7528af3674a373fd9931e3feaaab4
|
[
"Apache-2.0"
] | null | null | null |
generator/apigen/CommandParser.py
|
grbd/GBD.Build.BlackJack
|
3e8d027625b7528af3674a373fd9931e3feaaab4
|
[
"Apache-2.0"
] | null | null | null |
"""
A Command parser to parse over each jinja template for a given cmake command
"""
import os
from apigen.Logger import Logger
from jinja2 import Environment, PackageLoader, FileSystemLoader
class CommandParser(object):
def __init__(self, cmdfile: str, env: Environment, outdir: str):
super().__init__()
self.__log = Logger.getlogger()
self.CommandFilePath = cmdfile
self.__env = env
self.OutputDirectory = outdir
def ParseFile(self):
cmd_basefilename = os.path.basename(self.CommandFilePath)
self.__log.info("Parsing File: " + cmd_basefilename)
cmd_name = os.path.splitext(cmd_basefilename)[0]
cmd_outfile = os.path.join(self.OutputDirectory, cmd_basefilename)
#if (cmd_basefilename != "add_executable.py"):
# return
# Render the command output from the template
template = self.__env.get_template(cmd_basefilename)
tmpl_output = template.render(CmdName=cmd_name)
# Save the File
with open(cmd_outfile, "w") as text_file:
text_file.write(tmpl_output)
return
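# Driver sketch (hypothetical directory names; assumes the jinja Environment
# is built over the same directory ParseFile() pulls templates from by basename):
#
#     from jinja2 import Environment, FileSystemLoader
#     env = Environment(loader=FileSystemLoader('templates'))
#     for name in os.listdir('templates'):
#         CommandParser(os.path.join('templates', name), env, 'out').ParseFile()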
| 31.333333
| 76
| 0.675532
| 133
| 1,128
| 5.488722
| 0.503759
| 0.123288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002323
| 0.236702
| 1,128
| 35
| 77
| 32.228571
| 0.845528
| 0.16844
| 0
| 0
| 0
| 0
| 0.016181
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.15
| 0
| 0.35
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2521a1ac6de3b8964ba83ce10e729714793f678d
| 2,578
|
py
|
Python
|
cineapp/push.py
|
ptitoliv/cineapp
|
4b6a8c68144436c5497353135a013ea783cfd224
|
[
"MIT"
] | 2
|
2016-12-02T02:29:01.000Z
|
2019-03-03T15:48:50.000Z
|
cineapp/push.py
|
ptitoliv/cineapp
|
4b6a8c68144436c5497353135a013ea783cfd224
|
[
"MIT"
] | 128
|
2016-05-22T21:44:20.000Z
|
2022-03-11T23:14:18.000Z
|
cineapp/push.py
|
ptitoliv/cineapp
|
4b6a8c68144436c5497353135a013ea783cfd224
|
[
"MIT"
] | 1
|
2017-08-20T14:14:52.000Z
|
2017-08-20T14:14:52.000Z
|
from __future__ import print_function
from cineapp import app, db, lm
from flask_login import login_required
from flask import jsonify, session, g, url_for, request
from pywebpush import webpush, WebPushException
from cineapp.models import PushNotification
import json, traceback, sys, datetime, time
from cineapp.auth import guest_control


@app.route('/notifications/subscribe', methods=['POST'])
@login_required
@guest_control
def notification_subscribe():
    app.logger.info('New user subscription !!')
    subscription = request.get_json()
    app.logger.info('User id: %s, Subscription data: %s' % (g.user.id, subscription))

    # Let's register the subscription message into the database
    push_notification = PushNotification(endpoint_id=subscription["endpoint"], public_key=subscription["keys"]["p256dh"], auth_token=subscription["keys"]["auth"], session_id=session.sid, user_id=g.user.id)

    # Store the subscription data into the database
    try:
        db.session.add(push_notification)
        db.session.commit()
        app.logger.info('User subscription correctly stored into database')
    except Exception as e:
        app.logger.error('Unable to store subscription user in database %s', repr(e))
        return jsonify({ "status": "failure", "message": u"Unable to store subscription object into database" })
    return jsonify({ "status": "success", "message": u"Endpoint registered" })


def notification_send(text, active_subscriptions):
    for cur_active_sub in active_subscriptions:
        try:
            expiration_date = time.mktime((datetime.datetime.now() + datetime.timedelta(hours=12)).timetuple())
            webpush(cur_active_sub.serialize(),
                    data=json.dumps({ "url": url_for('chat'), "message_title": "Message from the chat", "message": text }),
                    vapid_private_key=app.config["NOTIF_PRIVATE_KEY_PATH"],
                    vapid_claims={
                        "sub": "mailto:ptitoliv@gmail.com",
                        "exp": expiration_date
                    }
                    )
        except WebPushException as ex:
            # If there is an error, let's remove the subscription
            app.logger.error("Subscription for endpoint %s is incorrect ==> Delete it", cur_active_sub)
            traceback.print_exc(file=sys.stdout)
            # Let's remove the notification
            notification_unsubscribe(cur_active_sub)
            print("I'm sorry, Dave, but I can't do that: {}".format(repr(ex)))
            print(ex.response.json())


def notification_unsubscribe(sub):
    try:
        db.session.delete(sub)
        db.session.commit()
        app.logger.info('User subscription correctly deleted from database')
        return True
    except Exception as e:
        app.logger.error('Unable to remove subscription user in database %s', repr(e))
        return False
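# Shape of the JSON body that /notifications/subscribe expects, per the fields
# read above (the values here are placeholders, not real keys):
#
#     {
#         "endpoint": "https://push.example.com/endpoint-id",
#         "keys": {"p256dh": "<public-key>", "auth": "<auth-token>"}
#     }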
| 39.060606
| 202
| 0.747867
| 350
| 2,578
| 5.382857
| 0.397143
| 0.033439
| 0.027601
| 0.02707
| 0.139066
| 0.139066
| 0.139066
| 0.139066
| 0.098726
| 0
| 0
| 0.002236
| 0.132661
| 2,578
| 65
| 203
| 39.661538
| 0.84034
| 0.069822
| 0
| 0.137255
| 0
| 0
| 0.255328
| 0.02967
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.156863
| 0
| 0.294118
| 0.078431
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
25249f6ffc68bd327fd5d0540e42e061ccc8880f
| 4,577
|
py
|
Python
|
Codes/trreemap.py
|
Pepeisadog/Project
|
49d77b1590723f87111a0e3a64bd94fa4bb65986
|
[
"Unlicense"
] | null | null | null |
Codes/trreemap.py
|
Pepeisadog/Project
|
49d77b1590723f87111a0e3a64bd94fa4bb65986
|
[
"Unlicense"
] | 3
|
2015-01-12T09:33:30.000Z
|
2015-01-29T22:56:47.000Z
|
Codes/trreemap.py
|
Pepeisadog/Project
|
49d77b1590723f87111a0e3a64bd94fa4bb65986
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 25 15:48:52 2015
@author: Sofia
"""
import csv
import json
import os
sourceEncoding = "iso-8859-1"
targetEncoding = "utf-8"
# encode files to utf8 (source: http://stackoverflow.com/questions/191359/how-to-convert-a-file-to-utf-8-in-python)
csvfile = open('..\Data\AMFI.csv',"r")
csvfile_encoded = open("..\Data\AMFI_encoded.csv", "w")
csvfile_encoded.write(unicode(csvfile.read(), sourceEncoding).encode(targetEncoding))
csvfile_encoded.close()
csvfile = open('..\Data\AMFI_categories.csv',"r")
csvfile_encoded = open("..\Data\AMFIcategories_encoded.csv", "w")
csvfile_encoded.write(unicode(csvfile.read(), sourceEncoding).encode(targetEncoding))
csvfile_encoded.close()
csvfile = open('..\Data\AMFI_domains.csv',"r")
csvfile_encoded = open("..\Data\AMFIdomains_encoded.csv", "w")
csvfile_encoded.write(unicode(csvfile.read(), sourceEncoding).encode(targetEncoding))
csvfile_encoded.close()
# open files
AMFI_books = open("..\Data\AMFI_encoded.csv","r")
AMFI_categories = open("..\Data\AMFIcategories_encoded.csv","r")
AMFI_domains = open("..\Data\AMFIdomains_encoded.csv","r")
# define fieldnames
fieldnames_books = ("Callnumber","Barcode","Title","Year","Location")
fieldnames_categories = ("Barcode","Category")
# put data in reader
reader_books = csv.DictReader(AMFI_books, fieldnames_books, delimiter=';')
reader_categories = csv.DictReader(AMFI_categories, fieldnames_categories, delimiter = ';')
reader_domains = csv.DictReader(AMFI_domains, delimiter = ';')
output = {"name": "Library of the University of Applied Sciences", "type":"parent", "total":5605, "value":50, "children": []}
# get data from reader_books
barcode_books = []
names_books = []
tags_books = []
copies = []
for books in reader_books:
barcode_books.append(books["Callnumber"])
names_books.append(books["Title"])
tags_books.append(books["Barcode"])
tags = []
size_books = len(barcode_books)
# Modify data books
for k in range(0, len(names_books), 1):
# count copies
count = names_books.count(names_books[k])
copies.append(count)
# collect unique ids
indeces = [i for i, x in enumerate(names_books) if x == names_books[k]]
if len(indeces) == 1:
tags.append(tags_books[indeces[0]])
else:
list_tags = []
for w in range(0,len(indeces),1):
tag = tags_books[indeces[w]]
list_tags.append(tag)
tags.append(list_tags)
# mark duplicate titles as NaN
for t in range(1,len(indeces),1):
names_books[indeces[t]] = "NaN"
# Enter domains
barcode_domain = []
for domain in reader_domains:
output["children"].append({
"type": "domain",
"name": domain["Domain"],
"barcode": domain["Barcode"],
"value": 6,
"children": []
})
barcode_domain.append(domain["Barcode"])
# get category data
barcode_category = []
names_category = []
for category in reader_categories:
barcode_category.append(category["Barcode"])
names_category.append(category["Category"])
# Enter categories
for i in range(0,len(barcode_domain),1):
barcode_domain_values = output["children"][i]["barcode"]
for j in range(0,len(barcode_category),1):
if barcode_category[j] < barcode_domain_values:
if names_category[j] != "NaN":
output["children"][i]["children"].append({
"type":"category",
"barcode": barcode_category[j],
"value": 5,
"name": names_category[j],
"children": []
})
names_category[j] = "NaN"
# append data to output
lengths = []
codes_categories =[]
for i in range(0,len(barcode_domain),1):
lengths.append(len(output["children"][i]["children"]))
for k in range(0, lengths[i], 1):
#counter = 0
codes_categories = output["children"][i]["children"][k]["barcode"]
for j in range(0,len(names_books),1):
if barcode_books[j] < codes_categories:
if names_books[j] != "NaN":
output["children"][i]["children"][k]["children"].append({
"type":"book",
"barcode": barcode_books[j],
"tags": tags[j],
"value": 2,
"name": names_books[j],
"copies": copies[j]
})
names_books[j] = "NaN"
# write data to file
with open('../Data/tree.json', 'w') as f:
json.dump(output, f, indent=True)
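# Sketch of the nested structure this writes to ../Data/tree.json (field
# values illustrative): root -> domain -> category -> book.
#
#     {"name": "...", "type": "parent", "children": [
#         {"type": "domain", "name": "...", "children": [
#             {"type": "category", "name": "...", "children": [
#                 {"type": "book", "name": "...", "tags": [...], "copies": 2}]}]}]}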
| 31.136054
| 125
| 0.622023
| 560
| 4,577
| 4.941071
| 0.228571
| 0.043368
| 0.020239
| 0.023853
| 0.284785
| 0.226961
| 0.179256
| 0.151428
| 0.151428
| 0.151428
| 0
| 0.015402
| 0.219795
| 4,577
| 146
| 126
| 31.349315
| 0.759451
| 0.095259
| 0
| 0.132653
| 0
| 0
| 0.16861
| 0.055637
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030612
| 0
| 0.030612
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2526119172205dbcc83b912e56e47b1cfd9d139b
| 3,751
|
py
|
Python
|
test_haystack/whoosh_tests/test_whoosh_management_commands.py
|
cbows/django-haystack
|
80c154b7b11fdcf99dd2ef0e82342ed13e26053a
|
[
"BSD-3-Clause"
] | 2,021
|
2015-02-06T07:45:08.000Z
|
2022-03-30T12:26:39.000Z
|
test_haystack/whoosh_tests/test_whoosh_management_commands.py
|
cbows/django-haystack
|
80c154b7b11fdcf99dd2ef0e82342ed13e26053a
|
[
"BSD-3-Clause"
] | 787
|
2015-02-03T20:06:04.000Z
|
2022-03-30T09:00:38.000Z
|
test_haystack/whoosh_tests/test_whoosh_management_commands.py
|
cbows/django-haystack
|
80c154b7b11fdcf99dd2ef0e82342ed13e26053a
|
[
"BSD-3-Clause"
] | 878
|
2015-02-04T15:29:50.000Z
|
2022-03-28T16:51:44.000Z
|
import datetime
import os
import unittest
from io import StringIO
from tempfile import mkdtemp
from unittest.mock import patch
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management import call_command as real_call_command
from django.core.management.base import CommandError
from django.test import TestCase
from whoosh.qparser import QueryParser
from haystack import connections, constants, indexes
from haystack.utils.loading import UnifiedIndex
from ..core.models import MockModel
from .test_whoosh_backend import WhooshMockSearchIndex
from .testcases import WhooshTestCase
def call_command(*args, **kwargs):
kwargs["using"] = ["whoosh"]
print(args, kwargs)
real_call_command(*args, **kwargs)
class ManagementCommandTestCase(WhooshTestCase):
fixtures = ["bulk_data"]
def setUp(self):
super().setUp()
self.old_ui = connections["whoosh"].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshMockSearchIndex()
self.ui.build(indexes=[self.wmmi])
self.sb = connections["whoosh"].get_backend()
connections["whoosh"]._index = self.ui
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.sample_objs = MockModel.objects.all()
def tearDown(self):
connections["whoosh"]._index = self.old_ui
super().tearDown()
def verify_indexed_document_count(self, expected):
with self.raw_whoosh.searcher() as searcher:
count = searcher.doc_count()
self.assertEqual(count, expected)
def verify_indexed_documents(self):
"""Confirm that the documents in the search index match the database"""
with self.raw_whoosh.searcher() as searcher:
count = searcher.doc_count()
self.assertEqual(count, 23)
indexed_doc_ids = set(i["id"] for i in searcher.documents())
expected_doc_ids = set(
"core.mockmodel.%d" % i
for i in MockModel.objects.values_list("pk", flat=True)
)
self.assertSetEqual(indexed_doc_ids, expected_doc_ids)
def test_basic_commands(self):
call_command("clear_index", interactive=False, verbosity=0)
self.verify_indexed_document_count(0)
call_command("update_index", verbosity=0)
self.verify_indexed_documents()
call_command("clear_index", interactive=False, verbosity=0)
self.verify_indexed_document_count(0)
call_command("rebuild_index", interactive=False, verbosity=0)
self.verify_indexed_documents()
def test_remove(self):
call_command("clear_index", interactive=False, verbosity=0)
self.verify_indexed_document_count(0)
call_command("update_index", verbosity=0)
self.verify_indexed_documents()
# Remove several instances.
MockModel.objects.get(pk=1).delete()
MockModel.objects.get(pk=2).delete()
MockModel.objects.get(pk=8).delete()
self.verify_indexed_document_count(23)
# Plain ``update_index`` doesn't fix it.
call_command("update_index", verbosity=0)
self.verify_indexed_document_count(23)
# … but remove does:
call_command("update_index", remove=True, verbosity=0)
self.verify_indexed_document_count(20)
def test_multiprocessing(self):
call_command("clear_index", interactive=False, verbosity=0)
self.verify_indexed_document_count(0)
call_command("update_index", verbosity=2, workers=2, batchsize=5)
self.verify_indexed_documents()
| 33.491071
| 84
| 0.691816
| 454
| 3,751
| 5.517621
| 0.292952
| 0.061477
| 0.074651
| 0.071856
| 0.351697
| 0.33014
| 0.312974
| 0.291816
| 0.272655
| 0.253094
| 0
| 0.009109
| 0.209811
| 3,751
| 111
| 85
| 33.792793
| 0.83502
| 0.039989
| 0
| 0.2625
| 0
| 0
| 0.050654
| 0
| 0
| 0
| 0
| 0
| 0.0375
| 1
| 0.1
| false
| 0
| 0.2125
| 0
| 0.3375
| 0.0125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
25282fa8805725b2acc31f9c959840083384e1e2
| 2,977
|
py
|
Python
|
src/server.py
|
tyler-fishbone/http_server
|
93a49090d356b31522acd5bc3a25a1c8a3b604e3
|
[
"MIT"
] | null | null | null |
src/server.py
|
tyler-fishbone/http_server
|
93a49090d356b31522acd5bc3a25a1c8a3b604e3
|
[
"MIT"
] | null | null | null |
src/server.py
|
tyler-fishbone/http_server
|
93a49090d356b31522acd5bc3a25a1c8a3b604e3
|
[
"MIT"
] | null | null | null |
from http.server import HTTPServer, BaseHTTPRequestHandler
from urllib.parse import urlparse, parse_qs
from cowpy import cow
import json
import sys


class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):

    def do_GET(self):
        parsed_path = urlparse(self.path)
        parsed_qs = parse_qs(parsed_path.query)
        # import pdb; pdb.set_trace()
        if parsed_path.path == '/':
            self.send_response(200)
            self.end_headers()
            self.wfile.write(return_html_string())
            return
        elif parsed_path.path == '/cowsay':
            self.send_response(200)
            self.end_headers()
            self.wfile.write(b'Helpful instructions about this application')
            return
        elif parsed_path.path == '/cow':
            try:
                # import pdb; pdb.set_trace()
                msg = parsed_qs['msg'][0]
                print(msg)
            except (KeyError, json.decoder.JSONDecodeError):
                self.send_response(400)
                self.end_headers()
                self.wfile.write(b'You did a bad thing')
                return
            cheese = cow.Moose(thoughts=True)
            message = cheese.milk(msg)
            self.send_response(200)
            self.end_headers()
            self.wfile.write(message.encode('utf8'))
            return
        else:
            self.send_response(404)
            self.end_headers()
            self.wfile.write(b'Not Found')

    def do_POST(self):
        parsed_path = urlparse(self.path)
        parsed_qs = parse_qs(parsed_path.query)
        if parsed_path.path == '/cow':
            try:
                msg = parsed_qs['msg'][0]
                cheese = cow.Moose(thoughts=True)
                message = cheese.milk(msg)
                post_dict = {}
                post_dict['content'] = message
                self.send_response(200)
                self.end_headers()
                self.wfile.write(json.dumps(post_dict).encode('utf8'))
                return
            except (KeyError, json.decoder.JSONDecodeError):
                self.send_response(400)
                self.end_headers()
                self.wfile.write(b'You did a bad thing')
                return


def create_server():
    return HTTPServer(('127.0.0.1', 3000), SimpleHTTPRequestHandler)


def run_forever():
    server = create_server()
    try:
        print('Starting server on port 3000')
        server.serve_forever()
    except KeyboardInterrupt:
        server.shutdown()
        server.server_close()
        # sys.exit()


def return_html_string():
    return b'''<!DOCTYPE html>
<html>
<head>
<title> cowsay </title>
</head>
<body>
<header>
<nav>
<ul>
<li><a href="/cowsay">cowsay</a></li>
</ul>
</nav>
</header>
<main>
<!-- project description -->
</main>
</body>
</html>'''


if __name__ == '__main__':
    run_forever()
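# Example requests against the running server (routes grounded in the handlers
# above; start the server first with `python src/server.py`):
#
#     GET  http://127.0.0.1:3000/            -> HTML landing page
#     GET  http://127.0.0.1:3000/cow?msg=hi  -> cowsay-rendered "hi"
#     POST http://127.0.0.1:3000/cow?msg=hi  -> {"content": "..."} JSON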
| 26.114035
| 76
| 0.543164
| 318
| 2,977
| 4.915094
| 0.320755
| 0.051184
| 0.071657
| 0.080614
| 0.506718
| 0.414587
| 0.414587
| 0.395393
| 0.395393
| 0.336532
| 0
| 0.01998
| 0.344306
| 2,977
| 113
| 77
| 26.345133
| 0.780738
| 0.02217
| 0
| 0.431818
| 0
| 0
| 0.155487
| 0.01032
| 0
| 0
| 0
| 0
| 0
| 1
| 0.056818
| false
| 0
| 0.056818
| 0.022727
| 0.215909
| 0.022727
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2529f17c13ced51c4629d6195cff0d46c5800cac
| 7,033
|
py
|
Python
|
Chapter06/6B_TrendFollowings/6B_3_RunCNN.py
|
uyenphuong18406/Hands-On-Artificial-Intelligence-for-Banking
|
3a10a14194368478bb8b78d3d17e9c6a7b7253db
|
[
"MIT"
] | 115
|
2020-06-18T15:00:58.000Z
|
2022-03-02T10:13:19.000Z
|
Chapter06/6B_TrendFollowings/6B_3_RunCNN.py
|
uyenphuong18406/Hands-On-Artificial-Intelligence-for-Banking
|
3a10a14194368478bb8b78d3d17e9c6a7b7253db
|
[
"MIT"
] | 2
|
2020-11-06T11:02:31.000Z
|
2021-01-22T12:44:35.000Z
|
Chapter06/6B_TrendFollowings/6B_3_RunCNN.py
|
uyenphuong18406/Hands-On-Artificial-Intelligence-for-Banking
|
3a10a14194368478bb8b78d3d17e9c6a7b7253db
|
[
"MIT"
] | 60
|
2020-07-22T14:53:10.000Z
|
2022-03-23T10:17:59.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 30 00:58:34 2018
@author: jeff
"""
'''*************************************
#1. Import libraries and key variable values
'''
import os
import quandl
import pandas as pd
import numpy as np
import keras
from PIL import Image
#folder path
folder_path = os.path.dirname(__file__)
#date range for full dataset
str_dte = '2003-01-01'
end_dte = '2018-7-31'
date_dict = {'gte':str_dte, 'lte':end_dte}
#Dates for back-testing
start_dte = '2015-1-1'
#Create list of dates
datelist = pd.date_range(start_dte, periods=365*2).tolist()
#API key for quandl
quandl.ApiConfig.api_key = '[quandl id]'
#Parameters for the image generation
col_num_mid = 10
col_num_dte = 9
pixel_size = 100
window_size = 60
pred_window_size = 1
#model path
model_path = "model2_2DCov.h5"
model = keras.models.load_model(model_path)
#number of channel for the image
num_channel=1
#strategies parameters
curr_pnl = 10000
curr_pnl_0=curr_pnl
curr_pnl_1=curr_pnl
curr_pnl_2=curr_pnl
quant_trans_0 = 0
quant_trans_1 = 0
quant_trans_2 = 0
min_pnl = 0.0005
trading_cost = 0
trade_limit = 0.5
'''*************************************
#2. Define functions
'''
#input_X is a series of price
#output_X is a series of price expressed in pixel
def rescale(input_X, pixel, min_x,max_x):
unit = (max_x - min_x)/pixel
output_X = round((input_X-min_x)/unit,0)
return output_X,unit
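#worked example (illustrative): for prices spanning min_x=100 to max_x=110 with
#pixel=100, unit = (110 - 100) / 100 = 0.1, so a mid price of 105 maps to pixel
#round((105 - 100) / 0.1) = 50; each pixel therefore represents `unit` dollars.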
'''*************************************
#3. Running the test
'''
#Get the data
tkr = 'VTV'
df =quandl.get_table('SHARADAR/SFP',date=date_dict,ticker=tkr)
df = df.sort_values(by=['date'])
df=df.reset_index(drop=True)
#write header for the log of the strategy back-testing
f = open('log.txt','w+')
f.write('strategy\tBuySell\t' + 'dte' +'\t'+ 'cost' +'\t'+ 'T+1_actual' +'\t'+ 'T+1_pred'+'\t'+ 'Quantity'+'\t'+ 'PnL'+'\n')
#loop through the dates
for pred_dte in datelist:
df_i = df.index[df['date']==pred_dte]
#make sure both start and end dates are valid
if df_i.empty:
print('no data')
continue
df_i = df_i[0]
print(pred_dte)
df_start = df_i-(window_size) #starts at zero
if df_start < 0: #in case the date inputted is not valid
print('later date')
continue
#prepare the input data
df['mid'] = (df['high'] + df['low'])/2
df_plot = df.iloc[df_start:df_i,:]
min_p = min(df_plot['mid'])
max_p = max(df_plot['mid'])
output_pixel,unit = rescale(df_plot['mid'],pixel_size,min_p,max_p)
#if no trend, then drop this data point
if min_p ==max_p:
print('no trend')
continue
#stack up for a numpy for Image Recognition
#print the historical data
img_ar = np.zeros((1,pixel_size,window_size,num_channel))
img_display = np.zeros((pixel_size,window_size,num_channel))
k=0
pix_p=0
for pix in output_pixel:
y_pos = int(pix)-1
img_ar[0][y_pos][k][num_channel-1] = 255
img_display[y_pos][k][num_channel-1] = 255
pix_p=y_pos
k+=1
img_row = img_ar/255
last_actual_p = pix_p * unit + min_p
#make prediction
pred_y = model.predict(img_row)
max_y_val = max(pred_y[0])
pred_y_img = np.zeros((pixel_size,1))
#Obtain predicted price
pred_pixel = 0
expected_p = 0
#calculate expected values using the predicted probabilities (pred_y) directly;
#pred_y_img is still all zeros at this point, so weighting by it would give 0
for i in range(pixel_size):
    expected_p += pred_y[0, i] * i
    if pred_y[0, i] == max_y_val:
        pred_y_img[i, 0] = 255
        pred_pixel = i
pred_p = pred_pixel * unit + min_p
print('cost at ' + str(last_actual_p))
print('predict p be ' + str(pred_p) + ' and probability of ' + str(max_y_val))
pred_exp_p = expected_p * unit + min_p
print('expected predict p be ' + str(pred_exp_p))
y_actual_p = df.iloc[df_i+1,:]['mid']
print('actual p be '+str(y_actual_p))
#Strategy Back-Testing
#Benchmark - Strategy 0 - buy and hold
if quant_trans_0 == 0:
quant_trans_0 = curr_pnl/y_actual_p
pnl = 0-trading_cost
else:
pnl = (y_actual_p/last_actual_p-1) * quant_trans_0
curr_pnl_0 += pnl
f.write('B0\tNA\t' + str(pred_dte) +'\t'+ str(last_actual_p) +'\t'+ str(y_actual_p) +'\t'+ str(y_actual_p)+'\t'+ str(1)+'\t'+ str(last_actual_p-y_actual_p)+'\n')
#Testing of strategy1
order_type = ""
quant_trans_1 = int(curr_pnl_1/last_actual_p*0.5)
if abs(pred_exp_p/last_actual_p-1)>min_pnl:
if pred_exp_p>last_actual_p:
#buy one now / long one unit
#stock_unit_1+=quant_trans_1
pnl = (y_actual_p-last_actual_p) * quant_trans_1-trading_cost
order_type = "B"
curr_pnl_1 += pnl
f.write('S1\tBuy\t' + str(pred_dte) +'\t'+ str(last_actual_p) +'\t'+ str(y_actual_p) +'\t'+ str(pred_exp_p)+'\t'+ str(quant_trans_1)+'\t'+ str(y_actual_p-last_actual_p)+'\n')
elif pred_exp_p<last_actual_p:
#sell one now / short one unit
#stock_unit_1-=quant_trans_1
pnl = (last_actual_p-y_actual_p) * quant_trans_1-trading_cost
order_type = "S"
curr_pnl_1 += pnl
f.write('S1\tSell\t' + str(pred_dte) +'\t'+ str(last_actual_p) +'\t'+ str(y_actual_p) +'\t'+ str(pred_exp_p)+'\t'+ str(quant_trans_1)+'\t'+ str(last_actual_p-y_actual_p)+'\n')
else: #no trade
if order_type == "B":
pnl = (y_actual_p-last_actual_p) * quant_trans_1
else:
pnl = (last_actual_p-y_actual_p) * quant_trans_1
curr_pnl_1 += pnl
#Testing of strategy2
if max_y_val > 0.99 and abs(pred_p/last_actual_p-1)>min_pnl:
quant_trans_2 = int(curr_pnl_2/last_actual_p*0.5)
if pred_p>last_actual_p:
#buy one now / long one unit
#stock_unit_2+=quant_trans_2
order_type = "B"
curr_pnl_2 += (y_actual_p-last_actual_p) * quant_trans_2-trading_cost
f.write('S2\tBuy\t' + str(pred_dte) +'\t'+ str(last_actual_p) +'\t'+ str(y_actual_p) +'\t'+ str(pred_p) +'\t'+str(quant_trans_2)+'\t'+ str(y_actual_p-last_actual_p)+'\n')
elif pred_p<last_actual_p:
#sell one now / short one unit
#stock_unit_2-=quant_trans_2
order_type = "S"
curr_pnl_2 += (last_actual_p-y_actual_p) * quant_trans_2-trading_cost
f.write('S2\tSell\t' + str(pred_dte) +'\t'+ str(last_actual_p) +'\t'+ str(y_actual_p) +'\t'+ str(pred_p)+'\t'+ str(quant_trans_2)+'\t'+ str(last_actual_p-y_actual_p)+'\n')
else: #no trade
if order_type == "B":
pnl = (y_actual_p-last_actual_p) * quant_trans_2
else:
pnl = (last_actual_p-y_actual_p) * quant_trans_2
curr_pnl_2 += pnl
#print the final result of the strategies
print(curr_pnl_0)
print(curr_pnl_1)
print(curr_pnl_2)
f.close()
'''
export CUDA_VISIBLE_DEVICES=''
tensorboard --logdir AI_Finance_book/6B_TrendFollowings/Graph/ --host localhost --port 6006
'''
| 32.560185
| 187
| 0.624485
| 1,183
| 7,033
| 3.404903
| 0.215554
| 0.092105
| 0.079196
| 0.038729
| 0.377855
| 0.353029
| 0.303625
| 0.269861
| 0.269861
| 0.244538
| 0
| 0.032056
| 0.22821
| 7,033
| 215
| 188
| 32.711628
| 0.710022
| 0.158538
| 0
| 0.125926
| 0
| 0
| 0.070989
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007407
| false
| 0
| 0.044444
| 0
| 0.059259
| 0.081481
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
252ac1c22921db6597accc034da434758be4405a
| 2,589
|
py
|
Python
|
lichee/dataset/field_parser/image_local_path.py
|
Tencent/Lichee
|
7653becd6fbf8b0715f788af3c0507c012be08b4
|
[
"Apache-2.0"
] | 91
|
2021-10-30T02:25:05.000Z
|
2022-03-28T06:51:52.000Z
|
lichee/dataset/field_parser/image_local_path.py
|
zhaijunyu/Lichee
|
7653becd6fbf8b0715f788af3c0507c012be08b4
|
[
"Apache-2.0"
] | 1
|
2021-12-17T09:30:25.000Z
|
2022-03-05T12:30:13.000Z
|
lichee/dataset/field_parser/image_local_path.py
|
zhaijunyu/Lichee
|
7653becd6fbf8b0715f788af3c0507c012be08b4
|
[
"Apache-2.0"
] | 17
|
2021-11-04T07:50:23.000Z
|
2022-03-24T14:24:11.000Z
|
# -*- coding: utf-8 -*-
from lichee import plugin
from .field_parser_base import BaseFieldParser
import os
from PIL import Image
from torchvision import transforms
import torch
from lichee.utils import storage
@plugin.register_plugin(plugin.PluginType.FIELD_PARSER, "image_local_path")
class ImgDataFieldParser(BaseFieldParser):
"""The field parser for local image. Read the image data from the path provided,
transforms through ToSensor, Resize and Normalize.
Attributes
----------
transformer: transforms.Compose
composes the transforms (ToTensor, Resize and Normalize)
"""
def __init__(self):
super().__init__()
self.transformer = None
def init(self, cfg):
self.cfg = cfg
resolution = [int(x) for x in self.global_config.DATASET.CONFIG.IMAGE_RESOLUTION]
self.transformer = transforms.Compose([
transforms.ToTensor(),
transforms.Resize(resolution),
transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
])
def parse(self, row, training=False):
"""Parse the row and obtain the path of image, invoke prepare_img_data to transform the image data to tensor.
Parameters
----------
row: memoryview
Object contained in a single record
training: bool
inherited from parent, not used here.
Returns
-------
record: torch.Tensor
the tensor of image data
"""
record = {}
if self.key not in row:
raise Exception("Cannot find key %s in row by image_local_path" % self.key)
img_path = bytes(row[self.key]).decode("utf-8")
if img_path[0] != "/":
img_path = os.path.join(self.global_config.DATASET.DATA_BASE_DIR, img_path)
record[self.alias] = self.prepare_img_data(img_path)
return record
def prepare_img_data(self, img_path):
"""Read and process the image from image_path
Parameters
----------
img_path: str
path of image
Returns
------
torch.Tensor
the tensor transformed from image data.
"""
with open(storage.get_storage_file(img_path), 'rb') as f:
img = Image.open(f)
img = img.convert('RGB')
return self.transformer(img)
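# Note (assumption about the environment): transforms.Resize is applied after
# ToTensor above, which requires a torchvision version (>= 0.8) whose Resize
# accepts tensors as well as PIL images.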
def collate(self, batch):
record = {}
imgs = [instance[self.alias] for instance in batch]
imgs = torch.stack(imgs)
record[self.alias] = imgs
return record
| 30.821429
| 117
| 0.611047
| 315
| 2,589
| 4.901587
| 0.374603
| 0.036269
| 0.027202
| 0.033679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014618
| 0.286597
| 2,589
| 83
| 118
| 31.192771
| 0.821332
| 0.288142
| 0
| 0.1
| 0
| 0
| 0.043929
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.175
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
252b421527774d5fb18e906562e999ce4cef4de4
| 2,054
|
py
|
Python
|
models/inception.py
|
ildoonet/kaggle-human-protein-atlas-image-classification
|
9faedaf6e480712492ccfb36c7bdf5e9f7db8b41
|
[
"Apache-2.0"
] | 35
|
2019-01-11T00:55:19.000Z
|
2021-07-14T11:44:10.000Z
|
models/inception.py
|
ildoonet/kaggle-human-protein-atlas-image-classification
|
9faedaf6e480712492ccfb36c7bdf5e9f7db8b41
|
[
"Apache-2.0"
] | null | null | null |
models/inception.py
|
ildoonet/kaggle-human-protein-atlas-image-classification
|
9faedaf6e480712492ccfb36c7bdf5e9f7db8b41
|
[
"Apache-2.0"
] | 9
|
2019-01-11T01:42:14.000Z
|
2020-03-02T05:47:18.000Z
|
import torch
from torch import nn
import torch.nn.functional as F
import torchvision
from torchvision.models.inception import BasicConv2d, InceptionAux
import pretrainedmodels
from common import num_class
class InceptionV3(nn.Module):
def __init__(self, pre=True):
super().__init__()
self.encoder = torchvision.models.inception_v3(pretrained=pre)
conv1 = BasicConv2d(4, 32, kernel_size=3, stride=2)
if pre:
w = self.encoder.Conv2d_1a_3x3.conv.weight
conv1.conv.weight = nn.Parameter(torch.cat((w, 0.5 * (w[:, :1, :, :] + w[:, 2:, :, :])), dim=1))
self.encoder.Conv2d_1a_3x3 = conv1
self.encoder.AuxLogits = InceptionAux(768, num_class())
self.encoder.fc = nn.Linear(2048, num_class())
def forward(self, x):
x = torch.nn.functional.interpolate(x, size=(299, 299), mode='bilinear') # resize
if self.training:
x, x_aux, feat = self.encoder(x)
x = (torch.sigmoid(x) + torch.sigmoid(x_aux)) * 0.5
else:
x, feat = self.encoder(x)
x = torch.sigmoid(x)
return {'logit': x, 'feat': feat}
class InceptionV4(nn.Module):
def __init__(self, pre=True):
super().__init__()
self.encoder = pretrainedmodels.__dict__['inceptionv4'](num_classes=1000, pretrained='imagenet')
conv1 = BasicConv2d(4, 32, kernel_size=3, stride=2)
if pre:
w = self.encoder.features[0].conv.weight
conv1.conv.weight = nn.Parameter(torch.cat((w, 0.5 * (w[:, :1, :, :] + w[:, 2:, :, :])), dim=1))
self.encoder.features[0].conv = conv1
self.last_linear = nn.Linear(1536, num_class())
pass
def forward(self, x):
# x = torch.nn.functional.interpolate(x, size=(299, 299), mode='bilinear')
x = self.encoder.features(x)
x = F.adaptive_avg_pool2d(x, (1, 1))
x = x.view(x.size(0), -1)
feat = x
x = self.last_linear(x)
x = torch.sigmoid(x)
return {'logit': x, 'feat': feat}
| 36.035088
| 108
| 0.595424
| 275
| 2,054
| 4.309091
| 0.290909
| 0.10211
| 0.029536
| 0.047257
| 0.527426
| 0.490295
| 0.490295
| 0.490295
| 0.452321
| 0.452321
| 0
| 0.050196
| 0.253165
| 2,054
| 56
| 109
| 36.678571
| 0.722295
| 0.038462
| 0
| 0.347826
| 0
| 0
| 0.022819
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0.021739
| 0.152174
| 0
| 0.326087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
252f0a3cb8c24df7cf5db2bc1599071146727275
| 1,238
|
py
|
Python
|
Problem001.py
|
DimitrisMantas/ProjectEuler
|
69b647232729a2d2a38ea08d1214616a861046cf
|
[
"Apache-2.0"
] | null | null | null |
Problem001.py
|
DimitrisMantas/ProjectEuler
|
69b647232729a2d2a38ea08d1214616a861046cf
|
[
"Apache-2.0"
] | null | null | null |
Problem001.py
|
DimitrisMantas/ProjectEuler
|
69b647232729a2d2a38ea08d1214616a861046cf
|
[
"Apache-2.0"
] | null | null | null |
"""This is the solution to Problem 1 of Project Euler."""
"""Copyright 2021 Dimitris Mantas"""
import time
def compute_all_multiples(of_number, below_number):
"""Compute all natural numbers, which are multiples of a natural number below a predefined number."""
# Register the list of said multiples.
multiples = []
for i in range(1, below_number):
if not i % of_number:
multiples.append(i)
return multiples
# These lines are for debugging purposes.
# print(compute_all_multiples(3,10))
# print(compute_all_multiples(5,10))
if __name__ == "__main__":
# This line is for debugging purposes.
# Start measuring the program runtime.
runtime = time.time()
# The resulting list is not sorted and contains the unique values the lists involved in the calculation.
# This is because the multiples of 15 are contained on both said lists.
ans = set([i for i in (compute_all_multiples(3, 1000) + compute_all_multiples(5, 1000))])
print(ans)
print(sum(ans))
# These lines are for debugging purposes.
# Compute the program runtime.
print("This problem was solved in {0} seconds.".format(time.time() - runtime))
| 30.195122
| 109
| 0.673667
| 171
| 1,238
| 4.748538
| 0.450292
| 0.073892
| 0.116995
| 0.039409
| 0.081281
| 0.081281
| 0
| 0
| 0
| 0
| 0
| 0.026539
| 0.239095
| 1,238
| 40
| 110
| 30.95
| 0.835456
| 0.493538
| 0
| 0
| 0
| 0
| 0.088512
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.076923
| 0
| 0.230769
| 0.230769
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2533ae4893b1c779f4471ef4511dd0dbc0e4068c
| 3,701
|
py
|
Python
|
03_queue/queue_xrh.py
|
Xinrihui/Data-Structure-and-Algrithms
|
fa3a455f64878e42d033c1fd8d612f108c71fb72
|
[
"Apache-2.0"
] | 1
|
2021-08-13T10:55:33.000Z
|
2021-08-13T10:55:33.000Z
|
03_queue/queue_xrh.py
|
Xinrihui/Data-Structure-and-Algrithms
|
fa3a455f64878e42d033c1fd8d612f108c71fb72
|
[
"Apache-2.0"
] | null | null | null |
03_queue/queue_xrh.py
|
Xinrihui/Data-Structure-and-Algrithms
|
fa3a455f64878e42d033c1fd8d612f108c71fb72
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import timeit
import numpy as np
import sys
import random as rand
class Queue_array:
    """
    Array-backed (sequential) queue.
    """
    def __init__(self, capacity):
        self._items = [None] * (capacity + 1)  # the last slot is kept empty
        self._capacity = capacity
        self._head = 0
        self._tail = 0

    def enqueue(self, item):
        """
        Enqueue an item.
        :param item:
        :return:
        """
        if self._tail == self._capacity:  # the tail pointer has reached the (empty) last slot
            if self._head != 0:  # shift the data forward to free space at the front, then insert
                self._items[0:self._tail - self._head] = self._items[self._head:self._tail]
                self._tail = self._tail - self._head
                self._head = 0
            else:  # self._head == 0 and self._tail == self._capacity means the queue is full
                print('the Queue is full!')
                return False
        self._items[self._tail] = item
        self._tail += 1
        return True

    def dequeue(self):
        """
        Dequeue an item.
        :return:
        """
        if self._head == self._tail:  # the queue is empty
            print('the Queue is empty!')
            return None
        res = self._items[self._head]
        self._items[self._head] = None
        self._head += 1
        return res

    def __repr__(self):
        return ','.join(self._items[self._head:self._tail])
class CircularQueue:
    """
    Circular (ring-buffer) queue.
    """
    def __init__(self, capacity):
        self._items = [None] * capacity
        self._capacity = capacity
        self._head = 0
        self._tail = 0

    def enqueue(self, item):
        """
        Enqueue an item.
        The circular layout avoids the data-shifting cost of the sequential queue.
        :param item:
        :return:
        """
        if (self._tail + 1) % self._capacity == self._head:  # (tail+1) % n == head means the queue is full
            print('the Queue is full!')
            return False
        self._items[self._tail] = item
        self._tail = (self._tail + 1) % self._capacity
        return True

    def dequeue(self):
        """
        Dequeue an item.
        :return:
        """
        if self._head == self._tail:  # the queue is empty
            print('the Queue is empty!')
            return None
        res = self._items[self._head]
        self._items[self._head] = None
        self._head = (self._head + 1) % self._capacity
        return res
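# Illustrative check of the "one wasted slot" convention: a CircularQueue(4)
# holds at most 3 items, because (tail + 1) % capacity == head is used as the
# full test.
#   q = CircularQueue(4)
#   q.enqueue('a'); q.enqueue('b'); q.enqueue('c')   # all return True
#   q.enqueue('d')                                   # prints 'the Queue is full!'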
class BlockingQueue:
    """
    Blocking queue (producer/consumer).
    """
    def __init__(self, capacity):
        self._items = []
        self._capacity = capacity

    def producer(self, item):  # TODO: call from multiple threads, then guard the queue with a lock
        if len(self._items) <= self._capacity:
            self._items.append(item)
            return True
        else:
            print('the Queue is full!')
            return False

    def consumer(self):
        if len(self._items) > 0:
            res = self._items.pop()
            return res
        else:
            print('the Queue is empty!')
            return None
if __name__ == '__main__':
    # 1. sequential queue
    # queue = Queue_array(8)
    # string_list = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']
    #
    # for ele in string_list:
    #     queue.enqueue(ele)
    #
    # print(queue._items)
    #
    # queue.enqueue('i')
    #
    # print('pop:', queue.dequeue())
    # print('pop:', queue.dequeue())
    # print('pop:', queue.dequeue())
    # print(queue._items)
    #
    # queue.enqueue('i')
    # print(queue)

    # 2. circular queue
    queue = CircularQueue(8)
    string_list = ['e', 'f', 'g', 'h', 'i', 'j']
    for ele in string_list:
        queue.enqueue(ele)
    print(queue._items)
    for i in range(3):
        print('pop:', queue.dequeue())
    print(queue._items)
    queue.enqueue('a')
    queue.enqueue('b')
    print(queue._items)
    queue.enqueue('c')
    queue.enqueue('d')
    print(queue._items)
    queue.enqueue('e')
| 19.276042
| 87
| 0.518779
| 433
| 3,701
| 4.210162
| 0.21709
| 0.083379
| 0.071311
| 0.055952
| 0.641251
| 0.568843
| 0.49424
| 0.450905
| 0.407021
| 0.333516
| 0
| 0.009042
| 0.34261
| 3,701
| 191
| 88
| 19.376963
| 0.740238
| 0.168873
| 0
| 0.551724
| 0
| 0
| 0.047502
| 0
| 0
| 0
| 0
| 0.005236
| 0
| 1
| 0.114943
| false
| 0
| 0.045977
| 0.011494
| 0.344828
| 0.126437
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
253438c9cde5237ab336b6ebc0e8e1089525b6e7
| 1,703
|
py
|
Python
|
domains/gym_craft/tests/plotting.py
|
AndrewPaulChester/sage-code
|
9fe676bfbcbc6f642eca29b30a1027fba2a426a0
|
[
"MIT"
] | null | null | null |
domains/gym_craft/tests/plotting.py
|
AndrewPaulChester/sage-code
|
9fe676bfbcbc6f642eca29b30a1027fba2a426a0
|
[
"MIT"
] | null | null | null |
domains/gym_craft/tests/plotting.py
|
AndrewPaulChester/sage-code
|
9fe676bfbcbc6f642eca29b30a1027fba2a426a0
|
[
"MIT"
] | null | null | null |
import numpy as np
from matplotlib import pyplot as plt
import math
MAX_SPEED = 2
ACCELERATION = 0.5
DRAG = 0.3
TURN_SPEED=5
IMAGE = np.array([
[0,0,0,1,0,0,0],
[0,0,1,1,1,0,0],
[0,1,1,1,1,1,0],
[1,1,1,1,1,1,1],
[0,1,1,1,1,1,0],
[0,0,1,1,1,0,0],
[0,0,0,1,0,0,0]])
def main():
position=(42 ,42)
speed=0
bearing=0
acc=0
turn=0
plt.ion()
fig, ax = plt.subplots()
img = np.zeros((420,420))
img[207:214,207:214]=IMAGE
im = ax.imshow(img)
for i in range(1000):
acc+=np.random.rand()-0.5
turn+=np.random.rand()-0.5
acc=np.clip(acc,-1,1)
turn=np.clip(turn,-1,1)
(position,bearing,speed) = update_coords(position,bearing,speed,acc,turn)
print(acc,turn)
print(position)
render(ax,im,position,bearing,speed)
def update_coords(position,bearing,speed,acceleration,turning):
(x_pos,y_pos) = position
speed = update_speed(speed,acceleration)
bearing = (bearing + TURN_SPEED*turning) % 360
x_pos += speed * math.sin(bearing*2*math.pi/360)
y_pos += speed * math.cos(bearing*2*math.pi/360)
return ((x_pos,y_pos),bearing,speed)
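# Sanity check (illustrative): with speed=1 and bearing=90, sin(90*2*pi/360) = 1
# and cos(...) = 0, so the update moves the point one unit along x and none along
# y; bearing is therefore measured in degrees away from the +y axis.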
def update_speed(speed,acceleration):
speed *= DRAG
speed += acceleration*ACCELERATION
speed = min(speed,MAX_SPEED) if speed > 0 else max(speed,-MAX_SPEED)
return speed
def render(ax,im,position,bearing,speed):
x_pos,y_pos = position
img = np.zeros((420,420))
x = int(x_pos*5)
y = int(y_pos*5)
img[x:x+7,y:y+7]=IMAGE
# plt.scatter(x,y)
# plt.show()
im.set_data(img)
ax.set_title(f"bearing : {bearing}, speed: {speed}")
plt.pause(0.001)
plt.draw()
if __name__ == "__main__":
main()
| 22.706667
| 81
| 0.613623
| 299
| 1,703
| 3.397993
| 0.247492
| 0.03937
| 0.038386
| 0.031496
| 0.294291
| 0.107283
| 0.048228
| 0.036417
| 0.036417
| 0.018701
| 0
| 0.08841
| 0.20963
| 1,703
| 75
| 82
| 22.706667
| 0.666419
| 0.015854
| 0
| 0.101695
| 0
| 0
| 0.025687
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067797
| false
| 0
| 0.050847
| 0
| 0.152542
| 0.033898
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
25388135b2590bec6c24b4f712d9da835c81c62b
| 4,338
|
py
|
Python
|
pysplit/clusgroup.py
|
haochiche/pysplit
|
df6f8ebe93dd81ff8925529b8dfaaea2f446f2e5
|
[
"BSD-3-Clause"
] | 110
|
2015-07-12T15:13:18.000Z
|
2022-03-28T00:58:59.000Z
|
pysplit/clusgroup.py
|
haochiche/pysplit
|
df6f8ebe93dd81ff8925529b8dfaaea2f446f2e5
|
[
"BSD-3-Clause"
] | 70
|
2016-02-23T03:19:55.000Z
|
2022-03-14T09:12:43.000Z
|
pysplit/clusgroup.py
|
haochiche/pysplit
|
df6f8ebe93dd81ff8925529b8dfaaea2f446f2e5
|
[
"BSD-3-Clause"
] | 66
|
2015-07-10T20:43:30.000Z
|
2022-02-18T01:00:33.000Z
|
from __future__ import division, print_function
from .trajgroup import TrajectoryGroup
from .hypath import HyPath
from .hygroup import HyGroup
def print_clusterprocedure():
"""Print clustering guide."""
print("""
In ``PySPLIT``
1. Create ``TrajectoryGroup`` with desired set of trajectories
2. ``TrajectoryGroup.make_infile()``
In ``HYSPLIT``
3. Trajectory --> Special Runs --> Clustering --> Standard
4. Adjust clustering parameters and working folder
(where output will be stored, where INFILE lives)
5. ``Run cluster analysis``
6. Determine and set appropriate number of clusters
7. Assign trajectories to clusters (``Run``)
8. ``Display Means``, ``Quit``
In ``PySPLIT``
9. ``spawn_clusters()``""")
class Cluster(HyPath, HyGroup):
"""
A special subclass of both ``HyGroup`` and ``HyPath``.
Clusters contain both trajectories and mean path information. The mean
path and the trajectory composition is determined by ``HySPLIT``.
"""
def __init__(self, clusterdata, pathdata, datetime, clusterheader,
trajectories, cluster_number):
"""
Initialize ``Cluster`` object.
Parameters
----------
trajectories : list of ``Trajectory`` objects
Trajectories that belong in the cluster.
cluster_number : int
The ``Cluster`` identification number. Distinguishes ``Cluster``
from other ``Clusters`` in its ``ClusterGroup``
"""
HyPath.__init__(self, clusterdata, pathdata, datetime,
clusterheader)
HyGroup.__init__(self, trajectories)
self.start_longitude = self.trajectories[0].data.loc[0, 'geometry'].x
self.clusternumber = cluster_number
self.multitraj = False
def __getitem__(self, index):
"""
Get ``Trajectory`` or ``TrajectoryGroup``.
Parameters
----------
index : int or slice
Returns
-------
``Trajectory`` or ``TrajectoryGroup`` depending if indexed
or sliced. Won't return a ``Cluster`` because those are
specially defined.
"""
newthing = self.trajectories[index]
if isinstance(newthing, list):
newthing = TrajectoryGroup(newthing)
return newthing
def __add__(self, other):
"""
Add a ``HyGroup`` to this ``Cluster`` instance.
Parameters
----------
other : ``HyGroup``
Another ``TrajectoryGroup`` or ``Cluster``. May or may not
contain some of the same ``Trajectory`` instances.
Returns
-------
A new ``TrajectoryGroup`` containing the union of the sets
of ``Trajectory`` instances.
"""
return TrajectoryGroup(HyGroup.__add__(self, other))
def __sub__(self, other):
"""
Subtract a ``HyGroup`` from this ``Cluster`` instance.
Parameters
----------
other : ``HyGroup``
Another ``Cluster`` or ``TrajectoryGroup``
Returns
-------
A new ``TrajectoryGroup`` containing the set difference between
the sets of ``Trajectory`` instances.
"""
return TrajectoryGroup(HyGroup.__sub__(self, other))
class ClusterGroup(object):
"""
Group of ``Cluster`` instances.
Contains all the ``Cluster``s produced in one ``HYSPLIT`` cluster analysis.
"""
def __init__(self, clusters):
"""
Initialize ``ClusterGroup`` object.
Parameters
----------
clusters : list of ``Cluster`` instances
``Cluster`` instances from the same HYSPLIT clustering run.
"""
self.clusters = clusters
self.clustercount = len(clusters)
self.trajcount = sum([c.trajcount for c in self.clusters])
def __getitem__(self, index):
"""
Get ``Cluster`` or ``ClusterGroup``.
Index or slice ``self.clusters`` to get a ``Cluster`` or
``ClusterGroup``, respectively.
"""
newthing = self.clusters[index]
try:
newthing = ClusterGroup(newthing)
except TypeError:  # a single indexed Cluster cannot seed a ClusterGroup
pass
return newthing
| 27.807692
| 79
| 0.574919
| 408
| 4,338
| 5.985294
| 0.382353
| 0.02457
| 0.009009
| 0.022113
| 0.174447
| 0.156429
| 0.085176
| 0.045864
| 0
| 0
| 0
| 0.003659
| 0.307054
| 4,338
| 155
| 80
| 27.987097
| 0.808716
| 0.397418
| 0
| 0.122449
| 0
| 0
| 0.296786
| 0.015595
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0.020408
| 0.081633
| 0
| 0.346939
| 0.061224
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
253a183c509b499df726c22fb7b3ee45b370c6ff
| 2,424
|
py
|
Python
|
bin/lkft_notify_developer.py
|
roxell/lkft-tools
|
bd1981b1f616114cb260878fe7319753107e581b
|
[
"MIT"
] | 3
|
2018-12-14T02:37:10.000Z
|
2020-04-30T19:07:01.000Z
|
bin/lkft_notify_developer.py
|
roxell/lkft-tools
|
bd1981b1f616114cb260878fe7319753107e581b
|
[
"MIT"
] | 25
|
2018-07-27T13:38:17.000Z
|
2021-10-05T13:01:36.000Z
|
bin/lkft_notify_developer.py
|
roxell/lkft-tools
|
bd1981b1f616114cb260878fe7319753107e581b
|
[
"MIT"
] | 12
|
2018-07-09T22:52:32.000Z
|
2021-11-29T19:45:33.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import os
import re
import requests
import sys
sys.path.append(os.path.join(sys.path[0], "../", "lib"))
import lkft_squad_client # noqa: E402
def get_branch_from_make_kernelversion(make_kernelversion):
"""
IN: "4.4.118"
OUT: "4.4"
IN: "4.9.118-rc1"
OUT: "4.9"
"""
pattern = re.compile(r"^(\d+\.\d+).*$")
match = pattern.match(make_kernelversion)
return match.group(1)
def get_most_recent_release(builds_url):
"""
Given a list of builds that is sorted with the newest first,
return the most recent finished build.
"""
first_build = None
for build in lkft_squad_client.Builds(builds_url):
if not first_build:
first_build = build
if build["finished"]:
return build
# If none found, return first build
return first_build
def get_build_report(build_url):
build = lkft_squad_client.Build(build_url)
baseline_branch = get_branch_from_make_kernelversion(
build.build_metadata["make_kernelversion"]
)
# Get baseline
baseline_project_url = lkft_squad_client.get_projects_by_branch()[baseline_branch]
baseline_builds_url = baseline_project_url + "builds"
baseline_build = get_most_recent_release(baseline_builds_url)
template_url = build_url + "email"
parameters = {"baseline": baseline_build["id"], "template": "9"}
result = requests.get(template_url, parameters)
email = build.build_metadata.get("email-notification", "")
if "No regressions" in result.text:
subject = "{}: no regressions found".format(build.build["version"])
else:
subject = "{}: regressions detected".format(build.build["version"])
return (email, subject, result.text)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("build_url", help="API URL to developer build")
args = parser.parse_args()
(email_destination, email_subject, email_body) = get_build_report(args.build_url)
with open("email.to", "w") as f:
f.write(email_destination)
with open("email.subject", "w") as f:
f.write(email_subject)
with open("email.body", "w") as f:
f.write(email_body)
print("TO: {}".format(email_destination))
print("SUBJECT: {}".format(email_subject))
print("\n{}\n".format(email_body))
| 28.186047
| 86
| 0.664604
| 318
| 2,424
| 4.820755
| 0.336478
| 0.039139
| 0.039139
| 0.009785
| 0.068493
| 0.029354
| 0
| 0
| 0
| 0
| 0
| 0.011948
| 0.205858
| 2,424
| 85
| 87
| 28.517647
| 0.784416
| 0.105198
| 0
| 0
| 0
| 0
| 0.12894
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06
| false
| 0
| 0.12
| 0
| 0.26
| 0.06
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
253c3b4e7dd3233e756d0a0d7809bcec3e7f9d2a
| 1,507
|
py
|
Python
|
day_3.py
|
bastoche/adventofcode2017
|
a93ecff1de78376b03d4c922c82dff96574f2466
|
[
"MIT"
] | null | null | null |
day_3.py
|
bastoche/adventofcode2017
|
a93ecff1de78376b03d4c922c82dff96574f2466
|
[
"MIT"
] | null | null | null |
day_3.py
|
bastoche/adventofcode2017
|
a93ecff1de78376b03d4c922c82dff96574f2466
|
[
"MIT"
] | null | null | null |
from math import ceil, sqrt
def part_one(input):
circle_index = get_circle_index(input)
circle_zero = get_circle_zero(circle_index)
cardinal_points = get_cardinal_points(circle_index, circle_zero)
distance_to_closest_cardinal_point = compute_distance_to_closest_cardinal_point(input, cardinal_points)
return circle_index + distance_to_closest_cardinal_point
def get_circle_index(input):
return ceil(sqrt(input)) // 2
def get_circle_zero(circle_index):
return pow(circle_index * 2 - 1, 2)
def get_cardinal_points(circle_index, circle_zero):
return [circle_zero + x * circle_index for x in [1, 3, 5, 7]]
def compute_distance_to_closest_cardinal_point(input, cardinal_points):
return min([abs(input - x) for x in cardinal_points])
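# Worked examples (from the Advent of Code 2017 day 3 statement): part_one(12) == 3
# and part_one(23) == 2; e.g. square 12 lies on ring 2 (whose "circle zero" is 9),
# one step from the cardinal point 11, giving a distance of 2 + 1 = 3.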
def part_two(input):
spiral = {}
x = 0
y = 0
spiral[(0, 0)] = 1
while spiral[(x, y)] < input:
x, y = get_next_coordinates(x, y)
coordinates_offsets = [-1, 0, 1]
spiral[(x, y)] = sum([spiral.get((x + i, y + j), 0) for i in coordinates_offsets for j in coordinates_offsets])
return spiral[(x, y)]
def get_next_coordinates(x, y):
if x == y == 0:
return (1, 0)
if y > -x and x > y:
return (x, y + 1)
if y > -x and y >= x:
return (x - 1, y)
if y <= -x and x < y:
return (x, y - 1)
if y <= -x and x >= y:
return (x + 1, y)
if __name__ == "__main__":
input = 325489
print(part_one(input))
print(part_two(input))
| 25.542373
| 119
| 0.639681
| 238
| 1,507
| 3.777311
| 0.205882
| 0.026696
| 0.07564
| 0.111235
| 0.467186
| 0.288098
| 0.288098
| 0.20356
| 0.193548
| 0.193548
| 0
| 0.02627
| 0.242203
| 1,507
| 58
| 120
| 25.982759
| 0.760946
| 0
| 0
| 0
| 0
| 0
| 0.005309
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.175
| false
| 0
| 0.025
| 0.1
| 0.475
| 0.05
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
253e8b5989062bd43d076499f35aace1547716ff
| 2,395
|
py
|
Python
|
src/pysqldump/domain/manager.py
|
tongyeouki/sql-converter
|
28039fe16b43f443925447d06d682f6aa8c3a909
|
[
"MIT"
] | 1
|
2020-06-12T03:32:35.000Z
|
2020-06-12T03:32:35.000Z
|
src/pysqldump/domain/manager.py
|
tongyeouki/sql-converter
|
28039fe16b43f443925447d06d682f6aa8c3a909
|
[
"MIT"
] | null | null | null |
src/pysqldump/domain/manager.py
|
tongyeouki/sql-converter
|
28039fe16b43f443925447d06d682f6aa8c3a909
|
[
"MIT"
] | 1
|
2020-06-12T03:32:15.000Z
|
2020-06-12T03:32:15.000Z
|
from typing import Optional
from pysqldump.domain.formatters import (
CSVFormatter,
DictFormatter,
JsonFormatter,
ConsoleFormatter,
)
from pysqldump.settings.base import get_config
config = get_config()
class File:
def __init__(self, filename):
self.filename = filename
def get_extension(self):
try:
return self.filename.split(".")[1]
except (IndexError, AttributeError):
return None
def get_filename(self):
if self.filename is None:
return ""
return self.filename
class OutputManager:
formats = {"csv": "csv", "json": "json", "console": "console"}
def __init__(self, data: tuple, headers: list, export_to: Optional[str] = None):
self._filename = File(filename=export_to)
self.headers = headers
self.data = data
@property
def filename(self):
return self._filename.get_filename()
@property
def formatter(self):
extension = self._filename.get_extension()
return self.formats.get(extension, "console")
def run(self, pprint: bool = False, json: bool = False):
if self.formatter == "csv":
return self.__to_csv(pprint=pprint)
elif (self.formatter == "console" and json) or self.formatter == "json":
return self.__to_json(pprint=pprint, json=json)
elif self.formatter == "console":
return self.__to_console(pprint=pprint)
def __to_console(self, pprint: bool = False):
if pprint:
return ConsoleFormatter(
headers=self.headers, data=self.data, export_to=self.filename
).print()
return DictFormatter(
headers=self.headers, data=self.data, export_to=self.filename
).export()
def __to_csv(self, pprint: bool = False):
formatter = CSVFormatter(
headers=self.headers, data=self.data, export_to=self.filename
)
if pprint:
return formatter.print()
return formatter.export()
def __to_json(self, pprint: bool = False, json: bool = False):
formatter = JsonFormatter(
headers=self.headers, data=self.data, export_to=self.filename
)
if pprint:
formatter.print()
if self.filename and not json:  # check the name string; the File wrapper itself is always truthy
return formatter.export()
return formatter.use()
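# Usage sketch (hypothetical data): OutputManager(data=rows, headers=cols,
# export_to="dump.csv").run() selects the CSV formatter from the ".csv"
# extension; with export_to=None the fallback "console" formatter is used.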
| 29.567901
| 84
| 0.617954
| 264
| 2,395
| 5.44697
| 0.212121
| 0.108484
| 0.041725
| 0.052851
| 0.194715
| 0.194715
| 0.194715
| 0.150209
| 0.150209
| 0.150209
| 0
| 0.000579
| 0.279332
| 2,395
| 80
| 85
| 29.9375
| 0.832561
| 0
| 0
| 0.169231
| 0
| 0
| 0.0238
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0
| 0.046154
| 0.015385
| 0.476923
| 0.2
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
25405166ea1f14ffbb145a0fad72cb35236d7ab6
| 605
|
py
|
Python
|
Mortgage Calculator.py
|
BokijonovM/Projects
|
7c032f872aaa4bdf0fba100385019c6058c3c8fb
|
[
"BSD-2-Clause"
] | 1
|
2021-03-18T08:12:15.000Z
|
2021-03-18T08:12:15.000Z
|
Mortgage Calculator.py
|
BokijonovM/Python_Projects
|
7c032f872aaa4bdf0fba100385019c6058c3c8fb
|
[
"BSD-2-Clause"
] | null | null | null |
Mortgage Calculator.py
|
BokijonovM/Python_Projects
|
7c032f872aaa4bdf0fba100385019c6058c3c8fb
|
[
"BSD-2-Clause"
] | null | null | null |
"""**Mortgage Calculator** -
Calculate the monthly payments of a fixed term mortgage
over given Nth terms at a given interest rate. Also figure
out how long it will take the user to pay back the loan."""
months = int(input("Enter mortgage term (in months): "))
rate = float(input("Enter interest rate (in %): "))
loan = float(input("Enter loan value: "))
monthly_rate = rate / 100 / 12
payment = (monthly_rate / (1 - (1 + monthly_rate)**(-months))) * loan
print("Monthly payment for a $%.2f %s year mortgage at %.2f%% interest rate is: $%.2f" % (loan, (months / 12), rate, payment))
| 37.8125
| 127
| 0.661157
| 90
| 605
| 4.411111
| 0.522222
| 0.09068
| 0.075567
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024691
| 0.196694
| 605
| 15
| 128
| 40.333333
| 0.792181
| 0.330579
| 0
| 0
| 0
| 0
| 0.409922
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2545a6ce4bad291b2182fea9564fd36668358b01
| 660
|
py
|
Python
|
scrapingData/scraping.py
|
karumo10/coursesel-helper
|
deb7e52a7bfe1fc41cd630d5a2cbe96fa089d986
|
[
"MIT"
] | null | null | null |
scrapingData/scraping.py
|
karumo10/coursesel-helper
|
deb7e52a7bfe1fc41cd630d5a2cbe96fa089d986
|
[
"MIT"
] | null | null | null |
scrapingData/scraping.py
|
karumo10/coursesel-helper
|
deb7e52a7bfe1fc41cd630d5a2cbe96fa089d986
|
[
"MIT"
] | null | null | null |
from requests_html import HTMLSession
import os
import sys

writeFileName = "courseLinks.out"
writeFileStream = open(writeFileName, 'w', encoding='utf-8')
session = HTMLSession()
url = 'https://www.ji.sjtu.edu.cn/academics/courses/courses-by-number/'
r = session.get(url)
for i in range(2, 100):
    sel = '#Faculty-information > li:nth-child(' + str(i) + ') > a'
    # print(sel)
    results = r.html.find(sel)
    if len(results) == 0:
        break
    else:
        for result in results:
            writeFileStream.write(result.absolute_links.pop() + '\n')
writeFileStream.close()
# #Faculty-information > li:nth-child(3) > a
| 24.444444
| 72
| 0.636364
| 85
| 660
| 4.917647
| 0.705882
| 0.086124
| 0.095694
| 0.110048
| 0.133971
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013514
| 0.215152
| 660
| 26
| 73
| 25.384615
| 0.793436
| 0.078788
| 0
| 0
| 0
| 0.058824
| 0.219723
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.176471
| 0
| 0.176471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
254a5b1fda824a925564dbbe740873888025ca2b
| 7,655
|
py
|
Python
|
jukebot/cogs/gametime.py
|
Kommotion/Jukebot
|
4e50342b914ff6b91fd78802900d1e24bee946db
|
[
"MIT"
] | 1
|
2021-07-26T02:44:00.000Z
|
2021-07-26T02:44:00.000Z
|
jukebot/cogs/gametime.py
|
Kommotion/Jukebot
|
4e50342b914ff6b91fd78802900d1e24bee946db
|
[
"MIT"
] | null | null | null |
jukebot/cogs/gametime.py
|
Kommotion/Jukebot
|
4e50342b914ff6b91fd78802900d1e24bee946db
|
[
"MIT"
] | null | null | null |
import logging
import discord
from datetime import datetime
from discord.ext import tasks, commands
from discord.ext.commands import Cog
from cogs.utils.utils import json_io_dump, json_io_load
log = logging.getLogger(__name__)
STATUS = 'status'
TIME_STARTED = 'time_started'
NAME = 'name'
GAMES = 'games'
NONE = 'none'
# Reference JSON
# {
# "player_id1": {
# "status": "a string of status",
# "time_started": "time_started_current_status",
# "games":{
# "COD MW2": "time_played",
# "Poop": "time_played"
# }
# },
# "player_id2": {
# "status": "a string of status",
# "time_started": "time_started_current_status",
# "games":{
# "COD MW2": "time_played",
# "Poop": "time_played"
# }
# }
# }
class TimePlayed(Cog):
""" Tracks your time played for each status you have had """
def __init__(self, bot):
self.bot = bot
self.log = logging.getLogger()
self.gametime_file = 'gametime.json'
self.gametime = None
self.update_time.start()
async def game_load(self):
""" Loads games from JSON """
self.gametime = json_io_load(self.gametime_file)
async def game_dump(self):
""" Dumps games to JSON """
if not json_io_dump(self.gametime_file, self.gametime):
self.log.critical('Unable to dump JSON file for TimePlayed!')
def calculate_addition(self, time_started):
""" Returns whether to add 2 minutes (in seconds) or something less than that
Time_started is a datetime.datetime string
"""
converted_time = datetime.strptime(time_started, '%Y-%m-%d %H:%M:%S')
delta = (datetime.utcnow().replace(microsecond=0) - converted_time).total_seconds()
return int(delta) if delta < 120 else 120
async def get_current_gametime(self):
""" Returns the dictionary of the current players and what they are playing """
current_gametime = dict()
for member in set(self.bot.get_all_members()):
# Initialize the dictionary for this member and set everything to None
current_gametime[str(member.id)] = dict()
current_gametime[str(member.id)][NAME] = member.name
current_gametime[str(member.id)][STATUS] = NONE
current_gametime[str(member.id)][TIME_STARTED] = NONE
current_gametime[str(member.id)][GAMES] = dict()
# If the member is not doing anything, continue
if not member.activities:
continue
# If the member is playing something, then take note of this
for activity in member.activities:
if activity.type == discord.ActivityType.playing:
# If for some reason this is not None, then we have 2 gaming activities on this member
if current_gametime[str(member.id)][STATUS] != NONE:
self.log.critical('There are multiple games playing right now in Gametime for single member!')
self.log.critical('{} had status {} instead of none as expected.'.format(
current_gametime[str(member.id)][NAME], current_gametime[str(member.id)][STATUS]))
current_gametime[str(member.id)][STATUS] = activity.name
date = member.activity.start.replace(microsecond=0) if member.activity.start else datetime.utcnow().replace(microsecond=0)
current_gametime[str(member.id)][TIME_STARTED] = str(date)
current_gametime[str(member.id)][GAMES][activity.name] = 0
return current_gametime
async def compare_and_update(self, current_gametime):
""" Compares and updates the playing list """
for id in current_gametime:
if id not in self.gametime:
self.gametime[id] = current_gametime[id]
current_status = current_gametime[id][STATUS]
# If the current gametime is not None, then update the time on the game currently played
if current_status != NONE:
if current_status not in self.gametime[id][GAMES]:
self.gametime[id][GAMES][current_status] = 0
result = self.calculate_addition(current_gametime[id][TIME_STARTED])
self.gametime[id][GAMES][current_status] += result
# If the current game is different from last game, add 2 minutes to the last game
if current_status != self.gametime[id][STATUS] and self.gametime[id][STATUS] != NONE:
self.gametime[id][GAMES][self.gametime[id][STATUS]] += 120
# Update the game status regardless of what's going on
self.gametime[id][STATUS] = current_gametime[id][STATUS]
self.gametime[id][TIME_STARTED] = current_gametime[id][TIME_STARTED]
def calculate_days_minutes_seconds(self, seconds):
""" Returns the days hours minutes seconds from seconds """
# years, seconds = seconds // 31556952, seconds % 31556952
# months, seconds = seconds // 2629746, seconds % 2629746
days, seconds = seconds // 86400, seconds % 86400
hours, seconds = seconds // 3600, seconds % 3600
minutes, seconds = seconds // 60, seconds % 60
msg = '{:02d} Days, {:02d} Hours, {:02d} Minutes, {:02d} Seconds'.format(days, hours, minutes, seconds)
if days > 9000:
msg += ' ITS OVER 9000!'
if days == 69:
msg += ' Hah, nice... 69'
return msg
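# Example (illustrative): calculate_days_minutes_seconds(90061) returns
# '01 Days, 01 Hours, 01 Minutes, 01 Seconds', since 90061 s = 1 day + 1 h + 1 min + 1 s.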
@commands.command()
async def played(self, ctx, *, member: discord.Member = None):
"""Returns the amount of time played for every game
If Member is not specified, then returns the played information for member that sent command
"""
if member is None:
member = ctx.author
if str(member.id) not in self.gametime:
    await ctx.send('ERROR!: Unable to find {} in gametime list... looks like a bug'.format(member.mention))
    return  # without this, the dictionary lookups below would raise KeyError
msg = 'Time played for {}\n'.format(member.mention)
if not self.gametime[str(member.id)][GAMES]:
msg += '`Looks like {} hasn\'t played any games!`'.format(member.display_name)
for game in self.gametime[str(member.id)][GAMES]:
msg += '`{:<30}: {}`\n'.format(game, self.calculate_days_minutes_seconds(self.gametime[str(member.id)][GAMES][game]))
await ctx.send('{}'.format(msg))
@tasks.loop(minutes=2)
async def update_time(self):
""" Loop that updates the time played of the current game for each member
Steps:
1. Load list
2. Get Current List of people playing
3. Compare new with old list of people playing and update old gametime list as needed
4. Write list
"""
self.log.debug('Starting gametime save loop')
await self.game_load()
current_gametime = await self.get_current_gametime()
await self.compare_and_update(current_gametime)
await self.game_dump()
self.log.debug('End gametime save loop')
@update_time.before_loop
async def before_update_time(self):
""" We want to wait until the bot is ready before going into the loop """
await self.bot.wait_until_ready()
@update_time.after_loop
async def after_update_time(self):
""" If anything is happening after the loop, we want to store all the information before any exits """
await self.game_dump()
def setup(bot):
bot.add_cog(TimePlayed(bot))
| 40.502646
| 142
| 0.617897
| 963
| 7,655
| 4.796469
| 0.220145
| 0.081186
| 0.035722
| 0.057588
| 0.22992
| 0.164105
| 0.107383
| 0.039402
| 0.039402
| 0.039402
| 0
| 0.01806
| 0.276682
| 7,655
| 188
| 143
| 40.718085
| 0.816146
| 0.169432
| 0
| 0.019802
| 0
| 0.009901
| 0.086575
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039604
| false
| 0
| 0.059406
| 0
| 0.138614
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
254ca1af527eda83d904a3bb25f7ec725799bb3b
| 2,578
|
py
|
Python
|
transformy/conversion/_pyqtgraph.py
|
AllenInstitute/transformy
|
17c769857d0cb05ad252ab684dec9eadb61a7c59
|
[
"BSD-3-Clause"
] | 1
|
2021-06-22T18:06:06.000Z
|
2021-06-22T18:06:06.000Z
|
transformy/conversion/_pyqtgraph.py
|
AllenInstitute/transformy
|
17c769857d0cb05ad252ab684dec9eadb61a7c59
|
[
"BSD-3-Clause"
] | null | null | null |
transformy/conversion/_pyqtgraph.py
|
AllenInstitute/transformy
|
17c769857d0cb05ad252ab684dec9eadb61a7c59
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
from .converter import TransformConverter
from .. import linear
class PyqtgraphTransformConverter(TransformConverter):
name = 'pyqtgraph'
def __init__(self):
try:
import pyqtgraph
self._import_error = None
except ImportError as exc:
self._import_error = str(exc)
return
self._to_classes = {
linear.STTransform: self._STTransform_to_pg,
linear.AffineTransform: self._AffineTransform_to_pg,
}
self._from_classes = {
# pyqtgraph.SRTTransform: self._from_SRTTransform,
# pyqtgraph.SRTTransform3D: self._from_SRTTransform,
pyqtgraph.QtGui.QTransform: self._from_QTransform,
pyqtgraph.QtGui.QMatrix4x4: self._from_QMatrix4x4,
pyqtgraph.Transform3D: self._from_QMatrix4x4,
}
def _STTransform_to_pg(self, tr):
import pyqtgraph
if tr.dims == (2, 2):
ptr = pyqtgraph.SRTTransform()
ptr.setScale(tr.scale)
ptr.setTranslate(tr.offset)
return ptr
elif tr.dims == (3, 3):
ptr = pyqtgraph.SRTTransform3D()
ptr.setScale(tr.scale)
ptr.setTranslate(tr.offset)
return ptr
else:
raise TypeError("Converting STTransform of dimension %r to pyqtgraph is not supported." % tr.dims)
def _AffineTransform_to_pg(self, tr):
import pyqtgraph
if tr.dims == (2, 2):
m = tr.matrix
o = tr.offset
ptr = pyqtgraph.QtGui.QTransform(m[0,0], m[1,0], 0.0, m[0,1], m[1,1], 0.0, o[0], o[1], 1.0)
return ptr
elif tr.dims == (3, 3):
m = np.eye(4)
m[:3, :3] = tr.matrix
m[:3, 3] = tr.offset
ptr = pyqtgraph.Transform3D(m)
return ptr
else:
raise TypeError("Converting AffineTransform of dimension %r to pyqtgraph is not supported." % tr.dims)
def _from_SRTTransform(self, tr):
return linear.STTransform(offset=tr.getTranslation(), scale=tr.getScale())
def _from_QTransform(self, tr):
m = np.array([
[tr.m11(), tr.m21()],
[tr.m12(), tr.m22()],
])
o = np.array([tr.m31(), tr.m32()])
return linear.AffineTransform(matrix=m, offset=o)
def _from_QMatrix4x4(self, tr):
m = np.array(tr.copyDataTo()).reshape(4,4)
return linear.AffineTransform(matrix=m[:3, :3], offset=m[:3, 3])
| 34.837838
| 114
| 0.564779
| 295
| 2,578
| 4.8
| 0.247458
| 0.033898
| 0.008475
| 0.032486
| 0.324859
| 0.276836
| 0.208333
| 0.185028
| 0.185028
| 0.185028
| 0
| 0.033792
| 0.322731
| 2,578
| 73
| 115
| 35.315068
| 0.777205
| 0.038402
| 0
| 0.274194
| 0
| 0
| 0.060985
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.096774
| false
| 0
| 0.145161
| 0.016129
| 0.403226
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
25507a35dbe62df6d608b962eb29203e902472af
| 5,018
|
py
|
Python
|
src/means/io/sbml.py
|
nicktimko/means
|
fe164916a1d84ab2a4fa039871d38ccdf638b1db
|
[
"MIT"
] | 10
|
2016-05-25T08:28:39.000Z
|
2020-06-04T03:19:50.000Z
|
src/means/io/sbml.py
|
nicktimko/means
|
fe164916a1d84ab2a4fa039871d38ccdf638b1db
|
[
"MIT"
] | 5
|
2015-12-08T14:01:15.000Z
|
2020-01-10T22:42:18.000Z
|
src/means/io/sbml.py
|
nicktimko/means
|
fe164916a1d84ab2a4fa039871d38ccdf638b1db
|
[
"MIT"
] | 6
|
2015-12-10T17:24:11.000Z
|
2021-03-22T16:12:17.000Z
|
from collections import namedtuple
import os
import sympy
import numpy as np
from means.core.model import Model
_Reaction = namedtuple('_REACTION', ['id', 'reactants', 'products', 'propensity', 'parameters'])
def _sbml_like_piecewise(*args):
if len(args) % 2 == 1:
# Add a final True element you can skip in SBML
args += (True,)
sympy_args = []
for i in range(len(args) // 2):  # integer division; a bare / breaks under Python 3
# We need to group args into tuples of form
# (value, condition)
# SBML usually outputs them in form (value, condition, value, condition, value ...)
sympy_args.append((args[i*2], args[i*2+1]))
return sympy.Piecewise(*sympy_args)
def _sympify_kinetic_law_formula(formula):
# We need to define some namespace hints for sympy to deal with certain functions in SBML formulae
# For instance, `eq` in formula should map to `sympy.Eq`
namespace = {'eq': sympy.Eq,
'neq': sympy.Ne,
'floor': sympy.floor,
'ceiling': sympy.ceiling,
'gt': sympy.Gt,
'lt': sympy.Lt,
'geq': sympy.Ge,
'leq': sympy.Le,
'pow': sympy.Pow,
'piecewise': _sbml_like_piecewise}
return sympy.sympify(formula, locals=namespace)
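# Example (illustrative): _sympify_kinetic_law_formula('pow(S1, 2) * k1') yields
# the sympy expression S1**2 * k1, with `pow` resolved via the namespace above.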
def _parse_reaction(libsbml_reaction):
id_ = libsbml_reaction.getId()
reactants = {sympy.Symbol(r.getSpecies()): r.getStoichiometry() for r in libsbml_reaction.getListOfReactants()}
products = {sympy.Symbol(p.getSpecies()): p.getStoichiometry() for p in libsbml_reaction.getListOfProducts()}
kinetic_law = _sympify_kinetic_law_formula(libsbml_reaction.getKineticLaw().getFormula())
# This would only work for SBML Level 3, prior levels do not have parameters within kinetic law
parameters = [(sympy.Symbol(p.getId()), p.getValue())
for p in libsbml_reaction.getKineticLaw().getListOfParameters()]
return _Reaction(id_, reactants, products, kinetic_law, parameters)
def read_sbml(filename):
"""
Read the model from a SBML file.
:param filename: SBML filename to read the model from
:return: A tuple, consisting of :class:`~means.core.model.Model` instance,
set of parameter values, and set of initial conditions variables.
"""
import libsbml
if not os.path.exists(filename):
raise IOError('File {0!r} does not exist'.format(filename))
reader = libsbml.SBMLReader()
document = reader.readSBML(filename)
sbml_model = document.getModel()
if not sbml_model:
raise ValueError('Cannot parse SBML model from {0!r}'.format(filename))
species = sympy.symbols([s.getId() for s in sbml_model.getListOfSpecies()])
initial_conditions = [s.getInitialConcentration() for s in sbml_model.getListOfSpecies()]
compartments = sympy.symbols([s.getId() for s in sbml_model.getListOfCompartments()])
compartment_sizes = [s.getSize() for s in sbml_model.getListOfCompartments()]
reactions = list(map(_parse_reaction, sbml_model.getListOfReactions()))  # list() so len() below also works on Python 3
# getListOfParameters is an attribute of the model for SBML Level 1&2
parameters_with_values = [(sympy.Symbol(p.getId()), p.getValue())
for p in sbml_model.getListOfParameters()]
parameter_values = dict(parameters_with_values)
parameters = list(map(lambda x: x[0], parameters_with_values))
if not parameters:
track_local_parameters = True
parameters = set()
parameter_values = {}
else:
track_local_parameters = False
stoichiometry_matrix = np.zeros((len(species), len(reactions)), dtype=int)
propensities = []
for reaction_index, reaction in enumerate(reactions):
if track_local_parameters:
for param, value in reaction.parameters:
parameters.add(param)
parameter_values[param] = value
reactants = reaction.reactants
products = reaction.products
propensities.append(reaction.propensity)
for species_index, species_id in enumerate(species):
net_stoichiometry = products.get(species_id, 0) - reactants.get(species_id, 0)
stoichiometry_matrix[species_index, reaction_index] = net_stoichiometry
if track_local_parameters:
# sympy does not allow sorting its parameter lists by default,
# explicitly tell to sort by str representation
sorted_parameters = sorted(parameters, key=str)
else:
sorted_parameters = parameters
parameter_values_list = [parameter_values[p] for p in sorted_parameters]
# We need to concatenate compartment names and parameters as in our framework we cannot differentiate the two
compartments_and_parameters = compartments + sorted_parameters
parameter_values_list = compartment_sizes + parameter_values_list
model = Model(species, compartments_and_parameters, propensities, stoichiometry_matrix)
return model, parameter_values_list, initial_conditions
| 40.144
| 115
| 0.682742
| 603
| 5,018
| 5.527363
| 0.31675
| 0.040504
| 0.016502
| 0.012001
| 0.081008
| 0.070207
| 0.039004
| 0.039004
| 0.039004
| 0
| 0
| 0.0036
| 0.22499
| 5,018
| 125
| 116
| 40.144
| 0.853433
| 0.190514
| 0
| 0.050633
| 0
| 0
| 0.036273
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050633
| false
| 0
| 0.075949
| 0
| 0.177215
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
2551cc7f888a7265ce1f8beeca110b9348759577
| 1,123
|
py
|
Python
|
clrenv/tests/test_path.py
|
color/clrenv
|
e11b67fcce129a4c828b6d7b421d9f2eac58785b
|
[
"MIT"
] | 2
|
2019-12-04T05:38:17.000Z
|
2022-02-17T06:24:23.000Z
|
clrenv/tests/test_path.py
|
color/clrenv
|
e11b67fcce129a4c828b6d7b421d9f2eac58785b
|
[
"MIT"
] | 9
|
2019-11-11T20:01:11.000Z
|
2021-09-30T00:41:52.000Z
|
clrenv/tests/test_path.py
|
color/clrenv
|
e11b67fcce129a4c828b6d7b421d9f2eac58785b
|
[
"MIT"
] | 4
|
2017-08-24T00:00:34.000Z
|
2021-06-25T16:41:20.000Z
|
import pytest
import clrenv
@pytest.fixture(autouse=True)
def clear_overlay_path(monkeypatch):
monkeypatch.setenv("CLRENV_OVERLAY_PATH", "")
def test_custom_base(tmp_path, monkeypatch):
custom_path = tmp_path / "custom/path"
custom_path.parent.mkdir()
custom_path.write_text("data")
monkeypatch.setenv("CLRENV_PATH", str(custom_path))
assert clrenv.path.environment_paths() == (custom_path,)
def test_missing_base(tmp_path, monkeypatch):
monkeypatch.setenv("CLRENV_PATH", str(tmp_path / "aaa"))
with pytest.raises(ValueError):
clrenv.path.environment_paths()
def test_overlay(tmp_path, monkeypatch):
env_path = tmp_path / "env"
monkeypatch.setenv("CLRENV_PATH", str(env_path))
env_path.write_text("")
overlay_path1 = tmp_path / "overlay1"
overlay_path2 = tmp_path / "overlay2"
overlay_path1.write_text("data")
overlay_path2.write_text("data2")
monkeypatch.setenv("CLRENV_OVERLAY_PATH", f"{overlay_path1}:{overlay_path2}")
assert clrenv.path.environment_paths() == (
overlay_path1,
overlay_path2,
env_path,
)
| 27.390244
| 81
| 0.715049
| 140
| 1,123
| 5.407143
| 0.264286
| 0.073976
| 0.151915
| 0.107001
| 0.332893
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011702
| 0.162956
| 1,123
| 40
| 82
| 28.075
| 0.793617
| 0
| 0
| 0
| 0
| 0
| 0.13179
| 0.027605
| 0
| 0
| 0
| 0
| 0.068966
| 1
| 0.137931
| false
| 0
| 0.068966
| 0
| 0.206897
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
25536ba36fdcd55ea907e174eeadb755910513a2
| 2,583
|
py
|
Python
|
utils/convert_codah.py
|
Longday0923/CODAH_Baseline
|
e9e331452a12c85e35969833cbfc824d6c0256c1
|
[
"MIT"
] | null | null | null |
utils/convert_codah.py
|
Longday0923/CODAH_Baseline
|
e9e331452a12c85e35969833cbfc824d6c0256c1
|
[
"MIT"
] | null | null | null |
utils/convert_codah.py
|
Longday0923/CODAH_Baseline
|
e9e331452a12c85e35969833cbfc824d6c0256c1
|
[
"MIT"
] | null | null | null |
import random
import pandas as pd
import numpy as np
import json
from tqdm import *
def split(full_list, shuffle=False, ratio=0.2):
n_total = len(full_list)
offset = int(n_total * ratio)
if n_total == 0 or offset < 1:
return [], full_list
if shuffle:
random.shuffle(full_list)
sublist_1 = full_list[:offset]
sublist_2 = full_list[offset:2 * offset]
sublist_3 = full_list[2 * offset:]
return sublist_1, sublist_2, sublist_3
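# Note (illustrative): with ratio=0.2 the list is cut into 20% / 20% / 60% pieces,
# e.g. split(list(range(10)), shuffle=False, ratio=0.2) returns
# ([0, 1], [2, 3], [4, 5, 6, 7, 8, 9]).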
def convert_to_codah_statement(input_file: str, output_file1: str):
print(f'converting {input_file} to entailment dataset...')
tsv_file = pd.read_csv(input_file)
qa_list = tsv_file.to_numpy()
nrow = sum(1 for _ in qa_list)
id = 0
with open(output_file1, 'w') as output_handle1:
# print("Writing to {} from {}".format(output_file, qa_file))
for sample in tqdm(qa_list, total=nrow):
output_dict = convert_sample_to_entailment(sample, id)
output_handle1.write(json.dumps(output_dict))
output_handle1.write("\n")
id += 1
print(f'converted statements saved to {output_file1}')
print()
# Convert one CSV row into a question dict containing the stem, labeled statements and answer choices
def convert_sample_to_entailment(sample: list, id: int):
question_text = sample[1]
choices = sample[3:7] # left close right open
single_qa_dict = {'id': id, 'question': {'stem': sample[1]}, 'answer_key': 0}
choice_list = []
choice_count = 0
for choice in choices:
statement = question_text + ' ' + choice
create_output_dict(single_qa_dict, statement, choice_count == 0)
choice_list.append({'text': choice, 'label': choice_count})
choice_count += 1
single_qa_dict['question']['choices'] = choice_list
return single_qa_dict
# Append a labeled statement (question text + choice) to the question dict's "statements" list
def create_output_dict(input_json: dict, statement: str, label: bool) -> dict:
if "statements" not in input_json:
input_json["statements"] = []
input_json["statements"].append({"label": label, "statement": statement})
return input_json
if __name__ == "__main__":
convert_to_codah_statement('../data/codah/fold_0/train.csv', './data/codah/fold_0/train.jsonl')
# train, dev, test = split(full_list, shuffle=True, ratio=0.2)
# convert_to_codah_statement(train, 'train.statement.jsonl')
# convert_to_codah_statement(dev, 'train.statement.jsonl')
# convert_to_codah_statement(test, 'train.statement.jsonl')
print('Hey, there!')
| 37.434783
| 99
| 0.684863
| 363
| 2,583
| 4.608815
| 0.278237
| 0.038255
| 0.041841
| 0.068739
| 0.109982
| 0.050209
| 0.050209
| 0
| 0
| 0
| 0
| 0.015957
| 0.199381
| 2,583
| 68
| 100
| 37.985294
| 0.793037
| 0.187379
| 0
| 0
| 0
| 0
| 0.12823
| 0.029187
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078431
| false
| 0
| 0.098039
| 0
| 0.254902
| 0.078431
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
25541a58e6ade5999bf8649b87e0a951c63912f5
| 3,237
|
py
|
Python
|
new_imgt_scraping/new_imgt/new_imgt/spiders/new_imgt_spider.py
|
yaosichao0915/DeepImmuno
|
a2a7832f6cded9296735475c2e8fa5c9b62b3f8d
|
[
"MIT"
] | 20
|
2020-12-28T03:34:34.000Z
|
2022-03-14T01:36:52.000Z
|
new_imgt_scraping/new_imgt/new_imgt/spiders/new_imgt_spider.py
|
zhangjiahuan17/DeepImmuno
|
5ab182429bc3276fd43be2ec8d86b72e773992ef
|
[
"MIT"
] | 3
|
2021-04-23T19:21:11.000Z
|
2021-08-22T00:39:01.000Z
|
new_imgt_scraping/new_imgt/new_imgt/spiders/new_imgt_spider.py
|
zhangjiahuan17/DeepImmuno
|
5ab182429bc3276fd43be2ec8d86b72e773992ef
|
[
"MIT"
] | 11
|
2021-04-23T16:46:29.000Z
|
2022-03-18T15:53:55.000Z
|
'''
pip install Scrapy
pip install selenium
In a folder:
scrapy startproject imgt
when running:
scrapy crawl new_imgt -o out.json
when using scrapy shell:
scrapy shell 'url'
in IPython, you can use response.xpath or response.css to try things out
objects:
1. SelectorList if css('a') matches many 'a' elements
2. Selector: has both css and xpath methods
3. response
conda activate selenium
remember to make changes to the python script under the spiders folder
'''
'''
If you encounter a robots.txt blockage error:
open settings.py and set the robots setting (ROBOTSTXT_OBEY) to False
you can specify hla in __init__, and pass it when calling:
scrapy crawl new_imgt -a hla="HLA-A*0101" -o out.json
For dynamic pages, use selenium to fetch the page and pass it to a scrapy response object
Double-check using both 'inspect' and 'view source' in a webpage; they can differ
'''
'''
cat inventory_compliant.txt | while read line; do scrapy crawl new_imgt -a hla="$line" -o "./hla_paratope/$line.json"; done
'''
import scrapy
from scrapy.crawler import CrawlerProcess
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
class imgtSpider(scrapy.Spider):
name = 'new_imgt'
start_urls = ['http://www.imgt.org/3Dstructure-DB/']
    def __init__(self, hla):
        self.hla = hla
        path_to_chromedriver = '/Users/ligk2e/Downloads/chromedriver'
        self.driver = webdriver.Chrome(executable_path=path_to_chromedriver)
        self.driver.implicitly_wait(5)  # wait up to 5s for elements to appear
def get_selenium(self,url):
self.driver.get(url)
self.driver.find_element_by_xpath('//*[@id="species"]/option[27]').click() # choose Home Sapien (select drop down)
self.driver.find_element_by_xpath('//*[@id="radio_pMH1"]').click() # choose pMHCI (input)
self.driver.find_element_by_xpath('//*[@id="datas"]/p[2]/input[1]').click() # click submit (button)
return self.driver.page_source.encode('utf-8')
    def parse(self, response):  # parses the 550-entry results page
response = scrapy.Selector(text=self.get_selenium(imgtSpider.start_urls[0]))
for row in response.css('body#result div#data table.Results tbody tr')[1:]: #[Selector,Selector,Selector...] # don't need header
mhc = row.css('td')[2].css('td::text').get()
if self.hla in mhc:
url_suffix = row.css('td')[1].css('a::attr(href)').get() # details.cgi?pdbcode=2CLR
# what we need is: http://www.imgt.org/3Dstructure-DB/cgi/details.cgi?pdbcode=2CLR&Part=Epitope
url_next = 'http://www.imgt.org/3Dstructure-DB/cgi/' + url_suffix + '&Part=Epitope'
yield scrapy.Request(url_next,callback=self.parse_paratope)
def parse_paratope(self,response):
url_next = response.url
paratope = ''
for i in response.css('body#result div#mybody div#main table')[0].css('tr')[2].css('td')[1].css('span a'):
aa = i.css('a::text').get()
paratope += aa
yield {'{}'.format(url_next):paratope}
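# Example (illustrative) of the item yielded by parse_paratope above -- the key
# is the epitope page URL and the value is the concatenated paratope residues:
#   {'http://www.imgt.org/3Dstructure-DB/cgi/details.cgi?pdbcode=2CLR&Part=Epitope': 'YRNQK...'}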
# if using process, you can just run a python new_imgt_spider.py
# process = CrawlerProcess()
# process.crawl(imgtSpider)
# process.start()
| 33.030612
| 139
| 0.666976
| 467
| 3,237
| 4.526767
| 0.426124
| 0.033113
| 0.019868
| 0.025544
| 0.129139
| 0.129139
| 0.070956
| 0
| 0
| 0
| 0
| 0.011691
| 0.207291
| 3,237
| 98
| 140
| 33.030612
| 0.812159
| 0.279271
| 0
| 0
| 0
| 0
| 0.190476
| 0.064986
| 0
| 0
| 0
| 0
| 0
| 1
| 0.121212
| false
| 0
| 0.121212
| 0
| 0.363636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
25582a95ad549fbb53f7bc9394341328228fcce8
| 38,786
|
py
|
Python
|
Base/opcode_tab.py
|
robertmuth/Cwerg
|
fdf30b06c93b4620c0a45b448b6d92acb81c35f0
|
[
"Apache-2.0"
] | 171
|
2020-01-30T16:58:07.000Z
|
2022-03-27T22:12:17.000Z
|
Base/opcode_tab.py
|
robertmuth/Cwerg
|
fdf30b06c93b4620c0a45b448b6d92acb81c35f0
|
[
"Apache-2.0"
] | 14
|
2021-05-15T02:12:09.000Z
|
2022-03-16T04:16:18.000Z
|
Base/opcode_tab.py
|
robertmuth/Cwerg
|
fdf30b06c93b4620c0a45b448b6d92acb81c35f0
|
[
"Apache-2.0"
] | 5
|
2021-03-01T20:52:13.000Z
|
2022-03-07T06:35:03.000Z
|
#!/usr/bin/python3
# (c) Robert Muth - see LICENSE for more info
from typing import List, Dict
import enum
from Util import cgen
# maximum number of operands in an instruction
MAX_OPERANDS = 5
# maximum number of function parameters (or results)
MAX_PARAMETERS = 64
############################################################
# Opcode Families [OF.]
#
# Each Opcode belongs to one of the families below.
# Within each family the order and kind of the operands is similar
############################################################
@enum.unique
class OPC_KIND(enum.Enum):
INVALID = 0
ALU = 1
ALU1 = 2
MOV = 3
LEA = 4
LEA1 = 5
COND_BRA = 6
BRA = 7
BSR = 8
JSR = 9
SWITCH = 10
RET = 11
SYSCALL = 12
ST = 13
LD = 14
PUSHARG = 15
POPARG = 16
NOP = 17
NOP1 = 18
CONV = 19
CMP = 20
BCOPY = 21
BZERO = 22
DIRECTIVE = 23 # not a real instruction
_OF_TO_PURPOSE = {
OPC_KIND.ALU: ["dst", "src1", "src2"],
OPC_KIND.ALU1: ["dst", "src"],
OPC_KIND.COND_BRA: ["op1", "op2", "target_bbl"],
OPC_KIND.SWITCH: ["index", "table"],
OPC_KIND.BRA: ["target_bbl"],
OPC_KIND.RET: [],
OPC_KIND.BSR: ["target_fun"],
OPC_KIND.JSR: ["target_fun_addr", "target_fun_sig"],
OPC_KIND.SYSCALL: ["target_fun_sig", "syscall_no"],
OPC_KIND.LEA: ["dst", "base", "offset"],
OPC_KIND.LEA1: ["dst", "base"],
OPC_KIND.LD: ["dst", "base", "offset"],
OPC_KIND.ST: ["base", "offset", "src"],
OPC_KIND.NOP: [],
OPC_KIND.NOP1: ["src_and_dst"],
OPC_KIND.BZERO: ["dst_addr", "width"],
OPC_KIND.BCOPY: ["dst_addr", "src_addr", "width"],
OPC_KIND.POPARG: ["dst"],
OPC_KIND.PUSHARG: ["src"],
OPC_KIND.CONV: ["dst", "src"],
OPC_KIND.MOV: ["dst", "src"],
OPC_KIND.CMP: ["dst", "src1", "src2", "cmp1", "cmp2"],
}
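# Illustrative reading of the table above: an OPC_KIND.ALU instruction such as
# `add` is written `add dst src1 src2`, while OPC_KIND.ST reverses the data
# direction: `st base offset src`.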
_OFS_CFG = {OPC_KIND.BSR, OPC_KIND.JSR, OPC_KIND.SYSCALL, OPC_KIND.SWITCH,
OPC_KIND.BRA, OPC_KIND.COND_BRA, OPC_KIND.RET}
# These instructions do not have a written register
_OFS_NO_DEF = _OFS_CFG | {OPC_KIND.ST, OPC_KIND.BCOPY, OPC_KIND.BZERO,
OPC_KIND.PUSHARG, OPC_KIND.NOP}
# These instructions have a written register
_OFS_WRITING_REGS = {
OPC_KIND.LEA, OPC_KIND.LEA1, OPC_KIND.ALU, OPC_KIND.ALU1, OPC_KIND.CMP,
OPC_KIND.MOV, OPC_KIND.CONV, OPC_KIND.LD,
OPC_KIND.POPARG, OPC_KIND.NOP1}
@enum.unique
class OA(enum.Flag):
"""Opcode Attributes"""
BBL_TERMINATOR = 1 << 0
NO_FALL_THROUGH = 1 << 1
CALL = 1 << 2
COMMUTATIVE = 1 << 3
MEM_RD = 1 << 4
MEM_WR = 1 << 5
SPECIAL = 1 << 6
OAS_CFG = OA.CALL | OA.BBL_TERMINATOR
OAS_SIDE_EFFECT = OA.CALL | OA.BBL_TERMINATOR | OA.MEM_RD | OA.MEM_WR | OA.SPECIAL
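# Illustrative flag checks (enum.Flag supports membership tests):
#   OA.CALL in OAS_CFG                  -> True
#   OA.COMMUTATIVE in OAS_SIDE_EFFECT   -> False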
############################################################
# Operand Kinds [OK.]
#
# Each instruction operates on a list of operands. Since we mimic a
# three address machine, ALU instructions usually have 3 operands,
# the destination being the first one.
# There is a large variety of operands denoting registers or immediates
# which enable some basic typing on a per operand basis.
# Additional typing constraints across the operands are enforced by "rules".
############################################################
@enum.unique
class OP_KIND(enum.Enum):
INVALID = 0
REG = 1
CONST = 2
REG_OR_CONST = 3
    # bbl immediates refer to a bbl in the current function
# Note: bbls can be referred to before they are defined
BBL = 4
# mem immediates refer to a global memory or stack region
MEM = 5
# stk immediates refer to a stack region in the current function
STK = 6
    # fun immediates refer to a function in the global function table
# Note: funs can be referred to before they are defined
FUN = 7
JTB = 8
TYPE_LIST = 20
DATA_KIND = 21 # one of the RK_
MEM_KIND = 23 # one of the MK_
FUN_KIND = 24 # one of the FK_
FIELD = 25
NAME = 26
NAME_LIST = 27
INT = 28
BBL_TAB = 29
BYTES = 30
############################################################
# Type Constraints
############################################################
@enum.unique
class TC(enum.Enum):
INVALID = 0
ANY = 1
ADDR_NUM = 2
ADDR_INT = 3
NUM = 4
FLT = 5
INT = 6
ADDR = 7
CODE = 8
UINT = 9
SINT = 10
OFFSET = 11
#
SAME_AS_PREV = 20
# for bitcast
SAME_SIZE_AS_PREV = 22
############################################################
# DataType Flavors
############################################################
DK_FLAVOR_S = 0x20 # signed int
DK_FLAVOR_U = 0x40 # unsigned int
DK_FLAVOR_F = 0x60 # ieee floating point
DK_FLAVOR_A = 0x80 # data address
DK_FLAVOR_C = 0xa0 # code address
_DK_WIDTH_8 = 0
_DK_WIDTH_16 = 1
_DK_WIDTH_32 = 2
_DK_WIDTH_64 = 3
_DK_WIDTH_128 = 4
class DK(enum.Enum):
"""Data Kind - primarily used to associate a type with Const and Reg"""
INVALID = 0
# signed
S8 = DK_FLAVOR_S + _DK_WIDTH_8
S16 = DK_FLAVOR_S + _DK_WIDTH_16
S32 = DK_FLAVOR_S + _DK_WIDTH_32
S64 = DK_FLAVOR_S + _DK_WIDTH_64
# S128 = _RK_S + _RK_128
# unsigned
U8 = DK_FLAVOR_U + _DK_WIDTH_8
U16 = DK_FLAVOR_U + _DK_WIDTH_16
U32 = DK_FLAVOR_U + _DK_WIDTH_32
U64 = DK_FLAVOR_U + _DK_WIDTH_64
# U128 = _RK_U + _RK_128
# float
F8 = DK_FLAVOR_F + _DK_WIDTH_8
F16 = DK_FLAVOR_F + _DK_WIDTH_16
F32 = DK_FLAVOR_F + _DK_WIDTH_32
F64 = DK_FLAVOR_F + _DK_WIDTH_64
# F128 = _RK_F + _RK_128
# data address
A32 = DK_FLAVOR_A + _DK_WIDTH_32
A64 = DK_FLAVOR_A + _DK_WIDTH_64
# code address
C32 = DK_FLAVOR_C + _DK_WIDTH_32
C64 = DK_FLAVOR_C + _DK_WIDTH_64
def flavor(self) -> int:
return self.value & 0xe0
def bitwidth(self) -> int:
return 8 << (self.value & 0x7)
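    # Worked example (illustrative): DK.S32.value == 0x22, so
    #   flavor()   == 0x22 & 0xe0 == 0x20 == DK_FLAVOR_S
    #   bitwidth() == 8 << (0x22 & 0x7) == 8 << 2 == 32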
SHORT_STR_TO_RK = {x.name: x for x in DK}  # note: iterating an Enum yields canonical members only, so aliases are not included
def RegIsAddrInt(rk: DK):
return (DK.A32.value <= rk.value <= DK.A64.value or
DK.S8.value <= rk.value <= DK.U64.value)
def RegIsInt(rk: DK):
return DK.S8.value <= rk.value <= DK.U64.value
TC_TO_CHECKER = {
TC.ANY: lambda x: True,
TC.ADDR_NUM: lambda x: x.flavor() != DK_FLAVOR_C,
TC.NUM: lambda x: x.flavor() in {DK_FLAVOR_U, DK_FLAVOR_S, DK_FLAVOR_F},
TC.INT: lambda x: x.flavor() in {DK_FLAVOR_U, DK_FLAVOR_S},
TC.ADDR: lambda x: x.flavor() == DK_FLAVOR_A,
TC.CODE: lambda x: x.flavor() == DK_FLAVOR_C,
TC.SINT: lambda x: x.flavor() == DK_FLAVOR_S,
TC.UINT: lambda x: x.flavor() == DK_FLAVOR_U,
TC.ADDR_INT: RegIsAddrInt,
TC.FLT: lambda x: x.flavor() == DK_FLAVOR_F,
TC.OFFSET: lambda x: x.flavor() in {DK_FLAVOR_U, DK_FLAVOR_S},
# maybe change this to just U or S
}
def CheckTypeConstraint(last_type: DK, constraint: TC, this_type: DK) -> bool:
checker = TC_TO_CHECKER.get(constraint)
if checker:
return checker(this_type)
if constraint == TC.SAME_AS_PREV:
return last_type == this_type
elif constraint == TC.SAME_SIZE_AS_PREV:
return last_type.bitwidth() == this_type.bitwidth()
else:
        assert False, f"unknown constraint {constraint.name}"
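# Worked example (illustrative): checking `add dst:S32 src1:S32 src2:S32`
# against ADD's constraints [TC.NUM, TC.SAME_AS_PREV, TC.SAME_AS_PREV]:
#   CheckTypeConstraint(DK.INVALID, TC.NUM, DK.S32)      -> True
#   CheckTypeConstraint(DK.S32, TC.SAME_AS_PREV, DK.S32) -> True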
@enum.unique
class MEM_KIND(enum.Enum):
"""Represents Allocation Type of Global Memory """
INVALID = 0
RO = 1
RW = 2
TLS = 3
FIX = 4 # a fixed address provide via
EXTERN = 5 # forward declaration must be defined before code emission
BUILTIN = 6 # linker defined
SHORT_STR_TO_MK = {x.name: x for x in MEM_KIND}
@enum.unique
class FUN_KIND(enum.Enum):
"""Function Kinds"""
INVALID = 0
BUILTIN = 1 # linker defined
EXTERN = 2 # forward declaration must be defined before code emission
NORMAL = 3
SIGNATURE = 4
SHORT_STR_TO_FK = {x.name: x for x in FUN_KIND}
############################################################
# Operand Value Kind Sets
############################################################
OKS_LIST = {OP_KIND.BYTES, OP_KIND.NAME_LIST, OP_KIND.BBL_TAB,
OP_KIND.TYPE_LIST}
OKS_ALLOWED_FOR_INSTRUCTIONS = {OP_KIND.REG, OP_KIND.CONST,
OP_KIND.REG_OR_CONST,
OP_KIND.FUN, OP_KIND.BBL, OP_KIND.JTB,
OP_KIND.MEM, OP_KIND.STK, OP_KIND.FIELD}
# we do not want non-scalar operands in instructions as they
# increase memory usage and complicate the code
assert not (OKS_LIST & OKS_ALLOWED_FOR_INSTRUCTIONS)
OKS_ALLOWED_FOR_DIRECTIVES = {OP_KIND.INT, OP_KIND.MEM_KIND, OP_KIND.BYTES,
OP_KIND.NAME, OP_KIND.BBL_TAB,
OP_KIND.FUN_KIND, OP_KIND.TYPE_LIST,
OP_KIND.NAME_LIST, OP_KIND.DATA_KIND, OP_KIND.FUN,
OP_KIND.MEM, OP_KIND.BBL
}
OKS_ALL = OKS_ALLOWED_FOR_INSTRUCTIONS | OKS_ALLOWED_FOR_DIRECTIVES
############################################################
# Opcode Groups
############################################################
@enum.unique
class OPC_GENUS(enum.Enum):
INVALID = 0
BASE = 1
TBD = 2
_DIR_TO_PURPOSE = {
".mem": ["name", "alignment", "mem_kind"],
".data": ["repeat", "data"],
".addr.fun": ["width", "fun"],
".addr.mem": ["width", "mem", "offset"],
".fun": ["name", "fun_kind", "out_params", "in_params"],
".bbl": ["name"],
".reg": ["reg_kind", "names"],
".stk": ["name", "alignment", "size"],
".jtb": ["name", "size", "default_bbl", "map"],
".struct": ["name"],
".field": ["name", "alignment", "size"],
".endstruct": [],
".stk.s": ["name", "name"],
}
############################################################
# Opcode
############################################################
class Opcode:
"""Opcodes are templates for instructions similar to what you would
find in assembly language manual for a processor.
Note, the main purpose of instantiating an opcode instance is to
populate the Table/TableByNo class member
"""
Table: Dict[str, "Opcode"] = {}
TableByNo: Dict[int, "Opcode"] = {}
def __init__(self, no, name: str, kind: OPC_KIND,
operand_kinds: List[OP_KIND],
constraints: List[TC], group: OPC_GENUS, desc,
attributes=OA(0)):
assert name not in Opcode.Table, f"duplicate opcode {name}"
assert len(operand_kinds) <= MAX_OPERANDS, name
Opcode.Table[name] = self
assert no not in Opcode.TableByNo, f"duplicate no: {no} {name}"
Opcode.TableByNo[no] = self
self.no = no
self.name = name
self.kind: OPC_KIND = kind
self.operand_kinds: List[OP_KIND] = operand_kinds
self.constraints: List[TC] = constraints
self.group = group
self.desc = desc
self.attributes = attributes
assert kind != OPC_KIND.INVALID, f"unknown {kind}"
is_directive = kind == OPC_KIND.DIRECTIVE
if is_directive:
assert name.startswith(".")
self.purpose = _DIR_TO_PURPOSE[name]
else:
self.purpose = _OF_TO_PURPOSE[kind]
assert len(self.purpose) == len(
operand_kinds), f"{name} {operand_kinds}"
assert len(operand_kinds) == len(constraints), f"{no} {name}"
for ok, tc in zip(operand_kinds, constraints):
# self.operands_tab[o] = op
assert ok in OKS_ALL, f"unexpected operand: {ok}"
if ok in {OP_KIND.REG, OP_KIND.CONST, OP_KIND.REG_OR_CONST}:
assert tc != TC.INVALID, f"{no} {name}"
else:
assert tc == TC.INVALID, f"{no} {name}"
            if is_directive:
                assert ok in OKS_ALLOWED_FOR_DIRECTIVES, f"bad directive op [{ok}]"
            else:
                assert ok in OKS_ALLOWED_FOR_INSTRUCTIONS, f"bad ins op [{ok}]"
def is_call(self):
return OA.CALL in self.attributes
def is_bbl_terminator(self):
return OA.BBL_TERMINATOR in self.attributes
def has_fallthrough(self):
return OA.NO_FALL_THROUGH not in self.attributes
def has_side_effect(self):
return OAS_SIDE_EFFECT & self.attributes
def def_ops_count(self):
"""How many of the leading operands write are register writes"""
if self.kind in {OPC_KIND.INVALID,
OPC_KIND.DIRECTIVE} or self.kind in _OFS_NO_DEF:
return 0
else:
return 1
@classmethod
def Lookup(cls, name: str) -> "Opcode":
return cls.Table[name]
def __str__(self):
return f"[OPCODE: {self.name}]"
############################################################
# ARITHMETIC ALU 0x10
# FLOAT + INT
# note: limited address arithmetic allowed
ADD = Opcode(0x10, "add", OPC_KIND.ALU,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.NUM, TC.SAME_AS_PREV, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"Addition: dst := src1 + src2",
OA.COMMUTATIVE)
# note: limited address arithmetic allowed
SUB = Opcode(0x11, "sub", OPC_KIND.ALU,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.NUM, TC.SAME_AS_PREV, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"""Subtraction: dst := src1 - src2
Note: `sub dst = 0 src` can be used to emulate `neg` for integers.
    (for floating point use `dst = mul src -1.0`)
""")
# needs more work wrt to size
MUL = Opcode(0x12, "mul", OPC_KIND.ALU,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.NUM, TC.SAME_AS_PREV, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"Multiplication: dst := src1 \\* src2",
OA.COMMUTATIVE)
DIV = Opcode(0x13, "div", OPC_KIND.ALU,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.NUM, TC.SAME_AS_PREV, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"""Division: dst := src1 / src2
Some day the operation might be more strictly defined as:
dst := 0 if src2 == 0 else src1 / src2""")
# cf.:
# https://www.gingerbill.org/article/2020/01/25/a-reply-to-lets-stop-copying-c/
REM = Opcode(0x14, "rem", OPC_KIND.ALU,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.INT, TC.SAME_AS_PREV, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"""Modulo: dst := a % b
Some day the sign of the result might be more strictly defined.
Note: does not apply to floating point numbers""")
COPYSIGN = Opcode(0x15, "copysign", OPC_KIND.ALU, [OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"""Set the sign of src1 to match src2 (floating point only)
Note: `copysign dst src1 0.0` can be used to emulate `abs`""")
############################################################
# LOGIC ALU 0x18
# INT ONLY (all regs are treated as unsigned except for shr/rshr)
XOR = Opcode(0x18, "xor", OPC_KIND.ALU,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.INT, TC.SAME_AS_PREV, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"""Bitwise exclusive or: dst := src1 ^ src2
Note: `xor dst = src1 0b111...1` can be used to emulate `not`""",
OA.COMMUTATIVE)
# note: limited address arithmetic allowed
AND = Opcode(0x19, "and", OPC_KIND.ALU,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.INT, TC.SAME_AS_PREV, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"Bitwise and: dst := src1 & src2",
OA.COMMUTATIVE)
# note: limited address arithmetic allowed
OR = Opcode(0x1a, "or", OPC_KIND.ALU,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.INT, TC.SAME_AS_PREV, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"Bitwise or: dst := src1 | src2",
OA.COMMUTATIVE)
# shift amount is determined as follows:
# use the log2(width(dst)) low order bits of src2
# e.g. for a dst of kind s8 the low order 3 bits of
# src2 will be used.
# src2 is treated as an unsigned register
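# Worked example (illustrative): for an s8 dst only the low 3 bits of src2 are
# used, so `shl dst src1 9` shifts by 9 & 0b111 == 1.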
SHL = Opcode(0x1b, "shl", OPC_KIND.ALU,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.INT, TC.SAME_AS_PREV, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"""Shift left: dst := src1 << src2
dst: = src1 << (src2 % bitwidth(src1))""")
SHR = Opcode(0x1c, "shr", OPC_KIND.ALU,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.INT, TC.SAME_AS_PREV, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"""Shift right: dst := src1 >> src2
dst: = src1 >> (src2 % bitwidth(src1))""")
# do we need both directions, do we need a reverse version?
# should we rather use a funnel shift?
# ROTL = Opcode(0x1d, "rotl", OPC_KIND.ALU,
# [OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
# [TC.INT, TC.SAME_AS_PREV, TC.SAME_AS_PREV], OPC_GENUS.TBD,
# "Rotation Left")
############################################################
# CONDITIONAL BRANCHES 0x20
# do we need unordered variants for floating point?
# not beq/bne is the only operation for c_regs
BEQ = Opcode(0x20, "beq", OPC_KIND.COND_BRA,
[OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST, OP_KIND.BBL],
[TC.ANY, TC.SAME_AS_PREV, TC.INVALID], OPC_GENUS.BASE,
"Conditional branch if equal.",
OA.COMMUTATIVE | OA.BBL_TERMINATOR)
BNE = Opcode(0x21, "bne", OPC_KIND.COND_BRA,
[OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST, OP_KIND.BBL],
[TC.ANY, TC.SAME_AS_PREV, TC.INVALID], OPC_GENUS.BASE,
"Conditional branch if not equal.",
OA.COMMUTATIVE | OA.BBL_TERMINATOR)
BLT = Opcode(0x22, "blt", OPC_KIND.COND_BRA,
[OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST, OP_KIND.BBL],
[TC.ADDR_NUM, TC.SAME_AS_PREV, TC.INVALID], OPC_GENUS.BASE,
"Conditional branch if greater than.",
OA.BBL_TERMINATOR)
BLE = Opcode(0x23, "ble", OPC_KIND.COND_BRA,
[OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST, OP_KIND.BBL],
[TC.ADDR_NUM, TC.SAME_AS_PREV, TC.INVALID], OPC_GENUS.BASE,
"Conditional branch if less or equal.",
OA.BBL_TERMINATOR)
############################################################
# More Control Flow 0x28
SWITCH = Opcode(0x28, "switch", OPC_KIND.SWITCH, [OP_KIND.REG, OP_KIND.JTB],
[TC.UINT, TC.INVALID], OPC_GENUS.BASE,
"""Multi target computed jump.
The register argument must be less than the jtb `size`.
The jtb symbol must have been previously defined with the `.jtb` directive.
""",
OA.BBL_TERMINATOR | OA.NO_FALL_THROUGH)
BRA = Opcode(0x29, "bra", OPC_KIND.BRA, [OP_KIND.BBL],
[TC.INVALID], OPC_GENUS.BASE,
"Unconditional branch.",
OA.BBL_TERMINATOR | OA.NO_FALL_THROUGH)
RET = Opcode(0x2a, "ret", OPC_KIND.RET, [],
[], OPC_GENUS.BASE,
"Return from subroutine.",
OA.BBL_TERMINATOR | OA.NO_FALL_THROUGH)
BSR = Opcode(0x2b, "bsr", OPC_KIND.BSR, [OP_KIND.FUN],
[TC.INVALID], OPC_GENUS.BASE,
"Branch to subroutine fun",
OA.CALL)
JSR = Opcode(0x2c, "jsr", OPC_KIND.JSR, [OP_KIND.REG, OP_KIND.FUN],
[TC.CODE, TC.INVALID], OPC_GENUS.BASE,
"""Jump indirectly to subroutine through register (fun describes the signature).
The signature must have been previously defined with the `.fun` directive.""",
OA.CALL)
SYSCALL = Opcode(0x2d, "syscall", OPC_KIND.SYSCALL,
[OP_KIND.FUN, OP_KIND.CONST],
[TC.INVALID, TC.UINT], OPC_GENUS.BASE,
"""Syscall to `syscall_no`. (fun describes the signature).
The signature must have been previously defined with the `.fun` directive.""",
OA.CALL)
TRAP = Opcode(0x2e, "trap", OPC_KIND.RET, [],
[], OPC_GENUS.BASE,
"Abort program.",
OA.BBL_TERMINATOR | OA.NO_FALL_THROUGH)
############################################################
# Misc 0x30
PUSHARG = Opcode(0x30, "pusharg", OPC_KIND.PUSHARG, [OP_KIND.REG_OR_CONST],
[TC.ANY], OPC_GENUS.BASE,
"push a call or return arg - must immediately precede bsr/jsr or ret.",
OA.SPECIAL)
POPARG = Opcode(0x31, "poparg", OPC_KIND.POPARG, [OP_KIND.REG],
[TC.ANY], OPC_GENUS.BASE,
"pop a call or return arg - must immediately follow fun entry or bsr/jsr.",
OA.SPECIAL)
CONV = Opcode(0x32, "conv", OPC_KIND.CONV, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.NUM, TC.NUM], OPC_GENUS.BASE,
# TODO: specify rounding and overflow for float <-> int conversions
"""Conversion of numerical regs which do not have to be of same size. Bits may change.
If the conversion involves both a widening and a change of type, the widening is performed
first. """)
BITCAST = Opcode(0x33, "bitcast", OPC_KIND.CONV,
[OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.ANY, TC.SAME_SIZE_AS_PREV], OPC_GENUS.BASE,
"""Cast between regs of same size. Bits will be re-interpreted but do not change.
This is useful for manipulating addresses in unusual ways or
looking at the binary representation of floats.""")
MOV = Opcode(0x34, "mov", OPC_KIND.MOV, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.ANY, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"""Move between registers.
While a mov can be emulated via a `add dst = src 0`,
having a dedicated instruction makes some optimizations easier to
implement when combined with a canonicalization.""")
CMPEQ = Opcode(0x35, "cmpeq", OPC_KIND.CMP,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST,
OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.ANY, TC.SAME_AS_PREV, TC.SAME_AS_PREV, TC.ANY,
TC.SAME_AS_PREV],
OPC_GENUS.BASE,
"""Conditional move (compare equal). dst := (cmp1 == cmp2) ? src1 : src2
Note: dst/cmp1/cmp2 may be of a different type than src1/src2.""",
OA.COMMUTATIVE)
CMPLT = Opcode(0x36, "cmplt", OPC_KIND.CMP,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST,
OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.ANY, TC.SAME_AS_PREV, TC.SAME_AS_PREV, TC.ADDR_NUM,
TC.SAME_AS_PREV],
OPC_GENUS.BASE,
"""Conditional move (compare less than). dst := (cmp1 < cmp2) ? src1 : src2
Note: dst/cmp1/cmp2 may be of a different type than src1/src2.""")
# materialize addresses in a register
LEA = Opcode(0x38, "lea", OPC_KIND.LEA,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.ADDR, TC.SAME_AS_PREV, TC.OFFSET], OPC_GENUS.BASE,
"""Load effective Address. dst := base + offset
Note: dst and base are addresses but offset is not.""")
LEA_MEM = Opcode(0x39, "lea.mem", OPC_KIND.LEA,
[OP_KIND.REG, OP_KIND.MEM, OP_KIND.REG_OR_CONST],
[TC.ADDR, TC.INVALID, TC.OFFSET], OPC_GENUS.BASE,
"Load effective memory address with offset, dst := base + offset")
LEA_STK = Opcode(0x3a, "lea.stk", OPC_KIND.LEA,
[OP_KIND.REG, OP_KIND.STK, OP_KIND.REG_OR_CONST],
[TC.ADDR, TC.INVALID, TC.OFFSET], OPC_GENUS.BASE,
"Load effective stack address with offset. dst := base + offset")
LEA_FUN = Opcode(0x3b, "lea.fun", OPC_KIND.LEA1, [OP_KIND.REG, OP_KIND.FUN],
[TC.CODE, TC.INVALID], OPC_GENUS.BASE,
"Load effective function address: dst := base (note: no offset).")
############################################################
# LOAD STORE 0x40
# ld/st base address may be a register, memory symbol, or stack slot;
# the offset is a register or immediate
LD = Opcode(0x40, "ld", OPC_KIND.LD,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.ANY, TC.ADDR, TC.OFFSET], OPC_GENUS.BASE,
"Load from register base with offset. dst := RAM[base + offset]",
OA.MEM_RD)
# note: signedness of offset may matter here
LD_MEM = Opcode(0x41, "ld.mem", OPC_KIND.LD,
[OP_KIND.REG, OP_KIND.MEM, OP_KIND.REG_OR_CONST],
[TC.ANY, TC.INVALID, TC.OFFSET], OPC_GENUS.BASE,
"Load from memory base with offset. dst := RAM[base + offset] ",
OA.MEM_RD)
LD_STK = Opcode(0x42, "ld.stk", OPC_KIND.LD,
[OP_KIND.REG, OP_KIND.STK, OP_KIND.REG_OR_CONST],
[TC.ANY, TC.INVALID, TC.OFFSET], OPC_GENUS.BASE,
"Load from stack base with offset. dst := RAM[base + offset]",
OA.MEM_RD)
ST = Opcode(0x48, "st", OPC_KIND.ST,
[OP_KIND.REG, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.ADDR, TC.OFFSET, TC.ANY], OPC_GENUS.BASE,
"Store to register base with offset. RAM[base + offset] := src",
OA.MEM_WR)
ST_MEM = Opcode(0x49, "st.mem", OPC_KIND.ST,
[OP_KIND.MEM, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.INVALID, TC.OFFSET, TC.ANY], OPC_GENUS.BASE,
"Store to memory base with offset. RAM[base + offset] := src",
OA.MEM_WR)
ST_STK = Opcode(0x4a, "st.stk", OPC_KIND.ST,
[OP_KIND.STK, OP_KIND.REG_OR_CONST, OP_KIND.REG_OR_CONST],
[TC.INVALID, TC.OFFSET, TC.ANY], OPC_GENUS.BASE,
"Store to stack base with offset. RAM[base + offset] := src",
OA.MEM_WR)
############################################################
# FLOAT ALU OPERAND: 0x50
CEIL = Opcode(0x50, "ceil", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"Round float to integral, toward positive infinity")
FLOOR = Opcode(0x51, "floor", OPC_KIND.ALU1,
[OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"Round float to integral, toward negative infinity")
ROUND = Opcode(0x52, "round", OPC_KIND.ALU1,
[OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"Round float to integral, to nearest with ties to away")
TRUNC = Opcode(0x53, "trunc", OPC_KIND.ALU1,
[OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"""
Round float to integral, toward zero.
Note, frac(val) = val - trunc(val)""")
SQRT = Opcode(0x54, "sqrt", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"Compute the sqrt of floating point value")
# do we need all these?
Opcode(0x58, "sin", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.TBD,
"TBD")
Opcode(0x59, "cos", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.TBD,
"TBD")
Opcode(0x5a, "tan", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.TBD,
"TBD")
Opcode(0x5b, "asin", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.TBD,
"TBD")
Opcode(0x5c, "acos", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.TBD,
"TBD")
Opcode(0x5d, "atan", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.TBD,
"TBD")
Opcode(0x5e, "exp", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.TBD,
"TBD")
Opcode(0x5f, "log", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.FLT, TC.SAME_AS_PREV], OPC_GENUS.TBD,
"TBD")
############################################################
# Advanced ALU
############################################################
CNTLZ = Opcode(0x60, "cntlz", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.INT, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"Count leading zeros.")
CNTTZ = Opcode(0x61, "cnttz", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.INT, TC.SAME_AS_PREV], OPC_GENUS.BASE,
"Count trailing zeros.")
# INT SINGLE OPERAND
# the src reg is treated as an unsigned reg
Opcode(0x62, "cntpop", OPC_KIND.ALU1, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.INT, TC.SAME_AS_PREV], OPC_GENUS.TBD,
"TBD")
############################################################
# Annotations
############################################################
NOP = Opcode(0x70, "nop", OPC_KIND.NOP, [],
[], OPC_GENUS.BASE,
"nop - internal use.")
NOP1 = Opcode(0x71, "nop1", OPC_KIND.NOP1, [OP_KIND.REG],
[TC.ANY], OPC_GENUS.BASE,
"nop with one reg - internal use. Can be used to `reserve` a reg for code generation.",
OA.SPECIAL)
# LINE = Opcode(0x78, "line", OPC_KIND., [OP_KIND.NAME, OP_KIND.CONST],
# [TC.ANY], OPC_GENUS.BASE,
# "",
# OA.SPECIAL)
############################################################
# Misc Experimental
############################################################
# Note, negative lengths copy downwards
Opcode(0xb8, "bcopy", OPC_KIND.BCOPY,
[OP_KIND.REG, OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.ADDR, TC.SAME_AS_PREV, TC.OFFSET], OPC_GENUS.TBD,
"TBD",
OA.MEM_WR | OA.MEM_RD)
# Note, negative lengths copy downwards
Opcode(0xba, "bzero", OPC_KIND.BZERO, [OP_KIND.REG, OP_KIND.REG_OR_CONST],
[TC.ADDR, TC.OFFSET], OPC_GENUS.TBD,
"TBD",
OA.MEM_WR)
############################################################
# Directives 0xd
#
# do not correspond to instructions
############################################################
def Directive(no: int, name: str, operands, desc,
group=OPC_GENUS.BASE):
return Opcode(no, name, OPC_KIND.DIRECTIVE, operands,
constraints=[TC.INVALID] * len(operands),
desc=desc, group=group)
Directive(0x01, ".mem", [OP_KIND.NAME, OP_KIND.INT, OP_KIND.MEM_KIND],
"Add new memory region to unit")
Directive(0x02, ".data", [OP_KIND.INT, OP_KIND.BYTES],
"Add content to current memory region: multiple bytes")
Directive(0x03, ".addr.fun", [OP_KIND.INT, OP_KIND.FUN],
"Add content to current memory region: code address")
Directive(0x04, ".addr.mem", [OP_KIND.INT, OP_KIND.MEM, OP_KIND.INT],
"Add content to current memory region: "
"memory address with offset")
Directive(0x05, ".fun", [OP_KIND.NAME, OP_KIND.FUN_KIND, OP_KIND.TYPE_LIST,
OP_KIND.TYPE_LIST],
"Add new function to unit")
Directive(0x06, ".bbl", [OP_KIND.NAME],
"Add new basic block to current function")
Directive(0x07, ".reg", [OP_KIND.DATA_KIND, OP_KIND.NAME_LIST],
"Add new registers to current function")
Directive(0x08, ".stk", [OP_KIND.NAME, OP_KIND.INT, OP_KIND.INT],
"Add stack region to current function")
Directive(0x09, ".jtb",
[OP_KIND.NAME, OP_KIND.INT, OP_KIND.BBL, OP_KIND.BBL_TAB],
"bbl jump table: <name> <size> <default-bbl> <sparse-table>")
############################################################
# experimental/unimplemented
############################################################
# add/sub/rotate with carry for legalizing say 64bit regs into pairs of 32bit regs
# unreachable
# swap
# unordered comparison
# https://stackoverflow.com/questions/8627331/what-does-ordered-unordered-comparison-mean
# conv int - flt (urgent)
# conv int - int (urgent)
# extract (urgent)
# insert (urgent)
# ld_l, st_C, cmpxch, cmpswp
# pow, pow2 powi
# log
# crc32c (supported by x86-64 and arm64 - using 0x1EDC6F41)
# aes ???
# ld.scaled /st.scaled: base_reg + index_reg * scale imm + offset_imm
# copysign
# prefetch
# other built-ins: cf.:
# https://github.com/llvm-mirror/compiler-rt/tree/master/lib/builtins
_GROUPS = {
0x01: "## Directives\n",
0x10: "## Basic ALU\n",
0x20: "## Conditional Branches\n",
0x28: "## Other Control Flow\n",
0x30: "## Move/Conversion\n",
0x38: "## Address Arithmetic\n",
0x40: "## Load\n",
0x48: "## Store\n",
0x50: "## Float ALU\n",
0x60: "## Advanced ALU\n",
0x70: "## Annotation\n",
0xf1: "## Misc\n",
}
def _render_operand_desc(purpose: str, kind: OP_KIND, constraint: TC, mod1="",
mod2="") -> str:
kind_str = kind.name.replace("REG_OR_CONST", "REG/CONST")
if constraint == TC.INVALID:
return f"*{purpose}* {mod1}{kind_str}{mod2}"
else:
return f"*{purpose}* {mod1}{kind_str}:{constraint.name}{mod2}"
def _render_directive_doc(o: Opcode, fout):
print_ops = [_render_operand_desc(*t, mod1="<sub>[", mod2="]</sub>")
for t in zip(o.purpose, o.operand_kinds, o.constraints)]
print(f"#### [{o.no:02x}] {o.name} {' '.join(print_ops)}", file=fout)
print(o.desc, file=fout)
def _render_opcode_doc(o: Opcode, fout):
print_ops = [_render_operand_desc(*t, mod1="<sub>[", mod2="]</sub>")
for t in zip(o.purpose, o.operand_kinds, o.constraints)]
if o.kind in _OFS_WRITING_REGS:
print_ops.insert(1, "=")
if o.kind in {OPC_KIND.ST}:
print_ops.insert(-1, "=")
print(f"#### [{o.no:02x}] {o.name} {' '.join(print_ops)}", file=fout)
print(o.desc, file=fout)
# print("* constraints:", ' '.join(ops))
# print(f"{name:15.15}, // {' '.join(ops)} [{'
# '.join(cons)}]"
def _render_documentation(fout):
for opc in Opcode.Table.values():
if opc.group != OPC_GENUS.BASE:
continue
if opc.no in _GROUPS:
print(_GROUPS[opc.no], file=fout)
if opc.kind == OPC_KIND.DIRECTIVE:
_render_directive_doc(opc, fout)
else:
_render_opcode_doc(opc, fout)
print()
def _render_h(fout):
print("enum class OPC : uint8_t {", file=fout)
last = 0
print(f" INVALID = 0x00,", file=fout)
for opc in Opcode.Table.values():
if opc.group != OPC_GENUS.BASE:
continue
if (opc.no & 0xff0) != last & 0xff0:
print("", file=fout)
last = opc.no
name = opc.name.upper().replace(".", "_")
if opc.kind == OPC_KIND.DIRECTIVE:
name = "DIR_" + name[1:]
print(f" {name} = 0x{opc.no:02x},", file=fout)
print("};", file=fout)
# _render_enum("OpcodeFamily", ["OF.INVALID", "OF.DIRECTIVE"] +
# list(OFS_ALL))
# _render_enum("OperandKind", ["OK.INVALID"] +
# [x.upper() for x in OKS_ALL])
for cls in [OPC_GENUS, FUN_KIND, MEM_KIND, TC, OPC_KIND, DK, OP_KIND]:
cgen.RenderEnum(cgen.NameValues(cls), f"class {cls.__name__} : uint8_t",
fout)
cgen.RenderEnum(cgen.NameValues(OA), f"{OA.__name__} : uint16_t", fout)
def _render_c(fout):
def render(cls, both_ways=True):
cgen.RenderEnumToStringMap(cgen.NameValues(cls), cls.__name__, fout)
cgen.RenderEnumToStringFun(cls.__name__, fout)
if both_ways:
cgen.RenderStringToEnumMap(cgen.NameValues(cls),
cls.__name__ + "FromStringMap",
cls.__name__ + "Jumper", fout)
render(OPC_GENUS)
render(FUN_KIND)
render(MEM_KIND)
render(TC)
render(DK)
render(OP_KIND, False)
alpha = [(opc.name, opc.no) for opc in Opcode.Table.values()]
cgen.RenderStringToEnumMap(alpha, "OPCFromStringMap", "OPCJumper", fout)
print("const Opcode GlobalOpcodes[256] = {")
opcodes = sorted([(o.no, o) for o in Opcode.Table.values()])
last = -1
dummy_opc = Opcode(0, "", OPC_KIND.RET, [], [], OPC_GENUS.INVALID, "")
dummy_opc.name = ""
dummy_opc.kind = OPC_KIND.INVALID
def emit_one(opc: Opcode):
kinds_str = [f"OP_KIND::{x.name}" for x in opc.operand_kinds]
constraints_str = [f"TC::{x.name}" for x in opc.constraints]
attributes = [f"OA::{x.name}" for x in OA if x in opc.attributes]
if not attributes:
attributes = ["0"]
print(" { // %2x %s" % (opc.no, opc.name))
print(' {%s}, ' % ", ".join(kinds_str))
print(' OPC_KIND::%s, OPC_GENUS::%s, %d, %d,' %
(opc.kind.name, opc.group.name, len(opc.operand_kinds),
opc.def_ops_count()))
print(' {%s}, ' % ", ".join(constraints_str))
print(' "%s", %s },' % (opc.name, '|'.join(attributes)))
for n, o in opcodes:
if o.group != OPC_GENUS.BASE:
continue
last += 1
while last < n:
dummy_opc.no = last
emit_one(dummy_opc)
last += 1
emit_one(o)
print("};\n")
def Dump():
last = None
for opc in Opcode.Table.values():
if opc.kind != last:
print()
last = opc.kind
ops = [_render_operand_desc(a, b, c) for a, b, c in
zip(opc.purpose, opc.operand_kinds, opc.constraints)]
print(f"{opc.kind.name} {opc.name} {' '.join(ops)}")
print("total opcodes: %d" % len(Opcode.Table))
if __name__ == "__main__":
import sys
if len(sys.argv) > 1:
if sys.argv[1] == "documentation":
cgen.ReplaceContent(_render_documentation, sys.stdin, sys.stdout)
elif sys.argv[1] == "gen_h":
cgen.ReplaceContent(_render_h, sys.stdin, sys.stdout)
elif sys.argv[1] == "gen_c":
cgen.ReplaceContent(_render_c, sys.stdin, sys.stdout)
else:
Dump()
| 36.113594
| 108
| 0.571443
| 5,307
| 38,786
| 3.958357
| 0.145657
| 0.057695
| 0.054839
| 0.040844
| 0.417385
| 0.356643
| 0.318989
| 0.295901
| 0.263388
| 0.256296
| 0
| 0.023573
| 0.260635
| 38,786
| 1,073
| 109
| 36.147251
| 0.708965
| 0.125355
| 0
| 0.237537
| 0
| 0.001466
| 0.12837
| 0.002149
| 0
| 0
| 0.012511
| 0.000932
| 0.020528
| 1
| 0.033724
| false
| 0
| 0.005865
| 0.016129
| 0.222874
| 0.036657
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
255a4a642a2b2e33a26ec84bb18d2413e8e4b098
| 31,149
|
py
|
Python
|
main/staff.py
|
YukiGao7718/Airline-Reservation-System
|
ecc75316ccbc6aa2db4d0378b938c0275fddb6d3
|
[
"MIT"
] | null | null | null |
main/staff.py
|
YukiGao7718/Airline-Reservation-System
|
ecc75316ccbc6aa2db4d0378b938c0275fddb6d3
|
[
"MIT"
] | null | null | null |
main/staff.py
|
YukiGao7718/Airline-Reservation-System
|
ecc75316ccbc6aa2db4d0378b938c0275fddb6d3
|
[
"MIT"
] | null | null | null |
from flask import Flask, render_template, request, session, redirect, url_for
import pymysql.cursors
import datetime
from pyecharts import options as opts
from pyecharts.charts import Pie,Bar
from appdef import *
#Get the airline the staff member works for
def getStaffAirline():
username = session['username']
cursor = conn.cursor()
#username is a primary key
query = 'select airline_name from airline_staff where username = %s'
cursor.execute(query, (username))
#fetchall returns an array, each element is a dictionary
airline = cursor.fetchall()[0]['airline_name']
cursor.close()
return airline
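# Example (illustrative): with a dict-style cursor (implied by the string
# indexing here), fetchall() returns rows like [{'airline_name': 'China Eastern'}],
# hence the [0]['airline_name'] access above.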
#Make sure that the user is actually staff before performing any operations
def authenticateStaff():
username = ""
    try:
        # there may be no logged-in user; guard the session lookup
        username = session['username']
    except KeyError:
        return False
cursor = conn.cursor()
query = 'select * from airline_staff where username=%s'
cursor.execute(query, (username))
data = cursor.fetchall()
cursor.close()
if data:
return True
else:
#Logout before returning error message
session.pop('username')
return False
@app.route('/staffHome')
def staffHome():
if authenticateStaff():
username = session['username']
message = request.args.get('message')
cursor = conn.cursor()
queryGetairline = "SELECT airline_name FROM airline_staff WHERE username= %s"
cursor.execute(queryGetairline, username)
airline_name = cursor.fetchone()['airline_name']
# query top destination for the past 3 months
query1 = "select count(ticket.ticket_id) as cnt, airport.airport_city as city\
from airport,flight,ticket,purchases\
where airport.airport_name = flight.arrival_airport\
and flight.flight_num = ticket.flight_num\
and flight.airline_name = %s\
and purchases.ticket_id = ticket.ticket_id\
and purchases.purchase_date between DATE_SUB(curdate(), INTERVAL 3 MONTH) and curdate()\
group by city \
order by cnt DESC limit 3"
cursor.execute(query1,airline_name)
data1 = cursor.fetchall()
        num = len(data1)  # the LIMIT 3 in the query caps this at three rows
        range1 = range(num)
        data1 = [row['city'] for row in data1]
# query top destination for the past 1 year
query2 = "select count(ticket.ticket_id) as cnt, airport.airport_city as city\
from airport,flight,ticket,purchases\
where airport.airport_name = flight.arrival_airport\
and flight.flight_num = ticket.flight_num\
and flight.airline_name = %s\
and purchases.ticket_id = ticket.ticket_id\
and purchases.purchase_date between DATE_SUB(curdate(), INTERVAL 1 YEAR) and curdate()\
group by city \
order by cnt DESC limit 3"
cursor.execute(query2,airline_name)
data2 = cursor.fetchall()
        num = len(data2)  # again capped at three rows by LIMIT 3
        range2 = range(num)
        data2 = [row['city'] for row in data2]
cursor.close()
return render_template('staff.html', username=username,
message=message,
destination1 = data1,
destination2 = data2,
range1 = range1,
range2 = range2)
else:
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/searchFlights')
def searchFlightsPage():
if authenticateStaff():
cursor = conn.cursor()
airline = getStaffAirline()
query = "select * from flight where airline_name = %s \
and ((departure_time between curdate() and date_add(curdate(), interval 30 day)) \
or (arrival_time between curdate() and date_add(curdate(), interval 30 day)))"
cursor.execute(query, (airline))
data = cursor.fetchall()
cursor.close()
error = request.args.get('error')
return render_template('searchStaff.html', error=error, results=data)
else:
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/searchFlights/city', methods=['POST'])
def searchFlightsCity():
if authenticateStaff():
cursor = conn.cursor()
city = request.form['citysearchbox']
airline = getStaffAirline()
query = "select * from flight,airport \
where (airport.airport_name=flight.departure_airport or airport.airport_name=flight.arrival_airport) \
and airport.airport_city=%s and airline_name=%s"
cursor.execute(query, (city, airline))
data = cursor.fetchall()
cursor.close()
error = None
if data:
return render_template('searchStaffResults.html', results=data)
else:
#returns an error message to the html page
error = 'No results found'
return redirect(url_for('searchFlightsPage', error=error))
else:
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/searchFlights/airport', methods=['POST'])
def searchFlightsAirport():
if authenticateStaff():
cursor = conn.cursor()
airport = request.form['airportsearchbox']
airline = getStaffAirline()
query = 'select * from flight where (departure_airport = %s or arrival_airport = %s) and airline_name=%s'
cursor.execute(query, (airport, airport, airline))
data = cursor.fetchall()
cursor.close()
error = None
if data:
return render_template('searchStaffResults.html', results=data)
else:
#returns an error message to the html page
error = 'No results found'
return redirect(url_for('searchFlightsPage', error=error))
else:
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/searchFlights/date', methods=['POST'])
def searchFlightsDate():
if authenticateStaff():
begintime = request.form['begintime']
endtime = request.form['endtime']
if not validateDates(begintime, endtime):
error = 'Invalid date range'
return redirect(url_for('searchFlightsPage', error=error))
airline = getStaffAirline()
cursor = conn.cursor()
query = "select * from flight \
where ((departure_time between %s and %s) \
or (arrival_time between %s and %s)) and airline_name=%s"
cursor.execute(query, (begintime, endtime, begintime, endtime, airline))
data = cursor.fetchall()
cursor.close()
error = None
if data:
return render_template('searchStaffResults.html', results=data)
else:
#returns an error message to the html page
error = 'No results found'
return redirect(url_for('searchFlightsPage', error=error))
else:
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/searchFlights/customers', methods=['POST'])
def searchFlightsCustomer():
if authenticateStaff():
flightnum = request.form['flightsearchbox']
airline = getStaffAirline()
cursor = conn.cursor()
query = "select customer_email from purchases natural join ticket\
where flight_num = %s and airline_name=%s"
cursor.execute(query, (flightnum, airline))
data = cursor.fetchall()
cursor.close()
if data:
return render_template('searchStaffResults.html', customerresults=data, flightnum=flightnum)
else:
#returns an error message to the html page
error = 'No results found'
return redirect(url_for('searchFlightsPage', error=error))
else:
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/createFlight')
def createFlightPage():
if authenticateStaff():
        airline = getStaffAirline()
        cursor = conn.cursor()
query = "select * from flight where airline_name = %s \
and ((departure_time between curdate() and date_add(curdate(), interval 30 day)) \
or (arrival_time between curdate() and date_add(curdate(), interval 30 day)))"
cursor.execute(query, (airline))
data = cursor.fetchall()
query = 'select distinct airport_name from airport'
cursor.execute(query)
airportdata = cursor.fetchall()
query = 'select distinct airplane_id from airplane where airline_name=%s'
cursor.execute(query, (airline))
airplanedata = cursor.fetchall()
cursor.close()
error = request.args.get('error')
return render_template('createFlight.html', error = error,
airportdata = airportdata,
airplanedata = airplanedata,
results = data)
else:
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/createFlight/Auth', methods=['POST'])
def createFlight():
# prevent unauthorized users from doing this action
if not authenticateStaff():
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
username = session['username']
flightnum = request.form['flightnum']
departport = request.form['departport']
departtime = request.form['departtime']
arriveport = request.form['arriveport']
arrivetime = request.form['arrivetime']
price = request.form['price']
status = "Upcoming"
airplaneid = request.form['airplanenum']
##########################################################################
airline = getStaffAirline()
cursor = conn.cursor()
query1 = 'select * from flight where airline_name = %s and flight_num = %s'
cursor.execute(query1,(airline,flightnum))
    data1 = cursor.fetchall()
    cursor.close()  # close before the early return below
    if data1:
        error = "The flight number already exists, please enter another one."
        return redirect(url_for('createFlightPage', error=error))
#############################################################################
#############################################################################
cursor = conn.cursor()
query2 = 'select * from airport where airport_name = %s '
cursor.execute(query2,(departport))
data2 = cursor.fetchall()
query3 = 'select * from airport where airport_name = %s '
cursor.execute(query3,(arriveport))
    data3 = cursor.fetchall()
    cursor.close()  # close before the early returns below
    if (not data2):
        error = "The Departure Airport does not exist, please add the airport first."
        return redirect(url_for('createFlightPage', error=error))
    if (not data3):
        error = "The Arrival Airport does not exist, please add the airport first."
        return redirect(url_for('createFlightPage', error=error))
#############################################################################
if not validateDates(departtime, arrivetime):
error = 'Invalid date range'
return redirect(url_for('createFlightPage', error=error))
airline = getStaffAirline()
#Check that airplane is valid
cursor = conn.cursor()
query = 'select * from airplane where airplane_id = %s'
cursor.execute(query, (airplaneid))
data = cursor.fetchall()
if not data:
error = 'Invalid Airplane ID'
return redirect(url_for('createFlightPage', error=error))
query = 'insert into flight values (%s, %s, %s, %s, %s, %s, %s, %s, %s)'
cursor.execute(query, (airline, flightnum, departport, departtime, arriveport, arrivetime, price, status, airplaneid))
conn.commit()
cursor.close()
return redirect(url_for('staffHome', message="Operation Successful"))
@app.route('/staffHome/changeFlight')
def changeFlightStatusPage():
if authenticateStaff():
error = request.args.get('error')
return render_template('changeFlight.html', error=error)
else:
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/changeFlight/Auth', methods=['POST'])
def changeFlightStatus():
# prevent unauthorized users from doing this action
if not authenticateStaff():
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
username = session['username']
cursor = conn.cursor()
flightnum = request.form['flightnum']
status = request.form['status']
if not status:
error = 'Did not select new status'
return redirect(url_for('changeFlightStatusPage', error=error))
airline = getStaffAirline()
#Check that the flight is from the same airline as the staff
query = 'select * from flight where flight_num = %s and airline_name = %s'
cursor.execute(query, (flightnum, airline))
data = cursor.fetchall()
##################################################################################
if not data:
        error = 'Incorrect entry - this flight number is not in your airline'
return redirect(url_for('changeFlightStatusPage', error=error))
##################################################################################
#Update the specified flight
query = 'update flight set status=%s where flight_num=%s and airline_name = %s'
cursor.execute(query, (status, flightnum, airline))
conn.commit()
cursor.close()
return redirect(url_for('staffHome', message="Operation Successful"))
@app.route('/staffHome/addAirplane')
def addAirplanePage():
if authenticateStaff():
error = request.args.get('error')
return render_template('addAirplane.html', error=error)
else:
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/addAirplane/confirm', methods=['POST'])
def addAirplane():
# prevent unauthorized users from doing this action
if not authenticateStaff():
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
username = session['username']
planeid = request.form['id']
seats = request.form['seats']
airline = getStaffAirline()
#Check if planeid is not taken
cursor = conn.cursor()
query = 'select * from airplane where airplane_id = %s'
cursor.execute(query, (planeid))
data = cursor.fetchall()
if data:
error = "Airplane ID already taken"
return redirect(url_for('addAirplanePage', error=error))
#Insert the airplane
query = 'insert into airplane values (%s, %s, %s)'
cursor.execute(query, (airline, planeid, seats))
conn.commit()
#Get a full list of airplanes
query = 'select * from airplane where airline_name = %s'
cursor.execute(query, (airline))
data = cursor.fetchall()
cursor.close()
return render_template('addAirplaneConfirm.html', results=data)
@app.route('/staffHome/addAirport')
def addAirportPage():
if authenticateStaff():
error = request.args.get('error')
return render_template('addAirport.html', error=error)
else:
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/addAirport/Auth', methods=['POST'])
def addAirport():
# prevent unauthorized users from doing this action
if not authenticateStaff():
error = 'Invalid Credentials'
return redirect(url_for('errorpage', error=error))
username = session['username']
name = request.form['name']
city = request.form['city']
#####################################################################
    if len(name) > 3:
        error = "Please enter the airport's abbreviation (at most 3 characters)."
return redirect(url_for('addAirportPage', error=error))
cursor = conn.cursor()
query = "select * from airport where airport_name = %s and airport_city = %s"
cursor.execute(query,(name,city))
data1 = cursor.fetchall()
cursor.close()
if data1:
error = "Airport Already exits."
return redirect(url_for('addAirportPage', error=error))
#####################################################################
cursor = conn.cursor()
query = 'insert into airport values (%s, %s)'
cursor.execute(query, (name, city))
conn.commit()
cursor.close()
return redirect(url_for('staffHome', message="Operation Successful"))
@app.route('/staffHome/viewAgents')
def viewAgentsPage():
if authenticateStaff():
error = request.args.get('error')
return render_template('viewAgents.html', error=error)
else:
error = "Invalid Credentials"
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/viewAgents/sales', methods=['POST'])
def viewAgentsSales():
if authenticateStaff():
daterange = request.form['range']
airline = getStaffAirline()
        # daterange specifies the past month or year
        # whitelist the value before concatenating it into the SQL string, since
        # interval units cannot be bound as query parameters (assumes the form
        # posts 'month' or 'year')
        if daterange not in ('month', 'year'):
            error = "Invalid date range"
            return redirect(url_for('viewAgentsPage', error=error))
        cursor = conn.cursor()
        query = 'select email,count(ticket_id) as sales \
        from booking_agent natural join purchases natural join ticket \
        where purchase_date >= date_sub(curdate(), interval 1 ' + daterange + ') \
        and airline_name=%s group by email order by sales DESC limit 5'
cursor.execute(query, (airline))
data = cursor.fetchall()
cursor.close()
#Use only the top 5 sellers
#Python will not break if we try to access a range that extends beyond the end of the array
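        # e.g. [1, 2][0:5] == [1, 2]; slicing past the end never raises an IndexError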
return render_template('viewAgentsSales.html', results = data[0:5], date=daterange)
else:
error = "Invalid Credentials"
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/viewAgents/commission')
def viewAgentsCommission():
if authenticateStaff():
airline = getStaffAirline()
cursor = conn.cursor()
query = "select email,sum(flight.price)*0.1 as commission \
from booking_agent natural join purchases natural join ticket natural join flight \
where purchase_date >= date_sub(curdate(), interval 1 year) and airline_name=%s\
group by email order by commission DESC limit 5"
cursor.execute(query, (airline))
data = cursor.fetchall()
cursor.close()
#Use only the top 5 sellers
#Python will not break if we try to access a range that extends beyond the end of the array
return render_template('viewAgentsCommission.html', results = data[0:5])
else:
error = "Invalid Credentials"
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/viewCustomers')
def viewCustomersPage():
if authenticateStaff():
airline = getStaffAirline()
cursor = conn.cursor()
query = 'select customer_email, count(ticket_id) as customerpurchases \
from purchases natural join ticket \
where airline_name= %s \
and purchase_date >= date_sub(curdate(), interval 1 year) group by customer_email \
having customerpurchases \
>= all (select count(ticket_id) \
from purchases natural join ticket \
where airline_name = %s \
and purchase_date >= date_sub(curdate(), interval 1 year) GROUP by customer_email)'
cursor.execute(query, (airline, airline))
data = cursor.fetchall()
cursor.close()
error = request.args.get('error')
return render_template('viewCustomers.html', error=error, results=data)
else:
error = "Invalid Credentials"
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/viewCustomers/results', methods=['POST'])
def viewCustomers():
if authenticateStaff():
airline = getStaffAirline()
customer = request.form['email']
cursor = conn.cursor()
query1 = "select * from customer where email = %s"
cursor.execute(query1,customer)
data1 = cursor.fetchone()
error = request.args.get('error')
cursor.close()
if not data1:
error = "Not a customer email, please enter a customer email."
return redirect(url_for('viewCustomersPage',error = error))
else:
cursor = conn.cursor()
query = 'select distinct flight_num from purchases natural join ticket where airline_name = %s and customer_email=%s'
cursor.execute(query, (airline, customer))
data = cursor.fetchall()
cursor.close()
return render_template('viewCustomersResults.html', results=data, customer=customer)
else:
error = "Invalid Credentials"
return redirect(url_for('errorpage', error=error))
@app.route('/staffHome/viewReports')
def viewReportsPage():
if authenticateStaff():
airline = getStaffAirline()
currentmonth = datetime.datetime.now().month
monthtickets = []
cursor = conn.cursor()
for i in range(0, 12):
#i is a trusted loop index, so interpolating str(i) here is safe
query = 'select count(ticket_id) as sales \
from purchases natural join ticket \
where year(purchase_date) = year(curdate() - interval ' + str(i) + ' month) \
and month(purchase_date) = month(curdate() - interval ' + str(i) + ' month) \
and airline_name=%s'
cursor.execute(query, (airline,))
data = cursor.fetchall()
#Map the offset i back to a calendar month number in 1-12
#(see the calendar-walking sketch after this route)
salemonth = ((currentmonth - (i+1)) % 12) + 1
monthtickets.append([data[0]['sales'], salemonth])
cursor.close()
c1 = (
Bar()
.add_xaxis([d[1] for d in monthtickets])
.add_yaxis('total ticket number',[d[0] for d in monthtickets])
.set_global_opts(xaxis_opts=opts.AxisOpts(axislabel_opts=opts.LabelOpts(rotate=0)),
title_opts=opts.TitleOpts(title="Ticket Amount in the Past",
subtitle= "In the past 1 year"),
legend_opts=opts.LegendOpts(pos_right="15%"))
)
error = request.args.get('error')
return render_template('viewReports.html',
bar_options1=c1.dump_options(),error = error)
else:
error = "Invalid Credentials"
return redirect(url_for('errorpage', error=error))
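# Sketch (not wired into the route above): the modular arithmetic for
# salemonth can also be done by walking the calendar directly, which yields
# the year as well for labelling. Relies on the module-level datetime import.
def last_twelve_months(today=None):
    """Return [(year, month), ...] for this month going back 11 months."""
    today = today or datetime.date.today()
    year, month = today.year, today.month
    pairs = []
    for _ in range(12):
        pairs.append((year, month))
        month -= 1
        if month == 0:
            month, year = 12, year - 1
    return pairs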
@app.route('/staffHome/viewReports/dates', methods=['POST'])
def viewReportsDates():
if authenticateStaff():
airline = getStaffAirline()
begintime = request.form['begintime']
endtime = request.form['endtime']
if not validateDates(begintime, endtime):
error = 'Invalid date range'
return redirect(url_for('viewReportsPage', error=error))
cursor = conn.cursor()
query = 'select count(ticket_id) as sales \
from purchases natural join ticket where airline_name=%s\
and purchase_date between %s and %s'
cursor.execute(query, (airline, begintime, endtime))
data = cursor.fetchall()
cursor.close()
return render_template('viewReportsDate.html', sales=data[0]['sales'], begintime=begintime, endtime=endtime)
else:
error = "Invalid Credentials"
return render_template('error.html',error=error)
@app.route('/staffHome/viewReports/past', methods=['POST'])
def viewReportsPast():
if authenticateStaff():
airline = getStaffAirline()
daterange = request.form['range']
#Whitelist the interval unit before interpolating it into the SQL
#(a %s placeholder cannot bind keywords; unit set assumed, see the
#safe_interval_unit sketch defined after viewAgentsSales)
if daterange not in ('week', 'month', 'year'):
error = 'Invalid date range'
return redirect(url_for('viewReportsPage', error=error))
cursor = conn.cursor()
query = 'select count(ticket_id) as sales \
from purchases natural join ticket where airline_name=%s \
and purchase_date >= date_sub(curdate(), interval 1 ' + daterange + ')'
cursor.execute(query, (airline,))
data = cursor.fetchall()
cursor.close()
return render_template('viewReportsPast.html', sales=data[0]['sales'], datetime=daterange)
else:
error = "Invalid Credentials"
return render_template('error.html',error=error)
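# Usage sketch for the safe_interval_unit helper defined earlier (assumed
# wiring, shown for illustration only):
#   daterange = safe_interval_unit(request.form['range'])
#   if daterange is None:
#       return redirect(url_for('viewReportsPage', error='Invalid date range'))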
@app.route('/staffHome/ComparisonRevenue')
def ComparisonRevenue():
if authenticateStaff():
username = session['username']
error = None
# query for airline_name the staff works for
cursor = conn.cursor()
queryGetairline = "SELECT airline_name FROM airline_staff WHERE username= %s"
cursor.execute(queryGetairline, (username,))
airline_name = cursor.fetchone()['airline_name']
# query for direct purchase revenue (last month)
query1 = "select sum(flight.price) as rev\
from purchases, ticket, flight\
where purchases.ticket_id = ticket.ticket_id \
and ticket.flight_num = flight.flight_num\
and ticket.airline_name = flight.airline_name\
and flight.airline_name = %s\
and purchases.purchase_date between DATE_SUB(curdate(), INTERVAL 1 MONTH) and curdate()\
and purchases.booking_agent_id is null"
cursor.execute(query1,str(airline_name))
direct_revenue = cursor.fetchone()['rev']
# query for indirect purchase revenue (last month)
query2 = "select sum(flight.price) as rev\
from purchases, ticket, flight\
where purchases.ticket_id = ticket.ticket_id \
and ticket.flight_num = flight.flight_num\
and ticket.airline_name = flight.airline_name\
and flight.airline_name = %s\
and purchases.purchase_date between DATE_SUB(curdate(), INTERVAL 1 MONTH) and curdate()\
and purchases.booking_agent_id is not null"
cursor.execute(query2,str(airline_name))
indirect_revenue = cursor.fetchone()['rev']
#draw the pie chart (last month)
x_data = ['Direct Revenue','Indirect Revenue']
y_data = [direct_revenue,indirect_revenue]
data_pair = [list(z) for z in zip(x_data, y_data)]
c1 = (
Pie()
.add('', data_pair)
.set_global_opts(title_opts=opts.TitleOpts(title="Revenue Comparison",
subtitle = "Last Month"),
legend_opts=opts.LegendOpts(pos_right="15%"))
.set_series_opts(label_opts=opts.LabelOpts(formatter="{b}: {c}"))
)
#Customized pie (a fancier version pie chart)
# c1 = (
# Pie()
# .add(
# series_name="Revenue Source",
# data_pair=data_pair,
# rosetype="radius",
# radius="55%",
# center=["50%", "50%"],
# label_opts=opts.LabelOpts(is_show=False, position="center"),
# )
# .set_global_opts(
# title_opts=opts.TitleOpts(
# title="Revenue Source (last month)",
# pos_left="center",
# pos_top="20",
# title_textstyle_opts=opts.TextStyleOpts(color="black"),
# ),
# legend_opts=opts.LegendOpts(is_show=False),
# )
# .set_series_opts(
# tooltip_opts=opts.TooltipOpts(
# trigger="item", formatter="{a} <br/>{b}: {c} ({d}%)"
# ),
# label_opts=opts.LabelOpts(color="rgba(0,0,0,255)"),
# )
# )
# query for direct purchase revenue (last year)
query1_ = "select sum(flight.price) as rev\
from purchases, ticket, flight\
where purchases.ticket_id = ticket.ticket_id \
and ticket.flight_num = flight.flight_num\
and ticket.airline_name = flight.airline_name\
and flight.airline_name = %s\
and purchases.purchase_date between DATE_SUB(curdate(), INTERVAL 1 YEAR) and curdate()\
and purchases.booking_agent_id is null"
cursor.execute(query1_,str(airline_name))
direct_revenue_ = cursor.fetchone()['rev']
# query for indirect purchase revenue (last year)
query2_ = "select sum(flight.price) as rev \
from purchases, ticket, flight \
where purchases.ticket_id = ticket.ticket_id \
and ticket.flight_num = flight.flight_num \
and ticket.airline_name = flight.airline_name \
and flight.airline_name = %s \
and purchases.purchase_date between DATE_SUB(curdate(), INTERVAL 1 YEAR) and curdate() \
and purchases.booking_agent_id is not null"
cursor.execute(query2_, (airline_name,))
indirect_revenue_ = cursor.fetchone()['rev']
cursor.close()
#draw the pie chart (last year)
x_data_ = ['Direct Revenue','Indirect Revenue']
y_data_ = [direct_revenue_,indirect_revenue_]
data_pair_ = [list(z) for z in zip(x_data_, y_data_)]
c2 = (
Pie()
.add('', data_pair_)
.set_global_opts(title_opts=opts.TitleOpts(title="Revenue Comparison",
subtitle = "Last Year"),
legend_opts=opts.LegendOpts(pos_right="15%"))
.set_series_opts(label_opts=opts.LabelOpts(formatter="{b}: {c}"))
)
#sum() comes back as SQL NULL (Python None) when no purchases matched, so
#only render the charts when both revenue figures exist
if direct_revenue and indirect_revenue:
return render_template('ComparisonRevenue.html',
pie_options1 = c1.dump_options(),
pie_options2 = c2.dump_options())
else:
error = 'Sorry! No data available right now.'
return render_template('ComparisonRevenue.html', error=error)
else:
error = "Invalid Credentials"
return render_template('error.html',error=error)
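# Sketch (illustrative only): coalescing the aggregate in SQL would let the
# charts render zeros instead of falling through to the error page when one
# revenue stream has no purchases:
#   select coalesce(sum(flight.price), 0) as rev ...
# or, on the Python side:
#   y_data = [direct_revenue or 0, indirect_revenue or 0]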