hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringdate 2015-01-01 00:00:47 2022-03-31 23:42:18 ⌀ | max_issues_repo_issues_event_max_datetime stringdate 2015-01-01 17:43:30 2022-03-31 23:59:58 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
eea0783e1d150b2a10a9b357db454fabc3181131 | 2,154 | py | Python | common_multicore.py | bgrayburn/itemSetCount | b1d8a9262a0d90a9038ecb7b38c94d3a33f235f1 | [
"MIT"
] | null | null | null | common_multicore.py | bgrayburn/itemSetCount | b1d8a9262a0d90a9038ecb7b38c94d3a33f235f1 | [
"MIT"
] | null | null | null | common_multicore.py | bgrayburn/itemSetCount | b1d8a9262a0d90a9038ecb7b38c94d3a33f235f1 | [
"MIT"
] | null | null | null | from multiprocessing import Process
import pymongo
from itertools import combinations
import csv
import time
import sys
mongo_ip = "192.168.1.127"
db_name = "analysis"
collection_name = "common_items"
max_item_threshold = 20
def load_transaction(filename):
    """Read a '|'-delimited CSV file and return its rows as a list of lists.

    Each CSV row becomes one transaction: a list of item strings.
    """
    # NOTE(review): the file is opened in binary mode ('rb'), which is the
    # Python 2 convention for the csv module (this file also uses Python 2
    # print statements elsewhere). Under Python 3, csv expects a text-mode
    # file opened with newline='' -- confirm the target interpreter before
    # changing this.
    transaction = []
    with open(filename,'rb') as csvfile:
        cread = csv.reader(csvfile, delimiter='|', quotechar="'")
        for row in cread:
            transaction.append(list(row))
    return transaction
def common_job(job_transaction, batch_num):
    """Count item-set occurrences for one batch and upsert them into MongoDB.

    For every transaction in *job_transaction*, all item combinations of
    size 1..4 (singles/doubles/triples/quads) are counted; each count is
    accumulated in the ``common_items`` collection via an ``$inc`` upsert
    keyed by (set name, batch number).

    Parameters
    ----------
    job_transaction : list of list of str
        The transactions (item lists) belonging to this batch.
    batch_num : int
        Index of the batch; stored in each MongoDB document.
    """
    mongo_con = pymongo.MongoClient(mongo_ip)
    # Index with [] instead of eval() -- same database/collection lookup
    # without evaluating config strings as code.
    mongo_col = mongo_con[db_name][collection_name]
    name_of_sets = ['singles', 'doubles', 'triples', 'quads']
    for ind, v in enumerate(job_transaction):
        # print(...) with a single pre-concatenated argument works the same
        # under Python 2 and Python 3.
        print('batch: ' + str(batch_num) + ' transaction #' + str(ind) +
              ' with ' + str(len(v)) + ' of items')
        for i in range(1, 5):  # singles, doubles, etc.
            cur_set = name_of_sets[i - 1]
            for combo in combinations(v, i):
                combo_set = tuple(set(combo))
                # An empty first item marks a padded/blank field; combinations
                # are emitted in order, so stop scanning this size.
                if combo_set[0] == '':
                    break
                # NOTE(review): Collection.update() was removed in PyMongo 4
                # (update_one is the modern equivalent) -- confirm the
                # installed pymongo version.
                mongo_col.update({'name': cur_set, 'batch': batch_num},
                                 {'$inc': {'data.' + str(combo_set): 1}},
                                 upsert=True)
def make_batches(transaction, batch_size):
    """Split *transaction* into consecutive batches of at most *batch_size*.

    Fixes an off-by-one in the original sliding-window logic: it started
    every batch at ``last_pos + 1``, silently dropping the first transaction
    and one additional transaction between every pair of batches.

    Parameters
    ----------
    transaction : list
        The full list of transactions to split.
    batch_size : int
        Maximum number of transactions per batch (must be positive).

    Returns
    -------
    list of list
        Consecutive, non-overlapping slices covering every element.
    """
    return [transaction[start:start + batch_size]
            for start in range(0, len(transaction), batch_size)]
def still_running(processes):
    """Return True while at least one worker process is still alive."""
    return any(process.is_alive() for process in processes)
def main(filename):
    """Load transactions from *filename*, drop oversized ones, and launch
    one worker process per batch. Returns the started Process objects."""
    transactions = load_transaction(filename)
    # Keep only transactions below the configured item-count threshold.
    transactions = [items for items in transactions
                    if len(items) < max_item_threshold]
    batches = make_batches(transactions, 50)
    workers = []
    for batch_index, batch in enumerate(batches):
        worker = Process(target=common_job, args=(batch, batch_index))
        workers.append(worker)
        worker.start()
    return workers
if __name__ == '__main__':
    mongo_con = pymongo.MongoClient(mongo_ip)
    # Index with [] instead of eval() -- same collection, no string eval.
    mongo_col = mongo_con[db_name][collection_name]
    # Drop results from any previous run.
    # NOTE(review): Collection.remove() is removed in PyMongo 4;
    # delete_many({}) is the modern equivalent -- confirm pymongo version.
    mongo_col.remove()
    processes = main(sys.argv[1])  # renamed from the original 'processess' typo
    # Block until every batch worker has finished.
    while still_running(processes):
        time.sleep(2)
| 29.916667 | 113 | 0.689415 |
eea08039ae5c75a5d6c24f6bfa31fee73a4d653a | 1,962 | py | Python | 5_arborescences/code/noeud_bin_p1.py | efloti/cours-nsi-terminale | 091df5518c25b50ef523a803ac747c63be76f670 | [
"CC0-1.0"
] | null | null | null | 5_arborescences/code/noeud_bin_p1.py | efloti/cours-nsi-terminale | 091df5518c25b50ef523a803ac747c63be76f670 | [
"CC0-1.0"
] | null | null | null | 5_arborescences/code/noeud_bin_p1.py | efloti/cours-nsi-terminale | 091df5518c25b50ef523a803ac747c63be76f670 | [
"CC0-1.0"
class NoeudBin:
    """A node of a binary tree.

    Each node stores a value plus optional left (``gauche``) and right
    (``droit``) children; a (sub)tree is identified by its root node.
    """

    def __init__(self, valeur, gauche=None, droit=None):
        self.valeur = valeur
        # self.parent = None
        self.gauche = gauche
        self.droit = droit

    def est_double(self):
        """Return True if the node has exactly two children, False otherwise."""
        # Compare against None explicitly so the method returns an actual
        # bool, as documented (the original `self.gauche and self.droit`
        # returned a node object or None).
        return self.gauche is not None and self.droit is not None

    def est_feuille(self):
        """Return True if the node has no children, False otherwise."""
        return self.gauche is None and self.droit is None

    def est_simple(self):
        """Return True if the node has exactly one child, False otherwise."""
        # Exactly one of the two children is None (boolean XOR).
        return (self.gauche is None) != (self.droit is None)

    def taille(self):
        """Return the number of nodes of the tree rooted at this node."""
        if self.est_feuille():
            return 1
        return 1 + (self.gauche.taille() if self.gauche else 0) \
                 + (self.droit.taille() if self.droit else 0)

    def hauteur(self):
        """Return the height of the tree rooted at this node.

        The height is the number of links on the longest branch; a leaf
        has height 0.
        """
        if self.est_feuille():
            return 0
        return 1 + max(
            self.gauche.hauteur() if self.gauche else 0,
            self.droit.hauteur() if self.droit else 0
        )

    def est_ancetre(self, n):
        """Return True if this node is an ancestor of node *n*.

        A node n1 is an ancestor of n2 when n2 belongs to the tree rooted
        at n1 (so every node is its own ancestor).
        """
        if self is n:
            return True
        return (self.gauche.est_ancetre(n) if self.gauche else False) or \
               (self.droit.est_ancetre(n) if self.droit else False)

    def est_descendant(self, n):
        """Return True if this node is a descendant of node *n*."""
        return n.est_ancetre(self)
| 38.470588 | 93 | 0.61315 |
eea2c9cc9d5f2240a45df9dae18361db691a6948 | 3,753 | py | Python | bluebottle/segments/migrations/0024_auto_20220210_1336.py | terrameijar/bluebottle | b4f5ba9c4f03e678fdd36091b29240307ea69ffd | [
"BSD-3-Clause"
] | null | null | null | bluebottle/segments/migrations/0024_auto_20220210_1336.py | terrameijar/bluebottle | b4f5ba9c4f03e678fdd36091b29240307ea69ffd | [
"BSD-3-Clause"
] | null | null | null | bluebottle/segments/migrations/0024_auto_20220210_1336.py | terrameijar/bluebottle | b4f5ba9c4f03e678fdd36091b29240307ea69ffd | [
"BSD-3-Clause"
] | null | null | null | # Generated by Django 2.2.24 on 2022-02-10 12:36
import bluebottle.utils.fields
import bluebottle.utils.validators
import colorfield.fields
from django.db import migrations, models
import django_better_admin_arrayfield.models.fields
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.24 on 2022-02-10. Adds the new
    # SegmentType.inherit flag and refreshes options/help texts on
    # existing Segment fields. Do not edit by hand beyond comments.

    dependencies = [
        ('segments', '0023_auto_20220209_1312'),
    ]

    operations = [
        # New flag: newly created activities inherit the owner's segments.
        migrations.AddField(
            model_name='segmenttype',
            name='inherit',
            field=models.BooleanField(default=True, help_text='Newly created activities will inherit the segments set on the activity owner.', verbose_name='Inherit'),
        ),
        migrations.AlterField(
            model_name='segment',
            name='alternate_names',
            field=django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, default=list, size=None),
        ),
        migrations.AlterField(
            model_name='segment',
            name='background_color',
            field=colorfield.fields.ColorField(blank=True, default=None, help_text='Add a background colour to your segment page.', max_length=18, null=True, verbose_name='Background color'),
        ),
        migrations.AlterField(
            model_name='segment',
            name='closed',
            field=models.BooleanField(default=False, help_text='Closed segments will only be accessible to members that belong to this segment.', verbose_name='Restricted'),
        ),
        migrations.AlterField(
            model_name='segment',
            name='cover_image',
            field=bluebottle.utils.fields.ImageField(blank=True, help_text='The uploaded image will be cropped to fit a 4:3 rectangle.', max_length=255, null=True, upload_to='categories/logos/', validators=[bluebottle.utils.validators.FileMimetypeValidator(['image/png', 'image/jpeg', 'image/gif', 'image/svg+xml'], None, 'invalid_mimetype'), bluebottle.utils.validators.validate_file_infection], verbose_name='cover image'),
        ),
        migrations.AlterField(
            model_name='segment',
            name='email_domains',
            field=django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, default=list, help_text='Users with email addresses for this domain are automatically added to this segment.', size=None, verbose_name='Email domains'),
        ),
        migrations.AlterField(
            model_name='segment',
            name='logo',
            field=bluebottle.utils.fields.ImageField(blank=True, help_text='The uploaded image will be scaled so that it is fully visible.', max_length=255, null=True, upload_to='categories/logos/', validators=[bluebottle.utils.validators.FileMimetypeValidator(['image/png', 'image/jpeg', 'image/gif', 'image/svg+xml'], None, 'invalid_mimetype'), bluebottle.utils.validators.validate_file_infection], verbose_name='logo'),
        ),
        migrations.AlterField(
            model_name='segment',
            name='story',
            field=models.TextField(blank=True, help_text='A more detailed story for your segment. This story can be accessed via a link on the page.', null=True, verbose_name='Story'),
        ),
        migrations.AlterField(
            model_name='segment',
            name='tag_line',
            field=models.CharField(blank=True, help_text='A short sentence to explain your segment. This sentence is directly visible on the page.', max_length=255, null=True, verbose_name='Slogan'),
        ),
        migrations.AlterField(
            model_name='segmenttype',
            name='enable_search',
            field=models.BooleanField(default=False, verbose_name='Enable search filters'),
        ),
    ]
| 55.191176 | 425 | 0.679456 |
eea2f57d28acf6796635f1259b4f5d6adad79071 | 7,980 | py | Python | codeball/tests/test_models.py | metrica-sports/codeball | 60bfe54b7898bed87cbbbae9dfc0f3bc49d31025 | [
"MIT"
] | 54 | 2020-09-16T13:09:03.000Z | 2022-03-28T12:32:19.000Z | codeball/tests/test_models.py | metrica-sports/codeball | 60bfe54b7898bed87cbbbae9dfc0f3bc49d31025 | [
"MIT"
] | null | null | null | codeball/tests/test_models.py | metrica-sports/codeball | 60bfe54b7898bed87cbbbae9dfc0f3bc49d31025 | [
"MIT"
] | 9 | 2021-03-28T13:02:57.000Z | 2022-03-24T11:19:06.000Z | import os
import pandas as pd
from kloppy import (
load_epts_tracking_data,
to_pandas,
load_metrica_json_event_data,
load_xml_code_data,
)
from codeball import (
GameDataset,
DataType,
TrackingFrame,
EventsFrame,
CodesFrame,
PossessionsFrame,
BaseFrame,
Zones,
Area,
PatternEvent,
Pattern,
PatternsSet,
)
import codeball.visualizations as vizs
class TestModels:
    """Unit tests for the codeball model layer (patterns, datasets, frames).

    The dataset/frame tests read small fixture files from the sibling
    ``files/`` directory (metadata.xml, tracking.txt, events.json,
    code_xml.xml); the asserted counts are properties of those fixtures.
    """

    def test_pattern_event(self):
        # A PatternEvent must keep the times, coordinates and
        # visualizations it was constructed with.
        xy = [0.3, 0.6]
        viz = vizs.Players(
            start_time=500, end_time=700, players=[], options=[]
        )
        pattern_event = PatternEvent(
            pattern_code="MET_001",
            start_time=400,
            event_time=500,
            end_time=800,
            coordinates=[xy, xy],
            visualizations=[viz, viz],
            tags=["T001"],
        )

        assert pattern_event.end_time == 800
        assert pattern_event.coordinates[0][0] == 0.3
        assert pattern_event.visualizations[0].start_time == 500

    def test_pattern(self):
        # Minimal concrete subclass: Pattern is abstract, so run() and
        # build_pattern_event() must be provided before instantiation.
        class pattern_class(Pattern):
            def __init__(
                self,
                name: str,
                code: str,
                in_time: int = 0,
                out_time: int = 0,
                parameters: dict = None,
                game_dataset: GameDataset = None,
            ):
                super().__init__(
                    name, code, in_time, out_time, parameters, game_dataset
                )

            def run(self):
                return True

            def build_pattern_event(self):
                pass

        test_pattern = pattern_class(
            name="Test Pattern",
            code="MET_001",
            in_time=3,
            out_time=2,
            parameters=None,
            game_dataset=None,
        )

        assert test_pattern.in_time == 3
        assert test_pattern.run() is True

    def test_game_dataset(self):
        # Full dataset (tracking + events) exposes both typed frames.
        base_dir = os.path.dirname(__file__)
        game_dataset = GameDataset(
            tracking_metadata_file=f"{base_dir}/files/metadata.xml",
            tracking_data_file=f"{base_dir}/files/tracking.txt",
            events_metadata_file=f"{base_dir}/files/metadata.xml",
            events_data_file=f"{base_dir}/files/events.json",
        )

        assert game_dataset.tracking.data_type == DataType.TRACKING
        assert game_dataset.events.data_type == DataType.EVENT

    def test_tracking_game_dataset(self):
        # Tracking-only dataset: event data must be reported as absent.
        base_dir = os.path.dirname(__file__)
        game_dataset = GameDataset(
            tracking_metadata_file=f"{base_dir}/files/metadata.xml",
            tracking_data_file=f"{base_dir}/files/tracking.txt",
        )

        assert game_dataset.tracking.data_type == DataType.TRACKING
        assert game_dataset.has_event_data is False

    def test_codes_only_game_dataset(self):
        # Codes-only dataset: code frames present, no event data.
        base_dir = os.path.dirname(__file__)
        game_dataset = GameDataset(
            codes_files=f"{base_dir}/files/code_xml.xml",
        )

        assert game_dataset.codes[0].data_type == DataType.CODE
        assert game_dataset.has_event_data is False

    def test_pattern_set(self):
        base_dir = os.path.dirname(__file__)
        game_dataset = GameDataset(
            tracking_metadata_file=f"{base_dir}/files/metadata.xml",
            tracking_data_file=f"{base_dir}/files/tracking.txt",
            events_metadata_file=f"{base_dir}/files/metadata.xml",
            events_data_file=f"{base_dir}/files/events.json",
        )

        # Same minimal concrete Pattern as in test_pattern, but bound to a
        # real GameDataset this time.
        class pattern_class(Pattern):
            def __init__(
                self,
                name: str,
                code: str,
                in_time: int = 0,
                out_time: int = 0,
                parameters: dict = None,
                game_dataset: GameDataset = None,
            ):
                super().__init__(
                    name, code, in_time, out_time, parameters, game_dataset
                )

            def run(self):
                return True

            def build_pattern_event(self):
                pass

        test_pattern = pattern_class(
            name="Test Pattern",
            code="MET_001",
            in_time=3,
            out_time=2,
            parameters=None,
            game_dataset=game_dataset,
        )

        patterns_set = PatternsSet(game_dataset=game_dataset)
        patterns_set.patterns = [test_pattern, test_pattern]

        assert patterns_set.game_dataset.events.data_type == DataType.EVENT
        assert len(patterns_set.patterns) == 2

    def test_base_data_frame(self):
        # BaseFrame must behave like a pandas DataFrame while carrying the
        # extra metadata attributes; slicing keeps 'metadata' (a propagated
        # attribute) but not 'records'.
        data = {
            "player1_x": [1, 2, 3, 4],
            "player2_x": [5, 6, 7, 8],
            "player3_x": [9, 10, 11, 12],
        }
        base_df = BaseFrame(data)
        base_df.metadata = "metadata"
        base_df.records = [1, 2, 3, 4]
        base_df.data_type = "test"

        assert isinstance(base_df, BaseFrame)
        assert hasattr(base_df, "metadata")
        assert hasattr(base_df, "records")
        assert isinstance(base_df[["player1_x", "player2_x"]], BaseFrame)
        assert hasattr(base_df[["player1_x", "player2_x"]], "metadata")
        assert not hasattr(base_df[["player1_x", "player2_x"]], "records")

    def test_tracking_data_frame(self):
        # Expected column counts / sums are fixed by the fixture files
        # (two teams of 11 players, FIFATMA / FIFATMB).
        base_dir = os.path.dirname(__file__)
        tracking_dataset = load_epts_tracking_data(
            metadata_filename=f"{base_dir}/files/metadata.xml",
            raw_data_filename=f"{base_dir}/files/tracking.txt",
        )
        tracking = TrackingFrame(to_pandas(tracking_dataset))
        tracking.data_type = DataType.TRACKING
        tracking.metadata = tracking_dataset.metadata
        tracking.records = tracking_dataset.records

        assert tracking.get_team_by_id("FIFATMA").team_id == "FIFATMA"
        assert tracking.get_period_by_id(1).id == 1
        assert tracking.get_other_team_id("FIFATMA") == "FIFATMB"
        assert tracking.team("FIFATMA").shape[1] == 22
        assert tracking.dimension("x").shape[1] == 23
        assert tracking.players().shape[1] == 44
        assert tracking.players("field").shape[1] == 40
        assert sum(tracking.phase(defending_team_id="FIFATMA")) == 0
        assert sum(tracking.team("FIFATMA").stretched(90)) == 863

    def test_events_data_frame(self):
        # Event counts below are properties of the events.json fixture.
        base_dir = os.path.dirname(__file__)
        events_dataset = load_metrica_json_event_data(
            metadata_filename=f"{base_dir}/files/metadata.xml",
            raw_data_filename=f"{base_dir}/files/events.json",
        )
        events = EventsFrame(to_pandas(events_dataset))
        events.data_type = DataType.EVENT
        events.metadata = events_dataset.metadata
        events.records = events_dataset.records

        assert events.type("PASS").shape[0] == 26
        assert events.result("COMPLETE").shape[0] == 45
        assert events.into(Zones.OPPONENT_BOX).shape[0] == 1
        assert events.starts_inside(Zones.OPPONENT_BOX).shape[0] == 2
        assert events.ends_inside(Zones.OPPONENT_BOX).shape[0] == 2
        assert events.ends_outside(Zones.OPPONENT_BOX).shape[0] == 43

        # Test different ways to input Zones and areas
        custom_area = Area((0.25, 0.2), (0.75, 0.8))
        assert (
            events.ends_outside(Zones.OPPONENT_BOX, Zones.OWN_BOX).shape[0]
            == 45
        )
        assert (
            events.ends_inside(Zones.OPPONENT_BOX, custom_area).shape[0] == 14
        )
        assert events.ends_inside(custom_area, custom_area).shape[0] == 12

    def test_codes_data_frame(self):
        # The code_xml.xml fixture defines exactly three code records.
        base_dir = os.path.dirname(__file__)
        codes_dataset = load_xml_code_data(
            xml_filename=f"{base_dir}/files/code_xml.xml",
        )
        codes = CodesFrame(to_pandas(codes_dataset))
        codes.data_type = DataType.CODE
        codes.metadata = codes_dataset.metadata
        codes.records = codes_dataset.records

        assert len(codes.records) == 3
| 30.930233 | 78 | 0.595614 |
eea39007c18df9bb3a13dd73ee8b29fd1990d82d | 2,025 | py | Python | 100dayspython/day004/main.py | mrqssjeff/project-python | b3b08f2acfe825640a5ee92cf9d6fa45ab580384 | [
"MIT"
] | null | null | null | 100dayspython/day004/main.py | mrqssjeff/project-python | b3b08f2acfe825640a5ee92cf9d6fa45ab580384 | [
"MIT"
] | null | null | null | 100dayspython/day004/main.py | mrqssjeff/project-python | b3b08f2acfe825640a5ee92cf9d6fa45ab580384 | [
"MIT"
import random

# Rock Paper Scissors game.
#
# Fixes over the original flat script:
#  * the original's final `if ... LOSE / else 404` printed the error screen
#    after every WIN and TIE (the else was attached to the lose-check, not
#    to input validation);
#  * an out-of-range choice crashed on `game[player]` before the error
#    message could be shown. The choice is now validated first.

print("Rock, Paper, Scissors!")
player = int(input("What do you choose? Type 0 for Rock, 1 for paper, 2 for Scissors: "))
computer = random.randint(0, 2)

# ASCII art indexed by choice: 0 = Rock, 1 = Paper, 2 = Scissors.
game = ["""
    _______
---'   ____)
      (_____)
      (_____)
      (____)
---.__(___)
""", """
    _______
---'   ____)____
          ______)
          _______)
         _______)
---.__________)
""", """
    _______
---'   ____)____
          ______)
       __________)
      (____)
---.__(___)
"""]

if player not in (0, 1, 2):
    print("404! ERROR!")
    print("""
 \\ /  _
 ___,,,
 \\_[o o]
 Invalid Number! C\\ _\\/
 / _____),_/__
 ________ / \\/ /
 _| .| / o /
 | | .| / /
 \\| .| / /
 |________| / /
 __|___|__ /_ \\/
 _____|_________|____ _//\\ \\
 _| /// \\ \\
 | \\ /
 | / /
 | / /
 ________________ | /__ /_
 bger ...|_|.............. /______\\.......""")
else:
    print(game[player])
    print("Computer chose:")
    print(game[computer])
    if player == computer:
        print("IT'S A TIE!")
    elif (player - computer) % 3 == 1:
        # 0 (Rock) beats 2 (Scissors), 1 beats 0, 2 beats 1.
        print("YOU WIN!")
    else:
        print("YOU LOSE!")
eea423068f1d28596bc373c0840d0c29a2ee48d7 | 3,249 | py | Python | python/evaluation/track_detection/rpn/rpn_eval.py | billy000400/Mu2e_MLTracking | 675e62d844ff8a5ccba9019e316c374c40658101 | [
"MIT"
] | null | null | null | python/evaluation/track_detection/rpn/rpn_eval.py | billy000400/Mu2e_MLTracking | 675e62d844ff8a5ccba9019e316c374c40658101 | [
"MIT"
] | 1 | 2021-01-03T08:57:34.000Z | 2021-01-03T23:41:22.000Z | python/evaluation/track_detection/rpn/rpn_eval.py | billy000400/Mu2e_MLTracking | 675e62d844ff8a5ccba9019e316c374c40658101 | [
"MIT"
] | null | null | null | # Detector (Faster RCNN)
# forward propagate from input to output
# Goal: test if the validation output act as expected
import sys
from pathlib import Path
import pickle
import timeit
from datetime import datetime

import pandas as pd
import numpy as np
import tensorflow as tf
from tensorflow.keras.regularizers import l2
from tensorflow.keras import Model
from tensorflow.keras.layers import Input, Dense, Conv2D, Dropout, Flatten, TimeDistributed, Reshape, Softmax
from tensorflow.keras.optimizers import Adam
from tensorflow.distribute import MirroredStrategy
from tensorflow.keras.metrics import CategoricalAccuracy

# Make the sibling training-script directory and the project Utility
# directory importable (the RPN layers/losses/metrics live there).
script_dir = Path.cwd().parent.parent.parent.joinpath('frcnn_mc_train')
sys.path.insert(1, str(script_dir))
util_dir = Path.cwd().parent.parent.parent.joinpath('Utility')
sys.path.insert(1, str(util_dir))
from Information import *
from Configuration import frcnn_config
from DataGenerator import DataGeneratorV2
from Layers import *
from Loss import *
from Metric import *

### Using a specific pair of CPU and GPU
# NOTE(review): indexes GPU 1 directly -- assumes the machine has at least
# two GPUs; confirm before running elsewhere.
physical_devices = tf.config.experimental.list_physical_devices('GPU')
tf.config.set_visible_devices(physical_devices[1], 'GPU')
tf.config.experimental.set_memory_growth(physical_devices[1], True)
print(tf.config.experimental.get_visible_devices())

# load configuration object (paths to validation arrays, anchor settings,
# base network factory) produced by the training pipeline
cwd = Path.cwd()
pickle_path = cwd.joinpath('frcnn.test.config.pickle')
C = pickle.load(open(pickle_path,'rb'))

# re-build model: frozen base net + RPN classifier and regressor heads
input_layer = Input(C.input_shape)
base_net = C.base_net.get_base_net(input_layer, trainable=False)
rpn_layer = rpn(C.anchor_scales, C.anchor_ratios)
classifier = rpn_layer.classifier(base_net)
regressor = rpn_layer.regression(base_net)
model = Model(inputs=input_layer, outputs = [classifier,regressor])
model.summary()

# load model weights (by_name so the two files can fill different layers)
model.load_weights(str(Path.cwd().joinpath('rpn_mc_00.h5')), by_name=True)
model.load_weights(str(Path.cwd().joinpath('detector_mc_RCNN_dr=0.0.h5')), by_name=True)

# set data generator over the validation arrays
val_generator = DataGeneratorV2(C.validation_img_inputs_npy, C.validation_labels_npy, C.validation_deltas_npy, batch_size=8)

# evaluate model
rpn_class_loss = define_rpn_class_loss(1)
rpn_regr_loss = define_rpn_regr_loss(100)
adam = Adam()

class StdCallback(tf.keras.callbacks.Callback):
    # Collects the per-batch accuracy/IoU metrics so their standard
    # deviation can be reported once evaluation finishes.
    accs = []
    ious = []

    def on_test_batch_end(self, batch, logs=None):
        self.accs.append(logs['rpn_out_class_unmasked_binary_accuracy'])
        self.ious.append(logs['rpn_out_regress_unmasked_IoU'])

    def on_test_end(self, epoch, logs=None):
        accs = np.array(self.accs)
        ious = np.array(self.ious)
        print()
        print(f'accs_std:{accs.std()}; ious_std:{ious.std()}')

model.compile(optimizer=adam, loss={'rpn_out_class' : rpn_class_loss,\
                'rpn_out_regress': rpn_regr_loss},\
                metrics={'rpn_out_class': [unmasked_binary_accuracy, positive_number],\
                'rpn_out_regress': unmasked_IoU})

# Evaluate and persist the aggregate metrics as a one-row CSV.
result = model.evaluate(x=val_generator, return_dict=True, callbacks=[StdCallback()])
result = {key:[value] for key, value in result.items()}
df = pd.DataFrame.from_dict(result)
df.to_csv(Path.cwd().joinpath('result.csv'), index=None)
| 36.505618 | 124 | 0.748538 |
eea44ef30a81ba67ad14a68694b3cdcb38fe067e | 1,686 | py | Python | cv_workshops/6-day/2-clazz.py | afterloe/opencv-practice | 83d76132d004ebbc96d99d34a0fd3fc37a044f9f | [
"MIT"
] | 5 | 2020-03-13T07:34:30.000Z | 2021-10-01T03:03:05.000Z | cv_workshops/6-day/2-clazz.py | afterloe/Opencv-practice | 83d76132d004ebbc96d99d34a0fd3fc37a044f9f | [
"MIT"
] | null | null | null | cv_workshops/6-day/2-clazz.py | afterloe/Opencv-practice | 83d76132d004ebbc96d99d34a0fd3fc37a044f9f | [
"MIT"
] | 1 | 2020-03-01T13:21:43.000Z | 2020-03-01T13:21:43.000Z | #!/usr/bin/env python3
# -*- coding=utf-8 -*-
import cv2 as cv
import numpy as np
"""
使用几何矩计算轮廓中心与横纵波比对过滤
对二值图像的各个轮廓进行计算获得对应的几何矩,根据几何矩计算轮廓点的中心位置。
cv.moments(contours, binaryImage)
- contours: 轮廓点集
- binaryImage: bool, default False;二值图返回
"""
def main():
    """Detect contours in the coins image, compute each contour's centroid
    from its image moments, and draw boxes/centroids colour-coded by the
    aspect ratio of the minimum-area bounding rectangle.

    Fixes over the original:
      * guards against a ZeroDivisionError when a degenerate contour has
        zero area (m00 == 0);
      * replaces ``np.int`` (removed in NumPy 1.24) with plain ``int``;
      * iterates contours directly instead of by index.
    """
    src = cv.imread("../../pic/money.jpg")
    cv.namedWindow("src", cv.WINDOW_KEEPRATIO)
    cv.namedWindow("dst", cv.WINDOW_KEEPRATIO)
    cv.imshow("src", src)

    # Edge map, dilated so contour outlines close up.
    t = 80
    binary = cv.Canny(src, t, t * 2)
    k = np.ones((3, 3), dtype=np.uint8)
    binary = cv.morphologyEx(binary, cv.MORPH_DILATE, k)

    contours, _ = cv.findContours(binary, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)
    for contour in contours:
        rect = cv.minAreaRect(contour)
        ww, hh = rect[1]
        # Short-side / long-side ratio in (0, 1]; ~1 means near-square.
        ratio = np.minimum(ww, hh) / np.maximum(ww, hh)
        print("ratio is ", ratio)

        mm = cv.moments(contour)
        m00 = mm["m00"]
        if m00 == 0:
            # Zero-area contour: centroid is undefined and dividing below
            # would raise ZeroDivisionError -- skip it.
            continue
        # Centroid from the first-order moments.
        cx = int(mm["m10"] / m00)
        cy = int(mm["m01"] / m00)

        box = np.int32(cv.boxPoints(rect))
        if 0.9 < ratio:
            cv.drawContours(src, [box], 0, (255, 0, 0), 2, cv.LINE_8)
            cv.circle(src, (np.int32(cx), np.int32(cy)), 2, (0, 0, 255), 2, cv.LINE_8)
        if 0.5 > ratio:
            cv.drawContours(src, [box], 0, (255, 255, 0), 2, cv.LINE_8)
            cv.circle(src, (np.int32(cx), np.int32(cy)), 2, (0, 255, 0), 2, cv.LINE_8)

    cv.imshow("dst", src)
    cv.waitKey(0)
    cv.destroyAllWindows()


if __name__ == "__main__":
    main()
| 31.811321 | 87 | 0.541518 |
eea59cf9926de3446a108f54259fdcc310099f7a | 172 | py | Python | api/utils/get_earnings.py | syth0le/REST-API_YANDEX | 7a693430973e4d0ae428860d17fc33504dc25fb2 | [
"MIT"
] | null | null | null | api/utils/get_earnings.py | syth0le/REST-API_YANDEX | 7a693430973e4d0ae428860d17fc33504dc25fb2 | [
"MIT"
] | null | null | null | api/utils/get_earnings.py | syth0le/REST-API_YANDEX | 7a693430973e4d0ae428860d17fc33504dc25fb2 | [
"MIT"
] | null | null | null | def get_salary(courier_type, completed_orders):
DATA = {"foot": 2, "bike": 5, "car": 9}
salary = 500 * DATA[str(courier_type)] * completed_orders
return salary
| 34.4 | 61 | 0.668605 |
eea747f6a5f58fa9f7cb6e82312ed9dadca75ac3 | 1,967 | py | Python | war.py | Eduardojvr/Space_Atack_Game | f37e1891bf00af71f3c1758a0288a6b0b830bb9e | [
"MIT"
] | null | null | null | war.py | Eduardojvr/Space_Atack_Game | f37e1891bf00af71f3c1758a0288a6b0b830bb9e | [
"MIT"
] | null | null | null | war.py | Eduardojvr/Space_Atack_Game | f37e1891bf00af71f3c1758a0288a6b0b830bb9e | [
"MIT"
] | null | null | null | from settings import Settings
from ship import Ship
import pygame
import sys
from trap import Trap
from time import clock
from random import randint
def run_game():
    """Run the Space Attack main loop: move the ship with the arrow keys,
    shoot with space, dodge the falling traps, lose a life per collision."""
    tela1 = Settings()
    # NOTE(review): `altura` ("height") is passed as the first set_mode
    # argument, which pygame treats as the width -- confirm the Settings
    # attribute names are not swapped.
    screen = pygame.display.set_mode((tela1.altura, tela1.largura))
    background = Settings()
    pygame.display.set_caption("Space War")
    nave = Ship(screen)
    #pygame.mouse.set_visible(0)
    # Three traps spawned at random horizontal positions.
    trap = [Trap(screen,randint(0,1200)), Trap(screen,randint(0,1200)), Trap(screen,randint(0,1200))]
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                sys.exit()
            elif event.type == pygame.KEYDOWN or event.type == pygame.KEYUP:
                # NOTE(review): movement fires on both KEYDOWN and KEYUP,
                # so a single press moves the ship twice -- confirm intended.
                if event.key == pygame.K_RIGHT:
                    nave.rect.centerx +=30
                elif event.key == pygame.K_LEFT:
                    nave.rect.centerx -=30
                elif event.key == pygame.K_UP:
                    nave.rect.bottom -=30
                elif event.key == pygame.K_DOWN:
                    nave.rect.bottom +=30
                elif event.key == pygame.K_SPACE:
                    nave.moveMissile()
        # Advance every trap and apply collision damage to the ship.
        for i in trap:
            i.rect.bottom += 30
            if (i.rect.colliderect(nave.rect)):
                nave.vida = nave.vida-1
        if (nave.vida < 1):
            # Out of lives: swap the background for the game-over screen.
            background.bg_image = pygame.image.load('imagens/gameover.bmp')
        screen.fill(tela1.bg_color)
        screen.blit(background.bg_image, (0,0))
        nave.blitme()
        nave.blitmemissile()
        for i in trap:
            i.blitme()
        # Respawn traps that fell past the bottom of the screen.
        for i in trap:
            if i.rect.centery > Settings().altura:
                i.rect.centery = 0
                i.rect.centerx = randint(0,1200)
                i.rect.centery = randint(0,200)
        pygame.display.flip()
################################ Main ################################
run_game()
| 32.245902 | 101 | 0.516523 |
eea8fc748971275806d47350049795a3a98b474a | 1,463 | py | Python | Project-2/doc/contingency_mat_parser.py | TooSchoolForCool/EE219-Larger-Scale-Data-Mining | 9a42c88169ace88f9b652d0e174c7f641fcc522e | [
"Apache-2.0"
] | null | null | null | Project-2/doc/contingency_mat_parser.py | TooSchoolForCool/EE219-Larger-Scale-Data-Mining | 9a42c88169ace88f9b652d0e174c7f641fcc522e | [
"Apache-2.0"
] | 12 | 2020-01-28T22:09:15.000Z | 2022-03-11T23:16:26.000Z | Project-2/doc/contingency_mat_parser.py | TooSchoolForCool/EE219-Larger-Scale-Data-Mining | 9a42c88169ace88f9b652d0e174c7f641fcc522e | [
"Apache-2.0"
] | null | null | null | import sys
import argparse
def read_in(file_path):
    """Parse the contingency-matrix dump at *file_path* and print each
    matrix as a tab-separated table; return the raw file lines.

    The input is assumed to consist of fixed 28-line records: a title
    line, a cluster-header line at offset 7, and 20 matrix rows at
    offsets 8..27.
    """
    # NOTE(review): the file handle is never closed; a with-statement would
    # be safer. (Kept as-is: this is Python 2 code -- see the trailing-comma
    # prints below.)
    try:
        file = open(file_path, 'r')
    except:
        sys.stderr.write("[ERROR] read_in(): Cannot open file '%s'\n" % file_path)
        exit(1)

    file_content = []
    for line in file:
        file_content.append(line)

    i = 0
    while i < len(file_content):
        line = file_content[i]
        # Strip the '<h5>'/'</h5>'-style wrapper around the title.
        title = line[5:-6]
        print("\t%s" % title)

        # Cluster header row: split on spaces, abbreviate "cluster_" to "c".
        line = file_content[i + 7].strip('\n').strip(' ').strip('\t')
        line = [l.strip('\t') for l in line.split(' ') if l]
        for item in line:
            # Python 2 idiom: trailing comma suppresses the newline.
            print("\t%s" % item.replace("cluster_", "c")),
        print("")

        # The 20 matrix rows that follow the header.
        for j in range(8, 28):
            line = file_content[i + j].strip('\n').strip(' ').strip('\t')
            line = [l.strip('\t') for l in line.split(' ') if l]
            for item in line:
                print("%s\t" % item),
            print("")
        i += 28
    return file_content
def add_parser():
    """Create and return the command-line parser for the comparison tool.

    Options:
      -s / --src   (required) source file path
      -d / --dest  (optional) destination file path
    """
    arg_parser = argparse.ArgumentParser(prog='Compare Evaluation Result')
    arg_parser.add_argument(
        "-s", "--src",
        dest="src",
        help="source file path",
        required=True,
    )
    arg_parser.add_argument(
        "-d", "--dest",
        dest="dest",
        help="destination file path",
    )
    return arg_parser
def main():
    """Entry point: parse the CLI arguments and pretty-print the source file."""
    parser = add_parser()
    args = parser.parse_args()

    # read_in both prints the parsed tables and returns the raw lines;
    # the return value is currently unused.
    src_file = read_in(args.src)
if __name__ == '__main__':
main() | 21.835821 | 82 | 0.514012 |
eea9326c5e16b9ddd8185aff0917cab86602e465 | 5,426 | py | Python | voldemort_client/helper.py | mirko-lelansky/voldemort-client | a2839a0cc50ca4fdc5bdb36b2df3a3cf7f7d9db9 | [
"Apache-2.0"
] | null | null | null | voldemort_client/helper.py | mirko-lelansky/voldemort-client | a2839a0cc50ca4fdc5bdb36b2df3a3cf7f7d9db9 | [
"Apache-2.0"
] | null | null | null | voldemort_client/helper.py | mirko-lelansky/voldemort-client | a2839a0cc50ca4fdc5bdb36b2df3a3cf7f7d9db9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Mirko Lelansky <mlelansky@mail.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module contains some helper methods for building parts of http requests.
"""
from datetime import datetime
import simplejson as json
from voldemort_client.exception import VoldemortError
def create_vector_clock(node_id, timeout):
    """Build the initial vector clock for a newly created key.

    Parameters
    ----------
    node_id : int
        the id of one node in the cluster
    timeout : int
        the expire timeout of the key

    Returns
    -------
    dict
        the vector clock as dictionary, with a single version entry for
        *node_id* at version 1

    Raises
    ------
    ValueError
        if either argument is None
    """
    if node_id is None or timeout is None:
        raise ValueError("You must gave the node id and the timeout.")
    return {
        "versions": [{"nodeId": node_id, "version": 1}],
        "timestamp": timeout,
    }
def merge_vector_clock(vector_clock, node_id, timeout=None):
    """Bump the version counter for *node_id* in an existing vector clock.

    If the node has no entry yet, a fresh ``{"nodeId": ..., "version": 1}``
    entry is appended. If it has exactly one entry, that entry's version is
    incremented and the entry is moved to the end of the versions list.
    The timestamp is refreshed only when *timeout* is given.

    Returns
    -------
    dict
        the updated vector clock

    Raises
    ------
    ValueError
        if *vector_clock* or *node_id* is None
    VoldemortError
        if the clock holds more than one entry for the node
    """
    if vector_clock is None or node_id is None:
        raise ValueError("You need the vector clock, timeout and the node id.")
    versions = vector_clock["versions"]
    node_entries = [entry for entry in versions if entry["nodeId"] == node_id]
    if not node_entries:
        versions.append({"nodeId": node_id, "version": 1})
    elif len(node_entries) == 1:
        entry = node_entries[0]
        versions.remove(entry)
        entry["version"] = entry["version"] + 1
        versions.append(entry)
    else:
        raise VoldemortError("Only one version map per node is allowed.")
    vector_clock["versions"] = versions
    if timeout is not None:
        vector_clock["timestamp"] = timeout
    return vector_clock
def build_get_headers(request_timeout):
    """Build the common request headers for get-style requests.

    Parameters
    ----------
    request_timeout : int
        the time where the request should be done in milli seconds

    Returns
    -------
    dict
        the headers as dictionary
    """
    # NOTE(review): datetime.timestamp() returns seconds while the header
    # name says ms -- confirm against the Voldemort REST coordinator spec.
    origin = datetime.now().timestamp()
    return {
        "X-VOLD-Request-Timeout-ms": str(int(request_timeout)),
        "X-VOLD-Request-Origin-Time-ms": str(int(origin)),
    }
def build_delete_headers(request_timeout, vector_clock):
    """Build the request headers for delete requests.

    The version to delete is carried in the ``X-VOLD-Vector-Clock`` header
    as a JSON-encoded vector clock, on top of the common get headers.

    Parameters
    ----------
    request_timeout : int
        the time where the request should be done in milli seconds
    vector_clock : dict
        the vector clock which represents the version which should be delete

    Returns
    -------
    dict
        the headers as dictionary
    """
    headers = build_get_headers(request_timeout)
    headers["X-VOLD-Vector-Clock"] = json.dumps(vector_clock)
    return headers
def build_set_headers(request_timeout, vector_clock, content_type="text/plain"):
    """Build the request headers for SET (create/update) requests.

    Parameters
    ----------
    request_timeout : int
        the time in which the request should complete, in milliseconds
    vector_clock : dict
        the vector clock of the version being created or updated
    content_type : str
        the content type of the value

    Returns
    -------
    dict
        the headers as dictionary
    """
    # SET requests carry everything a DELETE does, plus the payload type.
    headers = build_delete_headers(request_timeout, vector_clock)
    headers["Content-Type"] = content_type
    return headers
def build_version_headers(request_timeout):
    """Build the request headers for version requests.

    Parameters
    ----------
    request_timeout : int
        the time in which the request should complete, in milliseconds

    Returns
    -------
    dict
        the headers as dictionary
    """
    # Same as GET, with the (empty-valued) version marker header added.
    headers = build_get_headers(request_timeout)
    headers["X-VOLD-Get-Version"] = ""
    return headers
def build_url(url, store_name, key):
    """Combine the base URL, store name and key part into a REST-API URL.

    Parameters
    ----------
    url : str
        the base url
    store_name : str
        the name of the voldemort store
    key : str
        the url part which represents the key or keys

    Returns
    -------
    str
        the combined url of the REST-API
    """
    return "{}/{}/{}".format(url, store_name, key)
| 29.32973 | 82 | 0.651493 |
eea9c161475ffd63195c5ca94c42455b4deb9625 | 1,581 | py | Python | src/reddack/exceptions.py | diatomicDisaster/Reddit-Slackbot | 4f22af110e72eab19d9162a4428800a1895303f3 | [
"MIT"
] | null | null | null | src/reddack/exceptions.py | diatomicDisaster/Reddit-Slackbot | 4f22af110e72eab19d9162a4428800a1895303f3 | [
"MIT"
] | 10 | 2022-02-21T01:11:20.000Z | 2022-02-22T18:13:00.000Z | src/reddack/exceptions.py | diatomicDisaster/redack | 4f22af110e72eab19d9162a4428800a1895303f3 | [
"MIT"
] | null | null | null | from __future__ import (
annotations,
)
class ModFromSlackError(Exception):
    """Base class for modfromslack errors.

    Optionally decorates the core message with a *preamble* (prepended,
    space-separated) and an *afterword* (appended after a blank line).
    """

    def __init__(
        self,
        message: str,
        *,
        preamble: str | None = None,
        afterword: str | None = None
    ) -> None:
        full_message = message if preamble is None else f"{preamble} {message}"
        if afterword is not None:
            full_message = f"{full_message}\n\n{afterword}"
        super().__init__(full_message)
class MsgSendError(ModFromSlackError):
    """Failed to send Slack message.

    Raised when delivering a message to Slack does not succeed.
    """
class SequenceError(ModFromSlackError):
    """Something has happened in the wrong order."""

    def __init__(
        self,
        should_be_first,
        should_be_second,
        *,
        preamble: str | None = None,
        afterword: str | None = None
    ) -> None:
        # Describe the ordering violation; the base class handles the
        # optional preamble/afterword decoration.
        super().__init__(
            f"Expected {should_be_first} before {should_be_second}",
            preamble=preamble,
            afterword=afterword,
        )
class ActionSequenceError(SequenceError):
    """App thinks action came before its parent message."""

    def __init__(
        self,
        parentmsg_ts,
        action_ts,
        *,
        afterword=None
    ) -> None:
        # Embed both timestamps in the preamble so logs show the exact skew.
        _preamble=f"'message_ts' {parentmsg_ts} is later than 'action_ts' {action_ts}"
        super().__init__(
            "parent message",
            "action",
            preamble=_preamble,
            afterword=afterword
        )
class ConfigError(ModFromSlackError):
    """Error in config file format.

    Raised when the application's configuration cannot be parsed as expected.
    """
| 26.79661 | 86 | 0.573688 |
eeaa2be76b33b3286d73455fcb963e240ddf8af4 | 7,276 | py | Python | cid/cli/cli_generator.py | zeljko-bal/CID | 52ecc445c441ec63386c9f092b226090588a3789 | [
"MIT"
] | 1 | 2017-09-15T06:14:54.000Z | 2017-09-15T06:14:54.000Z | cid/cli/cli_generator.py | zeljko-bal/CID | 52ecc445c441ec63386c9f092b226090588a3789 | [
"MIT"
] | null | null | null | cid/cli/cli_generator.py | zeljko-bal/CID | 52ecc445c441ec63386c9f092b226090588a3789 | [
"MIT"
] | null | null | null | from collections import defaultdict
from os import makedirs
from os.path import realpath, join, dirname, isdir, exists
from shutil import copy
from jinja2 import Environment, FileSystemLoader
from cid.cli.cli_model_specs import CliModelSpecs
from cid.cli import cli_post_processing
from cid.parser.cid_parser import parse
from cid.common.cid_model_processor import CidModelProcessor
from cid.common.utils import *
# Template and framework directories resolved relative to this module, so
# generation works regardless of the current working directory.
_cli_templates_path = join(dirname(realpath(__file__)), 'templates')
_cli_framework_path = join(dirname(realpath(__file__)), 'framework')
# ------------------------------- JINJA FILTERS -------------------------------
def parameter_model_filter(parameter):
    """Jinja filter: render the runtime parser-model constructor call(s) for a
    CID parameter, chosen from its type and its nonpositional patterns."""
    def print_list(lst):
        # A single element renders as 'x'; multiple render as a list literal.
        return str(lst) if len(lst) > 1 else "'{}'".format(lst[0])
    if parameter.type == 'Bool':
        # Boolean flags: collect positive/negative pattern spellings and emit
        # a single BooleanNonpositional constructor.
        positives = [p.positive for p in parameter.all_patterns if p.positive]
        negatives = [p.negative for p in parameter.all_patterns if p.negative]
        positives_str = ", positives={prefixes}".format(prefixes=print_list(positives)) if positives else ''
        negatives_str = ", negatives={prefixes}".format(prefixes=print_list(negatives)) if negatives else ''
        return "BooleanNonpositional('{name}'{positives}{negatives})".format(
            name=parameter.name, positives=positives_str, negatives=negatives_str)
    else:
        ret = []
        # Group patterns first by the parser-model class they map to, then by
        # the extra constructor-argument string they share, so patterns that
        # only differ in prefix collapse into one constructor call.
        classified = defaultdict(lambda: defaultdict(set))
        for pattern in parameter.all_patterns:
            if pattern.white_space:
                # Prefix separated from its value(s) by whitespace.
                if pattern.count:
                    count_str = ", count={count}".format(count=pattern.count)
                elif pattern.count_many:
                    count_str = ", count='*'"
                else:
                    count_str = ''
                classified['MultiArgNonpositional'][count_str].add(pattern)
            else:
                if pattern.count_many:
                    # Values joined to the prefix, optionally separator-delimited.
                    if pattern.separator:
                        separator_str = ", '{}'".format(pattern.separator)
                    else:
                        separator_str = ''
                    classified['SeparatedNonpositional'][separator_str].add(pattern)
                elif pattern.count_char:
                    # Repeated-character counter patterns.
                    classified['CounterNonpositional'][pattern.count_char].add(pattern)
                else:
                    classified['BasicNonpositional']['_'].add(pattern)
        # Emit one constructor call per (class, argument-string) group.
        if classified['MultiArgNonpositional']:
            for count_str, patterns in classified['MultiArgNonpositional'].items():
                prefixes = [p.prefix for p in patterns]
                ret.append("MultiArgNonpositional('{name}', {prefixes}{count_str})".format(name=parameter.name, prefixes=print_list(prefixes), count_str=count_str))
        if classified['SeparatedNonpositional']:
            for separator_str, patterns in classified['SeparatedNonpositional'].items():
                prefixes = [p.prefix for p in patterns]
                ret.append("SeparatedNonpositional('{name}', {prefixes}{separator_str})".format(name=parameter.name, prefixes=print_list(prefixes), separator_str=separator_str))
        if classified['CounterNonpositional']:
            for count_char, patterns in classified['CounterNonpositional'].items():
                prefixes = [p.prefix for p in patterns]
                ret.append("CounterNonpositional('{name}', {prefixes}, '{count_char}')".format(name=parameter.name, prefixes=print_list(prefixes), count_char=count_char))
        if classified['BasicNonpositional']:
            for _, patterns in classified['BasicNonpositional'].items():
                prefixes = [p.prefix for p in patterns]
                ret.append("BasicNonpositional('{name}', {prefixes})".format(name=parameter.name, prefixes=print_list(prefixes)))
        return ', '.join(ret)
def have_sub_commands_filter(commands):
    """Jinja filter: True when at least one command defines sub-commands."""
    for command in commands:
        if command.sub_commands:
            return True
    return False
# ------------------------------- GENERATOR FUNCTIONS -------------------------------
def process_model(model):
    """Run every registered CLI post-processing visitor over *model*, then
    attach CLI-specific specs via a CliModelSpecs visitor pass."""
    for visitor in cli_post_processing.model_visitors:
        CidModelProcessor(visitor).process_model(model)
    CidModelProcessor(CliModelSpecs().visitor).process_model(model)
def render_cli_code(model, root_command_name, cli_app_path):
    """Render the generated Python sources for the CLI app.

    Writes <root>_cli_parser.py (always regenerated) and <root>_cli.py
    (only created when absent, so user edits survive regeneration) into
    *cli_app_path*.
    """
    # EXTRACT DATA ---------------------
    model_extractor = ElementExtractor()
    CidModelProcessor(model_extractor.visitor).process_model(model)
    all_commands = model_extractor.all_commands
    all_parameters = model_extractor.all_parameters
    # RENDER CLI PARSER ---------------------
    env = Environment(loader=FileSystemLoader(_cli_templates_path))
    # Register the custom filters/globals the templates rely on.
    env.filters['parameter_model'] = parameter_model_filter
    env.filters['element_type'] = element_type
    env.filters['tab_indent'] = tab_indent_filter
    env.filters['stringify'] = stringify_filter
    env.filters['have_sub_commands'] = have_sub_commands_filter
    env.globals['raise'] = raise_exception_helper
    parser_template = env.get_template('cli_parser.template')
    parser_rendered = parser_template.render(root_command_name=root_command_name, root_command_id=element_id(root_command_name),
                                             commands=all_commands, parameters=all_parameters)
    with open(join(cli_app_path, root_command_name + "_cli_parser.py"), "w") as text_file:
        text_file.write(parser_rendered)
    # RENDER CLI COMMAND ---------------------
    # Only write the command stub when it does not exist yet, to avoid
    # clobbering user customizations.
    command_file_path = join(cli_app_path, root_command_name + '_cli.py')
    if not exists(command_file_path):
        command_template = env.get_template('cli_command.template')
        command_rendered = command_template.render(root_command_name=root_command_name)
        with open(command_file_path, "w") as text_file:
            text_file.write(command_rendered)
def copy_framework(cli_app_path):
    """Copy the static (non-generated) CLI framework files into
    *cli_app_path*, creating the directory tree first if needed."""
    if not isdir(cli_app_path):
        makedirs(cli_app_path)
    copy(join(_cli_framework_path, "generic_cli_parser.py"), cli_app_path)
    copy(join(_cli_framework_path, "js_date.py"), cli_app_path)
def render_runner_script(root_command_name, dest_path):
    """Render the Windows .bat launcher that runs the generated CLI script
    (<root>_cli/<root>_cli.py) and write it to *dest_path*."""
    env = Environment(loader=FileSystemLoader(_cli_templates_path))
    template = env.get_template('windows_cli_py_runner.template')
    rendered = template.render(command_path=join(root_command_name + '_cli', root_command_name + "_cli.py"))
    with open(join(dest_path, root_command_name + ".bat"), "w") as text_file:
        text_file.write(rendered)
def is_root_command_defined(model, root_command_name):
    """Return True when *root_command_name* matches a command defined in *model*."""
    model_extractor = ElementExtractor()
    CidModelProcessor(model_extractor.visitor).process_model(model)
    return root_command_name in [command.name for command in model_extractor.all_commands]
def generate_cli(cid_file, root_command_name, dest_path):
    """Parse *cid_file* and generate a complete CLI app for
    *root_command_name* under *dest_path*: framework files, rendered parser
    and command modules, and the Windows runner script."""
    cli_app_path = join(dest_path, root_command_name + "_cli")
    model = parse(cid_file)
    # Bail out early when the requested root command is not in the model.
    if not is_root_command_defined(model, root_command_name):
        print("Error: The specified root command is not defined.")
        return
    process_model(model)
    copy_framework(cli_app_path)
    render_cli_code(model, root_command_name, cli_app_path)
    render_runner_script(root_command_name, dest_path)
    print("Generated cli successfully.")
| 42.8 | 177 | 0.67276 |
eeaa72a12bf7e9c9d8b1d3537dc9a129425ee115 | 2,037 | py | Python | container/sample-inf1/inf1_mx.py | yunma10/neo-ai-dlr | 1f5c65d9bf7155c016e5d2f78d273755760a4f2a | [
"Apache-2.0"
] | 446 | 2019-01-24T02:04:17.000Z | 2022-03-16T13:45:32.000Z | container/sample-inf1/inf1_mx.py | yunma10/neo-ai-dlr | 1f5c65d9bf7155c016e5d2f78d273755760a4f2a | [
"Apache-2.0"
] | 179 | 2019-01-24T10:03:34.000Z | 2022-03-19T02:06:56.000Z | container/sample-inf1/inf1_mx.py | yunma10/neo-ai-dlr | 1f5c65d9bf7155c016e5d2f78d273755760a4f2a | [
"Apache-2.0"
] | 111 | 2019-01-24T20:51:45.000Z | 2022-02-18T06:22:40.000Z | import mxnet as mx
#import neomxnet
import os
import json
import numpy as np
from collections import namedtuple
import os
# Precision all checkpoint parameters are cast to before binding the module.
dtype='float32'
# Minimal stand-in for the MXNet data-batch interface passed to mod.forward.
Batch = namedtuple('Batch', ['data'])
# Inference runs on the Neuron (AWS Inferentia) context.
ctx = mx.neuron()
is_gpu = False
def model_fn(model_dir):
    """Load the MXNet checkpoint named by $MODEL_NAME_CUSTOM from
    *model_dir*, bind it for (1,3,224,224) inference on the Neuron context,
    and return the ready Module."""
    print("param {}".format(os.environ.get('MODEL_NAME_CUSTOM')))
    print("ctx {}".format(ctx))
    sym, arg_params, aux_params = mx.model.load_checkpoint(os.path.join(model_dir, os.environ.get('MODEL_NAME_CUSTOM')), 0)
    mod = mx.mod.Module(symbol=sym, context=ctx, label_names=None)
    # Cast every parameter and auxiliary state to float32 before binding.
    for arg in arg_params:
        arg_params[arg] = arg_params[arg].astype(dtype)
    for arg in aux_params:
        aux_params[arg] = aux_params[arg].astype(dtype)
    # NOTE(review): bind() is called for its side effect on `mod`; the
    # returned value `exe` is never used.
    exe = mod.bind(for_training=False,
                   data_shapes=[('data', (1,3,224,224))],
                   label_shapes=mod._label_shapes)
    mod.set_params(arg_params, aux_params, allow_missing=True)
    return mod
def transform_fn(mod, img, input_content_type, output_content_type):
    """Decode an image payload, preprocess it into a (1,3,224,224) float32
    batch (scaled to [0,1] and mean/std normalized), run it through *mod*,
    and return the output probabilities as JSON plus the content type."""
    # Disabled neuron-cli diagnostics kept for debugging.
    '''
    stream = os.popen('/opt/aws/neuron/bin/neuron-cli list-model')
    output = stream.read()
    print(output)
    stream = os.popen('/opt/aws/neuron/bin/neuron-cli list-ncg')
    output = stream.read()
    print(output)
    '''
    image = mx.image.imdecode(img)
    # Resize so the short side is 224, then take the center 224x224 crop.
    resized = mx.image.resize_short(image, 224) # minimum 224x224 images
    cropped, crop_info = mx.image.center_crop(resized, (224, 224))
    normalized = mx.image.color_normalize(cropped.astype(np.float32) / 255,
                                          mean=mx.nd.array([0.485, 0.456, 0.406]),
                                          std=mx.nd.array([0.229, 0.224, 0.225]))
    # the network expect batches of the form (N,3,224,224)
    transposed = normalized.transpose((2, 0, 1)) # Transposing from (224, 224, 3) to (3, 224, 224)
    batchified = transposed.expand_dims(axis=0) # change the shape from (3, 224, 224) to (1, 3, 224, 224)
    image = batchified.astype(dtype='float32')
    mod.forward(Batch([image]))
    prob = mod.get_outputs()[0].asnumpy().tolist()
    prob_json = json.dumps(prob)
    return prob_json, output_content_type
| 37.722222 | 121 | 0.675994 |
eeab90972c87f9c41713b77c4809b4a9c645a33d | 4,040 | py | Python | data/process_data.py | KCKhoo/disaster_response_dashboard | ee337125121664503675bfb5bf01af85c7c1a8ca | [
"FTL",
"CNRI-Python",
"blessing"
] | null | null | null | data/process_data.py | KCKhoo/disaster_response_dashboard | ee337125121664503675bfb5bf01af85c7c1a8ca | [
"FTL",
"CNRI-Python",
"blessing"
] | null | null | null | data/process_data.py | KCKhoo/disaster_response_dashboard | ee337125121664503675bfb5bf01af85c7c1a8ca | [
"FTL",
"CNRI-Python",
"blessing"
] | null | null | null | import sys
import pandas as pd
from sqlalchemy import create_engine
def load_data(messages_filepath, categories_filepath):
    ''' Load and merge two CSV files - one containing messages and the other containing categories

    Args:
        messages_filepath (str): Path to the CSV file containing messages
        categories_filepath (str): Path to the CSV file containing categories of each message

    Returns:
        df (DataFrame): A merged DataFrame containing messages and categories
    '''
    messages_df = pd.read_csv(messages_filepath)
    categories_df = pd.read_csv(categories_filepath)
    # Inner-join the two datasets on the shared message id.
    return pd.merge(messages_df, categories_df, on='id')
def clean_data(df):
    '''
    Clean the data for the machine learning model. Cleaning steps:
    1) Expand the semicolon-delimited 'categories' column into one column
       per category, named after the text before the trailing "-<digit>".
    2) Keep only the numeric value (0 or 1) of each category cell.
    3) Replace the original 'categories' column with the new columns.
    4) Drop duplicate rows.
    5) Drop rows where the 'related' category equals 2.

    Args:
        df (DataFrame): A DataFrame

    Returns:
        df_clean (DataFrame): clean DataFrame
    '''
    df_clean = df.copy()

    # One column per ';'-separated entry, e.g. "related-1;request-0".
    category_cols = df_clean['categories'].str.strip().str.split(';', expand=True)

    # Derive the column names from the first row by stripping the "-<digit>"
    # suffix of each entry.
    sample_row = category_cols.iloc[0, :]
    category_cols.columns = sample_row.apply(lambda cell: cell[:-2])

    # Keep only the numeric part after the final '-'.
    for column in category_cols:
        category_cols[column] = pd.to_numeric(
            category_cols[column].str.split('-').str[-1])

    # Swap the raw 'categories' column for the expanded numeric columns.
    df_clean = pd.concat(
        [df_clean.drop(columns=['categories']), category_cols], axis=1)

    df_clean = df_clean.drop_duplicates()

    # Remove the ambiguous related==2 rows and renumber the index.
    return df_clean[df_clean['related'] != 2].reset_index(drop=True)
def save_data(df, database_filename):
    ''' Save clean dataset to a SQLite database

    Args:
        df (DataFrame): Clean dataframe
        database_filename (string): Path at which database will be stored

    Returns:
        None
    '''
    db_uri = 'sqlite:///' + database_filename
    # Write the frame into the 'DisasterMessages' table, dropping the index.
    df.to_sql('DisasterMessages', create_engine(db_uri), index=False)
def main():
    """Command-line entry point: load, clean, and save the disaster data.

    Expects exactly three positional arguments: the messages CSV path, the
    categories CSV path, and the output SQLite database path; prints usage
    help otherwise.
    """
    if len(sys.argv) == 4:
        messages_filepath, categories_filepath, database_filepath = sys.argv[1:]
        print('Loading data...\n    MESSAGES: {}\n    CATEGORIES: {}'
              .format(messages_filepath, categories_filepath))
        df = load_data(messages_filepath, categories_filepath)
        print('Cleaning data...')
        df = clean_data(df)
        print('Saving data...\n    DATABASE: {}'.format(database_filepath))
        save_data(df, database_filepath)
        print('Cleaned data saved to database!')
    else:
        print('Please provide the filepaths of the messages and categories '\
              'datasets as the first and second argument respectively, as '\
              'well as the filepath of the database to save the cleaned data '\
              'to as the third argument. \n\nExample: python process_data.py '\
              'disaster_messages.csv disaster_categories.csv '\
              'DisasterResponse.db')
if __name__ == '__main__':
main() | 33.94958 | 98 | 0.658168 |
eeaca61d7f8a12d9407b89ba0d429021d517e4c0 | 179 | py | Python | problem0650.py | kmarcini/Project-Euler-Python | d644e8e1ec4fac70a9ab407ad5e1f0a75547c8d3 | [
"BSD-3-Clause"
] | null | null | null | problem0650.py | kmarcini/Project-Euler-Python | d644e8e1ec4fac70a9ab407ad5e1f0a75547c8d3 | [
"BSD-3-Clause"
] | null | null | null | problem0650.py | kmarcini/Project-Euler-Python | d644e8e1ec4fac70a9ab407ad5e1f0a75547c8d3 | [
"BSD-3-Clause"
] | null | null | null | ###########################
#
# #650 Divisors of Binomial Product - Project Euler
# https://projecteuler.net/problem=650
#
# Code by Kevin Marciniak
#
###########################
| 19.888889 | 51 | 0.502793 |
eeacff18635731300c340b2e253ce1bf7ee2b4e0 | 3,432 | py | Python | pycle/bicycle-scrapes/bike-data-scrape/scraperMulti.py | fusuyfusuy/School-Projects | 8e38f19da90f63ac9c9ec91e550fc5aaab3d0234 | [
"MIT"
] | null | null | null | pycle/bicycle-scrapes/bike-data-scrape/scraperMulti.py | fusuyfusuy/School-Projects | 8e38f19da90f63ac9c9ec91e550fc5aaab3d0234 | [
"MIT"
] | null | null | null | pycle/bicycle-scrapes/bike-data-scrape/scraperMulti.py | fusuyfusuy/School-Projects | 8e38f19da90f63ac9c9ec91e550fc5aaab3d0234 | [
"MIT"
] | null | null | null | from bs4 import BeautifulSoup
import os
import csv
# Accumulates one spec-dict per successfully parsed bicycle page.
bicycles = []
basepath = 'HTMLFiles/'
# NOTE(review): opened in append mode and never closed; repeated runs keep
# appending to scraped.py and flushing relies on interpreter exit.
outputFile = open('scraped.py','a')
outputFile.write("list=[")
len1 = len(os.listdir(basepath))
counter1 = 0
for entry in os.listdir(basepath):
    counter2 = 0
    len2 = len(os.listdir(basepath+'/'+entry))
    for folder in os.listdir(basepath+'/'+entry):
        # NOTE(review): listFile is never closed.
        listFile = open(basepath+entry+'/'+folder,"r")
        try:
            parsed = BeautifulSoup(listFile, "html.parser")
        except:
            print('bs4 error in '+basepath+entry+'/'+folder)
            break
        # Default every known spec column to '-' so the output rows stay
        # aligned even when a page omits some table entries.
        bicycle = {
            'Brand': '-',
            'Model': '-',
            'Weight': '-',
            'Released on the market': '-',
            'For women': '-',
            'For kids': '-',
            'Frame material': '-',
            'Frame type': '-',
            'Collapsible frame': '-',
            'Color': '-',
            'Fork type': '-',
            'Shock absorber type': '-',
            'Shock absorber pressure': '-',
            'Fork name': '-',
            'Wheel drive': '-',
            'Drive type': '-',
            'Transmission type': '-',
            'Number of speeds': '-',
            'System name': '-',
            'Cassette name': '-',
            'Front derailleur gears name': '-',
            'Rear derailleur gears name': '-',
            'Shifters type': '-',
            'Shifters name': '-',
            'Front brakes': '-',
            'Front brakes name': '-',
            'Rear brakes': '-',
            'Number of wheels': '-',
            'Wheels diameter': '-',
            'Double rim': '-',
            'Rim material': '-',
            'Rims name': '-',
            'Tyres pattern': '-',
            'Tyres name': '-',
            'Handlebar type': '-',
            'Handlebar name': '-',
            'Seat type': '-',
            'Seat suspension': '-',
            'Seat name': '-',
            'Pedals type': '-',
            'Pedals name': '-',
            'Front panel': '-',
            'Rear panel panel': '-',
            'Trunk': '-',
            'Rearview mirror': '-',
            'Horn': '-',
            'Basket': '-'
        }
        # Spec pages lay data out as <tr><td>key</td><td>value</td> rows.
        tableRows = parsed.findAll('tr')
        for row in tableRows:
            tableData = row.findAll('td')
            try:
                key = tableData[0].text.strip()
                value = tableData[1].text.strip()
            except:
                print('error in '+basepath+entry+'/'+folder)
                break
            else:
                bicycle[key] = value
        # A page that never filled in Brand had no usable spec table.
        if(bicycle['Brand']!='-'):
            bicycles.append(bicycle)
            outputFile.write(str(bicycle)+',\n')
        counter2+=1
        print("parsing "+str(counter2)+" of "+str(len2)+"        ", end='\r')
    counter1+=1
    print("\nFOLDER parsing "+str(counter1)+" of "+str(len1)+"        \n", end='\r')
# keys = bicycles[0].keys()
# with open('bicycles.csv', 'w', newline='') as output_file:
#     dict_writer = csv.DictWriter(output_file, keys)
#     dict_writer.writeheader()
#     dict_writer.writerows(bicycles)
outputFile.write(']')
# Source text appended to scraped.py so that running the generated file later
# writes the scraped data out to bicycles.csv.
toWrite = """
import csv
keys = list[0].keys()
with open('bicycles.csv', 'w', newline='') as output_file:
    dict_writer = csv.DictWriter(output_file, keys)
    dict_writer.writeheader()
    dict_writer.writerows(list)
"""
outputFile.write(toWrite) | 28.840336 | 106 | 0.456002 |
eeade2b142cd94a4f1b12d3d46fc0e265a2be53e | 642 | py | Python | python/283_move_zeroes.py | liaison/LeetCode | 8b10a1f6bbeb3ebfda99248994f7c325140ee2fd | [
"MIT"
] | 17 | 2016-03-01T22:40:53.000Z | 2021-04-19T02:15:03.000Z | python/283_move_zeroes.py | liaison/LeetCode | 8b10a1f6bbeb3ebfda99248994f7c325140ee2fd | [
"MIT"
] | null | null | null | python/283_move_zeroes.py | liaison/LeetCode | 8b10a1f6bbeb3ebfda99248994f7c325140ee2fd | [
"MIT"
] | 3 | 2019-03-07T03:48:43.000Z | 2020-04-05T01:11:36.000Z | class Solution:
def moveZeroes(self, nums: List[int]) -> None:
    """
    Do not return anything, modify nums in-place instead.

    Two-pass compaction: copy every non-zero value toward the front in
    order, then overwrite the remaining tail with zeroes.
    """
    insert_at = 0
    for value in nums:
        if value != 0:
            nums[insert_at] = value
            insert_at += 1
    for tail in range(insert_at, len(nums)):
        nums[tail] = 0
| 30.571429 | 61 | 0.542056 |
eeaeecc00f80638bdeeeac780d5b87b92462f522 | 464 | py | Python | dummyGPIO.py | yasokada/python-151127-7segLed_IPadrDisplay | eb97f17685ac2477e6a3a7321159d6463f736dd2 | [
"MIT"
] | 1 | 2017-01-13T23:57:21.000Z | 2017-01-13T23:57:21.000Z | toLearn/dummyGPIO.py | yasokada/python-151113-lineMonitor | 224342d5855d8ee6792fad6ad36399d95fce1b09 | [
"MIT"
] | 2 | 2015-12-08T23:40:12.000Z | 2015-12-24T22:09:07.000Z | dummyGPIO.py | yasokada/python-151127-7segLed_IPadrDisplay | eb97f17685ac2477e6a3a7321159d6463f736dd2 | [
"MIT"
] | null | null | null | '''
v0.1 2015/11/26
- add output()
- add setmode()
- add setup()
'''
class CDummyGPIO:
    """Drop-in stand-in for RPi.GPIO: accepts the same calls, does nothing.

    Lets code written against GPIO run on machines without the hardware.
    """

    def __init__(self):
        # Mimic the module-level constants callers pass back in.
        self.BOARD = 0
        self.OUT = 1

    def setmode(self, board):
        """No-op stand-in for GPIO.setmode()."""
        return

    def setup(self, pinnum, inout):
        """No-op stand-in for GPIO.setup()."""
        return

    def output(self, pinnum, onoff):
        """No-op stand-in for GPIO.output()."""
        return
# Usage
'''
from dummyGPIO import CDummyGPIO
GPIO = CDummyGPIO()
GPIO.setmode(GPIO.BOARD)
GPIO.setup(10, GPIO.OUT)
'''
| 12.888889 | 33 | 0.642241 |
eeb4e80e6cc8868c343b5e9768135af13ccbaa18 | 380 | py | Python | setup.py | CyrusBiotechnology/django-headmaster | 0100b4086c09da43da5f2f68e3cb549dca8af96a | [
"MIT"
] | null | null | null | setup.py | CyrusBiotechnology/django-headmaster | 0100b4086c09da43da5f2f68e3cb549dca8af96a | [
"MIT"
] | null | null | null | setup.py | CyrusBiotechnology/django-headmaster | 0100b4086c09da43da5f2f68e3cb549dca8af96a | [
"MIT"
] | null | null | null | from setuptools import setup
# Distribution metadata for the django-headmaster package.
setup(name='django-headmaster',
      version='0.0.1',
      description='Add extra headers to your site via your settings file',
      url='http://github.com/CyrusBiotechnology/django-headmaster',
      author='Peter Novotnak',
      author_email='peter@cyrusbio.com',
      license='MIT',
      packages=['django_headmaster'],
      zip_safe=True)
| 31.666667 | 74 | 0.681579 |
eeb69df1582f775092e1af736d2173a50d2365bb | 484 | py | Python | tests/test_lines_count.py | MacHu-GWU/single_file_module-project | 01f7a6b250853bebfd73de275895bf274325cfc1 | [
"MIT"
] | 3 | 2017-02-27T05:07:46.000Z | 2022-01-17T06:46:20.000Z | tests/test_lines_count.py | MacHu-GWU/single_file_module-project | 01f7a6b250853bebfd73de275895bf274325cfc1 | [
"MIT"
] | null | null | null | tests/test_lines_count.py | MacHu-GWU/single_file_module-project | 01f7a6b250853bebfd73de275895bf274325cfc1 | [
"MIT"
] | 1 | 2017-09-05T14:05:55.000Z | 2017-09-05T14:05:55.000Z | # -*- coding: utf-8 -*-
import os
import pytest
from sfm import lines_count
def test_lines_count():
    # This test file itself is the fixture; it always has at least 22 lines.
    assert lines_count.count_lines(__file__) >= 22
def test_lines_stats():
    # Aggregate over every python script in this test directory; only lower
    # bounds are asserted so adding files/lines keeps the test green.
    n_files, n_lines = lines_count.lines_stats(
        os.path.dirname(__file__), lines_count.filter_python_script)
    assert n_files >= 17
    assert n_lines >= 1096
if __name__ == "__main__":
    # Allow running this file directly: invoke pytest on just this module.
    import os
    basename = os.path.basename(__file__)
    pytest.main([basename, "-s", "--tb=native"])
| 20.166667 | 68 | 0.688017 |
eeb6fa5b0b347f06d9b353c9e9aeb47e31e57218 | 1,884 | py | Python | rrwebapp/accesscontrol.py | louking/rrwebapp | 5c73f84e1a21bc3b5fa51d83ba576c3152e6cf27 | [
"Apache-2.0"
] | null | null | null | rrwebapp/accesscontrol.py | louking/rrwebapp | 5c73f84e1a21bc3b5fa51d83ba576c3152e6cf27 | [
"Apache-2.0"
] | 417 | 2015-05-07T16:50:22.000Z | 2022-03-14T16:16:13.000Z | rrwebapp/accesscontrol.py | louking/rrwebapp | 5c73f84e1a21bc3b5fa51d83ba576c3152e6cf27 | [
"Apache-2.0"
] | null | null | null | ###########################################################################################
# accesscontrol - access control permission and need definitions
#
# Date Author Reason
# ---- ------ ------
# 01/18/14 Lou King Create
#
# Copyright 2014 Lou King
#
###########################################################################################
'''
accesscontrol - access control permission and need definitions
===================================================================
'''
# standard
from collections import namedtuple
from functools import partial
# pypi
import flask
from flask_login import current_user
from flask_principal import Principal, Permission, RoleNeed, UserNeed
# home grown
from . import app
from .model import db # this is ok because this module only runs under flask
########################################################################
# permissions definition
########################################################################
# load principal extension, and define permissions
# see http://pythonhosted.org/Flask-Principal/ section on Granular Resource Protection
principals = Principal(app)
# Role-based permissions shared across the application.
owner_permission = Permission(RoleNeed('owner'))
admin_permission = Permission(RoleNeed('admin'))
viewer_permission = Permission(RoleNeed('viewer'))
# Club-scoped needs: (method, value) tuples with the method pre-bound, so
# call sites only supply the club id.
ClubDataNeed = namedtuple('club_data', ['method', 'value'])
UpdateClubDataNeed = partial(ClubDataNeed,'update')
ViewClubDataNeed = partial(ClubDataNeed,'view')
class UpdateClubDataPermission(Permission):
    """Permission requiring the 'update' need for a single club's data."""

    def __init__(self, clubid):
        # One need per club id; access is granted when the identity
        # provides a matching need.
        super(UpdateClubDataPermission, self).__init__(UpdateClubDataNeed(clubid))
class ViewClubDataPermission(Permission):
    """Permission requiring the 'view' need for a single club's data."""

    def __init__(self, clubid):
        # One need per club id; access is granted when the identity
        # provides a matching need.
        super(ViewClubDataPermission, self).__init__(ViewClubDataNeed(clubid))
| 34.254545 | 91 | 0.571656 |
eeb941243abfa405873eabb4951a2447d2772339 | 173 | py | Python | bareon_fuel_extension/tests.py | gitfred/bareon-fuel-extension | 0074f187a6244e786b37e551009fa2eadcae1d3a | [
"Apache-2.0"
] | null | null | null | bareon_fuel_extension/tests.py | gitfred/bareon-fuel-extension | 0074f187a6244e786b37e551009fa2eadcae1d3a | [
"Apache-2.0"
] | null | null | null | bareon_fuel_extension/tests.py | gitfred/bareon-fuel-extension | 0074f187a6244e786b37e551009fa2eadcae1d3a | [
"Apache-2.0"
] | null | null | null | from nailgun.extensions import BaseExtension
class NoElo(BaseExtension):
    # Minimal Nailgun extension stub exercising extension declaration.
    name = 'noelo'
    description = 'no elo'
    version = '1.0.0'
def test_ext():
    # Smoke test: the extension class can be instantiated without error.
    NoElo()
| 14.416667 | 44 | 0.65896 |
eebab051d6bd2499eba549e8e5c3faefb5989879 | 1,404 | py | Python | email_utils/models/EmailMessage.py | E7ernal/quizwhiz | a271d40922eaad682a76d7700beafc7a5df51fac | [
"MIT"
] | null | null | null | email_utils/models/EmailMessage.py | E7ernal/quizwhiz | a271d40922eaad682a76d7700beafc7a5df51fac | [
"MIT"
] | 7 | 2020-02-12T00:31:35.000Z | 2022-03-11T23:19:21.000Z | email_utils/models/EmailMessage.py | E7ernal/quizwhiz | a271d40922eaad682a76d7700beafc7a5df51fac | [
"MIT"
] | null | null | null | # vim: ts=4:sw=4:expandtabs
__author__ = 'zach.mott@gmail.com'
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from email_utils.tasks import send_mail
RESEND_EMAIL_PERMISSION = 'can_resend_email'

@python_2_unicode_compatible
class EmailMessage(models.Model):
    """Stored record of an outbound email and its delivery outcome."""
    # Mirror the module constant on the class for convenient access.
    RESEND_EMAIL_PERMISSION = RESEND_EMAIL_PERMISSION

    # Envelope / content fields.
    to = models.CharField(max_length=256)
    from_address = models.CharField(max_length=256, verbose_name=_('From'))
    subject = models.CharField(max_length=256, blank=True)
    body = models.TextField()
    html_body = models.TextField(blank=True, verbose_name=_('HTML body'))
    # Delivery bookkeeping.
    date_sent = models.DateTimeField()
    delivery_successful = models.BooleanField()
    error_message = models.CharField(max_length=256, blank=True)

    class Meta:
        app_label = 'email_utils'
        verbose_name = _('Email message')
        verbose_name_plural = _('Email messages')
        permissions = [
            (RESEND_EMAIL_PERMISSION, _('Can resend email')),
        ]

    def __str__(self):
        return "{self.date_sent:%Y-%m-%d %H:%M:%S} - {self.subject}".format(self=self)

    def resend(self):
        """Re-send this stored message asynchronously via send_mail.apply_async."""
        send_mail.apply_async((
            self.subject,
            self.body,
            self.from_address,
            self.to
        ), {'html_message': self.html_body})
| 29.87234 | 86 | 0.688034 |
eebbbd0016582d70d21cbd69a90c5e0e380ce3d8 | 1,262 | py | Python | core/string_utils.py | phage-nz/observer | 2a2d9b5047c5b2aba0d102c0c21e97de472bbd39 | [
"Apache-2.0"
] | 2 | 2020-04-25T05:11:49.000Z | 2021-02-09T21:27:38.000Z | core/string_utils.py | phage-nz/observer | 2a2d9b5047c5b2aba0d102c0c21e97de472bbd39 | [
"Apache-2.0"
] | null | null | null | core/string_utils.py | phage-nz/observer | 2a2d9b5047c5b2aba0d102c0c21e97de472bbd39 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python3
from .log_utils import get_module_logger
from defang import defang
import random
import string
import urllib.parse
# Module-level logger shared by the helpers below.
logger = get_module_logger(__name__)
def random_string(length):
    """Return a random alphanumeric string of the given length.

    NOTE(review): this uses the non-cryptographic ``random`` module; switch
    to ``secrets`` if these strings must be unguessable.
    """
    alphabet = string.ascii_uppercase + string.ascii_lowercase + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
def double_url_encode(input):
    """Percent-encode *input* twice (e.g. '&' -> '%26' -> '%2526')."""
    encoded_once = urllib.parse.quote(input)
    return urllib.parse.quote(encoded_once)
def defang_url(input):
    """Return *input* defanged for safe display (via the ``defang`` package)."""
    return defang(input)
def get_host_from_url(url):
    """Extract the host name from *url*, or None when it has none.

    ``urlparse().hostname`` already excludes any port, so no manual
    "host:port" splitting is needed. The previous implementation split on
    ':' anyway, which corrupted IPv6 literals such as "::1" and raised
    TypeError when the URL had no host (hostname is None).
    """
    return urllib.parse.urlparse(url).hostname
def get_path_from_url(url):
    """Return the path component of *url*."""
    parsed = urllib.parse.urlparse(url)
    return parsed.path
def is_valid_url(input):
    """Return True when *input* parses with both a scheme and a netloc."""
    try:
        parsed = urllib.parse.urlparse(input)
    except Exception as e:
        logger.error('Error validating URL: {0}'.format(str(e)))
        return False
    # Both components must be non-empty for the URL to count as valid.
    return bool(parsed.scheme) and bool(parsed.netloc)
def clean_url(url):
    """Normalise a scraped URL: drop text after a doubled '?', a trailing
    '?', and any backtick characters. None passes through unchanged."""
    if url is None:
        return None
    # Everything after a doubled query marker is junk.
    if '??' in url:
        url = url.split('??', 1)[0]
    # A lone trailing '?' carries no query string.
    if url.endswith('?'):
        url = url[:-1]
    # replace() is a no-op when there is no backtick.
    return url.replace('`', '')
| 20.354839 | 64 | 0.639461 |
eebda7906979f96edc59138dae061c5c2c92e491 | 61 | py | Python | udp_decoders/__init__.py | ccgcyber/xpcap | a0e6fd1355fd0a9cbff4e074275b236ce8c6c3b8 | [
"MIT"
] | 5 | 2017-07-31T02:07:05.000Z | 2021-02-14T16:39:49.000Z | udp_decoders/__init__.py | ccgcyber/xpcap | a0e6fd1355fd0a9cbff4e074275b236ce8c6c3b8 | [
"MIT"
] | null | null | null | udp_decoders/__init__.py | ccgcyber/xpcap | a0e6fd1355fd0a9cbff4e074275b236ce8c6c3b8 | [
"MIT"
] | 4 | 2016-07-24T08:56:54.000Z | 2020-07-12T11:50:02.000Z | # empty file telling python that this directory is a package
| 30.5 | 60 | 0.803279 |
eebdcac25970fd8db9e1b4ca1a89af16a4e7a240 | 803 | py | Python | slushtools/string/__init__.py | ZackPaceCoder/slushtools | 32bfee028d30fd8fd88e332bdd744a71e51d6dcc | [
"MIT"
] | null | null | null | slushtools/string/__init__.py | ZackPaceCoder/slushtools | 32bfee028d30fd8fd88e332bdd744a71e51d6dcc | [
"MIT"
] | null | null | null | slushtools/string/__init__.py | ZackPaceCoder/slushtools | 32bfee028d30fd8fd88e332bdd744a71e51d6dcc | [
"MIT"
] | null | null | null | # Slush Tools STRING Module
class String:
bu = None
dat = None
def __init__(str):
if str == None:
print("String argument required.")
exit()
else:
dat = str
bu = str
return dat
def reset():
dat = bu
return dat
def format(type="custom",args={}):
if type == "custom":
for i,v in args:
x = dat.split("$" + i)
v.join(v)
dat = x
return dat
elif type == "py":
x = dat.format(*args)
dat = x
return dat
else:
print("Unknown format type.")
def append(str):
dat = dat + str
return dat
def endswith(str):
if dat[len(dat)-len(str):len(str)] == str:
return True
else:
return False
def simple(delimiter):
return dat.split(delimiter)
| 20.075 | 48 | 0.523039 |
eebe3ee2689c486643e9c66684f0834e67a050c1 | 2,001 | py | Python | lib/gams/general_utils.py | zzzace2000/nodegam | 79c8675e65d75237f2e853ae55bbc40ae7124ee9 | [
"MIT"
] | 7 | 2021-11-06T14:26:07.000Z | 2022-03-17T10:27:17.000Z | lib/gams/general_utils.py | zzzace2000/node | 4501233177173ee9b246a5a5e462afd3b1d51bbb | [
"MIT"
] | 1 | 2022-03-22T01:08:27.000Z | 2022-03-22T17:19:50.000Z | lib/gams/general_utils.py | zzzace2000/node | 4501233177173ee9b246a5a5e462afd3b1d51bbb | [
"MIT"
] | 1 | 2021-11-06T14:27:05.000Z | 2021-11-06T14:27:05.000Z | import time, os
import numpy as np
import json
class Timer:
    """Context manager that prints how long its ``with`` block took."""

    def __init__(self, name, remove_start_msg=True):
        # label shown in the start/finish messages
        self.name = name
        # when True the start message is overwritten via a carriage return
        self.remove_start_msg = remove_start_msg

    def __enter__(self):
        self.start_time = time.time()
        line_end = '\r' if self.remove_start_msg else '\n'
        print('Run "%s".........' % self.name, end=line_end)

    def __exit__(self, exc_type, exc_val, exc_tb):
        elapsed = float(time.time() - self.start_time)
        # seconds above 1s, milliseconds below
        if elapsed >= 1:
            time_str = '{:.1f}s'.format(elapsed)
        else:
            time_str = '{:.0f}ms'.format(elapsed * 1000)
        print('Finish "{}" in {}'.format(self.name, time_str))
def output_csv(the_path, data_dict, order=None, delimiter=','):
    """Append one row (data_dict) to a delimited text file, writing a header first.

    Args:
        the_path (str): output file; a '.tsv' extension forces tab delimiting.
        data_dict (dict): mapping of column name -> value for this row.
        order (list): optional keys to place first in the column order.
        delimiter (str): field separator (overridden to tab for .tsv paths).

    Raises:
        RuntimeError: if the existing file's header contains a key that
            data_dict does not provide.
    """
    if the_path.endswith('.tsv'):
        delimiter = '\t'
    is_file_exists = os.path.exists(the_path)
    with open(the_path, 'a+') as op:
        keys = list(data_dict.keys())
        if order is not None:
            # requested columns first, then any remaining keys
            keys = order + [k for k in keys if k not in order]
        col_title = delimiter.join([str(k) for k in keys])
        if not is_file_exists:
            print(col_title, file=op)
        else:
            # BUGFIX: read the existing header with a context manager so the
            # handle is closed (the original leaked an open file object)
            with open(the_path, 'r') as header_file:
                old_col_title = header_file.readline().strip()
            if col_title != old_col_title:
                # reconcile with the column order already on disk
                old_order = old_col_title.split(delimiter)
                additional_keys = [k for k in keys if k not in old_order]
                if len(additional_keys) > 0:
                    # NOTE: extra keys are appended to the row but the header
                    # on disk is not rewritten (pre-existing behaviour)
                    print('WARNING! The data_dict has following additional keys %s' % (str(additional_keys)))
                no_key = [k for k in old_order if k not in keys]
                if len(no_key) > 0:
                    raise(RuntimeError('The data_dict does not have the following old keys: %s' % str(no_key)))
                keys = old_order + additional_keys
        print(delimiter.join([str(data_dict[k]) for k in keys]), file=op)
def vector_in(vec, names):
    """Return a boolean mask that is True wherever ``vec`` equals one of ``names``."""
    # seed the mask with the first candidate, then OR in the rest
    mask = (vec == names[0])
    for candidate in names[1:]:
        mask |= (vec == candidate)
    return mask
| 35.105263 | 111 | 0.592704 |
eebf77f5393d40a51825e9d1d10647b08c84de24 | 140 | py | Python | practica3/pregunta8.py | Vanesamorales/practica-N-3-python | e87d4662b5df208cfbc3a15db23d324f46ad838e | [
"Apache-2.0"
] | null | null | null | practica3/pregunta8.py | Vanesamorales/practica-N-3-python | e87d4662b5df208cfbc3a15db23d324f46ad838e | [
"Apache-2.0"
] | null | null | null | practica3/pregunta8.py | Vanesamorales/practica-N-3-python | e87d4662b5df208cfbc3a15db23d324f46ad838e | [
"Apache-2.0"
] | null | null | null | import carpeta8
# main block: load the list, print it, sort it, then print it again
lista=carpeta8.cargar()  # load data via the project-local carpeta8 module
carpeta8.imprimir(lista)  # print the list before sorting
carpeta8.ordenar(lista)  # NOTE(review): appears to sort lista in place; confirm in carpeta8
carpeta8.imprimir(lista) | 20 | 25 | 0.785714 |
eebfbf0ca3fc84c6b27f16b71cc79b9f09285376 | 692 | py | Python | core/clean.py | Saij84/mediaRename | 984fbe47dfa27b8e229934e5b29c73dd0ab48c05 | [
"MIT"
] | null | null | null | core/clean.py | Saij84/mediaRename | 984fbe47dfa27b8e229934e5b29c73dd0ab48c05 | [
"MIT"
] | null | null | null | core/clean.py | Saij84/mediaRename | 984fbe47dfa27b8e229934e5b29c73dd0ab48c05 | [
"MIT"
] | null | null | null | import re
from mediaRename.constants import constants as CONST
def cleanReplace(data):
    """Run the configured regex clean-up passes over every file entry.

    Each entry of ``data["files"]`` that is a dict gets its ``"newName"``
    value rewritten in place by four case-insensitive substitution passes.

    :param data: dict with a "files" list of per-file dicts
    :return: None (entries are mutated in place)
    """
    entries = data["files"]
    # (pattern, replacement) pairs, applied in order
    passes = [(CONST.CLEAN_PASSONE, ""),
              (CONST.CLEAN_PASSTWO, ""),
              (CONST.CLEAN_PASSTHREE, ""),
              (CONST.CLEAN_REPLACE, "_")]
    for pattern, replacement in passes:
        regex = re.compile(pattern, re.IGNORECASE)
        for entry in entries:
            if isinstance(entry, dict):
                entry["newName"] = regex.sub(replacement, entry["newName"])
| 28.833333 | 77 | 0.619942 |
eec036acad92775b225df98eed2eda788c78e178 | 32,553 | py | Python | mindaffectBCI/decoder/utils.py | rohitvk1/pymindaffectBCI | 0348784d9b0fbd9d595e31ae46d2e74632399507 | [
"MIT"
] | 44 | 2020-02-07T15:01:47.000Z | 2022-03-21T14:36:15.000Z | mindaffectBCI/decoder/utils.py | CkiChen/pymindaffectBCI | 0119145a8b280c776f4c4e6cd776fed0f0156404 | [
"MIT"
] | 17 | 2020-02-07T17:11:23.000Z | 2022-02-20T18:01:42.000Z | mindaffectBCI/decoder/utils.py | CkiChen/pymindaffectBCI | 0119145a8b280c776f4c4e6cd776fed0f0156404 | [
"MIT"
] | 19 | 2020-02-07T17:13:22.000Z | 2022-03-17T01:22:35.000Z | # Copyright (c) 2019 MindAffect B.V.
# Author: Jason Farquhar <jason@mindaffect.nl>
# This file is part of pymindaffectBCI <https://github.com/mindaffect/pymindaffectBCI>.
#
# pymindaffectBCI is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pymindaffectBCI is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pymindaffectBCI. If not, see <http://www.gnu.org/licenses/>
import numpy as np
# time-series tests
def window_axis(a, winsz, axis=0, step=1, prependwindowdim=False):
    '''efficient view-based slicing of equal-sized equally-spaced windows along a selected axis of a numpy nd-array'''
    if axis < 0:  # no negative axis indices
        axis = a.ndim + axis
    # number of complete windows that fit along the axis
    nwin = int((a.shape[axis] - winsz) / step) + 1
    ax_stride = a.strides[axis]
    # the window/step pair replaces the original axis; order depends on
    # whether the window dimension goes before or after the step dimension
    if prependwindowdim:
        mid_shape = (winsz, nwin)
        mid_stride = (ax_stride, ax_stride * step)
    else:
        mid_shape = (nwin, winsz)
        mid_stride = (ax_stride * step, ax_stride)
    shape = a.shape[:axis] + mid_shape + a.shape[axis + 1:]
    strides = a.strides[:axis] + mid_stride + a.strides[axis + 1:]
    # zero-copy view with the computed geometry
    return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides)
def equals_subarray(a, pat, axis=-1, match=-1):
    '''efficiently find matches of a 1-d sub-array along axis within an nd-array'''
    if axis < 0:  # no negative dims
        axis = a.ndim + axis
    # coerce the pattern to a vector and reshape it so it broadcasts
    # against the windowed view of `a` along axis+1
    pat = np.asarray(pat)
    bshape = np.ones(a.ndim + 1, dtype=int)
    bshape[axis + 1] = pat.size
    pat = np.array(pat.ravel(), dtype=a.dtype).reshape(bshape)
    # slide a pattern-length window along the requested axis
    windows = window_axis(a, pat.size, axis=axis, step=1)
    hits = np.all(np.equal(windows, pat), axis=axis + 1)
    # pad back to the input length, placing the padding according to `match`
    padshape = list(a.shape)
    padshape[axis] = a.shape[axis] - hits.shape[axis]
    pad = np.zeros(padshape, dtype=hits.dtype)
    if match == -1:
        # flag the *end* of each match -> pad in front
        hits = np.append(pad, hits, axis)
    else:
        # flag the *start* of each match -> pad behind
        hits = np.append(hits, pad, axis)
    return hits
class RingBuffer:
    ''' time efficient linear ring-buffer for storing packed data, e.g. continguous np-arrays

    The backing store is twice the requested capacity so the valid region is
    always a single contiguous slice; on overflow the most recent entries are
    copied back into the first half instead of wrapping indices.
    '''
    def __init__(self, maxsize, shape, dtype=np.float32):
        # shape of a single stored element
        self.elementshape = shape
        # logical shape of the full buffer: (maxsize, *shape)
        self.bufshape = (int(maxsize), )+shape
        # 2x capacity so the valid window never has to wrap
        self.buffer = np.zeros((2*int(maxsize), np.prod(shape)), dtype=dtype) # store as 2d
        # position for the -1 element. N.B. start maxsize so pos-maxsize is always valid
        self.pos = int(maxsize)
        self.n = 0 # count of total number elements added to the buffer
        self.copypos = 0 # position of the last element copied to the 1st half
        self.copysize = 0 # number entries to copy as a block
    def clear(self):
        '''empty the ring-buffer and reset to empty'''
        self.pos=int(self.bufshape[0])
        self.n =0
        self.copypos=0
        self.copysize=0
    def append(self, x):
        '''add single element to the ring buffer'''
        # promote to a length-1 batch and reuse extend()
        return self.extend(x[np.newaxis, ...])
    def extend(self, x):
        '''add a group of elements to the ring buffer'''
        # TODO[] : incremental copy to the 1st half, to spread the copy cost?
        nx = x.shape[0]
        if self.pos+nx >= self.buffer.shape[0]:
            # out of room at the end: compact the most recent entries back
            # into the first half so the valid window stays contiguous
            flippos = self.buffer.shape[0]//2
            # flippos-nx to 1st half
            self.buffer[:(flippos-nx), :] = self.buffer[(self.pos-(flippos-nx)):self.pos, :]
            # move cursor to end 1st half
            self.pos = flippos-nx
        # insert in the buffer
        self.buffer[self.pos:self.pos+nx, :] = x.reshape((nx, self.buffer.shape[1]))
        # move the cursor
        self.pos = self.pos+nx
        # update the count
        self.n = self.n + nx
        return self
    @property
    def shape(self):
        # logical shape of the currently valid contents
        return (min(self.n,self.bufshape[0]),)+self.bufshape[1:]
    def unwrap(self):
        '''get a view on the valid portion of the ring buffer'''
        return self.buffer[self.pos-min(self.n,self.bufshape[0]):self.pos, :].reshape(self.shape)
    def __getitem__(self, item):
        # index into the valid portion only
        return self.unwrap()[item]
    def __iter__(self):
        # iterate the valid portion only
        return iter(self.unwrap())
def extract_ringbuffer_segment(rb, bgn_ts, end_ts=None):
    ''' extract the data between start/end time stamps, from time-stamps contained in the last channel of a nd matrix

    Args:
        rb (RingBuffer): buffer of (nsamp, nch+1) rows where the last column holds time-stamps
        bgn_ts (float): segment start time-stamp
        end_ts (float): segment end time-stamp; None means "until the newest sample"

    Returns:
        np.ndarray: copy of the rows whose time-stamps bracket [bgn_ts, end_ts]
    '''
    # get the data / msgs from the ringbuffers
    X = rb.unwrap() # (nsamp,nch+1)
    X_ts = X[:, -1] # last channel is timestamps
    # TODO: binary-search to make these searches more efficient!
    # search backwards for trial-start time-stamp
    # TODO[X] : use a bracketing test.. (better with wrap-arround)
    # first sample pair whose time-stamps straddle bgn_ts
    bgn_samp = np.flatnonzero(np.logical_and(X_ts[:-1] < bgn_ts, bgn_ts <= X_ts[1:]))
    # get the index of this timestamp, guarding for after last sample
    if len(bgn_samp) == 0 :
        # before the buffer start -> 0; after the end -> past-the-end index
        bgn_samp = 0 if bgn_ts <= X_ts[0] else len(X_ts)+1
    else:
        bgn_samp = bgn_samp[0]
    # and just to be sure the trial-end timestamp
    if end_ts is not None:
        end_samp = np.flatnonzero(np.logical_and(X_ts[:-1] < end_ts, end_ts <= X_ts[1:]))
        # get index of this timestamp, guarding for after last data sample
        end_samp = end_samp[-1] if len(end_samp) > 0 else len(X_ts)
    else: # until now
        end_samp = len(X_ts)
    # extract the trial data, and make copy (just to be sure)
    X = X[bgn_samp:end_samp+1, :].copy()
    return X
def unwrap(x, range=None):
    '''unwrap a list of numbers to correct for truncation due to limited bit-resolution, e.g. time-stamps stored in 24bit integers'''
    # default wrap point: next power of two above the largest value seen
    # (N.B. the parameter name shadows the builtin `range`; kept for API compatibility)
    if range is None:
        range = 1 << int(np.ceil(np.log2(max(x))))
    # a big negative jump between consecutive samples marks a wrap-around
    wrapped = np.diff(x) < -range / 2
    # add one full `range` per wrap, accumulated over the sequence
    correction = np.zeros(x.shape)
    correction[np.flatnonzero(wrapped) + 1] = range
    return x + np.cumsum(correction)
def unwrap_test():
    '''visual sanity check for unwrap(): wrap a ramp at 2**10 and plot the results'''
    x = np.cumsum(np.random.rand(6000,1))
    xw = x%(1<<10)  # simulate 10-bit truncation
    xuw = unwrap(x)
    # NOTE(review): this unwraps `x` (already un-wrapped), not `xw`;
    # presumably unwrap(xw) was intended here — confirm
    import matplotlib.pyplot as plt
    plt.plot(x,label='x')
    plt.plot(xw,label='x (wrapped)')
    plt.plot(xuw,label='x (unwrapped')
    plt.legend()
def search_directories_for_file(f, *args):
    """search a given set of directories for given filename, return 1st match

    Args:
        f (str): filename to search for (or a glob pattern)
        *args (): set of directory names to look in

    Returns:
        f (str): the *first* full path where f is found, or f (user-expanded) if not found.
    """
    import os
    import glob
    f = os.path.expanduser(f)
    # already resolvable as-is (exact path or glob pattern)?
    if os.path.exists(f) or glob.glob(f):
        return f
    # otherwise try each candidate directory in turn
    for directory in args:
        candidate = os.path.join(directory, f)
        if os.path.exists(candidate) or glob.glob(candidate):
            return candidate
    return f
# toy data generation
#@function
def randomSummaryStats(d=10, nE=2, tau=10, nY=1):
    """Generate purely random summary-statistic matrices for testing.

    Returns:
        tuple: (Cxx (d,d), Cxy (nY,nE,tau,d), Cyy (nY,nE,tau,nE,tau)) of gaussian noise.
    """
    import numpy as np
    # pure random test-case: every statistic is just standard-normal noise
    rand = np.random.standard_normal
    Cxx = rand((d, d))
    Cxy = rand((nY, nE, tau, d))
    Cyy = rand((nY, nE, tau, nE, tau))
    return (Cxx, Cxy, Cyy)
def testNoSignal(d=10, nE=2, nY=1, isi=5, tau=None, nSamp=10000, nTrl=1):
    """Build a pure-noise test problem with random stimulus events.

    Returns:
        tuple: (X (nTrl,nSamp,d) gaussian data with no embedded signal,
                Y (nTrl,nSamp,nY,nE) binary per-sample stimulus matrix,
                stimTimes_samp sample indices of the stimulus events)
    """
    if tau is None:
        tau = 10 * isi  # default response length: 10 inter-stimulus intervals
    # gaussian "EEG" with no signal at all
    X = np.random.standard_normal((nTrl, nSamp, d))
    # regular stimulus grid, leaving tau samples of headroom at the end
    stimTimes_samp = np.arange(0, X.shape[-2] - tau, isi)
    # ~16% of events active: threshold standard-normal draws at 1
    events = np.random.standard_normal((nTrl, len(stimTimes_samp), nY, nE)) > 1
    Y = np.zeros((nTrl, X.shape[-2], nY, nE))
    Y[:, stimTimes_samp, :, :] = events
    return (X, Y, stimTimes_samp)
def testSignal(nTrl=1, d=5, nE=2, nY=30, isi=5, tau=None, offset=0, nSamp=10000, stimthresh=.6, noise2signal=1, irf=None):
    #simple test problem, with overlapping response
    # Simulates EEG as (stimulus sequence) * (impulse response) * (spatial pattern) + noise.
    # Returns (X (nTrl,nSamp,d), Y (nTrl,nSamp,nY,nE), stimTimes_samp, A (nE,d), B (tau,)).
    import numpy as np
    if tau is None:
        # response length defaults to the irf length when one is given
        tau = 10 if irf is None else len(irf)
    nEp = int((nSamp-tau)/isi)
    cb = np.random.standard_normal((nEp, nY, nE)) > stimthresh # codebook = per-epoch stimulus activity
    E = cb # (nEp, nY, nE) # per-epoch stimulus activity
    # up-sample to sample rate
    stimTimes_samp = np.arange(0, nSamp-tau, isi) # (nEp)
    Y = np.zeros((nSamp, nY, E.shape[-1]))
    Y[stimTimes_samp, :, :] = E[:len(stimTimes_samp), :, :] #per-sample stimulus activity (nSamp, nY, nE) [nE x nY x nSamp]
    Y = np.tile(Y,(nTrl,1,1,1)) # replicate for the trials
    # generate the brain source
    A = np.random.standard_normal((nE, d)) # spatial-pattern for the source signal
    if irf is None:
        B = np.zeros((tau), dtype=np.float32)
        B[-3] = 1; # true response filter (shift by 10 samples)
    else:
        B = np.array(irf, dtype=np.float32)
    # the first output (index 0) is treated as the attended/true target
    Ytrue = Y[..., 0, :] # (nTrl, nSamp, nE)
    if True:
        # convolve with the impulse response - manually using window_axis
        # zero pad before for the sliding window
        Ys = np.zeros(Ytrue.shape[:-2]+(Ytrue.shape[-2]+tau-1,)+Ytrue.shape[-1:])
        Ys[..., tau-1+offset:Ytrue.shape[-2]+tau-1+offset, :] = Ytrue # zero-pad at front + include the offset.
        Yse = window_axis(Ys, winsz=len(B), axis=-2) # (nTr,nSamp,tau,nE)
        YtruecB = np.einsum("Tste,t->Tse", Yse, B[::-1]) # N.B. time-reverse irf (nTr,nSamp,nE)
    else:
        # use the np convolve function, N.B. implicitly time reverses B (like we want)
        YtruecB = np.array([np.convolve(Ytrue[:, ei], B, 'full') for ei in range(Ytrue.shape[-1])]).T #(nSamp+pad, nE) [nE x nSamp]
        YtruecB = YtruecB[:Ytrue.shape[0], :] # trim the padding
    #import matplotlib.pyplot as plt; plt.clf(); plt.plot(Ytrue[:100,0],'b*',label='Y'); plt.plot(YtruecB[:100,0],'g*',label='Y*B'); plt.plot(B,'k',label='B'); plt.legend()
    #print("Ytrue={}".format(Ytrue.shape))
    #print("YtruecB={}".format(YtruecB.shape))
    S = YtruecB # (nTr, nSamp, nE) true response, i.e. filtered Y
    N = np.random.standard_normal(S.shape[:-1]+(d,)) # EEG noise (nTr, nSamp, d)
    X = np.einsum("tse,ed->tsd", S, A) + noise2signal*N # simulated data.. true source mapped through spatial pattern (nSamp, d) #[d x nSamp]
    return (X, Y, stimTimes_samp, A, B)
def testtestSignal():
    '''visual check of testSignal(): three offset/irf combinations, noise-free'''
    import matplotlib.pyplot as plt
    plt.clf()
    # shift by 5
    offset=0; irf=(0,0,0,0,0,1,0,0,0,0)
    X,Y,st,W,R = testSignal(nTrl=1,nSamp=500,d=1,nE=1,nY=1,isi=10,tau=10,offset=offset,irf=irf,noise2signal=0)
    plt.subplot(311);plt.plot(X[0,:,0],label='X');plt.plot(Y[0,:,0,0],label='Y');plt.title("offset={}, irf={}".format(offset,irf));plt.legend()
    # back-shift-by-5 -> 0 shift
    offset=-5
    X,Y,st,W,R = testSignal(nTrl=1,nSamp=500,d=1,nE=1,nY=1,isi=10,tau=10,offset=offset,irf=(0,0,0,0,0,1,0,0,0,0),noise2signal=0)
    plt.subplot(312);plt.plot(X[0,:,0],label='X');plt.plot(Y[0,:,0,0],label='Y');plt.title("offset={}, irf={}".format(offset,irf));plt.legend()
    # back-shift-by-10 -> -5 shift
    offset=-9
    X,Y,st,W,R = testSignal(nTrl=1,nSamp=500,d=1,nE=1,nY=1,isi=10,tau=10,offset=offset,irf=(0,0,0,0,0,1,0,0,0,0),noise2signal=0)
    plt.subplot(313);plt.plot(X[0,:,0],label='X');plt.plot(Y[0,:,0,0],label='Y');plt.title("offset={}, irf={}".format(offset,irf));plt.legend()
def sliceData(X, stimTimes_samp, tau=10):
    """Cut tau-sample epochs out of X at each stimulus time.

    Returns Xe with shape (nTrl, nEp, tau, d).
    """
    # make a sliced version
    gaps = np.diff(stimTimes_samp)
    if np.all(gaps == gaps[0]) and stimTimes_samp[0] == 0:
        # evenly spaced from 0: a zero-copy strided view is cheapest
        Xe = window_axis(X, winsz=tau, axis=-2, step=int(gaps[0]), prependwindowdim=False)
    else:
        # general case: gather each epoch by explicit indexing
        Xe = np.zeros(X.shape[:-2] + (len(stimTimes_samp), tau, X.shape[-1]))
        for ep, start in enumerate(stimTimes_samp):
            sl = range(start, start + tau)
            Xe[:, ep, :, :] = X[:, sl, :] if X.ndim > 2 else X[sl, :]
    return Xe
def sliceY(Y, stimTimes_samp, featdim=True):
    '''
    Y = (nTrl, nSamp, nY, nE) if featdim=True
    OR
    Y=(nTrl, nSamp, nY) if featdim=False #(nE x nY x nSamp x nTrl)
    '''
    # pick out the stimulus-event samples along the time axis
    idx = np.array(stimTimes_samp, dtype=int)
    if featdim:
        # with a feature dim: 4-d is multi-trial, 3-d is a single trial
        return Y[:, idx, :, :] if Y.ndim > 3 else Y[idx, :, :]
    # without a feature dim: one fewer trailing axis
    return Y[:, idx, :] if Y.ndim > 2 else Y[idx, :]
def block_randomize(true_target, npermute, axis=-3, block_size=None):
    ''' make a block random permutaton of the input array
    Inputs:
      npermute: int - number permutations to make
      true_target: (..., nEp, nY, e): true target value for nTrl trials of length nEp flashes
      axis : int the axis along which to permute true_target'''
    if true_target.ndim < 3:
        raise ValueError("true target info must be at least 3d")
    # NOTE(review): the check accepts axis==-3 or axis==ndim-2 but the error
    # message says "axis=-2" — the intended axis convention should be confirmed
    if not (axis == -3 or axis == true_target.ndim-2):
        raise NotImplementedError("Only implementated for axis=-2 currently")
    # estimate the number of blocks to use
    if block_size is None:
        block_size = max(1, true_target.shape[axis]/2/npermute)
    nblk = int(np.ceil(true_target.shape[axis]/block_size))
    blk_lims = np.linspace(0, true_target.shape[axis], nblk, dtype=int)
    # convert to start/end index for each block
    blk_lims = [(blk_lims[i], blk_lims[i+1]) for i in range(len(blk_lims)-1)]
    # output has npermute pseudo-outputs in place of the nY dimension
    cb = np.zeros(true_target.shape[:axis+1] + (npermute, true_target.shape[-1]))
    for ti in range(cb.shape[axis+1]):
        for di, dest_blk in enumerate(blk_lims):
            # pick a random source output and a random source block
            yi = np.random.randint(true_target.shape[axis+1])
            si = np.random.randint(len(blk_lims))
            # ensure can't be the same block
            if si == di:
                si = si+1 if si < len(blk_lims)-1 else si-1
            src_blk = blk_lims[si]
            # guard for different lengths for source/dest blocks
            dest_len = dest_blk[1] - dest_blk[0]
            if dest_len > src_blk[1]-src_blk[0]:
                if src_blk[0]+dest_len < true_target.shape[axis]:
                    # enlarge the src
                    src_blk = (src_blk[0], src_blk[0]+dest_len)
                elif src_blk[1]-dest_len > 0:
                    src_blk = (src_blk[1]-dest_len, src_blk[1])
                else:
                    raise ValueError("can't fit source and dest")
            elif dest_len < src_blk[1]-src_blk[0]:
                # shrink the source block to the destination length
                src_blk = (src_blk[0], src_blk[0]+dest_len)
            # copy the chosen source block into the permutation's slot
            cb[..., dest_blk[0]:dest_blk[1], ti, :] = true_target[..., src_blk[0]:src_blk[1], yi, :]
    return cb
def upsample_codebook(trlen, cb, ep_idx, stim_dur_samp, offset_samp=(0, 0)):
    ''' upsample a codebook definition to sample rate
    Inputs:
      trlen : (int) length after up-sampling
      cb : (nTr, nEp, ...) the codebook
      ep_idx : (nTr, nEp) the indices of the codebook entries
      stim_dur_samp: (int) the amount of time the cb entry is held for
      offset_samp : (2,):int the offset for the stimulus in the upsampled trlen data
    Outputs:
      Y : ( nTrl, trlen, ...) the up-sampled codebook '''
    if ep_idx is not None:
        if not np.all(cb.shape[:ep_idx.ndim] == ep_idx.shape):
            raise ValueError("codebook and epoch indices must has same shape")
        trl_idx = ep_idx[:, 0] # start each trial
    else: # make dummy ep_idx with 0 for every trial!
        ep_idx = np.zeros((cb.shape[0],1),dtype=int)
        # NOTE(review): trl_idx here is (nTrl,1) rather than (nTrl,) as in the
        # other branch; it still works because each row coerces via int() below,
        # but ep_idx[:, 0] was presumably intended — confirm
        trl_idx = ep_idx
    Y = np.zeros((cb.shape[0], trlen)+ cb.shape[2:], dtype='float32') # (nTr, nSamp, ...)
    for ti, trl_start_idx in enumerate(trl_idx):
        for ei, epidx in enumerate(ep_idx[ti, :]):
            if ei > 0 and epidx == 0: # zero indicates end of variable length trials
                break
            # start index for this epoch in this *trial*, including the 0-offset
            ep_start_idx = -int(offset_samp[0])+int(epidx-trl_start_idx)
            # hold the codebook value for stim_dur_samp samples
            Y[ti, ep_start_idx:(ep_start_idx+int(stim_dur_samp)), ...] = cb[ti, ei, ...]
    return Y
def lab2ind(lab, lab2class=None):
    '''convert a list of labels (as integers) to a class indicator matrix'''
    if lab2class is None:
        # one class per distinct label value (N.B. set iteration order is unspecified)
        lab2class = [(l,) for l in set(lab)]
    lab = np.asarray(lab)
    # indicator matrix: one trailing column per class
    Y = np.zeros(lab.shape + (len(lab2class),), dtype=bool)
    for ci, members in enumerate(lab2class):
        # a class may be defined by several raw label values
        for member in members:
            Y[lab == member, ci] = True
    return (Y, lab2class)
def zero_outliers(X, Y, badEpThresh=4, badEpChThresh=None, verbosity=0):
    '''identify and zero-out bad/outlying data
    Inputs:
      X = (nTrl, nSamp, d)
      Y = (nTrl, nSamp, nY, nE) OR (nTrl, nSamp, nE)
      nE=#event-types nY=#possible-outputs nEpoch=#stimulus events to process
    Returns:
      (X, Y): copies with outlying epochs/channels zeroed (inputs untouched)
    '''
    # remove whole bad epochs first
    if badEpThresh > 0:
        bad_ep, _ = idOutliers(X, badEpThresh, axis=(-2, -1)) # ave over time,ch
        if np.any(bad_ep):
            if verbosity > 0:
                print("{} badEp".format(np.sum(bad_ep.ravel())))
            # copy X,Y so don't modify in place!
            X = X.copy()
            Y = Y.copy()
            X[bad_ep[..., 0, 0], ...] = 0
            #print("Y={}, Ybad={}".format(Y.shape, Y[bad_ep[..., 0, 0], ...].shape))
            # zero out Y also, so don't try to 'fit' the bad zeroed data
            Y[bad_ep[..., 0, 0], ...] = 0
    # Remove bad individual channels next
    if badEpChThresh is None: badEpChThresh = badEpThresh*2  # laxer per-channel threshold
    if badEpChThresh > 0:
        bad_epch, _ = idOutliers(X, badEpChThresh, axis=-2) # ave over time
        if np.any(bad_epch):
            if verbosity > 0:
                print("{} badEpCh".format(np.sum(bad_epch.ravel())))
            # make index expression to zero out the bad entries
            badidx = list(np.nonzero(bad_epch)) # convert to linear indices
            badidx[-2] = slice(X.shape[-2]) # broadcast over the accumulated dimensions
            if not np.any(bad_ep): # copy so don't update in place
                X = X.copy()
            X[tuple(badidx)] = 0
    return (X, Y)
def idOutliers(X, thresh=4, axis=-2, verbosity=0):
    '''identify outliers with excessively high power in the input data
    Inputs:
      X:float the data to identify outliers in
      axis:int (-2) axis of X to sum to get power
      thresh(float): threshold standard deviation for outlier detection
      verbosity(int): verbosity level
    Returns:
      badEp:bool (X.shape axis==1) indicator for outlying elements
      epPower:float (X.shape axis==1) power used to identify bad
    '''
    # total amplitude summed over the given axis (keepdims so masks broadcast)
    power = np.sqrt(np.sum(X ** 2, axis=axis, keepdims=True))
    keep = np.ones(power.shape, dtype=bool)
    # iteratively re-estimate mean/std over the kept entries and re-threshold,
    # so a single huge outlier cannot mask smaller ones
    for _ in range(4):
        mu = np.mean(power[keep])
        sigma = np.sqrt(np.mean((power[keep] - mu) ** 2))
        keep[power > mu + thresh * sigma] = False
    keep = keep.reshape(power.shape)  # (nTrl, nEp)
    bad = ~keep
    if verbosity > 1:
        print("%d bad" % (np.sum(bad.ravel())))
    return (bad, power)
def robust_mean(X, thresh=(3, 3)):
    """Compute robust mean of values in X, using gaussian outlier criteria

    Args:
        X (np.ndarray): the data
        thresh (2,): (upper, lower) outlier thresholds in standard deviations;
            either entry may be None to disable that side.

    Returns:
        mu (float): the robust mean over the non-outlying entries
        good (np.ndarray bool): mask of the 'good' (kept) entries of X
    """
    good = np.ones(X.shape, dtype=bool)
    for _ in range(4):
        mu = np.mean(X[good])
        sigma = np.sqrt(np.mean((X[good] - mu) ** 2))
        # re-compute the outlier list from scratch each iteration
        good[:] = True
        if thresh[0] is not None:
            # upper bound: too far above the current mean
            good[X > mu + thresh[0] * sigma] = False
        if thresh[1] is not None:
            # lower bound: too far below the current mean
            # BUGFIX: previously used thresh[0] here, which crashed when
            # thresh[0] was None and ignored an asymmetric lower threshold
            good[X < mu - thresh[1] * sigma] = False
    mu = np.mean(X[good])
    return (mu, good)
# Prefer scipy's compiled filter routines; fall back to the pure-python
# implementations defined later in this file if scipy is not installed.
try:
    from scipy.signal import butter, bessel, sosfilt, sosfilt_zi
except:
#if True:
    # use the pure-python fallbacks
    # N.B. the fallback signatures are positional-only subsets of scipy's
    def sosfilt(sos,X,axis,zi):
        return sosfilt_2d_py(sos,X,axis=axis,zi=zi)
    def sosfilt_zi(sos):
        return sosfilt_zi_py(sos)
    # NOTE(review): butter_py is commented out at the bottom of this file, so
    # this fallback raises NameError if it is ever called — confirm intent
    def butter(order,freq,btype,output):
        return butter_py(order,freq,btype,output)
def sosfilt_zi_warmup(zi, X, axis=-1, sos=None):
    '''Use some initial data to "warmup" a second-order-sections filter to reduce startup artifacts.

    Args:
        zi (np.ndarray): the sos filter, state
        X ([type]): the warmup data
        axis (int, optional): The filter axis in X. Defaults to -1.
        sos ([type], optional): the sos filter coefficients. Defaults to None.

    Returns:
        [np.ndarray]: the warmed up filter coefficients
    '''
    if axis < 0: # no neg axis
        axis = X.ndim+axis
    # zi => (order,...,2,...)
    # broadcast the (order,2) state over all non-filter axes of X
    zi = np.reshape(zi, (zi.shape[0],) + (1,)*(axis) + (zi.shape[1],) + (1,)*(X.ndim-axis-1))
    # make a programattic index expression to support arbitary axis
    idx = [slice(None)]*X.ndim
    # get the index to start the warmup
    warmupidx = 0 if sos is None else min(sos.size*3,X.shape[axis]-1)
    # center on 1st warmup value
    idx[axis] = slice(warmupidx,warmupidx+1)
    zi = zi * X[tuple(idx)]
    # run the filter on the rest of the warmup values
    # (time-reversed slice: filter backwards from the warmup point)
    if not sos is None and warmupidx>3:
        idx[axis] = slice(warmupidx,1,-1)
        _, zi = sosfilt(sos, X[tuple(idx)], axis=axis, zi=zi)
    return zi
def iir_sosfilt_sos(stopband, fs, order=4, ftype='butter', passband=None, verb=0):
    ''' given a set of filter cutoffs return butterworth or bessel sos coefficients

    NOTE(review): the `passband` argument is accepted but never used.
    Each band spec is (low, high[, btype]); negative frequencies count back
    from the Nyquist rate.
    '''
    # convert to normalized frequency, Note: not to close to 0/1
    if stopband is None:
        return np.array(())
    if not hasattr(stopband[0],'__iter__'):
        stopband=(stopband,)  # promote a single band spec to a list of bands
    sos=[]
    for sb in stopband:
        btype = None
        if type(sb[-1]) is str:
            # explicit band type given as the last element
            btype = sb[-1]
            sb = sb[:-1]
        # convert to normalize frequency
        sb = np.array(sb,dtype=np.float32)
        sb[sb<0] = (fs/2)+sb[sb<0]+1 # neg freq count back from nyquist
        Wn = sb/(fs/2)
        if Wn[1] < .0001 or .9999 < Wn[0]: # no filter
            continue
        # identify type from frequencies used, cliping if end of frequency range
        if Wn[0] < .0001:
            Wn = Wn[1]
            btype = 'highpass' if btype is None or btype == 'bandstop' else 'lowpass'
        elif .9999 < Wn[1]:
            Wn = Wn[0]
            btype = 'lowpass' if btype is None or btype == 'bandstop' else 'highpass'
        elif btype is None: # .001 < Wn[0] and Wn[1] < .999:
            btype = 'bandstop'
        if verb>0: print("{}={}={}".format(btype,sb,Wn))
        if ftype == 'butter':
            sosi = butter(order, Wn, btype=btype, output='sos')
        elif ftype == 'bessel':
            sosi = bessel(order, Wn, btype=btype, output='sos', norm='phase')
        else:
            raise ValueError("Unrecognised filter type")
        sos.append(sosi)
    # single big filter cascade
    # NOTE(review): if every band was skipped, np.concatenate on an empty
    # list raises ValueError — confirm whether that is intended
    sos = np.concatenate(sos,axis=0)
    return sos
def butter_sosfilt(X, stopband, fs:float, order:int=6, axis:int=-2, zi=None, verb=True, ftype='butter'):
    """use a (cascade of) butterworth SOS filter(s) filter X along axis

    Args:
        X (np.ndarray): the data to be filtered
        stopband ([type]): the filter band specifications in Hz, as a list of lists of stopbands (given as (low-pass,high-pass)) or pass bands (given as (low-cut,high-cut,'bandpass'))
        fs (float): the sampling rate of X
        order (int, optional): the desired filter order. Defaults to 6.
        axis (int, optional): the axis of X to filter along. Defaults to -2.
        zi ([type], optional): the internal filter state -- propogate between calls for incremental filtering. Defaults to None.
        verb (bool, optional): Verbosity level for logging. Defaults to True.
        ftype (str, optional): The type of filter to make, one-of: 'butter', 'bessel'. Defaults to 'butter'.

    Returns:
        X [np.ndarray]: the filtered version of X
        sos (np.ndarray): the designed filter coefficients
        zi (np.ndarray): the filter state for propogation between calls
    """ ''' '''
    if stopband is None: # deal with no filter case
        return (X,None,None)
    if axis < 0: # no neg axis
        axis = X.ndim+axis
    # TODO []: auto-order determination?
    sos = iir_sosfilt_sos(stopband, fs, order, ftype=ftype)
    sos = sos.astype(X.dtype) # keep as single precision
    # warm up the filter state on the start of the data to suppress transients
    if axis == X.ndim-2 and zi is None:
        zi = sosfilt_zi(sos) # (order,2)
        zi = zi.astype(X.dtype)
        zi = sosfilt_zi_warmup(zi, X, axis, sos)
    else:
        zi = None
        print("Warning: not warming up...")
    # Apply the warmed up filter to the input data
    #print("zi={}".format(zi.shape))
    if not zi is None:
        #print("filt:zi X{} axis={}".format(X.shape,axis))
        X, zi = sosfilt(sos, X, axis=axis, zi=zi)
    else:
        print("filt:no-zi")
        X = sosfilt(sos, X, axis=axis) # zi=zi)
    # return filtered data, filter-coefficients, filter-state
    return (X, sos, zi)
def save_butter_sosfilt_coeff(filename=None, stopband=((45,65),(5.5,25,'bandpass')), fs=200, order=6, ftype='butter'):
    '''design a butterworth sos filter cascade and save the coefficients

    Args:
        filename (str): output pickle path; auto-generated from the design
            parameters when None.
        stopband: filter band specification(s), as accepted by iir_sosfilt_sos.
        fs (float): sampling rate in Hz the design targets.
        order (int): filter order.
        ftype (str): 'butter' or 'bessel'.

    The sos coefficients and the initial filter state zi are pickled, in
    that order, into the output file.
    '''
    import pickle
    sos = iir_sosfilt_sos(stopband, fs, order, passband=None, ftype=ftype)
    zi = sosfilt_zi(sos)
    if filename is None:
        # auto-generate descriptive filename
        filename = "{}_stopband{}_fs{}.pk".format(ftype, stopband, fs)
    print("Saving to: {}\n".format(filename))
    # BUGFIX: dropped the redundant f.close() — the with-block closes the file
    with open(filename, 'wb') as f:
        pickle.dump(sos, f)
        pickle.dump(zi, f)
def test_butter_sosfilt():
    '''visual + incremental-application check of butter_sosfilt on a random walk'''
    fs= 100
    X = np.random.randn(fs*10,2)
    X = np.cumsum(X,0)  # random walk: strong low-frequency content
    X = X + np.random.randn(1,X.shape[1])*100 # include start shift
    import matplotlib.pyplot as plt
    plt.clf();plt.subplot(511);plt.plot(X);
    # several band specifications to exercise the parser
    pbs=(((0,1),(40,-1)),(10,-1),((5,10),(15,20),(45,50)))
    for i,pb in enumerate(pbs):
        Xf,_,_ = butter_sosfilt(X,pb,fs)
        plt.subplot(5,1,i+2);plt.plot(Xf);plt.title("{}".format(pb))
    # test incremental application
    pb=pbs[0]
    sos=None
    zi =None
    Xf=[]
    for i in range(0,X.shape[0],fs):
        if sos is None:
            # init filter and do 1st block
            Xfi,sos,zi = butter_sosfilt(X[i:i+fs,:],pb,fs,axis=-2)
        else: # incremenally apply
            Xfi,zi = sosfilt(sos,X[i:i+fs,:],axis=-2,zi=zi)
        Xf.append(Xfi)
    Xf = np.concatenate(Xf,axis=0)
    plt.subplot(5,1,5);plt.plot(Xf);plt.title("{} - incremental".format(pb))
    plt.show()
    # test diff specifications
    pb = ((0,1),(40,-1)) # pair stops
    Xf0,_,_ = butter_sosfilt(X,pb,fs,axis=-2)
    plt.subplot(3,1,1);plt.plot(Xf0);plt.title("{}".format(pb))
    pb = (1,40,'bandpass') # single pass
    Xfi,_,_ = butter_sosfilt(X,pb,fs,axis=-2)
    plt.subplot(3,1,2);plt.plot(Xfi);plt.title("{}".format(pb))
    pb = (1,40,'bandpass') # single pass
    Xfi,_,_ = butter_sosfilt(X,pb,fs,axis=-2,ftype='bessel')
    plt.subplot(3,1,3);plt.plot(Xfi);plt.title("{} - bessel".format(pb))
    plt.show()
# TODO[] : cythonize?
# TODO[X] : vectorize over d? ---- NO. 2.5x *slower*
def sosfilt_2d_py(sos,X,axis=-2,zi=None):
    ''' pure python fallback for second-order-sections filter in case scipy isn't available

    Filters X in place along its first axis using a direct-form-II-transposed
    cascade. N.B. both X (the data) and sos (via normalisation) are mutated.
    Matches scipy.signal.sosfilt's convention of returning (X, zi) only when
    a zi was supplied.
    '''
    X = np.asarray(X)
    sos = np.asarray(sos)
    if zi is None:
        returnzi = False
        zi = np.zeros((sos.shape[0],2,X.shape[-1]),dtype=X.dtype)
    else:
        returnzi = True
        zi = np.asarray(zi)
    Xshape = X.shape
    if not X.ndim == 2:
        # flatten leading dims; restored before returning
        print("Warning: X>2d.... treating as 2d...")
        X = X.reshape((-1,Xshape[-1]))
    if axis < 0:
        axis = X.ndim + axis
    if not axis == X.ndim-2:
        raise ValueError("Only for time in dim 0/-2")
    if sos.ndim != 2 or sos.shape[1] != 6:
        raise ValueError('sos must be shape (n_sections, 6)')
    if zi.ndim != 3 or zi.shape[1] != 2 or zi.shape[2] != X.shape[1]:
        raise ValueError('zi must be shape (n_sections, 2, dim)')
    # pre-normalize sos if needed
    # NOTE(review): this writes back into the caller's sos array
    for j in range(sos.shape[0]):
        if sos[j,3] != 1.0:
            sos[j,:] = sos[j,:]/sos[j,3]
    n_signals = X.shape[1]
    n_samples = X.shape[0]
    n_sections = sos.shape[0]
    # extract the a/b
    b = sos[:,:3]
    a = sos[:,4:]
    # loop over outputs
    x_n = 0
    for i in range(n_signals):
        for n in range(n_samples):
            for s in range(n_sections):
                x_n = X[n, i]
                # use direct II transposed structure
                X[n, i] = b[s, 0] * x_n + zi[s, 0, i]
                zi[s, 0, i] = b[s, 1] * x_n - a[s, 0] * X[n, i] + zi[s, 1, i]
                zi[s, 1, i] = b[s, 2] * x_n - a[s, 1] * X[n, i]
    # back to input shape
    if not len(Xshape) == 2:
        X = X.reshape(Xshape)
    # match sosfilt, only return zi if given zi
    if returnzi :
        return X, zi
    else:
        return X
def sosfilt_zi_py(sos):
    '''compute an initial state for a second-order section filter

    Pure-python equivalent of scipy.signal.sosfilt_zi: for each 2nd-order
    section solve zi = A*zi + B (the steady-state of the DF-II-transposed
    structure for a unit-step input), cascading the step-response gain.

    Args:
        sos (np.ndarray): (n_sections, 6) second-order-section coefficients.

    Returns:
        np.ndarray: (n_sections, 2) initial filter state.
    '''
    sos = np.asarray(sos)
    if sos.ndim != 2 or sos.shape[1] != 6:
        raise ValueError('sos must be shape (n_sections, 6)')
    n_sections = sos.shape[0]
    zi = np.empty((n_sections, 2))
    scale = 1.0
    for section in range(n_sections):
        b = sos[section, :3]
        a = sos[section, 3:]
        if a[0] != 1.0:
            # Normalize the coefficients so a[0] == 1.
            b = b / a[0]
            a = a / a[0]
        # BUGFIX: np.linalg has no `companion`, and the identity must be 2x2
        # (one per section order), not (n_sections-1).  Build the 2x2
        # companion matrix of the monic denominator [1, a1, a2] directly.
        A = np.array([[-a[1], -a[2]],
                      [1.0, 0.0]])
        IminusA = np.eye(2) - A.T
        B = b[1:] - a[1:] * b[0]
        # Solve zi = A*zi + B for this section's steady state
        lfilter_zi = np.linalg.solve(IminusA, B)
        zi[section] = scale * lfilter_zi
        # cascade the DC (step) gain into the next section's state
        scale *= b.sum() / a.sum()
    return zi
def test_sosfilt_py():
    '''visually compare the pure-python sosfilt fallback against scipy's

    NOTE(review): depends on a pre-computed coefficient pickle being present
    in the working directory (see save_butter_sosfilt_coeff).
    '''
    import pickle
    with open('butter_stopband((0, 5), (25, -1))_fs200.pk','rb') as f:
        sos = pickle.load(f)
        zi = pickle.load(f)
    X = np.random.randn(10000,3)
    print("X={} sos={}".format(X.shape,sos.shape))
    # run both implementations on identical copies of the data
    Xsci = sosfilt(sos,X.copy(),-2)
    Xpy = sosfilt_2d_py(sos,X.copy(),-2)
    import matplotlib.pyplot as plt
    plt.clf()
    plt.subplot(411);plt.plot(X[:500,:]);plt.title('X')
    plt.subplot(412);plt.plot(Xsci[:500,:]);plt.title('Xscipy')
    plt.subplot(413);plt.plot(Xpy[:500,:]);plt.title('Xpy')
    plt.subplot(414);plt.plot(Xsci-Xpy);plt.title('Xsci - Xpy')
# def butter_py(order,fc,fs,btype,output):
# ''' pure python butterworth filter synthesis '''
# if fc>=fs/2:
# error('fc must be less than fs/2')
# # I. Find poles of analog filter
# k= np.arange(order)
# theta= (2*k -1)*np.pi/(2*order);
# pa= -sin(theta) + j*cos(theta); # poles of filter with cutoff = 1 rad/s
# #
# # II. scale poles in frequency
# Fc= fs/np.pi * tan(np.pi*fc/fs); # continuous pre-warped frequency
# pa= pa*2*np.pi*Fc; # scale poles by 2*pi*Fc
# #
# # III. Find coeffs of digital filter
# # poles and zeros in the z plane
# p= (1 + pa/(2*fs))/(1 - pa/(2*fs)) # poles by bilinear transform
# q= -np.ones((1,N)); # zeros
# #
# # convert poles and zeros to polynomial coeffs
# a= poly(p); # convert poles to polynomial coeffs a
# a= real(a);
# b= poly(q); # convert zeros to polynomial coeffs b
# K= sum(a)/sum(b); # amplitude scale factor
# b= K*b;
if __name__=='__main__':
    # default action: design the standard filter cascade and pickle its coefficients
    save_butter_sosfilt_coeff("sos_filter_coeff.pk")
    #test_butter_sosfilt()
| 39.458182 | 183 | 0.595183 |
eec107c75238eeb480e6c150f395182753824077 | 155 | py | Python | Tasks/task_7.py | madhubmvs/python-self-teaching | adce7a18553fc13a96d0319fdeb5ce9894ec74fc | [
"MIT"
] | null | null | null | Tasks/task_7.py | madhubmvs/python-self-teaching | adce7a18553fc13a96d0319fdeb5ce9894ec74fc | [
"MIT"
] | null | null | null | Tasks/task_7.py | madhubmvs/python-self-teaching | adce7a18553fc13a96d0319fdeb5ce9894ec74fc | [
"MIT"
] | null | null | null | a = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
b = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
c = []
for x in a:
if x in b:
c.append(x)
print(c) | 17.222222 | 47 | 0.406452 |
eec118b9402f1ab3d9a333bb53d8180c1858ff75 | 2,100 | py | Python | model/test.py | yacoubb/lang-classifier | d39a342cf8ad64b191ea235f9af3f833033f254a | [
"MIT"
] | 1 | 2019-07-03T11:28:55.000Z | 2019-07-03T11:28:55.000Z | model/test.py | yacoubb/lang-classifier | d39a342cf8ad64b191ea235f9af3f833033f254a | [
"MIT"
] | null | null | null | model/test.py | yacoubb/lang-classifier | d39a342cf8ad64b191ea235f9af3f833033f254a | [
"MIT"
] | null | null | null | from tensorflow import keras
import os
import numpy as np
import sys
import json
sys.path.append("/".join(os.path.abspath(__file__).split("/")[:-2]))
from model.dataset import utils, test_sampler
def estimate_model_accuracy(model):
def predict(word):
word = utils.total_conversion(word)
word = word[: utils.max_word_length]
vector_word = utils.vectorize_word_2d(word)
vector_word = np.array([vector_word])
result = model.predict(vector_word)
return utils.vector_to_language(result, languages)
languages = []
with open("./RMS_model/metadata.json", "r") as metadata_file:
metadata = json.load(metadata_file)
languages = metadata["languages"]
print("starting sampler worker...")
test_sampler.get_sample(1000, languages)
test_words = {}
with open("./dataset/test_words.json", "r") as test_word_file:
test_words = json.load(test_word_file)
print("=" * 20 + " doing predictions " + "=" * 20)
results = []
word_predictions = []
for key in test_words:
print(key)
correct = 0.0
total = 0.0
for word in test_words[key]:
total += 1.0
prediction = predict(word)
word_predictions.append((word, prediction))
if predict(word) == key:
correct += 1.0
results.append((key, correct * 100.0 / total))
from tabulate import tabulate
summary = ""
summary += tabulate(results, headers=["language", "accuracy"])
summary += "\n"
summary += "overall accuracy: {:2f}".format(
sum(map(lambda x: x[1], results))
/ len(list(filter(lambda x: x[1] > 0, results)))
)
summary += "\n"
return summary, word_predictions
summary, all_predictions = estimate_model_accuracy(
keras.models.load_model("./RMS_model/model.h5")
)
print(summary)
with open("./RMS_model/testing.txt", "w+") as test_file:
test_file.write(summary)
test_file.write("=" * 20 + "\n")
for word, pred in all_predictions:
test_file.write(word + ", " + pred + "\n")
| 28.378378 | 68 | 0.623333 |
eec16f1e4b653abf2db741d973b4bf4d50090976 | 927 | py | Python | codes/Layer/Layer.py | serenaklm/rumor_detection | 8f4822951db111cc2e21f9a2901872c9681a2cbb | [
"MIT"
] | 42 | 2020-03-24T03:09:19.000Z | 2022-02-15T14:13:13.000Z | codes/Layer/Layer.py | serenaklm/rumor_detection | 8f4822951db111cc2e21f9a2901872c9681a2cbb | [
"MIT"
] | 3 | 2020-08-18T13:15:20.000Z | 2021-06-15T12:17:08.000Z | codes/Layer/Layer.py | serenaklm/rumor_detection | 8f4822951db111cc2e21f9a2901872c9681a2cbb | [
"MIT"
] | 15 | 2020-03-22T23:48:02.000Z | 2022-03-14T23:53:42.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
import os
import numpy as np
from Layer import FeedForwardNetwork
from Layer import MultiHeadAttention
__author__ = "Serena Khoo"
class Layer(nn.Module):
def __init__(self, config, d_model, n_head):
super(Layer,self).__init__()
self.config = config
self.d_model = d_model
self.n_head = n_head
self.attn_network = MultiHeadAttention.MultiHeadAttention(config, d_model, n_head)
self.ffn = FeedForwardNetwork.FeedForwardNetwork(config)
def forward(self, query, key, val, key_structure = None, val_structure = None, attention_mask = None):
self_atten_features, atten_values = self.attn_network(query, key, val, key_structure = key_structure, val_structure = val_structure, attention_mask = attention_mask)
enc_output = self.ffn(self_atten_features)
del self_atten_features
torch.cuda.empty_cache()
return enc_output, atten_values | 28.96875 | 167 | 0.785329 |
eec22817edf6f5ff4caafda2c75d1273cb9edbb8 | 2,102 | py | Python | crawler/crawler2.py | labcontext/image-inpainting-oldpaper | da4683a2c58d662e443ea24ab93fd9d8fcb96bda | [
"Apache-2.0"
] | null | null | null | crawler/crawler2.py | labcontext/image-inpainting-oldpaper | da4683a2c58d662e443ea24ab93fd9d8fcb96bda | [
"Apache-2.0"
] | 3 | 2021-03-19T11:16:57.000Z | 2022-01-13T02:18:17.000Z | crawler/crawler2.py | labcontext/image-inpainting-oldpaper | da4683a2c58d662e443ea24ab93fd9d8fcb96bda | [
"Apache-2.0"
] | null | null | null | import requests
import urllib.request
import os
import pickle
import argparse
# file read folder
path = 'http://db.itkc.or.kr//data/imagedb/BOOK/ITKC_{0}/ITKC_{0}_{1}A/ITKC_{0}_{1}A_{2}{5}_{3}{4}.JPG'
# Manual
label = ['BT', 'MO']
middle = 1400
last = ['A', 'V'] # A ~400 V ~009
num = 10
num1 = 400
fin = ['A', 'B', 'H', 'L']
# file path, save path
# pad for number
def pad(num, width):
return '%0{}d'.format(width) % num
def save_picture(file_name, save_dir):
return urllib.request.urlretrieve(file_name, save_dir)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--label', default='BT', type=str, help='BT, MO')
parser.add_argument('-f', '--fin', default='A', type=str, help='A,B,H,L')
opt = parser.parse_args()
# make directory
if not os.path.exists('oldDB'):
os.mkdir('oldDB')
if opt.label == 'BT':
for i in range(0, middle+1):
for k in range(num + 1):
for j in range(num1 + 1):
try:
p = path.format(opt.label, pad(i, 4),
'V', pad(j, 3), opt.fin, pad(k, 3))
print(p)
save_picture(p, './oldDB/{0}_{1}_{2}_{3}_{4}.jpg'.format(
opt.label, i, 'V', j, opt.fin))
except Exception as e:
print(str(e))
continue
elif opt.label == 'MO':
for i in range(0, middle+1):
for k in range(num1 + 1):
for j in range(num1 + 1):
try:
p = path.format(opt.label, pad(i, 4),
'A', pad(j, 3), opt.fin, pad(k, 3))
print(p)
save_picture(p, './oldDB/{0}_{1}_{2}_{3}_{4}.jpg'.format(
opt.label, i, 'A', j, opt.fin))
except Exception as e:
print(str(e))
continue
if __name__ == '__main__':
main()
| 26.275 | 103 | 0.460038 |
eec2dfa96c82d004b2ff333de47a8fe7f395770a | 2,646 | py | Python | src/spade/symbols/symbol.py | ArvinSKushwaha/SPADE | b9a0f7698606a698fbc5a44e3dd36cb40186bda3 | [
"MIT"
] | null | null | null | src/spade/symbols/symbol.py | ArvinSKushwaha/SPADE | b9a0f7698606a698fbc5a44e3dd36cb40186bda3 | [
"MIT"
] | null | null | null | src/spade/symbols/symbol.py | ArvinSKushwaha/SPADE | b9a0f7698606a698fbc5a44e3dd36cb40186bda3 | [
"MIT"
] | null | null | null | """This module holds the Symbol, ComputationalGraph, and ComputationalGraphNode classes and methods to help construct
a computational graph."""
from typing import Optional
from .operators import Add, Subtract, Multiply, Divide, Grad, Div, Curl, Laplacian
class Symbol:
"""The Symbol class is the superclass representing all components of differential equation. Superclasses
VectorField, ScalarField, Function, Constant, Operator"""
def __init__(self, name: str):
self.name = name
def __repr__(self):
return f'{self.name} <{type(self).__name__}>'
def __str__(self):
return self.name
class ComputationalGraphNode:
"""The ComputationalGraphNode class is a wrapper around the Symbol class that provides Graph functionality
for usage within the ComputationalGraph class"""
def __init__(
self, symbol: Symbol,
parent: 'ComputationalGraphNode' = None,
children: list['ComputationalGraphNode'] = None
):
self.symbol = symbol
self.parent = parent
self.children = children
def __add__(self, other: 'ComputationalGraphNode') -> 'ComputationalGraphNode':
return ComputationalGraphNode(Add(), children=[self, other])
def __sub__(self, other: 'ComputationalGraphNode') -> 'ComputationalGraphNode':
return ComputationalGraphNode(Subtract(), children=[self, other])
def __mul__(self, other: 'ComputationalGraphNode') -> 'ComputationalGraphNode':
return ComputationalGraphNode(Multiply(), children=[self, other])
def __truediv__(self, other: 'ComputationalGraphNode') -> 'ComputationalGraphNode':
return ComputationalGraphNode(Divide(), children=[self, other])
def gradient(self) -> 'ComputationalGraphNode':
return ComputationalGraphNode(Grad(), children=[self])
def divergence(self) -> 'ComputationalGraphNode':
return ComputationalGraphNode(Div(), children=[self])
def curl(self) -> 'ComputationalGraphNode':
return ComputationalGraphNode(Curl(), children=[self])
def laplacian(self) -> 'ComputationalGraphNode':
return ComputationalGraphNode(Laplacian(), children=[self])
class ComputationalGraph:
"""The ComputationalGraph class stores the context and computational relations between all information in a set
of coupled differential equations (of which, each component is stored in child classes of the Symbol class."""
def __init__(self):
self.context: list[Symbol] = []
self.__graph: Optional[ComputationalGraphNode] = None
def check_validity(self) -> bool:
return NotImplemented
| 38.347826 | 117 | 0.712018 |
eec6e813387d5c509fe53af51947031d9b165546 | 2,218 | py | Python | test-runner/measurement.py | brycewang-microsoft/iot-sdks-e2e-fx | 211c9c2615a82076bda02a27152d67366755edbf | [
"MIT"
] | 12 | 2019-02-02T00:15:13.000Z | 2022-02-08T18:20:08.000Z | test-runner/measurement.py | brycewang-microsoft/iot-sdks-e2e-fx | 211c9c2615a82076bda02a27152d67366755edbf | [
"MIT"
] | 36 | 2019-02-14T22:53:17.000Z | 2022-03-22T22:41:38.000Z | test-runner/measurement.py | brycewang-microsoft/iot-sdks-e2e-fx | 211c9c2615a82076bda02a27152d67366755edbf | [
"MIT"
] | 12 | 2019-02-19T13:28:25.000Z | 2022-02-08T18:20:55.000Z | # Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for
# full license information.
import datetime
import threading
import contextlib
class MeasureRunningCodeBlock(contextlib.AbstractContextManager):
def __init__(self, name):
self.count = 0
self.name = name
self.at_zero = threading.Event()
def __enter__(self):
self.count += 1
self.at_zero.clear()
def __exit__(self, *args):
self.count -= 1
if self.count == 0:
self.at_zero.set()
def wait_for_zero(self):
self.at_zero.wait()
def get_count(self):
return self.count
class MeasureLatency(contextlib.AbstractContextManager):
def __init__(self, tracker=None):
self.start_time = None
self.end_time = None
self.tracker = tracker
def __enter__(self):
self.start_time = datetime.datetime.now()
def __exit__(self, *args):
self.end_time = datetime.datetime.now()
if self.tracker:
self.tracker.add_sample(self.get_latency())
def get_latency(self):
if self.start_time:
if self.end_time:
return (self.end_time - self.start_time).total_seconds()
else:
return (datetime.datetime.now() - self.start_time).total_seconds()
else:
return 0
class TrackCount(object):
def __init__(self):
self.reset()
def reset(self):
self.count = 0
def increment(self):
self.count += 1
return self.count
def get_count(self):
return self.count
def extract(self):
count = self.count
self.reset()
return count
class TrackAverage(object):
def __init__(self):
self.reset()
def reset(self):
self.count = 0
self.total = 0
def add_sample(self, sample):
self.total += sample
self.count += 1
def get_average(self):
if self.count:
return self.total / self.count
else:
return 0
def extract(self):
average = self.get_average()
self.reset()
return average
| 22.632653 | 82 | 0.596934 |
eeca3c40e6643d64e2cc7861e9484fa8ec9bd6f8 | 9,415 | py | Python | main.py | Arnav-Ghatti/Tkinter-Money-Tracker | 365dcafc78522d03062a8f062fa8167b9c015583 | [
"MIT"
] | null | null | null | main.py | Arnav-Ghatti/Tkinter-Money-Tracker | 365dcafc78522d03062a8f062fa8167b9c015583 | [
"MIT"
] | null | null | null | main.py | Arnav-Ghatti/Tkinter-Money-Tracker | 365dcafc78522d03062a8f062fa8167b9c015583 | [
"MIT"
] | null | null | null | import tkinter as tk
from tkinter import messagebox
import json
# Constants
FONT_NAME = "Open Sans"
BG_COLOR = "#f9f7f7"
FONT_COLOR = "#112d4e"
ACCENT = "#dbe2ef"
root = tk.Tk()
root.title("Money Tracker")
root.config(bg=BG_COLOR)
root.resizable(0, 0)
root.iconbitmap("C:\\Users\\ASUA\\Desktop\\Tests\\MoneyTransactionsOriginal\\money.ico")
transactions_history = {}
transactions = []
def set_listbox():
"""Refreshes the listbox"""
global listbox
listbox.delete(0, tk.END)
for item in transactions:
listbox.insert(tk.END, f"{item[0]} to {item[1]}, {clicked.get()}{item[2]}, {item[3]}")
def save_json(data):
"""Saves the date to C:\\Users\\ASUA\\Desktop\\Tests\\MoneyTransactionsOriginal\\history.json file"""
with open("C:\\Users\\ASUA\\Desktop\\Tests\\MoneyTransactionsOriginal\\history.json", "w") as file:
json.dump(transactions_history, file, indent=4)
def check_fields():
if sender_input.get() == "" or reciever_input.get() == "" or desc_input.get("1.0", tk.END) == "":
return False
return True
def clear_fields():
sender_input.delete(0, tk.END)
reciever_input.delete(0, tk.END)
amount_input.delete(0, tk.END)
desc_input.delete("1.0", tk.END)
def add_transactions():
"""Adds transactios to the listbox"""
try:
check_int = int(amount_input.get())
except ValueError:
messagebox.showwarning(title="❌ Error ❌", message="Please enter only numbers in amount field")
return
if check_fields():
transactions.append([sender_input.get(), reciever_input.get(), amount_input.get(), desc_input.get("1.0", tk.END)])
transactions_history["Transactions"] = transactions
clear_fields()
save_json(transactions_history)
set_listbox()
else:
messagebox.showwarning(title="❌ Error ❌", message="Please do not leave any fields empty")
def delete_transaction():
"""Deletes transactions from the listbox"""
try:
del transactions[listbox.curselection()[0]]
except IndexError:
messagebox.showwarning(title="❌ Error ❌", message="Please select any item")
else:
transactions_history["Transactions"] = transactions
save_json(transactions_history)
set_listbox()
def load_transactions():
"""Loads data of transactions from the selected item in the listbox"""
try:
selected_idx = listbox.curselection()[0]
selected_item = transactions[selected_idx]
except IndexError:
messagebox.showwarning(title="❌ Error ❌", message="Please select any item")
else:
sender_var.set(selected_item[0])
reciever_var.set(selected_item[1])
amount_var.set(selected_item[2])
desc_input.delete("1.0", tk.END)
desc_input.insert(tk.END, selected_item[3])
def update_transactions():
"""Updates selected transaction to the details newly entered"""
if check_fields():
try:
transactions[listbox.curselection()[0]] = [sender_var.get(), reciever_var.get(), amount_var.get(), desc_input.get("1.0", tk.END)]
except IndexError:
messagebox.showwarning(title="❌ Error ❌", message="Please select any item")
else:
transactions_history["Transactions"] = transactions
save_json(transactions_history)
set_listbox()
else:
messagebox.showwarning(title="❌ Error ❌", message="Please do not leave any fields empty")
# Title
title = tk.Label(root, text="Money Tracker", font=(FONT_NAME, 15, "bold"), bg=BG_COLOR, highlightthickness=0, fg=FONT_COLOR)
title.grid(row=0, column=0, columnspan=2, pady=3)
# ---------------------------- ENTRIES AND LABELS ------------------------------- #
input_frame = tk.Frame(root, bg=BG_COLOR, highlightthickness=0)
input_frame.grid(row=1, column=0, sticky="N", padx=5)
# Sender
sender_label = tk.Label(input_frame, text="Sender: ", font=(FONT_NAME, 12, "normal"), bg=BG_COLOR, fg=FONT_COLOR, highlightthickness=0)
sender_label.grid(row=0, column=0, sticky="W", pady=5)
sender_var = tk.StringVar()
sender_input = tk.Entry(input_frame, textvariable=sender_var, width=36, font=(FONT_NAME, 12, "normal"), bg=ACCENT, fg=FONT_COLOR, highlightthickness=0, bd=0)
sender_input.focus()
sender_input.grid(row=0, column=1, sticky="W", pady=5, padx=10, columnspan=2)
# Reciever
reciever_label = tk.Label(input_frame, text="Reciever: ", font=(FONT_NAME, 12, "normal"), bg=BG_COLOR, fg=FONT_COLOR, highlightthickness=0)
reciever_label.grid(row=1, column=0, sticky="W", pady=5)
reciever_var = tk.StringVar()
reciever_input = tk.Entry(input_frame, textvariable=reciever_var, width=36, font=(FONT_NAME, 12, "normal"), bg=ACCENT, fg=FONT_COLOR, highlightthickness=0, bd=0)
reciever_input.grid(row=1, column=1, sticky="W", pady=5, padx=10, columnspan=2)
# Amount
amount_label = tk.Label(input_frame, text="Amount: ", font=(FONT_NAME, 12, "normal"), bg=BG_COLOR, fg=FONT_COLOR, highlightthickness=0)
amount_label.grid(row=2, column=0, sticky="W", pady=5)
amount_var = tk.StringVar()
amount_input = tk.Entry(input_frame, textvariable=amount_var, width=27, font=(FONT_NAME, 12, "normal"), bg=ACCENT, fg=FONT_COLOR, highlightthickness=0, bd=0)
amount_input.grid(row=2, column=1, sticky="W", pady=5, padx=10)
# Description
desc_label = tk.Label(input_frame, text="Description: ", font=(FONT_NAME, 12, "normal"), bg=BG_COLOR, fg=FONT_COLOR, highlightthickness=0, bd=0)
desc_label.grid(row=3, column=0, sticky="N", pady=5)
desc_input = tk.Text(input_frame, width=36, height=12, font=(FONT_NAME, 12, "normal"), bg=ACCENT, fg=FONT_COLOR, highlightthickness=0, bd=0)
desc_input.grid(row=3, column=1, sticky="W", pady=5, padx=10, columnspan=2)
currencies = [
"$",
"₹",
"€",
"£",
"¥"
]
clicked = tk.StringVar()
clicked.set("$")
currency = tk.OptionMenu(input_frame, clicked, *currencies)
currency.config(bg=ACCENT, fg=FONT_COLOR, bd=0, highlightthickness=0, font=(FONT_NAME, 10, "normal"))
currency["menu"].config(bg=ACCENT, fg=FONT_COLOR, bd=0, font=(FONT_NAME, 10, "normal"))
currency.grid(row=2, column=2)
# ---------------------------- BUTTONS ------------------------------- #
btn_frame = tk.Frame(root, bg=BG_COLOR, highlightthickness=0)
btn_frame.grid(row=2, column=0, padx=5, pady=5, sticky="N")
# Add
add_btn= tk.Button(btn_frame, text=" Add ", command=add_transactions, font=(FONT_NAME, 11, "normal"), bg=ACCENT, fg=FONT_COLOR, highlightthickness=0, bd=0)
add_btn.pack(side=tk.LEFT, padx=5, pady=5)
# Update
update_btn = tk.Button(btn_frame, text=" Update ", command=update_transactions, font=(FONT_NAME, 11, "normal"), bg=ACCENT, fg=FONT_COLOR, highlightthickness=0, bd=0)
update_btn.pack(side=tk.LEFT, padx=5, pady=5)
# Delete
del_btn = tk.Button(btn_frame, text=" Delete ", command=delete_transaction, font=(FONT_NAME, 11, "normal"), bg=ACCENT, fg=FONT_COLOR, highlightthickness=0, bd=0)
del_btn.pack(side=tk.LEFT, padx=5, pady=5)
# Load
load_btn = tk.Button(btn_frame, text=" Load ", command=load_transactions, font=(FONT_NAME, 11, "normal"), bg=ACCENT, fg=FONT_COLOR, highlightthickness=0, bd=0)
load_btn.pack(side=tk.LEFT, padx=5, pady=5)
# Refresh
refresh_btn = tk.Button(btn_frame, text=" Refresh ", command=set_listbox, font=(FONT_NAME, 11, "normal"), bg=ACCENT, fg=FONT_COLOR, highlightthickness=0, bd=0)
refresh_btn.pack(side=tk.LEFT, padx=5, pady=5)
# ---------------------------- LISTBOX ------------------------------- #
data_frame = tk.Frame(root, bg=ACCENT, highlightthickness=0)
data_frame.grid(row=1, column=1, rowspan=2)
# Scroll Bars
scroll_bar_y = tk.Scrollbar(data_frame, orient=tk.VERTICAL)
scroll_bar_x = tk.Scrollbar(data_frame, orient=tk.HORIZONTAL)
# Listbox
listbox = tk.Listbox(data_frame, height=18, width=50, yscrollcommand=scroll_bar_y.set, xscrollcommand=scroll_bar_x.set, font=(FONT_NAME, 12, "normal"), bg=ACCENT, fg=FONT_COLOR, highlightthickness=0, bd=0)
# Scroll Bars
scroll_bar_y.config(command=listbox.yview)
scroll_bar_y.pack(side=tk.RIGHT, fill=tk.Y)
scroll_bar_x.config(command=listbox.xview)
scroll_bar_x.pack(side=tk.BOTTOM, fill=tk.X)
listbox.pack(side=tk.LEFT, fill=tk.BOTH, expand=1)
# ---------------------------- STATUS BAR ------------------------------- #
status_frame = tk.LabelFrame(root, bd=0, relief=tk.SUNKEN, bg="#3f72af", highlightthickness=0)
status_frame.grid(sticky=tk.N+tk.S+tk.E+tk.W, columnspan=2)
# Made By
made_by = tk.Label(status_frame, text="Made By Arnav Ghatti", anchor=tk.E, font=(FONT_NAME, 9, "normal"), bg="#3f72af", highlightthickness=0, fg=BG_COLOR)
made_by.pack(side=tk.RIGHT, fill=tk.BOTH, expand=1)
# Version
version_label = tk.Label(status_frame, text="Version: 2.5.3", anchor=tk.W, font=(FONT_NAME, 9, "normal"), bg="#3f72af", highlightthickness=0, fg=BG_COLOR)
version_label.pack(side=tk.LEFT, fill=tk.BOTH, expand=1)
def load_data():
"""Loads data from the C:\\Users\\ASUA\\Desktop\\Tests\\MoneyTransactionsOriginal\\history.json file to the listbox"""
global transactions, listbox
with open("C:\\Users\\ASUA\\Desktop\\Tests\\MoneyTransactionsOriginal\\history.json", "r") as file:
transaction_history = json.load(file)
transactions = transaction_history["Transactions"]
listbox.delete(0, tk.END)
for item in transactions:
listbox.insert(tk.END, f"{item[0]} to {item[1]}, ${item[2]}, {item[3]}")
load_data()
root.mainloop()
| 40.235043 | 205 | 0.683696 |
eeca641ef832fde419fc26a2088df6a05f63fc33 | 519 | py | Python | ftmscan/utils/parsing.py | awilliamson10/ftmscan-python | d7ed384f1ac65461c86bed4a65f9332baf92c8f0 | [
"MIT"
] | 4 | 2022-01-10T21:58:02.000Z | 2022-03-27T20:21:35.000Z | polygonscan/utils/parsing.py | yusufseyrek/polygonscan-python | c58a8190e41a5c9bac0a5e88db809e5e207b1c77 | [
"MIT"
] | 3 | 2021-09-25T05:10:27.000Z | 2021-11-21T04:56:29.000Z | polygonscan/utils/parsing.py | yusufseyrek/polygonscan-python | c58a8190e41a5c9bac0a5e88db809e5e207b1c77 | [
"MIT"
] | 4 | 2021-09-25T05:11:08.000Z | 2022-03-09T01:01:33.000Z | import requests
class ResponseParser:
@staticmethod
def parse(response: dict):
result = response["result"]
if "status" in response.keys():
status = bool(int(response["status"]))
message = response["message"]
assert status, f"{result} -- {message}"
else:
# GETH or Parity proxy msg format
# TODO: see if we need those values
jsonrpc = response["jsonrpc"]
cid = int(response["id"])
return result | 30.529412 | 51 | 0.554913 |
eeca73f0a33396739525615f94801665b147bf27 | 12,725 | py | Python | empire_cellular_automaton/dataset_processing.py | ThomasMiller01/ProofOfConcept | 021bf29743309224628682d0f82b0be80ae83c95 | [
"MIT"
] | 1 | 2019-12-18T13:49:22.000Z | 2019-12-18T13:49:22.000Z | empire_cellular_automaton/dataset_processing.py | ThomasMiller01/Experiments | 021bf29743309224628682d0f82b0be80ae83c95 | [
"MIT"
] | null | null | null | empire_cellular_automaton/dataset_processing.py | ThomasMiller01/Experiments | 021bf29743309224628682d0f82b0be80ae83c95 | [
"MIT"
] | 1 | 2021-08-29T09:22:52.000Z | 2021-08-29T09:22:52.000Z | import json
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import os
import time
def people_distribution_map(data, file):
unique, indices, counts = np.unique(
data[:, 0], return_index=True, return_counts=True)
generations = list(zip(unique, indices, counts))
plt_size_x = int(np.ceil(np.sqrt(len(generations))))
plt_size_y = int(np.ceil(np.sqrt(len(generations)) - 0.5))
fig, axs = plt.subplots(plt_size_x, plt_size_y, figsize=(10, 10))
fig.suptitle("people distribution", fontsize=10)
fig.tight_layout(pad=3.0)
i = 0
s_all = ()
s_mapped_all = None
for ax_s in axs:
for ax in ax_s:
if i < len(generations):
gen = generations[i]
minified_data = data[gen[1]:gen[1] + gen[2]]
all_people = np.zeros((0, 2)).astype('int')
for day in minified_data[:, 2]:
for person in day:
if person[5] != 0:
all_people = np.append(all_people, np.asarray(
[[person[6], person[7]]]), axis=0)
unique, counts = np.unique(
all_people, return_counts=True, axis=0)
x, y = zip(*unique)
if not s_all:
s_all = (counts.min(), counts.max())
s_mapped_all = np.interp(
counts, (s_all[0], s_all[1]), (0, 100))
s_mapped = np.interp(
counts, (counts.min(), counts.max()), (0, 100))
color_palett = [
'#d3ae1b', '#de6e3b', '#b54d47', '#8e321e', '#522a1a']
color_ranges = np.arange(
s_mapped_all.min(), s_mapped_all.max(), (s_mapped_all.max() - s_mapped_all.min()) / len(color_palett))
color_indices = [np.where(n < color_ranges)[0]
for n in s_mapped]
colors = [color_palett[c[0]] if c.size != 0 else color_palett[len(
color_palett) - 1] for c in color_indices]
img = plt.imread("map.jpg")
ax.scatter(x, y, s=s_mapped, c=colors)
ax_xlim = ax.get_xlim()
ax_ylim = ax.get_ylim()
ax.imshow(img, origin="lower")
ax.set_xlim(ax_xlim)
ax.set_ylim(ax_ylim[::-1])
ax.set(title="gen " + str(gen[0]))
i += 1
plt.savefig(file)
plt.close(fig=fig)
def kind_of_disease_per_generation(data, file):
unique, indices, counts = np.unique(
data[:, 0], return_index=True, return_counts=True)
generations = list(zip(unique, indices, counts))
plt_size_x = int(np.ceil(np.sqrt(len(generations))))
plt_size_y = int(np.ceil(np.sqrt(len(generations)) - 0.5))
fig, axs = plt.subplots(plt_size_x, plt_size_y, figsize=(10, 10))
fig.suptitle("kind of disease", fontsize=16)
fig.tight_layout(pad=3.0)
i = 0
for ax_s in axs:
for ax in ax_s:
if i < len(generations):
gen = generations[i]
minified_data = data[gen[1]:gen[1] + gen[2]]
all_diseased_people = np.zeros((0, 2)).astype('int')
for day in minified_data[:, 2]:
for person in day:
if person[5] != 0:
all_diseased_people = np.append(all_diseased_people, np.asarray(
[[person[0], person[5]]]), axis=0)
disease_all = np.zeros((0, 2)).astype('int')
for disease_kind in np.unique(all_diseased_people[:, 1]):
people_disease_kind = all_diseased_people[np.where(
all_diseased_people[:, 1] == disease_kind)[0]]
unique_disease_kind, counts_disease_kind = np.unique(
all_diseased_people, return_counts=True)
disease_all = np.append(disease_all, np.asarray(
[[disease_kind, len(unique_disease_kind)]]), axis=0)
x = np.arange(0, len(disease_all))
y = disease_all[:, 1]
ax.bar(x, y)
ax.set_xticks(x)
ax.set_yticks(y)
ax.set_xticklabels(disease_all[:, 0])
ax.set(title="gen " + str(gen[0]))
i += 1
plt.savefig(file)
plt.close(fig=fig)
def strength_distribution_per_generation(data, file):
unique, indices, counts = np.unique(
data[:, 0], return_index=True, return_counts=True)
generations = list(zip(unique, indices, counts))
plt_size_x = int(np.ceil(np.sqrt(len(generations))))
plt_size_y = int(np.ceil(np.sqrt(len(generations)) - 0.5))
fig, axs = plt.subplots(plt_size_x, plt_size_y, figsize=(10, 10))
fig.tight_layout(pad=3.0)
fig.suptitle("strength distribution", fontsize=12)
i = 0
for ax_s in axs:
for ax in ax_s:
if i < len(generations):
gen = generations[i]
minified_data = data[gen[1]:gen[1] + gen[2]]
x = np.arange(0, 100)
y = np.zeros(100)
for strength in minified_data[:, 2][len(minified_data) - 1][:, 3]:
y[int(np.ceil(strength)) - 1] += 1
coeffs = np.polyfit(x, y, 3)
poly_eqn = np.poly1d(coeffs)
y_hat = poly_eqn(x)
ax.plot(x, y)
ax.plot(x, y_hat, label="average", c='r')
ax.set(xlabel='strength', ylabel='people',
title="gen " + str(gen[0]))
ax.grid()
i += 1
plt.savefig(file)
plt.close(fig=fig)
def age_distribution_per_generation(data, file):
unique, indices, counts = np.unique(
data[:, 0], return_index=True, return_counts=True)
generations = list(zip(unique, indices, counts))
plt_size_x = int(np.ceil(np.sqrt(len(generations))))
plt_size_y = int(np.ceil(np.sqrt(len(generations)) - 0.5))
fig, axs = plt.subplots(plt_size_x, plt_size_y, figsize=(10, 10))
fig.tight_layout(pad=3.0)
fig.suptitle("age distribution", fontsize=16)
i = 0
for ax_s in axs:
for ax in ax_s:
if i < len(unique):
gen = generations[i]
minified_data = data[gen[1]:gen[1] + gen[2]]
x = np.arange(0, 100)
y = np.zeros(100)
for age in minified_data[:, 2][len(minified_data) - 1][:, 2]:
if age > 100:
age = 100
y[int(np.ceil(age)) - 1] += 1
coeffs = np.polyfit(x, y, 3)
poly_eqn = np.poly1d(coeffs)
y_hat = poly_eqn(x)
ax.plot(x, y)
ax.plot(x, y_hat, label="average", c='r')
ax.set(xlabel='age', ylabel='people',
title="gen " + str(gen[0]))
ax.grid()
i += 1
plt.savefig(file)
plt.close(fig=fig)
def disease_over_time(data, file):
fig, ax = plt.subplots()
unique, indices, counts = np.unique(
data[:, 0], return_index=True, return_counts=True)
for gen in zip(unique, indices, counts):
minified_data = data[gen[1]:gen[1] + gen[2]]
x = np.arange(0, len(minified_data))
y = np.asarray([np.sum(x) for x in [a[:, 5]
for a in minified_data[:, 2]]])
ax.plot(x, y, label=gen[0])
ax.set(xlabel='days', ylabel='disease',
title='disease over time')
ax.grid()
plt.legend(loc="best", title="generation")
plt.savefig(file)
plt.close(fig=fig)
def avg_reproductionValue_over_time(data, file, settings):
fig, ax = plt.subplots()
unique, indices, counts = np.unique(
data[:, 0], return_index=True, return_counts=True)
for gen in zip(unique, indices, counts):
minified_data = data[gen[1]:gen[1] + gen[2]]
x = np.arange(0, len(minified_data))
y = np.asarray(np.asarray([np.average(a[:, 4])
for a in minified_data[:, 2]]))
ax.plot(x, y, label=gen[0])
ax.axhline(settings['p_reproductionThreshold'],
c='r', linestyle=':', label='rT')
ax.set(xlabel='days', ylabel='reproductionValue',
title='avg reproductionValue over time')
ax.grid()
plt.legend(loc="best", title="generation")
plt.savefig(file)
plt.close(fig=fig)
def avg_age_over_time(data, file):
fig, ax = plt.subplots()
unique, indices, counts = np.unique(
data[:, 0], return_index=True, return_counts=True)
for gen in zip(unique, indices, counts):
minified_data = data[gen[1]:gen[1] + gen[2]]
x = np.arange(0, len(minified_data))
y = np.asarray(np.asarray([np.average(a[:, 2])
for a in minified_data[:, 2]]))
ax.plot(x, y, label=gen[0])
ax.set(xlabel='days', ylabel='age',
title='avg age over time')
ax.grid()
plt.legend(loc="best", title="generation")
plt.savefig(file)
plt.close(fig=fig)
def population_over_time(data, file):
fig, ax = plt.subplots()
unique, indices, counts = np.unique(
data[:, 0], return_index=True, return_counts=True)
for gen in zip(unique, indices, counts):
minified_data = data[gen[1]:gen[1] + gen[2]]
x = np.arange(0, len(minified_data))
y = np.asarray([len(a) for a in minified_data[:, 2]])
ax.plot(x, y, label=gen[0])
ax.set(xlabel='days', ylabel='population',
title='population over time')
ax.grid()
plt.legend(loc="best", title="generation")
plt.savefig(file)
plt.close(fig=fig)
def save_figs(dataset_name):
start_all = time.time()
print("------")
print("saving " + dataset_name)
file_name = './datasets/' + dataset_name
print("loading data ...")
start = time.time()
# load data
with open(file_name + '/' + dataset_name + '_settings.json') as json_file:
settings = json.load(json_file)
data = np.load(file_name + '/' + dataset_name +
'_data.npy', allow_pickle=True)
end = time.time()
print("data loaded in " + str(round(end - start, 2)) + "s")
print("***")
start = time.time()
print("saving pdfs ...")
# save as pdf
try:
os.mkdir(file_name + "/pdf")
except:
pass
population_over_time(data, file_name + "/pdf/population_over_time.pdf")
avg_age_over_time(data, file_name + "/pdf/avg_age_over_time.pdf")
avg_reproductionValue_over_time(
data, file_name + "/pdf/avg_reproductionValue_over_time.pdf", settings)
disease_over_time(data, file_name + "/pdf/disease_over_time.pdf")
age_distribution_per_generation(
data, file_name + "/pdf/age_distribution_per_generation.pdf")
strength_distribution_per_generation(
data, file_name + "/pdf/strength_distribution_per_generation.pdf")
kind_of_disease_per_generation(
data, file_name + "/pdf/kind_of_disease.pdf")
people_distribution_map(
data, file_name + "/pdf/people_distribution_map.pdf")
end = time.time()
print("pdfs saved in " + str(round(end - start, 2)) + "s")
print("***")
print("saving pngs ...")
start = time.time()
# save as png
try:
os.mkdir(file_name + "/png")
except:
pass
population_over_time(data, file_name + "/png/population_over_time.png")
avg_age_over_time(data, file_name + "/png/avg_age_over_time.png")
avg_reproductionValue_over_time(
data, file_name + "/png/avg_reproductionValue_over_time.png", settings)
disease_over_time(data, file_name + "/png/disease_over_time.png")
age_distribution_per_generation(
data, file_name + "/png/age_distribution_per_generation.png")
strength_distribution_per_generation(
data, file_name + "/png/strength_distribution_per_generation.png")
kind_of_disease_per_generation(
data, file_name + "/png/kind_of_disease.png")
people_distribution_map(
data, file_name + "/png/people_distribution_map.png")
end = time.time()
print("pngs saved in " + str(round(end - start, 2)) + "s")
print("***")
end_all = time.time()
print("- " + dataset_name + " saved")
print("- time elapsed: " + str(round(end_all - start_all, 2)) + "s")
print("------")
if __name__ == "__main__":
for directory in os.listdir('./datasets'):
if "example" not in directory:
save_figs(directory)
print("creating statistics done")
| 37.985075 | 122 | 0.558428 |
eecf75568a4959cab7877ed219454c84c98b7e64 | 403 | py | Python | mindhome_alpha/erpnext/patches/v11_0/add_expense_claim_default_account.py | Mindhome/field_service | 3aea428815147903eb9af1d0c1b4b9fc7faed057 | [
"MIT"
] | 1 | 2021-04-29T14:55:29.000Z | 2021-04-29T14:55:29.000Z | mindhome_alpha/erpnext/patches/v11_0/add_expense_claim_default_account.py | Mindhome/field_service | 3aea428815147903eb9af1d0c1b4b9fc7faed057 | [
"MIT"
] | null | null | null | mindhome_alpha/erpnext/patches/v11_0/add_expense_claim_default_account.py | Mindhome/field_service | 3aea428815147903eb9af1d0c1b4b9fc7faed057 | [
"MIT"
] | 1 | 2021-04-29T14:39:01.000Z | 2021-04-29T14:39:01.000Z | from __future__ import unicode_literals
import frappe
def execute():
    """Patch: seed ``default_expense_claim_payable_account`` on every Company
    from its existing ``default_payable_account``."""
    frappe.reload_doc("setup", "doctype", "company")
    for company in frappe.get_all("Company", fields=["name", "default_payable_account"]):
        payable_account = company.default_payable_account
        if payable_account is None:
            # Nothing to copy for this company.
            continue
        frappe.db.set_value(
            "Company",
            company.name,
            "default_expense_claim_payable_account",
            payable_account,
        )
eed48753201aaf2076987680b987b0334df7af1f | 4,653 | py | Python | cliff/lister.py | tivaliy/cliff | a04a48f4f7dc72b1bcc95a5c6a550c7650e35ab3 | [
"Apache-2.0"
] | 187 | 2015-01-13T04:07:41.000Z | 2022-03-10T14:12:27.000Z | cliff/lister.py | tivaliy/cliff | a04a48f4f7dc72b1bcc95a5c6a550c7650e35ab3 | [
"Apache-2.0"
] | 3 | 2016-01-05T20:52:55.000Z | 2020-10-01T06:16:58.000Z | cliff/lister.py | tivaliy/cliff | a04a48f4f7dc72b1bcc95a5c6a550c7650e35ab3 | [
"Apache-2.0"
] | 69 | 2015-02-01T01:28:37.000Z | 2021-11-15T08:28:53.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Application base class for providing a list of data as output."""
import abc
import logging
from . import display
class Lister(display.DisplayCommandBase, metaclass=abc.ABCMeta):
    """Command base class for providing a list of data as output."""

    log = logging.getLogger(__name__)

    @property
    def formatter_namespace(self):
        # Entry-point namespace that list formatters are discovered in.
        return 'cliff.formatter.list'

    @property
    def formatter_default(self):
        return 'table'

    @property
    def need_sort_by_cliff(self):
        """Whether sort procedure is performed by cliff itself.

        Should be overridden (return False) when there is a need to implement
        custom sorting procedure or data is already sorted.
        """
        return True

    @abc.abstractmethod
    def take_action(self, parsed_args):
        """Run command.

        Return a tuple containing the column names and an iterable containing
        the data to be listed.
        """

    def get_parser(self, prog_name):
        """Extend the base parser with --sort-column/--sort-* options."""
        parser = super(Lister, self).get_parser(prog_name)
        group = self._formatter_group
        group.add_argument(
            '--sort-column',
            action='append',
            default=[],
            dest='sort_columns',
            metavar='SORT_COLUMN',
            help=(
                'specify the column(s) to sort the data (columns specified '
                'first have a priority, non-existing columns are ignored), '
                'can be repeated'
            ),
        )
        sort_dir_group = group.add_mutually_exclusive_group()
        sort_dir_group.add_argument(
            '--sort-ascending',
            action='store_const',
            dest='sort_direction',
            const='asc',
            help=('sort the column(s) in ascending order'),
        )
        sort_dir_group.add_argument(
            '--sort-descending',
            action='store_const',
            dest='sort_direction',
            const='desc',
            help=('sort the column(s) in descending order'),
        )
        return parser

    def produce_output(self, parsed_args, column_names, data):
        """Sort (best effort), filter and emit the rows via the formatter."""
        if parsed_args.sort_columns and self.need_sort_by_cliff:
            # Positions (in column_names) of the requested sort columns;
            # unknown columns are silently ignored.
            indexes = [
                column_names.index(c) for c in parsed_args.sort_columns
                if c in column_names
            ]
            reverse = parsed_args.sort_direction == 'desc'
            # Sort by the lowest-priority key first so the first requested
            # column ends up dominating (stable sort).
            for index in indexes[::-1]:
                try:
                    # We need to handle unset values (i.e. None) so we sort on
                    # multiple conditions: the first comparing the results of
                    # an 'is None' type check and the second comparing the
                    # actual value. The second condition will only be checked
                    # if the first returns True, which only happens if the
                    # returns from the 'is None' check on the two values are
                    # the same, i.e. both None or both not-None
                    data = sorted(
                        data, key=lambda k: (k[index] is None, k[index]),
                        reverse=reverse,
                    )
                except TypeError:
                    # Simply log and then ignore this; sorting is best effort.
                    # Fixed: ``index`` is a position in ``column_names``, not
                    # in ``parsed_args.sort_columns`` — indexing the latter
                    # could raise IndexError or name the wrong column.
                    self.log.warning(
                        "Could not sort on field '%s'; unsortable types",
                        column_names[index],
                    )

        columns_to_include, selector = self._generate_columns_and_selector(
            parsed_args, column_names,
        )
        if selector:
            # Generator expression to only return the parts of a row
            # of data that the user has expressed interest in
            # seeing. We have to convert the compress() output to a
            # list so the table formatter can ask for its length.
            data = (
                list(self._compress_iterable(row, selector)) for row in data
            )
        self.formatter.emit_list(
            columns_to_include, data, self.app.stdout, parsed_args,
        )
        return 0
| 36.637795 | 78 | 0.585214 |
eed5699e06d3cac61b4a945b53a1004046c608f3 | 1,026 | py | Python | task3/task3.py | ksmirenko/ml-homework | a5e558352ffc332ad5e40526dda21f205718a203 | [
"MIT"
] | 1 | 2020-08-05T08:06:33.000Z | 2020-08-05T08:06:33.000Z | task3/task3.py | ksmirenko/ml-homework | a5e558352ffc332ad5e40526dda21f205718a203 | [
"MIT"
] | null | null | null | task3/task3.py | ksmirenko/ml-homework | a5e558352ffc332ad5e40526dda21f205718a203 | [
"MIT"
] | null | null | null | from PIL import Image
import numpy as np
# Works when launched from terminal
# noinspection PyUnresolvedReferences
from k_means import k_means
# Tunables for the K-means image "compression" experiment.
input_image_file = 'lena.jpg'
output_image_prefix = 'out_lena'
n_clusters = [2, 3, 5]  # palette sizes to try, one output image per size
max_iterations = 100  # iteration cap passed to k_means()
launch_count = 3  # number of k_means launches per k -- presumably restarts; confirm against k_means()
def main():
    """Compress ``input_image_file`` with K-means for each palette size in
    ``n_clusters``, writing one JPEG per size."""
    # Read input image and flatten it to one row per pixel (RGB vector).
    image = np.array(Image.open(input_image_file))
    X = image.reshape((image.shape[0] * image.shape[1], image.shape[2]))

    for k in n_clusters:
        print(f"{k} clusters")
        # 'Compress' image using K-means: replace every pixel by its centroid.
        centroids, clustered = k_means(X, k=k, max_iterations=max_iterations, launch_count=launch_count)
        new_X = np.array([centroids[cluster_index] for cluster_index in clustered])
        new_X = new_X.astype(np.uint8)

        # Write output image, restoring the original (H, W, C) shape.
        new_image = new_X.reshape(image.shape)
        output_image_name = f"{output_image_prefix}_{k}.jpg"
        Image.fromarray(new_image).save(output_image_name)
        print(f"Saved {output_image_name}")

    print("Done.")


# Fixed: guard the entry point so merely importing this module no longer
# runs the whole compression pipeline as a side effect.
if __name__ == '__main__':
    main()
| 27.72973 | 104 | 0.692008 |
eed63ef06321c79002e85fdaeb08205c4299ea39 | 3,389 | py | Python | dcrnn_train.py | syin3/cs224w-traffic | 284836b49404bfd38ae23b31f89f8e617548e286 | [
"MIT"
] | 9 | 2019-03-20T01:02:07.000Z | 2020-11-25T06:45:30.000Z | dcrnn_train.py | syin3/cs224w-traffic | 284836b49404bfd38ae23b31f89f8e617548e286 | [
"MIT"
] | null | null | null | dcrnn_train.py | syin3/cs224w-traffic | 284836b49404bfd38ae23b31f89f8e617548e286 | [
"MIT"
] | 2 | 2020-09-24T07:03:58.000Z | 2020-11-09T04:43:03.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import tensorflow as tf
import yaml
from model.dcrnn_supervisor import DCRNNSupervisor
def main(args):
    """Train a DCRNN supervisor from the YAML configuration file named by
    ``args.config_filename``.

    Args:
        args: parsed CLI namespace; only ``config_filename`` is read here.
    """
    with open(args.config_filename) as f:
        # Fixed: yaml.load() without an explicit Loader is deprecated since
        # PyYAML 5.1 and raises TypeError on PyYAML >= 6.  The config is
        # plain data, so safe_load is sufficient (and safer).
        supervisor_config = yaml.safe_load(f)

    tf_config = tf.ConfigProto()
    # if args.use_cpu_only:
    #     tf_config = tf.ConfigProto(device_count={'GPU': 0})

    # Grow GPU memory on demand instead of reserving it all up front.
    tf_config.gpu_options.allow_growth = True
    with tf.Session(config=tf_config) as sess:
        supervisor = DCRNNSupervisor(**supervisor_config)
        supervisor.train(sess=sess)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--config_filename', required=True, default=None, type=str, help='Configuration filename for restoring the model.')
    # NOTE(review): argparse's type=bool treats any non-empty string
    # (including "False") as True.  The flag is currently unused by main(),
    # but switch to action='store_true' if it is ever honored.
    parser.add_argument('--use_cpu_only', default=False, type=bool, help='Set true to only use cpu.')
    # adjacent and distance-weighted
    parser.add_argument('--weightType', required=True, choices=['a', 'd'], help='w/ or w/o distance pre-processing')
    parser.add_argument('--att', dest='attention', action='store_true', help='Call this command to raise attention mechanism in the training.')
    parser.add_argument('--no-att', dest='attention', action='store_false', help='Call this command not to raise attention mechanism in the training.')
    parser.set_defaults(attention=False)

    subparsers = parser.add_subparsers()
    fullyConnectParser = subparsers.add_parser('fc', help='In fully connect mode, choose embed file')
    fullyConnectParser.add_argument('--gEmbedFile', required=True, default='LA-n2v-14-0.1-1', help='Embedding file for n2v, should add up-directory when calling')
    fullyConnectParser.add_argument('--network', nargs='?', const='fc', default='fc', help='To store the choice of fully connected')

    graphConvParser = subparsers.add_parser('graphConv', help='In graph conv mode, choose W matrix form')
    graphConvParser.add_argument('--hop', required=True, type=int, default=2,
                                 help='k-hop neighbors, default is 2 for distance-processed matrix; but must be one for binary matrix')
    graphConvParser.add_argument('--network', nargs='?', const='gconv', default='gconv', help='To store the choice of gconv')

    args = parser.parse_args()

    # Rewrite the YAML config in place so the run settings are recorded in it.
    with open(args.config_filename) as f:
        # Fixed: yaml.load() without a Loader is deprecated (PyYAML >= 5.1)
        # and a TypeError on PyYAML >= 6; the config is plain data.
        doc = yaml.safe_load(f)

    # default batch sizes to 64, in training, validation and in testing
    doc['data']['batch_size'] = 64
    doc['data']['test_batch_size'] = 64
    doc['data']['val_batch_size'] = 64

    # set matrix to adjacency or distance-weighted
    if args.weightType == 'd':
        doc['data']['graph_pkl_filename'] = "data/sensor_graph/adj_mx_la.pkl"
    else:
        doc['data']['graph_pkl_filename'] = "data/sensor_graph/adj_bin_la.pkl"

    # record necessary info to log
    doc['model']['weightMatrix'] = args.weightType
    doc['model']['attention'] = args.attention
    doc['model']['network'] = args.network

    # Sub-command specific settings: 'fc' carries an embedding file and no
    # diffusion; 'graphConv' carries the diffusion hop count.
    if 'gEmbedFile' in vars(args):
        doc['model']['graphEmbedFile'] = args.gEmbedFile
        doc['model']['max_diffusion_step'] = 0
    if 'hop' in vars(args):
        doc['model']['max_diffusion_step'] = args.hop

    # save the info
    with open(args.config_filename, 'w') as f:
        yaml.dump(doc, f)

    main(args)
| 42.3625 | 162 | 0.689584 |
eed698cee32da7af7d7cb366130b591986c4feae | 1,035 | py | Python | train.py | k2sebeom/DeepLOLCourt | 630f1eee1729c06f686abc7c2a7ecbdfe66803b3 | [
"MIT"
] | null | null | null | train.py | k2sebeom/DeepLOLCourt | 630f1eee1729c06f686abc7c2a7ecbdfe66803b3 | [
"MIT"
] | null | null | null | train.py | k2sebeom/DeepLOLCourt | 630f1eee1729c06f686abc7c2a7ecbdfe66803b3 | [
"MIT"
] | null | null | null | import torch.optim as optim
from torch import nn
from data.match_dataset import MatchDataset
from torch.utils.data import DataLoader
from models.lol_result_model import LOLResultModel
import torch
if __name__ == '__main__':
EPOCH = 50
BATCH_SIZE = 32
loader = DataLoader(MatchDataset('dataset/train_data.csv'), BATCH_SIZE)
print("Dataset Loaded")
loss_criterion = nn.BCELoss()
device = torch.device('cuda:0')
model = LOLResultModel(190)
print("Model created")
optimizer = optim.Adam(model.parameters(), lr=0.0001)
model.to(device)
for epoch in range(EPOCH):
loss_data = 0
for i, data in enumerate(loader):
output = model(data['x'].to(device))
loss = loss_criterion(output, data['y'].unsqueeze(1).float().to(device))
optimizer.zero_grad()
loss.backward()
optimizer.step()
loss_data = loss.data
print(f'Epoch {epoch}: {loss_data}')
torch.save(model.state_dict(), 'checkpoints/model.pth')
| 30.441176 | 84 | 0.656039 |
eed71f6a7395828dd1b7ba56051666be99d7beff | 774 | py | Python | src/cpfromddd.py | theonewolf/TripleD | 875c903a302d5502ac65224c16fa65da1246483e | [
"MIT"
] | 13 | 2015-04-04T14:41:38.000Z | 2021-12-28T12:24:29.000Z | src/cpfromddd.py | theonewolf/TripleD | 875c903a302d5502ac65224c16fa65da1246483e | [
"MIT"
] | null | null | null | src/cpfromddd.py | theonewolf/TripleD | 875c903a302d5502ac65224c16fa65da1246483e | [
"MIT"
] | 8 | 2015-01-26T17:15:27.000Z | 2019-09-14T03:22:46.000Z | #!/usr/bin/env python
import libtripled, logging, sys, os
# CONSTANTS
# Module-level logger shared by next_chunk() and the CLI entry point below.
log = logging.getLogger('tripled.cpfromddd')
def next_chunk(tripled, path):
    """Yield the data blocks of *path*, one per (worker, block-path) chunk
    reported by the tripled master."""
    for chunk in tripled.read_file(path):
        worker, block_path = chunk[0], chunk[1]
        log.debug('reading from worker[%s] path[%s]' % (worker, block_path))
        yield tripled.read_block(worker, block_path)
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    if len(sys.argv) < 4:
        # Fixed: the original used a Python 2 print *statement*, which is a
        # SyntaxError under the file's python3 shebang.
        print('%s <master> <tripled src> <local dst>' % (sys.argv[0]))
        exit(-1)
    tripled = libtripled.tripled(sys.argv[1])
    # Best-effort: create the destination directory; ignore "already exists"
    # (and an empty dirname for bare filenames).
    try:
        os.makedirs(os.path.dirname(sys.argv[3]))
    except OSError:
        pass
    with open(sys.argv[3], 'w') as f:
        for chunk in next_chunk(tripled, sys.argv[2]):
            f.write(chunk)
eed75ce868931dabebd40ef5cd1f3bab8cc08cc7 | 10,094 | py | Python | torchrec/distributed/test_utils/test_sharding.py | samiwilf/torchrec | 50ff0973d5d01ec80fe36ba5f1d524c92c799836 | [
"BSD-3-Clause"
] | 1 | 2022-03-07T09:06:11.000Z | 2022-03-07T09:06:11.000Z | torchrec/distributed/test_utils/test_sharding.py | samiwilf/torchrec | 50ff0973d5d01ec80fe36ba5f1d524c92c799836 | [
"BSD-3-Clause"
] | null | null | null | torchrec/distributed/test_utils/test_sharding.py | samiwilf/torchrec | 50ff0973d5d01ec80fe36ba5f1d524c92c799836 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
from enum import Enum
from typing import cast, Dict, List, Optional, Tuple, Union
import torch
import torch.distributed as dist
import torch.nn as nn
from fbgemm_gpu.split_embedding_configs import EmbOptimType
from torchrec.distributed.embedding_types import EmbeddingTableConfig
from torchrec.distributed.model_parallel import DistributedModelParallel
from torchrec.distributed.planner import (
EmbeddingShardingPlanner,
ParameterConstraints,
Topology,
)
from torchrec.distributed.test_utils.multi_process import MultiProcessContext
from torchrec.distributed.test_utils.test_model import (
ModelInput,
TestEBCSharder,
TestEBSharder,
TestETCSharder,
TestETSharder,
TestSparseNNBase,
)
from torchrec.distributed.types import (
ModuleSharder,
ShardedTensor,
ShardingEnv,
ShardingPlan,
ShardingType,
)
from torchrec.modules.embedding_configs import BaseEmbeddingConfig
from torchrec.optim.keyed import CombinedOptimizer, KeyedOptimizerWrapper
class SharderType(Enum):
    """String identifiers naming which test sharder flavor to construct.

    The values are matched against the ``sharder_type`` argument of
    ``create_test_sharder``.
    """

    EMBEDDING_BAG = "embedding_bag"
    EMBEDDING_BAG_COLLECTION = "embedding_bag_collection"
    EMBEDDING_TOWER = "embedding_tower"
    EMBEDDING_TOWER_COLLECTION = "embedding_tower_collection"
def create_test_sharder(
    sharder_type: str, sharding_type: str, kernel_type: str
) -> Union[TestEBSharder, TestEBCSharder, TestETSharder, TestETCSharder]:
    """Instantiate the test sharder named by *sharder_type*.

    Every sharder is built with the requested sharding type and compute
    kernel, plus a fixed fused learning rate of 0.1.

    Raises:
        ValueError: if *sharder_type* does not name a known sharder.
    """
    sharder_classes = {
        SharderType.EMBEDDING_BAG.value: TestEBSharder,
        SharderType.EMBEDDING_BAG_COLLECTION.value: TestEBCSharder,
        SharderType.EMBEDDING_TOWER.value: TestETSharder,
        SharderType.EMBEDDING_TOWER_COLLECTION.value: TestETCSharder,
    }
    if sharder_type not in sharder_classes:
        raise ValueError(f"Sharder not supported {sharder_type}")
    return sharder_classes[sharder_type](
        sharding_type, kernel_type, {"learning_rate": 0.1}
    )
def generate_inputs(
    world_size: int,
    tables: List[EmbeddingTableConfig],
    weighted_tables: Optional[List[EmbeddingTableConfig]] = None,
    batch_size: int = 4,
    num_float_features: int = 16,
) -> Tuple[ModelInput, List[ModelInput]]:
    """Generate one global batch plus one local batch per rank via
    ``ModelInput.generate``."""
    if weighted_tables is None:
        weighted_tables = []
    return ModelInput.generate(
        batch_size=batch_size,
        world_size=world_size,
        num_float_features=num_float_features,
        tables=tables,
        weighted_tables=weighted_tables,
    )
def gen_model_and_input(
    model_class: TestSparseNNBase,
    tables: List[EmbeddingTableConfig],
    embedding_groups: Dict[str, List[str]],
    world_size: int,
    weighted_tables: Optional[List[EmbeddingTableConfig]] = None,
    num_float_features: int = 16,
    dense_device: Optional[torch.device] = None,
    sparse_device: Optional[torch.device] = None,
) -> Tuple[nn.Module, List[Tuple[ModelInput, List[ModelInput]]]]:
    """Build an unsharded test model plus a matching (global, per-rank) batch.

    The RNG is seeded first so every caller constructs identical weights and
    inputs.
    """
    torch.manual_seed(0)
    model = model_class(
        tables=cast(List[BaseEmbeddingConfig], tables),
        num_float_features=num_float_features,
        weighted_tables=cast(List[BaseEmbeddingConfig], weighted_tables),
        embedding_groups=embedding_groups,
        dense_device=dense_device,
        sparse_device=sparse_device,
    )
    batch = generate_inputs(
        world_size=world_size,
        tables=tables,
        weighted_tables=weighted_tables,
        num_float_features=num_float_features,
    )
    return model, [batch]
def copy_state_dict(
    loc: Dict[str, Union[torch.Tensor, ShardedTensor]],
    glob: Dict[str, torch.Tensor],
) -> None:
    """Copy parameters from the (unsharded) *glob* state dict into *loc*.

    When a destination entry is a ``ShardedTensor``, each of its local shards
    receives the matching slice of the global tensor, located via the shard's
    metadata offsets.  Only 1-D and 2-D tensors are supported.
    """
    for name, tensor in loc.items():
        assert name in glob
        global_tensor = glob[name]
        if isinstance(global_tensor, ShardedTensor):
            # Source itself is sharded: use its first local shard's payload.
            global_tensor = global_tensor.local_shards()[0].tensor

        if isinstance(tensor, ShardedTensor):
            for local_shard in tensor.local_shards():
                assert global_tensor.ndim == local_shard.tensor.ndim
                shard_meta = local_shard.metadata
                # Detach so slicing/copying does not track gradients.
                t = global_tensor.detach()
                # Slice out [offset : offset + shard_size] along each dim.
                if t.ndim == 1:
                    t = t[
                        shard_meta.shard_offsets[0] : shard_meta.shard_offsets[0]
                        + local_shard.tensor.shape[0]
                    ]
                elif t.ndim == 2:
                    t = t[
                        shard_meta.shard_offsets[0] : shard_meta.shard_offsets[0]
                        + local_shard.tensor.shape[0],
                        shard_meta.shard_offsets[1] : shard_meta.shard_offsets[1]
                        + local_shard.tensor.shape[1],
                    ]
                else:
                    raise ValueError("Tensors with ndim > 2 are not supported")
                local_shard.tensor.copy_(t)
        else:
            # Plain tensor destination: straight copy.
            tensor.copy_(global_tensor)
def sharding_single_rank_test(
    rank: int,
    world_size: int,
    model_class: TestSparseNNBase,
    embedding_groups: Dict[str, List[str]],
    tables: List[EmbeddingTableConfig],
    sharders: List[ModuleSharder[nn.Module]],
    backend: str,
    optim: EmbOptimType,
    weighted_tables: Optional[List[EmbeddingTableConfig]] = None,
    constraints: Optional[Dict[str, ParameterConstraints]] = None,
    local_size: Optional[int] = None,
) -> None:
    """Per-rank body of a sharding correctness test.

    Builds an unsharded "global" model and a sharded "local" copy with the
    same weights, runs one SGD step on each, all-gathers the sharded
    predictions, and asserts they match the unsharded model's predictions.
    """
    with MultiProcessContext(rank, world_size, backend, local_size) as ctx:
        # Generate model & inputs.
        (global_model, inputs) = gen_model_and_input(
            model_class=model_class,
            tables=tables,
            weighted_tables=weighted_tables,
            embedding_groups=embedding_groups,
            world_size=world_size,
            num_float_features=16,
        )
        global_model = global_model.to(ctx.device)
        global_input = inputs[0][0].to(ctx.device)
        local_input = inputs[0][1][rank].to(ctx.device)

        # Shard model: sparse modules start on the meta device and are
        # materialized by DistributedModelParallel below.
        local_model = model_class(
            tables=cast(List[BaseEmbeddingConfig], tables),
            weighted_tables=cast(List[BaseEmbeddingConfig], weighted_tables),
            embedding_groups=embedding_groups,
            dense_device=ctx.device,
            sparse_device=torch.device("meta"),
            num_float_features=16,
        )
        planner = EmbeddingShardingPlanner(
            topology=Topology(
                world_size, ctx.device.type, local_world_size=ctx.local_size
            ),
            constraints=constraints,
        )
        plan: ShardingPlan = planner.collective_plan(local_model, sharders, ctx.pg)

        """
        Simulating multiple nodes on a single node. However, metadata information and
        tensor placement must still be consistent. Here we overwrite this to do so.

        NOTE:
            inter/intra process groups should still behave as expected.

        TODO: may need to add some checks that only does this if we're running on a
        single GPU (which should be most cases).
        """
        for group in plan.plan:
            for _, parameter_sharding in plan.plan[group].items():
                if (
                    parameter_sharding.sharding_type
                    in {
                        ShardingType.TABLE_ROW_WISE.value,
                        ShardingType.TABLE_COLUMN_WISE.value,
                    }
                    and ctx.device.type != "cpu"
                ):
                    sharding_spec = parameter_sharding.sharding_spec
                    if sharding_spec is not None:
                        # pyre-ignore
                        for shard in sharding_spec.shards:
                            placement = shard.placement
                            # NOTE(review): this rebinds the function
                            # parameter ``rank``; harmless today because the
                            # parameter is not read after this loop, but
                            # fragile.
                            rank: Optional[int] = placement.rank()
                            assert rank is not None
                            shard.placement = torch.distributed._remote_device(
                                f"rank:{rank}/cuda:{rank}"
                            )

        local_model = DistributedModelParallel(
            local_model,
            env=ShardingEnv.from_process_group(ctx.pg),
            plan=plan,
            sharders=sharders,
            device=ctx.device,
        )

        # Dense parameters use a plain SGD; sparse ones use the fused
        # optimizer owned by the sharded model.
        dense_optim = KeyedOptimizerWrapper(
            dict(local_model.named_parameters()),
            lambda params: torch.optim.SGD(params, lr=0.1),
        )
        local_opt = CombinedOptimizer([local_model.fused_optimizer, dense_optim])

        # Load model state from the global model.
        copy_state_dict(local_model.state_dict(), global_model.state_dict())

        # Run a single training step of the sharded model.
        local_pred = gen_full_pred_after_one_step(local_model, local_opt, local_input)

        all_local_pred = []
        for _ in range(world_size):
            all_local_pred.append(torch.empty_like(local_pred))
        dist.all_gather(all_local_pred, local_pred, group=ctx.pg)

        # Run second training step of the unsharded model.
        assert optim == EmbOptimType.EXACT_SGD
        global_opt = torch.optim.SGD(global_model.parameters(), lr=0.1)

        global_pred = gen_full_pred_after_one_step(
            global_model, global_opt, global_input
        )

        # Compare predictions of sharded vs unsharded models.
        torch.testing.assert_allclose(global_pred, torch.cat(all_local_pred))
def gen_full_pred_after_one_step(
    model: nn.Module,
    opt: torch.optim.Optimizer,
    input: ModelInput,
) -> torch.Tensor:
    """Apply one optimizer step to *model* on *input*, then return the
    model's eval-mode prediction on the same input.

    The model is expected to return ``(loss, _)`` in training mode and the
    prediction tensor in eval mode.
    """
    # One optimization step on the training-mode loss.
    model.train(True)
    opt.zero_grad()
    loss, _ = model(input)
    loss.backward()
    # pyre-fixme[20]: Argument `closure` expected.
    opt.step()

    # Inference pass with the freshly updated weights.
    model.train(False)
    with torch.no_grad():
        prediction = model(input)
    return prediction
| 36.705455 | 86 | 0.645928 |
eed876b1554e0a4c99de5f131d255d84ecaa3345 | 78 | py | Python | lyrebird/plugins/__init__.py | dodosophia/lyrebird | b3c3d6e0f0f47b8df0cc119a1e5d30763371fa3d | [
"MIT"
] | 1 | 2020-03-18T05:56:53.000Z | 2020-03-18T05:56:53.000Z | lyrebird/plugins/__init__.py | robert0825/lyrebird | 18bcbd2030bd4a506d1f519ae0316d8fc667db4f | [
"MIT"
] | null | null | null | lyrebird/plugins/__init__.py | robert0825/lyrebird | 18bcbd2030bd4a506d1f519ae0316d8fc667db4f | [
"MIT"
] | 1 | 2019-03-11T09:25:36.000Z | 2019-03-11T09:25:36.000Z | from .plugin_loader import manifest
from .plugin_manager import PluginManager
| 26 | 41 | 0.871795 |
eed9c6dd573fe2bb3afc30e2202d6ac77f9cb554 | 330 | py | Python | examples/optimizers/science/create_hgso.py | anukaal/opytimizer | 5f1ccc0da80e6a4cabd99578fa24cf4f6466f9b9 | [
"Apache-2.0"
] | 528 | 2018-10-01T20:00:09.000Z | 2022-03-27T11:15:31.000Z | examples/optimizers/science/create_hgso.py | anukaal/opytimizer | 5f1ccc0da80e6a4cabd99578fa24cf4f6466f9b9 | [
"Apache-2.0"
] | 17 | 2019-10-30T00:47:03.000Z | 2022-03-21T11:39:28.000Z | examples/optimizers/science/create_hgso.py | anukaal/opytimizer | 5f1ccc0da80e6a4cabd99578fa24cf4f6466f9b9 | [
"Apache-2.0"
] | 35 | 2018-10-01T20:03:23.000Z | 2022-03-20T03:54:15.000Z | from opytimizer.optimizers.science import HGSO
# One should declare a hyperparameters object based
# on the desired algorithm that will be used
params = {
'n_clusters': 2,
'l1': 0.0005,
'l2': 100,
'l3': 0.001,
'alpha': 1.0,
'beta': 1.0,
'K': 1.0
}
# Creates an HGSO optimizer
o = HGSO(params=params)
| 19.411765 | 51 | 0.633333 |
eedc7a11ee4379d86b302ba06badd9a7738a9e2e | 63 | py | Python | training_tools/architectures/image_generation/__init__.py | kylrth/training_tools | eccb19a28f65a83e40642c9761ccb1dd090a3e5d | [
"MIT"
] | null | null | null | training_tools/architectures/image_generation/__init__.py | kylrth/training_tools | eccb19a28f65a83e40642c9761ccb1dd090a3e5d | [
"MIT"
] | null | null | null | training_tools/architectures/image_generation/__init__.py | kylrth/training_tools | eccb19a28f65a83e40642c9761ccb1dd090a3e5d | [
"MIT"
] | null | null | null | """Image generating architectures.
Kyle Roth. 2019-07-10.
"""
| 12.6 | 34 | 0.698413 |
eeddefbcddacdcd31162977b74fe0703603b2f9f | 2,668 | py | Python | adverse/urls.py | michael-xander/communique-webapp | 85b450d7f6d0313c5e5ef53a262a850b7e93c3d6 | [
"MIT"
] | null | null | null | adverse/urls.py | michael-xander/communique-webapp | 85b450d7f6d0313c5e5ef53a262a850b7e93c3d6 | [
"MIT"
] | null | null | null | adverse/urls.py | michael-xander/communique-webapp | 85b450d7f6d0313c5e5ef53a262a850b7e93c3d6 | [
"MIT"
] | null | null | null | from django.conf.urls import url
from .views import (EmergencyContactCreateView, EmergencyContactUpdateView, EmergencyContactDeleteView,
EmergencyContactDetailView, EmergencyContactListView, AdverseEventTypeUpdateView,
AdverseEventTypeCreateView, AdverseEventTypeDeleteView, AdverseEventTypeDetailView,
AdverseEventTypeListView, AdverseEventCreateView, AdverseEventDeleteView, AdverseEventDetailView,
AdverseEventListView, AdverseEventUpdateView, AdverseEventExportFormView, AdverseEventExportListView)
urlpatterns = [
url(r'^emergency-contacts/$', EmergencyContactListView.as_view(), name='adverse_emergency_contact_list'),
url(r'^emergency-contacts/create/$', EmergencyContactCreateView.as_view(), name='adverse_emergency_contact_create'),
url(r'^emergency-contacts/(?P<pk>[0-9]+)/$', EmergencyContactDetailView.as_view(),
name='adverse_emergency_contact_detail'),
url(r'^emergency-contacts/(?P<pk>[0-9]+)/update/$', EmergencyContactUpdateView.as_view(),
name='adverse_emergency_contact_update'),
url(r'^emergency-contacts/(?P<pk>[0-9]+)/delete/$', EmergencyContactDeleteView.as_view(),
name='adverse_emergency_contact_delete'),
url(r'^event-types/$', AdverseEventTypeListView.as_view(), name='adverse_event_type_list'),
url(r'^event-types/create/$', AdverseEventTypeCreateView.as_view(), name='adverse_event_type_create'),
url(r'^event-types/(?P<pk>[0-9]+)/$', AdverseEventTypeDetailView.as_view(), name='adverse_event_type_detail'),
url(r'^event-types/(?P<pk>[0-9]+)/update/$', AdverseEventTypeUpdateView.as_view(), name='adverse_event_type_update'),
url(r'^event-types/(?P<pk>[0-9]+)/delete/$', AdverseEventTypeDeleteView.as_view(), name='adverse_event_type_delete'),
url(r'^events/$', AdverseEventListView.as_view(), name='adverse_event_list'),
url(r'^events/create/$', AdverseEventCreateView.as_view(), name='adverse_event_create'),
url(r'^events/(?P<pk>[0-9]+)/$', AdverseEventDetailView.as_view(), name='adverse_event_detail'),
url(r'^events/(?P<pk>[0-9]+)/update/$', AdverseEventUpdateView.as_view(), name='adverse_event_update'),
url(r'^events/(?P<pk>[0-9]+)/delete/$', AdverseEventDeleteView.as_view(), name='adverse_event_delete'),
url(r'^events/export/$', AdverseEventExportFormView.as_view(), name='adverse_event_export_form'),
url(r'^events/export/(?P<start_year>[0-9]{4})-(?P<start_month>[0-9]{2})-(?P<start_day>[0-9]{2})/(?P<end_year>[0-9]{4})-(?P<end_month>[0-9]{2})-(?P<end_day>[0-9]{2})/$',
AdverseEventExportListView.as_view(), name='adverse_event_export_list'),
] | 86.064516 | 172 | 0.725262 |
eedf4a520738f711e0b9af209fc2128b16e46db5 | 1,133 | py | Python | qbflask/models.py | kevindkeogh/qbootstrapper-flask | 490906837d6522e3669193e5097bd33e1f953451 | [
"MIT"
] | 1 | 2017-04-27T08:59:01.000Z | 2017-04-27T08:59:01.000Z | qbflask/models.py | kevindkeogh/qbootstrapper-flask | 490906837d6522e3669193e5097bd33e1f953451 | [
"MIT"
] | null | null | null | qbflask/models.py | kevindkeogh/qbootstrapper-flask | 490906837d6522e3669193e5097bd33e1f953451 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
'''Handles all database interactions for qbootstrapper
'''
from flask import g
from qbflask import app
import sqlite3
def connect_db():
    """Open a new SQLite connection to the configured database file.

    Rows come back as :class:`sqlite3.Row` objects so columns can be
    accessed by name.
    """
    connection = sqlite3.connect(app.config['DATABASE'])
    connection.row_factory = sqlite3.Row
    return connection
def get_db():
    """Return the request-scoped database connection.

    The connection is cached on the application-context object ``g`` so a
    single request reuses one connection.
    """
    if hasattr(g, 'db'):
        return g.db
    g.db = connect_db()
    return g.db
@app.teardown_appcontext
def close_db(error):
    """Teardown hook: close the request's database connection, if one was
    opened."""
    if not hasattr(g, 'db'):
        return
    g.db.close()
def init_db():
    """Create the database tables by executing the bundled schema script."""
    db = get_db()
    with app.open_resource('schema.sql', mode='r') as schema_file:
        db.cursor().executescript(schema_file.read())
    db.commit()
@app.cli.command('initdb')
def initdb_command():
    '''Flask command to initialize the database (and tables).

    Invoked from the command line as ``flask initdb``.
    '''
    init_db()
    print('Initialized the database')
eee0e160c355877e9ab99acba82ef48b402d10db | 2,795 | py | Python | termlog/interpret.py | brianbruggeman/termlog | 361883f790ab6fae158095585370672e3ca8e354 | [
"MIT"
] | 1 | 2019-11-22T09:32:25.000Z | 2019-11-22T09:32:25.000Z | termlog/interpret.py | brianbruggeman/termlog | 361883f790ab6fae158095585370672e3ca8e354 | [
"MIT"
] | null | null | null | termlog/interpret.py | brianbruggeman/termlog | 361883f790ab6fae158095585370672e3ca8e354 | [
"MIT"
] | null | null | null | """Interprets each AST node"""
import ast
import textwrap
from typing import Any, Dict, List
def extract_fields(code: str) -> Dict[str, Any]:
    """Extract the variable names referenced by a block of code.

    Performs a breadth-first walk of the parsed AST and records every
    ``ast.Name`` encountered.

    Args:
        code: the code block to parse

    Returns:
        A dict mapping each referenced variable name to ``None``.
    """
    # Parsing expects that the code have no indentation
    code = textwrap.dedent(code)
    parsed = ast.parse(code)
    queue: List[Any] = parsed.body
    # String literals seen along the way; collected but not returned.
    data: List[str] = []
    fields: Dict[str, Any] = {}

    # Node kinds that are skipped silently.  Built once here instead of on
    # every loop iteration as before (they are constants).
    ignored = (ast.ImportFrom, ast.Import, ast.Assert, ast.Raise)
    unhandled = (
        ast.Dict,
        ast.DictComp,
        ast.Expr,
        ast.GeneratorExp,
        ast.For,
        ast.List,
        ast.ListComp,
        ast.Return,
        ast.Subscript,
        ast.Try,
        ast.With,
    )

    # Grab field names to get data needed for message
    count = -1
    while queue:
        count += 1
        node = queue.pop(0)
        if isinstance(node, (list, tuple)):
            queue.extend(node)
        elif isinstance(
            node,
            (
                ast.Expr,
                ast.FormattedValue,
                ast.Assign,
                ast.Starred,
                ast.Attribute,
                ast.Subscript,
                ast.AnnAssign,
            ),
        ):
            # Fixed: an un-initialized annotated assignment (``x: int``) has
            # value None, which previously reached the queue and triggered a
            # spurious "Unhandled ast.Node" warning.
            if node.value is not None:
                queue.append(node.value)
        elif isinstance(node, ast.Call):
            queue.extend(node.args)
        elif isinstance(node, (ast.JoinedStr, ast.BoolOp)):
            queue.extend(node.values)
        elif isinstance(node, ast.Constant):
            # Fixed: string literals were matched via ast.Str, which the
            # parser stopped emitting in Python 3.8 and which was removed
            # entirely in Python 3.12; ast.Constant covers all versions.
            if isinstance(node.value, str):
                data.append(node.value)
        elif isinstance(node, ast.Name):
            fields.update({node.id: None})
        elif isinstance(node, ast.BinOp):
            queue.append(node.left)
            queue.append(node.right)
        elif isinstance(node, ast.FunctionDef):
            queue.extend(node.body)
        elif isinstance(node, (ast.If, ast.IfExp)):
            # NOTE(review): only the branch bodies are walked; names that
            # appear solely in the condition (``node.test``) are not
            # reported -- confirm whether that is intended.
            queue.append(node.body)
            queue.append(node.orelse)
        elif isinstance(node, ignored):
            pass
        elif isinstance(node, unhandled):
            # print("Termlog Warning [Debug ast.Node]:", node, ", ".join([d for d in dir(node) if not d.startswith("_")]))
            pass
        else:
            print("Termlog Warning [Unhandled ast.Node]:", node, ", ".join([d for d in dir(node) if not d.startswith("_")]))
        if count > 4096:  # to prevent a runaway queue
            break
    return fields
eee2473186eac206c8388e1f0a6f771a7776dd49 | 4,757 | py | Python | python3/koans/about_strings.py | PatrickBoynton/python_koans | 12243005b6ca5145a3989eadc42d1cca122fe9a6 | [
"MIT"
] | null | null | null | python3/koans/about_strings.py | PatrickBoynton/python_koans | 12243005b6ca5145a3989eadc42d1cca122fe9a6 | [
"MIT"
] | null | null | null | python3/koans/about_strings.py | PatrickBoynton/python_koans | 12243005b6ca5145a3989eadc42d1cca122fe9a6 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutStrings(Koan):
    """Koan exercises about Python string literals and basic operations."""

    def test_double_quoted_strings_are_strings(self):
        string = "Hello, world."
        # Double-quoted literals produce plain str objects.
        self.assertEqual(True, isinstance(string, str))

    def test_single_quoted_strings_are_also_strings(self):
        string = 'Goodbye, world.'
        # Single quotes build exactly the same type: str.
        self.assertEqual(True, isinstance(string, str))

    def test_triple_quote_strings_are_also_strings(self):
        string = """Howdy, world!"""
        # Triple-double-quoted literals are still str.
        self.assertEqual(True, isinstance(string, str))

    def test_triple_single_quotes_work_too(self):
        # Triple single quotes behave just like triple double quotes.
        string = '''Bonjour tout le monde!'''
        self.assertEqual(True, isinstance(string, str))

    def test_raw_strings_are_also_strings(self):
        string = r"Konnichi wa, world!"
        # The r prefix only changes escape handling, not the type.
        self.assertEqual(True, isinstance(string, str))

    def test_use_single_quotes_to_create_string_with_double_quotes(self):
        string = 'He said, "Go Away."'
        # Single-quoted literals may contain unescaped double quotes.
        self.assertEqual('He said, "Go Away."', string)

    def test_use_double_quotes_to_create_strings_with_single_quotes(self):
        string = "Don't"
        # Double-quoted literals may contain unescaped apostrophes.
        self.assertEqual("Don't", string)

    def test_use_backslash_for_escaping_quotes_in_strings(self):
        a = "He said, \"Don't\""
        b = 'He said, "Don\'t"'
        # Both spellings denote the identical sequence of characters.
        self.assertEqual(True, (a == b))

    def test_use_backslash_at_the_end_of_a_line_to_continue_onto_the_next_line(
            self):
        string = "It was the best of times,\n\
It was the worst of times."
        # 25 chars + one '\n' + 26 chars; the line-continuation backslash
        # itself contributes nothing to the string.
        self.assertEqual(52, len(string))

    def test_triple_quoted_strings_can_span_lines(self):
        string = """
Howdy,
world!
"""
        # Leading newline + "Howdy,\n" + "world!\n" = 1 + 7 + 7 = 15.
        self.assertEqual(15, len(string))

    def test_triple_quoted_strings_need_less_escaping(self):
        a = "Hello \"world\"."
        b = """Hello "world"."""
        # Inside triple quotes, double quotes need no escaping.
        self.assertEqual(True, (a == b))

    def test_escaping_quotes_at_the_end_of_triple_quoted_string(self):
        string = """Hello "world\""""
        # The final inner quote is escaped so it is not read as the closer.
        self.assertEqual('Hello "world"', string)

    def test_plus_concatenates_strings(self):
        string = "Hello, " + "world"
        self.assertEqual('Hello, world', string)

    def test_adjacent_strings_are_concatenated_automatically(self):
        string = "Hello" ", " "world"
        # Adjacent literals are joined at compile time; no + is needed.
        self.assertEqual('Hello, world', string)

    def test_plus_will_not_modify_original_strings(self):
        hi = "Hello, "
        there = "world"
        string = hi + there
        # Fixed: assert the originals' actual values. The previous
        # assertEqual(hi, hi) / assertEqual(there, there) were tautologies
        # that could never fail, so they proved nothing.
        self.assertEqual("Hello, ", hi)
        self.assertEqual("world", there)

    def test_plus_equals_will_append_to_end_of_string(self):
        hi = "Hello, "
        there = "world"
        hi += there
        self.assertEqual("Hello, world", hi)

    def test_plus_equals_also_leaves_original_string_unmodified(self):
        original = "Hello, "
        hi = original
        there = "world"
        hi += there
        # += rebinds hi to a brand-new string; original is untouched.
        self.assertEqual("Hello, ", original)

    def test_most_strings_interpret_escape_characters(self):
        string = "\n"
        self.assertEqual('\n', string)
        self.assertEqual("""\n""", string)
        # The escape sequence denotes a single newline character.
        self.assertEqual(1, len(string))
| 36.875969 | 81 | 0.644103 |
eee41fee815cbfd9d791866ac61cc5f679e6a33c | 630 | py | Python | acmicpc/2798/2798.py | love-adela/algorithm | 4ccd02173c96f8369962f1fd4e5166a221690fa2 | [
"MIT"
] | 3 | 2019-03-09T05:19:23.000Z | 2019-04-06T09:26:36.000Z | acmicpc/2798/2798.py | love-adela/algorithm | 4ccd02173c96f8369962f1fd4e5166a221690fa2 | [
"MIT"
] | 1 | 2020-02-23T10:38:04.000Z | 2020-02-23T10:38:04.000Z | acmicpc/2798/2798.py | love-adela/algorithm | 4ccd02173c96f8369962f1fd4e5166a221690fa2 | [
"MIT"
] | 1 | 2019-05-22T13:47:53.000Z | 2019-05-22T13:47:53.000Z | params = [int(x) for x in input().split()]
# params was parsed from the first input line: the card count and target M.
target = params[-1]
cards = sorted(int(tok) for tok in input().split())
best = 0
n = len(cards)
# Brute-force every 3-card combination, keeping the largest sum <= target.
for a in range(n):
    for b in range(a + 1, n):
        for c in range(b + 1, n):
            total = cards[a] + cards[b] + cards[c]
            if total > target:
                # cards is sorted, so any later third card only grows the sum.
                break
            if total > best:
                # Equivalent to the original: target - total < target - best.
                best = total
print(best)
| 39.375 | 96 | 0.603175 |
eee600143ae9d2506a33cc7fd8cd95666e09cf2a | 453 | py | Python | 2/2.py | pyl/AdventOfCode | 575a8ba2eb6bd597201986444a799a4384ac3983 | [
"MIT"
] | null | null | null | 2/2.py | pyl/AdventOfCode | 575a8ba2eb6bd597201986444a799a4384ac3983 | [
"MIT"
] | null | null | null | 2/2.py | pyl/AdventOfCode | 575a8ba2eb6bd597201986444a799a4384ac3983 | [
"MIT"
] | null | null | null | import os
import re
# from .m.red import readInput
# Split each "lo-hi ch: password" policy line into its four fields.
raw_lines = open("2\\input.txt").read().split('\n')
policies = [list(filter(None, re.split("[- :]", line))) for line in raw_lines]
# Drop the empty record produced by the file's trailing newline.
policies.pop()
valid = 0
for entry in policies:
    print(entry)
    first_hit = entry[3][int(entry[0]) - 1] == entry[2]
    second_hit = entry[3][int(entry[1]) - 1] == entry[2]
    # Valid when exactly one of the two positions holds the policy character
    # (same truth table as the original chars-differ-and-one-matches test).
    if first_hit != second_hit:
        print("found" + ' '.join(entry))
        valid += 1
print(valid)
| 15.62069 | 63 | 0.527594 |
eee70444919e0996101bd470d17bbcdf1da08d3b | 284 | py | Python | python/multi-2.6/simple.py | trammell/test | ccac5e1dac947032e64d813e53cb961417a58d05 | [
"Artistic-2.0"
] | null | null | null | python/multi-2.6/simple.py | trammell/test | ccac5e1dac947032e64d813e53cb961417a58d05 | [
"Artistic-2.0"
] | null | null | null | python/multi-2.6/simple.py | trammell/test | ccac5e1dac947032e64d813e53cb961417a58d05 | [
"Artistic-2.0"
] | null | null | null | #!/usr/bin/env python2.4
"""
"""
class MyClass(object):
    # Demonstrates cooperative super() behaviour across Python versions:
    # object.__init__ stopped accepting extra arguments, so the 2.4-era
    # call is kept only as a comment.
    def __init__(self, a, b):
        print 'MyClass.__init__', a, b
        #super(MyClass, self).__init__(a, b) # works in 2.4
        super(MyClass, self).__init__() # works in 2.6


# Instantiating at import time exercises __init__ and prints its arguments.
obj = MyClass(6, 7)
| 18.933333 | 66 | 0.542254 |
eee72143266c2f7d061e2031c509c2b48483a480 | 1,183 | py | Python | dd3/visitor/views.py | za/dd3 | b70d795fb3bd3ff805696b632beabf6d1f342389 | [
"Apache-2.0"
] | null | null | null | dd3/visitor/views.py | za/dd3 | b70d795fb3bd3ff805696b632beabf6d1f342389 | [
"Apache-2.0"
] | null | null | null | dd3/visitor/views.py | za/dd3 | b70d795fb3bd3ff805696b632beabf6d1f342389 | [
"Apache-2.0"
] | null | null | null | from django.shortcuts import render
from django.http import JsonResponse
from django.db import connections
from django.db.models import Count
from django.contrib import admin
from visitor.models import Apache
import json
admin.site.register(Apache)
# Create your views here.
def text(request):
    """Render index.html with every stored Apache log entry."""
    all_logs = Apache.objects.all()
    return render(request, 'index.html', {'apaches': all_logs})


def render_javascript(request):
    """Render lists.html with a fixed sample of date/visit points."""
    sample_points = [
        {"date": "2015-11-28", "visit": 10},
        {"date": "2015-10-09", "visit": 8},
        {"date": "2015-11-01", "visit": 25},
    ]
    return render(request, 'lists.html', {'lists_as_json': sample_points})
def render_javascript2(request):
    """Render logs.html with every Apache row as a {'date', 'visit'} dict."""
    # Hoisted out of the loop: the strftime format is loop-invariant.
    dateformat = "%Y-%m-%d %H:%M:%S"  # e.g. 2015-11-21 18:36:00
    alist = [
        {'date': apache.date.strftime(dateformat), 'visit': apache.visit}
        for apache in Apache.objects.all()
    ]
    return render(request, 'logs.html', {'data_as_json': alist})


def render_javascript3(request):
    """Render the static scatterplot page (no context data needed)."""
    return render(request, 'scatterplot.html')
| 26.886364 | 55 | 0.718512 |
eee85fe54b0a7025f321a3dcd3adecc8d263a047 | 2,451 | py | Python | 02_test_and_prototype/CBH file subset tests.py | pnorton-usgs/notebooks | 17a38ecd3f3c052b9bd785c2e53e16a9082d1e71 | [
"MIT"
] | null | null | null | 02_test_and_prototype/CBH file subset tests.py | pnorton-usgs/notebooks | 17a38ecd3f3c052b9bd785c2e53e16a9082d1e71 | [
"MIT"
] | null | null | null | 02_test_and_prototype/CBH file subset tests.py | pnorton-usgs/notebooks | 17a38ecd3f3c052b9bd785c2e53e16a9082d1e71 | [
"MIT"
] | null | null | null | # ---
# jupyter:
# jupytext:
# formats: ipynb,py:percent
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.3'
# jupytext_version: 1.13.7
# kernelspec:
# display_name: Python 2
# language: python
# name: python2
# ---
# %%
import pandas as pd
# %%
# %%
# Read the daymet tmin CBH file and rewrite its 3-line header so it
# declares a single HRU (the data subset written further below).
workdir = "/Users/pnorton/USGS/Projects/National_Hydrology_Model/regions/r10U/input/cbh"
filename = '%s/daymet_1980_2010_tmin.cbh' % workdir
missing = [-99.0, -999.0]

infile = open(filename, 'r')

fheader = ''
for ii in range(0,3):
    line = infile.readline()
    # Header line that names the variable carries the HRU count after it.
    if line[0:4] in ['prcp', 'tmax', 'tmin']:
        # Change the number of HRUs included to one
        numhru = int(line[5:])
        fheader += line[0:5] + ' 1\n'
    else:
        fheader += line
print fheader
print 'numhru:', numhru
# %%
# Read in the CBH data for the HRU we want to extract
hruindex = 1 # one-based hru index
# infile is already positioned past the 3-line header consumed above.
df1 = pd.read_csv(infile, sep=' ', skipinitialspace=True,
                  #usecols=[0, 1, 2, 3, 4, 5, hruindex+5],
                  header=None)
# df1 = pd.read_csv(infile, sep=r"\s*", engine='python',
#                   skiprows=3, usecols=[0, 1, 2, 3, 4, 5, hruindex+6],
#                   header=None)
infile.close()
df1.head(10)
# %%
# Columns 0-5 are date fields; column 8 is presumably the HRU of interest
# — TODO confirm against the CBH layout.
df1.loc[:,[0,1,2,8]]
# %%
# Write the subsetted CBH data out
outfile = open('crap.cbh', 'w')
outfile.write(fheader)
df1.to_csv(outfile, sep=' ', float_format='%0.4f', header=False, index=False)
outfile.close()
# %%
# %%
workdir = "/Users/pnorton/Projects/National_Hydrology_Model/tmp"
filename = '%s/daymet_1980_2011_prcp.cbh' % workdir
missing = [-99.0, -999.0]
infile = open(filename, 'r')
fheader = ''
for ii in range(0,3):
line = infile.readline()
if line[0:6] in ['precip', 'tmax', 'tmin']:
# Change the number of HRUs included to one
numhru = int(line[7:])
fheader += line[0:5] + ' 1\n'
else:
fheader += line
print fheader
print 'numhru:', numhru
# %%
df1 = pd.read_csv(infile, sep=' ', skipinitialspace=True,
#usecols=[0, 1, 2, 3, 4, 5, hruindex+5],
header=None)
# df1 = pd.read_csv(infile, sep=r"\s*", engine='python',
# skiprows=3, usecols=[0, 1, 2, 3, 4, 5, hruindex+6],
# header=None)
infile.close()
df1.head(10)
# %%
# Check for precip values less than 0.001
df2 = df1[df1.iloc[:,6:] < 0.001]
df2.sum().sum()
# %%
# %%
| 22.694444 | 88 | 0.575275 |
eee96c8768d0bd73bfcc0b80259c717f22d6398d | 655 | py | Python | tests/test_time_compare.py | ludwiglierhammer/pyhomogenize | 339cd823b0e8ce618f1b2e42a69c20fb92ca7485 | [
"MIT"
] | null | null | null | tests/test_time_compare.py | ludwiglierhammer/pyhomogenize | 339cd823b0e8ce618f1b2e42a69c20fb92ca7485 | [
"MIT"
] | null | null | null | tests/test_time_compare.py | ludwiglierhammer/pyhomogenize | 339cd823b0e8ce618f1b2e42a69c20fb92ca7485 | [
"MIT"
] | null | null | null |
import pytest
import pyhomogenize as pyh
from . import has_dask, requires_dask
from . import has_xarray, requires_xarray
from . import has_numpy, requires_numpy
def test_time_compare():
    """time_compare must yield a maximal intersection for ds pairs/lists."""
    file_a = pyh.test_netcdf[0]
    file_b = pyh.test_netcdf[2]
    control_a = pyh.time_control(file_a)
    control_b = pyh.time_control(file_b)
    # Bare datasets, a single list, and a mixed list/dataset call all work.
    assert pyh.time_compare(control_a.ds, control_b.ds).select_max_intersection()
    assert pyh.time_compare([control_a.ds, control_b.ds]).select_max_intersection()
    assert pyh.time_compare([control_a.ds], control_b.ds).select_max_intersection(output='test.nc')
| 34.473684 | 107 | 0.79084 |
eee9f9f542f197693a4587a809d1d13007ab6153 | 8,391 | py | Python | features/steps/zz_08_materials_steps.py | tewarfel/RayTracerChallenge_1 | 736cc5d159c267c9bcc14d42abb03eedc2f7e5f1 | [
"MIT"
] | 2 | 2020-05-13T20:54:50.000Z | 2021-06-06T03:37:41.000Z | features/steps/zz_08_materials_steps.py | tewarfel/RayTracerChallenge_1 | 736cc5d159c267c9bcc14d42abb03eedc2f7e5f1 | [
"MIT"
] | null | null | null | features/steps/zz_08_materials_steps.py | tewarfel/RayTracerChallenge_1 | 736cc5d159c267c9bcc14d42abb03eedc2f7e5f1 | [
"MIT"
] | null | null | null | from behave import *
from hamcrest import assert_that, equal_to
from vec3 import Vec3, vec3
from vec4 import Vec4, point, vector
from base import equal, normalize, transform, ray, lighting
import numpy as np
from shape import material, sphere, test_shape, normal_at, set_transform, intersect, glass_sphere, point_light
from base import render, translation, scaling, view_transform, world, camera, color, rotation_y, rotation_z, rotation_x, stripe_at, stripe_pattern
from parse_type import TypeBuilder
from step_helper import *
# parse_type choice parsers: make_choice() builds a parser that accepts
# only the listed literals, and register_type() makes it usable inside
# behave step patterns as {name:TypeName}.
valid_test_objects = ["light","m", "in_shadow"]
parse_test_object = TypeBuilder.make_choice(valid_test_objects)
register_type(TestObject=parse_test_object)

# Names of tuples (points/vectors/colors) tracked in context.tuple.
valid_test_variables = ["intensity", "position", "eyev", "normalv", "result", "c1", "c2"]
parse_test_variable = TypeBuilder.make_choice(valid_test_variables)
register_type(TestVariable=parse_test_variable)

# Attributes readable from a point light.
valid_light_elements = ["position", "intensity"]
parse_light_element = TypeBuilder.make_choice(valid_light_elements)
register_type(LightElement=parse_light_element)

# Attributes readable from a material.
valid_material_elements = ["color", "ambient", "diffuse", "specular", "shininess", "reflective", "transparency", "refractive_index", "pattern"]
parse_material_element = TypeBuilder.make_choice(valid_material_elements)
register_type(MaterialElement=parse_material_element)

# Boolean literals as they appear in feature files.
valid_boolean_values = ["true", "false"]
parse_boolean_value = TypeBuilder.make_choice(valid_boolean_values)
register_type(BooleanValue=parse_boolean_value)
@given("{item:TestVariable} ← color({r:g}, {g:g}, {b:g})")
def step_impl_color_assign(context, item, r, g, b):
    """Bind *item* to a color built from the three components."""
    ensure_context_has_tuple(context)
    red, green, blue = float(r), float(g), float(b)
    context.tuple[item] = color(red, green, blue)


@given("{item:TestVariable} ← point({x:g}, {y:g}, {z:g})")
def step_impl_point_assign_B(context, item, x, y, z):
    """Bind *item* to a point with the given coordinates."""
    ensure_context_has_tuple(context)
    context.tuple[item] = point(*(float(v) for v in (x, y, z)))


@given("{item:TestObject} ← true")
def step_impl_logic_assign_true(context, item):
    """Bind *item* to the literal boolean True."""
    ensure_context_has_dict(context)
    context.dict[item] = True


@given("{item:TestVariable} ← vector({x:g}, √{ynum:g}/{ydenom:g}, -√{znum:g}/{zdenom:g})")
def step_impl_vector_assign_B(context, item, x, ynum, ydenom, znum, zdenom):
    """Vector with +sqrt y component and -sqrt z component."""
    ensure_context_has_tuple(context)
    y_component = np.sqrt(float(ynum)) / float(ydenom)
    z_component = -np.sqrt(float(znum)) / float(zdenom)
    context.tuple[item] = vector(float(x), y_component, z_component)


@given("{item:TestVariable} ← vector({x:g}, {y:g}, -{z:g})")
def step_impl_vector_assign_C(context, item, x, y, z):
    """Vector whose z component is negated."""
    ensure_context_has_tuple(context)
    context.tuple[item] = vector(float(x), float(y), -float(z))


@given("{item:TestVariable} ← vector({x:g}, {y:g}, {z:g})")
def step_impl_vector_assign_D(context, item, x, y, z):
    """Plain vector with the given components."""
    ensure_context_has_tuple(context)
    context.tuple[item] = vector(*(float(v) for v in (x, y, z)))


@given("{item:TestVariable} ← vector({x:g}, -√{ynum:g}/{ydenom:g}, -√{znum:g}/{zdenom:g})")
def step_impl_vector_assign_E(context, item, x, ynum, ydenom, znum, zdenom):
    """Vector with negated sqrt fractions for both y and z."""
    ensure_context_has_tuple(context)
    y_component = -np.sqrt(float(ynum)) / float(ydenom)
    z_component = -np.sqrt(float(znum)) / float(zdenom)
    context.tuple[item] = vector(float(x), y_component, z_component)


@given("{item:TestObject} ← material()")
def step_impl_generic_material_given(context, item):
    """Bind *item* to a default material."""
    ensure_context_has_dict(context)
    context.dict[item] = material()


@given("{item:TestObject} ← point_light(point({px:g}, {py:g}, {pz:g}), color({red:g}, {green:g}, {blue:g}))")
def step_impl_point_light_for_materials(context, item, px, py, pz, red, green, blue):
    """Bind *item* to a point light at the given position/intensity."""
    ensure_context_has_dict(context)
    context.dict[item] = point_light(
        point(float(px), float(py), float(pz)),
        color(float(red), float(green), float(blue)))


@given("{item:TestObject}.pattern ← stripe_pattern(color({r1:g}, {g1:g}, {b1:g}), color({r2:g}, {g2:g}, {b2:g}))")
def step_set_background_color(context, item, r1, g1, b1, r2, g2, b2):
    """Attach a two-color stripe pattern to an existing material."""
    assert item in context.dict.keys()
    color_a = color(float(r1), float(g1), float(b1))
    color_b = color(float(r2), float(g2), float(b2))
    context.dict[str(item)].pattern = stripe_pattern(color_a, color_b)
@when("{item:TestVariable} ← lighting({material:TestObject}, {light:TestObject}, {point_position:TestVariable}, {eye_vector:TestVariable}, {normal_vector:TestVariable})")
def step_set_lighting_values(context, item, material, light, point_position, eye_vector, normal_vector):
    """Evaluate lighting() without a shadow flag and store the result."""
    assert material in context.dict.keys()
    assert light in context.dict.keys()
    for name in (point_position, eye_vector, normal_vector):
        assert name in context.tuple.keys()
    result = lighting(context.dict[str(material)], sphere(),
                      context.dict[str(light)],
                      context.tuple[str(point_position)],
                      context.tuple[str(eye_vector)],
                      context.tuple[str(normal_vector)])
    context.tuple[str(item)] = result


@when("{item:TestVariable} ← lighting({material:TestObject}, {light:TestObject}, point({px:g}, {py:g}, {pz:g}), {eye_vector:TestVariable}, {normal_vector:TestVariable}, {in_shadow:BooleanValue})")
def step_set_lighting_values_with_shadow_explicit_point(context, item, material, light, px, py, pz, eye_vector, normal_vector, in_shadow):
    """Evaluate lighting() at a literal point with a literal shadow flag."""
    assert material in context.dict.keys()
    assert light in context.dict.keys()
    assert eye_vector in context.tuple.keys()
    assert normal_vector in context.tuple.keys()
    shadow_flag = (in_shadow == "true")
    result = lighting(context.dict[str(material)], sphere(),
                      context.dict[str(light)],
                      point(float(px), float(py), float(pz)),
                      context.tuple[str(eye_vector)],
                      context.tuple[str(normal_vector)],
                      shadow_flag)
    context.tuple[str(item)] = result


@when("{item:TestVariable} ← lighting({material:TestObject}, {light:TestObject}, {point_position:TestVariable}, {eye_vector:TestVariable}, {normal_vector:TestVariable}, {in_shadow:TestObject})")
def step_set_lighting_values_with_shadow_defined_point(context, item, material, light, point_position, eye_vector, normal_vector, in_shadow):
    """Evaluate lighting() with point and shadow flag taken from context."""
    assert material in context.dict.keys()
    assert light in context.dict.keys()
    assert point_position in context.tuple.keys()
    assert eye_vector in context.tuple.keys()
    assert normal_vector in context.tuple.keys()
    assert in_shadow in context.dict.keys()
    result = lighting(context.dict[str(material)], sphere(),
                      context.dict[str(light)],
                      context.tuple[str(point_position)],
                      context.tuple[str(eye_vector)],
                      context.tuple[str(normal_vector)],
                      context.dict[str(in_shadow)])
    context.tuple[str(item)] = result
@then("{item:TestObject}.{element:MaterialElement} = color({red:g}, {green:g}, {blue:g})")
def step_impl_material_element_color(context, item, element, red, green, blue):
    """Check that the named material attribute equals the given color.

    Fixed: this and the next step were both named
    step_impl_ray_intersect_list_count (a copy-paste from another steps
    file), so the second definition shadowed the first at module level;
    the eval()-built attribute lookup is also replaced with getattr.
    """
    assert item in context.dict.keys()
    local_object = getattr(context.dict[str(item)], str(element))
    value = color(float(red), float(green), float(blue))
    assert equal(local_object, value)


@then("{item:TestObject}.{element:MaterialElement} = {value:g}")
def step_impl_material_element_value(context, item, element, value):
    """Check that the named scalar material attribute equals *value*."""
    assert item in context.dict.keys()
    local_object = getattr(context.dict[str(item)], str(element))
    value = float(value)
    assert equal(local_object, value)
@then("{item:TestVariable} = color({red:g}, {green:g}, {blue:g})")
def step_lighting_color_test(context, item, red, green, blue):
    """Compare a stored lighting result against an expected color."""
    assert item in context.tuple.keys()
    actual = context.tuple[str(item)]
    expected = color(float(red), float(green), float(blue))
    assert equal(actual, expected)
| 45.603261 | 196 | 0.725539 |
eeea795546b0f95cf627707162e00a3f25d014a4 | 2,073 | py | Python | wordfrequencies/wordfrequencies.py | chrisshiels/life | f6902ef656e0171c07eec3eb9343a275048ab849 | [
"MIT"
] | null | null | null | wordfrequencies/wordfrequencies.py | chrisshiels/life | f6902ef656e0171c07eec3eb9343a275048ab849 | [
"MIT"
] | null | null | null | wordfrequencies/wordfrequencies.py | chrisshiels/life | f6902ef656e0171c07eec3eb9343a275048ab849 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# 'wordfrequencies.py'.
# Chris Shiels.
import re
import sys
def pipemaybe(l):
    """Compose the callables in *l* into a None-short-circuiting pipeline.

    The returned function applies each step left to right; once any step
    yields None the remaining steps are skipped and None is returned.
    Rewritten as a loop: the bare ``reduce`` builtin no longer exists in
    Python 3, which made the original raise NameError at call time.
    """
    def internal(v):
        value = v
        for step in l:
            if value is None:
                return None
            value = step(value)
        return value
    return internal


def partial(f, *args):
    """Return *f* with *args* pre-bound (a local functools.partial)."""
    bound = args
    def internal(*rest):
        # Positional arguments bound at creation time come first.
        return f(*(bound + rest))
    return internal
def removepossessives(s):
    """Strip possessive 's markers before tokenising."""
    return s.replace('\'s', '')


def rewritenonalphanumerics(s):
    """Replace every non-word character in *s* with a space.

    Fixed: the pattern is now a raw string; ``'\\W'`` in a plain literal
    is an invalid escape sequence (DeprecationWarning, later an error).
    """
    return re.sub(r'\W', ' ', s)


def splitwords(s):
    """Split *s* on runs of whitespace into a list of tokens."""
    return s.split()


def lowercasewords(l):
    """Lower-case every word, returning a list.

    Fixed for Python 3: ``map(lambda ...)`` returns a lazy iterator there,
    so a comprehension keeps the Python-2 list contract.
    """
    return [e.lower() for e in l]
def dictfrequencies(l):
    """Count occurrences of each word, returning {word: count}.

    Rewritten as a plain loop: ``reduce`` is not a builtin in Python 3.
    """
    counts = {}
    for word in l:
        counts[word] = counts.get(word, 0) + 1
    return counts


def listfrequencies(d):
    """Flatten {word: count} into [{'word': w, 'count': c}, ...].

    The comprehension replaces a quadratic reduce-with-list-concat.
    """
    return [{'word': word, 'count': count} for word, count in d.items()]


def sortfrequencies(l):
    """Order entries by descending count, ties broken by ascending word.

    Rewritten with a key function: ``cmp`` and the positional comparator
    argument to ``sorted`` were both removed in Python 3.
    """
    return sorted(l, key=lambda e: (-e['count'], e['word']))
def outputfrequencies(stdout, l):
    """Write one 'count word' line per entry to *stdout*; returns 0.

    Rewritten with stdout.write(): ``print >> file`` is Python-2-only
    syntax that fails to compile on Python 3.
    """
    for e in l:
        stdout.write('%(count)s %(word)s\n' % {'count': e['count'], 'word': e['word']})
    return 0


def wordfrequencies(stdout, s):
    """Run the full counting pipeline over text *s*, printing the result.

    Returns the pipeline status (0 on success) or 1 if any stage
    short-circuited to None.
    """
    ret = pipemaybe([removepossessives,
                     rewritenonalphanumerics,
                     splitwords,
                     lowercasewords,
                     dictfrequencies,
                     listfrequencies,
                     sortfrequencies,
                     partial(outputfrequencies, stdout)])(s)
    # Fixed: identity comparison against None, not != None.
    if ret is not None:
        return ret
    return 1


def main(stdin, stdout, stderr, argv):
    """Process stdin when no files are named, otherwise each file in argv.

    Fixes: the per-file header now goes to the *stdout* parameter instead
    of the process stdout, files are closed even if processing raises,
    and the Python-2 print statement is gone.
    """
    if len(argv) == 0:
        return wordfrequencies(stdout, stdin.read())
    ret = 0
    for arg in argv:
        if len(argv) > 1:
            # Header separating the output of multiple files.
            stdout.write('\n%(arg)s:\n' % {'arg': arg})
        f = open(arg)
        try:
            ret = wordfrequencies(stdout, f.read())
        finally:
            f.close()
        if ret != 0:
            break
    return ret


if __name__ == "__main__":
    sys.exit(main(sys.stdin, sys.stdout, sys.stderr, sys.argv[1:]))
| 19.556604 | 78 | 0.545586 |
eeea7ce35f96919784a10c51746fa125d0fb04fb | 741 | py | Python | data/thread_generator.py | beesk135/ReID-Survey | d1467c0ce5d3ca78640196360a05df9ff9f9f42a | [
"MIT"
] | null | null | null | data/thread_generator.py | beesk135/ReID-Survey | d1467c0ce5d3ca78640196360a05df9ff9f9f42a | [
"MIT"
] | null | null | null | data/thread_generator.py | beesk135/ReID-Survey | d1467c0ce5d3ca78640196360a05df9ff9f9f42a | [
"MIT"
] | null | null | null | import threading
import time
import numpy as np
from collections import deque
class ThreadGenerator(threading.Thread):
    """Runs a generator on its own thread, pushing items into a bounded
    buffer that consumers drain via grab().

    Note: push/grab busy-wait with a tiny sleep instead of using a
    Condition; that design is kept as-is from the original.
    """

    def __init__(self, generator, max_queue_size=10):
        threading.Thread.__init__(self)
        # Fixed: the original assigned the ThreadGenerator *class* here,
        # so run() crashed on next(self.generator). Store the argument.
        self.generator = generator
        self.buffer = deque(maxlen=max_queue_size)
        self.max_queue_size = max_queue_size

    def push(self, X):
        """Busy-wait until there is room in the buffer, then append X."""
        while len(self.buffer) == self.max_queue_size:
            time.sleep(1e-6)
        self.buffer.append(X)

    def grab(self):
        """Busy-wait until an item is available, then pop and return it."""
        while len(self.buffer) <= 0:
            time.sleep(1e-6)
        data = self.buffer.popleft()
        return data

    def run(self):
        """Drain the wrapped generator into the buffer."""
        while True:
            try:
                data = next(self.generator)
            except StopIteration:
                # Fixed: end the thread cleanly when the source is
                # exhausted instead of dying on unhandled StopIteration.
                return
            self.push(data)
| 29.64 | 55 | 0.62753 |
eeecbdae984ff942e14cb18d12ef5612889a5ac7 | 81 | py | Python | pbs/apps.py | AliTATLICI/django-rest-app | 901e1d50fe4c8732dccdb597d6cad6e099a2dbfa | [
"MIT"
] | null | null | null | pbs/apps.py | AliTATLICI/django-rest-app | 901e1d50fe4c8732dccdb597d6cad6e099a2dbfa | [
"MIT"
] | null | null | null | pbs/apps.py | AliTATLICI/django-rest-app | 901e1d50fe4c8732dccdb597d6cad6e099a2dbfa | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class PbsConfig(AppConfig):
    """Django application configuration for the ``pbs`` app."""
    # Dotted module path Django uses to locate and label this app.
    name = 'pbs'
| 13.5 | 33 | 0.728395 |
eeee2179bf362d1c71b12b2f474e4d3a6d80e573 | 1,544 | py | Python | spug/data_pipeline/sources/stock.py | syeehyn/spug | 216976e0171bbc14042377fbbb535180bd2efaf3 | [
"Apache-2.0"
] | null | null | null | spug/data_pipeline/sources/stock.py | syeehyn/spug | 216976e0171bbc14042377fbbb535180bd2efaf3 | [
"Apache-2.0"
] | null | null | null | spug/data_pipeline/sources/stock.py | syeehyn/spug | 216976e0171bbc14042377fbbb535180bd2efaf3 | [
"Apache-2.0"
] | 1 | 2021-12-05T22:54:28.000Z | 2021-12-05T22:54:28.000Z | """
fetch historical stocks prices
"""
from tqdm import tqdm
import pandas as pd
import pandas_datareader as pdr
from .base import DataFetcher
def get_stock_price(symbol, start, end):
    """get stock price of a company over a time range

    Args:
        symbol (str): ticker symbol of a stock
        start (datetime.datetime): start time
        end (datetime.datetime): end time

    Returns:
        pd.DataFrame: stock price of a company over a time range
    """
    reader = pdr.yahoo.daily.YahooDailyReader(symbol, start=start, end=end)
    prices = reader.read().reset_index()
    prices = prices[["Date", "High", "Low", "Open", "Close", "Volume", "Adj Close"]]
    # Normalised lowercase 'date' column replaces the raw 'Date' one.
    prices["date"] = pd.to_datetime(prices["Date"])
    return prices.drop("Date", axis=1)
class StockFetcher(DataFetcher):
    """Fetches historical stock prices for the configured companies."""

    def __init__(self, **configs):
        super().__init__(**configs)

    def get_data(self):
        """get stock prices of the configured companies over the
        configured time range

        Uses self.companies (list of single-key dicts keyed by ticker
        symbol) plus self.start_date / self.end_date.

        Returns:
            pd.DataFrame: stock prices of companies over a time range
        """
        # Each companies entry is a one-key dict; the key is the symbol.
        symbols = [list(entry.keys())[0] for entry in self.companies]
        frames = []
        for symbol in tqdm(symbols):
            df = get_stock_price(symbol, self.start_date, self.end_date)
            df["ticker_symbol"] = symbol
            frames.append(df)
        if not frames:
            return pd.DataFrame()
        # Fixed: DataFrame.append in a loop is quadratic and was removed
        # in pandas 2.0; collect the frames and concat once instead.
        return pd.concat(frames).reset_index(drop=True)
| 31.510204 | 87 | 0.613342 |
eeee6f4fc03992c011356b8190353e8fc67ab368 | 809 | py | Python | parser/team07/Proyecto/clasesAbstractas/expresion.py | susanliss/tytus | a613a2352cf4a1d0e90ce27bb346ab60ed8039cc | [
"MIT"
] | null | null | null | parser/team07/Proyecto/clasesAbstractas/expresion.py | susanliss/tytus | a613a2352cf4a1d0e90ce27bb346ab60ed8039cc | [
"MIT"
] | null | null | null | parser/team07/Proyecto/clasesAbstractas/expresion.py | susanliss/tytus | a613a2352cf4a1d0e90ce27bb346ab60ed8039cc | [
"MIT"
] | null | null | null | from .instruccionAbstracta import InstruccionAbstracta
class Expresion(InstruccionAbstracta):
    """Base node for expression subtrees in the AST.

    Each node carries a literal value or an operator with up to two
    operand subtrees (opIzquierdo / opDerecho).
    """

    def __init__(self):
        pass

    def valorPrimitivo(self, valor, tipo):
        """Configure this node as a primitive literal of type *tipo*."""
        self._configurar(valor, tipo, None, None)

    def operacionUnaria(self, opIzquierdo, tipoOperacion):
        """Configure this node as a unary operation on one operand."""
        self._configurar(None, tipoOperacion, opIzquierdo, None)

    def operacionBinaria(self, opIzquierdo, opDerecho, tipoOperacion):
        """Configure this node as a binary operation on two operands."""
        self._configurar(None, tipoOperacion, opIzquierdo, opDerecho)

    def _configurar(self, valor, tipo, izquierdo, derecho):
        # Single place where all four node fields are assigned.
        self.valor = valor
        self.tipoOperacion = tipo
        self.opIzquierdo = izquierdo
        self.opDerecho = derecho

    def ejecutar(self, tabalSimbolos, listaErrores):
        """Evaluation hook; concrete expression types override this."""
        pass
| 23.794118 | 67 | 0.651422 |
eeef030e3640987cf35e25ed5365b60fde947fe0 | 2,963 | py | Python | src/gluonts/nursery/tsbench/src/tsbench/evaluations/metrics/performance.py | RingoIngo/gluon-ts | 62fb20c36025fc969653accaffaa783671709564 | [
"Apache-2.0"
] | 1 | 2022-03-28T01:17:00.000Z | 2022-03-28T01:17:00.000Z | src/gluonts/nursery/tsbench/src/tsbench/evaluations/metrics/performance.py | RingoIngo/gluon-ts | 62fb20c36025fc969653accaffaa783671709564 | [
"Apache-2.0"
] | null | null | null | src/gluonts/nursery/tsbench/src/tsbench/evaluations/metrics/performance.py | RingoIngo/gluon-ts | 62fb20c36025fc969653accaffaa783671709564 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
from __future__ import annotations
from dataclasses import dataclass
from typing import cast, Dict, List, Union
import numpy as np
import pandas as pd
from .metric import Metric
@dataclass
class Performance:
    """
    The performance class encapsulates the metrics that are recorded for configurations.
    """

    training_time: Metric
    latency: Metric
    num_model_parameters: Metric
    num_gradient_updates: Metric
    ncrps: Metric
    mase: Metric
    smape: Metric
    nrmse: Metric
    nd: Metric

    @classmethod
    def from_dict(cls, metrics: Dict[str, Union[float, int]]) -> Performance:
        """
        Initializes a new performance object from the given 1D dictionary. Metrics are expected to
        be provided via `<metric>_mean` and `<metric>_std` keys.
        """
        kwargs = {
            m: Metric(metrics[f"{m}_mean"], metrics[f"{m}_std"])
            for m in cls.metrics()
        }
        return Performance(**kwargs)  # type: ignore

    @classmethod
    def metrics(cls) -> List[str]:
        """
        Returns the list of metrics that are exposed by the performance class.
        """
        # pylint: disable=no-member
        return list(cls.__dataclass_fields__.keys())  # type: ignore

    @classmethod
    def to_dataframe(
        cls, performances: List[Performance], std: bool = True
    ) -> pd.DataFrame:
        """
        Returns a data frame representing the provided performances, one row
        per performance; when ``std`` is set, each `<metric>_mean` column is
        followed by a `<metric>_std` column.
        """
        fields = sorted(
            Performance.__dataclass_fields__.keys()
        )  # pylint: disable=no-member
        offset = 2 if std else 1
        # Fixed: derive the column count from the actual number of fields
        # instead of hard-coding 18/9, so adding a metric cannot break this.
        result = np.empty((len(performances), len(fields) * offset))
        for i, performance in enumerate(performances):
            for j, field in enumerate(fields):
                # Hoisted: one getattr per field instead of two.
                metric = cast(Metric, getattr(performance, field))
                result[i, j * offset] = metric.mean
                if std:
                    result[i, j * offset + 1] = metric.std
        return pd.DataFrame(
            result,
            columns=[
                name
                for field in fields
                for name in (
                    [f"{field}_mean", f"{field}_std"]
                    if std
                    else [f"{field}_mean"]
                )
            ],
        )
| 31.521277 | 98 | 0.59433 |
eef0f0b4303286161e71367939209bbe2bdf9cf9 | 1,940 | py | Python | Scripts/simulation/postures/posture_tunables.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/simulation/postures/posture_tunables.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/simulation/postures/posture_tunables.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\postures\posture_tunables.py
# Compiled at: 2016-02-19 01:17:07
# Size of source mod 2**32: 2003 bytes
from postures.posture_cost import TunablePostureCostVariant
from postures.posture_validators import TunablePostureValidatorVariant
from sims4.tuning.tunable import OptionalTunable, TunableTuple, TunableList
class TunableSupportedPostureTransitionData(OptionalTunable):
    # Decompiled (uncompyle6) tuning wrapper: an optional tuple pairing a
    # posture transition's cost with a list of validators, enabled by
    # default.
    # NOTE(review): '(super().__init__)(args, ...)' passes the args tuple
    # as one positional argument; the pre-decompilation source was almost
    # certainly 'super().__init__(*args, ...)' — confirm against the
    # tuning framework before relying on this call shape.
    def __init__(self, *args, **kwargs):
        (super().__init__)(args, tunable=TunableTuple(cost=(TunablePostureCostVariant()),
          validators=TunableList(description='\n Define under what circumstances this transition is valid.\n There are performance implications of adding tested edges to\n the posture graph. \n \n In general, this should be handled by testing posture-\n providing interactions altogether. This should really only\n be used to restrict the ability to go from a specific\n posture to another specific posture under certain\n circumstances.\n \n e.g. Prevent Squeamish Sims from sitting on dirty toilets.\n * Do not use this tuning. Instead, test out the interaction\n directly.\n \n e.g. Prevent Toddlers with low motor skill from entering the\n High Chair posture from stand. However, allow them to be\n placed on the High Chair from carry.\n * Use this tuning.\n ',
          tunable=(TunablePostureValidatorVariant()))),
          enabled_by_default=True, **kwargs)
eef0f57e2e52d98324d6736af1814a7fec12251f | 23 | py | Python | Game/History/__init__.py | ritwikd/interom | 0b626351fd742f2a99d0a6d11ba8c1a214aab576 | [
"MIT"
] | null | null | null | Game/History/__init__.py | ritwikd/interom | 0b626351fd742f2a99d0a6d11ba8c1a214aab576 | [
"MIT"
] | 1 | 2021-03-06T22:08:32.000Z | 2021-03-06T22:09:07.000Z | Game/History/__init__.py | ritwikd/interom | 0b626351fd742f2a99d0a6d11ba8c1a214aab576 | [
"MIT"
] | 1 | 2021-03-03T22:48:07.000Z | 2021-03-03T22:48:07.000Z | from . import Log, Move | 23 | 23 | 0.73913 |
eef278f2f4e2c217a17b9bdf16a63771a1fe90a6 | 107 | py | Python | Guitarist.py | Stanels42/pythonic-garage-band | 7dfdec84073720998368cc2042bed011244c88ae | [
"MIT"
] | 1 | 2021-10-01T09:48:42.000Z | 2021-10-01T09:48:42.000Z | Guitarist.py | Stanels42/pythonic-garage-band | 7dfdec84073720998368cc2042bed011244c88ae | [
"MIT"
] | 1 | 2019-12-06T04:22:11.000Z | 2019-12-06T04:22:11.000Z | Guitarist.py | Stanels42/pythonic-garage-band | 7dfdec84073720998368cc2042bed011244c88ae | [
"MIT"
] | 1 | 2019-12-06T19:39:55.000Z | 2019-12-06T19:39:55.000Z | from Musician import Musician
class Guitarist(Musician):
    """Musician whose instrument is the guitar."""

    # Class-level defaults picked up by the Musician base class
    # (presumably used when the band performs — TODO confirm against Musician).
    solo = 'Guitar Sounds'
    instrument = 'Guitar'
| 17.833333 | 29 | 0.747664 |
eef62d1ce6768e7a68a4a1159bbd33491dcbc7e8 | 6,126 | py | Python | tests/objects/fiber_manipulation_test.py | jifengting1/fastpliFork | 1ef7e2d268e03e21ded9390fc005b9fff2e0a3c1 | [
"MIT"
] | null | null | null | tests/objects/fiber_manipulation_test.py | jifengting1/fastpliFork | 1ef7e2d268e03e21ded9390fc005b9fff2e0a3c1 | [
"MIT"
] | null | null | null | tests/objects/fiber_manipulation_test.py | jifengting1/fastpliFork | 1ef7e2d268e03e21ded9390fc005b9fff2e0a3c1 | [
"MIT"
] | null | null | null | import unittest
import numpy as np
import fastpli.objects
import fastpli.tools
class MainTest(unittest.TestCase):
    """Tests for the fastpli.objects fiber manipulation helpers.

    A fiber is an (n, 4) float array of [x, y, z, radius] rows; a fiber
    bundle is a list of fibers and fiber bundles a list of bundles.
    """

    # TODO: implement object.fiber.*manipulations*

    def setUp(self):
        """Create a two-point fiber and wrap copies as bundle / bundles."""
        self.fiber = np.array([[0, 0, 0, 1], [1, 1, 1, 2]], dtype=float)
        self.fiber_bundle = [self.fiber.copy()]
        self.fiber_bundles = [[self.fiber.copy()]]

    def test_resize(self):
        """Rescale scales fiber/bundle/bundles; `mod` limits it to points or radii."""
        fiber = fastpli.objects.fiber.Rescale(self.fiber, 10)
        self.assertTrue(np.array_equal(fiber, self.fiber * 10))
        fb = fastpli.objects.fiber_bundle.Rescale(self.fiber_bundle, 10)
        for f in fb:
            self.assertTrue(np.array_equal(f, self.fiber * 10))
        fbs = fastpli.objects.fiber_bundles.Rescale(self.fiber_bundles, 10)
        for fb in fbs:
            for f in fb:
                self.assertTrue(np.array_equal(f, self.fiber * 10))
        # mod='points' scales coordinates only, radii stay unchanged.
        # NOTE(review): [:, :-2] covers only the first two columns of the
        # 4-column fiber, so the z coordinate is not asserted here.
        fiber = fastpli.objects.fiber.Rescale(self.fiber, 10, mod='points')
        self.assertTrue(np.array_equal(fiber[:, :-2], self.fiber[:, :-2] * 10))
        self.assertTrue(np.array_equal(fiber[:, -1], self.fiber[:, -1]))
        # mod='radii' scales radii only, coordinates stay unchanged.
        fiber = fastpli.objects.fiber.Rescale(self.fiber, 10, mod='radii')
        self.assertTrue(np.array_equal(fiber[:, :-2], self.fiber[:, :-2]))
        self.assertTrue(np.array_equal(fiber[:, -1], self.fiber[:, -1] * 10))

    def test_rotation(self):
        """Rotate around x by 0deg / 90deg, with and without a rotation center."""
        fiber = fastpli.objects.fiber.Rotate(self.fiber,
                                             fastpli.tools.rotation.x(0))
        self.assertTrue(np.array_equal(self.fiber, fiber))
        fiber = fastpli.objects.fiber.Rotate(
            self.fiber, fastpli.tools.rotation.x(np.deg2rad(90)))
        self.assertTrue(
            np.allclose(fiber, np.array([[0, 0, 0, 1], [1, -1, 1, 2]])))
        # Same rotation but about the point (1, 1, 1) instead of the origin.
        fiber = fastpli.objects.fiber.Rotate(
            self.fiber, fastpli.tools.rotation.x(np.deg2rad(90)), [1, 1, 1])
        self.assertTrue(
            np.allclose(fiber, np.array([[0, 2, 0, 1], [1, 1, 1, 2]])))
        for f in self.fiber_bundle:
            fiber = fastpli.objects.fiber.Rotate(
                f, fastpli.tools.rotation.x(np.deg2rad(90)), [1, 1, 1])
            self.assertTrue(
                np.allclose(fiber, np.array([[0, 2, 0, 1], [1, 1, 1, 2]])))
        for fb in self.fiber_bundles:
            for f in fb:
                fiber = fastpli.objects.fiber.Rotate(
                    f, fastpli.tools.rotation.x(np.deg2rad(90)), [1, 1, 1])
                self.assertTrue(
                    np.allclose(fiber, np.array([[0, 2, 0, 1], [1, 1, 1, 2]])))

    def test_translate(self):
        """Translate moves coordinates by the offset and leaves radii alone."""
        fiber = fastpli.objects.fiber.Translate(self.fiber, [1, 1, 1])
        self.assertTrue(
            np.array_equal(fiber[:, :3],
                           self.fiber[:, :3] + np.array([1, 1, 1])))
        self.assertTrue(np.array_equal(fiber[:, -1], self.fiber[:, -1]))
        for f in self.fiber_bundle:
            fiber = fastpli.objects.fiber.Translate(f, [1, 1, 1])
            self.assertTrue(
                np.array_equal(fiber[:, :3],
                               self.fiber[:, :3] + np.array([1, 1, 1])))
            self.assertTrue(np.array_equal(f[:, -1], self.fiber[:, -1]))
        for fb in self.fiber_bundles:
            for f in fb:
                fiber = fastpli.objects.fiber.Translate(f, [1, 1, 1])
                self.assertTrue(
                    np.array_equal(fiber[:, :3],
                                   self.fiber[:, :3] + np.array([1, 1, 1])))
                self.assertTrue(np.array_equal(f[:, -1], self.fiber[:, -1]))

    def test_cut(self):
        """Cut clips fibers against an axis-aligned box, splitting where they leave it."""
        # Fiber fully inside the box: returned unchanged as a single piece.
        fiber = np.array([[0, 0, 0, 1], [1, 1, 1, 2]], dtype=float)
        fibers = fastpli.objects.fiber.Cut(fiber, [[-10] * 3, [10] * 3])
        self.assertTrue(len(fibers) == 1)
        self.assertTrue(np.array_equal(fibers[0], fiber))
        # Fiber crossing the box boundary once: still one piece.
        fiber = np.array([[0, 0, 0, 1], [10, 10, 10, 2]], dtype=float)
        fibers = fastpli.objects.fiber.Cut(fiber, [[-5] * 3, [5] * 3])
        self.assertTrue(len(fibers) == 1)
        self.assertTrue(np.array_equal(fibers[0], fiber))
        # Trailing point far outside: trimmed down to two points.
        fiber = np.array([[0, 0, 0, 1], [10, 10, 10, 2], [100, 100, 100, 2]],
                         dtype=float)
        fibers = fastpli.objects.fiber.Cut(fiber, [[-5] * 3, [5] * 3])
        self.assertTrue(len(fibers) == 1)
        self.assertTrue(fibers[0].shape[0] == 2)
        self.assertTrue(not np.array_equal(fibers[0], fiber))
        # Fiber leaving and re-entering the box: split into two pieces.
        fiber = np.array([[0, 0, 0, 1], [10, 10, 10, 2], [100, 100, 100, 2],
                          [10, 10, 10, 2], [0, 0, 0, 1]],
                         dtype=float)
        fibers = fastpli.objects.fiber.Cut(fiber, [[-5] * 3, [5] * 3])
        self.assertTrue(len(fibers) == 2)
        self.assertTrue(fibers[0].shape[0] == 2)
        self.assertTrue(fibers[1].shape[0] == 2)
        self.assertTrue(not np.array_equal(fibers[0], fiber))
        self.assertTrue(not np.array_equal(fibers[1], fiber))
        # Bundle wrapper behaves like cutting the contained fiber.
        fiber_bundle = [fiber]
        cut_fb = fastpli.objects.fiber_bundle.Cut(fiber_bundle,
                                                  [[-5] * 3, [5] * 3])
        fibers = cut_fb
        self.assertTrue(len(fibers) == 2)
        self.assertTrue(fibers[0].shape[0] == 2)
        self.assertTrue(fibers[1].shape[0] == 2)
        self.assertTrue(not np.array_equal(fibers[0], fiber))
        self.assertTrue(not np.array_equal(fibers[1], fiber))
        # Bundles wrapper: one bundle in, one (cut) bundle out.
        fiber_bundles = [[fiber]]
        cut_fbs = fastpli.objects.fiber_bundles.Cut(fiber_bundles,
                                                    [[-5] * 3, [5] * 3])
        fibers = cut_fbs[0]
        self.assertTrue(len(cut_fbs) == 1)
        self.assertTrue(len(fibers) == 2)
        self.assertTrue(fibers[0].shape[0] == 2)
        self.assertTrue(fibers[1].shape[0] == 2)
        self.assertTrue(not np.array_equal(fibers[0], fiber))
        self.assertTrue(not np.array_equal(fibers[1], fiber))
        # Segment passing through the box corner region: kept whole.
        fiber = np.array([[0, 0, 0, 1], [10, 10, 10, 2]], dtype=float)
        fibers = fastpli.objects.fiber.Cut(fiber, [[5] * 3, [6] * 3])
        self.assertTrue(np.array_equal(fibers[0], fiber))
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 42.839161 | 79 | 0.538851 |
eef6f9b0de74e501a4d4981b8350d4bf8e08d58a | 4,403 | py | Python | kerascv/layers/matchers/argmax_matcher.py | tanzhenyu/keras-cv | b7208ee25735c492ccc171874e34076111dcf637 | [
"Apache-2.0"
] | null | null | null | kerascv/layers/matchers/argmax_matcher.py | tanzhenyu/keras-cv | b7208ee25735c492ccc171874e34076111dcf637 | [
"Apache-2.0"
] | null | null | null | kerascv/layers/matchers/argmax_matcher.py | tanzhenyu/keras-cv | b7208ee25735c492ccc171874e34076111dcf637 | [
"Apache-2.0"
] | null | null | null | import tensorflow as tf
from kerascv.layers.iou_similarity import IOUSimilarity
iou_layer = IOUSimilarity()
class ArgMaxMatcher(tf.keras.layers.Layer):
    """Anchor matcher that blends per-anchor targets by overlap thresholds.

    Every anchor is matched to the ground-truth column with the highest
    similarity.  Anchors whose best similarity is below
    ``unmatched_threshold`` receive ``unmatched_values`` (negatives);
    anchors between the two thresholds receive ``ignored_values``
    (neutral); the rest keep the values gathered from their best match
    (positives).
    """

    # [pos, neutral, neg]
    def __init__(self, matched_threshold, unmatched_threshold):
        """Store the positive/negative similarity thresholds."""
        self.matched_threshold = matched_threshold
        self.unmatched_threshold = unmatched_threshold
        super(ArgMaxMatcher, self).__init__()

    # similarity: [#num_anchors, #num_gt_boxes]
    # matched_values: [#num_gt_boxes, dim]
    # unmatched_values: [dim]
    # ignored_values: [dim]
    def call(self, similarity, matched_values, unmatched_values, ignored_values):
        """Gather best-match values per anchor and overwrite negatives/neutrals.

        Args:
            similarity: [#num_anchors, #num_gt_boxes] pairwise scores.
            matched_values: [#num_gt_boxes, dim] values to gather from.
            unmatched_values: [dim] value used for anchors below the
                unmatched threshold.
            ignored_values: [dim] value used for anchors between the
                two thresholds.

        Returns:
            Per-anchor values with negatives/neutrals substituted.
        """
        # [#num_anchors] index of the best ground truth for each anchor.
        matched_indices = tf.argmax(similarity, axis=1)
        # [#num_anchors] similarity of that best match.
        matched_max_vals = tf.reduce_max(similarity, axis=1)
        # Bug fix: removed a dead `above_unmatched_threshold_indices`
        # tensor that was computed (greater + cast) but never used.
        # [#num_anchors] 1.0 where the anchor counts as negative.
        below_unmatched_threshold_indices = tf.greater(
            self.unmatched_threshold, matched_max_vals
        )
        below_unmatched_threshold_indices = tf.cast(
            below_unmatched_threshold_indices, matched_values.dtype
        )
        # [#num_anchors] 1.0 where the anchor is neutral (between thresholds).
        between_threshold_indices = tf.logical_and(
            tf.greater_equal(matched_max_vals, self.unmatched_threshold),
            tf.greater(self.matched_threshold, matched_max_vals),
        )
        between_threshold_indices = tf.cast(
            between_threshold_indices, matched_values.dtype
        )
        # [#num_anchors, dim] values gathered from each anchor's best match.
        matched_vals = tf.gather(matched_values, matched_indices)
        if matched_vals.shape.rank > 1:
            # Add a trailing axis so the masks broadcast over `dim`.
            below_unmatched_threshold_indices = below_unmatched_threshold_indices[
                :, tf.newaxis
            ]
            between_threshold_indices = between_threshold_indices[:, tf.newaxis]
        # Linear blend: replace negative anchors with `unmatched_values`.
        matched_vals = tf.add(
            tf.multiply(
                matched_vals,
                tf.constant(1, dtype=matched_values.dtype)
                - below_unmatched_threshold_indices,
            ),
            tf.multiply(unmatched_values, below_unmatched_threshold_indices),
        )
        # Linear blend: replace neutral anchors with `ignored_values`.
        matched_vals = tf.add(
            tf.multiply(
                matched_vals,
                tf.constant(1, dtype=matched_values.dtype) - between_threshold_indices,
            ),
            tf.multiply(ignored_values, between_threshold_indices),
        )
        return matched_vals

    def get_config(self):
        """Return the config dict so Keras can serialize/re-create the layer."""
        config = {
            "matched_threshold": self.matched_threshold,
            "unmatched_threshold": self.unmatched_threshold,
        }
        base_config = super(ArgMaxMatcher, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
@tf.function(
    input_signature=[
        tf.TensorSpec(shape=(None, 4), dtype=tf.float32),  # ground_truth_boxes
        tf.TensorSpec(shape=(None, 1), dtype=tf.int64),    # ground_truth_labels
        tf.TensorSpec(shape=(None, 4), dtype=tf.float32),  # anchors
        tf.TensorSpec(shape=(), dtype=tf.float32),         # positive_iou_threshold
        tf.TensorSpec(shape=(), dtype=tf.float32),         # negative_iou_threshold
    ]
)
def target_assign_argmax(
        ground_truth_boxes,
        ground_truth_labels,
        anchors,
        positive_iou_threshold=0.5,
        negative_iou_threshold=0.3):
    """Match each anchor to its highest-IoU ground truth box.

    Returns (matched_gt_boxes, matched_gt_labels, positive_mask,
    negative_mask), all aligned with ``anchors``.
    """
    if tf.equal(tf.size(ground_truth_boxes), 0):
        # No ground truth at all: every anchor is neither positive nor
        # negative; anchors stand in as their own "match", labels are 0.
        num_anchors = tf.shape(anchors)[0]
        matched_gt_boxes = tf.identity(anchors)
        matched_gt_labels = tf.zeros((num_anchors, 1), dtype=tf.int64)
        positive_mask = tf.zeros((num_anchors, 1), tf.bool)
        negative_mask = tf.zeros((num_anchors, 1), tf.bool)
        return matched_gt_boxes, matched_gt_labels, positive_mask, negative_mask
    # [n_gt_boxes, n_anchors]
    similarity = iou_layer(ground_truth_boxes, anchors)
    # [n_anchors] best ground-truth index per anchor.
    matched_gt_indices = tf.argmax(similarity, axis=0)
    # [n_anchors] that best IoU value.
    matched_max_vals = tf.reduce_max(similarity, axis=0)
    # NOTE(review): these masks are rank-1 [n_anchors] while the empty-ground-
    # truth branch above returns rank-2 [n_anchors, 1] — callers may need to
    # reconcile the two shapes.
    positive_mask = tf.greater(matched_max_vals, positive_iou_threshold)
    negative_mask = tf.greater(negative_iou_threshold, matched_max_vals)
    matched_gt_boxes = tf.gather(ground_truth_boxes, matched_gt_indices)
    matched_gt_labels = tf.gather(ground_truth_labels, matched_gt_indices)
    return matched_gt_boxes, matched_gt_labels, positive_mask, negative_mask
eef840e020a73705ee971a6562f13c86679b8ac7 | 538 | py | Python | Atv1-Distribuida/servidorBackup.py | rodolfotr/Computacao_Distribuida | 1d9db06ef4ab7290a6ce9666b5cb83987cc74e9d | [
"MIT"
] | null | null | null | Atv1-Distribuida/servidorBackup.py | rodolfotr/Computacao_Distribuida | 1d9db06ef4ab7290a6ce9666b5cb83987cc74e9d | [
"MIT"
] | null | null | null | Atv1-Distribuida/servidorBackup.py | rodolfotr/Computacao_Distribuida | 1d9db06ef4ab7290a6ce9666b5cb83987cc74e9d | [
"MIT"
] | null | null | null | import socket
import struct  # NOTE(review): unused in this script

# Address/port this backup server listens on, and where payloads are written.
IP_BACKUP = '127.0.0.1'
PORTA_BACKUP = 5000
ARQUIVO_BACKUP = "/home/aluno-uffs/Documentos/Trab_Final/Atv1-Distribuida/cliente_BACKUP.c"

# Receive the file.
sockReceber = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sockReceber.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sockReceber.bind((IP_BACKUP, PORTA_BACKUP))
# Loop forever: every UDP datagram received overwrites the backup file.
while (True):
    # recv's argument is only an upper bound on bytes read per datagram;
    # the huge constant just means "accept any datagram size".
    l = sockReceber.recv(1561651651)
    if (l):
        f = open(ARQUIVO_BACKUP,'wb')
        f.write(l)
        f.close()
sockReceber.close() | 25.619048 | 91 | 0.734201 |
eef8835ed3e8db9f839217b35bfd1e4b67953a9b | 634 | py | Python | examples/example_regression.py | QuantLet/localpoly | 7db50e3fb2caf39af8f9db1f2108fd1a81fc51bc | [
"MIT"
] | 1 | 2021-04-28T09:39:53.000Z | 2021-04-28T09:39:53.000Z | examples/example_regression.py | QuantLet/localpoly | 7db50e3fb2caf39af8f9db1f2108fd1a81fc51bc | [
"MIT"
] | null | null | null | examples/example_regression.py | QuantLet/localpoly | 7db50e3fb2caf39af8f9db1f2108fd1a81fc51bc | [
"MIT"
] | 1 | 2021-05-11T19:06:17.000Z | 2021-05-11T19:06:17.000Z | import numpy as np
from matplotlib import pyplot as plt
from localpoly.base import LocalPolynomialRegression

# Simulate data: noisy samples of sin(x) on [-pi, pi].
np.random.seed(1)
X = np.linspace(-np.pi, np.pi, num=150)
y_real = np.sin(X)
y = np.random.normal(0, 0.3, len(X)) + y_real

# Local polynomial regression with a Gaussian kernel and fixed bandwidth h,
# predicting over the full observed range of X.
model = LocalPolynomialRegression(X=X, y=y, h=0.8469, kernel="gaussian", gridsize=100)
prediction_interval = (X.min(), X.max())
results = model.fit(prediction_interval)

# Plot the noisy samples, the true function and the fitted curve.
plt.scatter(X, y)
plt.plot(X, y_real, "grey", ls="--", alpha=0.5, label="function")
plt.plot(results["X"], results["fit"], "r", alpha=0.9, label="fit")
plt.legend()
plt.show()
| 27.565217 | 86 | 0.706625 |
eefc2e95d04d1e10619a3cb3fe8a472e3a76f13a | 690 | py | Python | mint/modules/activations.py | remicongee/Mint | 0f2db9b4216d8e61ec6b6892fd5baf962847581c | [
"MIT"
] | null | null | null | mint/modules/activations.py | remicongee/Mint | 0f2db9b4216d8e61ec6b6892fd5baf962847581c | [
"MIT"
] | null | null | null | mint/modules/activations.py | remicongee/Mint | 0f2db9b4216d8e61ec6b6892fd5baf962847581c | [
"MIT"
] | 1 | 2020-12-02T09:02:55.000Z | 2020-12-02T09:02:55.000Z | ## Activation functions
from .module import Module
from ..utils import functional as F
class ReLU(Module):
    """Rectified linear unit activation layer.

    When ``in_place`` is set, the non-negativity mask of the forward input
    is remembered (during training) so ``backward`` can gate the incoming
    gradient with it.  Relies on the ``Module`` base class to provide the
    ``buffer`` dict and the ``training`` flag — TODO confirm that contract.
    """

    def __init__(self, in_place=False):
        super(ReLU, self).__init__()
        self.in_place = in_place
        self.init_buffer()

    def init_buffer(self):
        # Mask of inputs that passed the activation; filled by forward().
        self.buffer['activated'] = None

    def forward(self, input):
        # Record which elements were >= 0 so backward() can reuse the mask.
        # NOTE(review): the mask is only stored when training AND in_place;
        # backward() will hit a None buffer if forward() never ran in
        # training mode.
        if self.training and self.in_place:
            self.buffer['activated'] = input >= 0
        return F.relu(input)

    def backward(self, input):
        # Gradient of ReLU: pass through where the forward input was >= 0.
        assert self.training
        if self.in_place:
            input *= self.buffer['activated']
        return input
eefc3d409d2d8b66094f301c43a67fdc4a9f6792 | 2,829 | py | Python | utils/phase0/state_transition.py | hwwhww/eth2.0-specs | 729757d4279db4535b176361d67d1567c0df314b | [
"CC0-1.0"
] | 3 | 2020-07-22T14:51:07.000Z | 2022-01-02T12:02:45.000Z | utils/phase0/state_transition.py | hwwhww/eth2.0-specs | 729757d4279db4535b176361d67d1567c0df314b | [
"CC0-1.0"
] | null | null | null | utils/phase0/state_transition.py | hwwhww/eth2.0-specs | 729757d4279db4535b176361d67d1567c0df314b | [
"CC0-1.0"
] | null | null | null | from . import spec
from typing import ( # noqa: F401
Any,
Callable,
List,
NewType,
Tuple,
)
from .spec import (
BeaconState,
BeaconBlock,
)
def process_transaction_type(state: BeaconState,
                             transactions: List[Any],
                             max_transactions: int,
                             tx_fn: Callable[[BeaconState, Any], None]) -> None:
    """Enforce the per-block cap, then apply ``tx_fn`` to each transaction in order."""
    count = len(transactions)
    assert count <= max_transactions
    for position in range(count):
        tx_fn(state, transactions[position])
def process_transactions(state: BeaconState, block: BeaconBlock) -> None:
    """Apply every transaction type from the block body in protocol order."""
    batches = (
        (block.body.proposer_slashings,
         spec.MAX_PROPOSER_SLASHINGS,
         spec.process_proposer_slashing),
        (block.body.attester_slashings,
         spec.MAX_ATTESTER_SLASHINGS,
         spec.process_attester_slashing),
        (block.body.attestations,
         spec.MAX_ATTESTATIONS,
         spec.process_attestation),
        (block.body.deposits,
         spec.MAX_DEPOSITS,
         spec.process_deposit),
        (block.body.voluntary_exits,
         spec.MAX_VOLUNTARY_EXITS,
         spec.process_voluntary_exit),
    )
    for transactions, limit, handler in batches:
        process_transaction_type(state, transactions, limit, handler)

    # Transfers must be unique within a block.
    assert len(block.body.transfers) == len(set(block.body.transfers))
    process_transaction_type(
        state,
        block.body.transfers,
        spec.MAX_TRANSFERS,
        spec.process_transfer,
    )
def process_block(state: BeaconState,
                  block: BeaconBlock,
                  verify_state_root: bool=False) -> None:
    """Apply a single block to ``state``.

    Order: block header, RANDAO reveal, Eth1 data, then all body
    transactions; optionally verify the block's claimed state root last.
    """
    spec.process_block_header(state, block)
    spec.process_randao(state, block)
    spec.process_eth1_data(state, block)
    process_transactions(state, block)
    if verify_state_root:
        spec.verify_block_state_root(state, block)
def process_epoch_transition(state: BeaconState) -> None:
    """Run every per-epoch processing step on ``state``.

    The call order mirrors the spec and is significant: justification and
    finalization first, registry/slashing bookkeeping in the middle, and
    the epoch-update finalisation last.
    """
    spec.update_justification_and_finalization(state)
    spec.process_crosslinks(state)
    spec.maybe_reset_eth1_period(state)
    spec.apply_rewards(state)
    spec.process_ejections(state)
    spec.update_registry_and_shuffling_data(state)
    spec.process_slashings(state)
    spec.process_exit_queue(state)
    spec.finish_epoch_update(state)
def state_transition(state: BeaconState,
                     block: BeaconBlock,
                     verify_state_root: bool=False) -> BeaconState:
    """Advance ``state`` slot by slot up to ``block.slot``, applying the block.

    For each crossed slot the state is cached, the epoch transition is run
    when the next slot starts a new epoch, and the slot counter advances.
    Once the state reaches the block's slot, the block itself is applied.

    Args:
        state: mutable beacon state; it is modified in place.
        block: the block to apply once its slot is reached.
        verify_state_root: when True, also verify the block's state root.

    Returns:
        The (mutated) state.
    """
    while state.slot < block.slot:
        spec.cache_state(state)
        if (state.slot + 1) % spec.SLOTS_PER_EPOCH == 0:
            process_epoch_transition(state)
        spec.advance_slot(state)
        if block.slot == state.slot:
            process_block(state, block, verify_state_root)
    # Bug fix: the signature promises a BeaconState, but the original
    # implicitly returned None; return the mutated state instead.
    return state
| 28.009901 | 80 | 0.653588 |
eefc51b8229cb41587ef71a58d9e82472148716d | 1,419 | py | Python | greatbigcrane/buildout_manage/recipes/mercurial.py | pnomolos/greatbigcrane | db0763706e1e8ca1f2bd769aa79c99681f1a967e | [
"Apache-2.0"
] | 3 | 2015-11-19T21:35:22.000Z | 2016-07-17T18:07:07.000Z | greatbigcrane/buildout_manage/recipes/mercurial.py | pnomolos/greatbigcrane | db0763706e1e8ca1f2bd769aa79c99681f1a967e | [
"Apache-2.0"
] | null | null | null | greatbigcrane/buildout_manage/recipes/mercurial.py | pnomolos/greatbigcrane | db0763706e1e8ca1f2bd769aa79c99681f1a967e | [
"Apache-2.0"
] | null | null | null | """
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from buildout_manage.recipetools import simple_property, bool_property
class MercurialRecipe(object):
def __init__(self, config, section_name):
self.config = config
self.section_name = section_name
def init(self):
# Does section already exist?
self.config.add_part(self.section_name)
self.section = self.config[self.section_name]
self.section['recipe'] = 'mercurialrecipe'
def dict(self):
return dict(repository=self.repository,
location=self.location,
newest=self.newest)
repository = simple_property('repository')
location = simple_property('location')
newest = bool_property('newest')
def mercurial(config, section_name):
recipe = MercurialRecipe(config, section_name)
recipe.init()
return recipe
| 33 | 72 | 0.725863 |
eefdcd3ea1af6682c969002f242acba638c23ea1 | 799 | py | Python | e2e/codebuild/results_comment.py | hixio-mh/taskcat | a5d23a4b05592250c2ec0304d77571675628b00d | [
"Apache-2.0"
] | 920 | 2016-12-03T01:41:25.000Z | 2021-11-04T13:52:21.000Z | e2e/codebuild/results_comment.py | hixio-mh/taskcat | a5d23a4b05592250c2ec0304d77571675628b00d | [
"Apache-2.0"
] | 544 | 2017-02-23T22:41:25.000Z | 2021-11-03T23:02:25.000Z | e2e/codebuild/results_comment.py | hixio-mh/taskcat | a5d23a4b05592250c2ec0304d77571675628b00d | [
"Apache-2.0"
] | 225 | 2016-12-11T13:36:21.000Z | 2021-11-04T14:43:53.000Z | import os
import sys

import boto3
from github import Github

# NOTE(review): created but never used in this script — possibly leftover.
SSM_CLIENT = boto3.client("ssm")

# Repository ("owner/name"), PR number and token come from the environment.
GITHUB_REPO_NAME = os.environ.get("GITHUB_REPO_NAME", "")
PR_NUMBER = os.environ.get("PR_NUMBER", "")
# argv[2] holds "0"/"1": any non-zero value means the e2e tests failed.
FAILED = bool(int(sys.argv[2]))
GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN", "")

if __name__ == "__main__":
    # argv[1] is the commit SHA the review is attached to.
    repo = Github(GITHUB_TOKEN).get_repo(GITHUB_REPO_NAME)
    pr = repo.get_pull(int(PR_NUMBER))
    message, event = ("end to end tests failed", "REQUEST_CHANGES")
    if not FAILED:
        message, event = ("end to end tests passed\n", "APPROVE")
        # Append the coverage report, stripping absolute repo paths.
        with open("../../cov_report", "r") as fh:
            cov = fh.read().replace(f"/{GITHUB_REPO_NAME}/", "")
        message += f"```{cov}```"
    pr.create_review(body=message, event=event, commit=repo.get_commit(sys.argv[1]))
| 30.730769 | 84 | 0.653317 |
eefe78a5c5393bb02f57187df46d42fbd870dd68 | 2,460 | py | Python | openghg/client/_search.py | openghg/openghg | 9a05dd6fe3cee6123898b8f390cfaded08dbb408 | [
"Apache-2.0"
] | 5 | 2021-03-02T09:04:07.000Z | 2022-01-25T09:58:16.000Z | openghg/client/_search.py | openghg/openghg | 9a05dd6fe3cee6123898b8f390cfaded08dbb408 | [
"Apache-2.0"
] | 229 | 2020-09-30T15:08:39.000Z | 2022-03-31T14:23:55.000Z | openghg/client/_search.py | openghg/openghg | 9a05dd6fe3cee6123898b8f390cfaded08dbb408 | [
"Apache-2.0"
] | null | null | null | from __future__ import annotations
from typing import TYPE_CHECKING, Dict, List, Optional, Union
from Acquire.Client import Wallet
if TYPE_CHECKING:
from openghg.dataobjects import SearchResults
__all__ = ["Search"]
class Search:
    """Client-side helper for querying an OpenGHG search service."""

    def __init__(self, service_url: Optional[str] = None):
        """Connect to the OpenGHG service behind ``service_url`` via an Acquire wallet."""
        self._service_url = service_url if service_url is not None else "https://fn.openghg.org/t"

        wallet = Wallet()
        self._service = wallet.get_service(service_url=f"{self._service_url}/openghg")

    def search(
        self,
        species: Union[str, List] = None,
        site: Union[str, List] = None,
        inlet: Union[str, List] = None,
        instrument: Union[str, List] = None,
        start_date: str = None,
        end_date: str = None,
        skip_ranking: bool = False,
        data_type: str = "timeseries",
    ) -> Union[SearchResults, Dict]:
        """Search the object store for surface observation data.

        At least one of species, site, inlet or instrument is required.

        Args:
            species: Species name(s)
            site: Three letter site code(s)
            inlet: Inlet height(s)
            instrument: Instrument name(s)
            start_date: Start date
            end_date: End date
            skip_ranking: Ask the service to skip data ranking
            data_type: Kind of data to search for
        Returns:
            SearchResults: parsed results when the service returns them,
            otherwise the raw response dictionary.
        """
        from openghg.dataobjects import SearchResults

        if self._service is None:
            raise PermissionError("Cannot use a null service")

        if not any((species, site, inlet, instrument)):
            raise ValueError("We must have at least one of species, site, inlet or instrument")

        candidates = (
            ("species", species),
            ("site", site),
            ("inlet", inlet),
            ("instrument", instrument),
            ("start_date", start_date),
            ("end_date", end_date),
        )
        # Only forward the criteria the caller actually supplied.
        args = {name: value for name, value in candidates if value is not None}
        args["skip_ranking"] = str(skip_ranking)
        args["data_type"] = str(data_type)

        response: Dict = self._service.call_function(function="search.search", args=args)

        try:
            return SearchResults.from_data(response["results"])
        except KeyError:
            return response
| 29.638554 | 96 | 0.597561 |
e1003c20209106cf6d3e01c2eabbb6012b595686 | 1,524 | py | Python | ikats/client/opentsdb_stub.py | IKATS/ikats_api | 86f965e9ea83fde1fb64f187b294d383d267f77f | [
"Apache-2.0"
] | null | null | null | ikats/client/opentsdb_stub.py | IKATS/ikats_api | 86f965e9ea83fde1fb64f187b294d383d267f77f | [
"Apache-2.0"
] | null | null | null | ikats/client/opentsdb_stub.py | IKATS/ikats_api | 86f965e9ea83fde1fb64f187b294d383d267f77f | [
"Apache-2.0"
] | 1 | 2020-01-27T14:44:27.000Z | 2020-01-27T14:44:27.000Z | # -*- coding: utf-8 -*-
"""
Copyright 2019 CS Systèmes d'Information
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import random
from ikats.client.opentsdb_client import OpenTSDBClient
class Singleton(type):
    """Metaclass caching exactly one instance per class.

    The first instantiation of a class using this metaclass builds the
    instance; every later call returns that same cached object.
    """

    _instances = {}

    def __call__(cls, *args, **kwargs):
        try:
            return cls._instances[cls]
        except KeyError:
            instance = super(Singleton, cls).__call__(*args, **kwargs)
            cls._instances[cls] = instance
            return instance
class OpenTSDBStub(OpenTSDBClient, metaclass=Singleton):
    """In-memory stand-in for the OpenTSDB client used by IKATS.

    Series are kept in the class-level ``DB`` dict keyed by TSUID; since the
    class is a singleton, all users share the same data.
    """

    # tsuid -> list of (timestamp, value) points
    DB = {}

    def get_nb_points_of_tsuid(self, tsuid):
        """Return the number of points stored for *tsuid*."""
        return len(self.DB[tsuid])

    def assign_metric(self, metric, tags):
        """Fabricate a random uppercase-hex TSUID; *metric*/*tags* are ignored."""
        return "%X" % random.randint(0, 0xFFFFFFFFFFFFFFFFFFFF)

    def get_ts_by_tsuid(self, tsuid, sd, ed=None):
        """Return the whole stored series; the *sd*/*ed* bounds are not applied."""
        return self.DB[tsuid]

    def add_points(self, tsuid, data):
        """Store *data* under *tsuid*; return (first timestamp, last timestamp, count)."""
        self.DB[tsuid] = data
        return data[0][0], data[-1][0], len(data)
e101989a4d6808941cf59d0b6ca5c8dec9a09fac | 4,467 | py | Python | models/seeding/base.py | Sanzeed/balanced_influence_maximization | 0797b8a8f536cac8023e128ab13eb532f902bcad | [
"MIT"
] | 4 | 2021-06-03T02:53:46.000Z | 2022-01-25T07:07:08.000Z | models/seeding/base.py | Sanzeed/balanced_influence_maximization | 0797b8a8f536cac8023e128ab13eb532f902bcad | [
"MIT"
] | null | null | null | models/seeding/base.py | Sanzeed/balanced_influence_maximization | 0797b8a8f536cac8023e128ab13eb532f902bcad | [
"MIT"
] | 1 | 2021-06-17T02:17:22.000Z | 2021-06-17T02:17:22.000Z | import numpy as np
from scipy.stats import bernoulli
import heapq
class DiffusionModel:
    """Base class for influence diffusion via pre-sampled live-edge graphs.

    ``num_rels`` Monte-Carlo realisations of the graph are drawn up front
    (one Bernoulli trial per edge); influence spread is then a plain
    reachability query on those realisations.  ``graph`` is assumed to be
    networkx-like (``nodes()``, ``edges()``, ``neighbors()``, node
    attribute ``'label'``) — TODO confirm against callers.
    """

    def __init__(self, graph, majority, get_diffusion_probability, num_rels):
        self.graph = graph
        self.majority = majority
        nodes = sorted(self.graph.nodes())
        # Stable node -> index mapping (sorted order) for vectorised use.
        self.node_index_map = {nodes[i] : i for i in range(len(nodes))}
        # 1 where the node carries the majority label, else 0 (same order).
        self.group_vector = np.array([int(graph.nodes[node]['label'] == majority) for node in nodes])
        self.num_rels = num_rels
        # Callable (u, v, label_u, label_v) -> activation probability of (u, v).
        self.get_diffusion_probability = get_diffusion_probability
        self.__generate_live_edges()

    def __generate_live_edges(self):
        """Sample ``num_rels`` live-edge sets, one Bernoulli draw per edge each."""
        edges = list(self.graph.edges())
        self.live_edges = {}
        edge_probabilities = [self.get_diffusion_probability(u, v,
                                                             self.graph.nodes[u]['label'],
                                                             self.graph.nodes[v]['label']) for (u, v) in edges]
        for i in range(self.num_rels):
            # Vector of 0/1 survival indicators for this realisation.
            edge_life_indicators = bernoulli.rvs(edge_probabilities)
            # The comprehension's `i` shadows the loop variable only inside
            # the comprehension's own scope; the realisation index is safe.
            self.live_edges[i] = {edges[i] for i in range(len(edges)) if edge_life_indicators[i]}
        assert len(self.live_edges) == self.num_rels

    def __is_live_edge(self, rel_index, u, v):
        """Whether (u, v) survived in realisation ``rel_index`` (either direction if undirected)."""
        if self.graph.is_directed():
            return (u, v) in self.live_edges[rel_index]
        else:
            return (u, v) in self.live_edges[rel_index] or (v, u) in self.live_edges[rel_index]

    def compute_influence_data(self, rel_index, u):
        """Return (nodes reachable from ``u`` over live edges, its majority-labelled subset).

        The frontier is a set, so nodes are visited in arbitrary order —
        this is a reachability traversal, not strict BFS; the resulting
        sets are order-independent.
        """
        bfs_queue = {u}
        visited_nodes = set()
        influence_set, majority_in_influence_set = set(), set()
        while bfs_queue:
            node_to_visit = bfs_queue.pop()
            visited_nodes.add(node_to_visit)
            influence_set.add(node_to_visit)
            if self.graph.nodes[node_to_visit]['label'] == self.majority:
                majority_in_influence_set.add(node_to_visit)
            for neighbor in self.graph.neighbors(node_to_visit):
                if neighbor not in visited_nodes and self.__is_live_edge(rel_index, node_to_visit, neighbor):
                    bfs_queue.add(neighbor)
        return influence_set, majority_in_influence_set

    def generate_seeding_data(self):
        """Abstract hook: subclasses implement the actual seed selection."""
        pass
class GreedySeedingModel(DiffusionModel):
    """Lazy-greedy (CELF-style) selection of ``k`` seed nodes.

    The heap holds (-marginal gain, iteration at which that gain was
    computed, node).  A popped gain is only trusted when it was computed
    against the current seed set; otherwise it is recomputed and pushed
    back, avoiding a full re-evaluation of every node per iteration.
    """

    def __init__(self, graph, majority, get_diffusion_probability, num_rels, k):
        super(GreedySeedingModel, self).__init__(graph, majority, get_diffusion_probability, num_rels)
        # -inf gains with iteration flag -1 force a first evaluation of
        # every node.  NOTE(review): equal gains fall through to comparing
        # the node objects themselves, so node labels must be orderable.
        self.queue = [(float('-inf'), -1, v) for v in self.graph.nodes()]
        heapq.heapify(self.queue)
        self.k = k
        self.current_objective_value = 0
        # Per-realisation activated nodes / activated majority nodes so far.
        self.active_set_map = {i : set() for i in range(self.num_rels)}
        self.majority_set_map = {i : set() for i in range(self.num_rels)}
        self.seeding_data = {'active_set' : {i + 1 : set() for i in range(self.k)},
                             'majority' : {i + 1 : set() for i in range(self.k)},
                             'seeds' : []}

    def compute_expected_marginal_gain(self, v):
        """Expected gain of adding ``v`` to the seed set; subclasses must override."""
        pass

    def do_next_iteration(self):
        """One lazy-greedy step: accept the popped node if its gain is fresh, else re-evaluate."""
        inc, iter_flag, u = heapq.heappop(self.queue)
        if iter_flag == len(self.seeding_data['seeds']):
            # Gain was computed against the current seed set -> take u.
            self.seeding_data['seeds'].append(u)
            self.current_objective_value += -inc
            for rel_index in range(self.num_rels):
                influence, majority = self.compute_influence_data(rel_index, u)
                self.active_set_map[rel_index].update(influence)
                self.majority_set_map[rel_index].update(majority)
            # Average sizes over realisations.  NOTE(review): these slots
            # are initialised as set() in __init__ but overwritten with
            # float averages here.
            self.seeding_data['active_set'][iter_flag + 1] = sum(map(len, self.active_set_map.values())) / self.num_rels
            self.seeding_data['majority'][iter_flag + 1] = sum(map(len, self.majority_set_map.values())) / self.num_rels
        else:
            # Stale gain: recompute for the current seed set and re-queue.
            new_negated_marginal_gain = -self.compute_expected_marginal_gain(u)
            new_iter_flag = len(self.seeding_data['seeds'])
            heapq.heappush(self.queue, (new_negated_marginal_gain, new_iter_flag, u))

    def generate_seeding_data(self):
        """Run iterations until ``k`` seeds are chosen; return the collected stats."""
        while len(self.seeding_data['seeds']) < self.k:
            self.do_next_iteration()
        return self.seeding_data
| 44.227723 | 120 | 0.591896 |
e102bdd6852dce95483c7c8cdb3211b3d9ab7231 | 43 | py | Python | run_5395.py | mpi3d/goodix-fp-dump | 039940845bd5eeb98cd92d72f267e3be77feb156 | [
"MIT"
] | 136 | 2021-05-05T14:16:17.000Z | 2022-03-31T09:04:18.000Z | run_5395.py | tsunekotakimoto/goodix-fp-dump | b88ecbababd3766314521fe30ee943c4bd1810df | [
"MIT"
] | 14 | 2021-08-20T09:49:39.000Z | 2022-03-20T13:18:05.000Z | run_5395.py | tsunekotakimoto/goodix-fp-dump | b88ecbababd3766314521fe30ee943c4bd1810df | [
"MIT"
] | 11 | 2021-08-02T15:49:11.000Z | 2022-02-06T22:06:42.000Z | from driver_53x5 import main
main(0x5395)
| 10.75 | 28 | 0.813953 |
e10338cc76f582f3f2a03b933dc6086137bca50f | 7,104 | py | Python | v2ray/com/core/proxy/vmess/inbound/config_pb2.py | xieruan/v2bp | 350b2f80d3a06494ed4092945804c1c851fdf1db | [
"MIT"
] | 7 | 2020-06-24T07:15:15.000Z | 2022-03-08T16:36:09.000Z | v2ray/com/core/proxy/vmess/inbound/config_pb2.py | xieruan/vp | 350b2f80d3a06494ed4092945804c1c851fdf1db | [
"MIT"
] | null | null | null | v2ray/com/core/proxy/vmess/inbound/config_pb2.py | xieruan/vp | 350b2f80d3a06494ed4092945804c1c851fdf1db | [
"MIT"
] | 6 | 2020-07-06T06:51:20.000Z | 2021-03-23T06:26:36.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: v2ray.com/core/proxy/vmess/inbound/config.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from v2ray.com.core.common.protocol import user_pb2 as v2ray_dot_com_dot_core_dot_common_dot_protocol_dot_user__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='v2ray.com/core/proxy/vmess/inbound/config.proto',
package='v2ray.core.proxy.vmess.inbound',
syntax='proto3',
serialized_options=b'\n\"com.v2ray.core.proxy.vmess.inboundP\001Z\007inbound\252\002\036V2Ray.Core.Proxy.Vmess.Inbound',
serialized_pb=b'\n/v2ray.com/core/proxy/vmess/inbound/config.proto\x12\x1ev2ray.core.proxy.vmess.inbound\x1a)v2ray.com/core/common/protocol/user.proto\"\x1a\n\x0c\x44\x65tourConfig\x12\n\n\x02to\x18\x01 \x01(\t\"0\n\rDefaultConfig\x12\x10\n\x08\x61lter_id\x18\x01 \x01(\r\x12\r\n\x05level\x18\x02 \x01(\r\"\xd6\x01\n\x06\x43onfig\x12.\n\x04user\x18\x01 \x03(\x0b\x32 .v2ray.core.common.protocol.User\x12>\n\x07\x64\x65\x66\x61ult\x18\x02 \x01(\x0b\x32-.v2ray.core.proxy.vmess.inbound.DefaultConfig\x12<\n\x06\x64\x65tour\x18\x03 \x01(\x0b\x32,.v2ray.core.proxy.vmess.inbound.DetourConfig\x12\x1e\n\x16secure_encryption_only\x18\x04 \x01(\x08\x42P\n\"com.v2ray.core.proxy.vmess.inboundP\x01Z\x07inbound\xaa\x02\x1eV2Ray.Core.Proxy.Vmess.Inboundb\x06proto3'
,
dependencies=[v2ray_dot_com_dot_core_dot_common_dot_protocol_dot_user__pb2.DESCRIPTOR,])
_DETOURCONFIG = _descriptor.Descriptor(
name='DetourConfig',
full_name='v2ray.core.proxy.vmess.inbound.DetourConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='to', full_name='v2ray.core.proxy.vmess.inbound.DetourConfig.to', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=126,
serialized_end=152,
)
_DEFAULTCONFIG = _descriptor.Descriptor(
name='DefaultConfig',
full_name='v2ray.core.proxy.vmess.inbound.DefaultConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='alter_id', full_name='v2ray.core.proxy.vmess.inbound.DefaultConfig.alter_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='level', full_name='v2ray.core.proxy.vmess.inbound.DefaultConfig.level', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=154,
serialized_end=202,
)
_CONFIG = _descriptor.Descriptor(
name='Config',
full_name='v2ray.core.proxy.vmess.inbound.Config',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user', full_name='v2ray.core.proxy.vmess.inbound.Config.user', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='default', full_name='v2ray.core.proxy.vmess.inbound.Config.default', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='detour', full_name='v2ray.core.proxy.vmess.inbound.Config.detour', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='secure_encryption_only', full_name='v2ray.core.proxy.vmess.inbound.Config.secure_encryption_only', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=205,
serialized_end=419,
)
_CONFIG.fields_by_name['user'].message_type = v2ray_dot_com_dot_core_dot_common_dot_protocol_dot_user__pb2._USER
_CONFIG.fields_by_name['default'].message_type = _DEFAULTCONFIG
_CONFIG.fields_by_name['detour'].message_type = _DETOURCONFIG
DESCRIPTOR.message_types_by_name['DetourConfig'] = _DETOURCONFIG
DESCRIPTOR.message_types_by_name['DefaultConfig'] = _DEFAULTCONFIG
DESCRIPTOR.message_types_by_name['Config'] = _CONFIG
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DetourConfig = _reflection.GeneratedProtocolMessageType('DetourConfig', (_message.Message,), {
'DESCRIPTOR' : _DETOURCONFIG,
'__module__' : 'v2ray.com.core.proxy.vmess.inbound.config_pb2'
# @@protoc_insertion_point(class_scope:v2ray.core.proxy.vmess.inbound.DetourConfig)
})
_sym_db.RegisterMessage(DetourConfig)
DefaultConfig = _reflection.GeneratedProtocolMessageType('DefaultConfig', (_message.Message,), {
'DESCRIPTOR' : _DEFAULTCONFIG,
'__module__' : 'v2ray.com.core.proxy.vmess.inbound.config_pb2'
# @@protoc_insertion_point(class_scope:v2ray.core.proxy.vmess.inbound.DefaultConfig)
})
_sym_db.RegisterMessage(DefaultConfig)
Config = _reflection.GeneratedProtocolMessageType('Config', (_message.Message,), {
'DESCRIPTOR' : _CONFIG,
'__module__' : 'v2ray.com.core.proxy.vmess.inbound.config_pb2'
# @@protoc_insertion_point(class_scope:v2ray.core.proxy.vmess.inbound.Config)
})
_sym_db.RegisterMessage(Config)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 39.248619 | 757 | 0.758164 |
e103652358a900837a67abd9fbc1244e03d12a88 | 2,631 | py | Python | RedditReader/redditReader.py | Semicolon42/PythonProjects | eb6ec5d13594013a2703af43eb0d1c29406faaad | [
"Apache-2.0"
] | null | null | null | RedditReader/redditReader.py | Semicolon42/PythonProjects | eb6ec5d13594013a2703af43eb0d1c29406faaad | [
"Apache-2.0"
] | null | null | null | RedditReader/redditReader.py | Semicolon42/PythonProjects | eb6ec5d13594013a2703af43eb0d1c29406faaad | [
"Apache-2.0"
] | null | null | null | import logging
import csv
import time
from bs4 import BeautifulSoup
import requests
logging.basicConfig(
format='%(asctime)s %(levelname)s:%(message)s',
level=logging.INFO)
class Crawler:
    """Minimal Reddit crawler: fetch listing pages and print each post.

    URLs to process are held in ``urls_to_visit``; processed URLs are
    recorded in ``visited_urls``.
    """
    def __init__(self, urls=None):
        """Seed the work queue with *urls*.

        Bug fix: the original signature used a mutable default argument
        (``urls=[]``), which is shared between all instances created
        without an explicit argument.
        """
        self.visited_urls = []
        self.urls_to_visit = [] if urls is None else urls
    def download_url(self, url):
        """GET *url* with a browser User-Agent, retrying up to 4 times.

        Returns the last ``requests.Response`` (possibly non-200 when every
        attempt failed); connection errors from ``requests`` propagate.
        """
        response = None
        for _attempt in range(1, 5):
            response = requests.get(url, headers={'User-Agent': 'Mozilla/5.0'})
            if response is not None and response.status_code == 200:
                break
        return response
    def get_reddit_posts(self, url, html):
        """Extract ``(post_url, title, body_text)`` triples from a listing page."""
        soup = BeautifulSoup(html, 'html.parser')
        attrs = {'data-click-id': 'body'}
        rposts = []
        for post in soup.find_all('a', attrs=attrs):
            post_url = 'https://www.reddit.com'+post.attrs['href']
            post_text = self.get_reddit_post_text(post_url)
            rposts.append((post_url, post.h3.text, post_text))
        return rposts
    def get_reddit_post_text(self, url):
        """Fetch a post page and return its paragraph text.

        Returns the sentinel string ``"NOT FOUND"`` when no post body can
        be located.  Bug fix: previously the sentinel was used as the
        *initial* accumulator and paragraphs were concatenated onto it,
        so every successful result started with "NOT FOUND ".
        """
        response = self.download_url(url)
        print(response.status_code, url)
        soup = BeautifulSoup(response.text, 'html.parser')
        temp = soup.find('div', attrs={'data-test-id': 'post-content'})
        paragraphs = []
        if temp is not None:
            for div in temp.descendants:
                if hasattr(div, 'attrs') and 'data-click-id' in div.attrs:
                    try:
                        paragraphs.extend(p.text for p in div.find_all('p'))
                    except Exception:
                        logging.exception(f'Failed to get post content: {url}')
        # Join paragraphs with single spaces; fall back to the sentinel.
        return " ".join(paragraphs) if paragraphs else "NOT FOUND"
    def crawl(self, url):
        """Download one listing page and print every post found on it."""
        response = self.download_url(url)
        html = response.text
        print("starting the crawl...")
        posts = self.get_reddit_posts(url, html)
        for rpost in posts:
            print(f'/////////////////////////////////////')
            print(rpost)
            print(f'/////////////////////////////////////')
    def run(self):
        """Drain the work queue, logging successes and failures."""
        while self.urls_to_visit:
            url = self.urls_to_visit.pop(0)
            logging.info(f'Crawling: {url}')
            try:
                self.crawl(url)
            except Exception:
                logging.exception(f'Failed to crawl: {url}')
            finally:
                # Record the URL as visited even when crawling failed.
                self.visited_urls.append(url)
def main():
    # Entry point: crawl the BoardGameExchange "new" listing once.
    Crawler(urls=['https://www.reddit.com/r/BoardGameExchange/new/']).run()
# Run only when executed as a script, not on import.
if __name__ == '__main__':
    print('start up')
    main()
    print('all done')
| 30.241379 | 79 | 0.542379 |
e10557e7b3374a814dff92034c545370c1354b22 | 2,605 | py | Python | asteroid/repl.py | asteroid-lang/asteroid | 537c60dd639e4f83fdefff4d36e1d63c3b4139a4 | [
"MIT"
] | 2 | 2022-02-09T20:33:05.000Z | 2022-02-09T20:33:08.000Z | asteroid/repl.py | asteroid-lang/asteroid | 537c60dd639e4f83fdefff4d36e1d63c3b4139a4 | [
"MIT"
] | 40 | 2022-01-22T02:29:51.000Z | 2022-03-31T14:45:31.000Z | asteroid/repl.py | asteroid-lang/asteroid | 537c60dd639e4f83fdefff4d36e1d63c3b4139a4 | [
"MIT"
] | 2 | 2022-01-20T18:20:11.000Z | 2022-02-12T22:35:22.000Z | from asteroid.interp import interp
from asteroid.version import VERSION
from asteroid.state import state
from asteroid.globals import ExpectationError
from asteroid.walk import function_return_value
from asteroid.support import term2string
from sys import stdin
import readline
def repl():
    """Start the interactive Asteroid prompt.

    Initialises the interpreter state, prints the banner and runs the
    input loop until the user closes the input stream with CTRL+D.
    """
    state.initialize()
    print_repl_menu()
    try:
        run_repl()
    except EOFError:
        # CTRL+D at the prompt: emit a final newline and exit quietly.
        print()
def print_repl_menu():
    """Print the REPL startup banner: version and basic usage hints."""
    print("Asteroid Version", VERSION)
    print("Run \"asteroid -h\" for help")
    print("Press CTRL+D to exit")
def run_repl():
    """Core read-eval-print loop.

    Collects (possibly multi-line) statements, passes them to ``interp``
    and prints any resulting value.  An ``ExpectationError`` with
    ``found_EOF`` set means the statement is incomplete, so the loop
    switches to the continuation prompt and keeps accumulating input.
    """
    # The two different prompt types: > for a new statement,
    # . for continuing an incomplete one.
    arrow_prompt, continue_prompt = ("> ", ". ")
    current_prompt = arrow_prompt
    # The statement text accumulated so far.
    line = ""
    while True:
        """
        Line input, breaking, and exiting
        """
        try:
            # Get the new input and append it to the previous line (possibly
            # empty) with a newline in between.
            # If the line is empty, just set the line.
            if line == "":
                line = input(current_prompt)
            # Otherwise append a new line.
            else:
                line += "\n" + input(current_prompt)
        except KeyboardInterrupt:
            # CTRL+C: discard the partial statement and start fresh.
            line = ""
            current_prompt = arrow_prompt
            print()
            continue
        except EOFError:
            # CTRL+D: leave the loop (caller handles the final newline too).
            print()
            break
        """
        Interpretation, multiline input, and exception handling
        """
        try:
            # Try to interpret the new statement.
            interp(line, initialize_state=False, exceptions=True)
            # Statement executed: reset the input buffer.
            line = ""
            # Check for a return value from the interpreted statement.
            if function_return_value[-1]:
                # Get the last return value (type, value).
                (_, val) = function_return_value[-1]
                # If it isn't None, print out the value.
                if val is not None:
                    print(term2string(function_return_value[-1]))
        except ExpectationError as e:
            # If we expected something but found EOF, the statement is
            # incomplete: keep the buffer and show the continuation prompt.
            if e.found_EOF:
                current_prompt = continue_prompt
            else:
                print("error: "+str(e))
                line = ""
                current_prompt = arrow_prompt
        except Exception as e:
            # FIX THIS: broad catch-all so the REPL survives interpreter bugs.
            print("error: "+str(e))
            line = ""
            current_prompt = arrow_prompt
        else:
            current_prompt = arrow_prompt
| 26.581633 | 83 | 0.554702 |
e106417c74eb34df2f46cb1cc4d7afaf1c61501e | 1,762 | py | Python | apis/file_state.py | brockpalen/ltfsee-globus | 5cb322ef09cd4f883951de96e5cb242f876ccd9c | [
"MIT"
] | null | null | null | apis/file_state.py | brockpalen/ltfsee-globus | 5cb322ef09cd4f883951de96e5cb242f876ccd9c | [
"MIT"
] | null | null | null | apis/file_state.py | brockpalen/ltfsee-globus | 5cb322ef09cd4f883951de96e5cb242f876ccd9c | [
"MIT"
] | null | null | null | """API for eeadm file state."""
from http import HTTPStatus
from flask import request
from flask_restx import Namespace, Resource, fields
from core.eeadm.file_state import EEADM_File_State
from ltfsee_globus.auth import token_required
api = Namespace(
    "file_state", description="Get state of a file in archive eeadm file state"
)
# model for returning data from eeadm file state -s
# Field names mirror the "eeadm file state" command output columns.
# https://www.ibm.com/support/knowledgecenter/ST9MBR_1.3.0/ee_eeadm_file_state_command_output.html
file_state_model = api.model(
    "file_state",
    {
        "state": fields.String,
        "replicas": fields.Integer,
        "tapes": fields.List(fields.String),
        "path": fields.String,
    },
)
# model for the input of a file
# must be an absolute path
file_model = api.model("file", {"path": fields.String})
# create the API resource bound to POST /file_state
@api.route("/file_state")
class FileState(Resource):
    """API Provider class for eeadm file state.
    https://www.ibm.com/support/knowledgecenter/ST9MBR_1.3.0/ee_eeadm_file_state_command_output.html
    """
    # Response is marshalled as a list of file_state_model entries;
    # the request body must match file_model and carry a valid token.
    @api.marshal_list_with(file_state_model, code=HTTPStatus.CREATED.value)
    @api.expect(file_model, validate=True)
    @api.response(HTTPStatus.NOT_FOUND.value, "No such file")
    @api.response(HTTPStatus.CREATED.value, "Request for file state created")
    @token_required
    def post(self, **kwargs):
        """POST method to send payload of file path to check status of files."""
        path = request.json["path"]
        # pass in the path including wild cards to get list of file states
        file_state = EEADM_File_State(path)
        api.logger.debug(file_state.files)
        api.logger.info(f"Checking state of {path} from {request.remote_addr}")
        return file_state.files, HTTPStatus.CREATED
| 32.036364 | 100 | 0.713394 |
e1068254019048e1b19e7e8d94638f8a3b8808de | 1,350 | py | Python | src/helpers/fix_test_data_for_roc.py | Iretha/IoT23-network-traffic-anomalies-classification | 93c157589e8128e8d9d5091d93052b18cd3ac35d | [
"MIT"
] | 9 | 2021-04-07T18:16:54.000Z | 2021-12-08T16:49:03.000Z | src/helpers/fix_test_data_for_roc.py | Iretha/IoT-23-anomaly-detection | 93c157589e8128e8d9d5091d93052b18cd3ac35d | [
"MIT"
] | 2 | 2021-09-02T03:52:03.000Z | 2021-11-15T11:32:55.000Z | src/helpers/fix_test_data_for_roc.py | Iretha/IoT23-network-traffic-anomalies-classification | 93c157589e8128e8d9d5091d93052b18cd3ac35d | [
"MIT"
] | null | null | null | from numpy import sort
import pandas as pd

from src.helpers.dataframe_helper import df_get, write_to_csv
def add_missing_class_rows_to_test_data(train_data_path, test_data_path):
    """Public wrapper: ensure every class in the training CSV also appears
    in the test CSV (needed so ROC computation sees all classes)."""
    __add_missing_classes(train_data_path, test_data_path)
def __add_missing_classes(train_data_path, test_data_path):
    """Find class labels present in the training set but absent from the
    test set, and copy one sample row of each into the test file.

    No-op when either path is ``None``.
    """
    if train_data_path is None or test_data_path is None:
        return
    df_train = df_get(train_data_path, delimiter=',')
    train_classes = sort(list(df_train['detailed-label'].unique()))
    df_test = df_get(test_data_path, delimiter=',')
    test_classes = sort(list(df_test['detailed-label'].unique()))
    # Labels seen in training but never in test.
    classes_missing_in_test = sort(list(set(train_classes) - set(test_classes)))
    __copy_random_record_of_class(df_train, train_data_path, df_test, test_data_path, classes_missing_in_test)
def __copy_random_record_of_class(from_df, from_file_path, to_df, to_file_path, classes=None):
    """Append one random record of each label in *classes* from *from_df*
    to *to_df* and rewrite *to_file_path* with the extended frame.

    TODO if we want to be more precise, we have to move the row, not just copy it

    :param from_df: source DataFrame (training data)
    :param from_file_path: path of the source file (unused, kept for
        interface compatibility)
    :param to_df: target DataFrame (test data)
    :param to_file_path: CSV file the extended target frame is written to
    :param classes: iterable of class labels missing from the target frame;
        ``None`` or empty means nothing to do
    """
    if classes is None or len(classes) == 0:
        return
    print('Missing classes: ' + str(classes) + ' in ' + to_file_path)
    sampled = []
    for clz in classes:
        # One random representative row per missing class.
        sampled.append(from_df[from_df['detailed-label'] == clz].sample(1))
    # Bug fix: DataFrame.append was deprecated and removed in pandas 2.0;
    # pd.concat is the supported equivalent (one call instead of N appends).
    to_df = pd.concat([to_df] + sampled)
    write_to_csv(to_df, to_file_path, mode='w')
| 34.615385 | 110 | 0.718519 |
e106968b5aabed3c4faf9536ea2f316b06ae7ec9 | 7,925 | py | Python | 130_html_to_csv/150_mkcsv_t_info_d.py | takobouzu/BOAT_RACE_DB | f16ed8f55aef567c0ecc6ebd3ad0e917f5c600d8 | [
"MIT"
] | 6 | 2020-12-23T01:06:04.000Z | 2022-01-12T10:18:36.000Z | 130_html_to_csv/150_mkcsv_t_info_d.py | takobouzu/BOAT_RACE_DB | f16ed8f55aef567c0ecc6ebd3ad0e917f5c600d8 | [
"MIT"
] | 15 | 2021-03-02T05:59:24.000Z | 2021-09-12T08:12:38.000Z | 130_html_to_csv/150_mkcsv_t_info_d.py | takobouzu/BOAT_RACE_DB | f16ed8f55aef567c0ecc6ebd3ad0e917f5c600d8 | [
"MIT"
] | 1 | 2021-05-09T10:47:21.000Z | 2021-05-09T10:47:21.000Z | '''
【システム】BOAT_RACE_DB2
【ファイル】140_mkcsv_t_info_d.py
【機能仕様】直前情報HTMLファイルから直前情報明細テーブル「t_info_d」のインポートCSVファイルを作成する
【動作環境】macOS 11.1/Raspbian OS 10.4/python 3.9.1/sqlite3 3.32.3
【来 歴】2021.02.01 ver 1.00
'''
import os
import datetime
from bs4 import BeautifulSoup
#インストールディレクトの定義
BASE_DIR = '/home/pi/BOAT_RACE_DB'
'''
【関 数】mkcsv_t_info_d
【機 能】直前HTMLファイルから直前情報明細テーブル「t_info_d」のインポートCSVファイルを作成する
【引 数】なし
【戻り値】なし
'''
def mkcsv_t_info_d():
    """Build the import CSV for the last-minute-info detail table "t_info_d".

    Reads every race HTML page under 200_html/last_info, scrapes the
    per-boat detail fields with BeautifulSoup and appends one CSV record
    per boat to 210_csv/t_info_d.csv.
    """
    print('直前情報明細テーブル「t_info_d」のインポートCSVファイル 開始')
    in_path = BASE_DIR + '/200_html/last_info'
    out_file = BASE_DIR + '/210_csv/t_info_d.csv'
    fw = open(out_file, 'w')
    for item in os.listdir(path=in_path):
        if item != '.html' and item != '.DS_Store':
            in_file = in_path + '/' + item
            print("==> 処理中[%s]" % (in_file))
            fb = open(in_file, 'r')
            html = fb.read()
            fb.close()
            # Data-existence check: skip pages carrying the "no data" message
            flg = 0
            if 'データがありません。' in html:
                flg = 1
            if flg == 0:
                # Initialise the CSV record fields (race-level items)
                t_info_d_yyyymmdd = '' # race date
                t_info_d_pool_code = '' # venue code
                t_info_d_race_no = '' # race number
                # Extract the CSV record items from the HTML file
                soup = BeautifulSoup(html, 'html.parser')
                # Race date: first 8 chars of the file name (YYYYMMDD)
                t_info_d_yyyymmdd = item[0:8]
                # Venue code: file-name chars 8-9
                t_info_d_pool_code = item[8:10]
                # Race number: file-name chars 10-11
                t_info_d_race_no = item[10:12]
                # Venue name (scraped from the header image alt text;
                # note: not written into the CSV record below)
                for tag1 in soup.find_all('img'):
                    if '/static_extra/pc/images/text_place2' in str(tag1):
                        for tag2 in str(tag1).splitlines():
                            if '/static_extra/pc/images/text_place2' in str(tag2):
                                wk_arry = str(tag2).strip().split(' ')
                                t_race_d_pool_name = str(wk_arry[1])
                                t_race_d_pool_name = t_race_d_pool_name.replace('alt="','')
                                t_race_d_pool_name = t_race_d_pool_name.replace('"','')
                # Extract the per-boat detail items
                base_count = 0
                for tag1 in soup.find_all('tbody'):
                    if 'is-fs12' in str(tag1):
                        base_count = base_count + 1
                        # Initialise the CSV record fields (per-boat items)
                        t_info_d_entry_no = '' # lane (boat) number
                        t_info_d_body_weight = '' # body weight
                        t_info_d_adjusted_weight = '' # adjusted weight
                        t_info_d_rehearsal_time = '' # exhibition time
                        t_info_d_tilt = '' # tilt
                        t_info_d_start_course = '' # start-exhibition course
                        t_info_d_flying = '' # flying flag
                        t_info_d_start_time = '' # start-exhibition time
                        # Lane number: order of the tbody blocks on the page
                        t_info_d_entry_no = str(base_count)
                        # Body weight: fixed line 6 of the tbody markup.
                        # NOTE(review): the magic line positions (6/22/7/8)
                        # are tied to the current page layout - re-confirm
                        # after any site redesign.
                        n = 0
                        for tag2 in str(tag1).splitlines():
                            n = n + 1
                            if n == 6:
                                wk_arry = str(tag2).strip().split('>')
                                t_info_d_body_weight = str(wk_arry[1])
                                t_info_d_body_weight = t_info_d_body_weight.replace('</td','')
                                t_info_d_body_weight = t_info_d_body_weight.replace('kg','')
                                t_info_d_body_weight = t_info_d_body_weight.strip()
                                break
                        # Adjusted weight: fixed line 22
                        n = 0
                        for tag2 in str(tag1).splitlines():
                            n = n + 1
                            if n == 22:
                                wk_arry = str(tag2).strip().split('>')
                                t_info_d_adjusted_weight = str(wk_arry[1]).replace('</td','')
                                break
                        # Exhibition time: fixed line 7
                        n = 0
                        for tag2 in str(tag1).splitlines():
                            n = n + 1
                            if n == 7:
                                wk_arry = str(tag2).strip().split('>')
                                t_info_d_rehearsal_time = str(wk_arry[1]).replace('</td','')
                                break
                        # Tilt: fixed line 8
                        n = 0
                        for tag2 in str(tag1).splitlines():
                            n = n + 1
                            if n == 8:
                                wk_arry = str(tag2).strip().split('>')
                                t_info_d_tilt = str(wk_arry[1]).replace('</td','')
                                break
                        # Start-exhibition course: position of this boat's
                        # number among the start-display spans
                        n = 0
                        for tag2 in soup.find_all('span'):
                            if 'table1_boatImage1Number' in str(tag2):
                                n = n + 1
                                wk_arry = str(tag2).strip().split('>')
                                wk_str = str(wk_arry[1]).replace('</span','')
                                if t_info_d_entry_no == wk_str:
                                    t_info_d_start_course = str(n)
                        # Flying flag and start-exhibition time
                        n = 0
                        for tag2 in soup.find_all('span'):
                            if 'table1_boatImage1Time' in str(tag2):
                                n = n + 1
                                wk_arry = str(tag2).strip().split('>')
                                wk_str = str(wk_arry[1]).replace('</span','')
                                if t_info_d_start_course == str(n):
                                    if 'F' in wk_str:
                                        t_info_d_flying = 'F'
                                        t_info_d_start_time = wk_str.replace('F','')
                                    else:
                                        t_info_d_flying = ' '
                                        t_info_d_start_time = wk_str
                        # Build the CSV record
                        t_info_d_outrec = ''
                        t_info_d_outrec = t_info_d_outrec + '"' + t_info_d_yyyymmdd + '"' # race date
                        t_info_d_outrec = t_info_d_outrec + ',"' + t_info_d_pool_code + '"' # venue code
                        t_info_d_outrec = t_info_d_outrec + ',"' + t_info_d_race_no + '"' # race number
                        t_info_d_outrec = t_info_d_outrec + ',"' + t_info_d_entry_no + '"' # lane number
                        t_info_d_outrec = t_info_d_outrec + ',' + t_info_d_body_weight # body weight
                        t_info_d_outrec = t_info_d_outrec + ',' + t_info_d_adjusted_weight # adjusted weight
                        t_info_d_outrec = t_info_d_outrec + ',' + t_info_d_rehearsal_time # exhibition time
                        t_info_d_outrec = t_info_d_outrec + ',' + t_info_d_tilt # tilt
                        t_info_d_outrec = t_info_d_outrec + ',' + t_info_d_start_course # start-exhibition course
                        t_info_d_outrec = t_info_d_outrec + ',"' + t_info_d_flying + '"' # flying flag: blank = none, 'F' = flying (false start); original note also lists 2 = late start
                        t_info_d_outrec = t_info_d_outrec + ',' + t_info_d_start_time # start-exhibition time
                        # Write the CSV record (only when a body weight was parsed)
                        if t_info_d_body_weight != '':
                            fw.write(t_info_d_outrec + '\n')
    fw.close()
    print('直前情報明細「t_info_d」のインポートCSVファイル 完了')
# Main processing
mkcsv_t_info_d() # build the import CSV for the "t_info_d" detail table
| 48.323171 | 120 | 0.418801 |
e1071b566b934eed8eaf574357b76325acbfe989 | 174 | py | Python | python/show_nmc.py | Typas/Data-Assimilation-Project | 4b880c7faadf778d891ffab77ebfbde1db5c5baf | [
"MIT"
] | null | null | null | python/show_nmc.py | Typas/Data-Assimilation-Project | 4b880c7faadf778d891ffab77ebfbde1db5c5baf | [
"MIT"
] | null | null | null | python/show_nmc.py | Typas/Data-Assimilation-Project | 4b880c7faadf778d891ffab77ebfbde1db5c5baf | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import numpy as np
import matplotlib.pyplot as plt

# Load the flattened matrix from disk and restore its 40x40 shape.
# NOTE(review): the file presumably holds an NMC-method background-error
# covariance for a 40-variable model - confirm against the producer script.
B = np.reshape(np.genfromtxt("data/b_nmc.txt"), (40, 40))

# Quick-look filled-contour plot of B.
# (Import hoisted to the top of the script: imports belong at file level,
# not between statements.)
plt.contourf(B)
plt.colorbar()
plt.show()
| 21.75 | 57 | 0.724138 |
e10776844de6cd61363f91f2091e32c884366312 | 602 | py | Python | hello.py | QuocTrungTran/cgi-lab | fa79815b0e0ebd3d925e4d30043f2536ef2d9b4f | [
"Apache-2.0"
] | null | null | null | hello.py | QuocTrungTran/cgi-lab | fa79815b0e0ebd3d925e4d30043f2536ef2d9b4f | [
"Apache-2.0"
] | null | null | null | hello.py | QuocTrungTran/cgi-lab | fa79815b0e0ebd3d925e4d30043f2536ef2d9b4f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import os, json

# CGI response header; the embedded \r\n\r\n already terminates the header
# section, the following blank line is belt-and-braces.
print("Content-type:text/html\r\n\r\n")
# Bug fix: a bare `print` is a no-op expression in Python 3 (it just
# evaluates the function object); `print()` emits the intended blank line.
print()
print("<title>Test CGI</title>")
print("<p>Hello World!</>")
# Lab exercises Q1-Q3 below are intentionally left commented out.
# #Q1
# print(os.environ)
# json_object = json.dumps(dict(os.environ), indent=4)
# #print(json_object)
#Q2
# for param in os.environ.keys():
#     if (param=="QUERY_STRING"):
#         #print(f"<em>{param}</em> = {os.environ[param]}</li>")
#         print("<b>%20s</b>: %s<br>" % (param, os.environ[param]))
# #Q3
# for param in os.environ.keys():
#     if (param=="HTTP_USER_AGENT"):
#         print("<b>%20s</b>: %s<br>" % (param, os.environ[param]))
e1088f7eca5eb9b2a0d3d520b6c9dd794d84bb1c | 2,194 | py | Python | onetabtobear.py | vinceblake/saveTabToBear | 4b3a79c06e9130c95fa1f87b30999f2fbfe2e017 | [
"MIT"
] | null | null | null | onetabtobear.py | vinceblake/saveTabToBear | 4b3a79c06e9130c95fa1f87b30999f2fbfe2e017 | [
"MIT"
] | null | null | null | onetabtobear.py | vinceblake/saveTabToBear | 4b3a79c06e9130c95fa1f87b30999f2fbfe2e017 | [
"MIT"
] | null | null | null | #!/usr/local/bin/python3
from subprocess import Popen, PIPE
from urllib.parse import quote
import sqlite3, datetime, sys, re
# Global Variables
removeCheckedItems = True # Set to false if you want to keep "completed" to-do items when this is run
bearDbFile = str(sys.argv[3])
oneTabID = str(sys.argv[4])
# Methods
def create_connection(db_file): # Establish SQLITE database connection cursor
""" create a database connection to the SQLite database
specified by the db_file
:param db_file: database file
:return: Connection or None
"""
conn = None
try:
conn = sqlite3.connect(db_file)
except:
print("Failed to establish connection")
return None
return conn
def xcall(url): # Simple wrapper method to run xcalls
r = Popen(['/Applications/xcall.app/Contents/MacOS/xcall',
'-url', f'"{url}"'
], stdout=PIPE)
stdout = r.communicate()
return str(stdout[0].decode('utf-8')).strip().replace(" ","")
def getOneTab(): # Get and return OneTab note from Bear
bearNote = bear.execute(f"SELECT * FROM ZSFNOTE WHERE ZUNIQUEIDENTIFIER IS '{oneTabID}'").fetchone()
return str(bearNote[32]) # ZTEXT
def updateOneTab():
oneTab = getOneTab().replace("# BearMarks","")
if removeCheckedItems:
oneTab = re.sub(r"^\+ .*\n","",oneTab,flags=re.MULTILINE)
oneTab = re.sub(r"^\#\#\# .*\n\n","",oneTab,flags=re.MULTILINE)
if url in oneTab:
#print("URL already present. Skipping.")
return
now = datetime.datetime.now().strftime("%B %d, %Y")
prefix = f'### {now}\n'
line = f'- [{title}]({url})'
if prefix in oneTab:
oneTab = oneTab.replace(prefix,f'{prefix}{line}\n')
else:
line = f'{prefix}{line}\n'
oneTab = line + oneTab
update = f'bear://x-callback-url/add-text?id={oneTabID}&mode=replace&text={quote(oneTab.strip())}&open_note=no'
xcall(update)
# Main functionality:
if __name__ == '__main__':
title = sys.argv[1]
url = sys.argv[2]
# Connect to Bear database
beardb = create_connection(bearDbFile)
bear = beardb.cursor()
# Process tab and update database:
updateOneTab() | 30.054795 | 115 | 0.639927 |
e1093ea692aa40b78e1fe9867c9ec44b0222ae19 | 1,319 | py | Python | defects4cpp/d++.py | HansolChoe/defects4cpp | cb9e3db239c50e6ec38127cec117865f0ee7a5cf | [
"MIT"
] | 10 | 2021-06-23T01:53:19.000Z | 2022-03-31T03:14:01.000Z | defects4cpp/d++.py | HansolChoe/defects4cpp | cb9e3db239c50e6ec38127cec117865f0ee7a5cf | [
"MIT"
] | 34 | 2021-05-27T01:09:04.000Z | 2022-03-28T07:53:35.000Z | defects4cpp/d++.py | HansolChoe/defects4cpp | cb9e3db239c50e6ec38127cec117865f0ee7a5cf | [
"MIT"
] | 6 | 2021-09-03T07:16:56.000Z | 2022-03-29T07:30:35.000Z | import sys
from time import perf_counter
from command import CommandList
from errors import DppArgparseError, DppDockerError, DppError
from message import message
def _handle_cmdline_error(e: DppError):
if isinstance(e, DppArgparseError):
message.stdout_argparse_error(str(e))
elif isinstance(e, DppDockerError):
message.stdout_argparse_error(str(e))
def main():
def measure_time(func, args):
start_time = perf_counter()
func(args)
elapsed = perf_counter() - start_time
if elapsed < 100:
message.stdout_progress(f"Elapsed: {elapsed:.2f}s")
else:
minutes, seconds = divmod(elapsed, 60)
message.stdout_progress(f"Elapsed: {int(minutes)}m {seconds:.2f}s")
commands = CommandList()
try:
name = sys.argv[1]
except IndexError:
name = "help"
argv = sys.argv[2:]
if name not in commands:
message.stdout_progress_error(f"'{name}' is not a valid command")
return 1
try:
if name != "help":
measure_time(commands[name], argv)
else:
commands[name](argv)
except DppError as e:
_handle_cmdline_error(e)
if __name__ == "__main__":
from multiprocessing import freeze_support
freeze_support()
main()
| 24.425926 | 79 | 0.639121 |
e109e7b0486674fec7a7133e0f5ef96b64e2f7e2 | 9,962 | py | Python | wz/ui/choice_grid.py | gradgrind/WZ | 672d93a3c9d7806194d16d6d5b9175e4046bd068 | [
"Apache-2.0"
] | null | null | null | wz/ui/choice_grid.py | gradgrind/WZ | 672d93a3c9d7806194d16d6d5b9175e4046bd068 | [
"Apache-2.0"
] | null | null | null | wz/ui/choice_grid.py | gradgrind/WZ | 672d93a3c9d7806194d16d6d5b9175e4046bd068 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
ui/choice_grid.py
Last updated: 2021-05-04
Manage the grid for the puil-subject-choice-editor.
=+LICENCE=============================
Copyright 2021 Michael Towers
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=-LICENCE========================================
"""
### Display texts
_PUPIL = "Schüler"
_GROUPS = "Gruppen"
## Measurements are in mm ##
_SEP_SIZE = 1
_HEIGHT_LINE = 6
_WIDTH_TOGGLE = 8
COLUMNS = (35, 15, 15, _SEP_SIZE) # + ...
ROWS = (
#title
12,
# info rows
_HEIGHT_LINE, _HEIGHT_LINE, _HEIGHT_LINE, _HEIGHT_LINE,
_HEIGHT_LINE, _HEIGHT_LINE,
# header (tags)
_HEIGHT_LINE, _SEP_SIZE
) # + _HEIGHT_LINE * n
# Content of marked toggle-cells
MARK = 'X'
#####################################################
from qtpy.QtWidgets import QApplication
from qtpy.QtGui import QColor, QBrush
from qtpy.QtCore import Qt
from ui.gridbase import GridBase
class ToggleGrid(GridBase):
"""A grid of toggle-cells with column and row headers (potentially
multi-row or multi-column respectively).
Clicking on a cell will toggle its value. SHIFT-clicking marks a cell
as the starting point of a rectangle. A further SHIFT-click marks
the end-point of the rectangle and toggles all cells within the
rectangle. The marking is removed.
The mark can also be removed by clicking elsewhere (without SHIFT).
"""
def __init__(self, gview, info, pupil_data, subjects):
"""<gview> is the "View" on which this "Scene" is to be presented.
<info>: general information, [[key, value], ... ]
<pupil_data>: A list of pupil lines, only valid sids are included:
[[pid, name, groups, {sid: val, ... }], ... ]
val: true if marked
<subjects>: The list of subjects, possibly containing spacers:
[[sid, name], ... , null-value, [sid, name], ... ]
"""
# Set up grid: get number of rows and columns
row_pids = len(ROWS)
_ROWS = ROWS + (_HEIGHT_LINE,) * len(pupil_data)
col_sids = len(COLUMNS)
_COLS = list(COLUMNS)
for s in subjects:
_COLS.append(_WIDTH_TOGGLE if s else _SEP_SIZE)
super().__init__(gview, _ROWS, _COLS)
self.styles()
# Horizontal separator (after headers)
self.basic_tile(row_pids - 1, 0, tag = None, text = None,
style = 'padding', cspan = len(_COLS))
# Vertical separator (before subjects)
col = col_sids
self.basic_tile(1, col_sids - 1, tag = None, text = None,
style = 'padding', rspan = len(_ROWS) - 1)
### Title area
self.basic_tile(0, 0, tag = None, text = "Fächer(ab)wahl",
style = 'title', cspan = 2)
self.basic_tile(0, 4, tag = None,
text = ADMIN.school_data['SCHOOL_NAME'],
style = 'titleR', cspan = 10)
### General Info
line = 1
for key, value in info:
self.basic_tile(line, 0, tag = None, text = key,
style = 'info')
# Non-editable
self.basic_tile(line, 1, tag = None, text = value,
style = 'info', cspan = 2)
line += 1
### Subject headers
line = 7
rspan = line - 1
self.basic_tile(line, 0, tag = None, text = _PUPIL,
style = 'small', cspan = 2)
self.basic_tile(line, 2, tag = None, text = _GROUPS,
style = 'small')
col = col_sids
self.sids = []
for sid_name in subjects:
if sid_name:
sid, name = sid_name
self.sids.append(sid)
self.basic_tile(line, col, tag = None, text = sid,
style = 'small')
self.basic_tile(1, col, tag = None, text = name,
style = 'v', rspan = rspan)
else:
# vertical spacer
self.basic_tile(1, col, tag = None, text = None,
style = 'padding', rspan = len(_ROWS) - 1)
col += 1
### Pupil lines
row = row_pids
# The array (list of lists) <self.toggles> is a simple matrix
# of the toggle-tiles, omitting the skipped columns.
self.toggles = []
self.pids = []
self.value0 = set() # Set of initially marked cells (x, y)
y = 0
for pid, pname, groups, choices in pupil_data:
self.basic_tile(row, 0, tag = None, text = pname,
style = 'name', cspan = 2)
self.basic_tile(row, 2, tag = None, text = groups,
style = 'small')
col = col_sids
x = 0
_toggles = []
for sid_name in subjects:
if sid_name:
try:
marked = choices[sid_name[0]]
except KeyError:
# Invalid key: not editable
tag = None
style = 'padding'
val = None
else:
tag = (x, y)
style = 'toggle'
if marked:
self.value0.add(tag)
val = MARK
else:
val = ''
tile = self.basic_tile(row, col, tag = tag,
text = val, style = style)
_toggles.append(tile)
x += 1
col += 1
self.pids.append(pid)
self.toggles.append(_toggles)
y += 1
row += 1
# Need a highlighted/selected QBrush for a toggle-cell
self.mark_brush = QBrush(QColor('#80FF7200'))
self.no_mark = self.style('toggle').bgColour or QBrush(Qt.NoBrush)
# Collect changed cell tags for signalling "table changed".
self._changes = set()
self.toggle_start = None
#
def styles(self):
"""Set up the styles used in the table view.
"""
self.new_style('base', font = ADMIN.school_data['FONT'], size = 11)
self.new_style('name', base = 'base', align = 'l')
self.new_style('title', font = ADMIN.school_data['FONT'], size = 12,
align = 'l', border = 0, highlight = 'b')
self.new_style('info', base = 'base', border = 0, align = 'l')
self.new_style('underline', base = 'base', border = 2)
self.new_style('titleR', base = 'title', align = 'r')
self.new_style('small', base = 'base', size = 10)
self.new_style('v', base = 'small', align = 'b')
self.new_style('toggle', base = 'base', highlight = ':002562',
mark = 'E00000')
# self.new_style('no-toggle', bg = '666666')
self.new_style('padding', bg = '666666')
#
def tile_left_clicked(self, tile):
if isinstance(tile.tag, tuple):
# toggle-tile
kbdmods = QApplication.keyboardModifiers()
if kbdmods & Qt.ShiftModifier:
if self.toggle_start:
# toggle range
c0, r0 = self.toggle_start.tag
c1, r1 = tile.tag
r_range = range(r0, r1 + 1) if r1 >= r0 \
else range(r1, r0 + 1)
c_range = range(c0, c1 + 1) if c1 >= c0 \
else range(c1, c0 + 1)
for r in r_range:
for c in c_range:
self.toggle(self.toggles[r][c])
else:
self.toggle_start = tile
# highlight cell
tile.setBrush(self.mark_brush)
return False
else:
self.toggle(tile)
if self.toggle_start:
# remove highlight
if self.toggle_start:
self.toggle_start.setBrush(self.no_mark)
self.toggle_start = None
return False
#
def toggle(self, tile):
val = '' if tile.value() else MARK
tile.setText(val)
if val:
if tile.tag in self.value0:
self.changes_discard(tile.tag)
else:
self.changes_add(tile.tag)
else:
if tile.tag in self.value0:
self.changes_add(tile.tag)
else:
self.changes_discard(tile.tag)
#
def changes_discard(self, tag):
if self._changes:
self._changes.discard(tag)
if not self._changes:
self._gview.set_changed(False)
#
def changes_add(self, tag):
if not self._changes:
self._gview.set_changed(True)
self._changes.add(tag)
#
def changes(self):
return list(self._changes)
#
def data(self):
"""Return choice data as a list of "non-chosen" subject lists.
[(pid, [sid, ...]), ... ]
Also pupils with empty lists are included.
"""
clist = []
y = 0
for row in self.toggles:
x = 0
slist = []
for sid in self.sids:
if row[x].value():
slist.append(sid)
x += 1
clist.append((self.pids[y], slist))
y += 1
return clist
| 36.490842 | 76 | 0.511443 |
e10a689e78f45e04945a350aa7275406f0c3d7c2 | 72 | py | Python | numberstest.py | dreadnaught-ETES/school | 9faa2b6379db8f819872b8597896f5291812c5d6 | [
"CC0-1.0"
] | null | null | null | numberstest.py | dreadnaught-ETES/school | 9faa2b6379db8f819872b8597896f5291812c5d6 | [
"CC0-1.0"
] | null | null | null | numberstest.py | dreadnaught-ETES/school | 9faa2b6379db8f819872b8597896f5291812c5d6 | [
"CC0-1.0"
] | null | null | null | import math
result=(math.pow(3,2)+1)*(math.fmod(16,7))/7
print(result) | 24 | 45 | 0.680556 |
e10b54355c9e418ed2013419152b910332c40ec9 | 5,585 | py | Python | EPH_CORE_SkyObjectMgr.py | polsterc16/ephem | 70ac6c079c80344b83499b96edaff57fb5881efc | [
"MIT"
] | null | null | null | EPH_CORE_SkyObjectMgr.py | polsterc16/ephem | 70ac6c079c80344b83499b96edaff57fb5881efc | [
"MIT"
] | null | null | null | EPH_CORE_SkyObjectMgr.py | polsterc16/ephem | 70ac6c079c80344b83499b96edaff57fb5881efc | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 5 16:25:45 2019
@author: polsterc16
==============================================================================
LICENCE INFORMATION
==============================================================================
This Software uses Code (spg4) provided by "Brandon Rhodes" under
the "MIT License". For more Information see "licence-info.txt".
Diese Software benutzt Code (spg4), welcher von "Brandon Rhodes" unter
der "MIT License" zur Verfuegung gestellt wird. Fuer weitere Information
siehe "licence-info.txt".
==============================================================================
"""
import EPH_CORE_TimeSpaceMgr as TSMgr
import EPH_SAT_SatelliteMgr as SatMgr
import EPH_PLANET_PlanetMgr as PlanetMgr
import EPH_STAR_StarMgr as StarMgr
import EPH_MOON_MoonMgr as MoonMgr
class SkyObjectMgr:
''' deals with all allowed sky objects. '''
################################################################
#### STATIC / CLASS VARIABLES
################################################################
#### INIT
def __init__(self, TSMgrObj, skyObjType:str, identifier=None ):
''' TimeSpaceManager Instance, type of sky object, identifier '''
constrString = ("Constructor for Type: '"+str(skyObjType)+
"' ("+str(identifier)+"): ")
self._cnstrMsg = (constrString+"failed immediatly.") # immediate fail
self._success = False
self._skyObject = None
self._skyObjName = identifier
self._skyObjType = skyObjType
if(type(TSMgrObj)==(TSMgr.TimeSpaceMgr)):
# Wenn TSMgrObj wirklich ein TSMgr.TimeSpaceMgr objekt ist
self._TSMgr = TSMgrObj
if (skyObjType.lower() in ["planet"]):
# TODO: implementierung
# wenn typ = planet
self._skyObjType = "planet"
self._skyObject = PlanetMgr.PlanetMgr(self._TSMgr,identifier)
elif(skyObjType.lower() in ["star"]):
# wenn typ = stern
self._skyObjType = "star"
self._skyObject = StarMgr.StarMgr(self._TSMgr,identifier)
self._skyObjName = str(self._skyObject.getName())
elif(skyObjType.lower() in ["moon"]):
# TODO: implementierung
# wenn typ = Mond
self._skyObjType = "moon"
self._skyObject = MoonMgr.MoonMgr(self._TSMgr)
elif(skyObjType.lower() in ["sat","satellite"]):
# wenn typ = satellit
self._skyObjType = "satellite"
tempID = identifier
# für den fall einer string eingabe
if type(tempID) == str:
tempID = tempID.strip()
if tempID.isnumeric():
tempID = int(tempID)
if type(tempID) == int:
# geht nur für int eingabe (bzw erkennbarer int-string)
self._skyObject = SatMgr.SatelliteMgr(self._TSMgr,tempID)
else:
# wenn typ = UNBEKANNT
self._cnstrMsg = (constrString+
"failed - unknown 'skyObjType' string.")
else:
# this is not a TSMgr object!
self._cnstrMsg = (constrString+
"failed - 'TSMgrObj' must be of type "+
str(type(TSMgr.TimeSpaceMgr()))+".")
def get_pos_spherical(self, utcTime = None):
#set TSMgr to utcTime (if None, then utcNow)
self._TSMgr.time_set_utcDateTime(utcTime)
# TODO: für alle implementieren
if self._skyObject != None:
# wenn skyObject existiert
if self._skyObjType in ["planet","star","satellite","moon"]:
# wenn type eine umsetzung hat
print(self._skyObjType)
pos = self._skyObject.getPos()
if pos != None:
return {"Ra": pos['Ra'],
"De": pos['De'],
'Success': self._skyObject.get_success()}
else:
return None
else:
return None
else:
# wenn kein skyobject existiert
return None
def write_pos_to_dict(self, destDict: dict, utcTime = None):
''' writes Ra, De and Success to a provided dictionary.
(OPTIONAL: get the pos for a specific utc datetime) '''
# get pos for specified utctime (inf none, then utcNow)
temp = self.get_pos_spherical(utcTime)
if(temp != None):
# if it has a return value, then write this to dict
destDict['Ra'] = temp['Ra']
destDict['De'] = temp['De']
destDict['Success'] = temp['Success']
else:
# if it returns none, then return default none values
destDict['Ra'] = None
destDict['De'] = None
destDict['Success'] = False
def get_type(self):
return self._skyObjType
def get_name(self):
return self._skyObjName
| 34.263804 | 78 | 0.476813 |
e10cde8f79b9f8a7e8e8be18b130895124b76c09 | 3,370 | py | Python | integration-test-reports/run_reports.py | sutoiku/autostat | b0e6588e587450c4cbdb19a021d847f7571ba466 | [
"MIT"
] | null | null | null | integration-test-reports/run_reports.py | sutoiku/autostat | b0e6588e587450c4cbdb19a021d847f7571ba466 | [
"MIT"
] | 1 | 2022-03-16T19:05:46.000Z | 2022-03-16T19:05:46.000Z | integration-test-reports/run_reports.py | sutoiku/autostat | b0e6588e587450c4cbdb19a021d847f7571ba466 | [
"MIT"
] | 1 | 2021-07-14T19:37:44.000Z | 2021-07-14T19:37:44.000Z | from autostat.run_settings import RunSettings, Backend
from autostat.kernel_search import kernel_search, get_best_kernel_info
from autostat.dataset_adapters import Dataset
from autostat.utils.test_data_loader import load_test_dataset
from html_reports import Report
from markdown import markdown
import matplotlib.pyplot as plt
from datetime import datetime
import os
import time
import random
import numpy as np
print(os.getcwd())
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)
def timestamp():
return datetime.strftime(datetime.now(), "%Y-%m-%d_%H:%M:%S")
class HtmlLogger:
def __init__(self, report: Report) -> None:
self.report = report
def print(self, s: str) -> None:
self.report.add_markdown(
s.replace("\n", "\n\n")
# .replace("<", "<").replace(">", ">")
)
def prepend(self, s: str) -> None:
md = markdown(s, extensions=["fenced_code", "codehilite"])
self.report.body = [md] + self.report.body
def show(self, fig) -> None:
plt.tight_layout(rect=(0, 0, 1, 0.95))
plt.figure(fig.number)
self.report.add_figure(options="width=100%")
plt.close(plt.gcf())
report = Report()
logger = HtmlLogger(report)
def title_separator(title):
logger.print("-----------")
logger.print("-----------")
logger.print("-----------")
logger.print(f"# ***{title}***")
matlab_data_path = "data/"
files_sorted_by_num_data_points = [
"01-airline.mat",
# "07-call-centre.mat",
# "08-radio.mat",
"04-wheat.mat",
# "02-solar.mat",
# "11-unemployment.mat",
# # "10-sulphuric.mat",
# # "09-gas-production.mat",
# "03-mauna.mat",
# # "13-wages.mat",
# # "06-internet.mat",
# "05-temperature.mat",
# "12-births.mat",
]
if __name__ == "__main__":
random.seed(1234)
np.random.seed(1234)
print("starting report")
run_settings = RunSettings(
max_search_depth=2,
expand_kernel_specs_as_sums=False,
num_cpus=12,
use_gpu=False,
use_parallel=True,
gpu_memory_share_needed=0.45,
backend=Backend.SKLEARN,
).replace_base_kernels_by_names(["PER", "LIN", "RBF"])
logger.print(str(run_settings))
logger.print("\n" + str(run_settings.asdict()))
prediction_scores = []
for file_name in files_sorted_by_num_data_points:
file_num = int(file_name[:2])
dataset = load_test_dataset(matlab_data_path, file_num, split=0.1)
run_settings = run_settings.replace_kernel_proto_constraints_using_dataset(
dataset
)
title_separator(f"Dataset: {file_name}")
tic = time.perf_counter()
kernel_scores = kernel_search(dataset, run_settings=run_settings, logger=logger)
toc = time.perf_counter()
best_kernel_info = get_best_kernel_info(kernel_scores)
prediction_scores.append(best_kernel_info.prediction_score)
logger.print(f"best_kernel_info {str(best_kernel_info)}")
logger.print(f"Total time for {file_name}: {toc-tic:.3f} s")
logger.prepend(f"prediction_scores: {str(prediction_scores)}")
logger.prepend(f"sum prediction_scores: {str(sum(prediction_scores))}")
report.write_report(filename=f"reports/report_{timestamp()}.html")
print("report done")
| 26.124031 | 88 | 0.651929 |
e10f737d8a704aff53053429254515a89ebf061b | 424 | py | Python | backend/apps/lyusers/urls.py | lybbn/django-vue3-lyadmin | df8ed48971eb3e3da977e1fd0467b1230b56afe4 | [
"MIT"
] | 1 | 2022-03-01T07:20:36.000Z | 2022-03-01T07:20:36.000Z | backend/apps/lyusers/urls.py | lybbn/django-vue3-lyadmin | df8ed48971eb3e3da977e1fd0467b1230b56afe4 | [
"MIT"
] | null | null | null | backend/apps/lyusers/urls.py | lybbn/django-vue3-lyadmin | df8ed48971eb3e3da977e1fd0467b1230b56afe4 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
@Remark: 用户模块的路由文件
"""
from django.urls import path, re_path
from rest_framework import routers
from apps.lyusers.views import UserManageViewSet
system_url = routers.SimpleRouter()
system_url.register(r'users', UserManageViewSet)
urlpatterns = [
re_path('users/disableuser/(?P<pk>.*?)/',UserManageViewSet.as_view({'put':'disableuser'}), name='后台禁用用户'),
]
urlpatterns += system_url.urls | 21.2 | 110 | 0.731132 |
e1124f5104c7b2ddd81c1b4c389bcffa152ee3a4 | 44,393 | py | Python | srt_gc_launchGui.py | OrigamiAztec/LaunchGUITesting | e097afb075b313e13550937f450adf6653f88812 | [
"MIT"
] | null | null | null | srt_gc_launchGui.py | OrigamiAztec/LaunchGUITesting | e097afb075b313e13550937f450adf6653f88812 | [
"MIT"
] | null | null | null | srt_gc_launchGui.py | OrigamiAztec/LaunchGUITesting | e097afb075b313e13550937f450adf6653f88812 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Texas A&M University Sounding Rocketry Team
SRT-6 | 2018-2019
SRT-9 | 2021-2022
%-------------------------------------------------------------%
TAMU SRT
_____ __ _____ __ __
/ ___/______ __ _____ ___/ / / ___/__ ___ / /________ / /
/ (_ / __/ _ \/ // / _ \/ _ / / /__/ _ \/ _ \/ __/ __/ _ \/ /
\___/_/ \___/\_,_/_//_/\_,_/ \___/\___/_//_/\__/_/ \___/_/
%-------------------------------------------------------------%
Filepath:
gc/srt_gc_launchGui/srt_gc_launchGui.py
Developers:
(C) Doddanavar, Roshan 20171216
(L) Doddanavar, Roshan 20180801
Diaz, Antonio
Description:
Launch Control GUI, interfaces w/ srt_gc_launchArduino/srt_gc_launchArduino.ino
Input(s):
<None>
Output(s):
./log/*.log plain-text command log
./dat/*.dat plain-text data archive
'''
# Installed modules --> Utilities
import sys
import os
import serial, serial.tools.list_ports
from serial.serialutil import SerialException
import time
from datetime import datetime
import numpy as np
# Installed modules --> PyQt related
from PyQt5 import (QtGui, QtCore, QtSvg)
from PyQt5.QtCore import (Qt, QThread, pyqtSignal, QDate, QTime, QDateTime, QSize)
from PyQt5.QtWidgets import (QMainWindow, QWidget, QDesktopWidget, QPushButton, QApplication, QGroupBox, QGridLayout, QStatusBar, QFrame, QTabWidget,QComboBox)
import pyqtgraph as pg
# Program modules
from srt_gc_launchState import State
from srt_gc_launchThread import SerThread, UptimeThread
from srt_gc_launchTools import Tools, Object
from srt_gc_launchStyle import Style, Color
from srt_gc_launchConstr import Constr
# used to monitor wifi networks.
import subprocess
# used to get date and time in clock method.
import datetime as dt
# used to connect to ethernet socket in connect method.
import socket
# Ethernet link to the launch-pad controller via the SRT-6 router.
# A single module-level TCP socket is created here and connected lazily in
# Gui.btnClkConn(); the pad-side controller listens as a raw TCP server.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
TCP_IP = '192.168.1.177'  # presumably the controller's static LAN address -- confirm against srt_gc_launchArduino.ino
TCP_PORT = 23  # raw TCP on the telnet port
server_address = (TCP_IP, TCP_PORT)
class Gui(QMainWindow):
    def __init__(self):
        """Construct the main window and delegate all widget setup to initUI()."""
        super().__init__()
        self.initUI()
    def initUI(self):
        '''
        Main Window Initialization.

        Builds the full widget tree in a fixed order: attribute containers,
        LED pixmaps, utility helpers, the master grid with its tabs and group
        boxes, then each panel via the *Init() routines, and finally window
        sizing/centering. Ordering matters: later *Init() calls rely on the
        grids and containers created here.
        '''
        # General initialization
        self.session = ''                        # Active session name (set via Session Control)
        # Current date, shown in the window title
        self.dateGlobal = QDate.currentDate()
        # GUI start time, used as the zero point for the uptime thread
        self.startGlobal = QTime.currentTime()
        self.version = "v6.4.0"
        # Container initialization -- plain attribute bags (see srt_gc_launchTools.Object)
        self.edit = Object()                     # Line edit container
        self.btn = Object()                      # Button container
        self.led = Object()                      # LED indicator container
        self.ledClr = Object()                   # LED pixmap container
        self.sensor = Object()                   # Sensor readout container
        self.data = Object()                     # Data array container
        self.plot = Object()                     # Plot container
        ledImg = ["green","yellow","red","off"]  # LED indicator image files
        for name in ledImg:
            # Load each LED image (green.png, yellow.png, ...) as a smooth 20x20 pixmap
            # pixmap = QtGui.QPixmap("./figs/" + name + ".png").scaled(20, 20,
            pixmap = QtGui.QPixmap("./srt_gc_launchGui/figs/" + name + ".png").scaled(20, 20,
                transformMode=QtCore.Qt.SmoothTransformation)
            setattr(self.ledClr,name,pixmap)
        # Utility initialization -- style sheets, colors, command/state map, constructors
        self.style = Style()
        self.color = Color()
        self.state = State(self.led,self.ledClr)
        self.tools = Tools()
        self.constr = Constr(self,self.ledClr)
        # Utility states
        self.state.connected = False             # Serial/ethernet connection established
        self.state.reading = False               # Reader thread streaming data
        self.state.log = False                   # Log/data files open
        self.state.data = False                  # Avionics data read
        # Master grid layout management
        self.gridMaster = QGridLayout()
        self.gridMaster.setSpacing(10)
        # Tab initialization
        # name, row, col, row Span, col Span (positions within gridMaster)
        tabSpec = [( "tabComm", 0, 2, 1, 8),
                   ( "tabSys", 1, 0, 1, 2),
                   ( "tabAv", 1, 2, 1, 2),
                   ( "tabFill", 1, 4, 1, 2),
                   ( "tabData", 2, 0, 1, 10)]
        for spec in tabSpec:
            tabName = spec[0]
            row = spec[1]
            col = spec[2]
            rSpan = spec[3]
            cSpan = spec[4]
            tab = QTabWidget()
            setattr(self,tabName,tab)
            self.gridMaster.addWidget(tab,row,col,rSpan,cSpan)
        # Group/panel specification. Two kinds:
        #   "tab" -> a QWidget page added to one of the tab widgets above
        #   "box" -> a framed QGroupBox placed directly in gridMaster
        # kind, group attr, grid attr, title, then either parent tab or row/col/spans
        groupSpec = [( "box", "groupTitle", "gridTitle", "", 0, 0, 1, 2),
                     ( "tab", "groupComm", "gridComm", "Communication", "tabComm"),
                     ( "tab", "groupSess", "gridSess", "Session Control", "tabComm"),
                     ( "tab", "groupSys", "gridSys", "System State", "tabSys"),
                     ( "tab", "groupPwr", "gridPwr", "Power Telemetry", "tabSys"),
                     ( "tab", "groupDaq", "gridDaq", "Avionics DAQ", "tabAv"),
                     ( "tab", "groupDiag", "gridDiag", "Diagnostics", "tabAv"),
                     ( "tab", "groupFill", "gridFill", "Fill Control", "tabFill"),
                     ( "tab", "groupAuto", "gridAuto", "Auto Fill", "tabFill"),
                     ( "box", "groupIgn", "gridIgn", "Igniter Control", 1, 6, 1, 2),
                     ( "box", "groupVal", "gridVal", "Valve Control", 1, 8, 1, 2),
                     ( "tab", "groupPlot", "gridPlot", "Engine Diagnostics", "tabData"),
                     ( "tab", "groupOut", "gridOut", "Serial Output", "tabData"),]
        for spec in groupSpec:
            kind = spec[0]
            groupName = spec[1]
            gridName = spec[2]
            title = spec[3]
            if (kind == "tab"):
                parent = spec[4]
                group = QWidget()
                grid = QGridLayout()
                # Widget initialization
                setattr(self,groupName,group)
                # GridLayout object initialization
                setattr(self,gridName,grid)
                group.setLayout(grid)
                group.setAutoFillBackground(True)
                # Tab assignment
                getattr(self,parent).addTab(group,title)
            elif (kind == "box"):
                row = spec[4]
                col = spec[5]
                rSpan = spec[6]
                cSpan = spec[7]
                # GroupBox object initialization
                group = QGroupBox(title)
                group.setStyleSheet(self.style.css.group)
                # GridLayout object initialization
                grid = QGridLayout()
                group.setLayout(grid)
                # Assign to parent objects
                setattr(self,gridName,grid)
                setattr(self,groupName,group)
                self.gridMaster.addWidget(group,row,col,rSpan,cSpan)
        # Call initialization routines -- each fills the grids created above
        self.titleInit()     # Title bar
        self.barInit()       # Bottom statusbar (also starts the uptime thread)
        self.commInit()      # Communication toolbar
        self.sessInit()      # Session toolbar
        self.btnCtrlInit()   # Buttons for control panel
        self.ledCtrlInit()   # LED indicators for control panel
        self.plotInit()      # Engine diagnostics, plots
        self.dataInit()      # Engine diagnostics, readouts
        self.outInit()       # Raw serial output
        # Row & column stretching in master grid
        rowStr = [1, 4, 8]
        colStr = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        self.tools.resize(self.gridMaster,rowStr,colStr)
        # Finalize widget
        mainWidget = QWidget()
        mainWidget.setLayout(self.gridMaster)
        self.setCentralWidget(mainWidget)
        # Window management
        self.setWindowTitle("SRT Ground Control " + self.version + " " + self.dateGlobal.toString(Qt.TextDate))
        self.setWindowIcon(QtGui.QIcon("./figs/desktop_icon.png"))
        self.showMaximized()
        # Window centering: move the frame so it is centered on the desktop
        qr = self.frameGeometry()
        cp = QDesktopWidget().availableGeometry().center()
        qr.moveCenter(cp)
        self.move(qr.topLeft())
        # Window formatting
        #self.setStyleSheet(self.style.css.window)
        # Final initialization
        self.show()
def titleInit(self):
'''
Window Title Initialization
'''
# QLabel --> SRT logo
#titImg = "./figs/srt_black.svg"
titImg = "./srt_gc_launchGui/figs/srt_black.svg"
pixmap = QtGui.QPixmap(titImg).scaled(50,50,transformMode=QtCore.Qt.SmoothTransformation)
self.logo = self.constr.image(self.gridTitle,pixmap,[0,0,2,1])
# QLabel --> Main window title
text = "SRT Ground Control" + " " + self.version
self.title = self.constr.label(self.gridTitle,"title",text,"Bottom",[0,1,1,1])
# QLabel --> Main window subtitle
text = "Remote Launch System [tamusrt/gc]"
self.subtitle = self.constr.label(self.gridTitle,"subtitle",text,"Top",[1,1,1,1])
# Row & column stretching in title grid
rowStr = [5, 1]
colStr = [1, 2]
self.tools.resize(self.gridTitle,rowStr,colStr)
def barInit(self):
'''
Initialize strings and inputs in bottom status bar.
'''
self.statusBar = QStatusBar()
self.setStatusBar(self.statusBar)
barFrame = QFrame()
gridStatus = QGridLayout()
barFrame.setLayout(gridStatus)
self.statusBar.addPermanentWidget(barFrame,1)
# Event log
self.constr.label(gridStatus,"label","EVENT LOG","Center",[0,0,1,1])
self.statusBar.log = self.constr.readout(gridStatus,"statusBar",[0,1,1,1])
# Last sent
self.constr.label(gridStatus,"label","LAST SENT","Center",[0,2,1,1])
self.statusBar.sent = self.constr.readout(gridStatus,"statusBar",[0,3,1,1])
# Last recieved
self.constr.label(gridStatus,"label","LAST RCVD","Center",[0,4,1,1])
self.statusBar.recieved = self.constr.readout(gridStatus,"statusBar",[0,5,1,1])
# Session name
self.constr.label(gridStatus,"label","SESSION","Center",[0,6,1,1])
self.statusBar.session = self.constr.readout(gridStatus,"statusBar",[0,7,1,1])
# Uptime counter
self.constr.label(gridStatus,"label","UPTIME","Center",[0,8,1,1])
self.statusBar.uptime = self.constr.readout(gridStatus,"statusBar",[0,9,1,1])
# Uptime thread management
self.uptimeThread = UptimeThread(self.startGlobal,self.statusBar.uptime)
self.uptimeThread.start()
# Row & column stretching in comm grid
rowStr = []
colStr = [1, 4, 1, 2, 1, 2, 1, 2, 1, 2]
self.tools.resize(gridStatus,rowStr,colStr)
def commInit(self):
'''
Communication Toolbar Initialization
'''
# set communication and reading status as false initially.
self.state.connected = False
self.state.reading = False
if (os.name == "posix"):
prefix = "/dev/tty"
elif (os.name == "nt"):
prefix = "COM"
else:
prefix = ""
# LED indicator for connection
self.led.commConn = self.constr.led(self.gridComm,[0,0,1,1])
# CONNECT button
method = "btnClkConn"
color = self.color.comm
self.btn.commConn = self.constr.button(self.gridComm,"CONNECT",method,color,[0,1,1,1])
# SEARCH button
method = "btnClkSearch"
color = self.color.comm
self.btn.commSearch = self.constr.button(self.gridComm,"SEARCH",method,color,[0,2,1,1])
# COM Port label & input
self.labPort = self.constr.label(self.gridComm,"label","Data Port:","Center",[0,3,1,1])
self.portMenu = self.constr.dropDown(self.gridComm,[0,4,1,1])
# Baud rate label & input
self.labBaud = self.constr.label(self.gridComm,"label","Baud Rate","Center",[0,5,1,1])
self.baudMenu = self.constr.dropDown(self.gridComm,[0,6,1,1])
self.baudMenu.addItems(["9600","14400","19200","28800","38400","57600","115200"])
# LED indicator for bypass
self.led.commByp = self.constr.led(self.gridComm,[0,7,1,1])
# BYPASS button. Function of bypass is to force GUI to send commands over xbee even if xbee port isn't showing.
method = "btnClkByp"
color = self.color.comm
self.btn.commByp = self.constr.button(self.gridComm,"BYPASS",method,color,[0,8,1,1])
# RESET button. Function of reset is to stop thread sorting, turn off all LEDs and disconnect xbees. May want to add more functionality such as returning to a safe state of the engine.
method = "btnClkRes"
color = self.color.comm
self.btn.commRes = self.constr.button(self.gridComm,"RESET",method,color,[0,9,1,1])
# Row & column stretching in comm grid
rowStr = []
colStr = [1, 3, 3, 2, 5, 2, 2, 1, 3, 3]
self.tools.resize(self.gridComm,rowStr,colStr)
def sessInit(self):
# Session name
self.led.sess = self.constr.led(self.gridSess,[0,0,1,1])
self.btn.sessNew = self.constr.button(self.gridSess,"NEW","btnClkSessNew",self.color.comm,[0,1,1,1])
self.btn.sessRename = self.constr.button(self.gridSess,"RENAME","btnClkSessRename",self.color.comm,[0,2,1,1])
self.labSess = self.constr.label(self.gridSess,"label","Session","Center",[0,3,1,1])
self.edit.session = self.constr.edit(self.gridSess,"test",[0,4,1,1])
# Clock control
self.led.clock = self.constr.led(self.gridSess,[0,5,1,1])
self.btn.sessClock = self.constr.button(self.gridSess,"SET CLOCK","btnClkClock",self.color.comm,[0,6,1,1])
self.labDateYr = self.constr.label(self.gridSess,"label","Date","Center",[0,7,1,1])
self.edit.dateYYYY = self.constr.edit(self.gridSess,"YYYY",[0,8,1,1])
self.edit.dateMM = self.constr.edit(self.gridSess,"MM",[0,9,1,1])
self.edit.dateDD = self.constr.edit(self.gridSess,"DD",[0,10,1,1])
self.labTime = self.constr.label(self.gridSess,"label","Time","Center",[0,11,1,1])
self.edit.timeHH = self.constr.edit(self.gridSess,"HH",[0,12,1,1])
self.edit.timeMM = self.constr.edit(self.gridSess,"MM",[0,13,1,1])
self.edit.timeSS = self.constr.edit(self.gridSess,"SS",[0,14,1,1])
# Row & column stretching in sess grid
rowStr = []
colStr = [1, 2, 2, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1]
self.tools.resize(self.gridSess,rowStr,colStr)
def btnClkSearch(self):
# set the port menu to be cleared initially.
self.portMenu.clear()
# check the number of serial ports available.
ports = serial.tools.list_ports.comports()
# if ports exist, add it to drop down menu in GUI
if (ports):
for port in ports:
entry = "Serial: " + port.device + " - " + port.description
self.portMenu.addItem(entry)
# uses subprocess package to check for connected wifi networks.
devices = subprocess.check_output(['netsh','wlan','show','network']).decode('ascii').replace("\r","")
numOfWifiDevices = len(devices.split("SSID"))
# check to see the number of wifi networks we can connect to
if numOfWifiDevices:
for deviceNum in range(1, numOfWifiDevices):
entry = "Wifi Network: " + devices.split("SSID")[deviceNum].split(" ")[3]
self.portMenu.addItem(entry)
else:
self.portMenu.setCurrentText("NO DEVICE(S) FOUND")
def btnClkClock(self):
'''
"CLOCK" Button Event Handling
'''
# probably better to replace these with QDate class to reduce number of packages you have to import.
year = str(dt.datetime.now().year)
month = str(dt.datetime.now().month)
day = str(dt.datetime.now().day)
hour = str(datetime.now().hour)
minute = str(dt.datetime.now().minute)
seconds = str(dt.datetime.now().second)
# automatically update date and time log when button clicked
self.edit.dateYYYY = self.constr.edit(self.gridSess,year,[0,8,1,1])
self.edit.dateMM = self.constr.edit(self.gridSess,month,[0,9,1,1])
self.edit.dateDD = self.constr.edit(self.gridSess,day,[0,10,1,1])
self.edit.timeHH = self.constr.edit(self.gridSess,hour,[0,12,1,1])
self.edit.timeMM = self.constr.edit(self.gridSess,minute,[0,13,1,1])
self.edit.timeSS = self.constr.edit(self.gridSess,seconds,[0,14,1,1])
# I think this was meant to pull up exact date and time on a separate window for user to type in manually.
# This command below with os.system doesn't work. sudo command not recognized on windows.
# dateStr = self.edit.dateYYYY.text() + '-' + self.edit.dateMM.text() + '-' + self.edit.dateDD.text()
# timeStr = self.edit.timeHH.text() + ':' + self.edit.timeMM.text() + ':' + self.edit.timeSS.text()
# cmdStr = "sudo date -s"
# System command
# sudo date -s 'YYYY-MM-DD HH:MM:SS'
#os.system('cmdStr' + ' ' + '\'' + dateStr + ' ' + timeStr + '\'')
self.led.clock.setPixmap(self.ledClr.yellow)
    def btnClkConn(self):
        '''
        "CONNECT" Button Event Handling.

        Parses the selected entry of the port drop-down and connects either
        over ethernet (entries starting with "Wifi", via the module-level
        TCP socket to the SRT router) or over a serial port (pyserial).
        On success it stores the link in ``self.ser``, spawns the reader
        thread (SerThread), and -- if the link immediately yields data --
        starts streaming and lights the bypass LED. Errors are surfaced on
        the status bar via logEvent().
        '''
        if (self.state.connected):
            self.logEvent("ERROR","ALREADY CONNECTED")
        else:
            # User input --> Port name & baud rate. Drop-down entries look
            # like "Serial: COM3 - ..." or "Wifi Network: <ssid>", so the
            # first space-separated token selects the branch below.
            text = str(self.portMenu.currentText())
            text = text.split(' ')
            self.port = text[0]
            self.baud = int(str(self.baudMenu.currentText()))
            if (self.port == "/dev/tty"):
                # Bare POSIX prefix means no real device was selected
                self.logEvent("ERROR","INVALID PORT")
            else:
                if (self.port == "Wifi"):
                    try:
                        # Attempt to connect to router/ethernet over ubiquity
                        sock.connect(server_address)
                        # The socket stands in for a serial handle; downstream
                        # code duck-types between socket and serial APIs
                        self.ser = sock
                        # Set connected status, LED, and event log
                        self.state.connected = True
                        self.logEvent("CONNECTED",self.port)
                        self.led.commConn.setPixmap(self.ledClr.yellow)
                        # Must send a command initially for the link to stay
                        # connected and start streaming data over ethernet
                        missionCMD = 'b'
                        missionCMD = bytes(missionCMD, 'utf-8')
                        sock.sendall(missionCMD)
                        # Thread handling: wire reader-thread signals to the
                        # GUI update slots (defined elsewhere in this class)
                        self.serThread = SerThread(self.ser)
                        self.serThread.outSig.connect(self.outUpdate)
                        self.serThread.stateSig.connect(self.stateUpdate)
                        self.serThread.dataSig.connect(self.dataUpdate)
                        self.serThread.resetSig.connect(self.readFail)
                        # Test for bypass condition: a non-empty first packet
                        # means data is already flowing, so start the reader
                        text = self.ser.recv(100)
                        if (len(text) > 0):
                            # Check for empty packet
                            self.state.reading = True
                            self.logEvent("READING",self.port)
                            self.led.commByp.setPixmap(self.ledClr.yellow)
                            self.serThread.start()
                    except (TimeoutError, OSError):
                        # Connection refused / host unreachable / timeout
                        self.logEvent("ERROR","INVALID PORT")
                else:
                    try:
                        # Attempt to connect to serial (1 s read timeout)
                        self.ser = serial.Serial(self.port,self.baud,timeout=1)
                        self.state.connected = True
                        self.logEvent("CONNECTED",self.port)
                        self.led.commConn.setPixmap(self.ledClr.yellow)
                        # Send an initial command to kick the link into
                        # streaming mode (mirrors the ethernet branch)
                        missionCMD = 'b'
                        missionCMD = bytes(missionCMD, 'utf-8')
                        self.ser.write(missionCMD)
                        # Thread handling (same signal wiring as above)
                        self.serThread = SerThread(self.ser)
                        self.serThread.outSig.connect(self.outUpdate)
                        self.serThread.stateSig.connect(self.stateUpdate)
                        self.serThread.dataSig.connect(self.dataUpdate)
                        self.serThread.resetSig.connect(self.readFail)
                        # Test for bypass condition
                        text = self.ser.readline()
                        if (len(text) > 0):
                            # Check for empty packet
                            self.state.reading = True
                            self.logEvent("READING",self.port)
                            self.led.commByp.setPixmap(self.ledClr.yellow)
                            self.serThread.start()
                    except:
                        # NOTE(review): bare except hides non-port errors
                        # (e.g. signal wiring bugs) behind "INVALID PORT";
                        # consider narrowing to (SerialException, ValueError, OSError)
                        self.logEvent("ERROR","INVALID PORT")
    def btnClkByp(self):
        # Bypass handshake only implemented for XBee serial links, not the
        # ubiquity/ethernet path
        '''
        "BYPASS" Button Event Handling.

        Works around an old XBee firmware quirk: sends the dongle's
        command-mode escape sequence (two line endings, a 2 s pause, then
        'b') to force it into transparent/bypass mode, then starts the
        reader thread if data begins to flow. Requires an open connection
        that is not already streaming.
        '''
        if (self.state.reading):
            self.logEvent("ERROR","ALREADY READING")
        elif (not self.state.connected):
            self.logEvent("ERROR","NO CONNECTION")
        else:
            # enter, enter, (wait), 'b' --> bypass XBee dongle w/ ascii encoding
            self.ser.write(b'\r\n\r\n')
            time.sleep(2)
            self.ser.write(b'b\r\n')
            # Test for bypass condition: a non-empty read means the dongle
            # is now passing data through
            text = self.ser.readline()
            if (len(text) > 0):
                # Check for empty packet
                self.state.reading = True
                self.logEvent("READING",self.port)
                self.led.commByp.setPixmap(self.ledClr.yellow)
                self.serThread.start()
def btnClkRes(self):
'''
"RESET" Button Event Handling
'''
if (self.state.connected):
# This is discouraged but thread.quit() and thread.exit() don't work [brute force method]
self.serThread.terminate()
self.state.reading = False
self.led.commByp.setPixmap(self.ledClr.off)
self.ser.close()
self.state.connected = False
self.logEvent("DISCONNECTED",self.port)
self.led.commConn.setPixmap(self.ledClr.off)
# Reset all control status LEDs
ledName = list(self.led.__dict__)
for name in ledName:
if (name == "sess"): # Don't reset session LED
continue
else:
getattr(self.led,name).setPixmap(self.ledClr.off)
else:
self.logEvent("ERROR","NO CONNECTION")
    def btnCtrlInit(self):
        '''
        Control Button Initialization.

        Builds every launch-control button (system state, DAQ, fill,
        igniter, valve) from the declarative spec table below. Each button
        gets its single-character radio command from State.btnMap() and an
        (initially empty) list of associated status LEDs, then is stored on
        the ``self.btn`` container under its spec name.
        '''
        rSp = 1                 # Row span multiplier
        cSp = 2                 # Column span multiplier
        # Control button specification
        #                grid, name, text, comm, color, row, col, row span, col span
        # System state
        btnSpec = [( "gridSys", "sysArm", "SYS ARM", "btnClkCtrl", "sys", 0, 0, 1, 1),
                   ( "gridSys", "sysDisarm", "SYS DISARM", "btnClkCtrl", "sys", 0, 1, 1, 1),
                   ( "gridSys", "ready1", "READY 1", "btnClkCtrl", "abt", 1, 0, 1, 1),
                   ( "gridSys", "ready2", "READY 2", "btnClkCtrl", "abt", 2, 0, 1, 1),
                   ( "gridSys", "abort", "ABORT", "btnClkCtrl", "abt", 1, 1, 2, 1),
                   ( "gridSys", "buzzOn", "BUZZ ON", "btnClkCtrl", "sys", 3, 0, 1, 1),
                   ( "gridSys", "buzzOff", "BUZZ OFF", "btnClkCtrl", "sys", 3, 1, 1, 1),
                   # Data acquisition
                   ( "gridDaq", "dataState", "DATA STATE", "btnClkCtrl", "daq", 0, 0, 1, 1),
                   ( "gridDaq", "avPwrOff", "AV PWR OFF", "btnClkCtrl", "av", 0, 1, 1, 1),
                   ( "gridDaq", "dataStart", "DATA START", "btnClkCtrl", "daq", 1, 0, 1, 1),
                   ( "gridDaq", "dataStop", "DATA STOP", "btnClkCtrl", "daq", 1, 1, 1, 1),
                   # Fill control
                   ( "gridFill", "supplyOpen", "SUPPLY OPEN", "btnClkCtrl", "n2o", 0, 0, 1, 1),
                   ( "gridFill", "supplyClose", "SUPPLY CLOSE", "btnClkCtrl", "n2o", 0, 1, 1, 1),
                   ( "gridFill", "supplyVtOpen", "SUPPLY VT OPEN", "btnClkCtrl", "n2o", 1, 0, 1, 1),
                   ( "gridFill", "supplyVtClose", "SUPPLY VT CLOSE", "btnClkCtrl", "n2o", 1, 1, 1, 1),
                   ( "gridFill", "runVtOpen", "RUN VT OPEN", "btnClkCtrl", "n2o", 2, 0, 1, 1),
                   ( "gridFill", "runVtClose", "RUN VT CLOSE", "btnClkCtrl", "n2o", 2, 1, 1, 1),
                   ( "gridFill", "motorOn", "MOTOR ON", "btnClkCtrl", "qd", 3, 0, 1, 1),
                   ( "gridFill", "motorOff", "MOTOR OFF", "btnClkCtrl", "qd", 3, 1, 1, 1),
                   # Igniter control
                   ( "gridIgn", "ignCont", "IGN CONT", "btnClkCtrl", "ign", 0, 0, 1, 2),
                   ( "gridIgn", "ignArm", "IGN ARM", "btnClkCtrl", "ign", 1, 0, 1, 1),
                   ( "gridIgn", "ignDisarm", "IGN DISARM", "btnClkCtrl", "ign", 1, 1, 1, 1),
                   ( "gridIgn", "oxOpen", "OX OPEN", "btnClkCtrl", "o2", 2, 0, 1, 1),
                   ( "gridIgn", "oxClose", "OX CLOSE", "btnClkCtrl", "o2", 2, 1, 1, 1),
                   ( "gridIgn", "ignOn", "IGN ON", "btnClkCtrl", "ign", 3, 0, 1, 1),
                   ( "gridIgn", "ignOff", "IGN OFF", "btnClkCtrl", "ign", 3, 1, 1, 1),
                   # Valve control
                   ( "gridVal", "bvPwrOn", "BV PWR ON", "btnClkCtrl", "bvas", 0, 0, 1, 1),
                   ( "gridVal", "bvPwrOff", "BV PWR OFF", "btnClkCtrl", "bvas", 0, 1, 1, 1),
                   ( "gridVal", "bvOpen", "BV OPEN", "btnClkCtrl", "bvas", 1, 0, 1, 1),
                   ( "gridVal", "bvClose", "BV CLOSE", "btnClkCtrl", "bvas", 1, 1, 1, 1),
                   ( "gridVal", "bvState", "BV STATE", "btnClkCtrl", "bvas", 2, 0, 1, 1),
                   ( "gridVal", "mdot", "MDOT", "btnClkCtrl", "mdot", 2, 1, 1, 1)]
        for spec in btnSpec:
            grid = getattr(self,spec[0])
            name = spec[1]
            text = spec[2]
            method = spec[3]
            color = getattr(self.color,spec[4])
            # Grid placement, scaled by the span multipliers above
            row = spec[5]*rSp
            col = spec[6]*cSp
            rSpan = spec[7]*rSp
            cSpan = spec[8]*cSp
            # Construct button
            btn = self.constr.button(grid,text,method,color,[row,col,rSpan,cSpan])
            btn.comm = self.state.btnMap(name)      # Find & set character command
            btn.led = []                            # Associated LEDs, filled by ledCtrlInit
            # Assign to container
            setattr(self.btn,name,btn)
def btnClkCtrl(self):
    '''
    Control Button Event Handling

    Sends the single-character command bound to the clicked button over
    the active connection (socket or serial), mirrors the button text in
    the status bar, logs the event, and lights the button's red LEDs
    while connected. Failures are logged as "WRITE FAIL" (connected) or
    "NO CONNECTION" (not connected).
    '''
    sender = self.sender()
    self.statusBar.sent.setText(sender.text())  # Update statusbar
    self.logEvent(sender.text(), sender.comm)
    # Trigger red LED state on every LED associated with this button
    if (self.state.connected):
        for led in sender.led:
            led.setPixmap(self.ledClr.red)
    try:
        comm = sender.comm.encode("ascii")
        try:
            # Socket connections expose sendall(); serial ports only
            # provide write() — fall back when sendall is absent.
            self.ser.sendall(comm)
        except AttributeError:
            self.ser.write(comm)
    except Exception:
        # Narrowed from a bare except so Ctrl-C / SystemExit still propagate
        if (self.state.connected):
            self.logEvent("ERROR", "WRITE FAIL")
        else:
            self.logEvent("ERROR", "NO CONNECTION")
def btnClkSessRename(self):
    '''
    Rename the active logging session from the session edit box and
    mirror the new name in the status bar. Logging must already be
    running; otherwise a FILE IO error is logged instead.
    '''
    if not self.state.log:
        self.logEvent("ERROR", "FILE IO")
        return
    self.session = self.edit.session.text()
    self.statusBar.session.setText(self.session)
def btnClkSessNew(self):
    '''
    Start a new logging session: close any open log/data files, adopt the
    session name from the edit box, and open fresh time-stamped control
    (./log/*.log) and data (./data/*.dat) files. On success the session
    LED turns yellow; any failure is logged as "FILE IO".
    '''
    try:
        # Close log & data files if initialized
        self.closeLog()
        # Update session name
        self.session = self.edit.session.text()
        self.statusBar.session.setText(self.session)
        # Generate file date & time stamp, e.g. 20200101_120000
        dateStr = QDate.currentDate().toString(Qt.ISODate)
        startStr = QTime.currentTime().toString("HH:mm:ss")
        stamp = dateStr.replace('-', '') + '_' + startStr.replace(':', '')
        # Control & data log initialization (attr name, directory, extension)
        for attr, fileDir, fileExt in (("logFile", "./log/", ".log"),
                                       ("dataFile", "./data/", ".dat")):
            if (not os.path.exists(fileDir)):
                os.makedirs(fileDir)
            setattr(self, attr, open(fileDir + stamp + fileExt, 'w'))
        self.state.log = True
        self.led.sess.setPixmap(self.ledClr.yellow)
    except Exception:
        # Narrowed from a bare except so Ctrl-C / SystemExit still propagate
        self.logEvent("ERROR", "FILE IO")
def ledCtrlInit(self):
    '''
    LED Indicator Initialization

    Builds every status LED from a declarative spec table, places it in
    its grid, and registers it on each button that should drive it.
    '''
    rSp = 1  # Row span multiplier
    cSp = 2  # Column span multiplier
    # LED indicator specification
    # grid, name, row, col, row Span, col Span, buttons ...
    # System state
    ledSpec = [( "gridSys", "sysArm", 0, 2, 1, 1, "sysArm", "sysDisarm"),
               ( "gridSys", "ready1", 1, 2, 1, 1, "ready1", "abort"),
               ( "gridSys", "ready2", 2, 2, 1, 1, "ready2", "abort"),
               ( "gridSys", "buzz", 3, 2, 1, 1, "buzzOn", "buzzOff"),
               # Data acquisition
               ( "gridDaq", "avPwr", 0, 2, 1, 1, "avPwrOff"),
               ( "gridDaq", "data", 1, 2, 1, 1, "dataStart", "dataStop", "dataState"),
               # Fill control
               ( "gridFill", "supply", 0, 2, 1, 1, "supplyOpen", "supplyClose"),
               ( "gridFill", "supplyVt", 1, 2, 1, 1, "supplyVtOpen", "supplyVtClose"),
               ( "gridFill", "runVt", 2, 2, 1, 1, "runVtOpen", "runVtClose"),
               ( "gridFill", "motor", 3, 2, 1, 1, "motorOn", "motorOff"),
               # Igniter control
               ( "gridIgn", "ignCont", 0, 2, 1, 1, "ignCont"),
               ( "gridIgn", "ignArm", 1, 2, 1, 1, "ignArm", "ignDisarm"),
               ( "gridIgn", "ox", 2, 2, 1, 1, "oxOpen", "oxClose"),
               ( "gridIgn", "ign", 3, 2, 1, 1, "ignOn", "ignOff"),
               # Valve control
               ( "gridVal", "bvPwr", 0, 2, 1, 1, "bvPwrOn", "bvPwrOff", "bvState", "mdot"),
               ( "gridVal", "bv", 1, 2, 1, 1, "bvOpen", "bvClose", "bvState", "mdot")]
    for spec in ledSpec:
        gridName, ledName = spec[0], spec[1]
        row, col, rSpan, cSpan = spec[2:6]
        btnNames = spec[6:]
        # Construct LED in its grid cell (spans scaled by the multipliers;
        # column span is halved so the LED sits in half a button cell)
        placement = [row*rSp, col*cSp, rSpan*rSp, cSpan*cSp/2]
        led = self.constr.led(getattr(self, gridName), placement)
        # Attach the LED to every button that drives it
        for btnName in btnNames:
            getattr(self.btn, btnName).led.append(led)
        # Assign to container
        setattr(self.led, ledName, led)
def dataInit(self):
    '''
    Data Array & Sensor Readout Initialization

    Creates the empty telemetry arrays (one per data code) plus a
    code->latest-reading dictionary, then builds every sensor readout
    widget from a declarative spec table.
    '''
    # Data storage initialization
    # time stamp, run tank press, chamber press, run tank temp, chamber temp, aux temp
    self.dataTime = 1*60  # Data array length (sec)
    self.dataName = ["st","pt","pc","tt","tc","ta"]
    self.dataDict = {}
    for name in self.dataName:
        # Empty numpy array per code; dictionary entries start as None
        # until the first packet arrives
        setattr(self.data, name, np.array([]))
        self.dataDict[name] = None
    # Sensor readout specification
    # name, text, unit, code, extrapolation time, row, col, row span, col span
    # Pressure column
    sensorSpec = [( "pRun", "Press\nRun", "[ psi ]", "pt", 0, 0, 2, 1, 1),
                  ( "pRun30s", "Extrap\n30 sec", "[ psi ]", "pt", 30, 1, 2, 1, 1),
                  ( "pRun1m", "Extrap\n1 min", "[ psi ]", "pt", 1*60, 2, 2, 1, 1),
                  ( "pRun5m", "Extrap\n5 min", "[ psi ]", "pt", 5*60, 3, 2, 1, 1),
                  ( "pChamb", "Press\nChamb", "[ psi ]", "pc", 0, 4, 2, 1, 1),
                  # Temperature column
                  ( "tRun", "Temp\nRun", "[ °F ]", "tt", 0, 0, 6, 1, 1),
                  ( "tRun30s", "Extrap\n30 sec", "[ °F ]", "tt", 30, 1, 6, 1, 1),
                  ( "tRun1m", "Extrap\n1 min", "[ °F ]", "tt", 1*60, 2, 6, 1, 1),
                  ( "tRun5m", "Extrap\n5 min", "[ °F ]", "tt", 5*60, 3, 6, 1, 1),
                  ( "pRunVap", "Press\nVapor", "[ psi ]", "tt", 0, 4, 6, 1, 1)]
    for (name, text, unit, code, extrap, row, col, rSpan, cSpan) in sensorSpec:
        # Construct sensor widget, tag it with its data code and its
        # forward-extrapolation time, and store it in the container
        sensor = self.constr.readout(self.gridPlot, "sensor", [row, col, rSpan, cSpan])
        sensor.code = code
        sensor.extrap = extrap
        setattr(self.sensor, name, sensor)
        # Text & unit labels flank the readout
        self.constr.label(self.gridPlot, "label", text, "Center", [row, col-1, 1, 1])
        self.constr.label(self.gridPlot, "label", unit, "Center", [row, col+1, 1, 1])
    # Generate sensor list
    self.sensorName = list(self.sensor.__dict__)
    # Row & column stretching in plotGrid
    self.tools.resize(self.gridPlot, [], [8, 1, 1, 1, 8, 1, 1, 1])
def plotInit(self):
    '''
    Live Plot Initialization

    Creates the run-tank pressure and temperature live plots and keeps
    handles to their curve objects for dataUpdate() to refresh.
    (Removed the unused `hour`/`temperature` debug locals left in the
    original — they were never referenced.)
    '''
    self.plot = [None] * 2
    # Pressure plot
    yRange = [0, 950]
    xLabel = ["Time", "sec"]
    yLabel = ["Run Tank Pressure", "psi"]
    self.plot[0] = self.constr.plot(self.gridPlot, yRange, xLabel, yLabel, [0, 0, 5, 1])
    self.plotPress = self.plot[0].plot()
    # Temperature plot
    yRange = [0, 150]
    xLabel = ["Time", "sec"]
    yLabel = ["Run Tank Temperature", "°F"]
    self.plot[1] = self.constr.plot(self.gridPlot, yRange, xLabel, yLabel, [0, 4, 5, 1])
    self.plotTemp = self.plot[1].plot()
def outInit(self):
    # Create scroll box for raw serial output; incoming raw packets are
    # appended to it by outUpdate()
    self.serialOut = self.constr.scrollBox(self.gridOut,[0,0,1,1])
def outUpdate(self, text):
    '''
    Append one line of raw serial traffic to the output scroll box and
    keep the view pinned to the newest entry.
    '''
    self.serialOut.moveCursor(QtGui.QTextCursor.End)
    self.serialOut.insertPlainText(text + "\n")
    scroll = self.serialOut.verticalScrollBar()
    scroll.setValue(scroll.maximum())
def stateUpdate(self, text):
    '''
    Control State Update

    Shows the received state string in the status bar, logs it, and
    feeds it to the state machine. The abort code "xLBabo" is expanded
    into the two relief codes as a quick fix. Failures are logged as
    "STATE FAIL".
    '''
    # Update statusbar (attribute name "recieved" is misspelled where it
    # is defined elsewhere in this file — keep it as-is)
    self.statusBar.recieved.setText(text)
    try:
        # Log state event, then update state object (PID graphic eventually)
        self.logEvent("STATE", text)
        # QUICK FIX FOR ABORT STATE: expand abort into both relief codes
        if (text == "xLBabo"):
            self.state.update("xLBrl10")
            self.state.update("xLBrl20")
        else:
            self.state.update(text)
    except Exception:
        # Narrowed from a bare except so Ctrl-C / SystemExit still propagate
        self.logEvent("ERROR", "STATE FAIL")
def dataUpdate(self, text):
    print("gets here, right?")  # TODO(review): leftover debug print — remove
    '''
    Plot & Sensor Update

    Parses one comma-separated telemetry packet (each field is a 2-char
    code followed by its value), appends the readings to the rolling
    data arrays, refreshes the sensor readouts and live plots, and
    mirrors the raw packet into the data log. Any failure is logged as
    "DAQ FAIL".
    '''
    try:
        # Write raw packet to data log
        if self.state.log:
            self.dataFile.write(text + '\n')
            print("writing")  # TODO(review): leftover debug print
        # Process data packet
        raw = text.split(',')
        nEnd = len(self.data.st)
        print(raw, nEnd)  # TODO(review): leftover debug print
        # Update dictionary --> maps code to reading
        for field in raw:
            self.dataDict[field[0:2]] = field[2:]
        # Convert time stamps to elapsed seconds from AV start (first packet)
        if (self.state.data):
            stamp = self.dataDict["st"]
            nowData = datetime.strptime(stamp, "%H:%M:%S.%f")
            delta = nowData - self.startData
            self.dataDict["st"] = delta.total_seconds()
        else:
            # First packet: record the reference time stamp
            stamp = self.dataDict["st"]
            self.startData = datetime.strptime(stamp, "%H:%M:%S.%f")
            self.state.data = True
            self.dataDict["st"] = 0
        # Establish extrapolation time step
        if (len(self.data.st) < 2):
            step = 1  # Arbitrary value; can't be zero
        else:
            step = self.data.st[-1] - self.data.st[-2]
        nData = np.floor(self.dataTime/step)  # Max samples retained
        # Populate data arrays: after filling, shift left & overwrite the end
        if (nEnd < nData):  # Case: array not full
            for name in self.dataName:
                value = float(self.dataDict[name])
                setattr(self.data, name, np.append(getattr(self.data, name), value))
        else:  # Case: array full
            for name in self.dataName:
                value = float(self.dataDict[name])
                # BUG FIX: the rolled array must be stored back. The original
                # called getattr(obj, name, np.roll(...)), which treats the
                # rolled array as a getattr *default* and discards it.
                setattr(self.data, name, np.roll(getattr(self.data, name), -1))
                getattr(self.data, name)[-1] = value
        # Sensor readout update
        for name in self.sensorName:
            sensor = getattr(self.sensor, name)
            data = getattr(self.data, sensor.code)
            value = self.tools.extrap(self.data.st, data, sensor.extrap, step)
            if (name == "pRunVap"):  # Vapor pressure from run tank temp
                value = self.tools.vapPress(value)
            sensor.setText(str(round(value, 2)))
        # Live plot update
        # TODO(review): the pressure series below is hard-coded debug data;
        # the real series are the commented-out lines — confirm and restore.
        #xTime = self.data.st - self.data.st[-1] # Center time scale at present reading
        xTime = [1,2,3,4,5,6,7,8,9,10]
        yPress = [400,432,434,432,433,431,429,432,435,445]
        print(xTime)  # TODO(review): leftover debug print
        print("YPress:")
        print(yPress)
        #yPress = self.data.pt # Tank pressure array
        yTemp = self.data.tt  # Tank temperature array
        print("yTemp:")  # TODO(review): leftover debug print
        print(yTemp)
        self.plotPress.setData(xTime, yPress, pen=self.style.pen.press)
        self.plotTemp.setData(xTime, yTemp, pen=self.style.pen.temp)
    except Exception:
        # Failure to parse/plot the data packet
        self.logEvent("ERROR", "DAQ FAIL")
        if (self.state.log):
            self.dataFile.write("ERROR: " + text + '\n')
def readFail(self,text):
    '''
    Log Read Fail & Reset Connection

    Called when the reader thread reports a failure: resets the
    connection via the reset-button handler, then logs the error text.
    '''
    self.btnClkRes()
    self.logEvent("ERROR",text)
def logEvent(self, event, text):
    '''
    Log Event Management

    Builds a millisecond time stamp, shows the event in the status bar
    (errors get the error stylesheet), and appends a CSV-style line to
    the control log when a session is active.
    '''
    stamp = QTime.currentTime().toString("HH:mm:ss.zzz")
    pad = ' ' * 5
    # Status bar entry: stamp, event name, quoted detail text
    self.statusBar.log.setText(stamp + pad + event + pad + "\"" + text + "\"")
    if (event == "ERROR"):
        css = self.style.css.error
    else:
        css = self.style.css.statusBar
    self.statusBar.log.setStyleSheet(css)
    # Print to log file
    if (self.state.log):
        self.logFile.write(stamp + ", " + event + ", " + "\"" + text + "\"" + "\n")
def closeLog(self):
    '''
    File Close Management

    Closes the control & data log files and prefixes each file name
    with the session name (path layout "./<dir>/<file>"). No-op when
    logging is not active.
    '''
    if (self.state.log):
        self.state.log = False  # Protects thread issues; writing to closed file
        for attr in ("logFile", "dataFile"):
            handle = getattr(self, attr)
            # Insert the session name in front of the file component
            parts = handle.name.split('/')
            parts[2] = self.session + '_' + parts[2]
            handle.close()
            os.rename(handle.name, '/'.join(parts))
def closeEvent(self, event):
    '''
    GUI Exit Management

    Qt close hook: flush/close any open log & data files first, then
    allow the window close to proceed.
    '''
    self.closeLog()   # Close log & data files if initialized
    event.accept()    # Exit GUI safely
if (__name__ == '__main__'):
    '''
    Executive Control
    '''
    # Build the Qt application and the main GUI window, then block in the
    # Qt event loop; its exit status is forwarded to the shell via sys.exit
    app = QApplication(sys.argv) # Utility for window exit condition
    gui = Gui() # Creates instance of "Gui" class
    sys.exit(app.exec_()) # Window exit condition
e11508b726f072695da36af59f196eefb588d2a7 | 1,359 | py | Python | setup.py | charettes/cricket | ed3ef911e0776e225291a370220f0d9476afdd4e | [
"BSD-3-Clause"
] | 1 | 2015-11-06T07:51:04.000Z | 2015-11-06T07:51:04.000Z | setup.py | charettes/cricket | ed3ef911e0776e225291a370220f0d9476afdd4e | [
"BSD-3-Clause"
] | null | null | null | setup.py | charettes/cricket | ed3ef911e0776e225291a370220f0d9476afdd4e | [
"BSD-3-Clause"
] | null | null | null | #/usr/bin/env python
import sys
from setuptools import setup
from cricket import VERSION
# Load the long description from the README. A context manager replaces the
# original try/finally, which raised NameError in the finally clause
# (readme unbound) whenever open() itself failed, and guarantees the
# handle is closed.
with open('README.rst') as readme:
    long_description = readme.read()
# Runtime dependencies; Python < 2.7 additionally needs the argparse and
# unittest2 backports plus the separately-packaged ttk widgets.
required_pkgs = [
    'tkreadonly',
]
if sys.version_info < (2, 7):
    required_pkgs.extend(['argparse', 'unittest2', 'pyttk'])
# Package metadata & console entry points for the cricket GUI test runner.
setup(
    name='cricket',
    version=VERSION,
    description='A graphical tool to assist running test suites.',
    long_description=long_description,
    author='Russell Keith-Magee',
    author_email='russell@keith-magee.com',
    url='http://pybee.org/cricket',
    packages=[
        'cricket',
        'cricket.django',
        'cricket.unittest',
    ],
    install_requires=required_pkgs,
    scripts=[],
    # One launcher per supported test framework backend
    entry_points={
        'console_scripts': [
            'cricket-django = cricket.django.__main__:main',
            'cricket-unittest = cricket.unittest.__main__:main',
        ]
    },
    license='New BSD',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Topic :: Software Development',
        'Topic :: Software Development :: Testing',
        'Topic :: Utilities',
    ],
    test_suite='tests'
)
| 25.641509 | 66 | 0.61663 |
e117f67f0c631749e3bd721fb7eedb16a22bb6f3 | 2,701 | py | Python | experimentation/tools/sorald/legacy/violation_scraper_apache_commons.py | gothius/sorald | 4c8761da495e528389c033660fae1f3c3a18cac3 | [
"MIT"
] | 49 | 2020-06-04T20:30:20.000Z | 2022-03-16T01:30:20.000Z | experimentation/tools/sorald/legacy/violation_scraper_apache_commons.py | gothius/sorald | 4c8761da495e528389c033660fae1f3c3a18cac3 | [
"MIT"
] | 551 | 2020-06-02T13:33:56.000Z | 2022-03-31T15:58:17.000Z | experimentation/tools/sorald/legacy/violation_scraper_apache_commons.py | gothius/sorald | 4c8761da495e528389c033660fae1f3c3a18cac3 | [
"MIT"
] | 12 | 2020-06-04T11:39:43.000Z | 2022-03-27T20:04:30.000Z | import requests;
import json;
from collections import Counter # Counts and orders the list of violations
import sys;
from urllib.parse import quote_plus # Make sysarg url-safe
# List of Apache Commons libraries which I know can be analyzed (without crashing/failing their tests)
# Project keys on the SonarQube instance follow the "commons-<name>" scheme
# (see get_violations below).
commonsList = ["bcel",
               "beanutils",
               "cli",
               "codec",
               "collections",
               "compress",
               "configuration",
               "crypto",
               "csv",
               "daemon",
               "dbcp",
               "dbutils",
               "exec",
               "fileupload",
               "geometry",
               "imaging",
               "io",
               "jexl",
               "lang",
               "logging",
               "math",
               "net",
               "ognl",
               "pool",
               "scxml",
               "statistics",
               "text",
               "validator",
               "vfs"];
# Number of issues per page (Max 500 — SonarQube caps the "ps" parameter)
pageSize = 500;
def set_cmd_values():
    '''Resolve (url, project_key) from the command line.

    Defaults to a local SonarQube instance at http://127.0.0.1:9000/ when
    no URL argument is given; a trailing slash is appended if missing.
    The optional second argument (a specific project key, e.g. when one
    instance hosts multiple projects) is made URL-safe with quote_plus.
    '''
    url = "http://127.0.0.1:9000/"
    project_key = ""
    if len(sys.argv) > 1:
        url = sys.argv[1]
        if not url.endswith("/"):
            url += "/"
    if len(sys.argv) > 2:
        project_key = quote_plus(sys.argv[2])
    return (url, project_key)
# Fill array with SQ violations. Keep making calls until all (up to 10000 since SQ doesn't support more) issues have been caught.
def get_violations(url, project_key):
    '''Collect the rule keys of every unresolved BUG-type issue.

    Iterates over all projects in commonsList, paging through the
    SonarQube /api/issues/search endpoint until an empty page is
    returned for each project. Returns a flat list of rule keys (one
    entry per violation, duplicates included, for later Counter ranking).
    '''
    violated_rules = [];
    for lib in commonsList:
        violations_remaining = True;
        pageIndex = 1;
        # NOTE(review): deliberately shadows the project_key parameter —
        # each Apache Commons project key follows the "commons-<lib>" scheme,
        # so the incoming argument is effectively ignored here.
        project_key = "commons-" + lib;
        while(violations_remaining):
            request_string = url + 'api/issues/search?resolved=false';
            if (not project_key == ""):
                request_string += '&componentKeys=' + project_key;
            request_string += '&ps=' + str(pageSize) + '&pageIndex=' + str(pageIndex);
            request = requests.get(request_string);
            if(request.status_code == 200):
                request_json = request.json();
                issues = request_json['issues'];
                # An empty page means this project is fully consumed
                if(len(issues) == 0):
                    violations_remaining = False;
                # Only BUG-type issues are collected
                for issue in issues:
                    if(issue['type'] == "BUG"):
                        violated_rules.append(issue['rule']);
            # NOTE(review): a non-200 response never clears
            # violations_remaining, so the loop would spin forever on a
            # persistent server error — confirm intended failure behavior.
            pageIndex += 1;
    return violated_rules;
# Pretty-print a list of (rule, count) tuples: echo each entry to stdout and
# persist them, one per line, to "ordered_violations_list.txt".
def pretty_print(listVar):
    # `with` guarantees the file is closed (the original leaked the handle:
    # it opened the file and never called close/flush).
    with open("ordered_violations_list.txt", "w") as f:
        for obj in listVar:
            print(obj)
            # Same "<rule>, <count>\n" format convertTuple() produces
            f.write(obj[0] + ", " + str(obj[1]) + "\n")
def convertTuple(tup):
    '''Format a (rule, count) pair as one "rule, count" output line.'''
    rule, count = tup[0], tup[1]
    return rule + ", " + str(count) + "\n"
def main():
    # Resolve (url, project_key) from argv, scrape all unresolved BUG-type
    # violations from the SonarQube instance, and rank rules by frequency
    init_values = set_cmd_values();
    ordered_violations = (Counter(get_violations(init_values[0], init_values[1])).most_common());
    # Echo the ranking and persist it to ordered_violations_list.txt
    pretty_print(ordered_violations);
if __name__ == "__main__":
    main();
| 28.734043 | 157 | 0.640133 |