id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
1975892 | <gh_stars>0
import os
import rdflib
from django.conf import settings
from rdflib import Graph, Literal, BNode, Namespace, RDF, URIRef, RDFS, ConjunctiveGraph
from rdflib.namespace import DC, FOAF, RDFS, XSD
from rdflib.namespace import SKOS
from places.serializer_arche import place_to_arche, inst_to_arche, person_to_arche
from arche.serializer_arche import collection_to_arche
# RDF namespaces used throughout the ARCHE serializers.
ARCHE = Namespace('https://vocabs.acdh.oeaw.ac.at/schema#')
ACDH = Namespace('https://id.acdh.oeaw.ac.at/')
GEONAMES = Namespace('http://www.geonames.org/ontology#')

# Base URL for minted entity URIs; falls back to a placeholder when the
# Django settings do not provide ARCHE_SETTINGS or the 'base_url' key.
# (Original only caught AttributeError; a present-but-incomplete
# ARCHE_SETTINGS dict raised KeyError and crashed at import time.)
try:
    base_url = settings.ARCHE_SETTINGS['base_url']
except (AttributeError, KeyError):
    base_url = "https://please/provide/ARCHE-SETTINGS"

# Raw string: the original plain string relied on '\O' not being a
# recognized escape sequence, which is a DeprecationWarning in Python 3.
LOCATION_PATH = r"Y:\OREA_DOKU_PLATTFORM-Thunau_Vers2_aktuell 08 03 2017"
def document_to_arche(itmes):
    """Serialize a queryset of Document objects into an ARCHE rdflib.Graph.

    Each document becomes an ``arche:Resource`` with title, creators,
    metadata creator, creation date, notes, identifier and parent
    collection, when those fields are present on the object.
    """
    graph = rdflib.Graph()
    for doc in itmes:
        subj = URIRef('/'.join([base_url, 'document', str(doc.id)]))
        graph.add((subj, RDF.type, ARCHE.Resource))
        if doc.filename:
            graph.add((subj, ARCHE.hasTitle, Literal(doc.filename)))
        authors = doc.author.all()
        if authors:
            # Merge the authors' own triples, then link each as creator.
            graph += person_to_arche(authors)
            for author in authors:
                author_uri = URIRef(
                    '/'.join([base_url, 'person', str(author.id)]))
                graph.add((subj, ARCHE.hasCreator, author_uri))
        if doc.curator:
            graph += person_to_arche([doc.curator])
            curator_uri = URIRef(
                '/'.join([base_url, 'person', str(doc.curator.id)]))
            graph.add((subj, ARCHE.hasMetadataCreator, curator_uri))
        if doc.date_digitization:
            graph.add((
                subj, ARCHE.hasCreatedDate,
                Literal(doc.date_digitization, datatype=XSD.date))
            )
        if doc.note:
            graph.add((subj, ARCHE.hasNote, Literal(doc.note)))
        if doc.content:
            # Content is stored as a second note triple, like the original.
            graph.add((subj, ARCHE.hasNote, Literal(doc.content)))
        file_uri = doc.get_file_uri()
        if file_uri:
            graph.add((subj, ARCHE.hasIdentifier, URIRef(file_uri)))
        if doc.in_collection:
            graph += collection_to_arche([doc.in_collection])
            collection_uri = URIRef(
                '/'.join([base_url, 'collection', str(doc.in_collection.id)]))
            graph.add((subj, ARCHE.isPartOf, collection_uri))
    return graph
| StarcoderdataPython |
1689349 | <gh_stars>0
# Copyright 2021 Universität Tübingen, DKFZ and EMBL
# for the German Human Genome-Phenome Archive (GHGA)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains functionalities for greeting persons."""
import random
from datetime import datetime
from ..models import Greeting, GreetingExpression
# Registry of available greeting expressions; generate_greeting() filters
# this list by language and formality at call time.
GREETINGS_EXPRESSIONS = [
    GreetingExpression(expression="Καλημέρα", language="Greek", isinformal=False),
    GreetingExpression(expression="Γεια σου", language="Greek", isinformal=True),
    GreetingExpression(expression="Γεια", language="Greek", isinformal=True),
    GreetingExpression(expression="Dobar dan", language="Croatian", isinformal=False),
    GreetingExpression(expression="Bok", language="Croatian", isinformal=True),
    GreetingExpression(expression="Zdravo", language="Croatian", isinformal=True),
    GreetingExpression(expression="Bonjour", language="French", isinformal=False),
    GreetingExpression(expression="Salut", language="French", isinformal=True),
    GreetingExpression(expression="Guten Tag", language="German", isinformal=False),
    GreetingExpression(expression="Moin moin", language="German", isinformal=True),
]
def generate_greeting(name: str, language: str, isinformal: bool):
    """Generate a greeting for a specific person.

    Args:
        name: Name of the person to greet.
        language: Language to greet in; must match an entry in
            GREETINGS_EXPRESSIONS.
        isinformal: Whether an informal expression should be used.

    Returns:
        A Greeting object with the assembled phrase and metadata.

    Raises:
        ValueError: If no expression matches the language/formality pair.
    """
    # search for suitable expressions (might be multiple):
    expression_hits = [
        expr
        for expr in GREETINGS_EXPRESSIONS
        if expr.language == language and expr.isinformal == isinformal
    ]
    # throw error if no hits were found:
    if not expression_hits:
        # Bug fix: the second string literal was missing the `f` prefix,
        # so "{isinformal}" appeared verbatim in the error message.
        raise ValueError(
            f'No greeting expressions found for language=="{language}" '
            f'and isinformal="{isinformal}"'
        )
    # pick a random expression from the list of hits:
    expression = random.choice(expression_hits)  # nosec
    # assemble the greeting phrase:
    greeting_phrase = f"{expression.expression} {name}!"
    # return a Greeting object:
    return Greeting(
        message=greeting_phrase,
        created_at=datetime.now(),
        language=expression.language,
        isinformal=expression.isinformal,
    )
| StarcoderdataPython |
12829339 | <gh_stars>10-100
# from .libload import proxy, register_dll, _register_dll
# from .loader import (MassLynxRawLoader, is_waters_raw_dir,
# determine_if_available, infer_reader,
# IndexEntry, Cycle)
# __all__ = [
# "proxy", "register_dll", "_register_dll",
# "MassLynxRawLoader", "is_waters_raw_dir",
# "determine_if_available", "infer_reader",
# "IndexEntry", "Cycle"
# ] | StarcoderdataPython |
8058306 | <reponame>duo-labs/py_webauthn
import json
from typing import Any, Union
from .base64url_to_bytes import base64url_to_bytes
def _object_hook_base64url_to_bytes(orig_dict: dict) -> dict:
    """
    A function for the `object_hook` argument in json.loads() that knows which fields in
    an incoming JSON string need to be converted from Base64URL to bytes.
    """
    # Fields that carry Base64URL-encoded binary payloads.
    binary_fields = (
        # Registration and Authentication
        "rawId",
        "clientDataJSON",
        # Registration
        "attestationObject",
        # Authentication
        "authenticatorData",
        "signature",
        "userHandle",
    )
    for field in binary_fields:
        if field in orig_dict:
            orig_dict[field] = base64url_to_bytes(orig_dict[field])
    return orig_dict
def json_loads_base64url_to_bytes(input: Union[str, bytes]) -> Any:
    """
    Wrap `json.loads()` with a custom object_hook that knows which dict keys to convert
    from Base64URL to bytes when converting from JSON to Pydantic model
    """
    parsed = json.loads(input, object_hook=_object_hook_base64url_to_bytes)
    return parsed
| StarcoderdataPython |
5000927 | <reponame>zkan/try-airflow
import os
# Root directory of the Airflow installation (None if the env var is unset).
AIRFLOW_HOME = os.environ.get('AIRFLOW_HOME')

# Read the colon-separated timestamp produced by an upstream task,
# then persist just its second component (the minutes field).
with open(f'{AIRFLOW_HOME}/dags/time.txt') as source_file:
    min_data = source_file.read()

mins_list = min_data.split(':')

with open(f'{AIRFLOW_HOME}/dags/mins.txt', 'w') as target_file:
    target_file.write(str(mins_list[1]))
| StarcoderdataPython |
261266 | import pandas as pd
import random
from sklearn import preprocessing
from sklearn import decomposition
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn import svm
from sklearn import feature_selection
from sklearn import model_selection
from sklearn import metrics
from sklearn import tree
from sklearn import linear_model
import numpy as np
def tradaboost(trans_S, trans_A, label_S, label_A, test, N):
    """Instance-transfer boosting (TrAdaBoost-style) binary classifier.

    Boosts a weighted SGD base learner over the union of an auxiliary
    (source) set and a same-distribution (target) set, down-weighting
    misclassified auxiliary instances and up-weighting misclassified
    same-distribution instances each round.

    Args:
        trans_S: feature matrix of the same-distribution training set.
        trans_A: feature matrix of the auxiliary (source-domain) set.
        label_S: labels for trans_S (expected 0/1 -- the final vote
            assigns predictions in {0, 1}).
        label_A: labels for trans_A.
        test: feature matrix of the instances to predict.
        N: number of boosting rounds.

    Returns:
        1-D numpy array of 0/1 predictions for ``test``.
    """
    # Auxiliary data first, same-distribution data second: row offsets
    # below (row_A, row_A + row_S) rely on this ordering.
    trans_data = np.concatenate((trans_A, trans_S), axis=0)
    trans_label = np.concatenate((label_A, label_S), axis=0)
    row_A = trans_A.shape[0]
    row_S = trans_S.shape[0]
    row_T = test.shape[0]
    # The base learner predicts on train + test in one call; the last
    # row_T rows of result_label are the test predictions per round.
    test_data = np.concatenate((trans_data, test), axis=0)
    # Column-vector sample weights, uniform within each domain.
    weights_A = np.ones([row_A, 1]) / row_A
    weights_S = np.ones([row_S, 1]) / row_S
    weights = np.concatenate((weights_A, weights_S), axis=0)
    # Fixed down-weighting factor for auxiliary mistakes.
    bata = 1 / (1 + np.sqrt(2 * np.log(row_A / N)))
    # Per-round beta_t values, filled from the weighted error rate.
    bata_T = np.zeros([1, N])
    result_label = np.ones([row_A + row_S + row_T, N])
    predict = np.zeros([row_T])
    print('params initial finished.')
    trans_data = np.asarray(trans_data, order='C')
    trans_label = np.asarray(trans_label, order='C')
    test_data = np.asarray(test_data, order='C')
    print(trans_data.shape, trans_label.shape)
    for i in range(N):
        # Normalize weights into a distribution for the base learner.
        P = calculate_P(weights, trans_label)
        result_label[:, i] = train_classify(trans_data, trans_label, test_data, P)
        print('result:', result_label[:, i], row_A, row_S, i, result_label.shape)
        # Error measured only on the same-distribution slice.
        error_rate = calculate_error_rate(label_S, result_label[row_A:row_A + row_S, i],weights[row_A:row_A + row_S, :])
        print('Error rate:', error_rate)
        if error_rate > 0.5:
            error_rate = 0.5
        if error_rate == 0:
            # Perfect round: stop early; the final vote below uses
            # rounds ceil(N/2)..N with the reduced N.
            N = i
            break
        bata_T[0, i] = error_rate / (1 - error_rate)
        print(bata_T)
        # Same-distribution instances: increase weight when misclassified.
        for j in range(row_S):
            weights[row_A + j] = weights[row_A + j] * np.power(bata_T[0, i],(-np.abs(result_label[row_A + j, i] - label_S[j])))
        # Auxiliary instances: decrease weight when misclassified.
        for j in range(row_A):
            weights[j] = weights[j] * np.power(bata, np.abs(result_label[j, i] - label_A[j]))
    # Final hypothesis: weighted vote over the second half of the rounds.
    for i in range(row_T):
        left = np.sum(result_label[row_A + row_S + i, int(np.ceil(N / 2)):N] * np.log(1 / bata_T[0, int(np.ceil(N / 2)):N]))
        right = 0.5 * np.sum(np.log(1 / bata_T[0, int(np.ceil(N / 2)):N]))
        if left >= right:
            predict[i] = 1
        else:
            predict[i] = 0
    return predict
def calculate_P(weights, label):
    """Normalize sample weights into a probability distribution.

    ``label`` is unused; it is kept so existing call sites keep working.
    Returns a C-contiguous array whose entries sum to 1.
    """
    normalizer = np.sum(weights)
    distribution = weights / normalizer
    return np.asarray(distribution, order='C')
def train_classify(trans_data, trans_label, test_data, P):
    """Fit a weighted linear SGD classifier and predict on ``test_data``.

    ``P`` is a column vector of per-sample weights; only its first
    column is passed to ``fit`` as sample_weight.
    """
    model = linear_model.SGDClassifier()
    model.fit(trans_data, trans_label, sample_weight=P[:, 0])
    predictions = model.predict(test_data)
    return predictions
def calculate_error_rate(label_R, label_H, weight):
    """Weighted mean absolute disagreement between true and predicted labels.

    ``weight`` is a column vector; it is normalized to sum to 1 before
    weighting the per-sample absolute differences.
    """
    normalized = weight[:, 0] / np.sum(weight)
    return np.sum(normalized * np.abs(label_R - label_H))
def append_feature(dataframe, istest):
    """Augment a feature matrix with per-row summary statistics.

    Drops the first column of the DataFrame (presumably an id/label
    column -- TODO confirm against the callers) and appends three extra
    columns per row: the sum of the remaining features, their variance,
    and the number of missing (NaN) cells in the original row.

    Args:
        dataframe: pandas DataFrame holding the raw features.
        istest: Unused; retained for call-site compatibility. The
            original code had byte-identical if/else branches keyed on
            this flag, collapsed here into a single path.

    Returns:
        2-D numpy array: original features (minus the first column) plus
        the three appended statistic columns.
    """
    # Missing-value count per row, computed on the full DataFrame
    # (i.e. before the first column is dropped).
    lack_num = np.asarray(dataframe.isnull().sum(axis=1))
    X = dataframe.values
    X = X[:, 1:X.shape[1]]
    total_S = np.sum(X, axis=1)
    var_S = np.var(X, axis=1)
    X = np.c_[X, total_S]
    X = np.c_[X, var_S]
    X = np.c_[X, lack_num]
    return X
# --- Data loading -----------------------------------------------------------
# Edible mushrooms act as one domain, poisonous as the other.
train_df = pd.DataFrame(pd.read_csv("mushroom_e.csv"))
# NOTE(review): fillna() is not in-place and its return value is discarded
# here and below -- likely intended fillna(..., inplace=True); verify.
train_df.fillna(value=-999999)
train_df1 = pd.DataFrame(pd.read_csv("mushroom_t.csv"))
# print(len(train_df_ex))
# train_df1 = train_df_ex.sample(frac=0.1, replace=False, random_state=1)
# print(len(train_df1))
train_df1.fillna(value=-999999)
# test_df = pd.DataFrame(pd.read_csv("mushroom_t.csv"))
# test_df.fillna(value=-999999)
print(train_df.shape, train_df1.shape)
print(train_df, train_df1)
# --- Encoding ---------------------------------------------------------------
# NOTE(review): a single LabelEncoder is refit per column, so encodings are
# not guaranteed consistent between the two DataFrames.
le = preprocessing.LabelEncoder()
for col in train_df.columns:
    train_df[col] = le.fit_transform(train_df[col])
for col in train_df1.columns:
    train_df1[col] = le.fit_transform(train_df1[col])
train_data_T = train_df.values
train_data_S = train_df1.values
# test_data_S = test_df.values
# for i in range(len(test_data_S)):
#     for c in range(len(test_data_S[i])):
#         test_data_S[i][c] = ord(test_data_S[i][c])
print('data loaded.')
# label_T = train_data_T[:, train_data_T.shape[1] - 1]
# trans_T = append_feature(train_df, istest=False)
#
# label_S = train_data_S[:, train_data_S.shape[1] - 1]
# trans_S = append_feature(train_df1, istest=False)
#
# test_data_no = test_data_S[:, 0]
# test_data_S = append_feature(test_df, istest=True)
# First column holds the label; the remaining columns are features.
label_T = train_data_T[:, 0]
trans_T = train_data_T[:, 1:]
# trans_T = append_feature(train_df, istest=False)
label_S = train_data_S[:, 0]
trans_S = train_data_S[:, 1:]
# trans_S = append_feature(train_df1, istest=False)
# test_data_no = train_data_S[:, 0]
# test_data_no = test_data_no.astype('int')
# test_data_S = append_feature(test_df, istest=True)
print('data split end.', trans_S.shape, trans_T.shape, label_S.shape, label_T.shape)
# Standardize each domain independently.
scaler = preprocessing.StandardScaler()
trans_T =scaler.fit_transform(trans_T)
trans_S =scaler.fit_transform(trans_S)
# imputer_T = preprocessing.Imputer(missing_values='NaN', strategy='most_frequent', axis=0)
# imputer_S = preprocessing.Imputer(missing_values='NaN', strategy='most_frequent', axis=0)
#
# imputer_S.fit(trans_S, label_S)
#
# trans_T = imputer_S.transform(trans_T)
# trans_S = imputer_S.transform(trans_S)
# test_data_S = imputer_S.transform(test_data_S)
print('data preprocessed.', trans_S.shape, trans_T.shape, label_S.shape, label_T.shape)
# Keep only 10% of the same-distribution data for training; the rest is
# held out as the evaluation set.
X_train, X_test, y_train, y_test = model_selection.train_test_split(trans_S, label_S, test_size=0.90, random_state= 42)
print('data form.', X_train.shape, X_test.shape, y_train.shape, y_test.shape)
print(len(X_test))
# Run 30 boosting rounds of transfer AdaBoost.
pred = tradaboost(X_train, trans_T, y_train, label_T, X_test, 30)
print(pred.shape, pred)
# --- Evaluation -------------------------------------------------------------
true = 0
for i, data in enumerate(pred):
    if data == y_test[i]:
        true += 1
print('accuracy:', true/len(pred))
fpr, tpr, thresholds = metrics.roc_curve(y_true=y_test, y_score=pred, pos_label=1)
print('auc:', metrics.auc(fpr, tpr))
| StarcoderdataPython |
8093035 | <reponame>jimwaldo/HarvardX-Tools<gh_stars>1-10
#!/usr/bin/env python
"""
Tests whether a file (in .csv format) with each line, consisting of data about a single student,
is anonymous with to a particular level of k.
This program will take a set of fields in each line (hard coded, at the moment) and will check to
insure that there are at least k other lines with the same values for those fields. The fields
selected should be all those that can be used to re-identify a student. The program will ask for the
data file to test and the level of k to test.
The program can be run in either full or summary mode. In summary mode, the program will print the
number of lines that would violate the level of k specified. In full mode, all of sets of properties
that would violate the level of k are printed out, so that one can see what properties might need
to be smeared.
Created on May 28, 2014
@author: waldo
"""
import operator
import csv, sys
import utils
def buildKey(ids, dataLine):
    """
    Concatenate a set of fields together to build an overall key

    This is a simple approach to determining k-anonymity, in which all
    of the fields of interest are concatenated as a single key. The
    ids coming in should be a list of indexes into the fields in the dataLine.
    These will be concatenated in order to form a new key. Note that this
    currently assumes that all of the data fields are strings.
    """
    return ''.join(dataLine[i] for i in ids)
def makeDict(ids, infile):
    """
    Create and return a dictionary keyed by a concatenation of fields with value the number
    of entries containing all and only those fields.

    Taking a list of indexes into a line of a (csv) file and an open csv.reader(), build a
    dictionary that is keyed by the string concatenation of the fields in the index with
    value the number of times a line containing just those fields in those indexes occurs. Return
    the dictionary to the caller.
    """
    retDict = {}
    for line in infile:
        keyAnon = buildKey(ids, line)
        # dict.get avoids the double-lookup if/else of the original code.
        retDict[keyAnon] = retDict.get(keyAnon, 0) + 1
    return retDict
if __name__ == '__main__':
    """
    When run stand-alone, this script will query for a filename and a level of anonymity
    to check for the externally-connected data fields in the .csv file. The user will also
    be prompted for either a summary of the anonymity level (in which case only the number
    of records that fail to be at least anonymous to the level indicated) will be printed, or
    a full report, in which case the concatenation of fields that allow identification finer
    than the level entered will be printed. Note that the indexes of the fields that can be
    linked to external properties is hard-coded at the moment; it would be good to have a more
    flexible mechanism for this but finding one that is not error prone is difficult.

    The id fields that could connect to the outside are 0 -> course_id, 6 -> final_cc_cname,
    7 -> LoE, 8 -> YoB, 9 -> gender, and 17 -> nforum_posts]
    """
    # NOTE(review): Python 2 only -- uses print statements, dict.iteritems()
    # and reader.next(); needs porting before it can run on Python 3.
    # Hard-coded quasi-identifier column indexes (see docstring above).
    idFields = [0,6,7,8,9,17]
    # Take the three inputs from argv, or prompt interactively when absent.
    if len(sys.argv) < 4:
        fname = utils.getFileName('data file to test')
        kanon = utils.getIntVal('Enter value of k to test : ')
        full = utils.getStringVal('Enter s for summary, f for full report : ', ['s', 'f'])
    else:
        fname = sys.argv[1]
        kanon = int(sys.argv[2])
        full = sys.argv[3]
    fin = open(fname, 'rU')
    fread = csv.reader(fin)
    # totals[v-1] counts how many quasi-identifier buckets have exactly
    # v members (only tracked for v < kanon, i.e. the violators).
    totals = []
    for i in range(0,kanon):
        totals.append(0)
    # Skip the CSV header row.
    fread.next()
    anonDict = makeDict(idFields, fread)
    # Sort buckets by size so the smallest (worst) groups print first.
    sortedDict = sorted(anonDict.iteritems(), key=operator.itemgetter(1))
    for k,v in sortedDict:
        if v < kanon:
            totals[v-1] += 1
            # Full mode also prints the offending field combination.
            if full == 'f':
                print v, k
    for i in range(0,kanon-1):
        print 'Number of buckets with', i+1, 'entries is', totals[i]
| StarcoderdataPython |
156865 | <filename>links.py
import re
import base64
from Crypto.Cipher import AES
from Crypto.Protocol.KDF import PBKDF2
from Crypto.Hash import SHA256
import binascii
class Link(object):
    """A media link paired with a quality label and an optional RSK key.

    Subclasses implement is_encoded()/decode() for each encoding scheme.
    """

    def __init__(self, link, quality, rsk=None):
        self.link = link
        self.quality = quality
        # Decryption key material; only used by encrypted subclasses.
        self.rsk = rsk

    def is_valid(self):
        """Return a regex match object if self.link looks like a URL, else None."""
        pattern = re.compile(
            r'^(?:http|ftp)s?://'  # http:// or https://
            r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
            r'localhost|'  # localhost...
            r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
            r'(?::\d+)?'  # optional port
            r'(?:/?|[/?]\S+)$', re.IGNORECASE)
        return pattern.match(self.link)

    def is_encoded(self):
        # Subclass responsibility.
        raise NotImplementedError()

    def decode(self):
        # Subclass responsibility.
        raise NotImplementedError()

    def __repr__(self):
        return '<quality : {quality}, link : {link}>'.format(
            quality=self.quality, link=self.link)
class PlainLink(Link):
    """A link that is already a usable URL -- no decoding required."""

    def is_encoded(self):
        # Plain links never need decoding.
        return False

    def decode(self):
        # Already decoded; hand the URL back unchanged.
        return self.link
class Base64Link(Link):
    """A link whose URL is Base64-encoded."""

    def is_encoded(self):
        return True

    def decode(self):
        """Decode the Base64 payload; return a PlainLink, or None on failure."""
        try:
            decoded = base64.b64decode(self.link)
        except TypeError:
            # Malformed Base64 input (Python 2 raises TypeError here).
            return None
        # Mutate self.link so is_valid() inspects the decoded URL.
        self.link = decoded
        if not self.is_valid() or decoded is None:
            return None
        return PlainLink(decoded, self.quality)
class AESLink(Link):
    """A link whose URL is AES-CBC encrypted with a key derived from rsk.

    NOTE(review): Python 2 only -- relies on str.decode('hex'), byte-string
    indexing semantics, and the builtin ``unicode``.
    """

    def is_encoded(self):
        # AES links always require decryption.
        return True

    def decode(self):
        """Decrypt the payload; return a PlainLink, or None if the result
        does not look like a URL.

        NOTE(review): the IV is hard-coded and reused for every link, and
        the key is a single unsalted SHA-256 of self.rsk -- both must match
        whatever produced the ciphertext; flagging as a security smell.
        """
        # Hex-encoded, hard-coded AES initialization vector.
        a = "a5e8d2e9c1721ae0e84ad660c472c1f3"
        # Hash the rsk key to get the 32-byte AES-256 decryption key.
        sha = SHA256.new()
        sha.update(self.rsk)
        encKey = sha.digest()
        AES_KEY = AES.new(encKey, AES.MODE_CBC, a.decode('hex'))
        val = AES_KEY.decrypt(base64.b64decode(self.link))
        # Strip PKCS#7 padding: the last byte encodes the pad length.
        # (This local lambda shadows the unpad() method below.)
        unpad = lambda s: s[:-ord(s[len(s) - 1:])]
        val = unpad(val)
        self.link = val
        if self.is_valid():
            return PlainLink(self.link, self.quality)
        else:
            return None

    def unpad(self, text, k=16):
        '''
        Remove the PKCS#7 padding from a text string
        Made by https://gist.github.com/chrix2
        '''
        # NOTE(review): unused by decode(), which defines its own local
        # ``unpad`` lambda; kept for API compatibility.
        nl = len(text)
        val = int(binascii.hexlify(text[-1]), 16)
        if val > k:
            raise ValueError('Input is not padded or padding is corrupt')
        l = nl - val
        return text[:l]

    def ensure_unicode(self, v):
        # Python 2: promote byte strings to unicode before returning.
        if isinstance(v, str):
            v = v.decode('utf8')
        return unicode(v)
| StarcoderdataPython |
12191 | from dataclasses import dataclass
from typing import List
from typing import Union
from postmanparser.description import Description
from postmanparser.exceptions import InvalidObjectException
from postmanparser.exceptions import MissingRequiredFieldException
@dataclass
class FormParameter:
    """One entry of a multipart/form-data request body in a Postman collection."""

    key: str
    value: str = ""
    src: Union[List, str, None] = None
    disabled: bool = False
    form_param_type: str = ""
    content_type: str = ""  # should override content-type in header
    description: Union[Description, None, str] = None

    @classmethod
    def parse(cls, data: dict):
        """Build a FormParameter from its raw JSON dict.

        Raises MissingRequiredFieldException when 'key' is absent and
        InvalidObjectException when both 'value' and 'src' are supplied.
        """
        key = data.get("key")
        if key is None:
            raise MissingRequiredFieldException(
                "'formparameter' object should have 'key' property"
            )
        src = data.get("src")
        value = data.get("value", "")
        # A form parameter carries either an inline value or a file src.
        if value and src is not None:
            raise InvalidObjectException(
                "'formparamter' object can eiher have src or value and not both."
            )
        description = data.get("description")
        # Descriptions may be plain strings or structured objects.
        if isinstance(description, dict):
            description = Description.parse(description)
        return cls(
            key,
            value=value,
            src=src,
            disabled=data.get("disabled", False),
            form_param_type=data.get("type", ""),
            content_type=data.get("contentType", ""),
            description=description,
        )
| StarcoderdataPython |
8025296 | <filename>arekit/contrib/networks/context/architectures/att_self_p_zhou_rcnn.py
from arekit.contrib.networks.attention.architectures.self_p_zhou import self_attention_by_peng_zhou
from arekit.contrib.networks.context.architectures.att_self_rcnn import AttentionSelfRCNN
class AttentionSelfPZhouRCNN(AttentionSelfRCNN):
    """Self-attention RCNN using the Peng Zhou attention formulation."""

    def get_attention_alphas(self, rnn_outputs):
        # Only the attention weights are needed here; the attended
        # output returned first is discarded.
        _, alphas = self_attention_by_peng_zhou(rnn_outputs)
        return alphas
| StarcoderdataPython |
1932766 | #MenuTitle: Print Chars
# -*- coding: utf-8 -*-
__doc__="""
Print all chars from the font, useful for making specimens.
"""

# Glyphs macro (Python 2): walk every open font and print each glyph's
# character; the trailing comma keeps them space-separated on one line.
for f in Glyphs.fonts:
    for g in f.glyphs:
        print g.string,
| StarcoderdataPython |
1609213 | <reponame>lnxpy/ludwig<filename>ludwig/features/sequence_feature.py<gh_stars>0
#! /usr/bin/env python
# coding=utf-8
# Copyright (c) 2019 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
import numpy as np
from ludwig.constants import *
from ludwig.decoders.sequence_decoders import SequenceGeneratorDecoder
from ludwig.decoders.sequence_decoders import SequenceTaggerDecoder
from ludwig.encoders.sequence_encoders import ParallelCNN, StackedTransformer
from ludwig.encoders.sequence_encoders import SequenceEmbedEncoder
from ludwig.encoders.sequence_encoders import SequencePassthroughEncoder
from ludwig.encoders.sequence_encoders import StackedCNN
from ludwig.encoders.sequence_encoders import StackedCNNRNN
from ludwig.encoders.sequence_encoders import StackedParallelCNN
from ludwig.encoders.sequence_encoders import StackedRNN
from ludwig.encoders.text_encoders import *
from ludwig.features.base_feature import InputFeature
from ludwig.features.base_feature import OutputFeature
from ludwig.modules.loss_modules import SampledSoftmaxCrossEntropyLoss
from ludwig.modules.loss_modules import SequenceLoss
from ludwig.modules.metric_modules import EditDistanceMetric, \
SequenceAccuracyMetric
from ludwig.modules.metric_modules import PerplexityMetric
from ludwig.modules.metric_modules import SequenceLastAccuracyMetric
from ludwig.modules.metric_modules import SequenceLossMetric
from ludwig.modules.metric_modules import TokenAccuracyMetric
from ludwig.utils.horovod_utils import is_on_master
from ludwig.utils.math_utils import softmax
from ludwig.utils.metrics_utils import ConfusionMatrix
from ludwig.utils.misc_utils import set_default_value
from ludwig.utils.strings_utils import PADDING_SYMBOL
from ludwig.utils.strings_utils import UNKNOWN_SYMBOL
from ludwig.utils.strings_utils import build_sequence_matrix
from ludwig.utils.strings_utils import create_vocabulary
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
class SequenceFeatureMixin(object):
    """Shared preprocessing logic for sequence input and output features."""

    type = SEQUENCE

    # Defaults merged into each sequence feature's preprocessing config.
    preprocessing_defaults = {
        'sequence_length_limit': 256,
        'most_common': 20000,
        'padding_symbol': PADDING_SYMBOL,
        'unknown_symbol': UNKNOWN_SYMBOL,
        'padding': 'right',
        'tokenizer': 'space',
        'lowercase': False,
        'vocab_file': None,
        'missing_value_strategy': FILL_WITH_CONST,
        'fill_value': UNKNOWN_SYMBOL
    }

    @staticmethod
    def get_feature_meta(column, preprocessing_parameters):
        """Build vocabulary metadata (mappings, sizes) from a raw column."""
        idx2str, str2idx, str2freq, max_length, _, _, _ = create_vocabulary(
            column, preprocessing_parameters['tokenizer'],
            lowercase=preprocessing_parameters['lowercase'],
            num_most_frequent=preprocessing_parameters['most_common'],
            vocab_file=preprocessing_parameters['vocab_file'],
            unknown_symbol=preprocessing_parameters['unknown_symbol'],
            padding_symbol=preprocessing_parameters['padding_symbol'],
        )
        # Cap the observed max length at the configured limit.
        max_length = min(
            preprocessing_parameters['sequence_length_limit'],
            max_length
        )
        return {
            'idx2str': idx2str,
            'str2idx': str2idx,
            'str2freq': str2freq,
            'vocab_size': len(idx2str),
            'max_sequence_length': max_length
        }

    @staticmethod
    def feature_data(column, metadata, preprocessing_parameters):
        """Tokenize a raw column into a padded matrix of token indices."""
        sequence_data = build_sequence_matrix(
            sequences=column,
            inverse_vocabulary=metadata['str2idx'],
            tokenizer_type=preprocessing_parameters['tokenizer'],
            length_limit=metadata['max_sequence_length'],
            padding_symbol=preprocessing_parameters['padding_symbol'],
            padding=preprocessing_parameters['padding'],
            unknown_symbol=preprocessing_parameters['unknown_symbol'],
            lowercase=preprocessing_parameters['lowercase'],
            tokenizer_vocab_file=preprocessing_parameters[
                'vocab_file'
            ]
        )
        return sequence_data

    @staticmethod
    def add_feature_data(
            feature,
            dataset_df,
            dataset,
            metadata,
            preprocessing_parameters
    ):
        """Preprocess one feature's column and store it in the dataset dict."""
        sequence_data = SequenceInputFeature.feature_data(
            dataset_df[feature[NAME]].astype(str),
            metadata[feature[NAME]], preprocessing_parameters)
        dataset[feature[NAME]] = sequence_data
class SequenceInputFeature(SequenceFeatureMixin, InputFeature):
    """Input feature that encodes an integer token sequence."""

    encoder = 'embed'
    max_sequence_length = None

    def __init__(self, feature, encoder_obj=None):
        super().__init__(feature)
        self.overwrite_defaults(feature)
        # Allow sharing an already-built encoder (e.g. for tied features).
        if encoder_obj:
            self.encoder_obj = encoder_obj
        else:
            self.encoder_obj = self.initialize_encoder(feature)

    def call(self, inputs, training=None, mask=None):
        """Encode a [batch, sequence] tensor of integer token ids."""
        assert isinstance(inputs, tf.Tensor)
        assert inputs.dtype == tf.int8 or inputs.dtype == tf.int16 or \
               inputs.dtype == tf.int32 or inputs.dtype == tf.int64
        assert len(inputs.shape) == 2

        inputs_exp = tf.cast(inputs, dtype=tf.int32)
        # Token id 0 is treated as padding: mask it and derive lengths
        # from the count of non-zero entries per row.
        inputs_mask = tf.not_equal(inputs, 0)
        lengths = tf.reduce_sum(tf.cast(inputs_mask, dtype=tf.int32), axis=1)
        encoder_output = self.encoder_obj(
            inputs_exp, training=training, mask=inputs_mask
        )
        encoder_output[LENGTHS] = lengths
        return encoder_output

    def get_input_dtype(self):
        # Token ids are fed as 32-bit integers.
        return tf.int32

    def get_input_shape(self):
        # Variable-length sequence dimension.
        return None,

    @staticmethod
    def update_model_definition_with_metadata(
            input_feature,
            feature_metadata,
            *args,
            **kwargs
    ):
        # Copy vocabulary and length info from preprocessing metadata.
        input_feature['vocab'] = feature_metadata['idx2str']
        input_feature['max_sequence_length'] = feature_metadata[
            'max_sequence_length']

    @staticmethod
    def populate_defaults(input_feature):
        set_default_value(input_feature, TIED, None)
        set_default_value(input_feature, 'encoder', 'parallel_cnn')

    # Maps the 'encoder' config string to its implementation class.
    encoder_registry = {
        'stacked_cnn': StackedCNN,
        'parallel_cnn': ParallelCNN,
        'stacked_parallel_cnn': StackedParallelCNN,
        'rnn': StackedRNN,
        'cnnrnn': StackedCNNRNN,
        'transformer': StackedTransformer,
        'embed': SequenceEmbedEncoder,
        'passthrough': SequencePassthroughEncoder,
        'null': SequencePassthroughEncoder,
        'none': SequencePassthroughEncoder,
        'None': SequencePassthroughEncoder,
        None: SequencePassthroughEncoder
    }
class SequenceOutputFeature(SequenceFeatureMixin, OutputFeature):
    """Output feature that decodes and scores a token sequence."""

    # Default decoder and loss; overridable per feature definition.
    decoder = 'generator'
    loss = {TYPE: SOFTMAX_CROSS_ENTROPY}
    # Placeholder mapping; _setup_metrics() rebuilds it per instance.
    metric_functions = {LOSS: None, TOKEN_ACCURACY: None,
                        SEQUENCE_ACCURACY: None, LAST_ACCURACY: None,
                        PERPLEXITY: None, EDIT_DISTANCE: None}
    default_validation_metric = LOSS
    # Filled in from feature metadata at model-build time.
    max_sequence_length = 0
    num_classes = 0
def __init__(self, feature):
    super().__init__(feature)
    self.overwrite_defaults(feature)
    self.decoder_obj = self.initialize_decoder(feature)
    # Loss must be set up before metrics: _setup_metrics() reuses
    # self.eval_loss_function created in _setup_loss().
    self._setup_loss()
    self._setup_metrics()
def _setup_loss(self):
    """Instantiate train/eval loss objects from the feature's loss config."""
    if self.loss[TYPE] == 'softmax_cross_entropy':
        self.train_loss_function = SequenceLoss()
    elif self.loss[TYPE] == 'sampled_softmax_cross_entropy':
        # Sampled softmax needs the decoder and class counts for sampling.
        self.train_loss_function = SampledSoftmaxCrossEntropyLoss(
            decoder_obj=self.decoder_obj,
            num_classes=self.num_classes,
            feature_loss=self.loss,
            name='train_loss'
        )
    else:
        raise ValueError(
            "Loss type {} is not supported. Valid values are "
            "'softmax_cross_entropy' or "
            "'sampled_softmax_cross_entropy'".format(self.loss[TYPE])
        )
    # Evaluation always uses the full (non-sampled) sequence loss.
    self.eval_loss_function = SequenceLossMetric()
def _setup_metrics(self):
    """Create one metric object per tracked sequence metric."""
    self.metric_functions = {}  # needed to shadow class variable
    self.metric_functions[LOSS] = self.eval_loss_function
    self.metric_functions[TOKEN_ACCURACY] = TokenAccuracyMetric()
    self.metric_functions[SEQUENCE_ACCURACY] = SequenceAccuracyMetric()
    self.metric_functions[LAST_ACCURACY] = SequenceLastAccuracyMetric()
    self.metric_functions[PERPLEXITY] = PerplexityMetric()
    self.metric_functions[EDIT_DISTANCE] = EditDistanceMetric()
# overrides super class OutputFeature.update_metrics() method
def update_metrics(self, targets, predictions):
    """Feed one batch of targets/predictions into every eval metric."""
    for metric, metric_fn in self.metric_functions.items():
        if metric == LOSS or metric == PERPLEXITY:
            # Loss-style metrics consume the whole predictions dict.
            metric_fn.update_state(targets, predictions)
        elif metric == LAST_ACCURACY:
            # Last-token accuracy only needs the final predicted tokens.
            metric_fn.update_state(targets, predictions[LAST_PREDICTIONS])
        else:
            metric_fn.update_state(targets, predictions[PREDICTIONS])
def logits(
        self,
        inputs,
        target=None,
        training=None
):
    """Return decoder logits during training; pass inputs through otherwise.

    During training the decoder consumes the gold target sequence
    (teacher forcing); at eval time decoding happens in predictions(),
    so the encoder outputs are forwarded unchanged.
    """
    if training:
        return self.decoder_obj._logits_training(
            inputs,
            target=tf.cast(target, dtype=tf.int32),
            training=training
        )
    else:
        return inputs
def predictions(self, inputs, training=None):
    """Run the decoder's evaluation-time (generative) prediction path."""
    # Generator Decoder
    return self.decoder_obj._predictions_eval(inputs, training=training)
def get_output_dtype(self):
    # Target sequences are 32-bit integer token ids.
    return tf.int32
def get_output_shape(self):
    # One token id per position, up to the feature's max length.
    return self.max_sequence_length,
@staticmethod
def update_model_definition_with_metadata(
        output_feature,
        feature_metadata,
        *args,
        **kwargs
):
    """Fill the output feature's definition from preprocessing metadata.

    Copies vocabulary size and max length, validates class_weights and
    class_similarities against the number of classes, applies a softmax
    (with temperature) to each similarity row, and precomputes class
    counts for sampled softmax.
    """
    output_feature['num_classes'] = feature_metadata['vocab_size']
    output_feature['max_sequence_length'] = (
        feature_metadata['max_sequence_length']
    )
    # One class weight per vocabulary entry (including <UNK>/<PAD>).
    if isinstance(output_feature[LOSS]['class_weights'], (list, tuple)):
        if (len(output_feature[LOSS]['class_weights']) !=
                output_feature['num_classes']):
            raise ValueError(
                'The length of class_weights ({}) is not compatible with '
                'the number of classes ({}) for feature {}. '
                'Check the metadata JSON file to see the classes '
                'and their order and consider there needs to be a weight '
                'for the <UNK> and <PAD> class too.'.format(
                    len(output_feature[LOSS]['class_weights']),
                    output_feature['num_classes'],
                    output_feature[NAME]
                )
            )

    if output_feature[LOSS]['class_similarities_temperature'] > 0:
        if 'class_similarities' in output_feature[LOSS]:
            similarities = output_feature[LOSS]['class_similarities']
            temperature = output_feature[LOSS][
                'class_similarities_temperature']

            # Validate the similarity matrix is square: every row must
            # match the first row's length...
            curr_row = 0
            first_row_length = 0
            is_first_row = True
            for row in similarities:
                if is_first_row:
                    first_row_length = len(row)
                    is_first_row = False
                    curr_row += 1
                else:
                    curr_row_length = len(row)
                    if curr_row_length != first_row_length:
                        raise ValueError(
                            'The length of row {} of the class_similarities '
                            'of {} is {}, different from the length of '
                            'the first row {}. All rows must have '
                            'the same length.'.format(
                                curr_row,
                                output_feature[NAME],
                                curr_row_length,
                                first_row_length
                            )
                        )
                    else:
                        curr_row += 1
            all_rows_length = first_row_length

            # ...and the row count must equal the column count...
            if all_rows_length != len(similarities):
                raise ValueError(
                    'The class_similarities matrix of {} has '
                    '{} rows and {} columns, '
                    'their number must be identical.'.format(
                        output_feature[NAME],
                        len(similarities),
                        all_rows_length
                    )
                )

            # ...and both must equal the number of classes.
            if all_rows_length != output_feature['num_classes']:
                raise ValueError(
                    'The size of the class_similarities matrix of {} is '
                    '{}, different from the number of classe ({}). '
                    'Check the metadata JSON file to see the classes '
                    'and their order and '
                    'consider <UNK> and <PAD> class too.'.format(
                        output_feature[NAME],
                        all_rows_length,
                        output_feature['num_classes']
                    )
                )

            # Turn each row into a temperature-scaled distribution.
            similarities = np.array(similarities, dtype=np.float32)
            for i in range(len(similarities)):
                similarities[i, :] = softmax(
                    similarities[i, :],
                    temperature=temperature
                )

            output_feature[LOSS]['class_similarities'] = similarities
        else:
            raise ValueError(
                'class_similarities_temperature > 0, '
                'but no class_similarities are provided '
                'for feature {}'.format(output_feature[NAME])
            )

    # Sampled softmax needs per-class frequencies for its sampler.
    if output_feature[LOSS][TYPE] == 'sampled_softmax_cross_entropy':
        output_feature[LOSS]['class_counts'] = [
            feature_metadata['str2freq'][cls]
            for cls in feature_metadata['idx2str']
        ]
@staticmethod
def calculate_overall_stats(
        predictions,
        targets,
        train_set_metadata
):
    """Compute confusion-matrix statistics on each sequence's LAST token."""
    overall_stats = {}
    sequences = targets
    # Index of the last non-zero (non-padding) token per row; that token
    # is compared against the model's LAST_PREDICTIONS output.
    last_elem_sequence = sequences[np.arange(sequences.shape[0]),
                                   (sequences != 0).cumsum(1).argmax(1)]
    confusion_matrix = ConfusionMatrix(
        last_elem_sequence,
        predictions[LAST_PREDICTIONS],
        labels=train_set_metadata['idx2str']
    )
    overall_stats['confusion_matrix'] = confusion_matrix.cm.tolist()
    overall_stats['overall_stats'] = confusion_matrix.stats()
    overall_stats['per_class_stats'] = confusion_matrix.per_class_stats()
    return overall_stats
def postprocess_predictions(
self,
result,
metadata,
output_directory,
skip_save_unprocessed_output=False,
):
postprocessed = {}
name = self.feature_name
npy_filename = None
if is_on_master():
npy_filename = os.path.join(output_directory, '{}_{}.npy')
else:
skip_save_unprocessed_output = True
if PREDICTIONS in result and len(result[PREDICTIONS]) > 0:
preds = result[PREDICTIONS]
lengths = result[LENGTHS]
if 'idx2str' in metadata:
postprocessed[PREDICTIONS] = [
[metadata['idx2str'][token]
if token < len(metadata['idx2str']) else UNKNOWN_SYMBOL
for token in [pred[i] for i in range(length)]]
for pred, length in
[(preds[j], lengths[j]) for j in range(len(preds))]
]
else:
postprocessed[PREDICTIONS] = preds
if not skip_save_unprocessed_output:
np.save(npy_filename.format(name, PREDICTIONS), preds)
del result[PREDICTIONS]
if LAST_PREDICTIONS in result and len(result[LAST_PREDICTIONS]) > 0:
last_preds = result[LAST_PREDICTIONS]
if 'idx2str' in metadata:
postprocessed[LAST_PREDICTIONS] = [
metadata['idx2str'][last_pred]
if last_pred < len(metadata['idx2str']) else UNKNOWN_SYMBOL
for last_pred in last_preds
]
else:
postprocessed[LAST_PREDICTIONS] = last_preds
if not skip_save_unprocessed_output:
np.save(npy_filename.format(name, LAST_PREDICTIONS),
last_preds)
del result[LAST_PREDICTIONS]
if PROBABILITIES in result and len(result[PROBABILITIES]) > 0:
probs = result[PROBABILITIES].numpy()
if probs is not None:
if len(probs) > 0 and isinstance(probs[0], list):
prob = []
for i in range(len(probs)):
# todo: should adapt for the case of beam > 1
for j in range(len(probs[i])):
probs[i][j] = np.max(probs[i][j])
prob.append(np.prod(probs[i]))
elif isinstance(probs, np.ndarray):
if (probs.shape) == 3: # prob of each class of each token
probs = np.amax(probs, axis=-1)
prob = np.prod(probs, axis=-1)
# commenting probabilities out because usually it is huge:
# dataset x length x classes
# todo: add a mechanism for letting the user decide to save it
# postprocessed[PROBABILITIES] = probs
postprocessed[PROBABILITY] = prob
if not skip_save_unprocessed_output:
# commenting probabilities out, see comment above
# np.save(npy_filename.format(name, PROBABILITIES), probs)
np.save(npy_filename.format(name, PROBABILITY), prob)
del result[PROBABILITIES]
if LENGTHS in result:
del result[LENGTHS]
return postprocessed
@staticmethod
def populate_defaults(output_feature):
set_default_value(
output_feature,
LOSS,
{
TYPE: 'softmax_cross_entropy',
'sampler': None,
'negative_samples': 0,
'distortion': 1,
'labels_smoothing': 0,
'class_weights': 1,
'robust_lambda': 0,
'confidence_penalty': 0,
'class_similarities_temperature': 0,
'weight': 1
}
)
set_default_value(output_feature[LOSS], TYPE,
'softmax_cross_entropy')
set_default_value(output_feature[LOSS], 'labels_smoothing', 0)
set_default_value(output_feature[LOSS], 'class_weights', 1)
set_default_value(output_feature[LOSS], 'robust_lambda', 0)
set_default_value(output_feature[LOSS], 'confidence_penalty', 0)
set_default_value(output_feature[LOSS],
'class_similarities_temperature', 0)
set_default_value(output_feature[LOSS], 'weight', 1)
if output_feature[LOSS][TYPE] == 'sampled_softmax_cross_entropy':
set_default_value(output_feature[LOSS], 'sampler', 'log_uniform')
set_default_value(output_feature[LOSS], 'negative_samples', 25)
set_default_value(output_feature[LOSS], 'distortion', 0.75)
else:
set_default_value(output_feature[LOSS], 'sampler', None)
set_default_value(output_feature[LOSS], 'negative_samples', 0)
set_default_value(output_feature[LOSS], 'distortion', 1)
set_default_value(output_feature[LOSS], 'unique', False)
set_default_value(output_feature, 'decoder', 'generator')
if output_feature['decoder'] == 'tagger':
set_default_value(output_feature, 'reduce_input', None)
set_default_value(output_feature, 'dependencies', [])
set_default_value(output_feature, 'reduce_input', SUM)
set_default_value(output_feature, 'reduce_dependencies', SUM)
decoder_registry = {
'generator': SequenceGeneratorDecoder,
'tagger': SequenceTaggerDecoder
}
| StarcoderdataPython |
def persistence(n):
    """Return the multiplicative persistence of ``n``.

    That is, how many times the digits of ``n`` must be multiplied together
    (repeating on the product) before a single-digit number remains.
    e.g. 39 -> 27 -> 14 -> 4, so persistence(39) == 3.
    """
    steps = 0
    digits = [int(ch) for ch in str(n)]
    while len(digits) > 1:
        product = 1
        for d in digits:
            product *= d
        digits = [int(ch) for ch in str(product)]
        steps += 1
    return steps
3208969 | import logging
import joblib
import scipy.sparse as sparce
import numpy as np
def save_matrix(df, matrix, out_path):
    """Persist a feature matrix with its ids and labels prepended.

    Stacks the ``id`` and ``label`` columns of ``df`` (as int64 CSR column
    vectors) in front of ``matrix``, dumps the combined CSR matrix to
    ``out_path`` via joblib, and logs where it was written.
    """
    ids = sparce.csr_matrix(df.id.astype(np.int64)).T
    labels = sparce.csr_matrix(df.label.astype(np.int64)).T
    combined = sparce.hstack([ids, labels, matrix], format="csr")
    joblib.dump(combined, out_path)
    msg = f"The output matrix saved at: {out_path} of the size:{combined.shape} and data type:{combined.dtype}"
    logging.info(msg)
| StarcoderdataPython |
3302867 | import glob
import subprocess
from subprocess import Popen, PIPE
import sys
import os
import shutil
sys.path.append(os.environ["COVALENTIZER"])
from Code import *
def main(name, argv):
    """Run the Covalentizer pipeline (Python 2 script).

    Expects ``argv == [pdb_file, lig_name]``.  Stages (each logged to
    log.txt): find cysteines near the ligand, build covalentized fragment
    libraries per ligand, submit cluster jobs to prepare structures and dock
    the libraries against each cysteine rotamer, then analyze and collect
    results under Results/.  Returns early (after logging) on any
    empty-result or cluster-timeout condition.
    """
    if len(argv) != 2:
        print_usage(name)
        return
    # Per-stage cluster wait limit: 12 hours, in seconds.
    timeout = 12*60*60
    # Python 2 unbuffered text file so log lines appear immediately.
    f = open('log.txt', 'w', 0)
    f.write('Covalentizer has started.\n')
    curr_dir = os.getcwd()
    cluster = Cluster.Cluster()
    f.write('INFO: Looking for cysteine tiol within 6A of any of the ligand atoms.\n')
    # Each entry of res describes a ligand/cysteine hit; r[0] is used below as
    # the ligand identifier and r[2] is passed to seperate_rec_lig.
    res = PYMOLUtils.env_cysteine(argv[0], argv[1])
    with open('res.txt', 'w') as fres:
        for r in res:
            fres.write("\t".join(r) + '\n')
    if len(res) == 0:
        f.write('WARNING: Did not find cysteines which are close to any of the ligand.\n')
        f.close()
        return
    f.write('INFO: Create folders for the different cysteines and rotamers.\n')
    CysUtils.cysteine_folders(argv[0])
    #dirlist = glob.glob("CYS*/*/ROT*/")
    #with open('dirlist', 'w') as dirf:
    #    for line in dirlist:
    #        dirf.write(line + '\n')
    # ---- Stage: split receptor/ligand and compute molecular weights ----
    os.mkdir('Ligands/')
    ligands = {}
    ligands_to_remove = []
    for r in res:
        if r[0] in ligands:
            continue
        ligands[r[0]] = curr_dir + '/Ligands/' + r[0] + '/gz_files/'
        os.mkdir('Ligands/' + r[0])
        PYMOLUtils.seperate_rec_lig(argv[0], r[0], r[2])
        ChemUtils.convert('xtal-lig.pdb', 'Ligands/' + r[0] + '/' + 'smile.smi')
        mw = CovUtils.get_MW('Ligands/' + r[0] + '/' + 'smile.smi')
        # NOTE(review): the threshold here is 100 but the log messages below
        # say "<300D" — confirm which cutoff is intended.
        if mw == None or mw < 100:
            ligands_to_remove.append(r[0])
        os.remove('xtal-lig.pdb')
    if len(ligands_to_remove) > 0:
        f.write('INFO: Removing very small ligands (<300D) and ligand not readable by RDKit.\n')
        #Remove ligands smaller than 300D
        for lig in ligands_to_remove:
            del ligands[lig]
            shutil.rmtree('Ligands/' + lig + '/')
            lig_folders = glob.glob("CYS*/" + lig + "/")
            for l_fol in lig_folders:
                shutil.rmtree(l_fol)
            #dirlist = [d for d in dirlist if '/' + lig + '/' in d]
            f.write('INFO: Ligand ' + lig + ' was removed.\n')
    if len(ligands) == 0:
        f.write('ERROR: All ligands with nearby cysteines are smaller than 300D. Finish the run.\n')
        f.close()
        return
    #dirlist = glob.glob("CYS*/*/ROT*/")
    #with open('dirlist', 'w') as dirf:
    #    for line in dirlist:
    #        dirf.write(line + '\n')
    # ---- Stage: fragment each ligand and submit library-build jobs ----
    f.write('INFO: Build the ligands covalent libraries and prepare them for docking.\n')
    lig_jobs = []
    ligands_to_remove = []
    os.chdir('Ligands')
    for lig in ligands:
        os.chdir(lig)
        CovUtils.build_library('smile.smi', 'frags.smi', 'lib.smi', rules = os.environ["COVALENTIZER"] + "/Code/numbered_reaction.re")
        # An empty intermediate file means fragmentation/preparation failed.
        if os.stat('frags.smi').st_size == 0 or os.stat('smile.smi').st_size == 0 or os.stat('lib.smi').st_size == 0:
            ligands_to_remove.append(lig)
            os.chdir('../')
            continue
        # Collect the PBS job ids printed by the submission script.
        p = Popen([os.environ["DOCKBASE"] + '/ligand/generate/build_covalent_lib_medium.csh', 'lib.smi', '10', 'LIB'], stdin=PIPE, stdout=PIPE, stderr=PIPE)
        lig_jobs += [line for line in p.communicate()[0].split('\n') if 'pbs' in line]
        os.chdir('../')
    os.chdir('../')
    if len(ligands_to_remove) > 0:
        f.write('INFO: Removing ligands which were not fragmentable or preparable by RDKit.\n')
        #Remove ligands which were not fragmentable or preparable by RDKit
        for lig in ligands_to_remove:
            del ligands[lig]
            shutil.rmtree('Ligands/' + lig + '/')
            lig_folders = glob.glob("CYS*/" + lig + "/")
            for l_fol in lig_folders:
                shutil.rmtree(l_fol)
            f.write('INFO: Ligand ' + lig + ' was removed.\n')
    if len(ligands) == 0:
        f.write('ERROR: All ligands with nearby cysteines are smaller than 300D or unpreparable by RDKit. Finish the run.\n')
        f.close()
        return
    # One working directory per cysteine/ligand/rotamer combination.
    dirlist = glob.glob("CYS*/*/ROT*/")
    with open('dirlist', 'w') as dirf:
        for line in dirlist:
            dirf.write(line + '\n')
    f.write('INFO: Job IDs for ligand building: ' + str(lig_jobs) + '\n')
    # ---- Stage: prepare receptor structures, wait for all cluster jobs ----
    f.write('INFO: Start preparing the structures for docking.\n')
    prepare_jobs = cluster.runBatchJobs('dirlist', './prepare.sh', mem='16000mb')
    f.write('INFO: Job IDs for structure prepare: ' + str(prepare_jobs) + '\n')
    waiting_done = Cluster.Cluster.wait(prepare_jobs + lig_jobs, timeout)
    if waiting_done:
        f.write('INFO: Finished preparing both proteins and molecules for docking.\n')
    else:
        f.write('ERROR: Cluster jobs have reached time limit (12h). That could be due to a cluster occupancy, problematic input, or any other unidentified problem. To rule out the first problem, try submitting the job at some other time.\n')
        f.close()
        return
    # Remove per-ligand temporary LIB.* build folders before docking.
    LIB_folders = glob.glob('Ligands/*/LIB.*')
    for lib in LIB_folders:
        shutil.rmtree(lib)
    # ---- Stage: covalent docking for every cysteine/ligand/rotamer ----
    f.write('INFO: Start docking processes for all covalentized fragments to all cysteine rotamers.\n')
    dock_jobs = []
    for d in dirlist:
        # Directory layout is CYS*/<ligand>/ROT*/, so index 1 is the ligand.
        lig = d.split('/')[1]
        os.chdir(d)
        DOCK_Prepare.DOCK_Prepare.changeNumSave(10)
        p = Popen(['python', os.environ["COVALENTIZER"] + 'Scripts/DOCKovalentTask.py', 'CovLib', ligands[lig], 'True'], stdin=PIPE, stdout=PIPE, stderr=PIPE)
        dock_jobs += [line for line in p.communicate()[0].split('\n') if 'pbs' in line]
        os.chdir(curr_dir)
    f.write('INFO: Job IDs for covalent docking: ' + str(dock_jobs) + '\n')
    waiting_done = Cluster.Cluster.wait(dock_jobs, timeout)
    if waiting_done:
        # NOTE(review): this message duplicates the "preparing" stage text even
        # though it follows the docking wait — likely copy-paste; confirm.
        f.write('INFO: Finished preparing both proteins and molecules for docking.\n')
    else:
        f.write('ERROR: Cluster jobs have reached time limit (12h). That could be due to a cluster occupancy, problematic input, or any other unidentified problem. To rule out the first problem, try submitting the job at some other time.\n')
        f.close()
        return
    f.write('INFO: Finished docking.\n')
    # ---- Stage: analyze docking results on the cluster ----
    f.write('INFO: Start analyzing the docking results.\n')
    ##
    reslist = glob.glob("CYS*/*/ROT*/CovLib/")
    with open('reslist', 'w') as dirf:
        for line in reslist:
            dirf.write(line + '\n')
    results_jobs = cluster.runBatchJobs('reslist', os.environ["COVALENTIZER"] + 'Scripts/Final_Scripts/combine_cluster.sh', mem='32000mb')
    f.write('INFO: Job IDs for analyzing the results: ' + str(results_jobs) + '\n')
    waiting_done = Cluster.Cluster.wait(results_jobs, timeout)
    if waiting_done:
        f.write('INFO: Finished analyzing docking results.\n')
    else:
        f.write('ERROR: Cluster jobs have reached time limit (12h). That could be due to a cluster occupancy, problematic input, or any other unidentified problem. To rule out the first problem, try submitting the job at some other time.\n')
        f.close()
        return
    ##
    #os.system(os.environ["SCRIPTS"] + '/Covalentizer/Final_Scripts/combine.sh')
    # ---- Stage: gather web_files outputs into Results/CYS<pos>_... ----
    os.mkdir('Results')
    are_results = False
    for d in dirlist:
        web_folder = d + '/CovLib/web_files/'
        if os.path.isdir(web_folder):
            with open(web_folder + 'cys_position.txt', 'r') as cys_file:
                cys = cys_file.readline().split()[0]
            are_results = True
            name_list = d.split('/')[:3]
            name_list[0] = 'CYS' + cys
            new_folder = 'Results/' + '_'.join(name_list) + '/'
            #new_folder = 'Results/' + d.replace('/', '_')[:-1] + '/'
            shutil.move(web_folder, new_folder)
    if are_results:
        f.write('INFO: Moved results into the Results folder.\n')
    else:
        f.write('INFO: No results have been found with RMSD below 1.5A\n')
    f.write('INFO: Covalentizer has finished.\n')
    f.close()
def print_usage(name):
    """Print command-line usage for the script (Python 2 print statement)."""
    print "Usage : " + name + " <pdb_file> <lig_name>"
if __name__ == "__main__":
    # argv[0] (the script name) is used in the usage text; the remaining
    # arguments are expected to be the PDB file path and the ligand name.
    main(sys.argv[0], sys.argv[1:])
| StarcoderdataPython |
8056297 | <filename>ch13/myproject_virtualenv/src/django-myproject/myproject/apps/locations/__init__.py
# Legacy Django hook (pre-3.2): dotted path to this app's AppConfig subclass.
default_app_config = "myproject.apps.locations.apps.LocationsAppConfig"
1810149 | """Test initialization of VoQ objects, switch, system ports, router interfaces, neighbors, inband port."""
import json
import logging
import pytest
from tests.common.helpers.assertions import pytest_assert
from tests.common.helpers.sonic_db import AsicDbCli, VoqDbCli
from voq_helpers import check_voq_remote_neighbor, get_sonic_mac
from voq_helpers import check_local_neighbor_asicdb, get_device_system_ports, get_inband_info
from voq_helpers import check_rif_on_sup, check_voq_neighbor_on_sup
from voq_helpers import dump_and_verify_neighbors_on_asic
logger = logging.getLogger(__name__)
pytestmark = [
pytest.mark.topology('t2')
]
class TestVoqSwitch(object):
    # Accumulates the VOQ switch_id of every DUT/ASIC already verified; shared
    # across all parametrized invocations of test_voq_switch_create so that a
    # duplicate switch ID anywhere in the chassis fails the test.
    SWITCH_ID_LIST = []

    def test_voq_switch_create(self, duthosts, enum_frontend_dut_hostname, enum_asic_index, all_cfg_facts):
        """Compare the config facts with the asic db for switch:

        * Verify ASIC_DB get all system ports referenced in configDB created on all hosts and ASICs.
        * Verify object creation and values of port attributes.
        """
        per_host = duthosts[enum_frontend_dut_hostname]
        # enum_asic_index is None on single-asic platforms; default to ASIC 0.
        asic = per_host.asics[enum_asic_index if enum_asic_index is not None else 0]
        cfg_facts = all_cfg_facts[per_host.hostname][asic.asic_index]['ansible_facts']
        dev_facts = cfg_facts['DEVICE_METADATA']['localhost']
        asicdb = AsicDbCli(asic)
        switchkey = asicdb.get_switch_key()
        logger.info("Checking switch %s", switchkey)
        # config_db DEVICE_METADATA field -> SAI switch attribute it must match.
        check_list = {
            "max_cores": "SAI_SWITCH_ATTR_MAX_SYSTEM_CORES",
            "switch_id": "SAI_SWITCH_ATTR_SWITCH_ID"}
        for k in check_list:
            asicdb.get_and_check_key_value(switchkey, dev_facts[k], field=check_list[k])
        # Each ASIC in the chassis must use a unique VOQ switch ID.
        pytest_assert(dev_facts["switch_id"] not in TestVoqSwitch.SWITCH_ID_LIST,
                      "Switch ID: %s has been used more than once" % dev_facts["switch_id"])
        TestVoqSwitch.SWITCH_ID_LIST.append(dev_facts["switch_id"])
        asicdb.get_and_check_key_value(switchkey, "SAI_SWITCH_TYPE_VOQ", field="SAI_SWITCH_ATTR_TYPE")
def test_voq_system_port_create(duthosts, enum_frontend_dut_hostname, enum_asic_index, all_cfg_facts):
    """Compare the config facts with the asic db for system ports

    * Verify ASIC_DB get all system ports referenced in configDB created on all hosts and ASICs.
    * Verify object creation and values of port attributes.
    """
    per_host = duthosts[enum_frontend_dut_hostname]
    # enum_asic_index is None on single-asic platforms; default to ASIC 0.
    asic = per_host.asics[enum_asic_index if enum_asic_index is not None else 0]
    cfg_facts = all_cfg_facts[per_host.hostname][asic.asic_index]['ansible_facts']
    logger.info("Checking system ports on host: %s, asic: %s", per_host.hostname, asic.asic_index)
    dev_ports = get_device_system_ports(cfg_facts)
    asicdb = AsicDbCli(asic)
    sys_port_table = asicdb.dump(asicdb.ASIC_SYSPORT_TABLE)
    keylist = sys_port_table.keys()
    # Number of system-port objects in ASIC_DB must equal the config entries.
    pytest_assert(len(keylist) == len(dev_ports.keys()),
                  "Found %d system port keys, %d entries in cfg_facts, not matching" % (
                      len(keylist), len(dev_ports.keys())))
    logger.info("Found %d system port keys, %d entries in cfg_facts, checking each.",
                len(keylist), len(dev_ports.keys()))
    for portkey in keylist:
        try:
            port_config_info = sys_port_table[portkey]['value']['SAI_SYSTEM_PORT_ATTR_CONFIG_INFO']
        except KeyError:
            # TODO: Need to check on behavior here.
            logger.warning("System port: %s had no SAI_SYSTEM_PORT_ATTR_CONFIG_INFO", portkey)
            continue
        # CONFIG_INFO is a JSON blob embedded in the ASIC_DB value.
        port_data = json.loads(port_config_info)
        # Find the config entry with the same system port id and compare fields.
        for cfg_port in dev_ports:
            if dev_ports[cfg_port]['system_port_id'] == port_data['port_id']:
                # "switch_id": "0",
                # "core_index": "1",
                # "core_port_index": "6",
                # "speed": "400000"
                # NOTE(review): all four assertion messages below say "switch
                # IDs" even for core/speed checks — message-only copy-paste.
                pytest_assert(dev_ports[cfg_port]['switch_id'] == port_data[
                    'attached_switch_id'], "switch IDs do not match for port: %s" % portkey)
                pytest_assert(dev_ports[cfg_port]['core_index'] == port_data[
                    'attached_core_index'], "switch IDs do not match for port: %s" % portkey)
                pytest_assert(dev_ports[cfg_port]['core_port_index'] == port_data[
                    'attached_core_port_index'], "switch IDs do not match for port: %s" % portkey)
                pytest_assert(dev_ports[cfg_port]['speed'] == port_data[
                    'speed'], "switch IDs do not match for port: %s" % portkey)
                break
        else:
            # for/else: runs only when no config entry matched this ASIC_DB key.
            logger.error("Could not find config entry for portkey: %s" % portkey)
    logger.info("Host: %s, Asic: %s all ports match all parameters", per_host.hostname, asic.asic_index)
def test_voq_local_port_create(duthosts, enum_frontend_dut_hostname, enum_asic_index, all_cfg_facts):
    """Compare the config facts with the asic db for local ports

    * Verify ASIC_DB has host interface information for all local ports on all cards and ASICs.
    * Verify host interfaces exist on host CLI (ifconfig).
    * Verify interfaces exist in show interfaces on the linecard.
    """
    per_host = duthosts[enum_frontend_dut_hostname]
    # enum_asic_index is None on single-asic platforms; default to ASIC 0.
    asic = per_host.asics[enum_asic_index if enum_asic_index is not None else 0]
    cfg_facts = all_cfg_facts[per_host.hostname][asic.asic_index]['ansible_facts']
    dev_ports = cfg_facts['PORT']
    asicdb = AsicDbCli(asic)
    hostif_table = asicdb.get_hostif_table(refresh=True)
    keylist = hostif_table.keys()
    # One ASIC_DB host interface per configured port.
    pytest_assert(len(keylist) == len(dev_ports.keys()),
                  "Found %d hostif keys, %d entries in cfg_facts" % (len(keylist), len(dev_ports.keys())))
    logger.info("Found %s ports to check on host:%s, asic: %s.", len(dev_ports.keys()), per_host.hostname,
                asic.asic_index)
    show_intf = asic.show_interface(command="status", include_internal_intfs=True)['ansible_facts']
    for portkey in keylist:
        portkey = portkey.decode('unicode-escape')  # need to handle the hyphen in the inband port name
        port_name = hostif_table[portkey]['value']["SAI_HOSTIF_ATTR_NAME"].decode('unicode-escape')
        port_state = hostif_table[portkey]['value']["SAI_HOSTIF_ATTR_OPER_STATUS"]
        port_type = hostif_table[portkey]['value']["SAI_HOSTIF_ATTR_TYPE"]
        logger.info("Checking port: %s, state: %s", port_name, port_state)
        # Example ASIC_DB hostif entry:
        # "SAI_HOSTIF_ATTR_NAME": "Ethernet0",
        # "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x1000000000002",
        # "SAI_HOSTIF_ATTR_OPER_STATUS": "false",
        # "SAI_HOSTIF_ATTR_TYPE": "SAI_HOSTIF_TYPE_NETDEV"
        pytest_assert(port_type == "SAI_HOSTIF_TYPE_NETDEV", "Port %s is not type netdev" % portkey)
        # ASIC_DB oper status must agree with "show interface status".
        if port_state == "true":
            pytest_assert(show_intf['int_status'][port_name]['oper_state'] == "up",
                          "Show interface state is down when it should be up")
        if port_state == "false":
            pytest_assert(show_intf['int_status'][port_name]['oper_state'] == "down",
                          "Show interface state is up when it should be down")
        # The kernel netdev lives inside the ASIC's network namespace (if any).
        if asic.namespace is None:
            cmd = "sudo ifconfig %s" % port_name
        else:
            cmd = "sudo ip netns exec %s ifconfig %s" % (asic.namespace, port_name)
        ifout = per_host.command(cmd)
        assert "not found" not in ifout['stdout_lines'][0], "Interface %s not found" % port_name
        if port_state == "true" and "RUNNING" in ifout['stdout_lines'][0]:
            logger.debug("Interface state is up and matches")
        elif port_state == "false" and "RUNNING" not in ifout['stdout_lines'][0]:
            logger.debug("Interface state is down and matches")
        else:
            # BUG FIX: the original passed the format arguments as extra
            # AssertionError args ("...%s %s", a, b), so the message was never
            # interpolated and rendered as a tuple.  Format it explicitly.
            raise AssertionError("Interface state does not match: %s %s"
                                 % (port_state, ifout['stdout_lines'][0]))
def check_voq_interfaces(duthosts, per_host, asic, cfg_facts):
    """
    Checks router interfaces on a dut.

    Verifies every port-type router interface in ASIC_DB against config_db
    (MTU, MAC) and against the supervisor's Chassis App DB, handling four
    port flavors: local host interfaces, remote system ports, the inband
    port, and system LAGs.  Finally verifies every configured INTERFACE has
    a matching local RIF in ASIC_DB.

    Args:
        duthosts: The duthosts fixture
        per_host: Instance of MultiAsicSonic host to check.
        asic: Instance of SonicAsic to check,
        cfg_facts: Config facts for the frontend duthost/asic under test

    """
    logger.info("Check router interfaces on node: %s, asic: %d", per_host.hostname, asic.asic_index)
    dev_intfs = cfg_facts.get('INTERFACE', {})
    voq_intfs = cfg_facts.get('VOQ_INBAND_INTERFACE', [])
    dev_sysports = get_device_system_ports(cfg_facts)
    # Collects local port names found with a RIF; compared to config at the end.
    rif_ports_in_asicdb = []
    # intf_list = get_router_interface_list(dev_intfs)
    asicdb = AsicDbCli(asic)
    asicdb_rif_table = asicdb.dump(asicdb.ASIC_ROUTERINTF_TABLE)
    sys_port_table = asicdb.dump(asicdb.ASIC_SYSPORT_TABLE)
    asicdb_lag_table = asicdb.dump(asicdb.ASIC_LAG_TABLE + ":")
    # The chassis-wide VoQ DB lives on the supervisor when one exists,
    # otherwise on the (multi-asic) host itself.
    if per_host.is_multi_asic and len(duthosts.supervisor_nodes) == 0:
        voqdb = VoqDbCli(per_host)
    else:
        voqdb = VoqDbCli(duthosts.supervisor_nodes[0])
    systemlagtable = voqdb.dump("SYSTEM_LAG_ID_TABLE")
    systemintftable = voqdb.dump("SYSTEM_INTERFACE")
    # asicdb_intf_key_list = asicdb.get_router_if_list()
    # Check each rif in the asicdb, if it is local port, check VOQ DB for correct RIF.
    # If it is on system port, verify slot/asic/port and OID match a RIF in VoQDB
    for rif in asicdb_rif_table.keys():
        rif_type = asicdb_rif_table[rif]['value']["SAI_ROUTER_INTERFACE_ATTR_TYPE"]
        if rif_type != "SAI_ROUTER_INTERFACE_TYPE_PORT":
            logger.info("Skip this rif: %s, it is not on a port: %s", rif, rif_type)
            continue
        else:
            portid = asicdb_rif_table[rif]['value']["SAI_ROUTER_INTERFACE_ATTR_PORT_ID"]
            logger.info("Process RIF %s, Find port with ID: %s", rif, portid)
        porttype = asicdb.get_rif_porttype(portid)
        logger.info("RIF: %s is of type: %s", rif, porttype)
        if porttype == 'hostif':
            # find the hostif entry to get the physical port the router interface is on.
            hostifkey = asicdb.find_hostif_by_portid(portid)
            hostif = asicdb.get_hostif_table(refresh=False)[hostifkey]['value']['SAI_HOSTIF_ATTR_NAME'].decode('unicode-escape')
            logger.info("RIF: %s is on local port: %s", rif, hostif)
            rif_ports_in_asicdb.append(hostif)
            if hostif not in dev_intfs and hostif not in voq_intfs:
                pytest.fail("Port: %s has a router interface, but it isn't in configdb." % portid)
            # check MTU and ethernet address
            pytest_assert(asicdb_rif_table[rif]['value']["SAI_ROUTER_INTERFACE_ATTR_MTU"] == cfg_facts['PORT'][hostif]['mtu'],
                          "MTU for rif %s is not %s" % (rif, cfg_facts['PORT'][hostif]['mtu']))
            intf_mac = get_sonic_mac(per_host, asic.asic_index, hostif)
            pytest_assert(asicdb_rif_table[rif]['value']["SAI_ROUTER_INTERFACE_ATTR_SRC_MAC_ADDRESS"].lower() == intf_mac.lower(),
                          "MAC for rif %s is not %s" % (rif, intf_mac))
            sysport_info = {'slot': cfg_facts['DEVICE_METADATA']['localhost']['hostname'],
                            'asic': cfg_facts['DEVICE_METADATA']['localhost']['asic_name']}
            check_rif_on_sup(systemintftable, sysport_info['slot'], sysport_info['asic'], hostif)
        elif porttype == 'sysport':
            try:
                port_output = sys_port_table["ASIC_STATE:SAI_OBJECT_TYPE_SYSTEM_PORT:" + portid]['value']['SAI_SYSTEM_PORT_ATTR_CONFIG_INFO']
            except KeyError:
                # not a hostif or system port, log error and continue
                logger.error("Did not find OID %s in local or system tables" % portid)
                continue
            port_data = json.loads(port_output)
            # Locate the config entry whose system port id matches this OID.
            for cfg_port in dev_sysports:
                if dev_sysports[cfg_port]['system_port_id'] == port_data['port_id']:
                    logger.info("RIF: %s is on remote port: %s", rif, cfg_port)
                    break
            else:
                raise AssertionError("Did not find OID %s in local or system tables" % portid)
            # cfg_port keys are "slot|asic|port"; relies on the loop variable
            # surviving the for/else above.
            sys_slot, sys_asic, sys_port = cfg_port.split("|")
            check_rif_on_sup(systemintftable, sys_slot, sys_asic, sys_port)
        elif porttype == 'port':
            # this is the RIF on the inband port.
            inband = get_inband_info(cfg_facts)
            logger.info("RIF: %s is on local port: %s", rif, inband['port'])
            # check MTU and ethernet address
            pytest_assert(asicdb_rif_table[rif]['value']["SAI_ROUTER_INTERFACE_ATTR_MTU"] == cfg_facts['PORT'][inband['port']]['mtu'],
                          "MTU for rif %s is not %s" % (rif, cfg_facts['PORT'][inband['port']]['mtu']))
            intf_mac = get_sonic_mac(per_host, asic.asic_index, inband['port'])
            pytest_assert(asicdb_rif_table[rif]['value']["SAI_ROUTER_INTERFACE_ATTR_SRC_MAC_ADDRESS"].lower() == intf_mac.lower(),
                          "MAC for rif %s is not %s" % (rif, intf_mac))
            sysport_info = {'slot': cfg_facts['DEVICE_METADATA']['localhost']['hostname'],
                            'asic': cfg_facts['DEVICE_METADATA']['localhost']['asic_name']}
            check_rif_on_sup(systemintftable, sysport_info['slot'], sysport_info['asic'], inband['port'])
            # TODO: Could be on a LAG
        elif porttype == 'lag':
            #lagid = asicdb.hget_key_value("%s:%s" % (AsicDbCli.ASIC_LAG_TABLE, portid), 'SAI_LAG_ATTR_SYSTEM_PORT_AGGREGATE_ID')
            lagid = asicdb_lag_table["%s:%s" % (AsicDbCli.ASIC_LAG_TABLE, portid)]['value']['SAI_LAG_ATTR_SYSTEM_PORT_AGGREGATE_ID']
            logger.info("RIF: %s is on system LAG: %s", rif, lagid)
            # NOTE(review): .iteritems() is Python 2 only; `lag` is used after
            # the loop via the leaked loop variable — confirm Py3 migration plan.
            for lag, sysid in systemlagtable['SYSTEM_LAG_ID_TABLE']['value'].iteritems():
                if sysid == lagid:
                    logger.info("System LAG ID %s is portchannel: %s", lagid, lag)
                    break
            myslot = cfg_facts['DEVICE_METADATA']['localhost']['hostname']
            myasic = cfg_facts['DEVICE_METADATA']['localhost']['asic_name']
            if lag.startswith("%s|%s" % (myslot, myasic)):
                logger.info("Lag: %s is a local portchannel with a router interface.", lag)
                (s, a, lagname) = lag.split("|")
                pytest_assert(lagname in cfg_facts['PORTCHANNEL_INTERFACE'], "RIF Interface %s is in configdb.json but not in asicdb" % rif)
                check_rif_on_sup(systemintftable, myslot, myasic, lagname)
            else:
                logger.info("Lag: %s is a remote portchannel with a router interface.", lag)
    # Verify each RIF in config had a corresponding local port RIF in the asicDB.
    for rif in dev_intfs:
        if rif not in rif_ports_in_asicdb:
            raise AssertionError("Interface %s is in configdb.json but not in asicdb" % rif)
    logger.info("Interfaces %s are present in configdb.json and asicdb" % str(dev_intfs.keys()))
def test_voq_interface_create(duthosts, enum_frontend_dut_hostname, enum_asic_index, all_cfg_facts):
    """
    Verify router interfaces are created on all line cards and present in Chassis App Db.

    Resolves the DUT/ASIC under test plus its config facts and delegates all
    ASIC_DB / Chassis App DB router-interface verification (local ports,
    system ports, inband, LAGs; MTU, MAC, IPv4/IPv6) to check_voq_interfaces.
    """
    duthost = duthosts[enum_frontend_dut_hostname]
    asic_index = enum_asic_index if enum_asic_index is not None else 0
    target_asic = duthost.asics[asic_index]
    facts = all_cfg_facts[duthost.hostname][target_asic.asic_index]['ansible_facts']
    check_voq_interfaces(duthosts, duthost, target_asic, facts)
def test_voq_neighbor_create(duthosts, enum_frontend_dut_hostname, enum_asic_index, nbrhosts,
                             all_cfg_facts, nbr_macs):
    """
    Verify neighbor entries are created on linecards for local and remote VMs.

    Local neighbors: ARP/NDP resolve once BGP to adjacent VMs is up, and the
    MAC, router-interface OID, encap index, app DB, kernel tables and the
    supervisor's SYSTEM_NEIGH table all agree (IPv4, IPv6 and dual-stack).

    Remote neighbors: every other linecard mirrors the entry — impose
    index=True in ASIC DB, remote neighbor MAC, encap index matching Chassis
    App DB, correct RIF, kernel /32 (or /128) static route, and the inband
    port/vlan MAC convention in the kernel and linecard app DB.

    All of the per-neighbor verification is delegated to
    dump_and_verify_neighbors_on_asic.
    """
    duthost = duthosts[enum_frontend_dut_hostname]
    target_asic = duthost.asics[enum_asic_index if enum_asic_index is not None else 0]
    facts = all_cfg_facts[duthost.hostname][target_asic.asic_index]['ansible_facts']
    logger.info("Checking local neighbors on host: %s, asic: %s", duthost.hostname, target_asic.asic_index)
    # Guard clause: nothing to verify when this ASIC has no BGP neighbors.
    if 'BGP_NEIGHBOR' not in facts:
        logger.info("No local neighbors for host: %s/%s, skipping", duthost.hostname, target_asic.asic_index)
        return
    neighs = facts['BGP_NEIGHBOR']
    dump_and_verify_neighbors_on_asic(duthosts, duthost, target_asic, neighs, nbrhosts, all_cfg_facts, nbr_macs)
def test_voq_inband_port_create(duthosts, enum_frontend_dut_hostname, enum_asic_index, all_cfg_facts):
    """
    Test inband port creation.

    These steps are covered by previous test cases:
    * On each linecard, verify inband ports are present in ASICDB.
    * On each linecard, verify inband router interfaces are present in ASICDB
    * On supervisor card, verify inband router interfaces are present in Chassis App DB

    This test function will cover:
    * On each linecard, verify permanent neighbors for all inband ports.
    * On each linecard, verify kernel routes for all inband ports.
    * Repeat with IPv4, IPv6, dual-stack.
    """
    per_host = duthosts[enum_frontend_dut_hostname]
    # enum_asic_index is None on single-asic platforms; default to ASIC 0.
    asic = per_host.asics[enum_asic_index if enum_asic_index is not None else 0]
    cfg_facts = all_cfg_facts[per_host.hostname][asic.asic_index]['ansible_facts']
    inband_info = get_inband_info(cfg_facts)
    if inband_info == {}:
        logger.info("No inband configuration on this ASIC: %s/%s, skipping", per_host.hostname, asic.asic_index)
        return
    inband_mac = get_sonic_mac(per_host, asic.asic_index, inband_info['port'])
    # Collect whichever address families are configured on the inband port.
    inband_ips = []
    if 'ipv6_addr' in inband_info:
        inband_ips.append(inband_info['ipv6_addr'])
    if 'ipv4_addr' in inband_info:
        inband_ips.append(inband_info['ipv4_addr'])
    for neighbor_ip in inband_ips:
        host = per_host
        neighbor_mac = inband_mac
        interface = inband_info['port']
        logger.info("Check local neighbor on host %s, asic %s for %s/%s via port: %s", host.hostname,
                    str(asic.asic_index),
                    neighbor_ip, neighbor_mac, interface)
        # Local ASIC_DB must carry the permanent neighbor; keep its encap
        # index to cross-check the supervisor and remote linecards.
        asic_dict = check_local_neighbor_asicdb(asic, neighbor_ip, neighbor_mac)
        encap_idx = asic_dict['encap_index']
        sysport_info = {'slot': cfg_facts['DEVICE_METADATA']['localhost']['hostname'],
                        'asic': cfg_facts['DEVICE_METADATA']['localhost']['asic_name']}
        for sup in duthosts.supervisor_nodes:
            check_voq_neighbor_on_sup(sup, sysport_info['slot'], sysport_info['asic'], interface, neighbor_ip,
                                      encap_idx, inband_mac)
        # Check the neighbor entry on each remote linecard
        for rem_host in duthosts.frontend_nodes:
            for rem_asic in rem_host.asics:
                if rem_host == per_host and rem_asic == asic:
                    # skip remote check on local host
                    continue
                rem_cfg_facts = all_cfg_facts[rem_host.hostname][rem_asic.asic_index]['ansible_facts']
                remote_inband_info = get_inband_info(rem_cfg_facts)
                if remote_inband_info == {}:
                    logger.info("No inband configuration on this asic: %s/%s, will be skipped.", rem_host.hostname,
                                rem_asic.asic_index)
                    continue
                remote_inband_mac = get_sonic_mac(rem_host, rem_asic.asic_index, remote_inband_info['port'])
                check_voq_remote_neighbor(rem_host, rem_asic, neighbor_ip, inband_mac,
                                          remote_inband_info['port'],
                                          encap_idx, remote_inband_mac)
3546580 | <reponame>ArgiesDario/exif
"""Test deleting EXIF attributes."""
import os
import textwrap
import unittest
from exif import Image
from exif.tests.delete_exif_baselines import (
DELETE_ASCII_TAGS_HEX_BASELINE, DELETE_GEOTAG_HEX_BASELINE)
# pylint: disable=pointless-statement, protected-access
class TestModifyExif(unittest.TestCase):
    """Test cases for deleting EXIF attributes."""
    # NOTE(review): every test here exercises *deletion*, while the class name
    # says "Modify" — looks copied from a sibling module. Renaming would change
    # test discovery, so it is only flagged, not changed.

    def setUp(self):
        """Open sample image file in binary mode for use in test cases."""
        grand_canyon = os.path.join(os.path.dirname(__file__), 'grand_canyon.jpg')
        with open(grand_canyon, 'rb') as image_file:
            self.image = Image(image_file)
        assert self.image.has_exif

    def test_delete_ascii_tags(self):
        """Verify deleting EXIF ASCII from the Image object and the hexadecimal equivalent."""
        del self.image.make
        del self.image.model
        with self.assertRaisesRegex(AttributeError, "image does not have attribute make"):
            self.image.make
        with self.assertRaisesRegex(AttributeError, "image does not have attribute model"):
            self.image.model
        # Compare the serialized APP1 segment against a stored hex baseline,
        # wrapped at 90 chars per line to match the baseline's formatting.
        segment_hex = self.image._segments['APP1'].get_segment_hex()
        self.assertEqual('\n'.join(textwrap.wrap(segment_hex, 90)), DELETE_ASCII_TAGS_HEX_BASELINE)

    def test_delete_gps_tags(self):
        """Verify deleting EXIF geotags from the Image object and the hexadecimal equivalent."""
        del self.image.gps_latitude
        del self.image.gps_longitude
        del self.image.gps_altitude
        with self.assertRaisesRegex(AttributeError, "image does not have attribute gps_latitude"):
            self.image.gps_latitude
        with self.assertRaisesRegex(AttributeError, "image does not have attribute gps_longitude"):
            self.image.gps_longitude
        with self.assertRaisesRegex(AttributeError, "image does not have attribute gps_altitude"):
            self.image.gps_altitude
        # Hex baseline comparison as in test_delete_ascii_tags.
        segment_hex = self.image._segments['APP1'].get_segment_hex()
        self.assertEqual('\n'.join(textwrap.wrap(segment_hex, 90)), DELETE_GEOTAG_HEX_BASELINE)

    def test_delete_method(self):
        """Test behavior when setting tags using the ``delete()`` method."""
        self.image.delete("model")
        with self.assertRaisesRegex(AttributeError, "image does not have attribute model"):
            self.image.model

    def test_handle_unset_attribute(self):
        """Verify that accessing an attribute not present in an image raises an AttributeError."""
        with self.assertRaisesRegex(AttributeError, "image does not have attribute light_source"):
            del self.image.light_source

    def test_index_deleter(self):
        """Test deleting attributes using index syntax."""
        del self.image["model"]
        with self.assertRaisesRegex(AttributeError, "image does not have attribute model"):
            self.image.model

    def test_standard_delete(self):
        """Verify that writing and deleting non-EXIF attributes behave normally."""
        # Attributes that are not EXIF tags should follow normal Python
        # attribute semantics on the Image wrapper.
        self.image.dummy_attr = 123
        assert self.image.dummy_attr == 123
        del self.image.dummy_attr
        with self.assertRaisesRegex(AttributeError, "unknown image attribute dummy_attr"):
            self.image.dummy_attr
| StarcoderdataPython |
6407134 | <reponame>sturzl/guet
from unittest import TestCase
from unittest.mock import Mock
from guet.committers.committer import Committer
from guet.commands.scriptcommands.commitmsg.commitmsg_strategy import CommitMsgStrategy
from guet.context.context import Context
class TestCommitMsgStrategy(TestCase):
    """Tests for CommitMsgStrategy, which rewrites the commit message to
    carry Co-authored-by trailers for the current committers."""

    @staticmethod
    def _mock_context(committers, commit_msg):
        """Return a mock Context reporting *committers* and holding *commit_msg*."""
        ctx: Context = Mock()
        ctx.committers = Mock()
        ctx.committers.current.return_value = committers
        ctx.git = Mock()
        ctx.git.commit_msg = commit_msg
        return ctx

    def test_appends_current_committer_coauthored_lines_to_message_text(self):
        ctx = self._mock_context(
            [Committer('name1', 'email1', 'initials1'),
             Committer('name2', 'email2', 'initials2')],
            ['Commit message'])
        CommitMsgStrategy(ctx).apply()
        self.assertEqual(ctx.git.commit_msg, [
            'Commit message',
            '\n',
            'Co-authored-by: name1 <email1>',
            'Co-authored-by: name2 <email2>'
        ])

    def test_replace_already_present_co_authored_messages(self):
        ctx = self._mock_context(
            [Committer('name3', 'email3', 'initials3'),
             Committer('name4', 'email4', 'initials4')],
            ['Commit message',
             '\n',
             'Co-authored-by: name1 <email1>',
             'Co-authored-by: name2 <email2>'])
        CommitMsgStrategy(ctx).apply()
        self.assertEqual(ctx.git.commit_msg, [
            'Commit message',
            '\n',
            'Co-authored-by: name3 <email3>',
            'Co-authored-by: name4 <email4>'
        ])

    def test_doesnt_append_coauthored_lines_if_only_one_committer(self):
        ctx = self._mock_context(
            [Committer('name1', 'email1', '<PASSWORD>1')],
            ['Commit message'])
        CommitMsgStrategy(ctx).apply()
        self.assertEqual(ctx.git.commit_msg, [
            'Commit message',
        ])
| StarcoderdataPython |
6692630 | # -*- coding: utf-8 -*-
"""
meraki_sdk
This file was automatically generated for meraki by APIMATIC v2.0 ( https://apimatic.io ).
"""
class MondayModel(object):
    """Implementation of the 'Monday' model.

    The schedule object for Monday.

    Attributes:
        active (bool): Whether the schedule is active (true) or inactive
            (false) during the time specified between 'from' and 'to'.
            Defaults to true.
        mfrom (string): The time, from '00:00' to '24:00'. Must be less than
            the time specified in 'to'. Defaults to '00:00'. Only 30 minute
            increments are allowed.
        to (string): The time, from '00:00' to '24:00'. Must be greater than
            the time specified in 'from'. Defaults to '24:00'. Only 30 minute
            increments are allowed.
    """

    # Mapping from model property names to API property names. 'from' is a
    # Python keyword, hence the 'mfrom' alias on the model side.
    _names = {
        "active": 'active',
        "mfrom": 'from',
        "to": 'to'
    }

    def __init__(self, active=None, mfrom=None, to=None):
        """Constructor for the MondayModel class."""
        self.active = active
        self.mfrom = mfrom
        self.to = to

    @classmethod
    def from_dictionary(cls, dictionary):
        """Create an instance of this model from a dictionary.

        Args:
            dictionary (dictionary): A dictionary representation of the object
                as obtained from the deserialization of the server's response.
                The keys MUST match property names in the API description.

        Returns:
            object: An instance of this structure class, or None when the
            input dictionary is None.
        """
        if dictionary is None:
            return None
        # Missing keys silently become None, mirroring the constructor defaults.
        return cls(dictionary.get('active'),
                   dictionary.get('from'),
                   dictionary.get('to'))
| StarcoderdataPython |
6517705 | #!/usr/bin/env python
"""<NAME>, Strabourg Astronomical Observatory"""
import numpy as np
import matplotlib.pyplot as plt
from collections import OrderedDict as collections_OrderedDict
def ndim_rectangles_integral(
    # main args
    func,
    up_limits,
    low_limits,
    ndim,
    nsamples=10000,
    args_func = {},
    # demo plot args
    verbose=False,
    args_subplots = {'sharex':True, 'sharey':True, 'figsize':(10,10)},
    args_suptitle = {'fontsize':16},
    args_scatter_mesh = {'marker':"+", 'color':"black", 'label':"rectangular mesh"},
    args_scatter_func = {'marker':"o", 'label':"computed points"},
    args_legend = {},
    dim_labels = None
    ):
    """
    Returns the integral of a function in n-dimensions using the textbook rectangle method.
    Heavy usage of numpy functions to benefit from parallization.
    Tip: To save RAM, divide integration space into sub-spaces and integrate one at a time.
    v0.1

    Parameters
    ----------
    func : function
        A Python function or method to integrate. The function takes an array of coordinates of shape=(ndim) and/or shape=(ndim, nsamples) to be integrated as first argument.
        Other arguments can be passed using the args_func dictionary argument.
    up_limits: array_like
        Upward bounds of integrations. Expected shape = (ndim)
    low_limits: array_like
        Downward bounds of integrations. Expected shape = (ndim)
    nsamples: integer or array_like, optional
        #Samples of integrations in each dimension. Expected shape = (ndim). If an integer is given, #samples are divided between each dimension by nsamples**(1/ndim).
    args_func: dictionary, optional
        Supplementary arguments to pass to func.
    verbose: boolean, optional
        Generates a matplotlib (plt) figure of the integration space meshing and samples. This involves the computation of an histogram which is significantly computationaly intensive. Verbose=True should be used for verifications only with a low number of samples.
    args_subplots: dictionary, optional
        Supplementary arguments to pass to the plt.subplot function for pdf sample / space meshing visualisation (for verbose=True).
    args_suptitle: dictionary, optional
        Supplementary arguments to pass to the plt.suptitle function for pdf sample / space meshing visualisation (for verbose=True).
    args_scatter_mesh: dictionary, optional
        Supplementary arguments to pass to the plt.scatter function for space meshing visualisation (for verbose=True).
    args_scatter_func: dictionary, optional
        Supplementary arguments to pass to the plt.scatter function for pdf sample visualisation (for verbose=True).
    args_legend: dictionary, optional
        Supplementary arguments to pass to the plt.legend function for pdf sample / space meshing visualisation (for verbose=True).
    dim_labels = array_like, optional
        Label of each dimension for pdf sample / space meshing visualisation (for verbose=True). Expected shape = (ndim)

    Returns
    -------
    result : float
        The result of the integration.

    Example
    --------
    from scipy import stats
    import numpy as np
    import matplotlib.pyplot as plt

    dim_labels = ["x", "y", "z"]
    ndim = len(dim_labels)
    df_func = lambda x:stats.multivariate_normal.pdf(x, mean=np.zeros(ndim), cov=np.eye(ndim))
    integral = ndim_rectangles_integral (func = df_func,
                            up_limits = np.full(ndim,4),
                            low_limits = np.full(ndim,-4),
                            ndim=ndim,
                            nsamples = np.full(ndim,11),
                            verbose = True,
                            dim_labels = dim_labels,)
    print("integral = %f"%(integral))
    plt.show()
    """
    # NOTE: the dict defaults above are shared across calls (mutable default
    # arguments); they are only ever read here, never mutated, so this is safe.
    #---------------------------------------------------------------
    # supporting int as n_samples argument (plain int or numpy integer scalar)
    if isinstance(nsamples, (int, np.integer)):
        nsamples = np.full(ndim, int(nsamples**(1/ndim)))
    # checking arguments
    if not(len(up_limits)==len(low_limits)==ndim==len(nsamples)):
        raise ValueError("Shapes should be len(up_limits)=len(low_limits)=ndim")
    #---------------------------------------------------------------
    # todo: max_memory argument. automated space division
    #---------------------------------------------------------------
    # hyperrectangles edge size in each dimension
    ndx = np.array([(up_limits[dim] - low_limits[dim])/(nsamples[dim]-1) for dim in range(ndim)])
    # hyperrectangle volume
    vol = np.prod(ndx)
    # hyperrectangles centers: edges (midpoints of the mesh cells in each dim)
    ncenters = np.array([np.linspace(start=low_limits[dim]+ndx[dim]/2, stop=up_limits[dim]-ndx[dim]/2, num=nsamples[dim]-1) for dim in range(ndim)])
    del ndx
    # hyperrectangles centers: coords
    ncoords_centers = np.array(np.meshgrid(*ncenters))
    del ncenters
    ncoords_centers = ncoords_centers.reshape(ncoords_centers.shape[0],np.prod(ncoords_centers.shape[1:])) # equivalent to ncoords_centers = ncoords_centers.reshape(ndim,np.prod(nsamples-1))
    ncoords_centers = ncoords_centers.transpose()
    #---------------------------------------------------------------
    # integral computation
    try: # if func supports an array of coords (vectorized evaluation)
        mapped_func = func(ncoords_centers, **args_func)
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate;
    # any error from the vectorized call falls back to per-point evaluation.
    except Exception:
        mapped_func = np.array([func(ncoords_centers[i], **args_func) for i in range (ncoords_centers.shape[0])])
    # midpoint rule: sum of samples times the (constant) cell volume
    integral = np.sum(mapped_func)*vol
    #---------------------------------------------------------------
    #todo: error computation
    # # not sure about this...
    # mapped_err = np.abs(mapped_func-np.roll(mapped_func, 1))/2
    # err = np.sum(mapped_err)*vol
    #---------------------------------------------------------------
    # mesh plot for visualisation purposes
    if verbose:
        # meshing edges for display
        nedges = np.array([np.linspace(start=low_limits[dim], stop=up_limits[dim], num=nsamples[dim]) for dim in range(ndim)], dtype=object) # nedges.shape = (ndim, nsamples in dim)
        ncoords_edges = np.array(np.meshgrid(*nedges))
        ncoords_edges = ncoords_edges.reshape(ncoords_edges.shape[0],np.prod(ncoords_edges.shape[1:]))
        # plot
        fig, ax = plt.subplots(ndim ,ndim, **args_subplots)
        #title
        args_suptitle_default = {'t':"Mesh and func samples used. Integral = %f"%(integral)} # default title
        args_suptitle_default.update(args_suptitle)
        fig.suptitle(**args_suptitle_default)
        for i in range(ndim):
            for j in range (ndim):
                # mesh: plot
                ax[i,j].scatter(ncoords_edges[i,:], ncoords_edges[j,:], **args_scatter_mesh)
                # df sample points: cleaning supperposed values, summing prob along other dimensions
                temp_centers_ij = np.append(ncoords_centers[:,[i,j]], mapped_func.reshape(mapped_func.shape[0],1),axis=1)
                temp_centers_ij = temp_centers_ij[np.lexsort((temp_centers_ij[:,0], temp_centers_ij[:,1]))]
                unique_centers = []
                unique_prob = []
                counter = -1
                for k in range(temp_centers_ij.shape[0]):
                    # k == 0 must always open a new group: without the explicit
                    # check, row 0 is compared to row -1 (the last row), which
                    # raised an IndexError when all rows were identical.
                    if k == 0 or np.sum(temp_centers_ij[k,0:2] != temp_centers_ij[k-1,0:2]):
                        unique_prob.append(temp_centers_ij[k,2])
                        unique_centers.append(temp_centers_ij[k,0:2])
                        counter+=1
                    else:
                        unique_prob[counter]+=temp_centers_ij[k,2]
                unique_centers = np.array(unique_centers)
                unique_prob = np.array(unique_prob)
                #todo: use an image instead of points for the sampled pdf
                # df sample points: plot
                df_plot = ax[i,j].scatter(unique_centers[:,0], unique_centers[:,1], c=unique_prob, **args_scatter_func)
                plt.colorbar(df_plot, ax=ax[i,j])
                # labels
                if dim_labels is not None:
                    ax[i,j].set_xlabel(dim_labels[i])
                    ax[i,j].set_ylabel(dim_labels[j])
        # legend (deduplicated by label text)
        handles, labels = plt.gca().get_legend_handles_labels()
        by_label = collections_OrderedDict(zip(labels, handles))
        fig.legend(by_label.values(), by_label.keys(), **args_legend)
    #---------------------------------------------------------------
    return integral
## demo
# from scipy import stats
# import numpy as np
# import matplotlib.pyplot as plt
#
# dim_labels = ["x", "y", "z"]
# ndim = len(dim_labels)
# df_func = lambda x:stats.multivariate_normal.pdf(x, mean=np.zeros(ndim), cov=np.eye(ndim))
# integral = ndim_rectangles_integral (func = df_func,
# up_limits = np.full(ndim,4),
# low_limits = np.full(ndim,-4),
# ndim=ndim,
# nsamples = np.full(ndim,11),
# verbose = True,
# dim_labels = dim_labels,)
# print("integral = %f"%(integral))
# plt.show() | StarcoderdataPython |
12866222 | <reponame>VILLASframework/VILLAScontroller
from villas.controller.components.manager import Manager
from villas.controller.component import Component
class GenericManager(Manager):
    """Manager that creates and deletes components from message payloads."""

    def create(self, payload):
        """Instantiate a component from the payload's parameters and register it."""
        component = Component.from_dict(payload.get('parameters'))
        try:
            self.add_component(component)
        except KeyError:
            # add_component signals a duplicate UUID via KeyError.
            self.logger.error('A component with the UUID %s already exists',
                              component.uuid)

    def delete(self, payload):
        """Remove the component whose UUID is given in the payload's parameters."""
        uuid = payload.get('parameters').get('uuid')
        try:
            self.remove_component(self.components[uuid])
        except KeyError:
            self.logger.error('There is not component with UUID: %s', uuid)
| StarcoderdataPython |
9727680 | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pyarrow as pa
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import memory_profiler
import gc
import io
# Bytes in one mebibyte (2**20).
# NOTE(review): not referenced in this module's visible code — presumably kept
# for interactive use or external import; confirm before removing.
MEGABYTE = 1 << 20
def assert_does_not_leak(f, iterations=10, check_interval=1, tolerance=5):
    """Call *f* repeatedly and raise if process memory grows past *tolerance* MB.

    Memory is sampled (after a gc.collect()) every *check_interval* iterations
    and compared against a baseline taken before the first call.
    """
    gc.collect()
    baseline = memory_profiler.memory_usage()[0]
    for i in range(iterations):
        f()
        if i % check_interval != 0:
            continue
        gc.collect()
        diff = memory_profiler.memory_usage()[0] - baseline
        # \r keeps the progress readout on a single console line.
        print("{0}: {1}\r".format(i, diff), end="")
        if diff > tolerance:
            raise Exception("Memory increased by {0} megabytes after {1} "
                            "iterations".format(diff, i + 1))
    gc.collect()
    diff = memory_profiler.memory_usage()[0] - baseline
    print("\nMemory increased by {0} megabytes after {1} "
          "iterations".format(diff, iterations))
def test_leak1():
    """Converting a large single-column Table to pandas must not leak."""
    values = np.concatenate([np.random.randn(100000)] * 1000)
    table = pa.Table.from_arrays([pa.array(values)], ['foo'])
    assert_does_not_leak(lambda: table.to_pandas())
def test_leak2():
    """A pandas -> RecordBatch -> IPC file -> read_all round trip must not leak."""
    values = np.concatenate([np.random.randn(100000)] * 10)
    table = pa.Table.from_arrays([pa.array(values)], ['foo'])

    def roundtrip():
        batch = pa.RecordBatch.from_pandas(table.to_pandas())
        sink = io.BytesIO()
        writer = pa.RecordBatchFileWriter(sink, batch.schema)
        writer.write_batch(batch)
        writer.close()
        pa.open_file(pa.BufferReader(sink.getvalue())).read_all()

    assert_does_not_leak(roundtrip, iterations=50, tolerance=50)
def test_leak3():
    """Repeated row-group writes through ParquetWriter should stay flat in memory."""
    import pyarrow.parquet as pq
    frame = pd.DataFrame({'a{0}'.format(i): [1, 2, 3, 4]
                          for i in range(50)})
    table = pa.Table.from_pandas(frame, preserve_index=False)
    # Random suffix avoids clobbering output from a previous run.
    writer = pq.ParquetWriter('leak_test_' + tm.rands(5) + '.parquet',
                              table.schema)

    def write_group():
        writer.write_table(table, row_group_size=len(table))

    # This does not "leak" per se but we do want to have this use as little
    # memory as possible
    assert_does_not_leak(write_group, iterations=500,
                         check_interval=50, tolerance=20)
def test_ARROW_8801():
    """Regression check for ARROW-8801: tz-aware datetime conversion must not leak."""
    stamps = pd.to_datetime(np.random.randint(0, 2**32, size=2**20),
                            unit='ms', utc=True)
    table = pa.table(pd.DataFrame({'x': stamps}))
    assert_does_not_leak(lambda: table.to_pandas(split_blocks=False),
                         iterations=1000, check_interval=50, tolerance=1000)
if __name__ == '__main__':
    # Only the ARROW-8801 regression check runs when executed as a script;
    # the other leak checks are invoked manually or via a test runner.
    test_ARROW_8801()
| StarcoderdataPython |
1915228 | from django.contrib import admin
from .models import DayOfWeek, Request, RequestType, Schedule, TimeSlot
# Register the scheduling models with the default Django admin site so each
# gets auto-generated CRUD pages.
admin.site.register([DayOfWeek, Request, RequestType, Schedule, TimeSlot])
| StarcoderdataPython |
3310593 | __author__ = '<NAME>'
import unittest
from odm.errors import ValidationError
from odm import document, fields
class TestDocument(unittest.TestCase):
    """Exercises IntegerField behaviour on BaseDocument subclasses: choices,
    min/max constraints, the null/default interaction on access, deletion and
    assignment, and JSON round-tripping."""

    def _roundtrip(self, container_cls):
        """Serialize a fresh instance of *container_cls* to JSON (asserting the
        payload is a dict) and deserialize it back."""
        json_data = container_cls().to_json()
        self.assertIsInstance(json_data, dict)
        return container_cls.from_json(json_data)

    def test_choices(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(choices=[1, 2, 3])

        doc = FieldContainer()
        doc.field_integer = 1
        self.assertEqual(doc.field_integer, 1)
        # A value outside the declared choices must be rejected.
        with self.assertRaises(ValidationError):
            doc.field_integer = 987

    def test_constraints(self):
        min_value = 5
        max_value = 9876543210

        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(min_value=min_value,
                                                max_value=max_value)

        doc = FieldContainer()
        # Both bounds are inclusive, so the endpoints themselves validate.
        for value in (min_value, int((min_value + max_value) / 2), max_value):
            doc.field_integer = value
            self.assertEqual(doc.field_integer, value)
        # One past either bound must be rejected.
        for value in (min_value - 1, max_value + 1):
            with self.assertRaises(ValidationError):
                doc.field_integer = value

    def test_nullable_non_default_jsonification(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(null=True)

        self.assertIsNone(self._roundtrip(FieldContainer).field_integer)

    def test_non_nullable_non_default_jsonification(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(null=False)

        self.assertIsNone(self._roundtrip(FieldContainer).field_integer)

    def test_non_nullable_default_jsonification(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(null=False, default=123)

        # A non-nullable field falls back to its default through a round trip.
        self.assertEqual(self._roundtrip(FieldContainer).field_integer, 123)

    def test_nullable_default_jsonification(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(null=True, default=123)

        # Nullability wins over the default: the round-tripped value stays None.
        self.assertIsNone(self._roundtrip(FieldContainer).field_integer)

    def test_nullable_non_default_field(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(null=True)

        doc = FieldContainer()
        self.assertIsNone(doc.field_integer)
        del doc.field_integer
        self.assertIsNone(doc.field_integer)
        doc.field_integer = 789
        self.assertEqual(doc.field_integer, 789)
        del doc.field_integer
        self.assertIsNone(doc.field_integer)

    def test_nullable_default_field(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(null=True, default=999)

        doc = FieldContainer()
        # An unset nullable field reads as None even when a default exists ...
        self.assertIsNone(doc.field_integer)
        del doc.field_integer
        self.assertIsNone(doc.field_integer)
        doc.field_integer = 789
        self.assertEqual(doc.field_integer, 789)
        # ... but deleting an explicitly set value restores the default.
        del doc.field_integer
        self.assertEqual(doc.field_integer, 999)

    def test_non_nullable_non_default_field(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(null=False)

        doc = FieldContainer()
        self.assertIsNone(doc.field_integer)
        del doc.field_integer
        self.assertIsNone(doc.field_integer)
        doc.field_integer = 789
        self.assertEqual(doc.field_integer, 789)
        del doc.field_integer
        self.assertIsNone(doc.field_integer)

    def test_non_nullable_default_field(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(null=False, default=100)

        doc = FieldContainer()
        self.assertEqual(doc.field_integer, 100)
        del doc.field_integer
        self.assertEqual(doc.field_integer, 100)
        doc.field_integer = 789
        self.assertEqual(doc.field_integer, 789)
        del doc.field_integer
        self.assertEqual(doc.field_integer, 100)

    def test_non_nullable_non_default_assignment(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(null=False)

        doc = FieldContainer()
        self.assertIsNone(doc.field_integer)
        doc.field_integer = 101
        self.assertEqual(doc.field_integer, 101)
        # With no default to fall back on, assigning None is invalid.
        with self.assertRaises(ValidationError):
            doc.field_integer = None

    def test_non_nullable_default_assignment(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(null=False, default=987654321)

        doc = FieldContainer()
        self.assertEqual(doc.field_integer, 987654321)
        doc.field_integer = 101
        self.assertEqual(doc.field_integer, 101)
        # Assigning None to a non-nullable field snaps back to the default.
        doc.field_integer = None
        self.assertEqual(doc.field_integer, 987654321)

    def test_nullable_non_default_assignment(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(null=True)

        doc = FieldContainer()
        self.assertIsNone(doc.field_integer)
        doc.field_integer = 101
        self.assertEqual(doc.field_integer, 101)
        doc.field_integer = None
        self.assertIsNone(doc.field_integer)

    def test_nullable_default_assignment(self):
        class FieldContainer(document.BaseDocument):
            field_integer = fields.IntegerField(null=True, default=987654321)

        doc = FieldContainer()
        self.assertIsNone(doc.field_integer)
        doc.field_integer = 101
        self.assertEqual(doc.field_integer, 101)
        doc.field_integer = None
        self.assertIsNone(doc.field_integer)
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| StarcoderdataPython |
16858 | <filename>ui/staff.py<gh_stars>0
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'staff.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(800, 600)
MainWindow.setStyleSheet("QListWidget, QListView, QTreeWidget, QTreeView,QFrame {\n"
" outline: 0px;\n"
"}\n"
"/*设置左侧选项的最小最大宽度,文字颜色和背景颜色*/\n"
"QListWidget {\n"
" min-width: 200px;\n"
" max-width: 200px;\n"
" color: white;\n"
" background-color:#2f4050\n"
"}\n"
"#head\n"
"{\n"
"background:white;\n"
"border-radius:30px;\n"
"}\n"
"#head_2\n"
"{\n"
"background:#CCFFCC;\n"
"border:1px solid;\n"
"border-color:#CCFFCC;\n"
"border-radius:60px;\n"
"}\n"
"#Search\n"
"{\n"
"border-radius:5px;\n"
"background:#293846;\n"
"border:0.5px solid;\n"
"border-color:white;\n"
"\n"
"}\n"
"QListWidget::item\n"
"{\n"
"height:60;\n"
"background-color:#293846;\n"
"}\n"
"#frame\n"
"{\n"
"background-color:#2f4050\n"
"\n"
"}\n"
"/*被选中时的背景颜色和左边框颜色*/\n"
"QListWidget::item:selected {\n"
" background: rgb(52, 52, 52);\n"
" border-left: 2px solid rgb(9, 187, 7);\n"
"}\n"
"/*鼠标悬停颜色*/\n"
"HistoryPanel::item:hover {\n"
" background: rgb(52, 52, 52);\n"
"}\n"
"/*右侧的层叠窗口的背景颜色*/\n"
"QStackedWidget {\n"
" background: white;\n"
"}\n"
"/*模拟的页面*/\n"
"#frame > QLabel\n"
"{\n"
"color:white;\n"
"}\n"
"#frame_2\n"
"{\n"
"background-color:#CCFFCC;\n"
"}\n"
"#page_2 > QLineEdit,QDateEdit\n"
"{\n"
"border-radius:5px;\n"
"background:#FFFFFF;\n"
"border:1px solid;\n"
"border-color:#6699CC;\n"
"}\n"
"#page_4 > QLineEdit\n"
"{\n"
"border-radius:5px;\n"
"background:#FFFFFF;\n"
"border:1px solid;\n"
"border-color:#6699CC;\n"
"}\n"
"QLineEdit\n"
"{\n"
"border-radius:5px;\n"
"background:#FFFFFF;\n"
"border:1px solid;\n"
"border-color:#6699CC;\n"
"}\n"
"\n"
"\n"
"")
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.stackedWidget = QtWidgets.QStackedWidget(self.centralwidget)
self.stackedWidget.setGeometry(QtCore.QRect(190, 0, 611, 601))
self.stackedWidget.setStyleSheet("background-color:#FFFFFF\n"
"")
self.stackedWidget.setObjectName("stackedWidget")
self.page = QtWidgets.QWidget()
self.page.setObjectName("page")
self.split = QtWidgets.QFrame(self.page)
self.split.setGeometry(QtCore.QRect(10, 210, 600, 2))
self.split.setStyleSheet("color:#CCFFCC;\n"
"border-color:#CCFFCC;\n"
"background-color:#CCFFCC")
self.split.setFrameShape(QtWidgets.QFrame.HLine)
self.split.setFrameShadow(QtWidgets.QFrame.Raised)
self.split.setObjectName("split")
self.head_2 = QtWidgets.QToolButton(self.page)
self.head_2.setGeometry(QtCore.QRect(260, 30, 121, 121))
self.head_2.setText("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("./pictures/staff3.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.head_2.setIcon(icon)
self.head_2.setIconSize(QtCore.QSize(100, 100))
self.head_2.setObjectName("head_2")
self.name = QtWidgets.QLabel(self.page)
self.name.setGeometry(QtCore.QRect(260, 160, 131, 31))
self.name.setAlignment(QtCore.Qt.AlignCenter)
self.name.setObjectName("name")
self.label = QtWidgets.QLabel(self.page)
self.label.setGeometry(QtCore.QRect(190, 240, 61, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label.setFont(font)
self.label.setObjectName("label")
self.label_3 = QtWidgets.QLabel(self.page)
self.label_3.setGeometry(QtCore.QRect(190, 290, 51, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.label_4 = QtWidgets.QLabel(self.page)
self.label_4.setGeometry(QtCore.QRect(190, 340, 71, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label_4.setFont(font)
self.label_4.setObjectName("label_4")
self.label_5 = QtWidgets.QLabel(self.page)
self.label_5.setGeometry(QtCore.QRect(190, 390, 61, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label_5.setFont(font)
self.label_5.setObjectName("label_5")
self.label_6 = QtWidgets.QLabel(self.page)
self.label_6.setGeometry(QtCore.QRect(190, 440, 71, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label_6.setFont(font)
self.label_6.setObjectName("label_6")
self.label_7 = QtWidgets.QLabel(self.page)
self.label_7.setGeometry(QtCore.QRect(190, 490, 81, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label_7.setFont(font)
self.label_7.setObjectName("label_7")
self.sname = QtWidgets.QLabel(self.page)
self.sname.setGeometry(QtCore.QRect(300, 250, 131, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.sname.setFont(font)
self.sname.setObjectName("sname")
self.ssex = QtWidgets.QLabel(self.page)
self.ssex.setGeometry(QtCore.QRect(300, 300, 81, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.ssex.setFont(font)
self.ssex.setObjectName("ssex")
self.stime = QtWidgets.QLabel(self.page)
self.stime.setGeometry(QtCore.QRect(300, 350, 91, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.stime.setFont(font)
self.stime.setObjectName("stime")
self.srole = QtWidgets.QLabel(self.page)
self.srole.setGeometry(QtCore.QRect(300, 400, 81, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.srole.setFont(font)
self.srole.setObjectName("srole")
self.sphone = QtWidgets.QLabel(self.page)
self.sphone.setGeometry(QtCore.QRect(300, 450, 141, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.sphone.setFont(font)
self.sphone.setObjectName("sphone")
self.sidcard = QtWidgets.QLabel(self.page)
self.sidcard.setGeometry(QtCore.QRect(300, 500, 181, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.sidcard.setFont(font)
self.sidcard.setObjectName("sidcard")
self.label_8 = QtWidgets.QLabel(self.page)
self.label_8.setGeometry(QtCore.QRect(190, 540, 81, 51))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.label_8.setFont(font)
self.label_8.setObjectName("label_8")
self.sidcard_2 = QtWidgets.QLabel(self.page)
self.sidcard_2.setGeometry(QtCore.QRect(300, 550, 181, 31))
font = QtGui.QFont()
font.setFamily("幼圆")
font.setPointSize(10)
self.sidcard_2.setFont(font)
self.sidcard_2.setObjectName("sidcard_2")
self.stackedWidget.addWidget(self.page)
self.page_3 = QtWidgets.QWidget()
self.page_3.setObjectName("page_3")
self.searchTable = QtWidgets.QTableWidget(self.page_3)
self.searchTable.setGeometry(QtCore.QRect(0, 240, 611, 361))
self.searchTable.setStyleSheet("")
self.searchTable.setObjectName("searchTable")
self.searchTable.setColumnCount(9)
self.searchTable.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.searchTable.setHorizontalHeaderItem(8, item)
self.frame_2 = QtWidgets.QFrame(self.page_3)
self.frame_2.setGeometry(QtCore.QRect(10, 30, 611, 211))
self.frame_2.setStyleSheet("background-color:rgb(255, 249, 246)")
self.frame_2.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_2.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_2.setObjectName("frame_2")
self.searchName = QtWidgets.QLineEdit(self.frame_2)
self.searchName.setGeometry(QtCore.QRect(170, 40, 181, 41))
self.searchName.setStyleSheet("border-radius:10px;\n"
"background:#FFFFFF;\n"
"border:1px solid;\n"
"border-color:#CCCCFF;\n"
"")
self.searchName.setObjectName("searchName")
self.searchNB = QtWidgets.QToolButton(self.frame_2)
self.searchNB.setGeometry(QtCore.QRect(370, 40, 101, 41))
self.searchNB.setStyleSheet("background-color:rgb(255, 249, 246);\n"
"border:0px;\n"
"\n"
"border-radius:5px")
self.searchNB.setText("")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap("./pictures/search.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.searchNB.setIcon(icon1)
self.searchNB.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.searchNB.setObjectName("searchNB")
self.label_74 = QtWidgets.QLabel(self.frame_2)
self.label_74.setGeometry(QtCore.QRect(310, 149, 151, 40))
self.label_74.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_74.setObjectName("label_74")
self.modifyvalue = QtWidgets.QLineEdit(self.frame_2)
self.modifyvalue.setGeometry(QtCore.QRect(430, 160, 111, 21))
self.modifyvalue.setStyleSheet("border-radius:5px")
self.modifyvalue.setText("")
self.modifyvalue.setObjectName("modifyvalue")
self.commitTableModify = QtWidgets.QPushButton(self.frame_2)
self.commitTableModify.setGeometry(QtCore.QRect(170, 155, 121, 31))
self.commitTableModify.setStyleSheet("#commitTableModify{background:#CCFFCC;\n"
"border-radius:8px}\n"
"#commitTableModify:hover\n"
"{\n"
"background:#CCFF99\n"
"}")
self.commitTableModify.setObjectName("commitTableModify")
self.label_78 = QtWidgets.QLabel(self.frame_2)
self.label_78.setGeometry(QtCore.QRect(360, 10, 231, 21))
font = QtGui.QFont()
font.setPointSize(8)
self.label_78.setFont(font)
self.label_78.setObjectName("label_78")
self.commitTableDel = QtWidgets.QPushButton(self.frame_2)
self.commitTableDel.setGeometry(QtCore.QRect(170, 110, 121, 31))
self.commitTableDel.setStyleSheet("#commitTableDel{background:#CCFFCC;\n"
"border-radius:8px}\n"
"#commitTableDel:hover\n"
"{\n"
"background:#CCFF99\n"
"}")
self.commitTableDel.setObjectName("commitTableDel")
self.split_3 = QtWidgets.QFrame(self.page_3)
self.split_3.setGeometry(QtCore.QRect(10, 30, 600, 2))
self.split_3.setStyleSheet("color:#CCFFCC;\n"
"border-color:#CCFFCC;\n"
"background-color:#CCFFCC")
self.split_3.setFrameShape(QtWidgets.QFrame.HLine)
self.split_3.setFrameShadow(QtWidgets.QFrame.Raised)
self.split_3.setObjectName("split_3")
self.toolButton_2 = QtWidgets.QToolButton(self.page_3)
self.toolButton_2.setGeometry(QtCore.QRect(20, 0, 101, 31))
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.toolButton_2.setFont(font)
self.toolButton_2.setStyleSheet("border:none")
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap("./pictures/search1.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_2.setIcon(icon2)
self.toolButton_2.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.toolButton_2.setObjectName("toolButton_2")
self.line = QtWidgets.QFrame(self.page_3)
self.line.setGeometry(QtCore.QRect(10, 230, 601, 16))
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.stackedWidget.addWidget(self.page_3)
self.page_2 = QtWidgets.QWidget()
self.page_2.setObjectName("page_2")
self.label_9 = QtWidgets.QLabel(self.page_2)
self.label_9.setGeometry(QtCore.QRect(100, 60, 101, 40))
self.label_9.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_9.setObjectName("label_9")
self.split_2 = QtWidgets.QFrame(self.page_2)
self.split_2.setGeometry(QtCore.QRect(10, 30, 600, 2))
self.split_2.setStyleSheet("color:#CCFFCC;\n"
"border-color:#CCFFCC;\n"
"background-color:#CCFFCC")
self.split_2.setFrameShape(QtWidgets.QFrame.HLine)
self.split_2.setFrameShadow(QtWidgets.QFrame.Raised)
self.split_2.setObjectName("split_2")
self.label_10 = QtWidgets.QLabel(self.page_2)
self.label_10.setGeometry(QtCore.QRect(100, 260, 101, 41))
self.label_10.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_10.setObjectName("label_10")
self.label_11 = QtWidgets.QLabel(self.page_2)
self.label_11.setGeometry(QtCore.QRect(100, 110, 101, 41))
self.label_11.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_11.setObjectName("label_11")
self.label_12 = QtWidgets.QLabel(self.page_2)
self.label_12.setGeometry(QtCore.QRect(100, 310, 101, 41))
self.label_12.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_12.setObjectName("label_12")
self.label_13 = QtWidgets.QLabel(self.page_2)
self.label_13.setGeometry(QtCore.QRect(100, 160, 101, 41))
self.label_13.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_13.setObjectName("label_13")
self.label_14 = QtWidgets.QLabel(self.page_2)
self.label_14.setGeometry(QtCore.QRect(100, 360, 101, 41))
self.label_14.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_14.setObjectName("label_14")
self.label_15 = QtWidgets.QLabel(self.page_2)
self.label_15.setGeometry(QtCore.QRect(100, 210, 101, 41))
self.label_15.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_15.setObjectName("label_15")
self.label_16 = QtWidgets.QLabel(self.page_2)
self.label_16.setGeometry(QtCore.QRect(100, 410, 101, 41))
self.label_16.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_16.setObjectName("label_16")
self.label_17 = QtWidgets.QLabel(self.page_2)
self.label_17.setGeometry(QtCore.QRect(100, 460, 101, 41))
self.label_17.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_17.setObjectName("label_17")
self.inputsid = QtWidgets.QLineEdit(self.page_2)
self.inputsid.setGeometry(QtCore.QRect(220, 70, 221, 21))
self.inputsid.setObjectName("inputsid")
self.inputname = QtWidgets.QLineEdit(self.page_2)
self.inputname.setGeometry(QtCore.QRect(220, 120, 221, 21))
self.inputname.setObjectName("inputname")
self.inputuser = QtWidgets.QLineEdit(self.page_2)
self.inputuser.setGeometry(QtCore.QRect(220, 270, 221, 21))
self.inputuser.setObjectName("inputuser")
self.inputpwd = QtWidgets.QLineEdit(self.page_2)
self.inputpwd.setGeometry(QtCore.QRect(220, 320, 221, 21))
self.inputpwd.setObjectName("inputpwd")
self.inputrole = QtWidgets.QLineEdit(self.page_2)
self.inputrole.setGeometry(QtCore.QRect(220, 370, 221, 21))
self.inputrole.setObjectName("inputrole")
self.inputidcard = QtWidgets.QLineEdit(self.page_2)
self.inputidcard.setGeometry(QtCore.QRect(220, 420, 221, 21))
self.inputidcard.setObjectName("inputidcard")
self.inputphone = QtWidgets.QLineEdit(self.page_2)
self.inputphone.setGeometry(QtCore.QRect(220, 470, 221, 21))
self.inputphone.setObjectName("inputphone")
self.toolButton_3 = QtWidgets.QToolButton(self.page_2)
self.toolButton_3.setGeometry(QtCore.QRect(20, 0, 111, 31))
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.toolButton_3.setFont(font)
self.toolButton_3.setStyleSheet("border:none\n"
"")
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap("./pictures/insert.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_3.setIcon(icon3)
self.toolButton_3.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.toolButton_3.setObjectName("toolButton_3")
self.commitAdd = QtWidgets.QPushButton(self.page_2)
self.commitAdd.setGeometry(QtCore.QRect(200, 530, 211, 31))
self.commitAdd.setStyleSheet("#commitAdd{background:#CCFFCC;\n"
"border-radius:8px}\n"
"#commitAdd:hover\n"
"{\n"
"background:#CCFF99\n"
"}")
self.commitAdd.setObjectName("commitAdd")
self.inputdate = QtWidgets.QDateEdit(self.page_2)
self.inputdate.setGeometry(QtCore.QRect(220, 220, 221, 22))
self.inputdate.setDateTime(QtCore.QDateTime(QtCore.QDate(2020, 1, 1), QtCore.QTime(0, 0, 0)))
self.inputdate.setObjectName("inputdate")
self.inputfemale = QtWidgets.QRadioButton(self.page_2)
self.inputfemale.setGeometry(QtCore.QRect(320, 170, 115, 19))
self.inputfemale.setObjectName("inputfemale")
self.inputmale = QtWidgets.QRadioButton(self.page_2)
self.inputmale.setGeometry(QtCore.QRect(220, 170, 81, 19))
self.inputmale.setObjectName("inputmale")
self.stackedWidget.addWidget(self.page_2)
self.page_4 = QtWidgets.QWidget()
self.page_4.setObjectName("page_4")
self.split_4 = QtWidgets.QFrame(self.page_4)
self.split_4.setGeometry(QtCore.QRect(10, 30, 600, 2))
self.split_4.setStyleSheet("color:#CCFFCC;\n"
"border-color:#CCFFCC;\n"
"background-color:#CCFFCC")
self.split_4.setFrameShape(QtWidgets.QFrame.HLine)
self.split_4.setFrameShadow(QtWidgets.QFrame.Raised)
self.split_4.setObjectName("split_4")
self.toolButton_4 = QtWidgets.QToolButton(self.page_4)
self.toolButton_4.setGeometry(QtCore.QRect(20, 0, 111, 31))
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.toolButton_4.setFont(font)
self.toolButton_4.setStyleSheet("border:none\n"
"")
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap("./pictures/delete.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_4.setIcon(icon4)
self.toolButton_4.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.toolButton_4.setObjectName("toolButton_4")
self.deleteTable = QtWidgets.QTableWidget(self.page_4)
self.deleteTable.setGeometry(QtCore.QRect(10, 260, 601, 341))
self.deleteTable.setStyleSheet("")
self.deleteTable.setObjectName("deleteTable")
self.deleteTable.setColumnCount(9)
self.deleteTable.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.deleteTable.setHorizontalHeaderItem(8, item)
self.desid = QtWidgets.QLineEdit(self.page_4)
self.desid.setGeometry(QtCore.QRect(250, 90, 221, 21))
self.desid.setObjectName("desid")
self.label_18 = QtWidgets.QLabel(self.page_4)
self.label_18.setGeometry(QtCore.QRect(150, 80, 91, 40))
self.label_18.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_18.setObjectName("label_18")
self.dename = QtWidgets.QLineEdit(self.page_4)
self.dename.setGeometry(QtCore.QRect(250, 130, 221, 21))
self.dename.setObjectName("dename")
self.label_19 = QtWidgets.QLabel(self.page_4)
self.label_19.setGeometry(QtCore.QRect(150, 120, 91, 41))
self.label_19.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_19.setObjectName("label_19")
self.deidcard = QtWidgets.QLineEdit(self.page_4)
self.deidcard.setGeometry(QtCore.QRect(250, 170, 221, 21))
self.deidcard.setObjectName("deidcard")
self.label_20 = QtWidgets.QLabel(self.page_4)
self.label_20.setGeometry(QtCore.QRect(150, 160, 81, 41))
self.label_20.setStyleSheet("font: 9pt \"FontAwesome\";")
self.label_20.setObjectName("label_20")
self.commitDe = QtWidgets.QPushButton(self.page_4)
self.commitDe.setGeometry(QtCore.QRect(240, 210, 93, 28))
self.commitDe.setStyleSheet("#commitDe{background:#CCFFCC;\n"
"border-radius:8px}\n"
"#commitDe:hover\n"
"{\n"
"background:#CCFF99\n"
"}")
self.commitDe.setObjectName("commitDe")
self.label_21 = QtWidgets.QLabel(self.page_4)
self.label_21.setGeometry(QtCore.QRect(210, 35, 211, 31))
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.label_21.setFont(font)
self.label_21.setObjectName("label_21")
self.stackedWidget.addWidget(self.page_4)
self.listWidget = QtWidgets.QListWidget(self.centralwidget)
self.listWidget.setGeometry(QtCore.QRect(0, 200, 204, 400))
self.listWidget.setObjectName("listWidget")
item = QtWidgets.QListWidgetItem()
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
item.setFont(font)
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap("./pictures/staff5.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item.setIcon(icon5)
self.listWidget.addItem(item)
item = QtWidgets.QListWidgetItem()
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
item.setFont(font)
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap("./pictures/staff2.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item.setIcon(icon6)
self.listWidget.addItem(item)
item = QtWidgets.QListWidgetItem()
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
item.setFont(font)
icon7 = QtGui.QIcon()
icon7.addPixmap(QtGui.QPixmap("./pictures/staff4.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item.setIcon(icon7)
self.listWidget.addItem(item)
item = QtWidgets.QListWidgetItem()
font = QtGui.QFont()
font.setFamily("FontAwesome")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
item.setFont(font)
item.setIcon(icon5)
self.listWidget.addItem(item)
self.frame = QtWidgets.QFrame(self.centralwidget)
self.frame.setGeometry(QtCore.QRect(0, 0, 204, 211))
self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
self.head = QtWidgets.QToolButton(self.frame)
self.head.setGeometry(QtCore.QRect(60, 20, 60, 60))
self.head.setText("")
self.head.setIcon(icon)
self.head.setIconSize(QtCore.QSize(60, 60))
self.head.setObjectName("head")
self.welcome = QtWidgets.QLabel(self.frame)
self.welcome.setGeometry(QtCore.QRect(30, 90, 110, 20))
self.welcome.setText("")
self.welcome.setAlignment(QtCore.Qt.AlignCenter)
self.welcome.setObjectName("welcome")
self.label_2 = QtWidgets.QLabel(self.frame)
self.label_2.setGeometry(QtCore.QRect(40, 140, 121, 16))
font = QtGui.QFont()
font.setPointSize(8)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.Search = QtWidgets.QLineEdit(self.frame)
self.Search.setGeometry(QtCore.QRect(20, 170, 145, 25))
font = QtGui.QFont()
font.setFamily("微软雅黑")
font.setPointSize(7)
self.Search.setFont(font)
self.Search.setStyleSheet("")
self.Search.setObjectName("Search")
self.toolButton = QtWidgets.QToolButton(self.frame)
self.toolButton.setGeometry(QtCore.QRect(170, 170, 21, 20))
self.toolButton.setStyleSheet("background-color:#2f4050;\n"
"border:0px;\n"
"\n"
"border-radius:5px")
self.toolButton.setText("")
self.toolButton.setIcon(icon1)
self.toolButton.setIconSize(QtCore.QSize(15, 15))
self.toolButton.setObjectName("toolButton")
self.role = QtWidgets.QLabel(self.frame)
self.role.setGeometry(QtCore.QRect(30, 120, 110, 15))
font = QtGui.QFont()
font.setPointSize(7)
self.role.setFont(font)
self.role.setText("")
self.role.setAlignment(QtCore.Qt.AlignCenter)
self.role.setObjectName("role")
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
self.stackedWidget.setCurrentIndex(1)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Assign every user-visible string of the UI (pyuic5-generated).

        All literals are routed through QCoreApplication.translate so Qt's
        translation machinery can substitute localized text; Qt calls this
        again on a locale change.  Do not edit by hand beyond comments —
        regenerate from the .ui file instead.
        """
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        # --- personal-info page: field labels and placeholder sample values ---
        self.name.setText(_translate("MainWindow", "csa "))
        self.label.setText(_translate("MainWindow", "姓名:"))
        self.label_3.setText(_translate("MainWindow", "性别:"))
        self.label_4.setText(_translate("MainWindow", "申请时间:"))
        self.label_5.setText(_translate("MainWindow", "权限:"))
        self.label_6.setText(_translate("MainWindow", "手机号:"))
        self.label_7.setText(_translate("MainWindow", "身份证号:"))
        self.sname.setText(_translate("MainWindow", "邵嘉毅"))
        self.ssex.setText(_translate("MainWindow", "男"))
        self.stime.setText(_translate("MainWindow", "2019-12-12"))
        self.srole.setText(_translate("MainWindow", "1"))
        self.sphone.setText(_translate("MainWindow", "2332121323"))
        self.sidcard.setText(_translate("MainWindow", "1111111111111111111"))
        self.label_8.setText(_translate("MainWindow", "用户号:"))
        self.sidcard_2.setText(_translate("MainWindow", "1"))
        # --- search page: result-table column headers (9 columns) ---
        item = self.searchTable.horizontalHeaderItem(0)
        item.setText(_translate("MainWindow", "用户编号"))
        item = self.searchTable.horizontalHeaderItem(1)
        item.setText(_translate("MainWindow", "姓名"))
        item = self.searchTable.horizontalHeaderItem(2)
        item.setText(_translate("MainWindow", "性别"))
        item = self.searchTable.horizontalHeaderItem(3)
        item.setText(_translate("MainWindow", "登记申请时间"))
        item = self.searchTable.horizontalHeaderItem(4)
        item.setText(_translate("MainWindow", "账户名"))
        item = self.searchTable.horizontalHeaderItem(5)
        item.setText(_translate("MainWindow", "密码"))
        item = self.searchTable.horizontalHeaderItem(6)
        item.setText(_translate("MainWindow", "权限"))
        item = self.searchTable.horizontalHeaderItem(7)
        item.setText(_translate("MainWindow", "身份证号"))
        item = self.searchTable.horizontalHeaderItem(8)
        item.setText(_translate("MainWindow", "手机号"))
        self.searchName.setPlaceholderText(_translate("MainWindow", "搜索用户姓名"))
        self.label_74.setText(_translate("MainWindow", "选中部分修改为:"))
        self.modifyvalue.setPlaceholderText(_translate("MainWindow", "修改值"))
        self.commitTableModify.setText(_translate("MainWindow", "确认修改"))
        self.label_78.setText(_translate("MainWindow", "*选中表格内可以进行修改和删除操作"))
        self.commitTableDel.setText(_translate("MainWindow", "确认删除"))
        self.toolButton_2.setText(_translate("MainWindow", "查询用户"))
        # --- add-user page: field labels and input placeholders ---
        self.label_9.setText(_translate("MainWindow", "用户编号:"))
        self.label_10.setText(_translate("MainWindow", "账户名:"))
        self.label_11.setText(_translate("MainWindow", "用户姓名:"))
        self.label_12.setText(_translate("MainWindow", "密码:"))
        self.label_13.setText(_translate("MainWindow", "用户性别:"))
        self.label_14.setText(_translate("MainWindow", "权限:"))
        self.label_15.setText(_translate("MainWindow", "登记入职时间:"))
        self.label_16.setText(_translate("MainWindow", "身份证:"))
        self.label_17.setText(_translate("MainWindow", "手机号:"))
        self.inputsid.setPlaceholderText(_translate("MainWindow", "编号"))
        self.inputname.setPlaceholderText(_translate("MainWindow", "姓名"))
        self.inputuser.setPlaceholderText(_translate("MainWindow", "账号名"))
        self.inputpwd.setPlaceholderText(_translate("MainWindow", "密码"))
        self.inputrole.setPlaceholderText(_translate("MainWindow", "权限"))
        self.inputidcard.setPlaceholderText(_translate("MainWindow", "身份证"))
        self.inputphone.setPlaceholderText(_translate("MainWindow", "手机号"))
        self.toolButton_3.setText(_translate("MainWindow", "增添用户"))
        self.commitAdd.setText(_translate("MainWindow", "确认录入"))
        self.inputfemale.setText(_translate("MainWindow", "女"))
        self.inputmale.setText(_translate("MainWindow", "男"))
        self.toolButton_4.setText(_translate("MainWindow", "删除用户"))
        # --- delete-user page: table headers mirror the search table ---
        item = self.deleteTable.horizontalHeaderItem(0)
        item.setText(_translate("MainWindow", "用户编号"))
        item = self.deleteTable.horizontalHeaderItem(1)
        item.setText(_translate("MainWindow", "姓名"))
        item = self.deleteTable.horizontalHeaderItem(2)
        item.setText(_translate("MainWindow", "性别"))
        item = self.deleteTable.horizontalHeaderItem(3)
        item.setText(_translate("MainWindow", "登记入职时间"))
        item = self.deleteTable.horizontalHeaderItem(4)
        item.setText(_translate("MainWindow", "账户名"))
        item = self.deleteTable.horizontalHeaderItem(5)
        item.setText(_translate("MainWindow", "密码"))
        item = self.deleteTable.horizontalHeaderItem(6)
        item.setText(_translate("MainWindow", "权限"))
        item = self.deleteTable.horizontalHeaderItem(7)
        item.setText(_translate("MainWindow", "身份证号"))
        item = self.deleteTable.horizontalHeaderItem(8)
        item.setText(_translate("MainWindow", "手机号"))
        self.desid.setPlaceholderText(_translate("MainWindow", "编号"))
        self.label_18.setText(_translate("MainWindow", "用户编号:"))
        self.dename.setPlaceholderText(_translate("MainWindow", "姓名"))
        self.label_19.setText(_translate("MainWindow", "用户姓名:"))
        self.deidcard.setPlaceholderText(_translate("MainWindow", "身份证"))
        self.label_20.setText(_translate("MainWindow", "身份证:"))
        self.commitDe.setText(_translate("MainWindow", "确认删除"))
        self.label_21.setText(_translate("MainWindow", "选择要删除的用户:"))
        # --- sidebar navigation list; sorting is suspended while items are renamed ---
        __sortingEnabled = self.listWidget.isSortingEnabled()
        self.listWidget.setSortingEnabled(False)
        item = self.listWidget.item(0)
        item.setText(_translate("MainWindow", "  个人信息"))
        item = self.listWidget.item(1)
        item.setText(_translate("MainWindow", "  查询用户*"))
        item = self.listWidget.item(2)
        item.setText(_translate("MainWindow", "  增添用户*"))
        item = self.listWidget.item(3)
        item.setText(_translate("MainWindow", "  删除用户*"))
        self.listWidget.setSortingEnabled(__sortingEnabled)
        self.label_2.setText(_translate("MainWindow", "*表示需要最高权限"))
        self.Search.setPlaceholderText(_translate("MainWindow", "搜索"))
| StarcoderdataPython |
3240255 | <reponame>ariolwork/uni_conditional_gradient
from scipy.optimize import linprog
import math
# от рукинаписанные нужные части numpy
from my_numpy import np, my_list
# Module-wide tolerance; read by alpha_2 as the smallest step before it gives up.
eps = 0.00001
# Class that holds a function, its derivative, and their linearised combination used for optimization
class Func:
    """Bundle an objective J(u) with its derivative J'(u).

    Attributes:
        f: callable, the objective J(u).
        fder: callable, the derivative J'(u).
    """
    def __init__(self, func, func_der):
        """Store the objective and its derivative callables."""
        self.f = func
        self.fder = func_der
    def J_k(self, u_k, u):
        """Linearisation of J at u_k, evaluated at u: <J'(u_k), u - u_k>."""
        gradient = self.fder(u_k)
        displacement = u - u_k
        return sum(gradient * displacement)
# Class that holds one kind of constraint set ("frames") for the optimisation, together with the method that minimises a linear function over that set
class Frames:
    """Feasible-set description plus the matching linear-minimisation routine.

    type 0: box constraints  a_i <= u_i <= b_i
    type 1: linear constraints  A u <= b  (simplex-style)

    NOTE: ``setframes`` and ``size`` hold plain (unbound) functions taken
    from class-level dispatch tables, so callers must pass the instance
    explicitly, e.g. ``fr.setframes(fr, a, b)`` — confirmed by how
    ``get_size`` calls ``self.size(self)``.
    """
    # ---- frame setters, one per constraint type -------------------------
    def __store_box(self, a, b):
        # lower/upper bound vectors
        self.a = a
        self.b = b
    def __store_linear(self, A, b):
        # constraint matrix and right-hand side
        self.A = A
        self.b = b
    # ---- dimension queries, one per constraint type ---------------------
    def __dim_box(self):
        # length of the bound vectors, or -1 when they disagree
        return len(self.a) if len(self.a) == len(self.b) else -1
    def __dim_linear(self):
        # number of constraint rows, or -1 when A and b disagree
        return len(self.b) if len(self.A) == len(self.b) else -1
    # dispatch tables keyed by constraint type; .get() yields None for
    # unknown types, matching the original behaviour
    __SETTERS = {0: __store_box, 1: __store_linear}
    __SIZES = {0: __dim_box, 1: __dim_linear}
    def __init__(self, type_of_conditions, minimize_func):
        self.type = type_of_conditions
        self.minimize = minimize_func
        self.setframes = self.__SETTERS.get(type_of_conditions)
        self.size = self.__SIZES.get(type_of_conditions)
    def get_size(self):
        """Dimension/constraint count of the stored frames (or -1 on mismatch)."""
        return self.size(self)
# Task class: bundles the objective, the frames, helper functions and the iteration parameters
class Job:
    """Optimisation task: objective, feasible set, start point and step rule."""
    def __init__(self, func, frames, u_0, alpha):
        self.f = func          # Func instance (objective + derivative)
        self.frames = frames   # Frames instance (feasible set + minimizer)
        self.u_0 = u_0         # starting point
        self.u_k = u_0         # most recent iterate
        self.__alpha = alpha   # step-size rule: alpha(job, u1_k) -> float
        self.k = 0             # iteration counter
    def check_errors(self):
        """Print dimensions of objective value, derivative, frames and u_0."""
        value_type = type(self.f.f(self.u_0))
        derivative_len = len(self.f.fder(self.u_0))
        frames_size = self.frames.get_size()
        print("func:", value_type, "\nframes:", frames_size, "\nder:", derivative_len, "\nu_0:", len(self.u_0), "\n")
    def get_next_u(self, u1_k):
        """Advance the counter, then step from u_k toward u1_k by alpha."""
        self.k += 1
        step_size = self.__alpha(self, u1_k)
        return self.u_k + (u1_k - self.u_k) * step_size
    def find_u1_k(self):
        """Abutting point: minimiser of the linearised objective over the frames."""
        return self.frames.minimize(self.f, self.frames, self.u_k)
# one variable function minimisation methods
class One_variable_function_minimisation_methods:
    """Line-search routines for minimising a function of one variable on [a, b]."""
    #---------------------------------------------------
    @staticmethod
    def golden_ratio_method(func, a, b, eps=0.000001):
        """Golden-section search; returns the midpoint of the final bracket."""
        ratio_low = (3 - math.sqrt(5)) / 2
        ratio_high = (math.sqrt(5) - 1) / 2
        # Shrink [a, b] by the golden ratio until it is narrower than eps.
        while b - a >= eps:
            probe_low = a + ratio_low * (b - a)
            probe_high = a + ratio_high * (b - a)
            if func(probe_low) <= func(probe_high):
                b = probe_high
            else:
                a = probe_low
        return (a + b) / 2
    #---------------------------------------------------
    @staticmethod
    def tangent_method(func, a, b, eps=0.000001):
        """Tangent method for a Func-like object exposing .f and .fder.

        Assumes the derivative changes sign on [a, b]; endpoints where the
        derivative already has the right sign are returned immediately.
        """
        while True:
            if func.fder(a) >= 0:
                return a
            if func.fder(b) <= 0:
                return b
            if abs(a - b) < eps:
                return (a + b) / 2
            # Intersection of the tangents drawn at a and b.
            crossing = (func.f(a) - func.f(b) + b * func.fder(b) - a * func.fder(a)) / (func.fder(b) - func.fder(a))
            if func.fder(crossing) < 0:
                a = crossing
            else:
                b = crossing
#testng of one variable function minimisation methods
#print(One_variable_function_minimisation_methods.bisection_method((lambda x: x*x*x-x*x), -5, 1))
#print(One_variable_function_minimisation_methods.golden_ratio_method((lambda x: x*x*x-x*x), -5, 1))
#print(One_variable_function_minimisation_methods.tangent_method(Func((lambda x: x*x), (lambda x: 2.0*x)), -1, 2))
# alpha calculate rule 1
def alpha_1(J, u1_k):
    """Step-size rule 1: the classic diminishing step 1/(k + 1)."""
    denominator = J.k + 1
    return 1.0 / denominator
# alpha calculate rule 2
def alpha_2(J, u1_k):
    """Step-size rule 2: start at 1 and halve until the step decreases J.

    Gives up and returns 0.0 once the trial step drops below the module-level
    ``eps`` without having achieved a decrease.
    """
    trial = 1.0
    while True:
        candidate = J.u_k + trial * (u1_k - J.u_k)
        if J.f.f(candidate) < J.f.f(J.u_k):
            return trial
        if trial < eps:
            return 0.0
        trial /= 2.0
# alpha calculate rule 3 (safety)
def alpha_3(J, u1_k):
    """Step-size rule 3 (safety): exact line search along u_k -> u1_k.

    Restricts J to the segment u_k + alpha*(u1_k - u_k), alpha in [0, 1],
    and minimises the restriction with the tangent method.
    """
    direction = u1_k - J.u_k
    def restricted(alpha):
        # J evaluated along the segment
        return J.f.f(J.u_k + alpha * direction)
    def restricted_der(alpha):
        # chain rule: d/dalpha J(u_k + alpha*d) = <d, J'(u_k + alpha*d)>
        return np.dot(direction, J.f.fder(J.u_k + alpha * direction))
    return One_variable_function_minimisation_methods.tangent_method(Func(restricted, restricted_der), 0, 1)
# minimization function for the box frames type (a <= x <= b)
def frames_minnimize_function(func, frames, u_k):
    """Minimise the linearised objective over box frames a_i <= u_i <= b_i.

    Per coordinate: a positive derivative pushes to the lower bound, a
    negative one to the upper bound, and a zero derivative to the midpoint.
    """
    derivative = func.fder(u_k)
    chosen = []
    for i, slope in enumerate(derivative):
        if slope > 0:
            chosen.append(frames.a[i])
        elif slope < 0:
            chosen.append(frames.b[i])
        else:
            chosen.append((frames.a[i] + frames.b[i]) / 2)
    return np.array(chosen)
# minimisation function (simplex method) for the linear type of frames
def symplex_meyhod_minimize_function(func, frames, u_k):
    """Minimise the linearised objective over linear frames A u <= b via linprog."""
    cost = func.fder(u_k)
    solution = linprog(cost, frames.A, frames.b)
    return np.array(solution.x)
# method for different stop rules
def calculate_m(job, eps, steps):
    """Run the conditional-gradient iteration on ``job`` under one of three stop rules.

    Dispatch: steps == -1 -> eps-only rule; eps == -1 -> step-count-only rule;
    otherwise both criteria apply.  Each variant returns the tuple
    (final J value, final iterate, J history, iterate history, step count,
    last |J(u_{k+1}) - J(u_k)|).
    NOTE(review): method_steps returns ``k + 1`` while the other two return
    ``k`` — confirm whether this off-by-one between variants is intentional.
    """
    # Stop when the step budget is exceeded, the iterate stops moving,
    # or the objective improvement falls below eps.
    def method_full(J, eps, steps):
        f_sequ = []
        u_k_sequ = []
        k = 0
        f_sequ.append(J.f.f(J.u_k))
        u_k_sequ.append(J.u_k)
        u_k = 0
        while True:
            u1_k = J.find_u1_k()
            u_k = J.u_k
            J.u_k = J.get_next_u(u1_k)
            f_sequ.append(J.f.f(J.u_k))
            u_k_sequ.append(J.u_k)
            if k>steps or np.all(J.u_k == u_k) or abs(J.f.f(J.u_k) - J.f.f(u_k)) <= eps:
                break
            k+=1
        return J.f.f(J.u_k), J.u_k, f_sequ, u_k_sequ, k, abs(J.f.f(J.u_k) - J.f.f(u_k))
    # Same loop, but the step budget is a hard-coded safety cap of 1e8.
    def method_eps(J, eps):
        f_sequ = []
        u_k_sequ = []
        k = 0
        f_sequ.append(J.f.f(J.u_k))
        u_k_sequ.append(J.u_k)
        u_k = 0
        while True:
            u1_k = J.find_u1_k()
            u_k = J.u_k
            J.u_k = J.get_next_u(u1_k)
            f_sequ.append(J.f.f(J.u_k))
            u_k_sequ.append(J.u_k)
            if k>100000000 or np.all(J.u_k == u_k) or abs(J.f.f(J.u_k) - J.f.f(u_k)) <= eps:
                break
            k+=1
        return J.f.f(J.u_k), J.u_k, f_sequ, u_k_sequ, k, abs(J.f.f(J.u_k) - J.f.f(u_k))
    # Step-count rule only: no objective-improvement test; returns k + 1.
    def method_steps(J, steps):
        f_sequ = []
        u_k_sequ = []
        k = 0
        f_sequ.append(J.f.f(J.u_k))
        u_k_sequ.append(J.u_k)
        u_k = 0
        # print("u_k:{}, f:{}".format(J.u_k, J.f.f(J.u_k)))
        while True:
            u1_k = J.find_u1_k()
            u_k = J.u_k
            J.u_k = J.get_next_u(u1_k)
            # print("u_k:{}, f:{}, u1_k:{}".format(J.u_k, J.f.f(J.u_k), u1_k))
            f_sequ.append(J.f.f(J.u_k))
            u_k_sequ.append(J.u_k)
            if k>steps or np.all(J.u_k == u_k):
                break
            k+=1
        return J.f.f(J.u_k), J.u_k, f_sequ, u_k_sequ, k+1, abs(J.f.f(J.u_k) - J.f.f(u_k))
    if steps == -1:
        return method_eps(job, eps)
    elif eps == -1:
        return method_steps(job, steps)
    return method_full(job, eps, steps)
#calculate_m(job1, -1, 100) | StarcoderdataPython |
3357601 | <reponame>taapp/ohtu-lukuvinkkikirjasto
import requests
from repositories.user_repository import user_repository
class AppLibrary:
    """Robot Framework keyword library driving the app over HTTP on localhost."""
    def __init__(self):
        # _base_url must be set before reset_application() uses it.
        self._base_url = "http://localhost:5000"
        self.reset_application()
        self._user_repository = user_repository
    def reset_application(self):
        """Wipe application state via the test-only reset endpoint."""
        requests.post(f"{self._base_url}/tests/reset")
    def create_user(self, username, password):
        """Register a new user through the registration form endpoint."""
        payload = {
            "username": username,
            "password": password,
        }
        requests.post(f"{self._base_url}/register", data=payload)
    def delete_user(self, username):
        """Remove a user directly through the repository layer."""
        self._user_repository.delete(username)
    def add_book(self, tyyppi, otsikko, kirjailija, isbn,
                 tagit, url, kommentit, kuvaus, kurssit):
        """Submit a new reading tip through the add-subject form."""
        payload = dict(
            tyyppi=tyyppi,
            otsikko=otsikko,
            kirjailija=kirjailija,
            isbn=isbn,
            tagit=tagit,
            url=url,
            kommentit=kommentit,
            kuvaus=kuvaus,
            kurssit=kurssit,
        )
        requests.post(f"{self._base_url}/add_subject", data=payload)
| StarcoderdataPython |
6704629 | # Generated by Django 3.0.8 on 2021-05-13 12:19
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: rename model ButtonContent to ProductButton."""
    # Must be applied after the migration that added ProductContent.image.
    dependencies = [
        ('products', '0010_productcontent_image'),
    ]
    operations = [
        # Rename only; field definitions are unchanged.
        migrations.RenameModel(
            old_name='ButtonContent',
            new_name='ProductButton',
        ),
    ]
| StarcoderdataPython |
12827060 | <reponame>roberthoenig/VQ-VAE-Speech
#####################################################################################
# MIT License #
# #
# Copyright (C) 2018 <NAME> #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
#####################################################################################
from clarinet.data import LJspeechDataset, collate_fn, collate_fn_synthesize
from clarinet.modules import ExponentialMovingAverage, KL_Loss, STFT
from clarinet.wavenet import Wavenet
from clarinet.wavenet_iaf import Wavenet_Student
import torch
from torch import optim
import torch.nn as nn
from torch.utils.data import DataLoader
from torch.distributions.normal import Normal
import numpy as np
import librosa
import os
import argparse
import json
import time
def build_model():
    """Construct the teacher WaveNet from the command-line hyperparameters."""
    teacher_config = dict(
        out_channels=2,
        num_blocks=args.num_blocks_t,
        num_layers=args.num_layers_t,
        residual_channels=args.residual_channels,
        gate_channels=args.gate_channels,
        skip_channels=args.skip_channels,
        kernel_size=args.kernel_size,
        cin_channels=args.cin_channels,
        upsample_scales=[16, 16],
    )
    return Wavenet(**teacher_config)
def build_student():
    """Construct the student (inverse-autoregressive-flow) WaveNet."""
    return Wavenet_Student(
        num_blocks_student=[1, 1, 1, 4],
        num_layers=args.num_layers_s,
    )
def clone_as_averaged_model(model_s, ema):
    """Return a copy of the student whose parameters hold the EMA shadow values.

    Builds a fresh student, loads the live weights, then overwrites every
    parameter that has an EMA shadow with the shadow value.

    Bug fix: the original loop did ``param = ema.shadow[name].clone()``, which
    only rebinds the local name ``param`` and never writes into the copied
    model — the "averaged" model silently kept the raw weights.  The fix
    copies the shadow tensor into the parameter in place.
    """
    assert ema is not None
    averaged_model = build_student()
    averaged_model.to(device)
    averaged_model.load_state_dict(model_s.state_dict())
    for name, param in averaged_model.named_parameters():
        if name in ema.shadow:
            # In-place copy so the model actually receives the averaged weights.
            param.data.copy_(ema.shadow[name])
    return averaged_model
def train(epoch, model_t, model_s, optimizer, ema):
    """Run one epoch of probability-density-distillation training.

    The frozen teacher ``model_t`` scores samples drawn from the student
    ``model_s``; the student is optimised on KL + regularisation + STFT
    frame loss.  Relies on module-level globals: ``train_loader``, ``device``,
    ``args``, ``criterion_t``, ``criterion_frame``, ``stft``, ``state``.
    Returns the mean total loss over the epoch.
    """
    global global_step
    epoch_loss = 0.0
    # Running averages of [total, KL, regularisation, frame] loss.
    running_loss = [0.0, 0.0, 0.0, 0.0]
    model_t.eval()
    model_s.train()
    start_time = time.time()
    display_step = 100
    for batch_idx, (x, y, c, _) in enumerate(train_loader):
        global_step += 1
        # Halve the learning rate once each at steps 200k, 400k and 600k.
        # NOTE(review): ``state['learning_rate']`` is updated inside the
        # param-group loop, so it ends up mirroring the last group — confirm
        # ``state`` is the intended global bookkeeping dict.
        if global_step == 200000:
            for param_group in optimizer.param_groups:
                param_group['learning_rate'] *= 0.5
                state['learning_rate'] = param_group['learning_rate']
        if global_step == 400000:
            for param_group in optimizer.param_groups:
                param_group['learning_rate'] *= 0.5
                state['learning_rate'] = param_group['learning_rate']
        if global_step == 600000:
            for param_group in optimizer.param_groups:
                param_group['learning_rate'] *= 0.5
                state['learning_rate'] = param_group['learning_rate']
        x, y, c = x.to(device), y.to(device), c.to(device)

        # Draw white noise z and let the student transform it into audio.
        q_0 = Normal(x.new_zeros(x.size()), x.new_ones(x.size()))
        z = q_0.sample()

        optimizer.zero_grad()
        c_up = model_t.upsample(c)
        x_student, mu_s, logs_s = model_s(z, c_up)  # q_T ~ N(mu_tot, logs_tot.exp_())
        # Teacher scores the student's sample (first channel mu, rest log-sigma).
        mu_logs_t = model_t(x_student, c)
        # NOTE(review): if args.KL_type is neither 'pq' nor 'qp', loss_t is
        # unbound and the next use raises NameError.
        if args.KL_type == 'pq':
            loss_t, loss_KL, loss_reg = criterion_t(mu_logs_t[:, 0:1, :-1], mu_logs_t[:, 1:, :-1], mu_s, logs_s)
        elif args.KL_type == 'qp':
            loss_t, loss_KL, loss_reg = criterion_t(mu_s, logs_s, mu_logs_t[:, 0:1, :-1], mu_logs_t[:, 1:, :-1])
        # Spectral (STFT magnitude) loss between student output and ground truth.
        stft_student, _ = stft(x_student[:, :, 1:])
        stft_truth, _ = stft(x[:, :, 1:])

        loss_frame = criterion_frame(stft_student, stft_truth)
        loss_tot = loss_t + loss_frame
        loss_tot.backward()

        nn.utils.clip_grad_norm_(model_s.parameters(), 10)
        optimizer.step()

        # Fold the new weights into the exponential moving average.
        if ema is not None:
            for name, param in model_s.named_parameters():
                if name in ema.shadow:
                    ema.update(name, param.data)

        running_loss[0] += loss_tot.item() / display_step
        running_loss[1] += loss_KL.item() / display_step
        running_loss[2] += loss_reg.item() / display_step
        running_loss[3] += loss_frame.item() / display_step
        epoch_loss += loss_tot.item()
        if (batch_idx + 1) % display_step == 0:
            end_time = time.time()
            print('Global Step : {}, [{}, {}] [Total Loss, KL Loss, Reg Loss, Frame Loss] : {}'
                  .format(global_step, epoch, batch_idx + 1, np.array(running_loss)))
            print('{} Step Time : {}'.format(display_step, end_time - start_time))
            start_time = time.time()
            running_loss = [0.0, 0.0, 0.0, 0.0]
        # Free per-batch tensors eagerly to keep GPU memory flat.
        del loss_tot, loss_frame, loss_KL, loss_reg, loss_t, x, y, c, c_up, stft_student, stft_truth, q_0, z
        del x_student, mu_s, logs_s, mu_logs_t
    print('{} Epoch Training Loss : {:.4f}'.format(epoch, epoch_loss / (len(train_loader))))
    return epoch_loss / len(train_loader)
def evaluate(model_t, model_s, ema=None):
    """Evaluate the EMA copy of the student against the teacher on test_loader.

    Returns the mean total loss.  Relies on globals ``test_loader``,
    ``device``, ``args``, ``criterion_t``, ``criterion_frame``, ``stft``.
    NOTE(review): ``model_s_ema`` is only bound when ``ema is not None`` —
    calling with ema=None raises NameError.  Also note there is no
    ``torch.no_grad()`` here, so autograd state is still tracked.
    """
    if ema is not None:
        model_s_ema = clone_as_averaged_model(model_s, ema)
    model_t.eval()
    model_s_ema.eval()
    running_loss = [0., 0., 0., 0.]
    epoch_loss = 0.
    display_step = 100
    for batch_idx, (x, y, c, _) in enumerate(test_loader):
        x, y, c = x.to(device), y.to(device), c.to(device)

        # Same forward pass as training: noise -> student -> teacher scoring.
        q_0 = Normal(x.new_zeros(x.size()), x.new_ones(x.size()))
        z = q_0.sample()

        c_up = model_t.upsample(c)
        x_student, mu_s, logs_s = model_s_ema(z, c_up)
        mu_logs_t = model_t(x_student, c)
        if args.KL_type == 'pq':
            loss_t, loss_KL, loss_reg = criterion_t(mu_logs_t[:, 0:1, :-1], mu_logs_t[:, 1:, :-1], mu_s, logs_s)
        elif args.KL_type == 'qp':
            loss_t, loss_KL, loss_reg = criterion_t(mu_s, logs_s, mu_logs_t[:, 0:1, :-1], mu_logs_t[:, 1:, :-1])
        stft_student, _ = stft(x_student[:, :, 1:])
        stft_truth, _ = stft(x[:, :, 1:])

        loss_frame = criterion_frame(stft_student, stft_truth.detach())
        loss_tot = loss_t + loss_frame

        running_loss[0] += loss_tot.item() / display_step
        running_loss[1] += loss_KL.item() / display_step
        running_loss[2] += loss_reg.item() / display_step
        running_loss[3] += loss_frame.item() / display_step
        epoch_loss += loss_tot.item()

        if (batch_idx + 1) % display_step == 0:
            print('{} [Total, KL, Reg, Frame Loss] : {}'.format(batch_idx + 1, np.array(running_loss)))
            running_loss = [0., 0., 0., 0.]
        # Release per-batch tensors promptly.
        del loss_tot, loss_frame, loss_KL, loss_reg, loss_t, x, y, c, c_up, stft_student, stft_truth, q_0, z
        del x_student, mu_s, logs_s, mu_logs_t
    epoch_loss /= len(test_loader)
    print('Evaluation Loss : {:.4f}'.format(epoch_loss))
    del model_s_ema
    return epoch_loss
def synthesize(model_t, model_s, ema=None):
    """Generate one utterance from the first batch of ``synth_loader``.

    Writes both the ground-truth waveform and the student's generated
    waveform (22.05 kHz) into the sample folder. Uses an EMA-averaged clone
    of the student when ``ema`` is given.
    """
    global global_step
    if ema is not None:
        model_s_ema = clone_as_averaged_model(model_s, ema)
    else:
        # Fall back to the raw student; the original raised a NameError
        # whenever synthesize() was called without an EMA object.
        model_s_ema = model_s
    model_s_ema.eval()
    for batch_idx, (x, y, c, _) in enumerate(synth_loader):
        x, c = x.to(device), c.to(device)

        # Input noise z ~ N(0, I) with the same shape as the target audio.
        q_0 = Normal(x.new_zeros(x.size()), x.new_ones(x.size()))
        z = q_0.sample()

        # Save the ground-truth waveform for side-by-side comparison.
        wav_truth_name = '{}/{}/{}/generate_{}_{}_truth.wav'.format(args.sample_path, args.teacher_name,
                                                                    args.model_name, global_step, batch_idx)
        librosa.output.write_wav(wav_truth_name, y.squeeze().numpy(), sr=22050)
        print('{} Saved!'.format(wav_truth_name))

        # Time the generation pass (synchronize so GPU work is included).
        torch.cuda.synchronize()
        start_time = time.time()
        c_up = model_t.upsample(c)
        with torch.no_grad():
            y_gen = model_s_ema.generate(z, c_up).squeeze()
        torch.cuda.synchronize()
        print('{} seconds'.format(time.time() - start_time))

        wav = y_gen.to(torch.device("cpu")).data.numpy()
        wav_name = '{}/{}/{}/generate_{}_{}.wav'.format(args.sample_path, args.teacher_name,
                                                        args.model_name, global_step, batch_idx)
        librosa.output.write_wav(wav_name, wav, sr=22050)
        print('{} Saved!'.format(wav_name))
        del y_gen, wav, x, y, c, c_up, z, q_0
        # Only the first batch is synthesized. Break instead of iterating the
        # whole loader doing nothing (the original's `if batch_idx == 0`
        # still paid collation cost for every remaining batch).
        break
    del model_s_ema
def save_checkpoint(model, optimizer, global_step, global_epoch, ema=None):
    """Persist the student (and, when ``ema`` is given, its EMA copy).

    Both checkpoints share the same optimizer state and step/epoch counters;
    the EMA variant gets a ``_ema`` suffix in its filename.
    """
    out_dir = os.path.join(args.save, args.teacher_name, args.model_name)
    optimizer_state = optimizer.state_dict()

    def _dump(state_dict, suffix):
        # Write one checkpoint file with the shared bookkeeping fields.
        target = os.path.join(out_dir, "checkpoint_step{:09d}{}.pth".format(global_step, suffix))
        torch.save({"state_dict": state_dict,
                    "optimizer": optimizer_state,
                    "global_step": global_step,
                    "global_epoch": global_epoch}, target)

    _dump(model.state_dict(), "")
    if ema is not None:
        _dump(clone_as_averaged_model(model, ema).state_dict(), "_ema")
def load_checkpoint(step, model_s, optimizer, ema=None):
    """Restore the student, optimizer and (optionally) EMA state from disk.

    Rewinds the module-level ``global_step``/``global_epoch`` counters to the
    values stored in the checkpoint. Returns ``(model_s, optimizer, ema)``.
    """
    global global_step
    global global_epoch
    checkpoint_path = os.path.join(args.save, args.teacher_name, args.model_name, "checkpoint_step{:09d}.pth".format(step))
    print("Load checkpoint from: {}".format(checkpoint_path))
    checkpoint = torch.load(checkpoint_path)
    model_s.load_state_dict(checkpoint["state_dict"])
    optimizer.load_state_dict(checkpoint["optimizer"])
    global_step = checkpoint["global_step"]
    global_epoch = checkpoint["global_epoch"]
    if ema is not None:
        # Re-seed the EMA shadow weights from the separately saved *_ema.pth
        # checkpoint; a throwaway student instance holds them temporarily.
        checkpoint_path = os.path.join(args.save, args.teacher_name, args.model_name, "checkpoint_step{:09d}_ema.pth".format(step))
        checkpoint = torch.load(checkpoint_path)
        averaged_model = build_student()
        averaged_model.to(device)
        averaged_model.load_state_dict(checkpoint["state_dict"])
        for name, param in averaged_model.named_parameters():
            if param.requires_grad:
                ema.register(name, param.data)
    return model_s, optimizer, ema
def load_teacher_checkpoint(path, model_t):
    """Load pretrained teacher weights from *path* into *model_t* (in place).

    ``map_location`` forces the tensors onto CPU so the checkpoint loads
    regardless of which device it was saved from.
    """
    print("Load checkpoint from: {}".format(path))
    state = torch.load(path, map_location=lambda storage, loc: storage)
    model_t.load_state_dict(state["state_dict"])
    return model_t
if __name__ == "__main__":
    # Train a student WaveNet against a frozen, pretrained teacher
    # (probabilistic distillation).
    torch.backends.cudnn.benchmark = True
    np.set_printoptions(precision=4)
    parser = argparse.ArgumentParser(description='Train WaveNet of LJSpeech',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--data_path', type=str, default='../DATASETS/ljspeech/', help='Dataset Path')
    parser.add_argument('--sample_path', type=str, default='../samples', help='Sample Path')
    parser.add_argument('--save', '-s', type=str, default='../params', help='Folder to save checkpoints.')
    parser.add_argument('--load', '-l', type=str, default='../params', help='Checkpoint path to resume / test.')
    parser.add_argument('--loss', type=str, default='../loss', help='Folder to save loss')
    parser.add_argument('--log', type=str, default='../log', help='Log folder.')
    parser.add_argument('--teacher_name', type=str, default='wavenet_gaussian_01', help='Model Name')
    parser.add_argument('--model_name', type=str, default='wavenet_student_gaussian_01', help='Model Name')
    parser.add_argument('--teacher_load_step', type=int, default=0, help='Teacher Load Step')
    parser.add_argument('--load_step', type=int, default=0, help='Student Load Step')
    parser.add_argument('--KL_type', type=str, default='qp', help='KL_pq vs KL_qp')
    parser.add_argument('--epochs', '-e', type=int, default=1000, help='Number of epochs to train.')
    parser.add_argument('--batch_size', '-b', type=int, default=4, help='Batch size.')
    parser.add_argument('--learning_rate', '-lr', type=float, default=1e-3, help='The Learning Rate.')
    parser.add_argument('--ema_decay', type=float, default=0.9999, help='Exponential Moving Average Decay')
    parser.add_argument('--num_blocks_t', type=int, default=4, help='Number of blocks (Teacher)')
    parser.add_argument('--num_layers_t', type=int, default=6, help='Number of layers (Teacher)')
    parser.add_argument('--num_layers_s', type=int, default=6, help='Number of layers (Student)')
    parser.add_argument('--residual_channels', type=int, default=128, help='Residual Channels')
    parser.add_argument('--gate_channels', type=int, default=256, help='Gate Channels')
    parser.add_argument('--skip_channels', type=int, default=128, help='Skip Channels')
    parser.add_argument('--kernel_size', type=int, default=3, help='Kernel Size')
    parser.add_argument('--cin_channels', type=int, default=80, help='Cin Channels')
    parser.add_argument('--num_workers', type=int, default=3, help='Number of workers')
    args = parser.parse_args()

    # Create every output folder up front. makedirs(..., exist_ok=True) also
    # creates intermediate directories, so listing the deepest paths suffices
    # and replaces the racy isdir()-then-makedirs() pattern of the original.
    for folder in (args.log,
                   args.loss,
                   os.path.join(args.save, args.teacher_name, args.model_name),
                   os.path.join(args.sample_path, args.teacher_name, args.model_name)):
        os.makedirs(folder, exist_ok=True)

    use_cuda = torch.cuda.is_available()
    device = torch.device("cuda" if use_cuda else "cpu")

    # LOAD DATASETS (0.1 = train/test split fraction used by LJspeechDataset)
    train_dataset = LJspeechDataset(args.data_path, True, 0.1)
    test_dataset = LJspeechDataset(args.data_path, False, 0.1)
    train_loader = DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True, collate_fn=collate_fn,
                              num_workers=args.num_workers, pin_memory=True)
    test_loader = DataLoader(test_dataset, batch_size=args.batch_size, collate_fn=collate_fn,
                             num_workers=args.num_workers)
    synth_loader = DataLoader(test_dataset, batch_size=1, collate_fn=collate_fn_synthesize,
                              num_workers=args.num_workers, pin_memory=True)

    # Load the pretrained teacher from its EMA checkpoint; build a fresh student.
    teacher_step = args.teacher_load_step
    path = os.path.join(args.load, args.teacher_name, "checkpoint_step{:09d}_ema.pth".format(teacher_step))
    model_t = build_model()
    model_t = load_teacher_checkpoint(path, model_t)
    model_s = build_student()
    stft = STFT(filter_length=1024, hop_length=256)
    model_t.to(device)
    model_s.to(device)
    stft.to(device)

    optimizer = optim.Adam(model_s.parameters(), lr=args.learning_rate)
    criterion_t = KL_Loss()
    criterion_frame = nn.MSELoss()

    # Track an exponential moving average of the student's trainable weights.
    ema = ExponentialMovingAverage(args.ema_decay)
    for name, param in model_s.named_parameters():
        if param.requires_grad:
            ema.register(name, param.data)
    # Freeze the teacher: only the student is optimized. (Assigning False
    # unconditionally is equivalent to the original's redundant guard.)
    for param in model_t.parameters():
        param.requires_grad = False

    global_step, global_epoch = 0, 0
    load_step = args.load_step

    log = open(os.path.join(args.log, '{}.txt'.format(args.model_name)), 'w')
    state = {k: v for k, v in args._get_kwargs()}

    if load_step == 0:
        # Fresh run: empty loss histories; log the hyper-parameters once.
        list_train_loss, list_loss = [], []
        log.write(json.dumps(state) + '\n')
        test_loss = 100.0
    else:
        # Resume: restore weights/optimizer/EMA, then reload the loss
        # histories truncated to the restored epoch so appends stay aligned.
        model_s, optimizer, ema = load_checkpoint(load_step, model_s, optimizer, ema)
        list_train_loss = np.load('{}/{}_train.npy'.format(args.loss, args.model_name)).tolist()
        list_loss = np.load('{}/{}.npy'.format(args.loss, args.model_name)).tolist()
        list_train_loss = list_train_loss[:global_epoch]
        list_loss = list_loss[:global_epoch]
        test_loss = np.min(list_loss)

    for epoch in range(global_epoch + 1, args.epochs + 1):
        training_epoch_loss = train(epoch, model_t, model_s, optimizer, ema)
        with torch.no_grad():
            test_epoch_loss = evaluate(model_t, model_s, ema)

        state['training_loss'] = training_epoch_loss
        state['eval_loss'] = test_epoch_loss
        state['epoch'] = epoch
        list_train_loss.append(training_epoch_loss)
        list_loss.append(test_epoch_loss)

        # Checkpoint and synthesize only when the eval loss improves.
        if test_loss > test_epoch_loss:
            test_loss = test_epoch_loss
            save_checkpoint(model_s, optimizer, global_step, epoch, ema)
            print('Epoch {} Model Saved! Loss : {:.4f}'.format(epoch, test_loss))
            synthesize(model_t, model_s, ema)
        np.save('{}/{}_train.npy'.format(args.loss, args.model_name), list_train_loss)
        np.save('{}/{}.npy'.format(args.loss, args.model_name), list_loss)

        log.write('%s\n' % json.dumps(state))
        log.flush()
        print(state)
    log.close()
| StarcoderdataPython |
3508095 | from taskplus.core.shared.action import Action
from taskplus.core.shared.response import ResponseSuccess
from taskplus.core.shared.request import Request
class DeleteUserAction(Action):
    """Action that removes a user from the repository by id."""

    def __init__(self, repo):
        super().__init__()
        self.repo = repo

    def process_request(self, request):
        """Delete the user identified by ``request.id``.

        Before/after hooks see the request; the after hook also receives the
        repository's delete result, which is wrapped in a ResponseSuccess.
        """
        self._call_before_execution_hooks(dict(request=request, user=None))
        deleted = self.repo.delete(request.id)
        self._call_after_execution_hooks(dict(request=request, user=deleted))
        return ResponseSuccess(deleted)
class DeleteUserRequest(Request):
    """Request object carrying the id of the user to delete."""

    def __init__(self, id):
        super().__init__()
        self.id = id
        self._validate()

    def _validate(self):
        """Populate ``self.errors`` with any problems found in the payload."""
        self.errors = []
        if self.id is None:
            self._add_error('id', 'is required')
            return
        if not isinstance(self.id, int):
            self._add_error('id', 'expected int, got {}({})'.format(
                type(self.id).__name__, self.id))
| StarcoderdataPython |
9728790 | <reponame>torkjellsdatter/pisa
# author: <NAME>
# date: June 29, 2017
"""
OscParams: Characterize neutrino oscillation parameters
(mixing angles, Dirac-type CP-violating phase, mass splittings)
"""
from __future__ import division
import numpy as np
from pisa import FTYPE
class OscParams(object):
    """
    Holds neutrino oscillation parameters, i.e., mixing angles, squared-mass
    differences, and a Dirac-type CPV phase. The neutrino mixing (PMNS) matrix
    constructed from these parameters is given in the standard
    3x3 parameterization.

    Parameters
    ----------
    dm21, dm31, dm41 : float
        Mass splittings (delta M^2_{21,31,41}) expected to be given in [eV^2]
    sin12, sin13, sin23 : float
        1-2, 1-3 and 2-3 mixing angles, interpreted as sin(theta_{ij})
    deltacp : float
        Value of CPV phase in [rad]

    Attributes
    ----------
    dm21, dm31, dm41 : float
        Cf. parameters
    sin12, sin13, sin23, sin14 : float
        Cf. parameters
    theta12, theta13, theta23, theta14 : float
        Mixing angles (corresponding to sinXY)
    deltacp : float
        Cf. parameters
    mix_matrix : 3d float array of shape (3, 3, 2)
        Neutrino mixing (PMNS) matrix in standard parameterization. The third
        dimension holds the real and imaginary parts of each matrix element.
    dm_matrix : 2d float array of shape (3, 3)
        Antisymmetric matrix of squared-mass differences in vacuum
    """
    def __init__(self):
        # Sines of the mixing angles; private so the setters can enforce
        # |sin(theta)| <= 1.
        self._sin12 = 0.
        self._sin13 = 0.
        self._sin23 = 0.
        self._sin14 = 0.
        # Dirac CPV phase; setter enforces [0, 2*pi].
        self._deltacp = 0.
        # Squared-mass splittings in eV^2.
        self.dm21 = 0.
        self.dm31 = 0.
        self.dm41 = 0.
        # 3x3 complex matrix of NSI coupling parameters epsilon_{ab}.
        self.nsi_eps = np.zeros((3, 3), dtype=FTYPE) + 1.j * np.zeros((3, 3), dtype=FTYPE)
        self.gamma21 = 0. # TODO Add full 3x3 matrix option, TODO update docs, TODO getters/setters to enforce values ranges?
        self.gamma31 = 0.
        self.gamma32 = 0.

    # --- theta12 ---
    @property
    def sin12(self):
        """Sine of 1-2 mixing angle"""
        return self._sin12

    @sin12.setter
    def sin12(self, value):
        assert (abs(value) <= 1)
        self._sin12 = value

    @property
    def theta12(self):
        """1-2 mixing angle in [rad]"""
        return np.arcsin(self.sin12)

    @theta12.setter
    def theta12(self, value):
        self.sin12 = np.sin(value)

    # --- theta13 ---
    @property
    def sin13(self):
        """Sine of 1-3 mixing angle"""
        return self._sin13

    @sin13.setter
    def sin13(self, value):
        assert (abs(value) <= 1)
        self._sin13 = value

    @property
    def theta13(self):
        """1-3 mixing angle in [rad]"""
        return np.arcsin(self.sin13)

    @theta13.setter
    def theta13(self, value):
        self.sin13 = np.sin(value)

    # --- theta23 ---
    @property
    def sin23(self):
        """Sine of 2-3 mixing angle"""
        return self._sin23

    @sin23.setter
    def sin23(self, value):
        assert (abs(value) <= 1)
        self._sin23 = value

    @property
    def theta23(self):
        """2-3 mixing angle in [rad]"""
        return np.arcsin(self.sin23)

    @theta23.setter
    def theta23(self, value):
        self.sin23 = np.sin(value)

    # --- theta14 ---
    @property
    def sin14(self):
        """Sine of 1-4 mixing angle"""
        return self._sin14

    @sin14.setter
    def sin14(self, value):
        assert (abs(value) <= 1)
        self._sin14 = value

    @property
    def theta14(self):
        """1-4 mixing angle in [rad]"""
        return np.arcsin(self.sin14)

    @theta14.setter
    def theta14(self, value):
        self.sin14 = np.sin(value)

    # --- deltaCP ---
    @property
    def deltacp(self):
        """CPV phase"""
        return self._deltacp

    @deltacp.setter
    def deltacp(self, value):
        assert value >= 0. and value <= 2*np.pi
        self._deltacp = value

    # --- NSI epsilons ---
    # Off-diagonal setters write both (a, b) and (b, a) entries so the
    # epsilon matrix stays symmetric in its real parts.
    @property
    def eps_ee(self):
        """nue-nue NSI coupling parameter"""
        return self.nsi_eps[0, 0].real

    @eps_ee.setter
    def eps_ee(self, value):
        self.nsi_eps[0, 0] = value + 1.j * self.nsi_eps[0, 0].imag

    @property
    def eps_emu(self):
        """nue-numu NSI coupling parameter"""
        return self.nsi_eps[1, 0].real

    @eps_emu.setter
    def eps_emu(self, value):
        self.nsi_eps[1, 0] = value + 1.j * self.nsi_eps[1, 0].imag
        self.nsi_eps[0, 1] = value + 1.j * self.nsi_eps[0, 1].imag

    @property
    def eps_etau(self):
        """nue-nutau NSI coupling parameter"""
        return self.nsi_eps[2, 0].real

    @eps_etau.setter
    def eps_etau(self, value):
        self.nsi_eps[2, 0] = value + 1.j * self.nsi_eps[2, 0].imag
        self.nsi_eps[0, 2] = value + 1.j * self.nsi_eps[0, 2].imag

    @property
    def eps_mumu(self):
        """numu-numu NSI coupling parameter"""
        return self.nsi_eps[1, 1].real

    @eps_mumu.setter
    def eps_mumu(self, value):
        self.nsi_eps[1, 1] = value + 1.j * self.nsi_eps[1, 1].imag

    @property
    def eps_mutau(self):
        """numu-nutau NSI coupling parameter"""
        return self.nsi_eps[1, 2].real

    # BUGFIX: this was `@eps_etau.setter`, which rebound `eps_mutau` to a
    # property whose *getter* was eps_etau's (reading nsi_eps[2, 0]).
    @eps_mutau.setter
    def eps_mutau(self, value):
        self.nsi_eps[2, 1] = value + 1.j * self.nsi_eps[2, 1].imag
        self.nsi_eps[1, 2] = value + 1.j * self.nsi_eps[1, 2].imag

    @property
    def eps_tautau(self):
        """nutau-nutau NSI coupling parameter"""
        return self.nsi_eps[2, 2].real

    @eps_tautau.setter
    def eps_tautau(self, value):
        self.nsi_eps[2, 2] = value + 1.j * self.nsi_eps[2, 2].imag

    @property
    def mix_matrix(self):
        """Neutrino mixing matrix (3, 3, 2); last axis = (real, imag)."""
        mix = np.zeros((3, 3, 2), dtype=FTYPE)
        sd = np.sin(self.deltacp)
        cd = np.cos(self.deltacp)
        # Cosines derived from the stored sines.
        c12 = np.sqrt(1. - self.sin12**2)
        c23 = np.sqrt(1. - self.sin23**2)
        c13 = np.sqrt(1. - self.sin13**2)
        mix[0, 0, 0] = c12 * c13
        mix[0, 0, 1] = 0.
        mix[0, 1, 0] = self.sin12 * c13
        mix[0, 1, 1] = 0.
        mix[0, 2, 0] = self.sin13 * cd
        mix[0, 2, 1] = - self.sin13 * sd
        mix[1, 0, 0] = - self.sin12 * c23 - c12 * self.sin23 * self.sin13 * cd
        mix[1, 0, 1] = - c12 * self.sin23 * self.sin13 * sd
        mix[1, 1, 0] = c12 * c23 - self.sin12 * self.sin23 * self.sin13 * cd
        mix[1, 1, 1] = - self.sin12 * self.sin23 * self.sin13 * sd
        mix[1, 2, 0] = self.sin23 * c13
        mix[1, 2, 1] = 0.
        mix[2, 0, 0] = self.sin12 * self.sin23 - c12 * c23 * self.sin13 * cd
        mix[2, 0, 1] = - c12 * c23 * self.sin13 * sd
        mix[2, 1, 0] = - c12 * self.sin23 - self.sin12 * c23 * self.sin13 * cd
        mix[2, 1, 1] = - self.sin12 * c23 * self.sin13 * sd
        mix[2, 2, 0] = c23 * c13
        mix[2, 2, 1] = 0.
        return mix

    @property
    def mix_matrix_complex(self):
        ''' mixing matrix as complex 2-d array'''
        return self.mix_matrix[:, :, 0] + self.mix_matrix[:, :, 1] * 1.j

    @property
    def dm_matrix(self):
        """Neutrino mass splitting matrix in vacuum (antisymmetric)."""
        dmVacVac = np.zeros((3, 3), dtype=FTYPE)
        mVac = np.zeros(3, dtype=FTYPE)
        delta = 5.e-9
        mVac[0] = 0.
        mVac[1] = self.dm21
        mVac[2] = self.dm31
        # Break any degeneracies
        if mVac[1] == 0.:
            mVac[0] -= delta
        if mVac[2] == 0.:
            mVac[2] += delta
        dmVacVac[0, 0] = 0.
        dmVacVac[1, 1] = 0.
        dmVacVac[2, 2] = 0.
        dmVacVac[0, 1] = mVac[0] - mVac[1]
        dmVacVac[1, 0] = - dmVacVac[0, 1]
        dmVacVac[0, 2] = mVac[0] - mVac[2]
        dmVacVac[2, 0] = - dmVacVac[0, 2]
        dmVacVac[1, 2] = mVac[1] - mVac[2]
        dmVacVac[2, 1] = - dmVacVac[1, 2]
        return dmVacVac
| StarcoderdataPython |
8051989 | import requests
import json
import uuid
from flask import Flask, request, json, jsonify
from flask_restplus import Resource, Api, Resource, reqparse
app = Flask(__name__)
api = Api(app, title='Unit Test')
gtest = api.namespace('gTest', description='Accessible by API and CLI')
arq = api.namespace('Arquillian', description='Accessible by API and CLI')
junit = api.namespace('JUnit', description='Accessible by API and CLI')
@gtest.route('/<string:TC>/<string:responder>/<string:tag>/Test1')
class gTest(Resource):
    @api.doc(responses={
        200: 'Success',
        400: 'Validation Error',
        500: 'Internal Server Error'
    })
    def get(self, TC, responder, tag):
        '''TC#1 Definition'''
        # TODO: tag goes to a grouping file. Grouping would be done backend and hardcoded.
        response = requests.get('http://httpbin.org/get',
                                {"TC": TC, 'responder': responder})
        payload = json.loads(response.text)
        # Append the echoed request to the shared log for later inspection.
        with open('Tests_Logs.txt', 'a+') as log_file:
            json.dump(payload, log_file, sort_keys = False, indent = 4,
                ensure_ascii = False)
        return payload
@arq.route('/<string:TC>/<string:responder>/<string:tag>/Test1')
class Arquillian(Resource):
    @api.doc(responses={
        200: 'Success',
        400: 'Validation Error',
        500: 'Internal Server Error'
    })
    def get(self, TC, responder, tag):
        '''TC#1 Definition'''
        # TODO: tag goes to a grouping file. Grouping would be done backend and hardcoded.
        response = requests.get('http://httpbin.org/get',
                                {"TC": TC, 'responder': responder})
        payload = json.loads(response.text)
        # Append the echoed request to the shared log for later inspection.
        with open('Tests_Logs.txt', 'a+') as log_file:
            json.dump(payload, log_file, sort_keys = False, indent = 4,
                ensure_ascii = False)
        return payload
@junit.route('/<string:TC>/<string:responder>/<string:tag>/Test1')
class JUnit(Resource):
    @api.doc(responses={
        200: 'Success',
        400: 'Validation Error',
        500: 'Internal Server Error'
    })
    def get(self, TC, responder, tag):
        '''TC#1 Definition'''
        # TODO: tag goes to a grouping file. Grouping would be done backend and hardcoded.
        response = requests.get('http://httpbin.org/get',
                                {"TC": TC, 'responder': responder})
        payload = json.loads(response.text)
        # Append the echoed request to the shared log for later inspection.
        with open('Tests_Logs.txt', 'a+') as log_file:
            json.dump(payload, log_file, sort_keys = False, indent = 4,
                ensure_ascii = False)
        return payload
if __name__ == '__main__':
    # Bind to all interfaces so the test API is reachable from other hosts;
    # debug mode enables the reloader (development use only).
    app.run(host='0.0.0.0', debug=True, port=5001)
8157720 | import Optitrack as OptiT
import numpy as np
import time
import sys
import matplotlib.pyplot as plt
if __name__ == '__main__':
    # Live 2-D scatter plot of OptiTrack marker positions (x vs y).
    op = OptiT.OptiTrack()
    current_time = time.time()
    position = []  # NOTE(review): unused; appears to be leftover scaffolding
    begin_time = time.time()  # NOTE(review): unused
    # The 3-D axes below are set up but never drawn on; plt.clf() in the loop
    # discards them and a fresh 2-D scatter is drawn instead.
    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')
    ax.set_xlabel('X Label')
    ax.set_ylabel('Y Label')
    ax.set_zlabel('Z Label')
    plt.ion()
    position0 = []  # history of x coordinates
    position1 = []  # history of y coordinates
    while True:
        pass
        # NOTE(review): threshold is 0.01 s although the comment says 0.1 — confirm intended rate
        if time.time() - current_time >= 0.01: # 0.1 second refresh rate
            plt.clf()
            #print('position = %s' % (op.position))
            position0.append(op.position[0])
            position1.append(op.position[1])
            current_time = time.time()
            #ax.scatter(op.position[0], op.position[1], op.position[2])
            # Redraw the full accumulated trajectory each refresh.
            plt.scatter(position0,position1)
            plt.draw()
            #time.sleep(0.01)
            plt.pause(0.01)
4945223 | <filename>pretix_cas/views.py
import cas
from django.conf import settings
from django.contrib import messages
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django.views.generic import TemplateView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from pretix.base.models import Team, User
from pretix.control.permissions import OrganizerPermissionRequiredMixin
from pretix.control.views.auth import process_login
from pretix.helpers.urls import build_absolute_uri
from pretix.settings import config
from . import auth_backend
from .forms import CasAssignmentRuleForm
from .models import CasAttributeTeamAssignmentRule
def return_from_sso(request):
"""
This function will be called when the user returns from the CAS server, presenting the ticket of the CAS server.
"""
cas_response = __verify_cas(request)
# If the ticket could not be verified, the response is {None, None, None}
if cas_response[0] is None:
return HttpResponse(_('Login failed'))
else:
# See __create_new_user_from_cas_data for data format
email = cas_response[1]['mail']
try:
user = User.objects.filter(email=email).get()
except ObjectDoesNotExist:
locale = request.LANGUAGE_CODE if hasattr(request, 'LANGUAGE_CODE') else settings.LANGUAGE_CODE
timezone = request.timezone if hasattr(request, 'timezone') else settings.TIME_ZONE
user = __create_new_user_from_cas_data(cas_response, locale, timezone)
if user.auth_backend != auth_backend.CasAuthBackend.identifier:
return HttpResponseBadRequest(_('Could not create user: Email is already registered.'))
group_membership = cas_response[1].get('groupMembership')
ou = cas_response[1].get('ou')
__add_user_to_teams(user, group_membership, ou)
return process_login(request, user, False)
def __verify_cas(request):
# This is the absolute URL of the view that receives the ticket from the client (generated by the CAS Server).
return_address = build_absolute_uri('plugins:pretix_cas:cas.response')
# The CASClient is created on every request because the domain of the pretix instance is not fixed.
cas_client = cas.CASClient(
version='CAS_2_SAML_1_0',
server_url=config.get('pretix_cas', 'cas_server_url', fallback='https://sso.tu-darmstadt.de'),
service_url=return_address
)
ticket = request.GET.get('ticket')
# Validate ticket with CAS Server, receive user information.
return cas_client.verify_ticket(ticket)
class AssignmentRulesList(TemplateView, OrganizerPermissionRequiredMixin):
"""
This view renders the team assignment rules settings page.
"""
template_name = 'pretix_cas/cas_assignment_rules.html'
permission = 'can_change_organizer_settings'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
organizer = self.request.organizer
context['teams'] = Team.objects.filter(organizer=organizer)
context['assignmentRules'] = CasAttributeTeamAssignmentRule.objects.filter(team__organizer=organizer)
return context
class AssignmentRuleEditMixin(OrganizerPermissionRequiredMixin):
model = CasAttributeTeamAssignmentRule
permission = 'can_change_organizer_settings'
def get_success_url(self):
return reverse('plugins:pretix_cas:team_assignment_rules',
kwargs={'organizer': self.request.organizer.slug})
class AssignmentRuleUpdateMixin(AssignmentRuleEditMixin):
fields = ['team', 'attribute']
template_name = 'pretix_cas/cas_assignment_rule_edit.html'
def get_form(self, form_class=None):
return CasAssignmentRuleForm(organizer=self.request.organizer, **self.get_form_kwargs())
def form_valid(self, form):
super().form_valid(form)
messages.success(self.request, _('The new assignment rule has been created.'))
return redirect(self.get_success_url())
def form_invalid(self, form):
messages.error(self.request, _('The assignment rule could not be created.'))
return super().form_invalid(form)
class AssignmentRuleCreate(AssignmentRuleUpdateMixin, CreateView):
"""
This view enables the organizer to add a new team assignment rule.
"""
class AssignmentRuleEdit(AssignmentRuleUpdateMixin, UpdateView):
"""
This view enables the organizer to update an existing team assignment rule.
"""
class AssignmentRuleDelete(AssignmentRuleEditMixin, DeleteView):
"""
This view enables the organizer to delete an existing team assignment rule.
"""
template_name = 'pretix_cas/cas_assignment_rule_delete.html'
def __create_new_user_from_cas_data(cas_response, locale, timezone):
"""
Creates a user from the fields in the CAS payload.
:param cas_response: The payload that is returned by CAS.
:param locale: The locale for the new user.
:param timezone: The timezone for the new user.
:return: The user model that was created.
"""
# On successful verification the returned triple looks something like this:
# ('ab12abcd',
# {'mail': '<EMAIL>', 'eduPersonAffiliation': ['student', 'member', 'employee'],
# 'ou': ['T20', 'FB20'], 'groupMembership': ['cn=T20', 'ou=central-it', 'o=tu-darmstadt'],
# 'givenName': 'John', 'successfulAuthenticationHandlers': 'LdapAuthenticationHandler', 'fullName': '<NAME>',
# 'tudUserUniqueID': '123456789', 'cn': 'ab12abcd', 'credentialType': 'UsernamePasswordCredential',
# 'samlAuthenticationStatementAuthMethod': 'urn:oasis:names:tc:SAML:1.0:am:password', 'tudMatrikel': '1234567',
# 'authenticationMethod': 'LdapAuthenticationHandler', 'surname': 'Doe'
# }, None)
user_info = cas_response[1]
# email attribute is always required
email = user_info['mail']
# Try to determine a name
if "fullName" in user_info:
fullname = user_info['fullName']
elif "givenName" in user_info and "surname" in user_info:
fullname = '%s, %s' % (user_info['surname'], user_info['givenName'])
elif "surname" in user_info:
fullname = user_info['surname']
elif "givenName" in user_info:
fullname = user_info['givenName']
else:
fullname = ""
created_user = User.objects.create(
email=email,
fullname=fullname,
locale=locale,
timezone=timezone,
auth_backend=auth_backend.CasAuthBackend.identifier,
password='',
)
return created_user
def __add_user_to_teams(user, ou_attributes=None, group_membership_attributes=None):
"""
Assigns users to teams based on the set assignment rules.
It doesn't matter whether the user is already in the team, or not.
:param user: The pretix 'User' object of the user that logged in
:param ou_attributes: The list of ou attributes of the user received by the CAS server
:param group_membership_attributes: The list of groupMembership attributes of the user received by the CAS server
"""
# The response from the CAS server can respond with None, an empty list, a single attribute, or a list with
# attributes
if ou_attributes is None:
ou_attributes = []
if type(ou_attributes) is not list:
ou_attributes = [ou_attributes]
if group_membership_attributes is None:
group_membership_attributes = []
if type(group_membership_attributes) is not list:
group_membership_attributes = [group_membership_attributes]
assignment_rules = CasAttributeTeamAssignmentRule.objects.all()
teams = {matcher.team for matcher in assignment_rules
if (matcher.attribute in ou_attributes or matcher.attribute in group_membership_attributes)}
for team in teams:
try:
team.members.add(user)
except ObjectDoesNotExist:
pass
| StarcoderdataPython |
299582 | <gh_stars>0
#!/usr/bin/env python
#
# GrovePi Example for using the Grove Thumb Joystick (http://www.seeedstudio.com/wiki/Grove_-_Thumb_Joystick)
#
# The GrovePi connects the Raspberry Pi and Grove sensors. You can learn more about GrovePi here: http://www.dexterindustries.com/GrovePi
#
# Have a question about this example? Ask on the forums here: http://www.dexterindustries.com/forum/?forum=grovepi
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import time
import grovepi
# Connect the Grove Thumb Joystick to analog port A0
# GrovePi Port A0 uses Arduino pins 0 and 1
# GrovePi Port A1 uses Arduino pins 1 and 2
# Don't plug anything into port A1 that uses pin 1
# Most Grove sensors only use 3 of their 4 pins, which is why the GrovePi shares Arduino pins between adjacent ports
# If the sensor has a pin definition SIG,NC,VCC,GND, the second (white) pin is not connected to anything
# If you wish to connect two joysticks, use ports A0 and A2 (skip A1)
# Uses two pins - one for the X axis and one for the Y axis
# This configuration means you are using port A0
xPin = 0
yPin = 1
grovepi.pinMode(xPin, "INPUT")
grovepi.pinMode(yPin, "INPUT")

# The Grove Thumb Joystick is an analog device that outputs analog signal ranging from 0 to 1023
# The X and Y axes are two ~10k potentiometers and a momentary push button which shorts the x axis

# Typical readings (spec vs. one measured unit):
#        Min  Typ  Max  Click
#  X     206  516  798  1023
#  Y     203  507  797
#  X'    253  513  766  1020-1023
#  Y'    250  505  769

while True:
    try:
        # Get X/Y coordinates
        x = grovepi.analogRead(xPin)
        y = grovepi.analogRead(yPin)

        # Calculate X/Y resistance. Guard against a 0 reading, which would
        # otherwise raise ZeroDivisionError (not caught by the IOError
        # handler below) and kill the loop.
        Rx = float(1023 - x) * 10 / x if x != 0 else float('inf')
        Ry = float(1023 - y) * 10 / y if y != 0 else float('inf')

        # Was a click detected on the X axis?
        click = 1 if x >= 1020 else 0

        print ("x =", x, " y =", y, " Rx =", Rx, " Ry =", Ry, " click =", click)
        time.sleep(.5)

    except IOError:
        print ("Error")
12852626 | <reponame>jakub530/PyGame-Neural-Net
import sys, pygame,math
import numpy as np
from pygame import gfxdraw
import pygame_lib, nn_lib
import pygame.freetype
from pygame_lib import color
import random
import copy
import auto_maze
import node_vis | StarcoderdataPython |
4803671 | import json
import logging
import sys
import click
from click_didyoumean import DYMGroup
from esok.config.connection_options import per_connection
from esok.constants import UNKNOWN_ERROR
LOG = logging.getLogger(__name__)
@click.group(cls=DYMGroup)
def alias():
    """Alias operations."""
@alias.command(name="list")
@click.option(
"-s",
"--sort",
default="alias,index",
metavar="COLUMN",
show_default=True,
help="Comma-separated list of column names or column aliases used to sort the "
"response.",
)
@per_connection()
def list_aliases(client, sort):
"""List existing index aliases."""
r = client.cat.aliases(v=True, s=sort)
click.echo(r)
@alias.command()
@click.argument("name", type=click.STRING)
@click.argument("index", type=click.STRING)
@per_connection()
def create(client, name, index):
    """Create an alias for an index."""
    response = client.indices.put_alias(index=index, name=name)
    LOG.info(json.dumps(response))
    # Exit non-zero when the cluster does not acknowledge the change.
    if not response.get("acknowledged"):
        sys.exit(UNKNOWN_ERROR)
@alias.command()
@click.argument("name", type=click.STRING)
@click.argument("index", type=click.STRING)
@per_connection()
def delete(client, name, index):
    """Delete alias from index."""
    response = client.indices.delete_alias(name=name, index=index)
    LOG.info(json.dumps(response))
    # Exit non-zero when the cluster does not acknowledge the change.
    if not response.get("acknowledged"):
        sys.exit(UNKNOWN_ERROR)
@alias.command()
@click.argument("name", type=click.STRING)
@click.argument("from_index", type=click.STRING)
@click.argument("to_index", type=click.STRING)
@per_connection()
def swap(client, name, from_index, to_index):
    """Swap alias between indices, atomically."""
    # A single update_aliases call makes the add+remove pair atomic.
    actions = [
        {"add": {"index": to_index, "alias": name}},
        {"remove": {"index": from_index, "alias": name}},
    ]
    response = client.indices.update_aliases({"actions": actions})
    LOG.info(json.dumps(response))
    # Exit non-zero when the cluster does not acknowledge the change.
    if not response.get("acknowledged"):
        sys.exit(UNKNOWN_ERROR)
| StarcoderdataPython |
155506 | from datadog import initialize, api
# Datadog API credentials; replace the placeholders with real keys.
options = {
    'api_key': '<YOUR_API_KEY>',
    'app_key': '<YOUR_APP_KEY>'
}
initialize(**options)
# Rename an existing dashboard list by its numeric id.
list_id = 4741
name = 'My Updated Dashboard List'
api.DashboardList.update(list_id, name=name)
| StarcoderdataPython |
1995669 | <gh_stars>1-10
# -*- coding: utf-8 -*-
# Copyright (c) 2019, profmagija and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
import frappe.utils.data as fd
from frappe.model.document import Document
# Frappe document names of the three meal Activity records -- presumably
# fixed ids seeded in the target site's database (TODO confirm).
BREAKFAST = 'ACT-00001'
LUNCH = 'ACT-00002'
DINNER = 'ACT-00003'
MEALS = [BREAKFAST, LUNCH, DINNER]
class Course(Document):
    """Frappe DocType controller for a Course with scheduled activities."""

    def autoname(self):
        # Document name is the concatenation of the identifying fields;
        # assumes all four are strings -- TODO confirm field types.
        self.name = self.program + self.part + self.abcd + self.year

    def validate(self):
        """Validate dates, then normalize the activities child table:
        sort chronologically, stamp each row's display date and re-index."""
        if fd.date_diff(self.end_date, self.start_date) < 0:
            frappe.throw(_("End date can't be before start date"))
        self.activities.sort(key=lambda x: x.time_from)
        for act in self.activities:
            act.activity_date = fd.formatdate(act.time_from)
        # Child-table rows must carry a 1-based idx matching their order.
        for i in range(len(self.activities)):
            self.activities[i].idx = i + 1

    @frappe.whitelist()
    def create_meals(self):
        """Delete any existing meal activities and regenerate the standard
        breakfast/lunch/dinner schedule spanning start_date..end_date."""
        days = fd.date_diff(self.end_date, self.start_date)
        # Iterate over a copy so removal from self.activities is safe.
        acts = self.activities[:]
        for a in acts:
            if a.activity in MEALS:
                self.activities.remove(a)
                frappe.delete_doc(a.doctype, a.name)
        def cact(name, date, hr, duration=1):
            # Append one Course Activity row starting at `date` + `hr` hours.
            doc = frappe.new_doc('Course Activity')
            doc.activity = name
            doc.time_from = fd.add_to_date(date, hours=hr, as_string=True, as_datetime=True)
            doc.time_to = fd.add_to_date(date, hours=hr + duration, as_string=True, as_datetime=True)
            doc.color = '#aaaaaa'
            doc.parent = self.name
            doc.parentfield = 'activities'
            self.activities.append(doc)
        # First day: lunch always; dinner plus final-day breakfast only for
        # multi-day courses.  Middle days get all three meals.
        cact(LUNCH, self.start_date, 14)
        if days > 0:
            cact(DINNER, self.start_date, 20)
            cact(BREAKFAST, self.end_date, 8.5, 0.5)
        for i in range(1, days):
            date = fd.add_days(self.start_date, i)
            cact(BREAKFAST, date, 9)
            cact(LUNCH, date, 14)
            cact(DINNER, date, 20)
        self.save()
| StarcoderdataPython |
9667000 | from autoslug import AutoSlugField
from django.contrib.auth.models import User
from django.contrib.postgres.fields import JSONField, ArrayField
from django.db import models, transaction
from django.db.models import Q
from django.utils.functional import cached_property
from django.utils.text import slugify
from django.utils.translation import gettext_lazy as _
from corehq.apps.domain.utils import domain_name_stop_words
from corehq.apps.registry.exceptions import RegistryAccessDenied
from corehq.apps.registry.schema import RegistrySchema, REGISTRY_JSON_SCHEMA
from corehq.util.validation import JSONSchemaValidator
def slugify_remove_stops(text):
    """Slugify *text* and drop any domain-name stop words from the result."""
    stop_words = domain_name_stop_words()
    kept = []
    for token in slugify(text).split('-'):
        if token not in stop_words:
            kept.append(token)
    return "-".join(kept)
class RegistryManager(models.Manager):
    """Manager encapsulating the registry ownership / visibility / access queries."""

    def owned_by_domain(self, domain, is_active=None):
        """Return registries owned by ``domain``; optionally filter by active state."""
        query = self.filter(domain=domain)
        if is_active is not None:
            query = query.filter(is_active=is_active)
        return query

    def visible_to_domain(self, domain):
        """Return list of all registries that are visible to the domain. This includes
        registries that are owned by the domain as well as those they have been invited
        to participate in
        """
        return (
            self.filter(Q(domain=domain) | Q(invitations__domain=domain))
            .distinct()  # avoid getting duplicate registries
            .prefetch_related("invitations")
        )

    def accessible_to_domain(self, domain, slug=None, has_grants=False):
        """
        :param domain: Domain to get registries for
        :param slug: (optional) Filter registries by slug
        :param has_grants: (optional) Set to 'True' to only include registries for which the domain has grants
        """
        # Access requires an *accepted* invitation on an active registry.
        query = (
            self.filter(is_active=True)
            .filter(
                invitations__domain=domain,
                invitations__status=RegistryInvitation.STATUS_ACCEPTED,
            )
        )
        if slug:
            query = query.filter(slug=slug)
        if has_grants:
            # Postgres ArrayField containment: domain appears in to_domains.
            query = query.filter(grants__to_domains__contains=[domain])
        return query
class DataRegistry(models.Model):
    """Top level model that represents a Data Registry.
    A registry is owned by a domain but is used across domains
    based on invitations that are sent from the owning domain.
    """
    domain = models.CharField(max_length=255)  # owning domain
    name = models.CharField(max_length=255)
    # slug used for referencing the registry in app suite files, APIs etc.
    slug = AutoSlugField(populate_from='name', slugify=slugify_remove_stops, unique=True)
    description = models.TextField(blank=True)
    is_active = models.BooleanField(default=True)
    # [{"case_type": "X"}, {"case_type": "Y"}]
    schema = JSONField(null=True, blank=True, validators=[JSONSchemaValidator(REGISTRY_JSON_SCHEMA)])
    created_on = models.DateTimeField(auto_now_add=True)
    modified_on = models.DateTimeField(auto_now=True)
    objects = RegistryManager()

    def __repr__(self):
        return f"DataRegistry(id='{self.id}', domain='{self.domain}', slug='{self.slug}')"

    @classmethod
    @transaction.atomic
    def create(cls, user, domain, name, **kwargs):
        """Create a registry and auto-accept an invitation for the owning domain.

        Runs in a single transaction so the registry never exists without its
        owner's invitation; the creation is audit-logged against ``user``.
        """
        registry = DataRegistry.objects.create(domain=domain, name=name, **kwargs)
        # creating domain is automatically added to the registry
        invitation = registry.invitations.create(
            domain=domain, status=RegistryInvitation.STATUS_ACCEPTED
        )
        registry.logger.invitation_added(user, invitation)
        return registry

    @transaction.atomic
    def activate(self, user):
        """Mark the registry active and audit-log the change."""
        self.is_active = True
        self.save()
        self.logger.registry_activated(user)

    @transaction.atomic
    def deactivate(self, user):
        """Mark the registry inactive and audit-log the change."""
        self.is_active = False
        self.save()
        self.logger.registry_deactivated(user)

    @cached_property
    def wrapped_schema(self):
        # Cached: the schema JSON is wrapped once per instance.
        return RegistrySchema(self.schema)

    def get_granted_domains(self, domain):
        """Return the set of domains that have granted ``domain`` access to
        their data in this registry.  Raises RegistryAccessDenied if
        ``domain`` itself has no access."""
        self.check_domain_has_access(domain)
        return set(
            self.grants.filter(to_domains__contains=[domain])
            .values_list('from_domain', flat=True)
        )

    def get_participating_domains(self):
        """Return the set of domains with an *accepted* invitation."""
        return set(self.invitations.filter(
            status=RegistryInvitation.STATUS_ACCEPTED,
        ).values_list('domain', flat=True))

    def check_domain_has_access(self, domain):
        """Raise RegistryAccessDenied unless the registry is active and
        ``domain`` holds an accepted invitation; returns True otherwise."""
        if not self.is_active:
            raise RegistryAccessDenied()
        invites = self.invitations.filter(domain=domain)
        if not invites:
            raise RegistryAccessDenied()
        invite = invites[0]
        if invite.status != RegistryInvitation.STATUS_ACCEPTED:
            raise RegistryAccessDenied()
        return True

    def check_ownership(self, domain):
        """Raise RegistryAccessDenied unless ``domain`` owns this registry."""
        if self.domain != domain:
            raise RegistryAccessDenied()

    @cached_property
    def logger(self):
        # One audit helper per in-memory registry instance.
        return RegistryAuditHelper(self)
class RegistryInvitation(models.Model):
    """Invitations are the mechanism used to determine access to the registry.
    The owning domain creates invitations which can be accepted or rejected by
    the invitees.
    Without an accepted invitation a domain can not access any features of the
    registry."""
    STATUS_PENDING = "pending"
    STATUS_ACCEPTED = "accepted"
    STATUS_REJECTED = "rejected"
    STATUS_CHOICES = (
        (STATUS_PENDING, _("Pending")),
        (STATUS_ACCEPTED, _("Accepted")),
        (STATUS_REJECTED, _("Rejected")),
    )
    registry = models.ForeignKey("DataRegistry", related_name="invitations", on_delete=models.CASCADE)
    domain = models.CharField(max_length=255)  # the invited domain
    created_on = models.DateTimeField(auto_now_add=True)
    modified_on = models.DateTimeField(auto_now=True)
    status = models.CharField(max_length=32, choices=STATUS_CHOICES, default=STATUS_PENDING)

    class Meta:
        # A domain can hold at most one invitation per registry.
        unique_together = ("registry", "domain")

    def __repr__(self):
        return (f"RegistryInvitation(registry_id='{self.registry_id}', "
                f"domain='{self.domain}', status='{self.status}')")

    @transaction.atomic
    def accept(self, user):
        """Accept the invitation and audit-log the change as ``user``."""
        self.status = self.STATUS_ACCEPTED
        self.save()
        self.registry.logger.invitation_accepted(user, self)

    @transaction.atomic
    def reject(self, user):
        """Reject the invitation and audit-log the change as ``user``."""
        self.status = self.STATUS_REJECTED
        self.save()
        self.registry.logger.invitation_rejected(user, self)

    @property
    def is_accepted(self):
        return self.status == self.STATUS_ACCEPTED

    @property
    def is_rejected(self):
        return self.status == self.STATUS_REJECTED

    def to_json(self):
        """Serialize for API responses (datetimes left as datetime objects)."""
        return {
            "id": self.id,
            "registry_id": self.registry_id,
            "domain": self.domain,
            "created_on": self.created_on,
            "modified_on": self.modified_on,
            "status": self.status,
        }
class RegistryGrant(models.Model):
    """Grants provide the model for giving access to data. The ownership of the grant
    lies with the granting domain which can grant / revoke access to it's data to
    any other domains that are participating in the registry (have been invited).
    """
    registry = models.ForeignKey("DataRegistry", related_name="grants", on_delete=models.CASCADE)
    from_domain = models.CharField(max_length=255)  # the domain sharing its data
    # Domains allowed to read from_domain's data (Postgres array field).
    to_domains = ArrayField(models.CharField(max_length=255))

    def to_json(self):
        """Serialize for API responses; copies to_domains into a plain list."""
        return {
            "id": self.id,
            "registry_id": self.registry_id,
            "from_domain": self.from_domain,
            "to_domains": list(self.to_domains)
        }

    def __repr__(self):
        return (f"RegistryGrant(registry_id='{self.registry_id}', "
                f"from_domain='{self.from_domain}', to_domains='{self.to_domains}')")
class RegistryPermission(models.Model):
    """This model controls which users in a domain can access the data registry."""
    registry = models.ForeignKey("DataRegistry", related_name="permissions", on_delete=models.CASCADE)
    domain = models.CharField(max_length=255)
    # Presumably the id of a user group whose members get read-only access;
    # null meaning "no restriction" or "no access" -- TODO confirm semantics
    # against the views that consume this field.
    read_only_group_id = models.CharField(max_length=255, null=True)

    class Meta:
        unique_together = ('registry', 'domain')

    def __repr__(self):
        return (f"RegistryPermission(registry_id='{self.registry_id}', "
                f"domain='{self.domain}', read_only_group_id='{self.read_only_group_id}')")
class RegistryAuditLog(models.Model):
    """Audit log model used to store logs of user level interactions
    (not system level).
    """
    ACTION_ACTIVATED = "activated"
    ACTION_DEACTIVATED = "deactivated"
    ACTION_INVITATION_ADDED = "invitation_added"
    ACTION_INVITATION_REMOVED = "invitation_removed"
    ACTION_INVITATION_ACCEPTED = "invitation_accepted"
    ACTION_INVITATION_REJECTED = "invitation_rejected"
    ACTION_GRANT_ADDED = "grant_added"
    ACTION_GRANT_REMOVED = "grant_removed"
    ACTION_SCHEMA_CHANGED = "schema"
    ACTION_DATA_ACCESSED = "data_accessed"
    # Actions that a participating (non-owner) domain can perform.
    NON_OWNER_ACTION_CHOICES = (
        (ACTION_INVITATION_ACCEPTED, _("Invitation Accepted")),
        (ACTION_INVITATION_REJECTED, _("Invitation Rejected")),
        (ACTION_GRANT_ADDED, _("Grant created")),
        (ACTION_GRANT_REMOVED, _("Grant removed")),
        (ACTION_DATA_ACCESSED, _("Data Accessed")),
    )
    ACTION_CHOICES = (
        (ACTION_ACTIVATED, _("Registry Activated")),
        (ACTION_DEACTIVATED, _("Registry De-activated")),
        (ACTION_INVITATION_ADDED, _("Invitation Added")),
        (ACTION_INVITATION_REMOVED, _("Invitation Revoked")),
        (ACTION_SCHEMA_CHANGED, _("Schema Changed")),
    ) + NON_OWNER_ACTION_CHOICES
    # Type tags for the polymorphic related_object_id reference below.
    RELATED_OBJECT_REGISTRY = "registry"
    RELATED_OBJECT_INVITATION = "invitation"
    RELATED_OBJECT_GRANT = "grant"
    RELATED_OBJECT_UCR = "ucr"
    RELATED_OBJECT_APPLICATION = "application"  # case search
    RELATED_OBJECT_CHOICES = (
        (RELATED_OBJECT_REGISTRY, _("Data Registry")),
        (RELATED_OBJECT_INVITATION, _("Invitation")),
        (RELATED_OBJECT_GRANT, _("Grant")),
        (RELATED_OBJECT_UCR, _("Report")),
        (RELATED_OBJECT_APPLICATION, _("Case Search")),
    )
    registry = models.ForeignKey("DataRegistry", related_name="audit_logs", on_delete=models.CASCADE)
    date = models.DateTimeField(auto_now_add=True, db_index=True)
    action = models.CharField(max_length=32, choices=ACTION_CHOICES)
    domain = models.CharField(max_length=255, db_index=True)
    user = models.ForeignKey(User, related_name="registry_actions", on_delete=models.CASCADE)
    related_object_id = models.CharField(max_length=36)
    related_object_type = models.CharField(max_length=32, choices=RELATED_OBJECT_CHOICES, db_index=True)
    detail = JSONField(null=True)

    class Meta:
        # NOTE(review): `domain` and `related_object_type` already declare
        # db_index=True above, so the explicit indexes on those fields look
        # redundant (two indexes per column) -- confirm against migrations.
        indexes = [
            models.Index(fields=("domain",), name="registryauditlog_domain_idx"),
            models.Index(fields=("action",), name="registryauditlog_action_idx"),
            models.Index(
                fields=("related_object_type",),
                name="registryauditlog_rel_obj_idx"
            ),
        ]

    def to_json(self):
        """Serialize for API responses."""
        return {
            "registry_slug": self.registry.slug,
            "date": self.date,
            "action": self.action,
            "action_display": self.get_action_display(),
            "domain": self.domain,
            "user": self.user.username,
        }
class RegistryAuditHelper:
    """Records RegistryAuditLog entries for user-level events on a registry.

    Public methods are thin wrappers that translate an event into one
    ``RegistryAuditLog`` row; the private ``_log_*`` helpers do the writes
    and return the created log entry.
    """

    def __init__(self, registry):
        self.registry = registry

    def registry_activated(self, user):
        self._log_registry_activated_deactivated(user, is_activated=True)

    def registry_deactivated(self, user):
        self._log_registry_activated_deactivated(user, is_activated=False)

    def invitation_accepted(self, user, invitation):
        return self._log_invitation_accepted_rejected(user, invitation, is_accepted=True)

    def invitation_rejected(self, user, invitation):
        return self._log_invitation_accepted_rejected(user, invitation, is_accepted=False)

    def invitation_added(self, user, invitation):
        return self._log_invitation_added_removed(user, invitation.id, invitation, is_added=True)

    def invitation_removed(self, user, invitation_id, invitation):
        # `invitation_id` is passed separately since the invitation row may
        # already be deleted by the time this is logged.
        return self._log_invitation_added_removed(user, invitation_id, invitation, is_added=False)

    def grant_added(self, user, grant):
        return self._log_grant_added_removed(user, grant.id, grant, is_added=True)

    def grant_removed(self, user, grant_id, grant):
        return self._log_grant_added_removed(user, grant_id, grant, is_added=False)

    def schema_changed(self, user, new, old):
        """Log a schema change, storing both old and new schema for diffing."""
        return RegistryAuditLog.objects.create(
            registry=self.registry,
            user=user,
            action=RegistryAuditLog.ACTION_SCHEMA_CHANGED,
            domain=self.registry.domain,
            related_object_id=self.registry.id,
            related_object_type=RegistryAuditLog.RELATED_OBJECT_REGISTRY,
            detail={
                "new_schema": new,
                "old_schema": old,
            }
        )

    def data_accessed(self, user, domain, related_object, filters=None):
        """Log a data access via a UCR report or an application (case search).

        :raises ValueError: if ``related_object`` is missing or of an
            unsupported doc_type.
        """
        if not related_object or not hasattr(related_object, "doc_type"):
            raise ValueError("Unexpected related object")
        if related_object.doc_type == "ReportConfiguration":
            related_object_id = related_object.get_id
            related_object_type = RegistryAuditLog.RELATED_OBJECT_UCR
        elif related_object.doc_type in ["Application", "LinkedApplication"]:
            related_object_id = related_object.get_id
            related_object_type = RegistryAuditLog.RELATED_OBJECT_APPLICATION
        else:
            raise ValueError(f"Unexpected related object type: {related_object.doc_type}")
        return RegistryAuditLog.objects.create(
            registry=self.registry,
            user=user,
            action=RegistryAuditLog.ACTION_DATA_ACCESSED,
            domain=domain,
            related_object_id=related_object_id,
            related_object_type=related_object_type,
            detail=filters
        )

    def _log_registry_activated_deactivated(self, user, is_activated):
        return RegistryAuditLog.objects.create(
            registry=self.registry,
            user=user,
            action=RegistryAuditLog.ACTION_ACTIVATED if is_activated else RegistryAuditLog.ACTION_DEACTIVATED,
            domain=self.registry.domain,
            related_object_id=self.registry.id,
            related_object_type=RegistryAuditLog.RELATED_OBJECT_REGISTRY,
        )

    def _log_invitation_added_removed(self, user, invitation_id, invitation, is_added):
        if is_added:
            action = RegistryAuditLog.ACTION_INVITATION_ADDED
        else:
            action = RegistryAuditLog.ACTION_INVITATION_REMOVED
        return RegistryAuditLog.objects.create(
            registry=self.registry,
            user=user,
            action=action,
            domain=invitation.domain,
            related_object_id=invitation_id,
            related_object_type=RegistryAuditLog.RELATED_OBJECT_INVITATION,
            # On removal, record what state the invitation was in.
            detail={} if is_added else {"invitation_status": invitation.status}
        )

    def _log_invitation_accepted_rejected(self, user, invitation, is_accepted):
        if is_accepted:
            action = RegistryAuditLog.ACTION_INVITATION_ACCEPTED
        else:
            action = RegistryAuditLog.ACTION_INVITATION_REJECTED
        return RegistryAuditLog.objects.create(
            registry=self.registry,
            user=user,
            action=action,
            domain=invitation.domain,
            related_object_id=invitation.id,
            related_object_type=RegistryAuditLog.RELATED_OBJECT_INVITATION,
        )

    def _log_grant_added_removed(self, user, grant_id, grant, is_added):
        # Fix: return the created log entry like the sibling _log_* helpers,
        # so grant_added()/grant_removed() return it instead of None.
        return RegistryAuditLog.objects.create(
            registry=self.registry,
            user=user,
            action=RegistryAuditLog.ACTION_GRANT_ADDED if is_added else RegistryAuditLog.ACTION_GRANT_REMOVED,
            domain=grant.from_domain,
            related_object_id=grant_id,
            related_object_type=RegistryAuditLog.RELATED_OBJECT_GRANT,
            detail={"to_domains": grant.to_domains}
        )
| StarcoderdataPython |
124246 | from django.contrib import admin
# Register your models here.
from .models import Project
admin.site.register(Project) | StarcoderdataPython |
6664781 | from polyphony import testbench
def for15(x):
    """Return sum(range(x)), i.e. 0 + 1 + ... + (x - 1).

    NOTE: `sum` shadows the builtin, and the `x = 5` inside the loop is dead
    (range(0, x) was already evaluated) -- this appears deliberate: the file
    is a polyphony compiler testbench, presumably checking that reassigning
    the loop bound mid-loop does not change the generated hardware's result.
    """
    sum = 0
    for i in range(0, x):
        sum += i
        x = 5
    return sum
@testbench
def test():
    # Expected values are sum(range(n)): 0 -> 0, 4 -> 6, 5 -> 10.
    assert 0 == for15(0)
    assert 6 == for15(4)
    assert 10 == for15(5)
test()
| StarcoderdataPython |
4899091 | <reponame>Shikanime/tezos<filename>docs/_extensions/tezos_custom_roles.py
from docutils import nodes
import os
import os.path
import re
def setup(app):
    # Sphinx extension entry point: register the custom Tezos roles.
    # 'package', 'package-name' and 'package-src' all dispatch to
    # package_role, which branches on the role name it was invoked as.
    app.add_role('package', package_role)
    app.add_role('package-name', package_role)
    app.add_role('package-src', package_role)
    app.add_role('opam', opam_role)
    app.add_role('src', src_role)
def find_dot_opam(name):
    """Find the directory (relative to the parent directory '..') that
    contains ``<name>.opam``.

    :param name: opam package name, without the ``.opam`` extension.
    :return: path of the containing directory with the leading '..'
        component removed (empty string if found directly in '..').
    :raises ValueError: if no such opam file exists under '..'.
    """
    for path, dirs, files in os.walk('..'):
        if name + '.opam' in files:
            # Bug fix: the previous `path.lstrip('../')` stripped *characters*
            # ('.' and '/'), not a prefix, so a first component like
            # '.dotdir' was mangled to 'dotdir'.  relpath strips only the
            # leading '..' component.
            rel = os.path.relpath(path, '..')
            return '' if rel == '.' else rel
    raise ValueError('opam file ' + name + '.opam does not exist in the odoc')
def package_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Role handler for :package:, :package-name: and :package-src:.

    Accepts ``pkg`` or ``pkg<label>`` (link target in group 1, display text
    in group 2).  Links to the locally built odoc API docs when they exist,
    otherwise to the package's source tree on GitLab.
    NOTE: options/content use mutable defaults per the docutils role
    signature convention; they are never mutated here.
    """
    # Nesting depth of the current document relative to the docs root,
    # used to prefix the relative API URL with the right number of '../'.
    rel_lvl = inliner.document.current_source.replace(os.getcwd(),'').count('/')
    parts = re.match("^([^<>]*)<([^<>]*)>$", text)
    if parts:
        text = parts.group(2)
        lib = parts.group(1)
    else:
        lib = text
    src = find_dot_opam(lib)
    # CI variables select branch/repo; defaults point at the public repo.
    branch = os.environ.get('CI_COMMIT_REF_NAME', 'master')
    project_url = os.environ.get('CI_PROJECT_URL', 'https://gitlab.com/tezos/tezos')
    src_url = project_url + "/tree/" + branch + "/" + src
    if os.path.isdir('_build/api/odoc/_html/'+lib):
        # odoc may nest the module page one level deeper under the
        # capitalized module name.
        if os.path.isdir(os.path.join('_build','api','odoc','_html',lib,lib.replace('-','_').capitalize())):
            lib = lib + '/' + lib.replace('-','_').capitalize()
        url = "api/api-inline.html#" + lib + '/index.html'
        for i in range(1,rel_lvl):
            url = '../' + url
    else:
        url = src_url
    # NOTE(review): if invoked under a role name other than these three,
    # `node` is unbound and the return raises NameError -- confirm setup()
    # only registers the three expected names.
    if name == 'package':
        node = nodes.reference(rawtext, text, refuri=url, **options)
    elif name == 'package-name':
        node = nodes.literal(text, text)
    elif name == 'package-src':
        node = nodes.reference(rawtext, src, refuri=src_url, **options)
    return [node], []
def opam_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Role handler for :opam: -- link a package to opam.ocaml.org.

    Accepts ``pkg`` or ``pkg<label>`` (link target in group 1, display text
    in group 2).  A versioned name such as ``pkg.1.2`` links to the
    versioned page nested under the base package.
    NOTE: options/content use mutable defaults per the docutils role
    signature convention; they are never mutated here.
    """
    # (Removed an unused `rel_lvl` computation that also needlessly called
    # os.getcwd(); opam links are absolute, so the nesting level is irrelevant.)
    parts = re.match("^([^<>]*)<([^<>]*)>$", text)
    if parts:
        text = parts.group(2)
        lib = parts.group(1)
    else:
        lib = text
    tagged = re.match('([^.]+)[.].*', lib)
    if tagged:
        url = "https://opam.ocaml.org/packages/" + tagged.group(1) + "/" + lib
    else:
        url = "https://opam.ocaml.org/packages/" + lib
    node = nodes.reference(rawtext, text, refuri=url, **options)
    return [node], []
def src_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Role handler for :src: -- link a repository path to GitLab.

    Accepts ``path`` or ``path<label>`` (link target in group 1, display
    text in group 2).  The branch and project URL come from the CI
    environment, defaulting to the public tezos repo on master.
    NOTE: options/content use mutable defaults per the docutils role
    signature convention; they are never mutated here.
    """
    # (Removed an unused `rel_lvl` computation and a no-op `text = text`.)
    parts = re.match("^([^<>]*)<([^<>]*)>$", text)
    if parts:
        text = parts.group(2)
        src = parts.group(1)
    else:
        src = text
    branch = os.environ.get('CI_COMMIT_REF_NAME', 'master')
    project_url = os.environ.get('CI_PROJECT_URL', 'https://gitlab.com/tezos/tezos')
    url = project_url + "/tree/" + branch + "/" + src
    node = nodes.reference(rawtext, text, refuri=url, **options)
    return [node], []
3426510 | import json
from django.core.management.base import BaseCommand
from safe_transaction_service.contracts.tx_decoder import get_db_tx_decoder
from ...models import MultisigTransaction
class Command(BaseCommand):
    """Management command that exports multisig transaction data (for txs
    with a known origin and an on-chain ethereum tx) to a pipe-delimited
    text file."""
    help = "Exports multisig tx data"

    def add_arguments(self, parser):
        parser.add_argument("--file-name", help="Filename", default="result.csv")

    def handle(self, *args, **options):
        file_name = options["file_name"]
        queryset = MultisigTransaction.objects.exclude(origin=None).exclude(
            ethereum_tx=None
        )
        count = queryset.count()
        self.stdout.write(
            self.style.SUCCESS(
                # Fix: reuse `count` instead of a second queryset.count(),
                # which re-issued the COUNT query against the database.
                f"Start exporting of {count} "
                f"multisig tx data to {file_name}"
            )
        )
        if count:
            with open(file_name, "w") as f:
                decoder = get_db_tx_decoder()
                # Header row.  NOTE(review): fields such as `origin` may
                # themselves contain '|', which would break the delimiting;
                # consider the csv module with delimiter='|' and quoting.
                f.write(
                    "|".join(
                        [
                            "Execution date",
                            "tx-hash",
                            "safe-address",
                            "gas_price",
                            "gas_limit",
                            "gas_used",
                            "to",
                            "failed",
                            "origin",
                            "decodedData",
                        ]
                    )
                    + "\n"
                )
                # select_related avoids one query per row for the tx/block.
                for m in queryset.select_related("ethereum_tx__block"):
                    ethereum_tx = m.ethereum_tx
                    f.write(
                        "|".join(
                            [
                                str(m.execution_date),
                                m.ethereum_tx_id,
                                m.safe,
                                str(ethereum_tx.gas_price),
                                str(m.ethereum_tx.gas),
                                str(m.ethereum_tx.gas_used),
                                m.to,
                                str(m.failed),
                                m.origin,
                                json.dumps(decoder.get_data_decoded(m.data.tobytes()))
                                if m.data
                                else "",
                            ]
                        )
                        + "\n"
                    )
        self.stdout.write(
            self.style.SUCCESS(f"Multisig tx data was exported to {file_name}")
        )
5171764 | <reponame>koliupy/loldib<filename>loldib/getratings/models/NA/na_trundle/na_trundle_bot.py
from getratings.models.ratings import Ratings
# Auto-generated placeholder Ratings subclasses: one empty class per
# (region, champion, lane, opposing champion) combination.  The classes in
# this module (NA_Trundle_Bot_*) all follow this same pattern.
class NA_Trundle_Bot_Aatrox(Ratings):
    pass
class NA_Trundle_Bot_Ahri(Ratings):
pass
class NA_Trundle_Bot_Akali(Ratings):
pass
class NA_Trundle_Bot_Alistar(Ratings):
pass
class NA_Trundle_Bot_Amumu(Ratings):
pass
class NA_Trundle_Bot_Anivia(Ratings):
pass
class NA_Trundle_Bot_Annie(Ratings):
pass
class NA_Trundle_Bot_Ashe(Ratings):
pass
class NA_Trundle_Bot_AurelionSol(Ratings):
pass
class NA_Trundle_Bot_Azir(Ratings):
pass
class NA_Trundle_Bot_Bard(Ratings):
pass
class NA_Trundle_Bot_Blitzcrank(Ratings):
pass
class NA_Trundle_Bot_Brand(Ratings):
pass
class NA_Trundle_Bot_Braum(Ratings):
pass
class NA_Trundle_Bot_Caitlyn(Ratings):
pass
class NA_Trundle_Bot_Camille(Ratings):
pass
class NA_Trundle_Bot_Cassiopeia(Ratings):
pass
class NA_Trundle_Bot_Chogath(Ratings):
pass
class NA_Trundle_Bot_Corki(Ratings):
pass
class NA_Trundle_Bot_Darius(Ratings):
pass
class NA_Trundle_Bot_Diana(Ratings):
pass
class NA_Trundle_Bot_Draven(Ratings):
pass
class NA_Trundle_Bot_DrMundo(Ratings):
pass
class NA_Trundle_Bot_Ekko(Ratings):
pass
class NA_Trundle_Bot_Elise(Ratings):
pass
class NA_Trundle_Bot_Evelynn(Ratings):
pass
class NA_Trundle_Bot_Ezreal(Ratings):
pass
class NA_Trundle_Bot_Fiddlesticks(Ratings):
pass
class NA_Trundle_Bot_Fiora(Ratings):
pass
class NA_Trundle_Bot_Fizz(Ratings):
pass
class NA_Trundle_Bot_Galio(Ratings):
pass
class NA_Trundle_Bot_Gangplank(Ratings):
pass
class NA_Trundle_Bot_Garen(Ratings):
pass
class NA_Trundle_Bot_Gnar(Ratings):
pass
class NA_Trundle_Bot_Gragas(Ratings):
pass
class NA_Trundle_Bot_Graves(Ratings):
pass
class NA_Trundle_Bot_Hecarim(Ratings):
pass
class NA_Trundle_Bot_Heimerdinger(Ratings):
pass
class NA_Trundle_Bot_Illaoi(Ratings):
pass
class NA_Trundle_Bot_Irelia(Ratings):
pass
class NA_Trundle_Bot_Ivern(Ratings):
pass
class NA_Trundle_Bot_Janna(Ratings):
pass
class NA_Trundle_Bot_JarvanIV(Ratings):
pass
class NA_Trundle_Bot_Jax(Ratings):
pass
class NA_Trundle_Bot_Jayce(Ratings):
pass
class NA_Trundle_Bot_Jhin(Ratings):
pass
class NA_Trundle_Bot_Jinx(Ratings):
pass
class NA_Trundle_Bot_Kalista(Ratings):
pass
class NA_Trundle_Bot_Karma(Ratings):
pass
class NA_Trundle_Bot_Karthus(Ratings):
pass
class NA_Trundle_Bot_Kassadin(Ratings):
pass
class NA_Trundle_Bot_Katarina(Ratings):
pass
class NA_Trundle_Bot_Kayle(Ratings):
pass
class NA_Trundle_Bot_Kayn(Ratings):
pass
class NA_Trundle_Bot_Kennen(Ratings):
pass
class NA_Trundle_Bot_Khazix(Ratings):
pass
class NA_Trundle_Bot_Kindred(Ratings):
pass
class NA_Trundle_Bot_Kled(Ratings):
pass
class NA_Trundle_Bot_KogMaw(Ratings):
pass
class NA_Trundle_Bot_Leblanc(Ratings):
pass
class NA_Trundle_Bot_LeeSin(Ratings):
pass
class NA_Trundle_Bot_Leona(Ratings):
pass
class NA_Trundle_Bot_Lissandra(Ratings):
pass
class NA_Trundle_Bot_Lucian(Ratings):
pass
class NA_Trundle_Bot_Lulu(Ratings):
pass
class NA_Trundle_Bot_Lux(Ratings):
pass
class NA_Trundle_Bot_Malphite(Ratings):
pass
class NA_Trundle_Bot_Malzahar(Ratings):
pass
class NA_Trundle_Bot_Maokai(Ratings):
pass
class NA_Trundle_Bot_MasterYi(Ratings):
pass
class NA_Trundle_Bot_MissFortune(Ratings):
pass
class NA_Trundle_Bot_MonkeyKing(Ratings):
pass
class NA_Trundle_Bot_Mordekaiser(Ratings):
pass
class NA_Trundle_Bot_Morgana(Ratings):
pass
class NA_Trundle_Bot_Nami(Ratings):
pass
class NA_Trundle_Bot_Nasus(Ratings):
pass
class NA_Trundle_Bot_Nautilus(Ratings):
pass
class NA_Trundle_Bot_Nidalee(Ratings):
pass
class NA_Trundle_Bot_Nocturne(Ratings):
pass
class NA_Trundle_Bot_Nunu(Ratings):
pass
class NA_Trundle_Bot_Olaf(Ratings):
pass
class NA_Trundle_Bot_Orianna(Ratings):
pass
class NA_Trundle_Bot_Ornn(Ratings):
pass
class NA_Trundle_Bot_Pantheon(Ratings):
pass
class NA_Trundle_Bot_Poppy(Ratings):
pass
class NA_Trundle_Bot_Quinn(Ratings):
pass
class NA_Trundle_Bot_Rakan(Ratings):
pass
class NA_Trundle_Bot_Rammus(Ratings):
pass
class NA_Trundle_Bot_RekSai(Ratings):
pass
class NA_Trundle_Bot_Renekton(Ratings):
pass
class NA_Trundle_Bot_Rengar(Ratings):
pass
class NA_Trundle_Bot_Riven(Ratings):
pass
class NA_Trundle_Bot_Rumble(Ratings):
pass
class NA_Trundle_Bot_Ryze(Ratings):
pass
class NA_Trundle_Bot_Sejuani(Ratings):
pass
class NA_Trundle_Bot_Shaco(Ratings):
pass
class NA_Trundle_Bot_Shen(Ratings):
pass
class NA_Trundle_Bot_Shyvana(Ratings):
pass
class NA_Trundle_Bot_Singed(Ratings):
pass
class NA_Trundle_Bot_Sion(Ratings):
pass
class NA_Trundle_Bot_Sivir(Ratings):
pass
class NA_Trundle_Bot_Skarner(Ratings):
pass
class NA_Trundle_Bot_Sona(Ratings):
pass
class NA_Trundle_Bot_Soraka(Ratings):
pass
class NA_Trundle_Bot_Swain(Ratings):
pass
class NA_Trundle_Bot_Syndra(Ratings):
pass
class NA_Trundle_Bot_TahmKench(Ratings):
pass
class NA_Trundle_Bot_Taliyah(Ratings):
pass
class NA_Trundle_Bot_Talon(Ratings):
pass
class NA_Trundle_Bot_Taric(Ratings):
pass
class NA_Trundle_Bot_Teemo(Ratings):
pass
class NA_Trundle_Bot_Thresh(Ratings):
pass
class NA_Trundle_Bot_Tristana(Ratings):
pass
class NA_Trundle_Bot_Trundle(Ratings):
pass
class NA_Trundle_Bot_Tryndamere(Ratings):
pass
class NA_Trundle_Bot_TwistedFate(Ratings):
pass
class NA_Trundle_Bot_Twitch(Ratings):
pass
class NA_Trundle_Bot_Udyr(Ratings):
pass
class NA_Trundle_Bot_Urgot(Ratings):
pass
class NA_Trundle_Bot_Varus(Ratings):
pass
class NA_Trundle_Bot_Vayne(Ratings):
pass
class NA_Trundle_Bot_Veigar(Ratings):
pass
class NA_Trundle_Bot_Velkoz(Ratings):
pass
class NA_Trundle_Bot_Vi(Ratings):
pass
class NA_Trundle_Bot_Viktor(Ratings):
pass
class NA_Trundle_Bot_Vladimir(Ratings):
pass
class NA_Trundle_Bot_Volibear(Ratings):
pass
class NA_Trundle_Bot_Warwick(Ratings):
pass
class NA_Trundle_Bot_Xayah(Ratings):
pass
class NA_Trundle_Bot_Xerath(Ratings):
pass
class NA_Trundle_Bot_XinZhao(Ratings):
pass
class NA_Trundle_Bot_Yasuo(Ratings):
pass
class NA_Trundle_Bot_Yorick(Ratings):
pass
class NA_Trundle_Bot_Zac(Ratings):
pass
class NA_Trundle_Bot_Zed(Ratings):
pass
class NA_Trundle_Bot_Ziggs(Ratings):
pass
class NA_Trundle_Bot_Zilean(Ratings):
pass
class NA_Trundle_Bot_Zyra(Ratings):
pass
| StarcoderdataPython |
3370031 | <reponame>facade-technologies-inc/facile<gh_stars>1-10
# run is the most important function.
# one function for one checking algorithm and call all of them in run
# targetguimodel and aplmodel are the two things
# action is the parent object
# actionpipeline, component action and action wrapper all inherited from it
# actionpipeline could have component actions or actionpipelines in it
# action wrapper wrap around actionpipipeline or component action.
# So we only need to create one instance of an actionpipeline, but multiple wrappers refer to it.
from PySide2.QtCore import QThread, Slot, Signal
from data.apim.actionpipeline import ActionPipeline
from data.validatormessage import ValidatorMessage
from datetime import datetime
import data.statemachine as sm
class Validator(QThread):
    """
    Runs a series of validation algorithms over the user's action pipelines
    (the APIModel) and reports findings to the ValidatorView via signals.

    Every method whose name starts with ``algorithm_`` is discovered by
    reflection in :meth:`run`, so new checks can be added simply by defining
    another ``algorithm_*`` method.
    """
    # Emitted once per validation finding.
    sentMessage = Signal(ValidatorMessage)
    # Emitted with the percentage (0-100) of work completed.
    updateProgress = Signal(float)

    def __init__(self):
        """
        Construct the validator.
        """
        QThread.__init__(self)
        # Fix: initialize here so stop() is safe to call even if the thread
        # was never started (previously _running was first set in run()).
        self._running = False

    def run(self):
        """
        Run validation. Send validator message through emitting signal.

        :return: None
        :rtype: NoneType
        """
        self._running = True
        # TODO: acquire references to TGUIM and APIM
        # accumulate list of algorithm method objects:
        algorithms = [getattr(self, method_name) for method_name in dir(self)
                      if callable(getattr(self, method_name)) and
                      method_name.startswith("algorithm_")]
        for algoNum in range(len(algorithms)):
            algo = algorithms[algoNum]
            # Bail out between algorithms if stop() was requested.
            if not self._running:
                return
            self.updateProgress.emit(100 * (algoNum+1)/(len(algorithms)+1))
            algo()
        self.updateProgress.emit(100)

    def algorithm_at_least_one_actionpipeline(self):
        """
        Validate if there is at least one action pipeline. If not, send out error messages.

        :return: None
        :rtype: NoneType
        """
        tempAPIModel = sm.StateMachine.instance._project.getAPIModel()
        if len(tempAPIModel.getActionPipelines()) == 0:
            msg = "The project does not have any action pipelines"
            message = ValidatorMessage(msg, ValidatorMessage.Level.Error)
            self.sentMessage.emit(message)

    def algorithm_all_actions_inports_has_wiresin(self):
        """
        Validate if all of the required input ports for all the ACTIONS inside action pipelines have incoming wires.
        If not, send out error messages.

        :return: None
        :rtype: NoneType
        """
        tempAPIPipes = sm.StateMachine.instance._project.getAPIModel().getActionPipelines()
        for pipe in tempAPIPipes:
            for action in pipe.getActions():
                for port in action.getInputPorts():
                    if port.getInputWire() is None and not port.isOptional():
                        msgTemplate = "There is no input wire for the input port '{}' at action '{}'"
                        message = ValidatorMessage(msgTemplate.format(port.getName(), action.getName()), ValidatorMessage.Level.Error)
                        self.sentMessage.emit(message)

    def algorithm_all_actionpips_outport_has_wiresin(self):
        """
        Validate if all of the output ports for all the ACTION PIPELINES have incoming wires. If not, send out error messages.

        :return: None
        :rtype: NoneType
        """
        tempAPIPipes = sm.StateMachine.instance._project.getAPIModel().getActionPipelines()
        for pipe in tempAPIPipes:
            for port in pipe.getOutputPorts():
                if port.getInputWire() is None:
                    msgTemplate = "There is no input wire for the output port '{}' at action '{}'"
                    message = ValidatorMessage(msgTemplate.format(port.getName(), pipe.getName()), ValidatorMessage.Level.Error)
                    self.sentMessage.emit(message)

    def algorithm_no_duplicated_actionpips_name(self):
        """
        Validate if there are two or more action pipelines having the same name. If so, send out error messages.

        :return: None
        :rtype: NoneType
        """
        tempAPIPipes = sm.StateMachine.instance._project.getAPIModel().getActionPipelines()
        allActionNames = set()
        for action in tempAPIPipes:
            if action.getName() in allActionNames:
                msgTemplate = "There are multiple actions with the name '{}'"
                message = ValidatorMessage(msgTemplate.format(action.getName()), ValidatorMessage.Level.Error)
                self.sentMessage.emit(message)
            else:
                allActionNames.add(action.getName())

    def algorithm_all_actionpips_inport_wiresout(self):
        """
        Validate if all input ports for all ACTION PIPELINES having outgoing wires. If not, send out warning messages.

        :return: None
        :rtype: NoneType
        """
        tempAPIPipes = sm.StateMachine.instance._project.getAPIModel().getActionPipelines()
        for pipe in tempAPIPipes:
            for port in pipe.getInputPorts():
                if len(port.getOutputWires()) == 0:
                    msgTemplate = "The input port '{}' of action pipeline '{}' is not used."
                    message = ValidatorMessage(msgTemplate.format(port.getName(), pipe.getName()), ValidatorMessage.Level.Warning)
                    self.sentMessage.emit(message)

    def algorithm_actionpips_names_are_python_identifiers(self):
        """
        Validate if all action names are valid python identifiers. If not, send out warning messages.

        :return: None
        :rtype: NoneType
        """
        tempAPIPipes = sm.StateMachine.instance._project.getAPIModel().getActionPipelines()
        for action in tempAPIPipes:
            if not action.getName().isidentifier():
                msgTemplate = "'{}' is not a valid action pipeline name"
                message = ValidatorMessage(msgTemplate.format(action.getName()), ValidatorMessage.Level.Warning)
                self.sentMessage.emit(message)

    def algorithm_wire_not_connect_to_previous_action(self):
        """
        Validate if wire connect an action to a previous action. If so, send out an error message.

        :return: None
        :rtype: NoneType
        """
        tempAPIPipes = sm.StateMachine.instance._project.getAPIModel().getActionPipelines()
        for pipe in tempAPIPipes:
            for wire in pipe.getWireSet().getWires():
                srcAction = wire.getSourcePort().getAction()
                dstAction = wire.getDestPort().getAction()
                actions = pipe.getActions()
                # Wires to/from the pipeline's own ports are always allowed.
                if srcAction is pipe or dstAction is pipe:
                    continue
                srcActionIndex = actions.index(srcAction)
                dstActionIndex = actions.index(dstAction)
                if dstActionIndex < srcActionIndex:
                    msgTemplate = "A wire in action pipeline {} connects a action #{} to action #{}. Wires must be " \
                                  "going from earlier in the sequence to later in the sequence."
                    message = ValidatorMessage(msgTemplate.format(pipe.getName(), str(srcActionIndex), str(dstActionIndex)), ValidatorMessage.Level.Error)
                    self.sentMessage.emit(message)

    def algorithm_connected_ports_should_have_same_dataType(self):
        """
        Validate if two connected ports have same data type. If not, send out warning messages.

        :return: None
        :rtype: NoneType
        """
        tempAPIPipes = sm.StateMachine.instance._project.getAPIModel().getActionPipelines()
        for pipe in tempAPIPipes:
            for wire in pipe.getWireSet().getWires():
                srcPort = wire.getSourcePort()
                dstPort = wire.getDestPort()
                srcType = srcPort.getDataTypeStr()
                dstType = dstPort.getDataTypeStr()
                srcAct = srcPort.getAction()
                # Fix: was `srcPort.getAction()` (copy-paste), which made the
                # warning name the source action twice instead of the
                # destination action.
                dstAct = dstPort.getAction()
                if srcType != dstType:
                    msgTemplate = "In action pipeline {}, the wire connecting port '{}' of action '{}' to port '{}' " \
                                  "of action '{}' have conflicting data types of '{}' and '{}'"
                    msg = msgTemplate.format(pipe.getName(), srcPort.getName(), srcAct.getName(), dstPort.getName(),
                                             dstAct.getName(), srcType, dstType)
                    message = ValidatorMessage(msg, ValidatorMessage.Level.Warning)
                    self.sentMessage.emit(message)

    # TODO: what if I put actionpipeline inside of actionpipeline. Do more manual testing on it.
    @Slot()
    def stop(self):
        """
        Stop validator.

        :return: None
        :rtype: NoneType
        """
        self._running = False
1640830 | <filename>bua/caffe/modeling/box_regression.py<gh_stars>1-10
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import math
import torch
from detectron2.structures import Boxes
from typing import List, Tuple, Union
# Value for clamping large dw and dh predictions. The heuristic is that we clamp
# such that dw and dh are no larger than what would transform a 16px box into a
# 1000px box (based on a small anchor, 16px, and a typical image size, 1000px).
_DEFAULT_SCALE_CLAMP = math.log(1000.0 / 16)  # ~4.135; applied to dw/dh before exp()

__all__ = ["BUABoxes", "BUABox2BoxTransform"]  # public API of this module
class BUABoxes(Boxes):
    """
    A list of boxes stored as an Nx4 float tensor.

    Behaves like the parent :class:`Boxes` (supports ``area``, ``clip``,
    ``nonempty``, indexing, ``to(device)``, iteration over boxes, ...) but
    reproduces the clipping and emptiness conventions of the original
    bottom-up-attention network, which treat box coordinates as inclusive
    pixel indices (hence the ``TO_REMOVE = 1`` offsets below).

    Attributes:
        tensor: float matrix of Nx4.
    """

    BoxSizeType = Union[List[int], Tuple[int, int]]

    def __init__(self, tensor: torch.Tensor):
        super().__init__(tensor)

    def clip(self, box_size: BoxSizeType) -> None:
        """
        Clip (in place) box coordinates into the image, matching the
        bottom-up-attention network: x values are limited to [0, width - 1]
        and y values to [0, height - 1].

        Args:
            box_size (height, width): The clipping box's size.
        """
        assert torch.isfinite(self.tensor).all(), "Box tensor contains infinite or NaN!"
        TO_REMOVE = 1
        h, w = box_size
        x_bound = w - TO_REMOVE
        y_bound = h - TO_REMOVE
        # Columns 0/2 are x1/x2, columns 1/3 are y1/y2.
        for column, upper in ((0, x_bound), (1, y_bound), (2, x_bound), (3, y_bound)):
            self.tensor[:, column].clamp_(min=0, max=upper)

    def nonempty(self, threshold: int = 0) -> torch.Tensor:
        """
        Find boxes that are non-empty, matching the bottom-up-attention
        network: a box is empty when either inclusive side length
        (x2 - x1 + 1 or y2 - y1 + 1) is no larger than ``threshold``.

        Returns:
            Tensor: a binary vector, True for each non-empty box.
        """
        TO_REMOVE = 1
        coords = self.tensor
        span_x = coords[:, 2] - coords[:, 0] + TO_REMOVE
        span_y = coords[:, 3] - coords[:, 1] + TO_REMOVE
        return (span_x > threshold) & (span_y > threshold)

    def filter_boxes(self):
        # Keep boxes whose max corner lies strictly beyond the min corner.
        xy = self.tensor
        return (xy[:, 3] > xy[:, 1]) & (xy[:, 2] > xy[:, 0])

    def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> "Boxes":
        """
        Index into the box list, returning a new :class:`BUABoxes`.

        Supported forms:
        1. ``boxes[3]`` -- a single box (kept as a 1x4 matrix);
        2. ``boxes[2:10]`` -- a slice of boxes;
        3. ``boxes[mask]`` with a BoolTensor of ``length = len(boxes)`` --
           the boxes at nonzero mask positions.

        The result may share storage with this object, per PyTorch's
        indexing semantics.
        """
        if isinstance(item, int):
            return BUABoxes(self.tensor[item].view(1, -1))
        selected = self.tensor[item]
        assert selected.dim() == 2, "Indexing on Boxes with {} failed to return a matrix!".format(item)
        return BUABoxes(selected)
class BUABox2BoxTransform(object):
    """
    The R-CNN box-to-box transform, parameterized by 4 deltas (dx, dy, dw, dh).

    dx/dy shift a box's center by (dx * width, dy * height) and dw/dh scale
    its width and height by exp(dw) and exp(dh).  Widths and heights follow
    the bottom-up-attention inclusive-pixel convention (+1).
    """

    def __init__(self, weights, scale_clamp=_DEFAULT_SCALE_CLAMP):
        """
        Args:
            weights (4-element tuple): scaling factors applied to the
                (dx, dy, dw, dh) deltas.  Originally chosen so the deltas
                have unit variance; now treated as system hyperparameters.
            scale_clamp (float): upper bound on predicted dw/dh so the
                exponentials in :meth:`apply_deltas` cannot overflow.
        """
        self.weights = weights
        self.scale_clamp = scale_clamp

    def get_deltas(self, src_boxes, target_boxes):
        """
        Compute deltas (dx, dy, dw, dh) that map ``src_boxes`` onto
        ``target_boxes``, i.e. ``target_boxes == self.apply_deltas(deltas,
        src_boxes)`` holds (unless a delta is large enough to be clamped).

        Args:
            src_boxes (Tensor): source boxes, e.g. object proposals.
            target_boxes (Tensor): transformation targets, e.g. ground-truth
                boxes.
        """
        assert isinstance(src_boxes, torch.Tensor), type(src_boxes)
        assert isinstance(target_boxes, torch.Tensor), type(target_boxes)

        TO_REMOVE = 1  # TODO remove

        def _geometry(boxes):
            # Width, height and center coordinates under the inclusive convention.
            width = boxes[:, 2] - boxes[:, 0] + TO_REMOVE
            height = boxes[:, 3] - boxes[:, 1] + TO_REMOVE
            return width, height, boxes[:, 0] + 0.5 * width, boxes[:, 1] + 0.5 * height

        src_w, src_h, src_cx, src_cy = _geometry(src_boxes)
        tgt_w, tgt_h, tgt_cx, tgt_cy = _geometry(target_boxes)

        wx, wy, ww, wh = self.weights
        deltas = torch.stack(
            (
                wx * (tgt_cx - src_cx) / src_w,
                wy * (tgt_cy - src_cy) / src_h,
                ww * torch.log(tgt_w / src_w),
                wh * torch.log(tgt_h / src_h),
            ),
            dim=1,
        )

        assert (src_w > 0).all().item(), "Input boxes to Box2BoxTransform are not valid!"
        return deltas

    def apply_deltas(self, deltas, boxes):
        """
        Apply transformation ``deltas`` (dx, dy, dw, dh) to ``boxes``.

        Args:
            deltas (Tensor): shape (N, k*4) with k >= 1; deltas[i] holds k
                potentially class-specific transforms for boxes[i].
            boxes (Tensor): boxes to transform, of shape (N, 4).
        """
        assert torch.isfinite(deltas).all().item(), "Box regression deltas become infinite or NaN!"
        boxes = boxes.to(deltas.dtype)

        TO_REMOVE = 1  # TODO remove
        widths = boxes[:, 2] - boxes[:, 0] + TO_REMOVE
        heights = boxes[:, 3] - boxes[:, 1] + TO_REMOVE
        ctr_x = boxes[:, 0] + 0.5 * widths
        ctr_y = boxes[:, 1] + 0.5 * heights

        wx, wy, ww, wh = self.weights
        dx = deltas[:, 0::4] / wx
        dy = deltas[:, 1::4] / wy
        # Clamp dw/dh so torch.exp() below cannot produce huge values.
        dw = torch.clamp(deltas[:, 2::4] / ww, max=self.scale_clamp)
        dh = torch.clamp(deltas[:, 3::4] / wh, max=self.scale_clamp)

        pred_ctr_x = dx * widths[:, None] + ctr_x[:, None]
        pred_ctr_y = dy * heights[:, None] + ctr_y[:, None]
        pred_w = torch.exp(dw) * widths[:, None]
        pred_h = torch.exp(dh) * heights[:, None]

        pred_boxes = torch.zeros_like(deltas)
        pred_boxes[:, 0::4] = pred_ctr_x - 0.5 * pred_w  # x1
        pred_boxes[:, 1::4] = pred_ctr_y - 0.5 * pred_h  # y1
        pred_boxes[:, 2::4] = pred_ctr_x + 0.5 * pred_w  # x2
        pred_boxes[:, 3::4] = pred_ctr_y + 0.5 * pred_h  # y2
        return pred_boxes
286122 | <filename>goc/game/Game.py
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 28 13:56:55 2019
@author: Phoenix
"""
from .stage.WaterStage import WaterStage
from .stage.RoomStage import RoomStage
from .stage.Lv_Tower_1 import Lv_Tower_1
from .entity.Player import Player
from graphics.Background import Background
from graphics.WaterBackground import WaterBackground
import Global
# Layer
#
# 0.
#
#
#
#
class Game:
    """
    Top-level game state: the stage stack, the player and the camera.

    ``self.stages`` holds ``[z_offset, stage]`` pairs ordered back-to-front;
    ``self.player_layer`` is the index of the stage currently holding the
    player (a negative index is fine, Python wraps it).
    """

    def __init__(self, arguments=None):
        self.arguments = arguments

    def init(self):
        """Build the initial stage stack, player and rendering state."""
        self.viewport = [0, 0, 2]
        self.background = Background("image/background.png")

        # (z-offset, stage) pairs, deepest first.
        self.stages = [
            [-2.0, WaterStage()],
            [-1.6, WaterStage()],
            [-1.2, WaterStage()],
            [-0.8, WaterStage()],
            [-0.4, Lv_Tower_1()],
            [0, RoomStage()],
        ]
        self.stage = self.stages[-1][1]

        self.player = Player()
        self.player_layer = -1  # front-most stage
        self.stage.addPlayer(self.player)
        self.stage.initPlayer(self.player)
        self.stage.initViewport()

        self.layers = range(10)

    def _enter_stage(self, new_layer):
        # Move the player out of the current stage and into stages[new_layer],
        # re-initializing the player and viewport in the new stage.
        self.stages[self.player_layer][1].delPlayer()
        self.player_layer = new_layer
        self.stage = self.stages[new_layer][1]
        self.stage.addPlayer(self.player)
        self.stage.initPlayer(self.player)
        self.stage.initViewport()

    def playerLayerUp(self):
        """Move the player one stage towards the back of the stack (wraps)."""
        if len(self.stages) > 1:
            self._enter_stage((self.player_layer + len(self.stages) - 1) % len(self.stages))

    def playerLayerDown(self):
        """Move the player one stage towards the front of the stack (wraps)."""
        if len(self.stages) > 1:
            self._enter_stage((self.player_layer + 1) % len(self.stages))

    def transitionTo(self, levelname):
        """
        Move the player to the stage whose ``id`` equals ``levelname``.

        :return: True if the stage was found and entered, False otherwise.
        """
        if len(self.stages) > 1:
            for index, (z, stage) in enumerate(self.stages):
                if stage.id == levelname:
                    self._enter_stage(index)
                    return True
        # BUG FIX: previously one of the no-transition paths fell through and
        # returned None instead of False; now every path returns a bool.
        return False

    def update(self):
        """Advance background, stages and player; publish the camera position."""
        if self.background:
            self.background.update()
        if self.stage:
            self.stage.setViewport()  # Wait, what?
        for z, stage in self.stages:
            if stage.active:
                stage.update()
                # Only the stage currently holding the player redraws its VBO.
                stage.do_draw_vbo = stage == self.stage
        if self.player:
            self.player.update(self.stage)
        Global.master.screen.view_pos = self.viewport

    def draw(self, screen):
        """Draw the background and stages back-to-front, stopping at the
        stage that holds the player (stages in front of it are hidden)."""
        if self.background:
            self.background.draw(screen)
        for z, stage in self.stages:
            screen.translate(0, 0, z)
            stage.draw(screen)
            screen.popTranslation()
            if stage.player is not None:
                break
8129551 | <gh_stars>0
# Generated by Django 2.2.16 on 2021-04-27 14:04
from django.db import migrations, models
class Migration(migrations.Migration):
    """Restate the ``webhook_service`` field choices on every model that
    carries one, adding the 'generic' webhook service."""

    dependencies = [
        ('main', '0138_custom_inventory_scripts_removal'),
    ]

    # The identical AlterField is applied to each model; a fresh CharField
    # instance is built per operation.
    operations = [
        migrations.AlterField(
            model_name=model,
            name='webhook_service',
            field=models.CharField(
                blank=True,
                choices=[('github', 'GitHub'), ('gitlab', 'GitLab'), ('generic', 'Generic')],
                help_text='Service that webhook requests will be accepted from',
                max_length=16,
            ),
        )
        for model in ('job', 'jobtemplate', 'workflowjob', 'workflowjobtemplate')
    ]
| StarcoderdataPython |
348347 | <filename>gsom/applications/zoo_experiment/gsmote/comparison_testing/comparison_test.py
# Importing the libraries
# import sys
# sys.path.append('/content/pygsom/')
import datetime
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeRegressor
from gsmote import GSMOTE
from gsmote.comparison_testing.Evaluator import evaluate
import gsmote.preprocessing as pp
from gsmote.comparison_testing.compare_visual import visualize_data as vs
import sys
import pandas as pd
import xgboost as xgb
sys.path.append('../../')
# Load the dataset, hold out 20% as a fixed test split, then balance the
# training split with G-SMOTE oversampling before any model is fitted.
date_file = "../../data/KDD.csv".replace('\\', '/')
# date_file = "content/pygsom/data/ecoli.csv".replace('\\', '/')
X, y = pp.pre_process(date_file)
# random_state pinned so every classifier below sees the same split.
X_t, X_test, y_t, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
# Visualize original data
# vs(X_t, y_t, "Original data")
# oversample
print("Oversampling inprogress...")
X_train, y_train = GSMOTE.OverSample(X_t, y_t)
# visualize oversampled data
print("Oversampling completed")
print("Plotting oversampled data...")
# vs(X_train, y_train, "Oversampled ")
print("Plotting completed")
def linear_training():
    """Fit a linear regression on the oversampled training data and score it.

    Regression outputs on the test split are thresholded at 0.5 to obtain
    binary labels, which are passed to :func:`evaluate`.
    """
    model = LinearRegression()
    model.fit(X_train, y_train)
    raw_scores = model.predict(X_test)
    return evaluate("Linear Regression", y_test, np.where(raw_scores > 0.5, 1, 0))
def gradient_boosting():
    """Fit a gradient-boosting classifier and evaluate it on the test split."""
    model = GradientBoostingClassifier(n_estimators=100, learning_rate=0.01, max_depth=3)
    model.fit(X_train, y_train)
    predicted = model.predict(X_test)
    # Predicted labels are string-encoded; cast to int before thresholding.
    return evaluate("Gradient Boosting", y_test, np.where(predicted.astype(int) > 0.5, 1, 0))
def XGBoost():
    """Fit an XGBoost binary classifier and evaluate it on the test split."""
    model = xgb.XGBClassifier(objective="binary:logistic", random_state=42)
    model.fit(X_train, y_train)
    predicted = model.predict(X_test)
    # Predicted labels are string-encoded; cast to int before thresholding.
    return evaluate("XGBoost", y_test, np.where(predicted.astype(int) > 0.5, 1, 0))
def KNN():
    """Fit a 5-nearest-neighbours classifier (Euclidean metric) and score it."""
    model = KNeighborsClassifier(n_neighbors=5, metric='minkowski', p=2)
    model.fit(X_train, y_train)
    predicted_labels = model.predict(X_test).astype(int)
    return evaluate("KNN", y_test, predicted_labels)
def decision_tree():
    """Fit a decision-tree regressor; threshold its output at 0.5 and score."""
    model = DecisionTreeRegressor()
    model.fit(X_train, y_train)
    raw_scores = model.predict(X_test)
    return evaluate("Decision Tree", y_test, np.where(raw_scores > 0.5, 1, 0))
def MLPClassifier():
    """Fit a 3x10 hidden-layer MLP (lbfgs solver) and evaluate it.

    NOTE: this function deliberately shares its name with sklearn's
    MLPClassifier, so the class is imported locally under an alias.
    """
    from sklearn.neural_network import MLPClassifier as SKMLPClassifier
    model = SKMLPClassifier(hidden_layer_sizes=(10, 10, 10), max_iter=1000, solver='lbfgs',
                            alpha=1e-5, random_state=1)
    model.fit(X_train, y_train)
    predicted_labels = model.predict(X_test).astype(int)
    return evaluate("MLPClassifier", y_test, predicted_labels)
def GSOM_Classifier():
    """Train a GSOM map on the oversampled data and evaluate its labelling.

    Builds a two-column ("Name", "label") frame — both columns carry
    y_train — which the GSOM labelling step consumes.
    """
    from GSOM import GSOM
    stacked = np.vstack((["Name", "label"],
                         np.column_stack([np.copy(y_train), y_train])))
    frame = pd.DataFrame(stacked[1:, :], columns=stacked[0, :])
    gsom_model = GSOM(1.0, X_train.shape[1], max_radius=4)
    gsom_model.fit(X_train, 50, 25)
    gsom_model.labelling_gsom(X_train, frame, "Name", "label")
    gsom_model.finalize_gsom_label()
    predictions = gsom_model.predict_values(X_test)
    return evaluate("GSOM_Classifier", y_test, np.array(predictions).astype(int))
def Deep_One_Class_Classifier():
    """One-class classification via a single-component Gaussian mixture.

    The GMM is fitted on the positive ('1') training samples only.  The
    decision threshold is the mean training log-likelihood minus one
    standard deviation; test samples scoring above it are labelled 1.
    """
    from sklearn.mixture import GaussianMixture
    gmm = GaussianMixture(n_components=1)
    positives = X_train[y_train == '1']
    gmm.fit(positives)
    train_scores = gmm.score_samples(positives)
    threshold = train_scores.mean() - 1 * train_scores.std()
    test_scores = gmm.score_samples(X_test)
    return evaluate("Deep_One_Cls_Classifier", y_test, np.where(test_scores > threshold, 1, 0))
# Run the selected classifier(s) and tabulate their scores.  Uncomment the
# corresponding performance lines (and list entries below) to include more
# classifiers in the comparison.
performance7 = GSOM_Classifier()
# performance1 = linear_training()
# performance2 = gradient_boosting()
# performance3 = XGBoost()
# performance4 = KNN()
# performance5 = decision_tree()
# performance6 = MLPClassifier()
# performance8 = Deep_One_Class_Classifier()
# Column order must match the tuples returned by evaluate().
labels = ["Classifier", "f_score","g_mean","auc_value"]
values = [
    # performance1,
    # performance2,
    # performance3,
    # performance4,
    # performance5,
    # performance6,
    performance7,
    # performance8
    ]
# values=[performance9]
scores = pd.DataFrame(values,columns=labels)
print(scores)
9709479 | <reponame>TugberkArkose/MLScheduler
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.091616,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.274648,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.520219,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.391203,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.677423,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.388521,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.45715,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.306931,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.39575,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0982804,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0141814,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.135814,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.10488,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.234094,
'Execution Unit/Register Files/Runtime Dynamic': 0.119062,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.353281,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.862561,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.11879,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00194108,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00194108,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00170414,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000667066,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00150661,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00709291,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0181298,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.100824,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.41328,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.301834,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.342444,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.94781,
'Instruction Fetch Unit/Runtime Dynamic': 0.770325,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0820752,
'L2/Runtime Dynamic': 0.0199891,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.19637,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.44201,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0957386,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0957386,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.6503,
'Load Store Unit/Runtime Dynamic': 2.0099,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.236075,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.47215,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0837838,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0849217,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.398754,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.049762,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.702099,
'Memory Management Unit/Runtime Dynamic': 0.134684,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 25.3397,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.342878,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0241299,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.198109,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.565117,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 6.61881,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0349072,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.230106,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.186535,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.199412,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.321644,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.162355,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.683411,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.199472,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.54302,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0352404,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00836423,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0736333,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0618586,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.108874,
'Execution Unit/Register Files/Runtime Dynamic': 0.0702228,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.163853,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.422127,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.81124,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00164758,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00164758,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00146645,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000584865,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000888604,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00565021,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0146746,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0594663,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.78257,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.197629,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.201974,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.18466,
'Instruction Fetch Unit/Runtime Dynamic': 0.479394,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0578043,
'L2/Runtime Dynamic': 0.0144022,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.2811,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.997118,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0661277,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0661276,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.59337,
'Load Store Unit/Runtime Dynamic': 1.38936,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.16306,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.326119,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0578704,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0586063,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.235186,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0327898,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.490706,
'Memory Management Unit/Runtime Dynamic': 0.0913961,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 18.459,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0927007,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0101251,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.100327,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.203153,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.98895,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0433329,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.236724,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.24556,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.234504,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.378247,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.190926,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.803678,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.230556,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.70014,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0463916,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00983617,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0868812,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0727445,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.133273,
'Execution Unit/Register Files/Runtime Dynamic': 0.0825807,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.19387,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.503058,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.03142,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00147261,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00147261,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00130879,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000520954,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00104498,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00529898,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.013185,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0699312,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.44822,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.202916,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.237518,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.88262,
'Instruction Fetch Unit/Runtime Dynamic': 0.528849,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0473865,
'L2/Runtime Dynamic': 0.0129392,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.49543,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.10062,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0730615,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0730616,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.84044,
'Load Store Unit/Runtime Dynamic': 1.534,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.180157,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.360315,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0639384,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0646163,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.276574,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0333653,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.542518,
'Memory Management Unit/Runtime Dynamic': 0.0979815,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.6026,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.122035,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0120653,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.118566,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.252667,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.45786,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.069908,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.257597,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.39967,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.19912,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.321173,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.162117,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.682411,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.166461,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.84012,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0755062,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00835199,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0856676,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0617681,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.161174,
'Execution Unit/Register Files/Runtime Dynamic': 0.07012,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.197958,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.464346,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.87985,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000792122,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000792122,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000699709,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000276214,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000887303,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00317126,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00724564,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0593792,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.77703,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.156433,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.201679,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.17885,
'Instruction Fetch Unit/Runtime Dynamic': 0.427907,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.034022,
'L2/Runtime Dynamic': 0.00755801,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.85623,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.784692,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0523822,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0523822,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.10359,
'Load Store Unit/Runtime Dynamic': 1.09541,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.129166,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.258331,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0458413,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0463346,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.234842,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0256967,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.469698,
'Memory Management Unit/Runtime Dynamic': 0.0720314,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 18.2158,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.198623,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0114009,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0981942,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.308218,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.79097,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 2.470850712340774,
'Runtime Dynamic': 2.470850712340774,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.177873,
'Runtime Dynamic': 0.0821714,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 81.795,
'Peak Power': 114.907,
'Runtime Dynamic': 18.9388,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 81.6171,
'Total Cores/Runtime Dynamic': 18.8566,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.177873,
'Total L3s/Runtime Dynamic': 0.0821714,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}} | StarcoderdataPython |
233500 | <reponame>tommycz1/salt-formula-keystone
import io
import json
import logging
import sys
LOG = logging.getLogger(__name__)
import yaml
import yaml.constructor
try:
# included in standard lib from Python 2.7
from collections import OrderedDict
except ImportError:
# try importing the backported drop-in replacement
# it's available on PyPI
from ordereddict import OrderedDict
# https://stackoverflow.com/questions/5121931/in-python-how-can-you-load-yaml-mappings-as-ordereddicts
class OrderedDictYAMLLoader(yaml.Loader):
    """
    A YAML loader that loads mappings into ordered dictionaries.

    Registers a custom constructor for both plain maps and !!omap so that
    key order in the policy file is preserved on round-trips.
    """
    def __init__(self, *args, **kwargs):
        yaml.Loader.__init__(self, *args, **kwargs)
        # Route both mapping tags through our OrderedDict constructor.
        self.add_constructor(u'tag:yaml.org,2002:map', type(self).construct_yaml_map)
        self.add_constructor(u'tag:yaml.org,2002:omap', type(self).construct_yaml_map)
    def construct_yaml_map(self, node):
        # Two-step (generator) construction: yield the empty dict first so
        # PyYAML can resolve recursive/aliased structures, then fill it in.
        data = OrderedDict()
        yield data
        value = self.construct_mapping(node)
        data.update(value)
    def construct_mapping(self, node, deep=False):
        # Build an OrderedDict from a MappingNode, rejecting non-mapping
        # nodes and unhashable keys with PyYAML's standard error type.
        if isinstance(node, yaml.MappingNode):
            self.flatten_mapping(node)
        else:
            raise yaml.constructor.ConstructorError(None, None,
                'expected a mapping node, but found %s' % node.id, node.start_mark)
        mapping = OrderedDict()
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            try:
                hash(key)
            except TypeError as exc:
                raise yaml.constructor.ConstructorError('while constructing a mapping',
                    node.start_mark, 'found unacceptable key (%s)' % exc, key_node.start_mark)
            value = self.construct_object(value_node, deep=deep)
            mapping[key] = value
        return mapping
def __virtual__():
return True
def rule_list(path, **kwargs):
    """
    Load all policy rules from the file at ``path``.

    The file is parsed as YAML (JSON files written by rule_set parse too,
    since JSON is a YAML subset). Returns an OrderedDict of rules with
    insertion order preserved, or {'Error': msg} if the file cannot be
    read/parsed.
    """
    try:
        with io.open(path, 'r') as file_handle:
            # An empty file loads as None; fall back to an empty mapping.
            rules = yaml.load(file_handle, OrderedDictYAMLLoader) or OrderedDict()
    except Exception as e:
        msg = "Unable to load policy file %s: %s" % (path, repr(e))
        LOG.debug(msg)
        rules = {'Error': msg}
    return rules
def rule_delete(name, path, **kwargs):
    """
    Delete the rule ``name`` from the policy file at ``path`` and rewrite
    the file (JSON if the path ends in 'json', YAML otherwise).

    Returns {} if the rule was absent, a success message string if it was
    deleted, or {'Error': msg} if the file could not be read or written.
    """
    ret = {}
    # rule_list returns {'Error': msg} on read failure; in that case the
    # file is left untouched and the error mapping is returned as-is.
    rules = __salt__['keystone_policy.rule_list'](path, **kwargs)
    if 'Error' not in rules:
        if name not in rules:
            return ret
        del rules[name]
        try:
            with io.open(path, 'w') as file_handle:
                if path.endswith('json'):
                    serialized = json.dumps(rules, indent=4)
                else:
                    serialized = yaml.safe_dump(rules, indent=4)
                # io.open requires unicode text on Python 2, hence the
                # version-dependent write.
                if sys.version_info[0] >= 3:
                    file_handle.write(serialized)
                else:
                    file_handle.write(unicode(serialized))
        except Exception as e:
            msg = "Unable to save policy file: %s" % repr(e)
            LOG.error(msg)
            return {'Error': msg}
        ret = 'Rule {0} deleted'.format(name)
    return ret
def rule_set(name, rule, path, **kwargs):
    """
    Create or update the rule ``name`` in the policy file at ``path`` and
    rewrite the file (JSON if the path ends in 'json', YAML otherwise).

    Returns an "already in correct state" message if the rule is unchanged,
    the result of rule_get on success, or {'Error': msg} on read/write
    failure.
    """
    rules = __salt__['keystone_policy.rule_list'](path, **kwargs)
    if 'Error' not in rules:
        # No-op if the rule already has the requested value.
        if name in rules and rules[name] == rule:
            return {name: 'Rule %s already exists and is in correct state' % name}
        rules.update({name: rule})
        try:
            with io.open(path, 'w') as file_handle:
                if path.endswith('json'):
                    serialized = json.dumps(rules, indent=4)
                else:
                    serialized = yaml.safe_dump(rules, indent=4)
                # io.open requires unicode text on Python 2.
                if sys.version_info[0] >= 3:
                    file_handle.write(serialized)
                else:
                    file_handle.write(unicode(serialized))
        except Exception as e:
            msg = "Unable to save policy file %s: %s" % (path, repr(e))
            LOG.error(msg)
            return {'Error': msg}
        # Re-read from disk so the caller sees what was actually persisted.
        return rule_get(name, path, **kwargs)
    return rules
def rule_get(name, path, **kwargs):
    """
    Look up a single policy rule by name.

    Returns {name: rule} if present, {} if absent, or {'Error': msg} if
    the policy file could not be read.
    """
    rules = __salt__['keystone_policy.rule_list'](path, **kwargs)
    if 'Error' in rules:
        return {'Error': rules['Error']}
    if name in rules:
        return {name: rules[name]}
    return {}
| StarcoderdataPython |
5053522 | <filename>tests/test_data_helper.py
from unittest import TestCase
# TODO implement these tests
import numpy as np
import pandas as pd
import torch
from torch.utils.data import WeightedRandomSampler, TensorDataset, DataLoader
from sci.data_helpers import SciDataHelper
class DataHelperTests(TestCase):
    """Unit tests for SciDataHelper sampling helpers."""

    def test_negative_sampling(self):
        # Placeholder (see module-level TODO). Skip instead of raising
        # NotImplementedError so the suite reports an explicit skip rather
        # than an error for a not-yet-written test.
        self.skipTest("negative sampling test not implemented yet")

    def test_weighted_sampler(self):
        """Draw a full epoch through a WeightedRandomSampler built from
        SciDataHelper weights and print label frequencies for inspection.

        Builds an imbalanced label frame (10 'a', 3 'b', 5 'c'), derives
        per-sample weights via get_sampler_weights, then resamples with
        replacement and compares original vs. resampled label counts.
        """
        items_a = ['a'] * 10
        items_b = ['b'] * 3
        items_c = ['c'] * 5
        items = items_a + items_b + items_c
        dh = SciDataHelper(label_col='label', labels=['a', 'b', 'c'], none_label=None)
        df = pd.DataFrame({'label': items})
        dh.set_label_encoder(df)
        label_weights, weights = dh.get_sampler_weights(df)
        ys = torch.tensor(dh.label_encoder.transform(items))
        sampler = WeightedRandomSampler(weights, num_samples=int(weights.sum()), replacement=True)
        dl = DataLoader(TensorDataset(ys), sampler=sampler, batch_size=4)
        out = []
        for batch in dl:
            yss = batch[0].numpy()
            out += dh.label_encoder.inverse_transform(yss).tolist()
        odf = pd.DataFrame({'label': out})
        # Manual inspection only (sampling is random, so no hard assert):
        # rare labels should appear more often in the resampled counts.
        print(df['label'].value_counts())
        print(odf['label'].value_counts())
| StarcoderdataPython |
9740209 | from __future__ import division
import numpy as np
import cv2 as cv
import os
import matplotlib.pyplot as plt
def read_image(path):
    """
    Read an image from disk as RGB uint8.

    OpenCV loads images in BGR channel order; the result is converted to
    RGB before returning.

    :param path: The path to the image.
    :return: RGB uint8 image.
    """
    assert os.path.isfile(path), 'File not found'
    bgr = cv.imread(path)
    return cv.cvtColor(bgr, cv.COLOR_BGR2RGB)
def show_colors(C):
    """
    Visualize rows of C as horizontal color bars (RGB).

    :param C: An array N x 3 where the rows are considered as RGB colors.
              Values may be in [0, 1] or [0, 255]; if any value exceeds
              1.0 the whole array is treated as 0-255 and rescaled.
    :return: None (draws on the current matplotlib axes).
    """
    assert isinstance(C, np.ndarray)
    assert C.ndim == 2
    assert C.shape[1] == 3
    n = C.shape[0]
    # Heuristic: max > 1.0 means the colors are in 0-255 range.
    range255 = C.max() > 1.0
    for i in range(n):
        # Draw row i as a thick horizontal line; first row appears on top.
        if range255:
            plt.plot([0, 1], [n - 1 - i, n - 1 - i], c=C[i] / 255, linewidth=20)
        else:
            plt.plot([0, 1], [n - 1 - i, n - 1 - i], c=C[i], linewidth=20)
    plt.axis('off')
    plt.axis([0, 1, -1, n])
def show(image, now=True, fig_size=(10, 10)):
    """
    Display an image (np.array) with matplotlib.

    Caution! Rescales the image to the range [0, 1] for display.

    :param image: Image array (2D gray or 3D color).
    :param now: Call plt.show() immediately?
    :param fig_size: Figure size (width, height), or None to keep the
                     current figure size.
    :return: None
    """
    image = check_image(image)
    is_gray = image.ndim == 2
    image = image.astype(np.float32)
    m, M = image.min(), image.max()
    # Bug fix: guard against division by zero for constant images, where
    # the original (image - m) / (M - m) produced NaNs.
    scale = (M - m) if M > m else 1.0
    if fig_size is not None:
        plt.rcParams['figure.figsize'] = (fig_size[0], fig_size[1])
    if is_gray:
        plt.imshow((image - m) / scale, cmap='gray')
    else:
        plt.imshow((image - m) / scale)
    plt.axis('off')
    if now:
        plt.show()
def build_stack(images):
    """
    Build a float stack from a tuple/list of images.

    All images must share the same number of dimensions (all gray or all
    color) after squeezing.

    :param images: A tuple/list of images.
    :return: Array of shape (N, h, w) or (N, h, w, c), dtype float64.
    """
    checked = [check_image(image) for image in images]
    ndim = checked[0].ndim
    for image in checked:
        assert image.ndim == ndim
    # np.zeros defaults to float64, so the stack is float regardless of
    # the input dtype (matching the original behavior).
    stack = np.zeros((len(checked),) + checked[0].shape)
    for i, image in enumerate(checked):
        stack[i] = image
    return stack
def patch_grid(ims, width=5, sub_sample=False, rand=False, save_name=None):
    """
    Display a grid of patches.

    :param ims: A patch 'stack' (first axis indexes patches).
    :param width: Images per row.
    :param sub_sample: False, or the number of patches to show.
    :param rand: Pick the subsample at random (without replacement)?
    :param save_name: Optional path to save the figure to.
    :return: None
    """
    N0 = np.shape(ims)[0]
    if sub_sample and rand:
        N = sub_sample
        # Bug fix: sample indices from the FULL stack (N0). The original
        # used range(N) with N == sub_sample, which merely permuted the
        # first sub_sample patches instead of sampling the whole stack.
        idx = np.random.choice(range(N0), sub_sample, replace=False)
        stack = ims[idx]
    elif sub_sample and not rand:
        N = sub_sample
        stack = ims[:N]
    else:
        N = N0
        stack = ims
    height = np.ceil(float(N) / width).astype(np.uint16)
    plt.rcParams['figure.figsize'] = (18, (18 / width) * height)
    plt.figure()
    for i in range(N):
        plt.subplot(height, width, i + 1)
        show(stack[i], now=False, fig_size=None)
    if save_name is not None:
        # NOTE(review): os.path.dirname(save_name) is '' for a bare
        # filename, which makes makedirs raise -- confirm callers always
        # pass a path with a directory component.
        os.makedirs(os.path.dirname(save_name), exist_ok=True)
        plt.savefig(save_name)
    plt.show()
def standardize_brightness(I, percentile=95):
    """
    Standardize brightness.

    Rescales the LAB luminance channel so that the given percentile of L
    maps to full brightness, then clips to the valid uint8 range.

    :param I: Image uint8 RGB.
    :param percentile: Percentile of L used as the white reference.
    :return: Image uint8 RGB with standardized brightness.
    """
    assert is_uint8_image(I)
    I_LAB = cv.cvtColor(I, cv.COLOR_RGB2LAB)
    L = I_LAB[:, :, 0]
    p = np.percentile(L, percentile)
    I_LAB[:, :, 0] = np.clip(255. * L / p, 0, 255).astype(np.uint8)  # 255. float seems to be important...
    I = cv.cvtColor(I_LAB, cv.COLOR_LAB2RGB)
    return I
def remove_zeros(I):
    """
    Replace all zeros in an array with ones.

    Bug fix: the original mutated the caller's array in place even though
    the docstring promised a new array; this version works on a copy and
    leaves the input untouched.

    :param I: An array.
    :return: New array where 0's have been replaced with 1's.
    """
    I = I.copy()
    I[I == 0] = 1
    return I
def RGB_to_OD(I):
    """
    Convert from RGB to optical density (OD_RGB) space.

    RGB = 255 * exp(-1*OD_RGB).

    Zeros are replaced by 1 before taking the log (log(0) is undefined).
    Bug fix: the replacement is now done on a fresh array via np.where,
    so the caller's image is no longer mutated as a side effect.

    :param I: Image RGB uint8.
    :return: Optical density RGB image (float array, >= 0).
    """
    I = np.where(I == 0, 1, I)  # avoid log(0) without mutating the input
    return -1 * np.log(I / 255)
def OD_to_RGB(OD):
    """
    Convert optical density (OD_RGB) back to RGB.

    RGB = 255 * exp(-1 * OD_RGB)

    :param OD: Optical density RGB image (all values non-negative).
    :return: Image RGB uint8.
    """
    assert OD.min() >= 0, 'Negative optical density'
    rgb = np.exp(-OD) * 255
    return rgb.astype(np.uint8)
def normalize_rows(A):
    """
    Normalize each row of A to unit Euclidean length.

    :param A: A 2D array.
    :return: Array with rows normalized.
    """
    row_norms = np.linalg.norm(A, axis=1)
    return A / row_norms[:, None]
def notwhite_mask(I, thresh=0.8):
    """
    Get a binary mask where true denotes 'not white'.
    Specifically, a pixel is not white if its luminance (in LAB color space) is less than the specified threshold.
    :param I: RGB uint 8 image.
    :param thresh: Luminosity threshold in [0, 1].
    :return: Binary mask where true denotes 'not white'.
    """
    assert is_uint8_image(I)
    I_LAB = cv.cvtColor(I, cv.COLOR_RGB2LAB)
    # 8-bit LAB from OpenCV stores L in [0, 255]; rescale to [0, 1]
    # before comparing against the threshold.
    L = I_LAB[:, :, 0] / 255.0
    return (L < thresh)
def sign(x):
    """
    Return the sign of x.

    :param x: A scalar x.
    :return: +1 if x > 0, -1 if x < 0, 0 if x == 0.
    """
    # Branchless comparison form. Also fixes the original if/elif chain,
    # which fell through and implicitly returned None for values that
    # compare unordered (e.g. NaN).
    return (x > 0) - (x < 0)
### Checks
def array_equal(A, B, eps=1e-9):
    """
    Are arrays A and B equal (element-wise, within tolerance)?

    :param A: Array.
    :param B: Array.
    :param eps: Tolerance on the mean absolute difference.
    :return: True/False.
    """
    if A.ndim != B.ndim:
        return False
    if A.shape != B.shape:
        return False
    # Bug fix: the original tested np.mean(A - B), where positive and
    # negative differences cancel (e.g. [1,-1] vs [-1,1] compared equal)
    # and any negative mean passed unconditionally. Use the mean ABSOLUTE
    # difference instead.
    if np.mean(np.abs(A - B)) > eps:
        return False
    return True
def is_image(x):
    """
    Is x an image, i.e. a numpy array with 2 or 3 dimensions?

    :param x: Input.
    :return: True/False.
    """
    return isinstance(x, np.ndarray) and x.ndim in (2, 3)
def is_gray_image(x):
    """
    Is x a gray image (an image whose squeezed form is 2D)?

    :param x: Input.
    :return: True/False.
    """
    return is_image(x) and x.squeeze().ndim == 2
def is_uint8_image(x):
    """
    Is x a uint8 image?

    :param x: Input.
    :return: True/False.
    """
    return is_image(x) and x.dtype == np.uint8
def check_image(x):
    """
    Assert that x is an image; squeeze gray images down to 2D.

    :param x: Input.
    :return: x, squeezed to 2D if it is a gray image.
    """
    assert is_image(x)
    return x.squeeze() if is_gray_image(x) else x
# Defined in utils/misc_utils
# NOTE(review): this redefines standardize_brightness and shadows the
# identical implementation earlier in this file -- one copy should be
# removed once callers are confirmed.
def standardize_brightness(I, percentile=95):
    """
    Standardize brightness.

    Rescales the LAB luminance channel so that the given percentile of L
    maps to full brightness, then clips to the valid uint8 range.

    :param I: Image uint8 RGB.
    :return: Image uint8 RGB with standardized brightness.
    """
    assert is_uint8_image(I)
    I_LAB = cv.cvtColor(I, cv.COLOR_RGB2LAB)
    L = I_LAB[:, :, 0]
    p = np.percentile(L, percentile)
    I_LAB[:, :, 0] = np.clip(255. * L / p, 0, 255).astype(np.uint8)  # 255. float seems to be important...
    I = cv.cvtColor(I_LAB, cv.COLOR_LAB2RGB)
    return I
class Normaliser(object):
    """
    Abstract base class for normalizers. Defines some necessary methods to be considered a normalizer.
    """
    def __init__(self, **kwargs):
        # 'standardize' (default True) controls whether subclasses run
        # brightness standardization before fitting/transforming.
        self.standardize = kwargs['standardize'] if 'standardize' in kwargs.keys() else True
        if self.standardize:
            print('Using brightness standardization')
        else:
            print('Not standardizing brightness')
    #@abstractmethod
    def fit(self, target):
        """Fit the normalizer to a target image. Subclasses must override."""
    #@abstractmethod
    def transform(self, I):
        """Transform an image to the target stain. Subclasses must override."""
class ReinhardNormalizer(Normaliser):
    """
    Reinhard-style color normalizer: matches the per-channel mean and
    standard deviation of an image to those of a fitted target image,
    working in LAB color space.
    """
    def __init__(self, **kwargs):
        super(ReinhardNormalizer, self).__init__(**kwargs)
        # Per-channel LAB statistics of the target image; set by fit().
        self.target_means = None
        self.target_stds = None
    def fit(self, target):
        """
        Fit to a target image by recording its LAB channel statistics.
        :param target: Image RGB uint8.
        :return: None
        """
        if self.standardize:
            target = standardize_brightness(target)
        means, stds = self.get_mean_std(target)
        self.target_means = means
        self.target_stds = stds
    def transform(self, I):
        """
        Transform an image to match the fitted target's LAB statistics.
        :param I: Image RGB uint8.
        :return: Image RGB uint8.
        """
        if self.standardize:
            I = standardize_brightness(I)
        I1, I2, I3 = self.lab_split(I)
        means, stds = self.get_mean_std(I)
        # Per channel: shift to zero mean, rescale to the target's std,
        # then move to the target's mean.
        norm1 = ((I1 - means[0]) * (self.target_stds[0] / stds[0])) + self.target_means[0]
        norm2 = ((I2 - means[1]) * (self.target_stds[1] / stds[1])) + self.target_means[1]
        norm3 = ((I3 - means[2]) * (self.target_stds[2] / stds[2])) + self.target_means[2]
        return self.merge_back(norm1, norm2, norm3)
    @staticmethod
    def lab_split(I):
        """
        Convert from RGB uint8 to LAB and split into channels.
        :param I: Image RGB uint8.
        :return: Tuple (L, A, B) as float32 arrays.
        """
        assert is_uint8_image(I)
        I = cv.cvtColor(I, cv.COLOR_RGB2LAB)
        I = I.astype(np.float32)
        I1, I2, I3 = cv.split(I)
        # Undo OpenCV's 8-bit LAB encoding: L stored in [0, 255] maps to
        # [0, 100]; A and B are stored with a +128 offset.
        I1 /= 2.55
        I2 -= 128.0
        I3 -= 128.0
        return I1, I2, I3
    @staticmethod
    def merge_back(I1, I2, I3):
        """
        Take separate LAB channels and merge back to give RGB uint8.
        :param I1: L
        :param I2: A
        :param I3: B
        :return: Image RGB uint8.
        """
        # Re-apply OpenCV's 8-bit LAB encoding and clip to valid range
        # before converting back to RGB.
        I1 *= 2.55
        I2 += 128.0
        I3 += 128.0
        I = np.clip(cv.merge((I1, I2, I3)), 0, 255).astype(np.uint8)
        return cv.cvtColor(I, cv.COLOR_LAB2RGB)
    def get_mean_std(self, I):
        """
        Get mean and standard deviation of each LAB channel.
        :param I: Image RGB uint8.
        :return: Tuple (means, stds), each a 3-tuple of per-channel values.
        """
        I1, I2, I3 = self.lab_split(I)
        m1, sd1 = cv.meanStdDev(I1)
        m2, sd2 = cv.meanStdDev(I2)
        m3, sd3 = cv.meanStdDev(I3)
        means = m1, m2, m3
        stds = sd1, sd2, sd3
        return means, stds
| StarcoderdataPython |
4894959 | import cv2, collections
import numpy
# Interactive script: load Images/<n>.jpg and report, for each of four
# colors, a 2-bit code (or 0 if absent) plus the matching pixel count.
inu = input("choose image ")
img = cv2.imread("Images/"+str(inu)+".jpg", flags=cv2.IMREAD_COLOR)
# Red wraps around the hue axis in OpenCV HSV (H in [0, 180)), so two
# ranges are needed: near 0 and near 180.
lower_red = numpy.array([0,50,50])
upper_red = numpy.array([10,255,255])
lower_red2 = numpy.array([165,50,50])
upper_red2 = numpy.array([180,255,255])
hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
maskR = cv2.inRange(hsv, lower_red, upper_red)
maskR2 = cv2.inRange(hsv, lower_red2, upper_red2)
maskRED = maskR + maskR2
lower_green = numpy.array([44,50,50])
upper_green = numpy.array([70,255,255])
maskG = cv2.inRange(hsv,lower_green, upper_green)
lower_blue = numpy.array([105,50,50])
upper_blue = numpy.array([127,255,255])
maskB = cv2.inRange(hsv,lower_blue,upper_blue)
# NOTE(review): the yellow range looks suspect: hue 50-65 lies inside
# the green range above (44-70), and S/V are pinned to exactly 255 so
# only fully saturated, fully bright pixels can match. Yellow in OpenCV
# HSV is usually around hue 20-35 -- confirm the intended values.
lower_yellow = numpy.array([50,255,255])
upper_yellow = numpy.array([65,255,255])
maskY = cv2.inRange(hsv,lower_yellow,upper_yellow)
# A color "counts" if more than `threshold` pixels match its mask; each
# detected color is encoded as a 2-bit string, otherwise as integer 0.
threshold = 1
if (numpy.count_nonzero(maskRED==255) > threshold):
    CountR = "01"
else:
    CountR = 0
if (numpy.count_nonzero(maskG==255) > threshold):
    CountG = "10"
else:
    CountG = 0
if (numpy.count_nonzero(maskB==255) > threshold):
    CountB = "11"
else:
    CountB = 0
if (numpy.count_nonzero(maskY==255) > threshold):
    CountY = "00"
else:
    CountY = 0
print(CountR)
print(CountG)
print(CountB)
print(CountY)
print("RED pixels: " + str(numpy.count_nonzero(maskRED==255)))
print("GREEN pixels: " + str(numpy.count_nonzero(maskG==255)))
print("BLUE pixels: " + str(numpy.count_nonzero(maskB==255)))
print("YELLOW pixels: " + str(numpy.count_nonzero(maskY==255)))
266264 | # automatically generated by the FlatBuffers compiler, do not modify
# namespace: fbs
import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()
class Graph(object):
    """Auto-generated FlatBuffers accessor for the ORT 'Graph' table.

    Wraps a serialized buffer; each getter reads its field from the
    underlying bytes on demand (vtable slots 0-7 at byte offsets 4-18).
    Do not edit by hand -- regenerate with the FlatBuffers compiler.
    """
    __slots__ = ['_tab']
    @classmethod
    def GetRootAsGraph(cls, buf, offset):
        # Entry point: wrap `buf` starting at the root table offset.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Graph()
        x.Init(buf, n + offset)
        return x
    @classmethod
    def GraphBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        # True if the buffer carries the "ORTM" file identifier.
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x4F\x52\x54\x4D", size_prefixed=size_prefixed)
    # Graph
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)
    # Graph
    def Initializers(self, j):
        # Vector of Tensor tables (vtable offset 4); returns element j or None.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            x = self._tab.Vector(o)
            x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
            x = self._tab.Indirect(x)
            from ort_flatbuffers_py.experimental.fbs.Tensor import Tensor
            obj = Tensor()
            obj.Init(self._tab.Bytes, x)
            return obj
        return None
    # Graph
    def InitializersLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Graph
    def InitializersIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        return o == 0
    # Graph
    def NodeArgs(self, j):
        # Vector of ValueInfo tables (vtable offset 6).
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            x = self._tab.Vector(o)
            x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
            x = self._tab.Indirect(x)
            from ort_flatbuffers_py.experimental.fbs.ValueInfo import ValueInfo
            obj = ValueInfo()
            obj.Init(self._tab.Bytes, x)
            return obj
        return None
    # Graph
    def NodeArgsLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Graph
    def NodeArgsIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        return o == 0
    # Graph
    def Nodes(self, j):
        # Vector of Node tables (vtable offset 8).
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            x = self._tab.Vector(o)
            x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
            x = self._tab.Indirect(x)
            from ort_flatbuffers_py.experimental.fbs.Node import Node
            obj = Node()
            obj.Init(self._tab.Bytes, x)
            return obj
        return None
    # Graph
    def NodesLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Graph
    def NodesIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        return o == 0
    # Graph
    def MaxNodeIndex(self):
        # Scalar uint32 field (vtable offset 10); defaults to 0.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos)
        return 0
    # Graph
    def NodeEdges(self, j):
        # Vector of NodeEdge tables (vtable offset 12).
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            x = self._tab.Vector(o)
            x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
            x = self._tab.Indirect(x)
            from ort_flatbuffers_py.experimental.fbs.NodeEdge import NodeEdge
            obj = NodeEdge()
            obj.Init(self._tab.Bytes, x)
            return obj
        return None
    # Graph
    def NodeEdgesLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Graph
    def NodeEdgesIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        return o == 0
    # Graph
    def Inputs(self, j):
        # Vector of strings (vtable offset 14); returns "" when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return ""
    # Graph
    def InputsLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Graph
    def InputsIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        return o == 0
    # Graph
    def Outputs(self, j):
        # Vector of strings (vtable offset 16); returns "" when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return ""
    # Graph
    def OutputsLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Graph
    def OutputsIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        return o == 0
    # Graph
    def SparseInitializers(self, j):
        # Vector of SparseTensor tables (vtable offset 18).
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
        if o != 0:
            x = self._tab.Vector(o)
            x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
            x = self._tab.Indirect(x)
            from ort_flatbuffers_py.experimental.fbs.SparseTensor import SparseTensor
            obj = SparseTensor()
            obj.Init(self._tab.Bytes, x)
            return obj
        return None
    # Graph
    def SparseInitializersLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Graph
    def SparseInitializersIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
        return o == 0
# Auto-generated builder helpers: call GraphStart(builder), then the
# GraphAdd*/GraphStart*Vector functions for each populated field
# (vtable slots 0-7), and finish with GraphEnd(builder).
def GraphStart(builder): builder.StartObject(8)
def GraphAddInitializers(builder, initializers): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(initializers), 0)
def GraphStartInitializersVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def GraphAddNodeArgs(builder, nodeArgs): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(nodeArgs), 0)
def GraphStartNodeArgsVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def GraphAddNodes(builder, nodes): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(nodes), 0)
def GraphStartNodesVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def GraphAddMaxNodeIndex(builder, maxNodeIndex): builder.PrependUint32Slot(3, maxNodeIndex, 0)
def GraphAddNodeEdges(builder, nodeEdges): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(nodeEdges), 0)
def GraphStartNodeEdgesVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def GraphAddInputs(builder, inputs): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0)
def GraphStartInputsVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def GraphAddOutputs(builder, outputs): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0)
def GraphStartOutputsVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def GraphAddSparseInitializers(builder, sparseInitializers): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(sparseInitializers), 0)
def GraphStartSparseInitializersVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def GraphEnd(builder): return builder.EndObject()
| StarcoderdataPython |
9667061 | <reponame>sonocent/daktari
import logging
import re
from semver import VersionInfo
from typing import Optional
from daktari.check import Check, CheckResult
from daktari.command_utils import get_stdout
from daktari.os import OS
from daktari.version_utils import try_parse_semver
flutter_version_pattern = re.compile(r"Flutter\s+([\d\.]+)")


def parse_flutter_version_output(version_output: Optional[str]) -> Optional[VersionInfo]:
    """Extract a semver from `flutter --version` output, or None when absent."""
    if not version_output:
        return None
    match = flutter_version_pattern.search(version_output)
    if match is None:
        return None
    version_string = match.group(1)
    logging.debug(f"Flutter version string: {version_string}")
    return try_parse_semver(version_string)
def get_flutter_version() -> Optional[VersionInfo]:
    """Run `flutter --version` and parse the installed version from its output."""
    return parse_flutter_version_output(get_stdout("flutter --version"))
class FlutterInstalled(Check):
    """Daktari check that Flutter is installed, optionally enforcing a
    required and/or recommended version."""

    name = "flutter.installed"
    suggestions = {
        OS.GENERIC: "Install Flutter: https://flutter.dev/docs/get-started/install",
    }

    def __init__(self, required_version: Optional[str] = None,
                 recommended_version: Optional[str] = None):
        # Version constraints are semver expressions; None means "any".
        self.required_version = required_version
        self.recommended_version = recommended_version

    def check(self) -> CheckResult:
        """Probe the local Flutter install and validate it against the constraints."""
        flutter_version = get_flutter_version()
        logging.info(f"Flutter version: {flutter_version}")
        return self.validate_semver_expression(
            "Flutter", flutter_version, self.required_version, self.recommended_version
        )
| StarcoderdataPython |
11266077 | from __future__ import print_function
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
import torchvision.transforms as transforms
import torchvision.models as models
import copy
import os
import time
import heapq
from scipy import spatial
featurePath = './small_features/'
neighbourPath = './small_neighbours/'

# Select GPU 0 when available; fall back silently to CPU otherwise.
try:
    torch.cuda.set_device(0)
except Exception:
    # FIX: was a bare `except:`, which also swallows KeyboardInterrupt/SystemExit.
    pass
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print("Running on " + str(device))

# Sorted so neighbour indices are stable across runs.
fileNames = os.listdir(featurePath)
fileNames.sort()
def getDistance(x, y):
    """Euclidean (L2) distance between two feature vectors."""
    diff = x - y
    return np.linalg.norm(diff)
### UNCOMMENT THIS LATER ###
# numOfImages = len(fileNames)
k = 10
numOfImages = 1000
print("No of images", numOfImages)
n = 100

begin = time.time()
for i in range(n):
    filename = fileNames[i]
    print(i, filename)
    features = np.load(featurePath + filename)
    # Max-heap (via negated distances) of the k nearest neighbours seen so far,
    # stored as (-distance, index) tuples.
    maxHeap = []
    for j in range(numOfImages):
        if i == j:
            continue
        features2 = np.load(featurePath + fileNames[j])
        d = getDistance(features, features2)
        if len(maxHeap) < k:
            heapq.heappush(maxHeap, (-d, j))
        elif d < -maxHeap[0][0]:
            # Closer than the current farthest kept neighbour: replace it.
            heapq.heapreplace(maxHeap, (-d, j))
    # Pop farthest-first, then reverse so the nearest neighbour comes first.
    neighbours = [heapq.heappop(maxHeap)[1] for _ in range(len(maxHeap))]
    neighbours.reverse()
    # FIX: the original used filename.strip('.npy'), which strips *characters*
    # ('.', 'n', 'p', 'y') from both ends and mangles names like 'canopy.npy'.
    outName = os.path.splitext(filename)[0] + '.txt'
    with open(neighbourPath + outName, 'w') as file:
        for idx in neighbours:
            file.write(fileNames[idx] + '\n')
    print("ETA: ", ((time.time() - begin) * (n - i - 1)) / (i + 1), " seconds")
print("Extracted all neighbours successfully!")
5063297 | # encoding: utf-8
# Created by chenghaomou at 2019-06-11
import argparse
import regex as re
from elisa_dnt.utils import *
if __name__ == "__main__":
    # Command-line driver for the DNT (do-not-translate) pre/post processing.
    parser = argparse.ArgumentParser(description='DNT process script')
    parser.add_argument('step', type=str, choices=['pre', 'post'],
                        help="Parameter for choosing between preprocess or postprocess")
    parser.add_argument('scheme', type=str, choices=['del', 'sub'],
                        help="Parameter for scheme")
    parser.add_argument('--dnt_src', type=str,
                        help='[Post]File path to the dnt source file')
    parser.add_argument('--dnt_ini', type=str,
                        help="[Post]File path to the dnt conf file")
    parser.add_argument('--output', type=str,
                        help="[Post]File path to the output file")
    parser.add_argument('--ordered', action='store_true', dest='ordered', default=False,
                        help='Sub parameter, use markers orderly as how LI tokens appear; '
                             'suggest True for translation, False for bpe')
    parser.add_argument('--src', type=str,
                        help='[Pre]File path to the source file')
    parser.add_argument('--src_output', type=str,
                        help='[Pre]File path to the source output file')
    parser.add_argument('--ini_output', type=str,
                        help='[Pre]File path to the source ini file')
    parser.add_argument('--tgt', type=str, required=False,
                        help='[Pre]File path to the target file')
    parser.add_argument('--cross', dest='pb_cross', default=False, action='store_true',
                        help='[Pre]Parameter for whether use reference target file for regex extraction')
    parser.add_argument('--visual', type=str,
                        help="[Pre]File path to visualization html file")
    args = parser.parse_args()
    print(args)

    scheme = args.scheme
    rules = load_rules(scheme=scheme)
    options = generate_options()

    # Post-processing: restore DNT tokens and stop.
    if args.step == "post":
        restore(args.dnt_src, args.dnt_ini, args.output, args.scheme, ordered=args.ordered)
        exit(0)

    # Pre-processing: optionally start an HTML visualization file.
    if args.visual:
        with open(args.visual, "w") as o:
            # FIX: the closing tag of the head section was written as "<head>"
            # instead of "</head>", producing invalid HTML.
            o.write("""
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<link href="https://fonts.googleapis.com/css?family=Open+Sans&display=swap&subset=cyrillic,cyrillic-ext,greek,greek-ext,latin-ext,vietnamese" rel="stylesheet">
<style>
html,body{
font-family: 'Open Sans', sans-serif;
}
""" + "\n".join([".%s {%s}" % (key, value) for key, value in options["colors"].items()]) + """
</style>
</head>
<body>
""")

    path = args.src
    split(args.src, args.src_output, args.ini_output, scheme=args.scheme,
          ref=args.tgt if args.scheme == "sub" and args.pb_cross else "",
          rules=rules, ordered=args.ordered)

    if args.visual:
        # FIX: argparse gives --tgt a default of None, not "", so the original
        # `args.tgt == ""` test never matched and open(None) crashed below.
        if not args.tgt:
            # Source-only visualization.
            for line in open(path):
                matches = find(line, rules)
                if matches:
                    res = visual(line, matches, options, rules)
                    with open(args.visual, "a+") as o:
                        o.write(f"<p>{res}</p>" + "\n")
        else:
            # Parallel source/target visualization; optionally restrict to
            # matches shared by both sides (--cross).
            src_lines, tgt_lines = open(path).readlines(), open(args.tgt).readlines()
            assert len(src_lines) == len(tgt_lines)
            for src_line, tgt_line in zip(src_lines, tgt_lines):
                src_matches = find(src_line, rules)
                tgt_matches = find(tgt_line, rules)
                src_matches_text = [src_line[m.start:m.end] for m in src_matches]
                tgt_matches_text = [tgt_line[m.start:m.end] for m in tgt_matches]
                x_matches = list(set(src_matches_text).intersection(set(tgt_matches_text)))
                x_src_matches = [m for m in src_matches
                                 if src_line[m.start:m.end] in x_matches] if args.pb_cross else src_matches
                x_tgt_matches = [m for m in tgt_matches
                                 if tgt_line[m.start:m.end] in x_matches] if args.pb_cross else tgt_matches
                if x_matches:
                    res = visual(src_line, x_src_matches, options, rules)
                    with open(args.visual, "a+") as o:
                        o.write(f"<p>{res}</p>" + "\n")
                    res = visual(tgt_line, x_tgt_matches, options, rules)
                    with open(args.visual, "a+") as o:
                        o.write(f"<p>{res}</p>" + "\n")
        # Close the visualization document.
        with open(args.visual, "a+") as o:
            o.write('</body></html>')
6569559 | <gh_stars>0
from __future__ import print_function
"""
outputs to produce for RIST workbook:
- an asset is a zone-technology-vintage combination
x capital outlay for new/replacement assets each period (per asset)
- annual amortization for each asset during each period
- annual O&M per asset per year
- annual fuel cost per asset per year
- maybe group the above into
- PPA projects (report amortization + fuel + O&M together)
- HECO-owned (report capital outlay, O&M, fuel)
- annual values:
- Net Load (GWh)
- battery/transmission losses (GWh)
- Customer Count (?) (if have time)
- Average Residential Use (kWh/month) (?) (if have time)
- CO2 Emissions (tons)
- Utility-scale Renewable Generation (GWh)
- Distributed Generation (GWh)
- Distributed Generation Capacity (MW)
what is new capacity and those costs (any new, PPA or heco-owned; for now we assume all done through PPA)
when does heco's capacity shutdown?
amortization of capital with 6% plus O&M and fuel
if people want to use this scenario as our own, we should lay out our assumptions:
capacity, shutdown date or start date for these facilities, what capacity was available what year.
net load is net of DG and losses
non-retired thermal should be shown as online even if not used
ASAP (not necessarily this week but she'll be in germany next week)
"""
"""
Look at formulas for these:
['TotalGenFixedCosts',
'FuelCostsPerPeriod',
'StorageEnergyInstallCosts',
'RFM_Fixed_Costs_Annual',
'Pumped_Hydro_Fixed_Cost_Annual',
'Federal_Investment_Tax_Credit_Annual']
['GenVariableOMCostsInTP', 'Total_StartupGenCapacity_OM_Costs']
Break down by project and sum back up, without discounting.
Also split fixed costs into amortization and O&M.
Then aggregate projects in groups:
HECO-owned (existing thermal except H-POWER, AES, Kalaeloa, cogen; any renewables?)
PPA (H-POWER, AES, Kalaeloa, cogen, most/all renewables?)
Fuel costs: allocate supply curve proportionately based on % fuel used by each plant, i.e., calculate an average cost per MMBtu used and allocate back
** see code for gen_cap.csv
for each project, each period, report
- capital outlay (power + energy) net of investment tax credits
- amortization net of investment tax credits
- fixed O&M
- variable O&M
- startup costs
- fuel expenditure (for markets, use avg. cost in fuel market incl. FuelCostsPerPeriod and RFM_Fixed_Costs_Annual)
- production (MWh)
- capacity added (MW)
- capacity retired (MW)
- capacity in place
Then also report other cost terms, summed for period:
- Pumped_Hydro_Fixed_Cost_Annual
-
Sum and discount to verify that these match objective fn. (also check that amortization matches capital outlays?)
"""
import os
import pandas as pd
from collections import OrderedDict, defaultdict
from pyomo.environ import value
from switch_model.financials import capital_recovery_factor as crf
# NOTE(review): leading indentation appears to have been lost in this copy of
# the file; code lines below are preserved verbatim. Only comments were added.
def post_solve(m, outdir=None):
""" Calculate detailed costs per generation project per period. """
if outdir is None:
outdir = m.options.outputs_dir
zone_fuel_cost = get_zone_fuel_cost(m)
has_subsidies = hasattr(m, 'gen_investment_subsidy_fraction')
# Accumulators: per-gen static data, per-gen-period data (reused each loop),
# and the per-(gen, vintage, period) records that become generator_df.
gen_data = OrderedDict()
gen_period_data = OrderedDict()
gen_vintage_period_data = OrderedDict()
for g, p in sorted(m.GEN_PERIODS):
# helper function to calculate annual sums
# NOTE(review): `ann` closes over the loop variables g and p; calls made
# after this loop (e.g. for gross_load/ev_load below) will use the *last*
# g/p from this loop — confirm that is intended.
def ann(expr):
try:
return sum(
expr(g, t) * m.tp_weight_in_year[t]
for t in m.TPS_IN_PERIOD[p]
)
except AttributeError:
# expression uses a component that doesn't exist
return None
# is this a storage gen?
is_storage = hasattr(m, 'STORAGE_GENS') and g in m.STORAGE_GENS
BuildGen = m.BuildGen[g, p] if (g, p) in m.GEN_BLD_YRS else 0.0
# BuildStorageEnergy = (
# m.BuildStorageEnergy[g, p]
# if is_storage and (g, p) in m.GEN_BLD_YRS
# else 0.0
# )
# Static descriptors for this generator (applied to all its rows later).
gen_data[g] = OrderedDict(
gen_tech=m.gen_tech[g],
gen_load_zone=m.gen_load_zone[g],
gen_energy_source=m.gen_energy_source[g],
gen_is_intermittent=int(m.gen_is_variable[g])
)
# temporary storage of per-generator data to be allocated per-vintage
# below
gen_period_data = OrderedDict(
total_output=0.0 if is_storage else ann(
lambda g, t: m.DispatchGen[g, t]
),
renewable_output=0.0 if is_storage else ann(
lambda g, t: renewable_mw(m, g, t)
),
non_renewable_output=0.0 if is_storage else ann(
lambda g, t: m.DispatchGen[g, t]-renewable_mw(m, g, t)
),
storage_load=(
ann(lambda g, t: m.ChargeStorage[g, t] - m.DispatchGen[g, t])
if is_storage else 0.0
),
fixed_om=m.GenFixedOMCosts[g, p],
variable_om=ann(
lambda g, t: m.DispatchGen[g, t] * m.gen_variable_om[g]
),
startup_om=ann(
lambda g, t:
m.gen_startup_om[g]
* m.StartupGenCapacity[g, t] / m.tp_duration_hrs[t]
),
fuel_cost=ann(
lambda g, t: sum(
0.0 # avoid nan fuel prices for unused fuels
if m.GenFuelUseRate[g, t, f] == 0.0 else
(
m.GenFuelUseRate[g, t, f]
* zone_fuel_cost[m.gen_load_zone[g], f, m.tp_period[t]]
)
for f in m.FUELS_FOR_GEN[g]
) if g in m.FUEL_BASED_GENS else 0.0
)
)
for v in m.BLD_YRS_FOR_GEN_PERIOD[g, p]:
# fill in data for each vintage of generator that is active now
gen_vintage_period_data[g, v, p] = OrderedDict(
capacity_in_place=m.BuildGen[g, v],
capacity_added=m.BuildGen[g, p] if p == v else 0.0,
# Capital outlay only in the build year, net of any investment
# subsidy; storage adds its energy-capacity cost.
capital_outlay=(
m.BuildGen[g, p] * (
m.gen_overnight_cost[g, p] +
m.gen_connect_cost_per_mw[g]
) * (
(1.0 - m.gen_investment_subsidy_fraction[g, p])
if has_subsidies else 1.0
) + (
(
m.BuildStorageEnergy[g, p]
* m.gen_storage_energy_overnight_cost[g, p]
) if is_storage else 0.0
)
) if p == v else 0.0,
amortized_cost=
m.BuildGen[g, v] * m.gen_capital_cost_annual[g, v]
+ ((
m.BuildStorageEnergy[g, v]
* m.gen_storage_energy_overnight_cost[g, v]
* crf(m.interest_rate, m.gen_max_age[g])
) if is_storage else 0.0)
- ((
m.gen_investment_subsidy_fraction[g, v]
* m.BuildGen[g, v]
* m.gen_capital_cost_annual[g, v]
) if has_subsidies else 0.0),
)
# allocate per-project values among the vintages based on amount
# of capacity currently online (may not be physically meaningful if
# gens have discrete commitment, but we assume the gens are run
# roughly this way)
vintage_share = ratio(m.BuildGen[g, v], m.GenCapacity[g, p])
for var, val in gen_period_data.items():
gen_vintage_period_data[g, v, p][var] = vintage_share * val
# record capacity retirements
# (this could be done earlier if we included the variable name
# in the dictionary key tuple instead of having a data dict for
# each key)
for g, v in m.GEN_BLD_YRS:
retire_year = v + m.gen_max_age[g]
# find the period when this retires
for p in m.PERIODS:
if p >= retire_year:
gen_vintage_period_data \
.setdefault((g, v, p), OrderedDict())['capacity_retired'] \
= m.BuildGen[g, v]
break
# convert dicts to data frames
generator_df = (
pd.DataFrame(evaluate(gen_vintage_period_data))
.unstack()
.to_frame(name='value')
)
generator_df.index.names = [
'generation_project', 'gen_vintage', 'period', 'variable'
]
for g, d in gen_data.items():
for k, v in d.items():
# assign generator general data to all rows with generator==g
generator_df.loc[g, k] = v
# convert from float
generator_df['gen_is_intermittent'] = generator_df['gen_is_intermittent'].astype(int)
generator_df = generator_df.reset_index().set_index([
'generation_project', 'gen_vintage', 'gen_tech', 'gen_load_zone',
'gen_energy_source', 'gen_is_intermittent',
'variable'
]).sort_index()
generator_df.to_csv(
os.path.join(outdir, 'generation_project_details.csv'), index=True
)
# dict should be var, gen, period
# but gens have all-years values too (technology, fuel, etc.)
# and there are per-year non-gen values
# report other costs on an undiscounted, annualized basis
# (custom modules, transmission, etc.)
# List of comparisons to make later; dict value shows which model
# components should match which variables in generator_df
itemized_cost_comparisons = {
'gen_fixed_cost': (
[
'TotalGenFixedCosts', 'StorageEnergyFixedCost',
'TotalGenCapitalCostsSubsidy'
],
['amortized_cost', 'fixed_om']
),
'fuel_cost': (
['FuelCostsPerPeriod', 'RFM_Fixed_Costs_Annual'],
['fuel_cost']
),
'variable_om': (
['GenVariableOMCostsInTP', 'Total_StartupGenCapacity_OM_Costs'],
['startup_om', 'variable_om']
)
}
##### most detailed level of data:
# owner, tech, generator, fuel (if relevant, otherwise 'all' or specific fuel or 'multiple'?)
# then aggregate up
"""
In generic summarize_results.py:
- lists of summary expressions; each creates a new variable per indexing set
then those get added to summary tables, which then get aggregated
gen_fuel_period_exprs
gen_period_exprs (can incl. owner, added to top of list from outside)
gen_exprs -> get pushed down into gen_period table? or only when creating by-period summaries?
period_exprs (get added as quasi-gens)
fuel_period_exprs
these create tables like 'summary_per_gen_fuel_period' (including quasi gen
data from period_exprs and fuel_period_exprs).
Those get pivoted
to make 'summary_per_gen_fuel_by_period', with data from 'summary_per_gen_fuel'
added to the same rows. Maybe there should be a list of summary groups too. ugh.
"""
# list of costs that should have already been accounted for
itemized_gen_costs = set(
component
for model_costs, df_costs in itemized_cost_comparisons.values()
for component in model_costs
)
# Gather all non-generator cost components (plus emissions/load info)
# per period, annualized and undiscounted.
non_gen_costs = OrderedDict()
for p in m.PERIODS:
non_gen_costs[p] = {
cost: getattr(m, cost)[p]
for cost in m.Cost_Components_Per_Period
if cost not in itemized_gen_costs
}
for cost in m.Cost_Components_Per_TP:
if cost not in itemized_gen_costs:
non_gen_costs[p][cost] = sum(
getattr(m, cost)[t] * m.tp_weight_in_year[t]
for t in m.TPS_IN_PERIOD[p]
)
non_gen_costs[p]['co2_emissions'] = m.AnnualEmissions[p]
# NOTE(review): `ann` here still sums over the last g/p of the earlier
# loop, not this loop's p — gross_load/ev_load may be identical across
# periods; confirm against intended behavior.
non_gen_costs[p]['gross_load'] = ann(
lambda g, t: sum(m.zone_demand_mw[z, t] for z in m.LOAD_ZONES)
)
non_gen_costs[p]['ev_load'] = 0.0
if hasattr(m, 'ChargeEVs'):
non_gen_costs[p]['ev_load'] += ann(
lambda g, t: sum(m.ChargeEVs[z, t] for z in m.LOAD_ZONES)
)
if hasattr(m, 'ev_charge_min') and hasattr(m, 'ChargeEVs_min'):
m.logger.error(
'ERROR: Need to update {} to handle combined loads from '
'ev_simple and ev_advanced modules'.format(__name__)
)
if hasattr(m, 'StorePumpedHydro'):
non_gen_costs[p]['Pumped_Hydro_Net_Load'] = ann(
lambda g, t: sum(
m.StorePumpedHydro[z, t] - m.GeneratePumpedHydro[z, t]
for z in m.LOAD_ZONES
)
)
non_gen_df = pd.DataFrame(evaluate(non_gen_costs)).unstack().to_frame(name='value')
non_gen_df.index.names=['period', 'variable']
non_gen_df.to_csv(os.path.join(outdir, 'non_generation_costs_by_period.csv'))
# check whether reported generator costs match values used in the model
gen_df_totals = generator_df.groupby(['variable', 'period'])['value'].sum()
gen_total_costs = defaultdict(float)
for label, (model_costs, df_costs) in itemized_cost_comparisons.items():
for p in m.PERIODS:
for cost in model_costs:
if cost in m.Cost_Components_Per_Period:
cost_val = value(getattr(m, cost)[p])
elif cost in m.Cost_Components_Per_TP:
# aggregate to period
cost_val = value(sum(
getattr(m, cost)[t] * m.tp_weight_in_year[t]
for t in m.TPS_IN_PERIOD[p]
))
else:
cost_val = 0.0
gen_total_costs[label, p, 'model'] += cost_val
gen_total_costs[label, p, 'reported'] = (
gen_df_totals.loc[df_costs, p].sum()
)
mc = gen_total_costs[label, p, 'model']
rc = gen_total_costs[label, p, 'reported']
if different(mc, rc):
m.logger.warning(
"WARNING: model and reported values don't match for {} in "
"{}: {:,.0f} != {:,.0f}; NPV of difference: {:,.0f}."
.format(label, p, mc, rc, m.bring_annual_costs_to_base_year[p]*(mc-rc))
)
# NOTE(review): bare `raise` with no active exception raises
# RuntimeError — looks like leftover debugging; confirm intent.
raise
# else:
# m.logger.info(
# "INFO: model and reported values match for {} in "
# "{}: {} == {}.".format(label, p, mc, rc)
# )
# check costs on an aggregated basis too (should be OK if the gen costs are)
cost_vars = [
var
for model_costs, df_costs in itemized_cost_comparisons.values()
for var in df_costs
]
total_costs = (
generator_df.loc[pd.IndexSlice[:, :, :, :, cost_vars], :]
.groupby('period')['value'].sum()
) + non_gen_df.unstack(0).drop(
['co2_emissions', 'gross_load', 'Pumped_Hydro_Net_Load']
).sum()
# NOTE(review): Series.iteritems() was removed in pandas 2.0 — use .items()
# when upgrading pandas.
npv_cost = value(sum(
m.bring_annual_costs_to_base_year[p] * v
for ((_, p), v) in total_costs.iteritems()
))
system_cost = value(m.SystemCost)
if different(npv_cost, system_cost):
m.logger.warning(
"WARNING: NPV of all costs in model doesn't match reported total: "
"{:,.0f} != {:,.0f}; difference: {:,.0f}."
.format(npv_cost, system_cost, npv_cost - system_cost)
)
print()
print("TODO: *** check for missing MWh terms in {}.".format(__name__))
print()
print("Creating RIST summary; may take several minutes.")
summarize_for_rist(m, outdir)
# data for HECO info request 2/14/20
print("Saving hourly reserve data.")
report_hourly_reserves(m)
if hasattr(m, 'Smooth_Free_Variables'):
# using the smooth_dispatch module; re-report dispatch data
print("Re-saving dispatch data after smoothing.")
import switch_model.generators.core.dispatch as dispatch
dispatch.post_solve(m, m.options.outputs_dir)
else:
# NOTE(review): message text below is missing "be" ("may be inflated").
print(
"WARNING: the smooth_dispatch module is not being used. Hourly "
"dispatch may be rough and hourly contingency reserve targets may "
"inflated."
)
print("Comparing Switch to EIA production data.")
if True:
compare_switch_to_eia_production(m)
else:
print("(skipped, takes several minutes)")
# value(m.SystemCost) ==
# import code
# code.interact(local=dict(list(globals().items()) + list(locals().items())))
def different(v1, v2):
    """Return True when v1 and v2 differ by more than 0.000001 * their average."""
    tolerance = 0.0000005 * (v1 + v2)
    return abs(v1 - v2) > tolerance
def renewable_mw(m, g, t):
    """Return the renewable portion (MW) of generator g's dispatch in timepoint t."""
    if not hasattr(m, "RPS_ENERGY_SOURCES"):
        # No RPS module loaded: nothing is classified as renewable.
        return 0.0
    if m.gen_energy_source[g] in m.RPS_ENERGY_SOURCES:
        # All-renewable energy source: entire dispatch counts.
        return m.DispatchGen[g, t]
    if g in m.FUEL_BASED_GENS:
        # Fuel-based generator: only its tracked renewable share counts.
        return m.DispatchGenRenewableMW[g, t]
    return 0.0
def ratio(x, y):
    """Return x / y, or 0.0 when both x and y are numerically zero."""
    if abs(value(x)) < 1e-9 and abs(value(y)) < 1e-9:
        return 0.0
    return value(x / y)
def evaluate(d):
    """Resolve every Pyomo expression in a two-level dict into plain numbers."""
    result = {}
    for outer_key, inner in d.items():
        result[outer_key] = {k: value(v) for k, v in inner.items()}
    return result
def get_zone_fuel_cost(m):
"""
Calculate average cost of each fuel in each load zone during each period
"""
if hasattr(m, 'REGIONAL_FUEL_MARKETS'):
# using fuel markets
# note: we fuel market expansion because that may be treated as a
# capital expense or may be factored into the fuel cost
rfm_fuel_expend = {
(rfm, p):
sum(
m.ConsumeFuelTier[rfm_st] * m.rfm_supply_tier_cost[rfm_st]
for rfm_st in m.SUPPLY_TIERS_FOR_RFM_PERIOD[rfm, p]
)
for rfm in m.REGIONAL_FUEL_MARKETS for p in m.PERIODS
}
rfm_fuel_use = {
(rfm, p):
sum(
m.ConsumeFuelTier[rfm_st]
for rfm_st in m.SUPPLY_TIERS_FOR_RFM_PERIOD[rfm, p]
)
for rfm in m.REGIONAL_FUEL_MARKETS for p in m.PERIODS
}
rfm_fuel_cost = {
(rfm, p):
float('nan') if rfm_fuel_use[rfm, p] == 0.0 else
(rfm_fuel_expend[rfm, p] / rfm_fuel_use[rfm, p])
for rfm in m.REGIONAL_FUEL_MARKETS for p in m.PERIODS
}
# assign to corresponding zones and fuels
zone_fuel_cost = {
(z, f, p): rfm_fuel_cost[m.zone_fuel_rfm[z, f], p]
for z, f in m.ZONE_FUELS
for p in m.PERIODS
}
else:
# simple fuel costs
zone_fuel_cost = {(z, f, p): m.fuel_cost[z, f, p]}
return zone_fuel_cost
# outdir='outputs'
# summarize_for_rist(m, outdir)
def summarize_for_rist(m, outdir=''):
    """Build RIST-workbook summaries from the CSVs written by post_solve.

    Reads non_generation_costs_by_period.csv and generation_project_details.csv
    from `outdir`, assigns an owner (PPA / HECO / distributed) to each
    technology, and writes annual_details_by_tech.csv and
    annual_details_by_owner.csv.
    """
    non_gen_df = pd.read_csv(
        os.path.join(outdir, 'non_generation_costs_by_period.csv')
    ).set_index(['variable', 'period'])['value'].unstack()
    gen_df = pd.read_csv(
        os.path.join(outdir, 'generation_project_details.csv')
    )
    # Ownership grouping for each generation technology.
    techs_for_owner = dict(
        PPA=['AES', 'Battery_Bulk', 'CC_152', 'CentralTrackingPV',
             'H-Power', 'IC_Barge', 'IC_MCBH',
             'Hawaii_Cogen', 'Tesoro_Hawaii',
             'Kalaeloa_CC1', 'Kalaeloa_CC2', 'Kalaeloa_CC3',
             'OffshoreWind', 'OnshoreWind'
             ],
        HECO=[
            'Airport_DSG', 'Battery_Conting', 'Battery_Reg', 'CIP_CT',
            'IC_Schofield',
            'Honolulu_8', 'Honolulu_9',
            'Kahe_1', 'Kahe_2', 'Kahe_3', 'Kahe_4', 'Kahe_5', 'Kahe_6',
            'Waiau_3', 'Waiau_4', 'Waiau_5', 'Waiau_6', 'Waiau_7', 'Waiau_8',
            'Waiau_9', 'Waiau_10',
        ],
        distributed=['DistBattery', 'FlatDistPV', 'SlopedDistPV']
    )
    owner_for_tech = {t: o for o, techs in techs_for_owner.items() for t in techs}
    gen_df['owner'] = gen_df['gen_tech'].replace(owner_for_tech)
    # Techs left unreplaced have no owner assigned; warn so the table above
    # can be extended.
    missing_owners = set(gen_df['owner']) - set(techs_for_owner.keys())
    if missing_owners:
        print("\nWARNING: some plants were not assigned owners: {}\n".format(missing_owners))
    gen_cols = ['owner', 'variable', 'gen_tech', 'gen_vintage', 'gen_is_intermittent', 'period']
    gen_df = gen_df.groupby(gen_cols)['value'].sum().unstack()
    # Fold pumped hydro and system-level rows in as quasi-generators.
    gen_df.loc[('PPA', 'ppa_cost', 'PumpedHydro', '', 0), :] \
        = non_gen_df.loc['Pumped_Hydro_Fixed_Cost_Annual', :]
    gen_df.loc[('PPA', 'storage_net_load', 'PumpedHydro', '', 0), :] \
        = non_gen_df.loc['Pumped_Hydro_Net_Load', :]
    for col in ['co2_emissions', 'gross_load', 'ev_load']:
        gen_df.loc[('system', col, '', '', ''), :] = non_gen_df.loc[col, :]
    # Expand to one column per year through 2049; one-time quantities become 0
    # in non-reported years, recurring quantities are carried forward below.
    gen_df = gen_df.reindex(range(min(gen_df.columns), 2050), axis=1)
    gen_df.update(
        gen_df.loc[
            pd.IndexSlice[:, ['capacity_added', 'capacity_retired', 'capital_outlay'], :], :
        ].fillna(0)
    )
    # carry other values forward to the end of the period
    period_edges = non_gen_df.columns.to_list() + [2050]
    for start, end in zip(period_edges[:-1], period_edges[1:]):
        gen_df.update(
            gen_df.loc[:, start:end-1].fillna(method='ffill', axis=1)
        )
    gen_df = gen_df.sort_index()
    # drop zeros and then drop all-nan rows
    gen_df = gen_df.replace(0, float('nan')).dropna(how='all')
    gen_df.to_csv(os.path.join(outdir, 'annual_details_by_tech.csv'))
    var_df = gen_df.groupby(['owner', 'variable']).sum()
    var_df.to_csv(os.path.join(outdir, 'annual_details_by_owner.csv'))
# NOTE(review): leading indentation appears to have been lost in this copy of
# the file; code lines below are preserved verbatim. Only comments were added.
def compare_switch_to_eia_production(m):
# Build per-(generator, fuel, year) production and fuel-use totals from the
# solved model, pull the matching actuals from EIA forms 860/923, and write
# a side-by-side comparison to compare_eia_switch_production.csv.
# get totals per gen, aggregate up to group level (can probably just select by matching project name)
switch_data = dict()
def vsum(iter):
return value(sum(iter))
for g, p in m.GEN_PERIODS:
# Storage gens report net output (discharge minus charging).
# NOTE(review): m.STORAGE_GENS is accessed without hasattr — this assumes
# the storage module is always loaded; confirm.
dispatch = (
(lambda m, g, t: m.DispatchGen[g, t] - m.ChargeStorage[g, t])
if g in m.STORAGE_GENS else
(lambda m, g, t: m.DispatchGen[g, t])
)
if g in m.FUEL_BASED_GENS:
total_fuel_in_tp = {
t: vsum(m.GenFuelUseRate[g, t, f] for f in m.FUELS_FOR_GEN[g])
for t in m.TPS_IN_PERIOD[p]
}
for f in m.FUELS_FOR_GEN[g]:
fuel_use = vsum(
m.GenFuelUseRate[g, t, f] * m.tp_weight_in_year[t]
for t in m.TPS_IN_PERIOD[p]
)
if fuel_use != 0.0:
switch_data[g, f, p, 'fuel_use'] = fuel_use
# prorate production among sources
switch_data[g, f, p, 'production'] = vsum(
dispatch(m, g, t)
* m.tp_weight_in_year[t]
* ratio(
m.GenFuelUseRate[g, t, f], total_fuel_in_tp[t]
)
for t in m.TPS_IN_PERIOD[p]
)
else:
switch_data[g, m.gen_energy_source[g], p, 'fuel_use'] = 0.0
switch_data[g, m.gen_energy_source[g], p, 'production'] = vsum(
dispatch(m, g, t) * m.tp_weight_in_year[t]
for t in m.TPS_IN_PERIOD[p]
)
switch_df = pd.Series(switch_data, name='value').to_frame()
switch_df.index.names=['generation_project', 'switch_fuel', 'year', 'variable']
switch_df = switch_df.reset_index()
### TODO: add lake_wilson
# Get EIA data
# list of plants; tends to include some that are retired, so no need to
# look further back
# ~5s
oahu_plants = read_excel_cached(
os.path.join('EIA data', 'eia8602018', '2___Plant_Y2018.xlsx'),
sheet_name='Plant',
skiprows=1,
header=0, index_col=None
)
oahu_plants = oahu_plants.loc[
(oahu_plants['State']=='HI') & (oahu_plants['County']=='Honolulu'),
['Plant Code']
].rename({'Plant Code': 'Plant Id'}, axis=1).reset_index(drop=True)
# get EIA production and fuel data
# year = 2018
eia_dfs = []
for year in range(2012, 2019+1):
# ~21s
filename = os.path.join(
'EIA data',
'EIA923_Schedules_2_3_4_5_M_12_{}_Final_Revision.xlsx'.format(year)
)
# Some years were published under slightly different file names.
if not os.path.exists(filename):
if year == 2019:
filename = filename.replace('Final_Revision', '21FEB2020')
elif year == 2013:
filename = filename.replace('5_M_12_20', '5_20')
df = read_excel_cached(
filename,
sheet_name='Page 1 Generation and Fuel Data',
skiprows=5,
header=0, index_col=None
)
# Keep only Oahu plants and normalize the column names.
df = df.merge(oahu_plants, on='Plant Id', how='inner')
df.columns = [c.replace('\n', ' ') for c in df.columns]
df = df.rename({
'YEAR': 'year',
'AER Fuel Type Code': 'eia_fuel',
'Elec Fuel Consumption MMBtu': 'fuel_use',
'Net Generation (Megawatthours)': 'production'
}, axis=1)
df['plant_mover'] = df['Plant Name'] + ' ' + df['Reported Prime Mover']
df = df.loc[df['production'] != 0.0, :] # drop extraneous records
eia_dfs.append(df.melt(
id_vars=['plant_mover', 'eia_fuel', 'year'],
value_vars=['production', 'fuel_use'],
var_name='variable', value_name='value'
))
eia_df = pd.concat(eia_dfs, axis=0)
# give plants and fuels common names
eia_plant_name, switch_plant_name = get_eia_switch_plants(eia_df, switch_df)
eia_fuel_name, switch_fuel_name = get_eia_switch_fuels(eia_df, switch_df)
eia_df['fuel'] = eia_df['eia_fuel'].replace(eia_fuel_name)
switch_df['fuel'] = switch_df['switch_fuel'].replace(switch_fuel_name)
eia_df['plant'] = eia_df['plant_mover'].replace(eia_plant_name)
switch_df['plant'] = switch_df['generation_project'].replace(switch_plant_name)
# Stack actuals and model results, then pivot to side-by-side columns.
eia_df['source'] = 'actual'
switch_df['source'] = 'switch'
cols = ['variable', 'plant', 'fuel', 'source', 'year', 'value']
compare = (
pd.concat([eia_df.loc[:, cols], switch_df.loc[:, cols]], axis=0)
.groupby(cols[:-1])
.sum()
.unstack(['source', 'year'])
).loc[:, 'value'].sort_index(axis=0).sort_index(axis=1)
compare.to_csv(os.path.join(m.options.outputs_dir, 'compare_eia_switch_production.csv'))
import hashlib
def read_excel_cached(excel_file, *args, **kwargs):
    """Read an Excel sheet, caching the parsed DataFrame as a pickle.

    The cache file sits next to the workbook and its name embeds a short
    hash of the read arguments, so different sheet/option combinations get
    separate caches.
    """
    h = hashlib.sha1(str([args, kwargs]).encode()).hexdigest()[:6]
    pickle_file = os.path.splitext(excel_file)[0] + '.' + h + '.zip'
    if os.path.exists(pickle_file):
        return pd.read_pickle(pickle_file)
    print("Reading {} and caching in {}; takes about 20s.".format(excel_file, pickle_file))
    df = pd.read_excel(excel_file, *args, **kwargs)
    df.to_pickle(pickle_file)
    return df
def get_eia_switch_fuels(eia_df, switch_df):
    """Build renaming maps that give EIA and Switch fuels common names.

    Returns (eia_renamer, switch_renamer): dicts mapping each raw fuel code
    found in the respective dataframe to a shared canonical name. Fuels not
    covered by the hand-written table below map to themselves.
    """
    # canonical name -> ([EIA codes], [Switch fuel names])
    eia_switch_fuels = {
        'LSFO': (['RFO'], ['LSFO']),
        'diesel': (['DFO'], ['Diesel']),
        'waste oil': (['WOO'], []),
        'gas': (['OOG'], ['LNG']),
        'muni waste': (['MLG'], ['MSW']),
        'other': (['OTH'], ['Battery']),
        'coal': (['COL'], ['Coal']),
        'biodiesel': (['ORW'], ['Biodiesel']),
        'wind': (['WND'], ['WND']),
        'solar': (['SUN'], ['SUN']),
    }

    # Any EIA code not in the table maps to itself.
    covered = {
        code
        for eia_codes, _ in eia_switch_fuels.values()
        for code in eia_codes
    }
    for code in eia_df['eia_fuel'].unique():
        if code not in covered:
            eia_switch_fuels[code] = ([code], [])

    # Any Switch fuel not in the table maps to itself.
    covered = {
        fuel
        for _, switch_fuels in eia_switch_fuels.values()
        for fuel in switch_fuels
    }
    for fuel in switch_df['switch_fuel'].unique():
        if fuel not in covered:
            eia_switch_fuels[fuel] = ([], [fuel])

    # Split the combined table into the two per-source renaming dicts.
    eia_renamer = {
        code: name
        for name, (eia_codes, _) in eia_switch_fuels.items()
        for code in eia_codes
    }
    switch_renamer = {
        fuel: name
        for name, (_, switch_fuels) in eia_switch_fuels.items()
        for fuel in switch_fuels
    }
    return eia_renamer, switch_renamer
# print("""
# ========================================
# TODO (fix these problems):
# ========================================
# """)
def get_eia_switch_plants(eia_df, switch_df):
    """Build plant-name translation tables between EIA and Switch data sets.

    Returns (eia_renamer, switch_renamer): dicts mapping each EIA
    'plant_mover' / Switch 'generation_project' name to a common display
    name. The many-to-many mapping below is hand-maintained; any plant or
    project not listed is added automatically, mapped to itself.
    """
    # get lists of Oahu plants and prime movers (gives initial data for
    # eia_switch_plants dict)
    # {m: (['{}'.format(m)], []) for m in df['plant_mover'].drop_duplicates().sort_values()}
    # list(m.GENERATION_PROJECTS)
    # (also see existing plants spreadsheet in Switch's database inputs)
    # map eia plants to switch projects (many to many)
    # display name -> ([EIA plant_mover names], [Switch generation_project names])
    eia_switch_plants = {
        'AES Coal': (['AES Hawaii ST'], ['Oahu_AES']),
        'CIP CT': (['Campbell Industrial Park GT'], ['Oahu_CIP_CT']),
        'H-Power': (['H Power ST'], ['Oahu_H-Power']),
        'Airport DSG': (
            ['HNL Emergency Power Facility IC'],
            ['Oahu_Airport_DSG']
        ),
        'Par and Tesoro cogen': (
            ['Hawaii Cogen GT', 'Tesoro Hawaii GT'],
            ['Oahu_Hawaii_Cogen', 'Oahu_Tesoro_Hawaii']
        ),
        'Kahe': (
            ['Kahe ST'],
            [
                'Oahu_Kahe_1',
                'Oahu_Kahe_2',
                'Oahu_Kahe_3',
                'Oahu_Kahe_4',
                'Oahu_Kahe_5',
                'Oahu_Kahe_6',
            ]
        ),
        'Kahuku Wind': (
            ['Kahuku Wind Power LLC WT'],
            ['Oahu_OnshoreWind_OnWind_Kahuku']
        ),
        'Kalaeloa': (
            ['Kalaeloa Cogen Plant CA', 'Kalaeloa Cogen Plant CT'],
            [
                'Oahu_Kalaeloa_CC1',  # train 1
                'Oahu_Kalaeloa_CC2',  # train 2
                'Oahu_Kalaeloa_CC3',  # duct burner
            ]
        ),
        'Kawailoa Wind': (
            ['Kawailoa Wind WT'],
            ['Oahu_OnshoreWind_OnWind_Kawailoa']
        ),
        'Schofield Generating Station IC': (
            ['Schofield Generating Station IC'],
            ['Oahu_IC_Schofield']
        ),
        'Waiau GT': (
            ['Waiau GT'],
            ['Oahu_Waiau_10', 'Oahu_Waiau_9']
        ),
        'Waiau ST': (
            ['Waiau ST'],
            [
                'Oahu_Waiau_3',
                'Oahu_Waiau_4',
                'Oahu_Waiau_5',
                'Oahu_Waiau_6',
                'Oahu_Waiau_7',
                'Oahu_Waiau_8',
            ]
        ),
        'Batteries': (
            ['Campbell Industrial Park BESS BA'],
            [
                'Oahu_Battery_Bulk',
                'Oahu_Battery_Reg',  # should always be 0
                'Oahu_Battery_Conting',  # should always be 0
                'Oahu_DistBattery'
            ]
        ),
        'Utility-Scale Solar': (
            [
                'Aloha Solar Energy Fund 1 PK1 PV',
                'Kalaeloa Solar Two PV',
                'Kalaeloa Renewable Energy Park PV',
                'Kapolei Solar Energy Park PV',
                'Waihonu North Solar PV',
                'Waihonu South Solar PV',
                'Pearl City Peninsula Solar Park PV',
                'EE Waianae Solar Project PV',
                'Kawailoa Solar PV',
                'Waipio Solar PV',
            ],
            [
                'Oahu_CentralTrackingPV_PV_01',
                'Oahu_CentralTrackingPV_PV_02',
                'Oahu_CentralTrackingPV_PV_03',
                'Oahu_CentralTrackingPV_PV_04',
                'Oahu_CentralTrackingPV_PV_05',
                'Oahu_CentralTrackingPV_PV_06',
                'Oahu_CentralTrackingPV_PV_07',
                'Oahu_CentralTrackingPV_PV_08',
                'Oahu_CentralTrackingPV_PV_09',
                'Oahu_CentralTrackingPV_PV_10',
                'Oahu_CentralTrackingPV_PV_11',
                'Oahu_CentralTrackingPV_PV_12',
                'Oahu_CentralTrackingPV_PV_13',
                'Oahu_CentralTrackingPV_PV_14',
                'Oahu_CentralTrackingPV_PV_15',
                'Oahu_CentralTrackingPV_PV_16',
                'Oahu_CentralTrackingPV_PV_17',
                'Oahu_CentralTrackingPV_PV_18',
            ]
        ),
        'New Onshore Wind': (
            [],
            [
                'Oahu_OnshoreWind_OnWind_101',
                'Oahu_OnshoreWind_OnWind_102',
                'Oahu_OnshoreWind_OnWind_103',
                'Oahu_OnshoreWind_OnWind_104',
                'Oahu_OnshoreWind_OnWind_105',
                'Oahu_OnshoreWind_OnWind_106',
                'Oahu_OnshoreWind_OnWind_107',
                'Oahu_OnshoreWind_OnWind_201',
                'Oahu_OnshoreWind_OnWind_202',
                'Oahu_OnshoreWind_OnWind_203',
                'Oahu_OnshoreWind_OnWind_204',
                'Oahu_OnshoreWind_OnWind_205',
                'Oahu_OnshoreWind_OnWind_206',
                'Oahu_OnshoreWind_OnWind_207',
                'Oahu_OnshoreWind_OnWind_208',
                'Oahu_OnshoreWind_OnWind_209',
                'Oahu_OnshoreWind_OnWind_301',
                'Oahu_OnshoreWind_OnWind_302',
                'Oahu_OnshoreWind_OnWind_303',
                'Oahu_OnshoreWind_OnWind_304',
                'Oahu_OnshoreWind_OnWind_305',
                'Oahu_OnshoreWind_OnWind_306',
                'Oahu_OnshoreWind_OnWind_307',
                'Oahu_OnshoreWind_OnWind_308',
                'Oahu_OnshoreWind_OnWind_309',
                'Oahu_OnshoreWind_OnWind_401',
                'Oahu_OnshoreWind_OnWind_402',
                'Oahu_OnshoreWind_OnWind_403',
                'Oahu_OnshoreWind_OnWind_404',
                'Oahu_OnshoreWind_OnWind_405',
                'Oahu_OnshoreWind_OnWind_406',
                'Oahu_OnshoreWind_OnWind_407',
                'Oahu_OnshoreWind_OnWind_408',
                'Oahu_OnshoreWind_OnWind_409',
                'Oahu_OnshoreWind_OnWind_410',
                'Oahu_OnshoreWind_OnWind_501',
                'Oahu_OnshoreWind_OnWind_502',
                'Oahu_OnshoreWind_OnWind_503',
                'Oahu_OnshoreWind_OnWind_504',
                'Oahu_OnshoreWind_OnWind_505',
                'Oahu_OnshoreWind_OnWind_506',
                'Oahu_OnshoreWind_OnWind_507',
                'Oahu_OnshoreWind_OnWind_508',
                'Oahu_OnshoreWind_OnWind_509',
            ]
        ),
        'New Offshore Wind': ([], ['Oahu_OffshoreWind_OffWind']),
        'Distributed PV': (
            [],
            [
                'Oahu_FlatDistPV_Oahu_FlatDistPV_0',
                'Oahu_FlatDistPV_Oahu_FlatDistPV_1',
                'Oahu_FlatDistPV_Oahu_FlatDistPV_2',
                'Oahu_FlatDistPV_Oahu_FlatDistPV_3',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_0',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_1',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_10',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_11',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_12',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_13',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_14',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_15',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_2',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_3',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_4',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_5',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_6',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_7',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_8',
                'Oahu_SlopedDistPV_Oahu_SlopedDistPV_9',
            ]
        ),
        'IC Barge': ([], ['Oahu_IC_Barge']),
        'IC MCBH': ([], ['Oahu_IC_MCBH']),
        'New CC 152': ([], ['Oahu_CC_152'])
    }
    # add missing Switch projects (self-mapped, no EIA counterpart)
    included_projects = set(
        g
        for plants, projects in eia_switch_plants.values()
        for g in projects
    )
    eia_switch_plants.update({
        gp: ([], [gp])
        for gp in switch_df['generation_project'].unique()
        if gp not in included_projects
    })
    # add missing EIA plants (self-mapped, no Switch counterpart)
    included_plants = set(
        p
        for plants, projects in eia_switch_plants.values()
        for p in plants
    )
    eia_switch_plants.update({
        pm: ([pm], [])
        for pm in eia_df['plant_mover'].unique()
        if pm not in included_plants
    })
    # split into eia conversion table and switch conversion table
    eia_renamer = {
        p: name
        for name, (plants, projects) in eia_switch_plants.items()
        for p in plants
    }
    switch_renamer = {
        p: name
        for name, (plants, projects) in eia_switch_plants.items()
        for p in projects
    }
    return eia_renamer, switch_renamer
def report_hourly_reserves(m):
    """Dump per-timepoint spinning-reserve requirements from a solved Switch
    model to ``<outputs_dir>/reserve_requirements.csv``.

    One row per (balancing area, reserve type, direction, timepoint), with
    the evaluated requirement and the timeseries annual scaling weight.
    """
    import os
    import pandas as pd
    rows = []
    # NOTE(review): ``dir`` shadows the builtin; left unchanged in this pass.
    for dir in ['Up', 'Down']:
        # Pyomo constraint component, e.g. Satisfy_Spinning_Reserve_Up_Requirement
        cmp = getattr(m, 'Satisfy_Spinning_Reserve_{}_Requirement'.format(dir))
        rows.extend([
            (
                ba, rt, dir.lower(), m.tp_timestamp[tp],
                # evaluate the first operand of the constraint expression —
                # presumably the requirement side; TODO confirm against the
                # constraint definition in the reserves module
                constr.body.args[0](),
                m.ts_scale_to_year[m.tp_ts[tp]]
            )
            # NOTE(review): .iteritems() is the old Pyomo component API;
            # newer Pyomo uses .items()
            for (rt, ba, tp), constr in cmp.iteritems()
        ])
    reserves = pd.DataFrame(rows, columns=[
        'balancing_area', 'reserve_type', 'direction', 'timepoint',
        'target', 'day_repeat'
    ])
    outfile = os.path.join(m.options.outputs_dir, 'reserve_requirements.csv')
    reserves.to_csv(outfile, index=False)
    print("Created {}".format(outfile))
if __name__ == '__main__' and 'm' not in locals():
    # For debugging:
    # NOTE(review): sys.argv is assigned four times below; only the LAST
    # assignment takes effect. The earlier ones are kept as ready-made
    # alternative debugging configurations.
    import sys, switch_model.solve
    indir = 'inputs_annual'
    outdir = 'outputs_annual_smoothed_redo' # reused elsewhere when debugging
    sys.argv=[
        'switch solve',
        '--inputs-dir', indir,
        '--outputs-dir', outdir,
        '--reload-prior-solution',
        '--no-post-solve',
        # '--input-alias', 'gen_build_costs.csv=gen_build_costs_no_new_thermal.csv'
        # '--exclude-module', 'switch_model.hawaii.fed_subsidies'
    ]
    sys.argv = [
        'switch solve',
        '--inputs-dir', 'inputs_annual', # abt. 3 mins to construct, 2 more to load solution
        '--outputs-dir', '/tmp/outputs_annual_smoothed',
        '--reload-prior-solution',
        '--input-alias', 'gen_build_predetermined.csv=gen_build_predetermined_adjusted.csv',
        '--exclude-module', 'switch_model.hawaii.heco_outlook_2019',
        '--ph-mw', '150', '--ph-year', '2022',
        # '--include-module', 'switch_model.hawaii.smooth_dispatch',
        # '--no-post-solve',
    ]
    sys.argv = [
        'switch solve',
        '--inputs-dir', 'inputs_annual', # abt. 3 mins to construct, 2 more to load solution
        '--outputs-dir', 'outputs_annual_smoothed_redo',
        '--reload-prior-solution',
        # multiple values after one flag: Switch's argument parser accepts
        # repeated values for these list-valued options
        '--input-alias',
        'gen_build_predetermined.csv=gen_build_predetermined_adjusted.csv',
        'generation_projects_info.csv=generation_projects_info_adjusted.csv',
        '--ph-mw', '150', '--ph-year', '2045',
        '--exclude-module', 'switch_model.hawaii.heco_outlook_2020_06',
        '--exclude-module',
        'switch_model.hawaii.save_results',
        'switch_model.reporting',
        'summarize_results',
        '--include-module',
        'switch_model.hawaii.smooth_dispatch',
        'switch_model.hawaii.save_results',
        'switch_model.reporting',
        'summarize_results',
        '--no-post-solve',
    ]
    # This is the configuration that actually runs (last assignment wins).
    sys.argv = [
        'switch solve',
        '--inputs-dir', 'inputs_2019_2022',
        '--outputs-dir', 'outputs_2019_2022',
        '--reload-prior-solution',
        '--input-alias',
        'gen_build_predetermined.csv=gen_build_predetermined_adjusted.csv',
        'generation_projects_info.csv=generation_projects_info_adjusted.csv',
        '--ph-mw', '0', '--ph-year', '2022',
        '--exclude-module', 'switch_model.hawaii.heco_outlook_2020_06',
        '--exclude-module',
        'switch_model.hawaii.save_results',
        'switch_model.reporting',
        'summarize_results',
        '--include-module',
        'switch_model.hawaii.smooth_dispatch',
        'switch_model.hawaii.save_results',
        'switch_model.reporting',
        'summarize_results',
        '--no-post-solve',
    ]
    m = switch_model.solve.main()
    # m.post_solve()
    # sys.argv.extend([
    #     '--exclude-module', 'switch_model.hawaii.smooth_dispatch',
    #     '--outputs-dir', '/tmp/outputs_annual_unsmoothed',
    # ])
    # mu = switch_model.solve.main()
    # post_solve(m, outdir)
| StarcoderdataPython |
11245210 | <reponame>danielmicaletti/music-academy-student-portal
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class ScheduleConfig(AppConfig):
    """Django application configuration for the ``schedule`` app."""
    name = 'schedule'
| StarcoderdataPython |
4932552 | <filename>tools/clipper/bin/train_cascade.py
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import argparse
import os
import subprocess
import sys
import time
from pprint import pprint
def parsearguments():
    """Parse the command-line options for cascade training.

    Positional arguments are the positive and negative sample files;
    the remaining flags tune opencv_traincascade.
    """
    ap = argparse.ArgumentParser(description='run cascade training')
    ap.add_argument('positivefilename', help='positive sample file')
    ap.add_argument('negativefilename', help='negative sample file')
    ap.add_argument('-m', '--maxfarate', type=float, default=0.5,
                    help='max false alarm rate')
    ap.add_argument('-d', '--dstdir', type=str, default='train',
                    help='destination directory')
    ap.add_argument('-f', '--feature', type=str, default='LBP',
                    help='feature type')
    ap.add_argument('-w', '--width', type=int, default=24, help='width')
    return ap.parse_args()
def createsamples(positivefile, vecdir='./vec', width=24, height=24):
    """Run ``opencv_createsamples`` on an annotation file.

    Parameters:
        positivefile: annotation file with one positive sample per line.
        vecdir: directory in which the .vec file is created (made if absent).
        width, height: sample size passed to opencv_createsamples.

    Returns:
        (vecfile, numpos): path of the generated .vec file and the number
        of positive samples (annotation lines).

    Exits the process if the opencv_createsamples binary cannot be started.
    """
    os.environ['PATH'] = '/bin:/usr/bin:/usr/local/bin'
    if not os.path.isdir(vecdir):
        os.mkdir(vecdir)
    # Count annotation lines; the previous open(...).readlines() leaked the
    # file handle and read the whole file into memory.
    with open(positivefile) as f:
        numpos = sum(1 for _ in f)
    print('samples: %d' % (numpos,))
    vecfile = vecdir + '/' + positivefile + '.vec'
    cmdline = ['opencv_createsamples', '-info', positivefile,
        '-vec', vecfile, '-num', str(numpos), '-w', str(width), '-h', str(height)]
    print(' '.join(cmdline))
    try:
        p = subprocess.Popen(cmdline, cwd='./', shell=False,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT, close_fds=True)
    except OSError as e:
        print(e)
        sys.exit(-1)
    # Stream the tool's output as it runs.
    while True:
        line = p.stdout.readline()
        if not line:
            break
        print(line.rstrip())
    p.wait()  # reap the child; the exit status was previously stored but unused
    print('')
    return (vecfile, numpos)
def traincascade(dstdir, vecfile, numpos, negativefilename, featuretype='LBP', maxfarate=0.5,
                 width=24, height=24):
    """Run ``opencv_traincascade`` on a previously created .vec file.

    Parameters:
        dstdir: output directory for the trained cascade (made if absent).
        vecfile: .vec sample file produced by createsamples().
        numpos: number of positive samples; scaled down to 85% because
            opencv_traincascade consumes extra positives per stage.
        negativefilename: background description file (one image per line).
        featuretype: 'LBP' or 'HAAR'.
        maxfarate: per-stage maximum false alarm rate.
        width, height: sample size (must match createsamples).

    Exits the process if the opencv_traincascade binary cannot be started.
    """
    if not os.path.isdir(dstdir):
        os.mkdir(dstdir)
    numpos = int(round(numpos * 0.85))
    # Count negative samples; the previous open(...).readlines() leaked the
    # file handle.
    with open(negativefilename) as f:
        numneg = sum(1 for _ in f)
    cmdline = [
        'opencv_traincascade', '-data', dstdir, '-vec', vecfile,
        '-bg', negativefilename, '-numPos', str(numpos), '-numNeg', str(numneg),
        '-featureType', featuretype, '-maxFalseAlarmRate', str(maxfarate),
        '-w', str(width), '-h', str(height)
    ]
    print(' '.join(cmdline))
    try:
        print('Start cascade training')
        p = subprocess.Popen(cmdline, cwd='./', shell=False,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT, close_fds=True)
    except OSError as e:
        print(e)
        sys.exit(-1)
    # Stream the tool's output as it runs.
    while True:
        line = p.stdout.readline()
        if not line:
            break
        print(line.rstrip())
    p.wait()  # reap the child; the exit status was previously stored but unused
if __name__ == '__main__':
    args = parsearguments()
    positivefilename = args.positivefilename
    negativefilename = args.negativefilename
    maxfarate = args.maxfarate
    dstdir = args.dstdir
    feature = args.feature
    width = args.width
    # Build the .vec sample file, then train the cascade and report timing.
    # Samples are square: the width option is used for both dimensions.
    (vecfile, numpos) = createsamples(positivefilename, width=width, height=width)
    # Uncomment to reuse a previously generated .vec file:
    # vecfile = './vec/positive.dat.vec'
    # numpos = len(open(args.positivefilename).readlines())
    ts = time.time()
    traincascade(dstdir, vecfile, numpos, negativefilename, feature, maxfarate, width, width)
    processtime = int(time.time() - ts)
    print('process time: %s' % (str(processtime),))
| StarcoderdataPython |
9789910 | # Generated by Django 3.1.7 on 2021-03-23 18:05
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration altering Organization.email and .number.

    NOTE(review): ``email`` becomes a URLField here, which looks odd for an
    email address (EmailField would be usual) — but migrations record
    historical model state and must not be edited after being applied.
    """

    dependencies = [
        ('accounts', '0003_organization_img'),
    ]

    operations = [
        migrations.AlterField(
            model_name='organization',
            name='email',
            field=models.URLField(),
        ),
        migrations.AlterField(
            model_name='organization',
            name='number',
            field=models.IntegerField(),
        ),
    ]
| StarcoderdataPython |
9696690 | <gh_stars>1-10
#coding: utf-8
class DCRun(object):
def __init__(self):
self.status = "invalid"
self.target_ips = set([])
def add_valid_room_ip(self, ip):
self.target_ips.add(ip)
def delete_room_ip(self, ip):
try:
self.target_ips.remove(ip)
finally:
pass
def view_ips(self):
| StarcoderdataPython |
8028025 | <filename>pytorch-commen-code/code4.py
import torch
import torchvision
'''
# 主要介绍张量操作
'''
### Tensor reshaping
# Unlike torch.view, torch.reshape automatically handles
# non-contiguous input tensors.
tensor = torch.rand(2, 3, 4)
shape = (6, 4)
tensor = torch.reshape(tensor, shape)
### Shuffling
# Shuffle along the first dimension.
tensor = tensor[torch.randperm(tensor.size(0))]
### Horizontal flip
# tensor [n, c, h, w]  (flips the last, width, axis)
tensor = tensor[:, :, :, torch.arange(tensor.size(3) - 1, -1, -1).long()]
### Copying
# numpy.copy() tensor.clone()
# Operation | New/Shared memory | Still in computation graph |
tensor.clone() # | New | Yes |
tensor.detach() # | Shared | No |
tensor.detach().clone() # | New | No |
### Concatenation
# torch.cat joins along an existing dimension, torch.stack adds a new one:
# for three 10x5 tensors, cat gives 30x5 while stack gives 3x10x5.
'''
注意torch.cat和torch.stack的区别在于torch.cat沿着给定的维度拼接,
而torch.stack会新增一维。
例如当参数是3个10x5的张量,
torch.cat的结果是30x5的张量,
而torch.stack的结果是3x10x5的张量。
'''
# NOTE(review): list_of_tensors is not defined in this snippet file.
tensor = torch.cat(list_of_tensors, dim=0)
tensor = torch.stack(list_of_tensors, dim=0)
### one-hot
tensor = torch.tensor([0, 2, 1, 3])
N = tensor.size(0)
num_classes = 4
one_hot = torch.zeros(N, num_classes).long()
one_hot.scatter_(dim=1,
                 index=torch.unsqueeze(tensor,dim=1),
                 src=torch.ones(N, num_classes).long()
                 )
### Getting non-zero elements
torch.nonzero(tensor) # index of non-zero elements
torch.nonzero(tensor == 0) # index of zero elements
torch.nonzero(tensor).size(0) # number of non-zero elements
torch.nonzero(tensor == 0).size(0) # number of zero elements
### Testing two tensors for equality
torch.allclose(tensor1, tensor2) # float tensor
torch.equal(tensor1, tensor2) # int tensor
### Tensor expansion
# Expand tensor of shape 64*512 to shape 64*512*7*7.
tensor = torch.rand(64,512)
torch.reshape(tensor, (64, 512, 1, 1)).expand(64, 512, 7, 7)
### Matrix multiplication
# Matrix multiplication: (m*n) * (n*p) -> (m*p).
result = torch.mm(tensor1, tensor2)
# Batch matrix multiplication: (b*m*n) * (b*n*p) -> (b*m*p)
result = torch.bmm(tensor1, tensor2)
# Element-wise multiplication.
result = tensor1 * tensor2
### Pairwise Euclidean distances between two sets of points
# Uses the broadcast mechanism: X1[:, None, :] - X2 has shape (n1, n2, d).
dist = torch.sqrt(torch.sum((X1[:, None, :] - X2)**2, dim=2))
3312880 | import argparse
import json
import os
class ConfigParser:
    """Collects run configuration from the command line and ``./config.json``."""

    # All methods are stateless; mark them @staticmethod explicitly instead of
    # relying on class-level calls of plain functions (also makes them safe to
    # call on an instance).
    @staticmethod
    def _create_command_line_parser():
        """Specify the expected arguments.

        Returns:
            parser: An ArgumentParser object from standard library argparse.
        """
        parser = argparse.ArgumentParser()
        parser.add_argument('-m', '--mode', type=str, choices=['train','val','test','online','pretrain'], default='train', help='Running mode: [pretrain | train | val | test | online]' )
        parser.add_argument('-c', '--continue_exp', type=str, help='The exp folder name from which you want to continue. This option is valid for train, val and test modes.')
        parser.add_argument('-e', '--exp', type=str, default='pose', help='The exp folder name to which you want to save your checkpoint or outputs, e.g. try01. This is valid for train, val and test modes.')
        parser.add_argument('-f', '--resume_file', type=str, default='checkpoint.pth.tar' ,help='The checkpoint file name.')
        parser.add_argument('-p', '--epoch', type=str, default='0', help='Epoch num you want to start from')
        parser.add_argument('-t', '--trans_style', type=int, default=0, help='Want to transfer to opposite style or just reconstruction? Can not be 1 when training!')
        return parser

    @staticmethod
    def _parse_command_line():
        """Finish command line parsing.

        Returns:
            args: argparse.Namespace object

        Raises:
            ValueError: if style transfer is requested in train mode.
        """
        parser = ConfigParser._create_command_line_parser()
        args = parser.parse_args()
        print('options: ')
        print(json.dumps(vars(args), indent = 4))
        if args.mode == 'train' and args.trans_style != 0:
            # Previously a bare assert, which silently disappears under
            # ``python -O``; raise explicitly instead.
            raise ValueError('trans_style must be 0 in train mode')
        return args

    @staticmethod
    def _parse_config_file(opt):
        """Read JSON formatted configuration file.

        Args:
            opt: A Namespace object formed by parsing command line.

        Returns:
            config: A dict with loader, trainer, model, evaluator, crit,
                expPath information plus opt (the parsed command line).
        """
        with open('./config.json', 'r') as f:
            config = json.load(f)
        if opt.continue_exp:
            config['contPath'] = os.path.join(config['expPath'], opt.continue_exp)
        # Epoch arrives as a string from argparse; convert in place.
        opt.epoch = float(opt.epoch)
        config['expPath'] = os.path.join(config['expPath'], opt.exp)
        config['loader']['isTrans'] = int(opt.trans_style)
        print('config: ')
        print(json.dumps(config, indent = 4))
        config['opt'] = opt
        return config

    @staticmethod
    def parse_config():
        """The top level function of parsing."""
        # parse command line
        opt = ConfigParser._parse_command_line()
        # parse config file, combine opt and config into config.
        config = ConfigParser._parse_config_file(opt)
        return config
| StarcoderdataPython |
71818 | #!/usr/bin/python
import urllib2
import sys
import cv2.cv as cv
import numpy
if __name__ == "__main__":
    # Live webcam demo (legacy cv2.cv API): shows a 2x2 mosaic of the camera
    # feed with a different channel transformation in each quadrant.
    cv.NamedWindow("camera", 1)
    capture = cv.CaptureFromCAM(0)

    # 1280x960 canvas split into four 640x480 quadrant views (shared memory,
    # so writing into the numpy views updates the canvas directly).
    paste = cv.CreateMat(960, 1280, cv.CV_8UC3)

    topleft = numpy.asarray(cv.GetSubRect(paste, (0, 0, 640, 480)))
    topright = numpy.asarray(cv.GetSubRect(paste, (640, 0, 640, 480)))
    bottomleft = numpy.asarray(cv.GetSubRect(paste, (0, 480, 640, 480)))
    bottomright = numpy.asarray(cv.GetSubRect(paste, (640, 480, 640, 480)))

    while True:
        img = cv.GetMat(cv.QueryFrame(capture))

        # Split the frame into its three channels.
        n = (numpy.asarray(img)).astype(numpy.uint8)

        red = n[:,:,0]
        grn = n[:,:,1]
        blu = n[:,:,2]

        # Each quadrant permutes/inverts one channel.
        topleft[:,:,0] = 255 - grn
        topleft[:,:,1] = red
        topleft[:,:,2] = blu

        topright[:,:,0] = blu
        topright[:,:,1] = 255 - red
        topright[:,:,2] = grn

        bottomright[:,:,0] = red
        bottomright[:,:,1] = grn
        bottomright[:,:,2] = 255 - blu

        # Bottom-left: middle channel is |green - red| (computed in float to
        # avoid uint8 wrap-around on subtraction).
        fgrn = grn.astype(numpy.float32)
        fred = red.astype(numpy.float32)
        bottomleft[:,:,0] = blu
        bottomleft[:,:,1] = (abs(fgrn - fred)).astype(numpy.uint8)
        bottomleft[:,:,2] = red

        cv.ShowImage("camera", paste)
        # Exit on ESC.
        if cv.WaitKey(6) == 27:
            break
    cv.DestroyAllWindows()
| StarcoderdataPython |
6466270 | <reponame>chuwyler/IRISreader
#!/usr/bin/env python3
# exception class for corrupt FITS files
class CorruptFITSException( Exception ):
    """Raised when a FITS file is corrupt or lacks the expected structure."""
    pass
# function that converts wavelength string into extension number
def line2extension( header, line ):
    """
    Converts a wavelength/line string into an extension number.

    Parameters
    ----------
    header : astropy.io.fits.header.Header
        Primary header of the FITS file (any mapping with TDESCn cards works).
    line : str
        Substring identifying the line (e.g. "Mg"); must match exactly one
        TDESCn description.

    Returns
    -------
    location : int
        1-based position of the matching TDESCn card (= extension number),
        or -1 if no description or more than one description matches.
    """
    # Collect the TDESCn cards that describe the spectral windows.
    keys = [k for k in header.keys() if k.startswith("TDESC")]

    # BUG FIX: sort the card names numerically. The previous plain string
    # sort ordered TDESC10 before TDESC2, returning wrong extension numbers
    # for files with more than nine line windows. Sorting by (length, name)
    # orders the numeric suffixes correctly without parsing them.
    keys.sort(key=lambda k: (len(k), k))
    line_descriptions = [header[k] for k in keys]

    # Keep only the descriptions containing the requested substring.
    matches = [s for s in line_descriptions if line in s]

    # Require a unique match; otherwise the request is ambiguous or unknown.
    if len(matches) != 1:
        return -1
    return line_descriptions.index(matches[0]) + 1
# function to translate headers stored in a data array
def array2dict( header, data ):
"""
Reads (key, index) pairs from the header of the extension and uses them
to assign each row of the data array to a dictionary.
Parameters
----------
header : astropy.io.fits.header.Header
Header with the keys to the data array
data : numpy.ndarray
Data array
Returns
-------
list of header dictionaries
"""
# some headers are not keys but real headers: remove them
keys_to_remove=['XTENSION', 'BITPIX', 'NAXIS', 'NAXIS1', 'NAXIS2', 'PCOUNT', 'GCOUNT']
header_keys = dict( header )
header_keys = {k: v for k, v in header_keys.items() if k not in keys_to_remove}
# initialize dictionary list
res = [dict() for x in range( data.shape[0] )]
# fill dictionaries
for i in range(0, data.shape[0]):
res[i] = dict( zip( header_keys.keys(), data[i,list(header_keys.values())] ) )
return res
| StarcoderdataPython |
9684580 | from werkzeug.security import check_password_hash, generate_password_hash
import atp_classes
ADMIN_USERS_ID = ['5644b9622582d972352da864']


class User:
    """Flask-Login style user object backed by the ``users`` collection."""

    def __init__(self, id, username, password=None):
        # BUG FIX: the default value was a broken ``<PASSWORD>`` placeholder
        # (a syntax error left over from credential scrubbing); ``None``
        # keeps the parameter optional.
        self._id = id
        self.username = username
        self.password = password  # stored as a werkzeug password hash

    def is_authenticated(self):
        """All loaded users are considered authenticated."""
        return True

    def is_active(self):
        """All loaded users are considered active."""
        return True

    def is_anonymous(self):
        """Real users are never anonymous."""
        return False

    def get_id(self):
        """Return the identifier Flask-Login stores in the session."""
        return self._id

    def is_admin(self):
        """True if this user's id is in the hard-coded admin whitelist."""
        return str(self._id) in ADMIN_USERS_ID

    @classmethod
    def find_user_by_id(cls, id):
        """Load a User from the database by document id, or None."""
        app_db = atp_classes.AppDB()
        user = app_db.get_document_by_id('users', id)
        if user:
            return cls(user["_id"], user["username"], user["password"])
        else:
            return None

    @classmethod
    def find_user_by_username(cls, username):
        """Load a User from the database by username, or None."""
        app_db = atp_classes.AppDB()
        user = app_db.get_document_by_field('users', 'username', username)
        if user:
            return cls(user["_id"], user["username"], user["password"])
        else:
            return None

    @staticmethod
    def validate_login(password_hash, password):
        """Check a plaintext password against its stored hash."""
        return check_password_hash(password_hash, password)

    @staticmethod
    def generate_hash(str):
        """Hash a plaintext password for storage.

        NOTE(review): the parameter name shadows the ``str`` builtin; kept
        for signature compatibility with existing keyword callers.
        """
        return generate_password_hash(str)
| StarcoderdataPython |
3472823 | from simple_rest_client.api import API
from . import resource
from . import async_resource
def get_api_instance(token='', api_root_url=None, timeout=3, resource_class=resource):
    """Build a configured Sentry REST API client.

    Registers every supported Sentry resource on a simple_rest_client API
    object. JSON resources share bearer-token JSON headers; file-upload
    resources use multipart headers and raw (non-JSON) bodies.

    Parameters:
        token: Sentry API bearer token.
        api_root_url: API base URL (defaults to the public Sentry instance).
        timeout: request timeout in seconds.
        resource_class: module providing the resource classes (sync or async).
    """
    json_headers = {
        'Authorization': 'Bearer {}'.format(token),
        'Content-Type': 'application/json'
    }
    upload_headers = {
        'Authorization': 'Bearer {}'.format(token),
        'Content-Type': 'multipart/form-data'
    }
    api = API(
        api_root_url=api_root_url or 'https://sentry.io/api/0/',
        headers=json_headers,
        json_encode_body=True,
        timeout=timeout,
    )

    # Plain JSON resources: (resource_name, class attribute on resource_class).
    json_resources = [
        ('issues', 'Issues'),
        ('project_issues', 'ProjectIssues'),
        ('project_events', 'ProjectEvents'),
        ('organizations', 'Organizations'),
        ('projects', 'Projects'),
        ('projects_dsyms', 'ProjectsDsyms'),
        ('projects_hooks', 'ProjectsHooks'),
        ('projects_keys', 'ProjectsKeys'),
        ('projects_user_feedback', 'ProjectsUserFeedback'),
        ('organizations_releases', 'OrganizationsReleases'),
        ('organizations_releases_deploys', 'OrganizationsReleasesDeploys'),
        ('organizations_releases_files', 'OrganizationsReleasesFiles'),
        ('projects_releases', 'ProjectsReleases'),
        ('projects_releases_files', 'ProjectsReleasesFiles'),
        ('organizations_teams', 'OrganizationsTeams'),
        ('teams', 'Teams'),
        ('teams_projects', 'TeamsProjects'),
    ]
    # Multipart upload resources need their own headers and raw bodies.
    upload_resources = [
        ('projects_dsyms_upload', 'ProjectsDsymsUpload'),
        ('organizations_releases_file_upload', 'OrganizationsReleasesFileUpload'),
        ('projects_releases_file_upload', 'ProjectsReleasesFileUpload'),
    ]

    for name, attr in json_resources:
        api.add_resource(resource_name=name, resource_class=getattr(resource_class, attr))
    for name, attr in upload_resources:
        api.add_resource(
            resource_name=name,
            resource_class=getattr(resource_class, attr),
            json_encode_body=False,
            headers=upload_headers,
        )
    return api
def get_async_api_instance(token='', api_root_url=None, timeout=3):
    """Same as get_api_instance, but wired to the async resource classes."""
    return get_api_instance(
        token=token,
        api_root_url=api_root_url,
        timeout=timeout,
        resource_class=async_resource,
    )
| StarcoderdataPython |
70677 | #!/usr/bin/python
# coding: utf-8
# Copyright (c) 2013 Mountainstorm
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from MobileDevice import *
from amdevice import *
from plistservice import *
import os
class FileRelay(PlistService):
    u'''Provides access to the file relay service, allowing you to retrieve
    filesets from the device in cpio.gz format.'''

    # Fileset names understood by the on-device file relay service.
    filesets = [
        u'AppleSupport',
        u'Network',
        u'VPN',
        u'WiFi',
        u'UserDatabases',
        u'CrashReporter',
        u'tmp',
        u'SystemConfiguration'
    ]

    def __init__(self, amdevice):
        # Connect to the device's file relay service, speaking XML-format
        # property lists over the socket.
        PlistService.__init__(
            self,
            amdevice,
            [AMSVC_FILE_RELAY],
            kCFPropertyListXMLFormat_v1_0
        )

    def get_filesets(self, sources):
        u'''Retrieves the fileset/sets specified in sources; returns the data
        in cpio.gz format.

        Arguments:
        sources -- an array of source names (see FileRelay.filesets)

        Raises RuntimeError if the service refuses the request.
        '''
        self._sendmsg({u'Sources': sources})
        reply = self._recvmsg()
        if u'Status' in reply and reply[u'Status'] == u'Acknowledged':
            # The service follows the acknowledgement with the raw cpio.gz
            # stream; read from the underlying socket until EOF.
            retval = ''
            while True:
                data = os.read(self.s, 1024)
                if data is None or len(data) == 0:
                    break
                retval += data
        else:
            raise RuntimeError(u'Unable to retrieve filesets: %s' % reply)
        return retval
def register_argparse_filerelay(cmdargs):
    u'''Registers the ``filerelay`` sub-command on the given argparse
    sub-parsers object, wiring it to a handler that downloads the requested
    filesets to a local .cpio.gz file.'''
    import argparse
    import sys

    def cmd_filerelay(args, dev):
        # Handler invoked with the parsed args and a connected device.
        fr = FileRelay(dev)
        # Default to every known fileset unless -s was given.
        sets = FileRelay.filesets
        if args.s is not None:
            sets = []
            for s in args.s:
                sets.append(s.decode(u'utf-8'))
        f = open(args.dest.decode(u'utf-8'), 'wb')
        f.write(fr.get_filesets(sets))
        f.close()
        fr.disconnect()

    # filerelay command
    filerelaycmd = cmdargs.add_parser(
        u'filerelay',
        help=u'retrieves filesets from the device in .cpio.gz format'
    )
    filerelaycmd.add_argument(
        u'-s',
        metavar=u'setname',
        action=u'append',
        help=u'the set name to retrieve; if no -s options are specified it retrieves all sets'
    )
    filerelaycmd.add_argument(
        u'dest',
        help=u'destination filename; should really end in .cpio.gz'
    )
    filerelaycmd.set_defaults(func=cmd_filerelay)
| StarcoderdataPython |
6514398 | import yaml
class GeneralUtils:
    """Small helpers for reading and writing YAML files."""

    def __init__(self):
        # Most recently parsed YAML document (None until read_yaml is called).
        self.yaml_dict = None

    def read_yaml(self, yaml_file: str) -> dict:
        """
        :param yaml_file: This is the path to the yaml file that needs to be read
        :return: Return a dictionary of the yaml file
        """
        # BUG FIX: the file handle was previously opened and never closed;
        # use a context manager so it is released even on parse errors.
        with open(yaml_file, "r") as stream:
            self.yaml_dict = yaml.safe_load(stream)
        return self.yaml_dict

    def write_yaml(self, yaml_file: str, values_dict: dict) -> None:
        """
        :param yaml_file: Path of the yaml file to write (overwritten).
        :param values_dict: Dictionary to serialize as YAML.
        """
        with open(yaml_file, "w") as open_file:
            yaml.dump(values_dict, open_file)
| StarcoderdataPython |
12806357 | <reponame>SH-anonta/Discussion-Forum
from forum.tests.functional_tests.base_testcase import BaseTestCase
from forum.tests.functional_tests.page_objects import RegistrationPage
class RegistrationTest(BaseTestCase):
    """Functional (Selenium) test of the user registration flow."""

    def loadData(self):
        # No fixtures needed: registration starts from an empty user table.
        pass

    # Test Case: 56
    def test_basicRegistration(self):
        """Register a new account, then log in with it."""
        browser = self.browser

        # Page object
        reg_page = RegistrationPage(browser)

        # user goes to the registration page
        browser.get(self.getRegisterPageAddress())

        # The page loads and he sees a registration form
        # with username, email, password and repeat password fields

        # user enters valid data to these fields
        uname = 'Fielex'
        pw = 'password'
        # BUG FIX: the email value was a scrubbed '<EMAIL>' placeholder,
        # which would fail form email validation; restore a valid address.
        email = 'fielex@example.com'

        reg_page.enterUserName(uname)
        reg_page.enterEmail(email)
        reg_page.enterPassword(pw)
        reg_page.enterConfirmPassword(pw)

        # Then he clicks the Register button
        reg_page.clickRegisterButton()

        # Registration is successful and he is redirected to the Login page
        self.assertLoginPageLoaded()

        # He then logs in with his newly created account
        self.login(uname, pw)

        # The login is successful and he is brought the homepage
        self.assertHomepageLoaded()
        # Success!

    # TODO: write test for invalid registration attempt
65186 | <gh_stars>1-10
from __future__ import generators
# Python test set -- part 6, built-in types
from test.test_support import *
# Python 2 regression tests (note: `print` statements, 0L long literals and
# `raise Class, 'message'` syntax). TestFailed comes from test.test_support.
print '6. Built-in types'

# 6.1: falsy constants and empty containers must test false; non-zero
# numbers, non-empty containers, and arbitrary objects (functions, classes,
# modules, instances) must test true.
print '6.1 Truth value testing'
if None: raise TestFailed, 'None is true instead of false'
if 0: raise TestFailed, '0 is true instead of false'
if 0L: raise TestFailed, '0L is true instead of false'
if 0.0: raise TestFailed, '0.0 is true instead of false'
if '': raise TestFailed, '\'\' is true instead of false'
if (): raise TestFailed, '() is true instead of false'
if []: raise TestFailed, '[] is true instead of false'
if {}: raise TestFailed, '{} is true instead of false'
if not 1: raise TestFailed, '1 is false instead of true'
if not 1L: raise TestFailed, '1L is false instead of true'
if not 1.0: raise TestFailed, '1.0 is false instead of true'
if not 'x': raise TestFailed, '\'x\' is false instead of true'
if not (1, 1): raise TestFailed, '(1, 1) is false instead of true'
if not [1]: raise TestFailed, '[1] is false instead of true'
if not {'x': 1}: raise TestFailed, '{\'x\': 1} is false instead of true'
def f(): pass
class C: pass
import sys
x = C()
if not f: raise TestFailed, 'f is false instead of true'
if not C: raise TestFailed, 'C is false instead of true'
if not sys: raise TestFailed, 'sys is false instead of true'
if not x: raise TestFailed, 'x is false instead of true'

# 6.2: 'or', 'and' and 'not' over plain integers.
print '6.2 Boolean operations'
if 0 or 0: raise TestFailed, '0 or 0 is true instead of false'
if 1 and 1: pass
else: raise TestFailed, '1 and 1 is false instead of true'
if not 1: raise TestFailed, 'not 1 is true instead of false'
# 6.3: chained comparisons for int/long/float/str, membership and identity
# tests, plus the error behaviour of float() parsing and division by zero.
print '6.3 Comparisons'
if 0 < 1 <= 1 == 1 >= 1 > 0 != 1: pass
else: raise TestFailed, 'int comparisons failed'
if 0L < 1L <= 1L == 1L >= 1L > 0L != 1L: pass
else: raise TestFailed, 'long int comparisons failed'
if 0.0 < 1.0 <= 1.0 == 1.0 >= 1.0 > 0.0 != 1.0: pass
else: raise TestFailed, 'float comparisons failed'
if '' < 'a' <= 'a' == 'a' < 'abc' < 'abd' < 'b': pass
else: raise TestFailed, 'string comparisons failed'
if 0 in [0] and 0 not in [1]: pass
else: raise TestFailed, 'membership test failed'
if None is None and [] is not []: pass
else: raise TestFailed, 'identity test failed'
# float() must reject the empty string and strings with embedded NULs.
try: float('')
except ValueError: pass
else: raise TestFailed, "float('') didn't raise ValueError"
try: float('5\0')
except ValueError: pass
else: raise TestFailed, "float('5\0') didn't raise ValueError"
# /, // and % by zero must raise ZeroDivisionError for floats and longs.
try: 5.0 / 0.0
except ZeroDivisionError: pass
else: raise TestFailed, "5.0 / 0.0 didn't raise ZeroDivisionError"
try: 5.0 // 0.0
except ZeroDivisionError: pass
else: raise TestFailed, "5.0 // 0.0 didn't raise ZeroDivisionError"
try: 5.0 % 0.0
except ZeroDivisionError: pass
else: raise TestFailed, "5.0 % 0.0 didn't raise ZeroDivisionError"
try: 5 / 0L
except ZeroDivisionError: pass
else: raise TestFailed, "5 / 0L didn't raise ZeroDivisionError"
try: 5 // 0L
except ZeroDivisionError: pass
else: raise TestFailed, "5 // 0L didn't raise ZeroDivisionError"
try: 5 % 0L
except ZeroDivisionError: pass
else: raise TestFailed, "5 % 0L didn't raise ZeroDivisionError"
print '6.4 Numeric types (mostly conversions)'
if 0 != 0L or 0 != 0.0 or 0L != 0.0: raise TestFailed, 'mixed comparisons'
if 1 != 1L or 1 != 1.0 or 1L != 1.0: raise TestFailed, 'mixed comparisons'
if -1 != -1L or -1 != -1.0 or -1L != -1.0:
raise TestFailed, 'int/long/float value not equal'
# calling built-in types without argument must return 0
if int() != 0: raise TestFailed, 'int() does not return 0'
if long() != 0L: raise TestFailed, 'long() does not return 0L'
if float() != 0.0: raise TestFailed, 'float() does not return 0.0'
if int(1.9) == 1 == int(1.1) and int(-1.1) == -1 == int(-1.9): pass
else: raise TestFailed, 'int() does not round properly'
if long(1.9) == 1L == long(1.1) and long(-1.1) == -1L == long(-1.9): pass
else: raise TestFailed, 'long() does not round properly'
if float(1) == 1.0 and float(-1) == -1.0 and float(0) == 0.0: pass
else: raise TestFailed, 'float() does not work properly'
print '6.4.1 32-bit integers'
if 12 + 24 != 36: raise TestFailed, 'int op'
if 12 + (-24) != -12: raise TestFailed, 'int op'
if (-12) + 24 != 12: raise TestFailed, 'int op'
if (-12) + (-24) != -36: raise TestFailed, 'int op'
if not 12 < 24: raise TestFailed, 'int op'
if not -24 < -12: raise TestFailed, 'int op'
# Test for a particular bug in integer multiply
xsize, ysize, zsize = 238, 356, 4
if not (xsize*ysize*zsize == zsize*xsize*ysize == 338912):
raise TestFailed, 'int mul commutativity'
# And another.
m = -sys.maxint - 1
for divisor in 1, 2, 4, 8, 16, 32:
j = m // divisor
prod = divisor * j
if prod != m:
raise TestFailed, "%r * %r == %r != %r" % (divisor, j, prod, m)
if type(prod) is not int:
raise TestFailed, ("expected type(prod) to be int, not %r" %
type(prod))
# Check for expected * overflow to long.
for divisor in 1, 2, 4, 8, 16, 32:
j = m // divisor - 1
prod = divisor * j
if type(prod) is not long:
raise TestFailed, ("expected type(%r) to be long, not %r" %
(prod, type(prod)))
# Check for expected * overflow to long.
m = sys.maxint
for divisor in 1, 2, 4, 8, 16, 32:
j = m // divisor + 1
prod = divisor * j
if type(prod) is not long:
raise TestFailed, ("expected type(%r) to be long, not %r" %
(prod, type(prod)))
print '6.4.2 Long integers'
if 12L + 24L != 36L: raise TestFailed, 'long op'
if 12L + (-24L) != -12L: raise TestFailed, 'long op'
if (-12L) + 24L != 12L: raise TestFailed, 'long op'
if (-12L) + (-24L) != -36L: raise TestFailed, 'long op'
if not 12L < 24L: raise TestFailed, 'long op'
if not -24L < -12L: raise TestFailed, 'long op'
x = sys.maxint
if int(long(x)) != x: raise TestFailed, 'long op'
try: y = int(long(x)+1L)
except OverflowError: raise TestFailed, 'long op'
if not isinstance(y, long): raise TestFailed, 'long op'
x = -x
if int(long(x)) != x: raise TestFailed, 'long op'
x = x-1
if int(long(x)) != x: raise TestFailed, 'long op'
try: y = int(long(x)-1L)
except OverflowError: raise TestFailed, 'long op'
if not isinstance(y, long): raise TestFailed, 'long op'
try: 5 << -5
except ValueError: pass
else: raise TestFailed, 'int negative shift <<'
try: 5L << -5L
except ValueError: pass
else: raise TestFailed, 'long negative shift <<'
try: 5 >> -5
except ValueError: pass
else: raise TestFailed, 'int negative shift >>'
try: 5L >> -5L
except ValueError: pass
else: raise TestFailed, 'long negative shift >>'
print '6.4.3 Floating point numbers'
if 12.0 + 24.0 != 36.0: raise TestFailed, 'float op'
if 12.0 + (-24.0) != -12.0: raise TestFailed, 'float op'
if (-12.0) + 24.0 != 12.0: raise TestFailed, 'float op'
if (-12.0) + (-24.0) != -36.0: raise TestFailed, 'float op'
if not 12.0 < 24.0: raise TestFailed, 'float op'
if not -24.0 < -12.0: raise TestFailed, 'float op'
print '6.5 Sequence types'
print '6.5.1 Strings'
if len('') != 0: raise TestFailed, 'len(\'\')'
if len('a') != 1: raise TestFailed, 'len(\'a\')'
if len('abcdef') != 6: raise TestFailed, 'len(\'abcdef\')'
if 'xyz' + 'abcde' != 'xyzabcde': raise TestFailed, 'string concatenation'
if 'xyz'*3 != 'xyzxyzxyz': raise TestFailed, 'string repetition *3'
if 0*'abcde' != '': raise TestFailed, 'string repetition 0*'
if min('abc') != 'a' or max('abc') != 'c': raise TestFailed, 'min/max string'
if 'a' in 'abc' and 'b' in 'abc' and 'c' in 'abc' and 'd' not in 'abc': pass
else: raise TestFailed, 'in/not in string'
x = 'x'*103
if '%s!'%x != x+'!': raise TestFailed, 'nasty string formatting bug'
#extended slices for strings
a = '0123456789'
vereq(a[::], a)
vereq(a[::2], '02468')
vereq(a[1::2], '13579')
vereq(a[::-1],'9876543210')
vereq(a[::-2], '97531')
vereq(a[3::-2], '31')
vereq(a[-100:100:], a)
vereq(a[100:-100:-1], a[::-1])
vereq(a[-100L:100L:2L], '02468')
if have_unicode:
a = unicode('0123456789', 'ascii')
vereq(a[::], a)
vereq(a[::2], unicode('02468', 'ascii'))
vereq(a[1::2], unicode('13579', 'ascii'))
vereq(a[::-1], unicode('9876543210', 'ascii'))
vereq(a[::-2], unicode('97531', 'ascii'))
vereq(a[3::-2], unicode('31', 'ascii'))
vereq(a[-100:100:], a)
vereq(a[100:-100:-1], a[::-1])
vereq(a[-100L:100L:2L], unicode('02468', 'ascii'))
print '6.5.2 Tuples'
# calling built-in types without argument must return empty
if tuple() != (): raise TestFailed,'tuple() does not return ()'
if len(()) != 0: raise TestFailed, 'len(())'
if len((1,)) != 1: raise TestFailed, 'len((1,))'
if len((1,2,3,4,5,6)) != 6: raise TestFailed, 'len((1,2,3,4,5,6))'
if (1,2)+(3,4) != (1,2,3,4): raise TestFailed, 'tuple concatenation'
if (1,2)*3 != (1,2,1,2,1,2): raise TestFailed, 'tuple repetition *3'
if 0*(1,2,3) != (): raise TestFailed, 'tuple repetition 0*'
if min((1,2)) != 1 or max((1,2)) != 2: raise TestFailed, 'min/max tuple'
if 0 in (0,1,2) and 1 in (0,1,2) and 2 in (0,1,2) and 3 not in (0,1,2): pass
else: raise TestFailed, 'in/not in tuple'
try: ()[0]
except IndexError: pass
else: raise TestFailed, "tuple index error didn't raise IndexError"
x = ()
x += ()
if x != (): raise TestFailed, 'tuple inplace add from () to () failed'
x += (1,)
if x != (1,): raise TestFailed, 'tuple resize from () failed'
# extended slicing - subscript only for tuples
a = (0,1,2,3,4)
vereq(a[::], a)
vereq(a[::2], (0,2,4))
vereq(a[1::2], (1,3))
vereq(a[::-1], (4,3,2,1,0))
vereq(a[::-2], (4,2,0))
vereq(a[3::-2], (3,1))
vereq(a[-100:100:], a)
vereq(a[100:-100:-1], a[::-1])
vereq(a[-100L:100L:2L], (0,2,4))
# Check that a specific bug in _PyTuple_Resize() is squashed.
def f():
for i in range(1000):
yield i
vereq(list(tuple(f())), range(1000))
# Verify that __getitem__ overrides are not recognized by __iter__
# XXX: this is a problem
#class T(tuple):
# def __getitem__(self, key):
# return str(key) + '!!!'
#vereq(iter(T((1,2))).next(), 1)
print '6.5.3 Lists'
# calling built-in types without argument must return empty
if list() != []: raise TestFailed,'list() does not return []'
if len([]) != 0: raise TestFailed, 'len([])'
if len([1,]) != 1: raise TestFailed, 'len([1,])'
if len([1,2,3,4,5,6]) != 6: raise TestFailed, 'len([1,2,3,4,5,6])'
if [1,2]+[3,4] != [1,2,3,4]: raise TestFailed, 'list concatenation'
if [1,2]*3 != [1,2,1,2,1,2]: raise TestFailed, 'list repetition *3'
if [1,2]*3L != [1,2,1,2,1,2]: raise TestFailed, 'list repetition *3L'
if 0*[1,2,3] != []: raise TestFailed, 'list repetition 0*'
if 0L*[1,2,3] != []: raise TestFailed, 'list repetition 0L*'
if min([1,2]) != 1 or max([1,2]) != 2: raise TestFailed, 'min/max list'
if 0 in [0,1,2] and 1 in [0,1,2] and 2 in [0,1,2] and 3 not in [0,1,2]: pass
else: raise TestFailed, 'in/not in list'
a = [1, 2, 3, 4, 5]
a[:-1] = a
if a != [1, 2, 3, 4, 5, 5]:
raise TestFailed, "list self-slice-assign (head)"
a = [1, 2, 3, 4, 5]
a[1:] = a
if a != [1, 1, 2, 3, 4, 5]:
raise TestFailed, "list self-slice-assign (tail)"
a = [1, 2, 3, 4, 5]
a[1:-1] = a
if a != [1, 1, 2, 3, 4, 5, 5]:
raise TestFailed, "list self-slice-assign (center)"
try: [][0]
except IndexError: pass
else: raise TestFailed, "list index error didn't raise IndexError"
try: [][0] = 5
except IndexError: pass
else: raise TestFailed, "list assignment index error didn't raise IndexError"
try: [].pop()
except IndexError: pass
else: raise TestFailed, "empty list.pop() didn't raise IndexError"
try: [1].pop(5)
except IndexError: pass
else: raise TestFailed, "[1].pop(5) didn't raise IndexError"
try: [][0:1] = 5
except TypeError: pass
else: raise TestFailed, "bad list slice assignment didn't raise TypeError"
try: [].extend(None)
except TypeError: pass
else: raise TestFailed, "list.extend(None) didn't raise TypeError"
a = [1, 2, 3, 4]
a *= 0
if a != []:
raise TestFailed, "list inplace repeat"
a = []
a[:] = tuple(range(10))
if a != range(10):
raise TestFailed, "assigning tuple to slice"
print '6.5.3a Additional list operations'
a = [0,1,2,3,4]
a[0L] = 1
a[1L] = 2
a[2L] = 3
if a != [1,2,3,3,4]: raise TestFailed, 'list item assignment [0L], [1L], [2L]'
a[0] = 5
a[1] = 6
a[2] = 7
if a != [5,6,7,3,4]: raise TestFailed, 'list item assignment [0], [1], [2]'
a[-2L] = 88
a[-1L] = 99
if a != [5,6,7,88,99]: raise TestFailed, 'list item assignment [-2L], [-1L]'
a[-2] = 8
a[-1] = 9
if a != [5,6,7,8,9]: raise TestFailed, 'list item assignment [-2], [-1]'
a[:2] = [0,4]
a[-3:] = []
a[1:1] = [1,2,3]
if a != [0,1,2,3,4]: raise TestFailed, 'list slice assignment'
a[ 1L : 4L] = [7,8,9]
if a != [0,7,8,9,4]: raise TestFailed, 'list slice assignment using long ints'
del a[1:4]
if a != [0,4]: raise TestFailed, 'list slice deletion'
del a[0]
if a != [4]: raise TestFailed, 'list item deletion [0]'
del a[-1]
if a != []: raise TestFailed, 'list item deletion [-1]'
a=range(0,5)
del a[1L:4L]
if a != [0,4]: raise TestFailed, 'list slice deletion'
del a[0L]
if a != [4]: raise TestFailed, 'list item deletion [0]'
del a[-1L]
if a != []: raise TestFailed, 'list item deletion [-1]'
a.append(0)
a.append(1)
a.append(2)
if a != [0,1,2]: raise TestFailed, 'list append'
a.insert(0, -2)
a.insert(1, -1)
a.insert(2,0)
if a != [-2,-1,0,0,1,2]: raise TestFailed, 'list insert'
b = a[:]
b.insert(-2, "foo")
b.insert(-200, "left")
b.insert(200, "right")
if b != ["left",-2,-1,0,0,"foo",1,2,"right"]: raise TestFailed, 'list insert2'
# a = [-2,-1,0,0,1,2]
if a.count(0) != 2: raise TestFailed, ' list count'
if a.index(0) != 2: raise TestFailed, 'list index'
if a.index(0,2) != 2: raise TestFailed, 'list index, start argument'
if a.index(0,-4) != 2: raise TestFailed, 'list index, -start argument'
if a.index(-2,-10) != 0: raise TestFailed, 'list index, very -start argument'
if a.index(0,3) != 3: raise TestFailed, 'list index, start argument'
if a.index(0,-3) != 3: raise TestFailed, 'list index, -start argument'
if a.index(0,3,4) != 3: raise TestFailed, 'list index, stop argument'
if a.index(0,-3,-2) != 3: raise TestFailed, 'list index, -stop argument'
#XXX index with Long not working yet.
#if a.index(0,-4*sys.maxint,4*sys.maxint) != 2:
# raise TestFailed, 'list index, -maxint, maxint argument'
#try:
# a.index(0, 4*sys.maxint,-4*sys.maxint)
#except ValueError:
# pass
#else:
# raise TestFailed, 'list index, maxint,-maxint argument'
try:
a.index(2,0,-10)
except ValueError:
pass
else:
raise TestFailed, 'list index, very -stop argument'
a.remove(0)
try:
a.index(2,0,4)
except ValueError:
pass
else:
raise TestFailed, 'list index, stop argument.'
if a != [-2,-1,0,1,2]: raise TestFailed, 'list remove'
a.reverse()
if a != [2,1,0,-1,-2]: raise TestFailed, 'list reverse'
a.sort()
if a != [-2,-1,0,1,2]: raise TestFailed, 'list sort'
def revcmp(a, b): return cmp(b, a)
a.sort(revcmp)
if a != [2,1,0,-1,-2]: raise TestFailed, 'list sort with cmp func'
# The following dumps core in unpatched Python 1.5:
def myComparison(x,y):
return cmp(x%3, y%7)
z = range(12)
z.sort(myComparison)
try: z.sort(2)
except TypeError: pass
else: raise TestFailed, 'list sort compare function is not callable'
#XXX need a strategy for locking list during sort.
#def selfmodifyingComparison(x,y):
# z.append(1)
# return cmp(x, y)
#try: z.sort(selfmodifyingComparison)
#except ValueError: pass
#else: raise TestFailed, 'modifying list during sort'
try: z.sort(lambda x, y: 's')
except TypeError: pass
else: raise TestFailed, 'list sort compare function does not return int'
# Test extreme cases with long ints
a = [0,1,2,3,4]
if a[ -pow(2,128L): 3 ] != [0,1,2]:
raise TestFailed, "list slicing with too-small long integer"
if a[ 3: pow(2,145L) ] != [3,4]:
raise TestFailed, "list slicing with too-large long integer"
# extended slicing
# subscript
a = [0,1,2,3,4]
vereq(a[::], a)
vereq(a[::2], [0,2,4])
vereq(a[1::2], [1,3])
vereq(a[::-1], [4,3,2,1,0])
vereq(a[::-2], [4,2,0])
vereq(a[3::-2], [3,1])
vereq(a[-100:100:], a)
vereq(a[100:-100:-1], a[::-1])
vereq(a[-100L:100L:2L], [0,2,4])
vereq(a[1000:2000:2], [])
vereq(a[-1000:-2000:-2], [])
# deletion
del a[::2]
vereq(a, [1,3])
a = range(5)
del a[1::2]
vereq(a, [0,2,4])
a = range(5)
del a[1::-2]
vereq(a, [0,2,3,4])
a = range(10)
del a[::1000]
vereq(a, [1, 2, 3, 4, 5, 6, 7, 8, 9])
# assignment
a = range(10)
a[::2] = [-1]*5
vereq(a, [-1, 1, -1, 3, -1, 5, -1, 7, -1, 9])
a = range(10)
a[::-4] = [10]*3
vereq(a, [0, 10, 2, 3, 4, 10, 6, 7, 8 ,10])
a = range(4)
a[::-1] = a
vereq(a, [3, 2, 1, 0])
a = range(10)
b = a[:]
c = a[:]
a[2:3] = ["two", "elements"]
b[slice(2,3)] = ["two", "elements"]
c[2:3:] = ["two", "elements"]
vereq(a, b)
vereq(a, c)
a = range(10)
a[::2] = tuple(range(5))
vereq(a, [0, 1, 1, 3, 2, 5, 3, 7, 4, 9])
# Verify that __getitem__ overrides are not recognized by __iter__
class L(list):
def __getitem__(self, key):
return str(key) + '!!!'
vereq(iter(L([1,2])).next(), 1)
print '6.6 Mappings == Dictionaries'
# calling built-in types without argument must return empty
if dict() != {}: raise TestFailed,'dict() does not return {}'
d = {}
if d.keys() != []: raise TestFailed, '{}.keys()'
if d.values() != []: raise TestFailed, '{}.values()'
if d.items() != []: raise TestFailed, '{}.items()'
if d.has_key('a') != 0: raise TestFailed, '{}.has_key(\'a\')'
if ('a' in d) != 0: raise TestFailed, "'a' in {}"
if ('a' not in d) != 1: raise TestFailed, "'a' not in {}"
if len(d) != 0: raise TestFailed, 'len({})'
d = {'a': 1, 'b': 2}
if len(d) != 2: raise TestFailed, 'len(dict)'
k = d.keys()
k.sort()
if k != ['a', 'b']: raise TestFailed, 'dict keys()'
if d.has_key('a') and d.has_key('b') and not d.has_key('c'): pass
else: raise TestFailed, 'dict keys()'
if 'a' in d and 'b' in d and 'c' not in d: pass
else: raise TestFailed, 'dict keys() # in/not in version'
if d['a'] != 1 or d['b'] != 2: raise TestFailed, 'dict item'
d['c'] = 3
d['a'] = 4
if d['c'] != 3 or d['a'] != 4: raise TestFailed, 'dict item assignment'
del d['b']
if d != {'a': 4, 'c': 3}: raise TestFailed, 'dict item deletion'
# dict.clear()
d = {1:1, 2:2, 3:3}
d.clear()
if d != {}: raise TestFailed, 'dict clear'
# dict.update()
d.update({1:100})
d.update({2:20})
d.update({1:1, 2:2, 3:3})
if d != {1:1, 2:2, 3:3}: raise TestFailed, 'dict update'
d.clear()
try: d.update(None)
except AttributeError: pass
else: raise TestFailed, 'dict.update(None), AttributeError expected'
class SimpleUserDict:
def __init__(self):
self.d = {1:1, 2:2, 3:3}
def keys(self):
return self.d.keys()
def __getitem__(self, i):
return self.d[i]
d.update(SimpleUserDict())
if d != {1:1, 2:2, 3:3}: raise TestFailed, 'dict.update(instance)'
d.clear()
class FailingUserDict:
def keys(self):
raise ValueError
try: d.update(FailingUserDict())
except ValueError: pass
else: raise TestFailed, 'dict.keys() expected ValueError'
class FailingUserDict:
def keys(self):
class BogonIter:
def __iter__(self):
raise ValueError
return BogonIter()
try: d.update(FailingUserDict())
except ValueError: pass
else: raise TestFailed, 'iter(dict.keys()) expected ValueError'
class FailingUserDict:
def keys(self):
class BogonIter:
def __init__(self):
self.i = 1
def __iter__(self):
return self
def next(self):
if self.i:
self.i = 0
return 'a'
raise ValueError
return BogonIter()
def __getitem__(self, key):
return key
try: d.update(FailingUserDict())
except ValueError: pass
else: raise TestFailed, 'iter(dict.keys()).next() expected ValueError'
class FailingUserDict:
def keys(self):
class BogonIter:
def __init__(self):
self.i = ord('a')
def __iter__(self):
return self
def next(self):
if self.i <= ord('z'):
rtn = chr(self.i)
self.i += 1
return rtn
raise StopIteration
return BogonIter()
def __getitem__(self, key):
raise ValueError
try: d.update(FailingUserDict())
except ValueError: pass
else: raise TestFailed, 'dict.update(), __getitem__ expected ValueError'
# dict.fromkeys()
if dict.fromkeys('abc') != {'a':None, 'b':None, 'c':None}:
raise TestFailed, 'dict.fromkeys did not work as a class method'
d = {}
if d.fromkeys('abc') is d:
raise TestFailed, 'dict.fromkeys did not return a new dict'
if d.fromkeys('abc') != {'a':None, 'b':None, 'c':None}:
raise TestFailed, 'dict.fromkeys failed with default value'
if d.fromkeys((4,5),0) != {4:0, 5:0}:
raise TestFailed, 'dict.fromkeys failed with specified value'
if d.fromkeys([]) != {}:
raise TestFailed, 'dict.fromkeys failed with null sequence'
def g():
yield 1
if d.fromkeys(g()) != {1:None}:
raise TestFailed, 'dict.fromkeys failed with a generator'
try: {}.fromkeys(3)
except TypeError: pass
else: raise TestFailed, 'dict.fromkeys failed to raise TypeError'
class dictlike(dict): pass
#if dictlike.fromkeys('a') != {'a':None}:
# raise TestFailed, 'dictsubclass.fromkeys did not inherit'
#if dictlike().fromkeys('a') != {'a':None}:
# raise TestFailed, 'dictsubclass.fromkeys did not inherit'
if type(dictlike.fromkeys('a')) is not dictlike:
raise TestFailed, 'dictsubclass.fromkeys created wrong type'
if type(dictlike().fromkeys('a')) is not dictlike:
raise TestFailed, 'dictsubclass.fromkeys created wrong type'
from UserDict import UserDict
class mydict(dict):
def __new__(cls):
return UserDict()
#ud = mydict.fromkeys('ab')
#if ud != {'a':None, 'b':None} or not isinstance(ud,UserDict):
# raise TestFailed, 'fromkeys did not instantiate using __new__'
# dict.copy()
d = {1:1, 2:2, 3:3}
if d.copy() != {1:1, 2:2, 3:3}: raise TestFailed, 'dict copy'
if {}.copy() != {}: raise TestFailed, 'empty dict copy'
# dict.get()
d = {}
if d.get('c') is not None: raise TestFailed, 'missing {} get, no 2nd arg'
if d.get('c', 3) != 3: raise TestFailed, 'missing {} get, w/ 2nd arg'
d = {'a' : 1, 'b' : 2}
if d.get('c') is not None: raise TestFailed, 'missing dict get, no 2nd arg'
if d.get('c', 3) != 3: raise TestFailed, 'missing dict get, w/ 2nd arg'
if d.get('a') != 1: raise TestFailed, 'present dict get, no 2nd arg'
if d.get('a', 3) != 1: raise TestFailed, 'present dict get, w/ 2nd arg'
# dict.setdefault()
d = {}
if d.setdefault('key0') is not None:
raise TestFailed, 'missing {} setdefault, no 2nd arg'
if d.setdefault('key0') is not None:
raise TestFailed, 'present {} setdefault, no 2nd arg'
d.setdefault('key', []).append(3)
if d['key'][0] != 3:
raise TestFailed, 'missing {} setdefault, w/ 2nd arg'
d.setdefault('key', []).append(4)
if len(d['key']) != 2:
raise TestFailed, 'present {} setdefault, w/ 2nd arg'
# dict.popitem()
for copymode in -1, +1:
# -1: b has same structure as a
# +1: b is a.copy()
for log2size in range(12):
size = 2**log2size
a = {}
b = {}
for i in range(size):
a[`i`] = i
if copymode < 0:
b[`i`] = i
if copymode > 0:
b = a.copy()
for i in range(size):
ka, va = ta = a.popitem()
if va != int(ka): raise TestFailed, "a.popitem: %s" % str(ta)
kb, vb = tb = b.popitem()
if vb != int(kb): raise TestFailed, "b.popitem: %s" % str(tb)
if copymode < 0 and ta != tb:
raise TestFailed, "a.popitem != b.popitem: %s, %s" % (
str(ta), str(tb))
if a: raise TestFailed, 'a not empty after popitems: %s' % str(a)
if b: raise TestFailed, 'b not empty after popitems: %s' % str(b)
d.clear()
try: d.popitem()
except KeyError: pass
else: raise TestFailed, "{}.popitem doesn't raise KeyError"
# Tests for pop with specified key
d.clear()
k, v = 'abc', 'def'
d[k] = v
try: d.pop('ghi')
except KeyError: pass
else: raise TestFailed, "{}.pop(k) doesn't raise KeyError when k not in dictionary"
if d.pop(k) != v: raise TestFailed, "{}.pop(k) doesn't find known key/value pair"
if len(d) > 0: raise TestFailed, "{}.pop(k) failed to remove the specified pair"
try: d.pop(k)
except KeyError: pass
else: raise TestFailed, "{}.pop(k) doesn't raise KeyError when dictionary is empty"
# verify longs/ints get same value when key > 32 bits (for 64-bit archs)
# see SF bug #689659
x = 4503599627370496L
y = 4503599627370496
h = {x: 'anything', y: 'something else'}
if h[x] != h[y]:
raise TestFailed, "long/int key should match"
if d.pop(k, v) != v: raise TestFailed, "{}.pop(k, v) doesn't return default value"
d[k] = v
if d.pop(k, 1) != v: raise TestFailed, "{}.pop(k, v) doesn't find known key/value pair"
#XXX need locking strategy.
#d[1] = 1
#try:
# for i in d:
# d[i+1] = 1
#except RuntimeError:
# pass
#else:
# raise TestFailed, "changing dict size during iteration doesn't raise Error"
# type() called with 2 or 4 positional arguments must raise TypeError.
try: type(1, 2)
except TypeError: pass
else: raise TestFailed, 'type(), w/2 args expected TypeError'
try: type(1, 2, 3, 4)
except TypeError: pass
else: raise TestFailed, 'type(), w/4 args expected TypeError'
# The buffer tests below are kept commented out for this port.
#XXX: Jython will probably never have buffers
#print 'Buffers'
#try: buffer('asdf', -1)
#except ValueError: pass
#else: raise TestFailed, "buffer('asdf', -1) should raise ValueError"
#
#try: buffer(None)
#except TypeError: pass
#else: raise TestFailed, "buffer(None) should raise TypeError"
#
#a = buffer('asdf')
#hash(a)
#b = a * 5
#if a == b:
# raise TestFailed, 'buffers should not be equal'
#if str(b) != ('asdf' * 5):
# raise TestFailed, 'repeated buffer has wrong content'
#if str(a * 0) != '':
# raise TestFailed, 'repeated buffer zero times has wrong content'
#if str(a + buffer('def')) != 'asdfdef':
# raise TestFailed, 'concatenation of buffers yields wrong content'
#
#try: a[1] = 'g'
#except TypeError: pass
#else: raise TestFailed, "buffer assignment should raise TypeError"
#
#try: a[0:1] = 'g'
#except TypeError: pass
#else: raise TestFailed, "buffer slice assignment should raise TypeError"
| StarcoderdataPython |
1607295 | <reponame>ikolokotronis/donation_app
"""charity_donation_app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/4.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path

from main.views import LandingPageView
from donations.views import AddDonationView, DonationDetailsView
from users.views import (
    LoginView, RegisterView,
    LogoutView, UserPanelView, UserEditView, PasswordChangeView,
    VerificationView, PasswordResetView, PasswordResetVerificationView
)

# URL routing for the donation app: landing page, donation creation/detail,
# and the full user account lifecycle (register -> email activation ->
# login/logout, profile edit, password change and reset).
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', LandingPageView.as_view(), name='index'),
    path('add_donation/', AddDonationView.as_view(), name='donation-page'),
    path('donation/<int:donation_id>/', DonationDetailsView.as_view()),
    path('login/', LoginView.as_view(), name='login-page'),
    path('register/', RegisterView.as_view(), name='registration-page'),
    path('logout/', LogoutView.as_view(), name='logout-page'),
    path('panel/<int:user_id>/', UserPanelView.as_view(), name='panel-page'),
    path('edit/<int:user_id>/', UserEditView.as_view(), name='user-edit-page'),
    path('change_password/<int:user_id>/', PasswordChangeView.as_view()),
    path('activate/<uidb64>/<token>/', VerificationView.as_view(), name='activate-page'),
    path('password-reset-verification/<uidb64>/<token>/', PasswordResetVerificationView.as_view(),
         name='password-reset-verification'),
    path('password-reset/', PasswordResetView.as_view())
]
| StarcoderdataPython |
# Image feature detection: probe which Enigma2 image flavour we run on,
# since OpenATV-like and OpenPLi-like code bases differ in API details.
openatv_like = True
try:
    # This works in OpenATV (and similar code bases) but fails on OpenPLi.
    # The particular import might not be relevant for the actual plugin.
    from Screens.EpgSelection import SingleEPG
    ADJUST = {'adjust': False}
except:
    ADJUST = {}
    openatv_like = False
# Quick fix for Vix
try:
    import boxbranding
    if "openvix" in boxbranding.getImageDistro().lower():
        openatv_like = True
except:
    pass
# Some images' MessageBox accepts a 'title' keyword; probe its signature.
title_like = True
try:
    import inspect
    from Screens.MessageBox import MessageBox
    title_like = 'title' in inspect.getargspec(MessageBox.__init__)[0]
except:
    title_like = False
import time
from enigma import eServiceReference
from Components.config import config, configfile, ConfigBoolean, ConfigSelection, ConfigSubsection, ConfigText
from Plugins.Plugin import PluginDescriptor
from Screens.ChannelSelection import ChannelSelection, service_types_tv
from Screens.MessageBox import MessageBox
if openatv_like:
    from Screens.Setup import Setup
else:
    import Screens.Setup
import xml.etree.cElementTree
from Components.config import configfile

# Plugin identity and defaults.
PLUGIN_VERSION='6.2.0d'
PLUGIN_MONIKER='[Lv]'
PLUGIN_NAME='Love'
PLUGIN_DESC='Current2Fav'
PLUGIN_ICON='love.png'
PLUGIN_PATH='Extensions/Love'
if not openatv_like:
    # Non-OpenATV images need the absolute plugin path.
    PLUGIN_PATH='/usr/lib/enigma2/python/Plugins/' + PLUGIN_PATH
SETUP_KEY='love'
VERSION_DEF=PLUGIN_VERSION
VERSION_CHOICES=[(VERSION_DEF, VERSION_DEF)]
BOUQUET_DEF='userbouquet.favourites.tv'
BOUQUET=BOUQUET_DEF
SILENT_DEF=False
SILENT=SILENT_DEF
DEBUG_ACTIVE_DEF=False
DEBUG_ACTIVE=DEBUG_ACTIVE_DEF
VISIBLE_WIDTH=40
# Persisted plugin settings (target bouquet, silent mode, debug logging).
config.plugins.love = ConfigSubsection()
config.plugins.love.bouquet = ConfigText(default=BOUQUET_DEF, fixed_size=False, visible_width=VISIBLE_WIDTH)
config.plugins.love.silent = ConfigBoolean(default=SILENT_DEF)
config.plugins.love.debug = ConfigBoolean(default=DEBUG_ACTIVE_DEF)
config.plugins.love.version = ConfigSelection(default=VERSION_DEF, choices=VERSION_CHOICES)
if not openatv_like:
    SAVED_SETUP=Screens.Setup.setupdom
DEBUG_FILE='/tmp/love-debug.log'
def DEBUG(s):
if DEBUG_ACTIVE:
try:
t = time.ctime()
f = open(DEBUG_FILE, 'a+')
f.write('%s %s' % (t, s))
f.close()
print '%s %s' % (t,s)
except:
pass
def info(session, text=None, callback=None):
    # Show `text` in a MessageBox. On images whose MessageBox supports a
    # title, use a titled simple box; otherwise prefix the plugin moniker
    # into the text so the user can tell where the message came from.
    if session and text:
        if not openatv_like and text[:4] != PLUGIN_MONIKER:
            text = PLUGIN_MONIKER + ' ' + text
        TITLE = {}
        if title_like:
            TITLE = {'simple': True, 'title': PLUGIN_NAME}
        if callback:
            session.openWithCallback(callback, MessageBox, text, type=None, **TITLE)
        else:
            session.open(MessageBox, text, type=None, **TITLE)
def reConfig():
    # Pull the current values out of the Enigma2 config tree into the module
    # globals used at runtime, then persist the plugin's config section.
    global BOUQUET
    global SILENT
    global DEBUG_ACTIVE
    BOUQUET = config.plugins.love.bouquet.value
    SILENT = config.plugins.love.silent.value
    DEBUG_ACTIVE = config.plugins.love.debug.value
    config.plugins.love.save()
    if not openatv_like:
        # Non-OpenATV images need an explicit write of the config file.
        configfile.save()
def onSetupClose(test=None):
    # Callback for when the setup screen closes: re-read the (possibly
    # changed) settings and, on non-OpenATV images, restore the setup DOM
    # that openSetup() swapped in.
    reConfig()
    if not openatv_like:
        Screens.Setup.setupdom = SAVED_SETUP
def openSetup(session):
    # Open the plugin's setup screen. OpenATV accepts a plugin-local setup
    # definition directly; other images require temporarily swapping the
    # global setup DOM with our own setup.xml, restored in onSetupClose().
    global SAVED_SETUP
    if session:
        if openatv_like:
            session.openWithCallback(onSetupClose, Setup, setup=SETUP_KEY, plugin=PLUGIN_PATH)
        else:
            try:
                setup_file = file(PLUGIN_PATH + '/setup.xml', 'r')
                new_setupdom = xml.etree.cElementTree.parse(setup_file)
                setup_file.close()
                SAVED_SETUP = Screens.Setup.setupdom
                Screens.Setup.setupdom = new_setupdom
                session.openWithCallback(onSetupClose, Screens.Setup.Setup, SETUP_KEY)
                Screens.Setup.setupdom = SAVED_SETUP
            except:
                pass
def addFavourite(session):
    # Add the currently playing service to the configured favourites bouquet.
    # Unless silent mode is on, report success or failure via a MessageBox.
    if session:
        DEBUG('ADDING FAVOURITE!\n')
        try:
            service = session.nav.currentlyPlayingServiceReference
            ref = eServiceReference('%s FROM BOUQUET "%s" ORDER BY bouquet' % (service_types_tv, BOUQUET))
            ChannelSelection.instance.addServiceToBouquet(ref, service)
            if not SILENT:
                info(session, text='Saved Channel!')
        except:
            DEBUG('SOMETHING WRONG!\n')
            try:
                if not SILENT:
                    info(session, text='Something Went Wrong!\nBad Bouquet?')
            except:
                DEBUG('SOMETHING VERY WRONG!\n')
def fromPluginBrowser(session):
    # Heuristic: a non-empty dialog stack means the plugin was launched from
    # the plugin browser (so show settings) rather than via a key binding.
    if session:
        DEBUG('%s\n' % session.dialog_stack)
        if session.dialog_stack:
            DEBUG('RETURNING TRUE!\n')
            return True
    DEBUG('RETURNING FALSE!\n')
    return False
def main(session, **kwargs):
    # Plugin entry point: refresh settings, then either open the setup
    # screen (launched from the plugin browser) or save the current channel
    # to the favourites bouquet (launched via a key binding).
    if session:
        reConfig()
        if fromPluginBrowser(session):
            DEBUG('OPENING SETUP!\n')
            openSetup(session)
            return
        DEBUG('ADDING FAVOURITE!\n')
        addFavourite(session)
def Plugins(**kwargs):
    # Enigma2 plugin registration: expose `main` in the plugin menu.
    return PluginDescriptor(
        name=PLUGIN_NAME,
        description=PLUGIN_DESC,
        where=PluginDescriptor.WHERE_PLUGINMENU,
        icon=PLUGIN_ICON,
        fnc=main)
| StarcoderdataPython |
import shutil
import psutil
import os.path
from os import path


def cpuOverUsage():
    """Check whether CPU usage exceeds 50%, sampled over a 2-second interval.

    Returns a tuple ``(is_over_usage, message)`` where ``is_over_usage`` is
    True when usage is 50% or more. Note: blocks for ~2 seconds while
    psutil samples. More on https://psutil.readthedocs.io/en/latest/
    """
    cpu_percent = psutil.cpu_percent(2)
    if cpu_percent < 50:
        return (False, "It's using less than 50%")
    return (True, "It's using more than 50%")
def diskOverUsage(partition):
    """Check whether disk usage on ``partition`` exceeds 50%.

    Returns a tuple ``(is_over_usage, message)``; ``is_over_usage`` is True
    when 50% or more of the partition's capacity is used.
    Raises ValueError if ``partition`` does not exist.
    More on https://docs.python.org/3/library/shutil.html
    """
    # Guard clause: fail fast on a nonexistent path (fixes the grammar of
    # the original error message as well).
    if not path.exists(partition):
        raise ValueError("The path doesn't exist")
    usage = shutil.disk_usage(partition)
    disk_percent = (usage.used / usage.total) * 100
    if disk_percent < 50:
        return (False, "It's using less than 50%")
    return (True, "It's using more than 50%")
| StarcoderdataPython |
3489057 | """Config flow for habitica integration."""
from __future__ import annotations
import logging
from aiohttp import ClientResponseError
from habitipy.aio import HabitipyAsync
import voluptuous as vol
from openpeerpower import config_entries, core, exceptions
from openpeerpower.const import CONF_API_KEY, CONF_NAME, CONF_URL
from openpeerpower.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_API_USER, DEFAULT_URL, DOMAIN
# User-step form schema: Habitica API user id and key are required; the
# display name and base URL are optional (URL defaults to DEFAULT_URL).
DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_API_USER): str,
        vol.Required(CONF_API_KEY): str,
        vol.Optional(CONF_NAME): str,
        vol.Optional(CONF_URL, default=DEFAULT_URL): str,
    }
)
_LOGGER = logging.getLogger(__name__)
async def validate_input(
    opp: core.OpenPeerPower, data: dict[str, str]
) -> dict[str, str]:
    """Validate the user input allows us to connect."""
    websession = async_get_clientsession(opp)
    client = HabitipyAsync(
        conf={
            "login": data[CONF_API_USER],
            "password": data[CONF_API_KEY],
            "url": data[CONF_URL] or DEFAULT_URL,
        }
    )
    # A failed fetch of the user profile means the credentials are bad.
    try:
        await client.user.get(session=websession)
    except ClientResponseError as ex:
        raise InvalidAuth() from ex
    return {
        "title": f"{data.get('name', 'Default username')}",
        CONF_API_USER: data[CONF_API_USER],
    }
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a config flow for habitica."""

    VERSION = 1

    async def async_step_user(self, user_input=None):
        """Handle the initial step."""
        errors = {}
        if user_input is not None:
            try:
                info = await validate_input(self.opp, user_input)
            except InvalidAuth:
                errors = {"base": "invalid_credentials"}
            except Exception:  # pylint: disable=broad-except
                # Any other failure is surfaced as a generic error in the UI.
                _LOGGER.exception("Unexpected exception")
                errors = {"base": "unknown"}
            else:
                # One config entry per Habitica API user.
                await self.async_set_unique_id(info[CONF_API_USER])
                self._abort_if_unique_id_configured()
                return self.async_create_entry(title=info["title"], data=user_input)
        # First call (user_input is None) or validation failed: (re)show the form.
        return self.async_show_form(
            step_id="user",
            data_schema=DATA_SCHEMA,
            errors=errors,
            description_placeholders={},
        )

    async def async_step_import(self, import_data):
        """Import habitica config from configuration.yaml."""
        # YAML import reuses the interactive flow with the imported payload.
        return await self.async_step_user(import_data)
# Raised by validate_input when Habitica rejects the supplied credentials.
class InvalidAuth(exceptions.OpenPeerPowerError):
    """Error to indicate there is invalid auth."""
| StarcoderdataPython |
4959916 | <reponame>minhhn2910/tensorox
#!/usr/bin/python
# Designed by: <NAME>
# Date: March 26th - 2015
# Alternative Computing Technologies Lab.
# Georgia Institute of Technology
import sys
import random
import math


def Usage():
    """Print usage information and exit with a non-zero status."""
    # print() call form replaces the Python-2 print statement.
    print("Usage: python data_generator.py <size> <output file>")
    exit(1)


if len(sys.argv) != 3:
    Usage()

data_size = sys.argv[1]
# `with` guarantees the output file is flushed and closed (the original
# left the handle open).
with open(sys.argv[2], 'w') as loc_out:
    # First line records the number of generated records.
    loc_out.write(str(data_size) + "\n")
    for i in range(int(data_size)):
        # Each record: 6 groups of 3 random integers in [-50, 50].
        for j in range(6):
            for k in range(3):
                loc_out.write(str(random.randint(0, 100) - 50) + " ")
        loc_out.write("\n")
print("Thank you...")
| StarcoderdataPython |
1788765 | from . import UP_ENDPOINT, session
import json
class AccountIDMissingError(Exception):
    """Raised when a request requires an account id that was not supplied."""
    pass


# The Webhooks methods below raise these two exceptions, but the original
# module never defined them, so every raise became a NameError instead.
class WebhooksIDMissingError(Exception):
    """Raised when a request requires a webhook id that was not supplied."""
    pass


class WebhooksWebhookIDMissingError(Exception):
    """Raised when a request requires a webhookId that was not supplied."""
    pass
# BUG FIX: the original declared ``class Webhooks(self):`` — ``self`` is
# undefined at class-definition time, so importing the module raised NameError.
class Webhooks:
    """
    Webhooks
    ~~~~~~~~~~~~~~~~~~~~~
    Webhooks provide a mechanism for a configured URL to receive events when transaction activity occurs on Up. You can think of webhooks as being like push notifications for your server-side application.
    """

    # Endpoints covered by this wrapper:
    #   list_webhooks       GET    /webhooks
    #   create_webhook      POST   /webhooks
    #   retrieve_webhook    GET    /webhooks/{id}
    #   delete_webhook      DELETE /webhooks/{id}
    #   ping_webhook        POST   /webhooks/{webhookId}/ping
    #   list_webhook_logs   GET    /webhooks/{webhookId}/logs

    def __init__(self, pageSize=None, id=None, webhookID=None):
        """
        ``page[size] = integer``\n
        The number of records to return in each page.\n
        e.g. ?page[size]=30
        ``id = string``\n
        The unique identifier for the webhook.\n
        e.g. e5327e26-084f-41ed-842e-a538820ed155
        ``webhookId = string``\n
        The unique identifier for the webhook.\n
        e.g. ee77dad8-4213-40f3-a217-aed603477f64
        """
        # Pre-render the query fragment / path pieces the request methods use.
        if pageSize is None:
            self.webhooks_pagesize = None
        else:
            self.webhooks_pagesize = f"page[size]={pageSize}"
        if id is None:
            self.webhooks_id = None
        else:
            self.webhooks_id = f"{id}"
        if webhookID is None:
            # BUG FIX: the original wrote ``self.webhooks_webhook_id is None``
            # (a no-op comparison that raised AttributeError); assign instead.
            self.webhooks_webhook_id = None
        else:
            self.webhooks_webhook_id = f"{webhookID}"

    def list_webhooks(self):
        """
        Retrieve a list of configured webhooks. The returned list is paginated and can be scrolled by following the next and prev links where present. Results are ordered oldest first to newest last.
        """
        # NOTE(review): path templates are empty f-string placeholders in the
        # original — fill them in with real endpoint URLs before use.
        if self.webhooks_pagesize:
            path = f""
        else:
            path = f""
        response = session.get(path)
        print(path)
        return response

    def create_webhook(self):
        """
        Create a new webhook with a given URL. The URL will receive webhook events as JSON-encoded POST requests. The URL must respond with a HTTP 200 status on success.
        There is currently a limit of 10 webhooks at any given time. Once this limit is reached, existing webhooks will need to be deleted before new webhooks can be created.
        Event delivery is retried with exponential backoff if the URL is unreachable or it does not respond with a 200 status. The response includes a secretKey attribute, which is used to sign requests sent to the webhook URL. It will not be returned from any other endpoints within the Up API. If the secretKey is lost, simply create a new webhook with the same URL, capture its secretKey and then delete the original webhook. See Handling webhook events for details on how to process webhook events.
        It is probably a good idea to test the webhook by sending it a PING event after creating it.
        """
        # POST — not implemented yet.
        pass

    def retrieve_webhook(self):
        """
        Retrieve a specific webhook by providing its unique identifier.
        """
        if self.webhooks_id:
            path = f""
            response = session.get(path)
            print(path)
            return response
        raise WebhooksIDMissingError(
            "Webhooks ID required for this request"
        )

    def delete_webhook(self):
        """
        Delete a specific webhook by providing its unique identifier. Once deleted, webhook events will no longer be sent to the configured URL.
        """
        if self.webhooks_id:
            path = f""
            response = session.get(path)
            print(path)
            return response
        raise WebhooksIDMissingError(
            "Webhooks ID required for this request"
        )

    def ping_webhook(self):
        """
        Send a PING event to a webhook by providing its unique identifier. This is useful for testing and debugging purposes. The event is delivered asynchronously and its data is returned in the response to this request.
        """
        if self.webhooks_webhook_id:
            path = f""
            response = session.get(path)
            print(path)
            return response
        raise WebhooksWebhookIDMissingError(
            "Webhooks Webhook ID required for this request"
        )

    def list_webhook_logs(self):
        """
        Retrieve a list of delivery logs for a webhook by providing its unique identifier. This is useful for analysis and debugging purposes. The returned list is paginated and can be scrolled by following the next and prev links where present. Results are ordered newest first to oldest last. Logs may be automatically purged after a period of time.
        """
        if self.webhooks_webhook_id:
            if self.webhooks_pagesize:
                path = f""
            else:
                path = f""
            response = session.get(path)
            print(path)
            # BUG FIX: the original unpaginated branch dropped the response.
            return response
        raise WebhooksWebhookIDMissingError(
            "Webhooks Webhook ID required for this request"
        )
| StarcoderdataPython |
1671548 | """
Divide two integers without using multiplication, division and mod operator.
If it is overflow, return 2147483647
Example
Given dividend = 100 and divisor = 9, return 11.
"""
__author__ = 'Daniel'
class Solution:
    def divide(self, dividend, divisor):
        """Integer division without *, / or %, clamped to the 32-bit range.

        Truncates toward zero; returns 2147483647 on overflow.
        """
        if dividend == 0 or divisor == 0:
            return 0
        INT_MAX = 2147483647
        INT_MIN = -2147483648
        # The single overflowing case: |INT_MIN| is not representable.
        if dividend == INT_MIN and divisor == -1:
            return INT_MAX
        negative = (dividend > 0) != (divisor > 0)
        remaining = abs(dividend)
        step = abs(divisor)
        if step > remaining:
            return 0
        quotient = 0
        # Repeated shift-and-subtract: peel off the largest doubled divisor
        # that still fits, accumulating the matching power of two.
        while step <= remaining:
            chunk = step
            count = 1
            while (chunk << 1) <= remaining:
                chunk <<= 1
                count <<= 1
            quotient += count
            remaining -= chunk
        return -quotient if negative else quotient
if __name__ == "__main__":
print Solution().divide(-1, 1) | StarcoderdataPython |
4893962 | # Copyright (c) 2015 <NAME>
#
# See the file license.txt for copying permission.
import anyio
import unittest
from distmqtt.mqtt.publish import PublishPacket, PublishVariableHeader, PublishPayload
from distmqtt.adapters import BufferAdapter
from distmqtt.mqtt.constants import QOS_0, QOS_1, QOS_2
class PublishPacketTest(unittest.TestCase):
    """Round-trip and builder tests for the MQTT PUBLISH packet codec."""

    def test_from_stream_qos_0(self):
        # 0x31 header: PUBLISH, QoS 0 (no packet id on the wire), retain set.
        data = b"\x31\x11\x00\x05topic0123456789"
        stream = BufferAdapter(data)
        message = anyio.run(PublishPacket.from_stream, stream)
        self.assertEqual(message.variable_header.topic_name, "topic")
        self.assertEqual(message.variable_header.packet_id, None)
        self.assertFalse((message.fixed_header.flags >> 1) & 0x03)
        self.assertTrue(message.fixed_header.flags & 0x01)
        # BUG FIX: the original used assertTrue(x, msg), which never compared
        # the payload — the second argument is the failure message.
        self.assertEqual(message.payload.data, b"0123456789")

    def test_from_stream_qos_2(self):
        # 0x37 header: PUBLISH, QoS bits set, retain set; packet id 10 follows
        # the topic in the variable header.
        data = b"\x37\x13\x00\x05topic\x00\x0a0123456789"
        stream = BufferAdapter(data)
        message = anyio.run(PublishPacket.from_stream, stream)
        self.assertEqual(message.variable_header.topic_name, "topic")
        self.assertEqual(message.variable_header.packet_id, 10)
        self.assertTrue((message.fixed_header.flags >> 1) & 0x03)
        self.assertTrue(message.fixed_header.flags & 0x01)
        # BUG FIX: assertTrue(x, msg) -> assertEqual (see test_from_stream_qos_0).
        self.assertEqual(message.payload.data, b"0123456789")

    def test_to_stream_no_packet_id(self):
        # Without a packet id the variable header only encodes the topic.
        variable_header = PublishVariableHeader("topic", None)
        payload = PublishPayload(b"0123456789")
        publish = PublishPacket(variable_header=variable_header, payload=payload)
        out = publish.to_bytes()
        self.assertEqual(out, b"\x30\x11\x00\x05topic0123456789")

    def test_to_stream_packet(self):
        variable_header = PublishVariableHeader("topic", 10)
        payload = PublishPayload(b"0123456789")
        publish = PublishPacket(variable_header=variable_header, payload=payload)
        out = publish.to_bytes()
        self.assertEqual(out, b"\x30\x13\x00\x05topic\00\x0a0123456789")

    def test_build(self):
        # Exercise every (dup, qos, retain) combination; this replaces twelve
        # copy-pasted near-identical assertion blocks with one product loop.
        for dup in (False, True):
            for qos in (QOS_0, QOS_1, QOS_2):
                for retain in (False, True):
                    packet = PublishPacket.build("/topic", b"data", 1, dup, qos, retain)
                    self.assertEqual(packet.packet_id, 1)
                    self.assertEqual(bool(packet.dup_flag), dup)
                    self.assertEqual(packet.qos, qos)
                    self.assertEqual(bool(packet.retain_flag), retain)
| StarcoderdataPython |
385887 | import os
from params import args
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
from keras.preprocessing.image import img_to_array, load_img
from keras.applications.imagenet_utils import preprocess_input
from models.model_factory import make_model
from os import path, mkdir, listdir
import numpy as np
np.random.seed(1)
import random
random.seed(1)
import tensorflow as tf
tf.set_random_seed(1)
import timeit
import cv2
from tqdm import tqdm
# Input directory (one sub-folder per test image) and output directory for
# the predicted masks.
test_folder = args.test_folder
test_pred = os.path.join(args.out_root_dir, args.out_masks_folder)
all_ids = []
all_images = []
all_masks = []
# Number of channels the network predicts (e.g. mask + border).
OUT_CHANNELS = args.out_channels
def preprocess_inputs(x):
    # Normalize images the same way the backbone was trained; the mode comes
    # from the CLI arguments.
    return preprocess_input(x, mode=args.preprocessing_function)
if __name__ == '__main__':
    t0 = timeit.default_timer()
    # Load every ensemble member listed on the command line.
    weights = [os.path.join(args.models_dir, m) for m in args.models]
    models = []
    for w in weights:
        model = make_model(args.network, (None, None, 3))
        print("Building model {} from weights {} ".format(args.network, w))
        model.load_weights(w)
        models.append(model)
    os.makedirs(test_pred, exist_ok=True)
    print('Predicting test')
    for d in tqdm(listdir(test_folder)):
        if not path.isdir(path.join(test_folder, d)):
            continue
        final_mask = None
        # NOTE(review): only scale 0 is active (range(1)); the 0.75x/1.25x
        # branches below are dead unless the range is widened.
        for scale in range(1):
            fid = d
            # BGR -> RGB via the [..., ::-1] reversal.
            img = cv2.imread(path.join(test_folder, fid, 'images', '{0}.png'.format(fid)), cv2.IMREAD_COLOR)[...,::-1]
            if final_mask is None:
                final_mask = np.zeros((img.shape[0], img.shape[1], OUT_CHANNELS))
            if scale == 1:
                img = cv2.resize(img, None, fx=0.75, fy=0.75, interpolation=cv2.INTER_AREA)
            elif scale == 2:
                img = cv2.resize(img, None, fx=1.25, fy=1.25, interpolation=cv2.INTER_CUBIC)
            # Symmetric padding: a base 16px border plus whatever is needed to
            # make both sides divisible by 32 (network downsampling factor).
            x0 = 16
            y0 = 16
            x1 = 16
            y1 = 16
            if (img.shape[1] % 32) != 0:
                x0 = int((32 - img.shape[1] % 32) / 2)
                x1 = (32 - img.shape[1] % 32) - x0
                x0 += 16
                x1 += 16
            if (img.shape[0] % 32) != 0:
                y0 = int((32 - img.shape[0] % 32) / 2)
                y1 = (32 - img.shape[0] % 32) - y0
                y0 += 16
                y1 += 16
            img0 = np.pad(img, ((y0, y1), (x0, x1), (0, 0)), 'symmetric')
            # Test-time augmentation: 2 flips x 4 rotations = 8 variants.
            # Even rotations keep the original H/W and go to inp0; odd
            # rotations swap H/W and go to inp1 so each batch is homogeneous.
            inp0 = []
            inp1 = []
            for flip in range(2):
                for rot in range(4):
                    if flip > 0:
                        img = img0[::-1, ...]
                    else:
                        img = img0
                    if rot % 2 == 0:
                        inp0.append(np.rot90(img, k=rot))
                    else:
                        inp1.append(np.rot90(img, k=rot))
            inp0 = np.asarray(inp0)
            inp0 = preprocess_inputs(np.array(inp0, "float32"))
            inp1 = np.asarray(inp1)
            inp1 = preprocess_inputs(np.array(inp1, "float32"))
            mask = np.zeros((img0.shape[0], img0.shape[1], OUT_CHANNELS))
            for model in models:
                pred0 = model.predict(inp0, batch_size=1)
                pred1 = model.predict(inp1, batch_size=1)
                j = -1
                # Undo each augmentation (inverse rotation, then un-flip)
                # before accumulating into the shared mask.
                for flip in range(2):
                    for rot in range(4):
                        j += 1
                        if rot % 2 == 0:
                            pr = np.rot90(pred0[int(j / 2)], k=(4 - rot))
                        else:
                            pr = np.rot90(pred1[int(j / 2)], k=(4 - rot))
                        if flip > 0:
                            pr = pr[::-1, ...]
                        mask += pr # [..., :2]
            # Average over 8 TTA variants per model, then crop the padding.
            mask /= (8 * len(models))
            mask = mask[y0:mask.shape[0] - y1, x0:mask.shape[1] - x1, ...]
            if scale > 0:
                mask = cv2.resize(mask, (final_mask.shape[1], final_mask.shape[0]))
            final_mask += mask
        # One scale only, hence the division by 1.
        final_mask /= 1
        if OUT_CHANNELS == 2:
            # Pad a zero third channel so the result can be saved as RGB.
            final_mask = np.concatenate([final_mask, np.zeros_like(final_mask)[..., 0:1]], axis=-1)
        final_mask = final_mask * 255
        final_mask = final_mask.astype('uint8')
        cv2.imwrite(path.join(test_pred, '{0}.png'.format(fid)), final_mask, [cv2.IMWRITE_PNG_COMPRESSION, 9])
    elapsed = timeit.default_timer() - t0
    print('Time: {:.3f} min'.format(elapsed / 60))
class OsakaBehavior:
    """Behavior object that announces takoyaki."""

    def print(self):
        """Write "TAKOYAKI" to stdout (method name intentionally mirrors the built-in)."""
        catchphrase = "TAKOYAKI"
        print(catchphrase)
| StarcoderdataPython |
1854296 | import json
import os
import pytest
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase
from motor_odm import Document
@pytest.fixture(scope="function")
async def db() -> AsyncIOMotorDatabase:
client = AsyncIOMotorClient("localhost")
db = client["test"]
await client.drop_database(db)
Document.use(db)
yield db
await client.drop_database(db)
@pytest.fixture(scope="function")
async def data(db: AsyncIOMotorDatabase) -> None:
with open(os.path.join(os.path.dirname(__file__), "data.json")) as f:
file_data = json.load(f)
for key, value in file_data.items():
await db[key].insert_many(value)
| StarcoderdataPython |
3410137 | <filename>custom/icds/repeaters/phi.py
from django.utils.translation import ugettext_lazy as _
from corehq import toggles
from corehq.form_processor.models import CommCareCaseSQL
from corehq.form_processor.signals import sql_case_post_save
from corehq.motech.repeaters.models import CaseRepeater
from corehq.motech.repeaters.signals import create_repeat_records
from custom.icds.integrations.phi import send_request
from custom.icds.repeaters.generators.phi import (
SearchByParamsPayloadGenerator,
ValidatePHIDPayloadGenerator,
)
class BasePHIRepeater(CaseRepeater):
    # Shared base: PHI repeaters are only offered on domains with the
    # PHI_CAS_INTEGRATION feature flag enabled.
    @classmethod
    def available_for_domain(cls, domain):
        return toggles.PHI_CAS_INTEGRATION.enabled(domain)
class SearchByParamsRepeater(BasePHIRepeater):
    payload_generator_classes = (SearchByParamsPayloadGenerator,)
    friendly_name = _("Search for Beneficiary via params to get PHI ID")

    def allowed_to_forward(self, payload):
        # Only forward cases that do not yet have a PHI id assigned.
        return (
            super(SearchByParamsRepeater, self).allowed_to_forward(payload)
            and not payload.get_case_property('phid_for_beneficiary')
        )

    def send_request(self, repeat_record, payload):
        # 'search' selects the PHI lookup-by-parameters endpoint.
        return send_request('search', payload)
class ValidatePHIDRepeater(BasePHIRepeater):
    payload_generator_classes = (ValidatePHIDPayloadGenerator,)
    friendly_name = _("Validate PHI ID")

    def allowed_to_forward(self, payload):
        # Forward cases that already carry a PHI id but have not been validated.
        return (
            super(ValidatePHIDRepeater, self).allowed_to_forward(payload)
            and payload.get_case_property('phid_for_beneficiary')
            and not payload.get_case_property('phid_validated')
        )

    def send_request(self, repeat_record, payload):
        # 'validate' selects the PHI id validation endpoint.
        return send_request('validate', payload)
def create_phi_repeat_records(sender, case, **kwargs):
    # Queue both PHI repeaters for every saved SQL case; each repeater's own
    # allowed_to_forward decides whether a record is actually created.
    create_repeat_records(SearchByParamsRepeater, case)
    create_repeat_records(ValidatePHIDRepeater, case)
sql_case_post_save.connect(create_phi_repeat_records, CommCareCaseSQL,
                           dispatch_uid='phi_integration_case_receiver')
| StarcoderdataPython |
1985857 | # coding=utf-8
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constant Value Network that always predicts a constant value."""
import gin
import tensorflow as tf
from tf_agents.networks import network
from tf_agents.utils import nest_utils
@gin.configurable
class ConstantValueNetwork(network.Network):
  """Constant value network that predicts a fixed constant per batch item."""

  def __init__(self, input_tensor_spec, constant_output_val=0, name=None):
    """Creates an instance of `ConstantValueNetwork`.

    Network supports calls with shape outer_rank + observation_spec.shape. Note
    outer_rank must be at least 1.

    Args:
      input_tensor_spec: A `tensor_spec.TensorSpec` or a tuple of specs
        representing the input observations.
      constant_output_val: A constant scalar value the network will output.
      name: A string representing name of the network.

    Raises:
      ValueError: If input_tensor_spec is not an instance of network.InputSpec.
    """
    # Stateless network: state_spec is empty.
    super(ConstantValueNetwork, self).__init__(
        input_tensor_spec=input_tensor_spec, state_spec=(), name=name)
    self._constant_output_val = constant_output_val

  def call(self, observation, step_type=None, network_state=(), training=False):
    # Output shape is the batch (outer) shape of the observation; every batch
    # item receives the same scalar. Network state passes through unchanged.
    shape = nest_utils.get_outer_array_shape(observation,
                                             self._input_tensor_spec)
    return tf.constant(
        self._constant_output_val, tf.float32, shape=shape), network_state
| StarcoderdataPython |
9632049 | import os
import wandb
import tempfile
import numpy as np
from functools import partial
import torch
from torch.utils.data import DataLoader
from torch.optim import Adam,AdamW
from torch.nn.utils import clip_grad_norm_
from oil.datasetup.datasets import split_dataset
from oil.utils.utils import LoaderTo, FixedNumpySeed, \
cosLr, Eval
from oil.model_trainers import Trainer
from ..systems.chain_pendulum import ChainPendulum
from ..models import HNN
from ..datasets import RigidBodyDataset
def logspace(a, b, k):
    """Return *k* points spaced evenly on a log scale from *a* to *b* (inclusive)."""
    log_points = np.linspace(np.log(a), np.log(b), k)
    return np.exp(log_points)
class IntegratedDynamicsTrainer(Trainer):
    """ Model should specify the dynamics, mapping from t,z -> dz/dt"""

    def __init__(self, *args, tol=1e-4, constrained=False, loss="l2", **kwargs):
        super().__init__(*args, **kwargs)
        self.hypers["tol"] = tol  # ODE-integrator tolerance
        self.num_mbs = 0  # minibatch counter, used to normalize NFE logging
        self.constrained = constrained
        self.loss_type = loss  # "l2" or "l1"
        self.body = self.dataloaders["train"].dataset.body
        if "ms" in dir(self.body):
            self.m = torch.from_numpy(np.array(self.body.ms))
            print(f"***MASS***: {self.m}")

    def loss(self, minibatch, regularization=True):
        """Trajectory-fitting loss (L2 or L1) between integrated and true states."""
        with torch.enable_grad():
            self.num_mbs += 1
            (z0, ts), true_zs = minibatch
            pred_zs = self.model.integrate(z0, ts[0], tol=self.hypers["tol"])
            if self.loss_type == "l2":
                loss = (pred_zs - true_zs).pow(2).mean()
            elif self.loss_type == "l1":
                loss = (pred_zs - true_zs).abs().mean()
            else:
                # Fail loudly; the original fell through to UnboundLocalError.
                raise ValueError(f"Unknown loss type: {self.loss_type!r}")
            if hasattr(self.model, "regularization"):
                loss += self.model.regularization(self)
            return loss

    def step(self, minibatch):
        """One optimization step with gradient-norm clipping at 1."""
        self.model.train()
        self.optimizer.zero_grad()
        loss = self.loss(minibatch)
        loss.backward()
        clip_grad_norm_(self.model.parameters(), 1)
        self.optimizer.step()
        return loss

    def rel_err(self, pred_zs, true_zs, ts):
        """Log of the symmetric relative trajectory error, clamped at 1e-7."""
        square_err = ((true_zs - pred_zs)**2).mean((-1,-2))
        rel_err = torch.sqrt(square_err) / \
            (torch.sqrt((true_zs**2).mean((-1,-2)))+torch.sqrt((pred_zs**2).mean((-1,-2))))
        loggeomean_rel_err = torch.log(torch.clamp(rel_err,min=1e-7))
        return loggeomean_rel_err

    def rel_err_vec(self):
        """Per-(system, time) log relative error over the whole test set."""
        test_loader = self.dataloaders["test"]
        log_H_err = []
        for mb in test_loader:
            self.model.eval()
            (z0, ts), true_zs = mb
            pred_zs = self.model.integrate(z0, ts[0], tol=self.hypers["tol"])
            _log_H_err = self.rel_err(pred_zs, true_zs, ts).cpu().data.numpy()
            log_H_err.append(_log_H_err)
        log_H_err = np.concatenate(log_H_err, axis=0)
        return log_H_err

    def energy_err(self, pred_zs, true_zs, ts):
        """Log of the symmetric relative Hamiltonian (energy) error per timestep."""
        bs, T = true_zs.shape[:2]
        # Energies are evaluated in cartesian coordinates.
        cartesian_pred_zs = self.body.body2globalCoords(pred_zs.reshape(bs*T, *pred_zs.shape[2:]))
        pred_H = self.body.hamiltonian(ts, cartesian_pred_zs.reshape(bs*T,-1).cpu())
        cartesian_true_zs = self.body.body2globalCoords(true_zs.reshape(bs*T, *true_zs.shape[2:]))
        true_H = self.body.hamiltonian(ts, cartesian_true_zs.reshape(bs*T,-1).cpu())
        H_err = torch.abs(true_H - pred_H)/(torch.abs(true_H)+torch.abs(pred_H))
        log_H_err = torch.log(torch.clamp(H_err,min=1e-7))
        log_H_err = log_H_err.reshape(bs, T)
        return log_H_err

    def energy_err_vec(self):
        """Per-(system, time) log energy error over the whole test set."""
        test_loader = self.dataloaders["test"]
        log_H_err = []
        for mb in test_loader:
            self.model.eval()
            (z0, ts), true_zs = mb
            pred_zs = self.model.integrate(z0, ts[0], tol=self.hypers["tol"])
            _log_H_err = self.energy_err(pred_zs, true_zs, ts).cpu().data.numpy()
            log_H_err.append(_log_H_err)
        log_H_err = np.concatenate(log_H_err, axis=0)
        return log_H_err

    def metrics(self, loader):
        """MSE plus geometric-mean relative and energy errors over *loader*."""
        def _metrics(mb):
            self.model.eval()
            (z0, ts), true_zs = mb
            pred_zs = self.model.integrate(z0, ts[0], tol=self.hypers["tol"])
            square_err = ((true_zs - pred_zs)**2).mean((-1,-2))
            loggeomean_rel_err = self.rel_err(pred_zs, true_zs, ts).mean()
            log_H_err = self.energy_err(pred_zs, true_zs, ts).mean()
            return np.array([loggeomean_rel_err.cpu().data.numpy(), square_err.mean().cpu().data.numpy(),
                             log_H_err.cpu().data.numpy()])
        loggeomean, mse, log_herr = self.evalAverageMetrics(loader, _metrics)
        return {"MSE": mse, 'gerr': np.exp(loggeomean), 'Herr': np.exp(log_herr)}

    def logStuff(self, step, minibatch=None):
        # Average number of function evaluations per minibatch.
        self.logger.add_scalars(
            "info", {"nfe": self.model.nfe / (max(self.num_mbs, 1e-3))}, step
        )
        super().logStuff(step, minibatch)
        df = self.logger.scalar_frame.iloc[-1:]
        log_dict = {str(c): df[c].to_numpy()[0] for c in df.columns}
        try:
            wandb.log(log_dict)
        except Exception as e:
            # Best effort: a failed metric upload must never kill training.
            print(e)

    def test_rollouts(self, angular_to_euclidean=False, pert_eps=1e-4):
        """Roll out 10s trajectories on the test set; return (rel_errs, last preds)."""
        dataloader = self.dataloaders["test"]
        rel_errs = []
        with Eval(self.model), torch.no_grad():
            for mb in dataloader:
                z0, T = mb[0]  # assume timesteps evenly spaced for now
                T = T[0]
                body = dataloader.dataset.body
                long_T = torch.arange(0., 10., body.dt).to(z0.device, z0.dtype)
                zt_pred = self.model.integrate(z0, long_T, method='rk4')
                bs, Nlong, *rest = zt_pred.shape
                if angular_to_euclidean:
                    # Compare in cartesian coordinates.
                    z0 = body.body2globalCoords(z0)
                    flat_pred = body.body2globalCoords(zt_pred.reshape(bs * Nlong, *rest))
                    zt_pred = flat_pred.reshape(bs, Nlong, *flat_pred.shape[1:])
                zt = dataloader.dataset.body.integrate(z0, long_T)
                rel_error = ((zt_pred - zt) ** 2).sum(-1).sum(-1).sum(-1).sqrt() / (
                    (zt_pred + zt) ** 2
                ).sum(-1).sum(-1).sum(-1).sqrt()
                rel_errs.append(rel_error)
        rel_errs = torch.cat(rel_errs, dim=0)  # (D,T)
        both = (rel_errs, zt_pred)
        return both
def make_trainer(*,
                 network=HNN, net_cfg=None, device=None, root_dir=None,
                 dataset=RigidBodyDataset, body=None, tau=3, n_systems=1000, regen=False, C=5,
                 lr=3e-3, bs=200, num_epochs=100, trainer_config=None, net_seed=0, n_subsample=None, data_seed=0,
                 noise_rate=None, weight_decay=1e-4):
    """Build an IntegratedDynamicsTrainer: datasets, model, loaders, optimizer.

    Defaults for ``net_cfg``, ``trainer_config`` and ``body`` are ``None``
    sentinels instead of mutable objects: the original ``body=ChainPendulum(3)``
    was a shared default instance that got *mutated* below via
    ``body.integration_time = tau``, leaking state between calls.
    """
    net_cfg = {} if net_cfg is None else net_cfg
    trainer_config = {} if trainer_config is None else trainer_config
    if body is None:
        body = ChainPendulum(3)  # fresh instance per call
    # Create Training set and model
    if isinstance(network, str):
        # NOTE(review): eval() on the network name — only pass trusted strings.
        network = eval(network)
    constrained = False
    angular = not constrained
    if n_subsample is None:
        n_subsample = n_systems
    splits = {"train": int(0.8 * n_subsample), "test": int(0.2 * n_subsample)}
    body.integration_time = tau
    with FixedNumpySeed(data_seed):
        dataset_cons = dataset
        dataset = dataset_cons(root_dir=root_dir, n_systems=n_systems, regen=regen,
                               chunk_len=C, body=body, angular_coords=angular,
                               n_subsample=n_subsample, noise_rate=noise_rate, seed=data_seed)
        datasets = split_dataset(dataset, splits)
        # Longer (chunk_len=100) held-out trajectories for evaluation.
        datasets["test"] = dataset_cons(root_dir=root_dir, n_systems=splits["test"], regen=regen,
                                        chunk_len=100, body=body, angular_coords=angular,
                                        n_subsample=splits["test"], noise_rate=noise_rate, mode="val",
                                        seed=data_seed)
    dof_ndim = dataset.body.D if angular else dataset.body.d
    torch.manual_seed(net_seed)
    model = network(G=dataset.body.body_graph, dof_ndim=dof_ndim,
                    angular_dims=dataset.body.angular_dims, **net_cfg)
    if torch.cuda.is_available() and device is None:
        device = "cuda"
    model = model.float().to(device)
    # Create train and Dev(Test) dataloaders and move elems to gpu
    dataloaders = {
        k: LoaderTo(DataLoader(v, batch_size=min(bs, splits[k]), num_workers=0,
                               shuffle=(k == "train"), drop_last=True), device=device, dtype=torch.float32)
        for k, v in datasets.items()}
    dataloaders["Train"] = dataloaders["train"]
    # Initialize optimizer and learning rate schedule
    opt_constr = lambda params: AdamW(params, lr=lr, weight_decay=weight_decay)
    lr_sched = cosLr(num_epochs)
    return IntegratedDynamicsTrainer(
        model, dataloaders, opt_constr, lr_sched,
        log_dir=os.path.join("runs", tempfile.mkdtemp()),
        constrained=constrained, log_args={"timeFrac": 1 / 4, "minPeriod": 0.0},
        **trainer_config)
| StarcoderdataPython |
8047855 | #!/usr/bin/env python
r"""Test spacecraft class.
"""
import pdb
import numpy as np
import pandas as pd
import unittest
import pandas.testing as pdt
from scipy import constants
from unittest import TestCase
from abc import ABC, abstractclassmethod, abstractproperty
# import test_base as base
from solarwindpy.tests import test_base as base
from solarwindpy import vector
from solarwindpy import spacecraft
pd.set_option("mode.chained_assignment", "raise")
class TestBase(ABC):
    """Frame-agnostic checks of the Spacecraft API; concrete subclasses supply
    the data layout, the spacecraft name, and the reference frame."""

    @classmethod
    def setUpClass(cls):
        # Load shared spacecraft test data, then let the subclass reshape it
        # and construct the object under test.
        # (The original carried ~25 lines of commented-out fixture data and
        # debug statements here; removed as dead code.)
        data = base.TestData()
        cls.data = data.spacecraft_data
        cls.set_object_testing()

    @abstractclassmethod
    def set_object_testing(cls):
        """Build ``cls.object_testing`` (and finalize ``cls.data``)."""
        pass

    @abstractproperty
    def name(self):
        """Expected spacecraft name, e.g. "WIND"."""
        pass

    @abstractproperty
    def frame(self):
        """Expected coordinate frame, e.g. "GSE" or "HCI"."""
        pass

    def test_position(self):
        cols = pd.Index(("x", "y", "z"), name="C")
        ot = self.object_testing
        pdt.assert_index_equal(cols, ot.position.columns)
        self.assertIsInstance(ot.position, vector.Vector)
        # `r` and `pos` are aliases of `position`.
        self.assertEqual(ot.position, ot.r)
        self.assertEqual(ot.position, ot.pos)
        return ot

    def test_velocity(self):
        cols = pd.Index(("x", "y", "z"), name="C")
        ot = self.object_testing
        pdt.assert_index_equal(cols, ot.velocity.columns)
        self.assertIsInstance(ot.velocity, vector.Vector)
        # `v` is an alias of `velocity`.
        self.assertEqual(ot.velocity, ot.v)
        return ot

    def test_data(self):
        ot = self.object_testing
        pdt.assert_frame_equal(self.data, ot.data)

    def test_name(self):
        ot = self.object_testing
        self.assertEqual(self.name, ot.name)

    def test_frame(self):
        ot = self.object_testing
        self.assertEqual(self.frame, ot.frame)

    def test_distance2sun(self):
        ot = self.object_testing
        frame = self.frame
        pos = self.data.loc[:, "pos"]
        if frame == "GSE":
            # Origin is Earth, so we need to transform x-component to sun-centered.
            assert pos.columns.equals(pd.Index(("x", "y", "z"), name="C"))
            au = constants.au  # 1 AU in meters
            re = 6378.1e3  # Earth radius in meters
            sign_x = re * pd.Series(
                [-1.0, 1.0, 1.0], index=pd.Index(("x", "y", "z"), name="C")
            )
            change_origin = pd.Series(
                [au, 0.0, 0.0], index=pd.Index(("x", "y", "z"), name="C")
            )
            pos = pos.multiply(sign_x, axis=1).add(change_origin, axis=1)
        elif frame == "HCI":
            # Origin is the Sun; convert solar radii to meters, distance is
            # then simply the vector magnitude.
            assert pos.columns.equals(pd.Index(("x", "y", "z"), name="C"))
            rs = 695.508e6  # Sun radius in meters
            pos = pos.multiply(rs)
        else:
            raise NotImplementedError("No test written for frame {}".format(frame))
        dist = pos.pow(2).sum(axis=1).pipe(np.sqrt)
        dist.name = "distance2sun"
        pdt.assert_series_equal(dist, ot.distance2sun)
class TestWind(TestBase, TestCase):
    # WIND: position-only data in the Earth-centered GSE frame.
    @classmethod
    def set_object_testing(cls):
        # Keep only the GSE columns and relabel them under a "pos" level.
        data = cls.data.xs("gse", axis=1, level="M")
        data = pd.concat({"pos": data}, axis=1, names=["M"], sort=True).sort_index(
            axis=1
        )
        cls.data = data
        sc = spacecraft.Spacecraft(data, "wind", "gse")
        cls.object_testing = sc

    @property
    def frame(self):
        return "GSE"

    @property
    def name(self):
        return "WIND"

    def test_position(self):
        super(TestWind, self).test_position()
        pos = self.data.xs("pos", axis=1, level="M")
        ot = self.object_testing
        pdt.assert_frame_equal(pos, ot.position.data)

    def test_velocity(self):
        # WIND data carries no velocity, so both accessors must raise KeyError.
        with self.assertRaises(KeyError):
            self.object_testing.velocity
        with self.assertRaises(KeyError):
            self.object_testing.v

    def test_carrington(self):
        # Likewise there are no Carrington coordinates for WIND.
        with self.assertRaises(KeyError):
            self.object_testing.carrington
class TestPSP(TestBase, TestCase):
    # PSP: position, velocity, and Carrington coordinates in the HCI frame.
    @classmethod
    def set_object_testing(cls):
        p = cls.data.xs("pos_HCI", axis=1, level="M")
        v = cls.data.xs("v_HCI", axis=1, level="M")
        c = cls.data.xs("Carr", axis=1, level="M")
        # Relabel the three measurement groups under v/pos/carr levels.
        data = pd.concat(
            {"v": v, "pos": p, "carr": c}, axis=1, names=["M"], sort=True
        ).sort_index(axis=1)
        sc = spacecraft.Spacecraft(data, "psp", "hci")
        cls.object_testing = sc
        cls.data = data

    @property
    def frame(self):
        return "HCI"

    @property
    def name(self):
        return "PSP"

    def test_position(self):
        super(TestPSP, self).test_position()
        pos = self.data.xs("pos", axis=1, level="M")
        ot = self.object_testing
        pdt.assert_frame_equal(pos, ot.position.data)

    def test_velocity(self):
        super(TestPSP, self).test_velocity()
        v = self.data.xs("v", axis=1, level="M")
        ot = self.object_testing
        pdt.assert_frame_equal(v, ot.velocity.data)

    def test_carrington(self):
        # Carrington coordinates are exposed as a plain (lat, lon) DataFrame.
        cols = pd.Index(("lat", "lon"), name="C")
        carr = self.data.xs("carr", axis=1, level="M")
        ot = self.object_testing
        self.assertIsInstance(ot.carrington, pd.DataFrame)
        pdt.assert_index_equal(cols, ot.carrington.columns)
        pdt.assert_frame_equal(carr, ot.carrington)
if __name__ == "__main__":
    # Run the suite directly; on a handled failure, print the traceback and
    # drop into pdb post-mortem so the failing frame can be inspected.
    # Just make recursion stacks smaller in Terminal.
    # Comment this line if it causes problems with other
    # tests or decrease the denominator.
    # sys.setrecursionlimit(sys.getrecursionlimit() // 10)
    try:
        unittest.main(verbosity=2)
    except (AssertionError, AttributeError, ValueError, TypeError) as e:  # noqa: 841
        # Deferred imports: only needed on the failure path.
        import sys
        import traceback as tb
        exc_info = sys.exc_info()
        tb.print_exception(*exc_info)
        pdb.post_mortem(exc_info[-1])
| StarcoderdataPython |
5136792 | import numpy as np
import matplotlib.pyplot as plt
import os
#################
# Load the tuple
#################
folder = './'
name = 'rbm_tuple'
extension = '.npy'
# The .npy file stores a pickled (weights, samples, vis_samples) tuple;
# numpy >= 1.16 refuses to unpickle object arrays unless allow_pickle is set.
# NOTE: renamed from `tuple`, which shadowed the builtin.
rbm_tuple = np.load(folder + name + extension, allow_pickle=True)

weights = np.array(rbm_tuple[0])  # weight history over training
samples = np.array(rbm_tuple[1])  # the actual sampling
vis_samples = np.array(rbm_tuple[2])  # mean activations that should be plotted

###################
# Extract relevant information
###################
# Choose how many filters to plot (must be a perfect square for the grid).
Nplot = 25
hidden_indexes = np.random.choice(500, Nplot, replace=False)

# Take the final weights and reshape each filter into a 28x28 MNIST image.
data = weights[-1, hidden_indexes, ...]
data = data.reshape((Nplot, 28, 28))  # 28 is the side length of MNIST digits

###################
# Plot the files
##################
to_plot = data

# Plot parameters
fontsize = 20
figsize = (16, 12)
axes_position = [0.1, 0.1, 0.8, 0.8]
remove_axis = True
title = 'Stress vs size of embedding space'
xlabel = 'Dimension'
ylabel = 'Stress'

# Plot format (previously 'hot' and 'jet' were assigned and immediately
# overwritten -- only the final 'binary' value ever took effect).
inter = 'nearest'
cmap = 'binary'

# Save directory
folder = './results/'
extensions = '.pdf'
name = 'final_filters1'
filename = folder + name + extensions

# Plot here: an nplots x nplots grid of filter images.
nplots = int(np.sqrt(Nplot))
gs = plt.GridSpec(nplots, nplots)
fig = plt.figure(figsize=figsize)

axes = []
for index1 in range(nplots):  # BUG FIX: xrange is Python-2-only
    for index2 in range(nplots):
        ax = fig.add_subplot(gs[index1, index2])
        index = index1 * nplots + index2
        im = ax.imshow(to_plot[index], interpolation=inter, cmap=cmap)
        axes.append(ax)

# Remove the axis ticks/labels from every subplot.
if remove_axis:
    for ax in axes:
        ax.get_xaxis().set_visible(False)
        ax.get_yaxis().set_visible(False)

plt.subplots_adjust(left=0.25, right=0.75, wspace=0.0, hspace=0)

# Save the figure, then crop the surrounding whitespace with pdfcrop.
plt.savefig(filename)
os.system('pdfcrop %s %s' % (filename, filename))
plt.show()
| StarcoderdataPython |
12800649 | import os
import shutil
import tkinter as tk
from tkinter import filedialog
from tkinter import messagebox
import cv2
import numpy as np
from PIL import Image
from PIL import ImageTk
class Button:
    """Controller for the reference-image half of the GUI.

    Owns the select/upload/show handlers and the radio-button gallery of
    uploaded images rendered into ``frame3``.
    """

    def __init__(self, root, frame3):
        self.root = root  # Tk root window
        self.frame3 = frame3  # frame holding the uploaded-image radio buttons
        self.radio_var = tk.IntVar()  # index of the currently selected image
        self.path_selected = 'none'  # last path chosen in the file dialog
        self.paths = []  # every uploaded image path, in upload order
        self.radio_handle = []  # radio-button widgets, one per upload
        self.check_value = []  # per-upload IntVar references
    def on_click_select_button(self, fname_label):
        """Open a file dialog and show the chosen basename in *fname_label*."""
        print('select button clicked')
        fileType = [('jpg/png file', ('*.jpg', '*.png'))]
        self.path_selected = filedialog.askopenfilename(filetypes=fileType)
        fname_label['text'] = os.path.basename(self.path_selected)
    def on_click_upload_button(self, path='None', image='None'):
        """Register *path* in the gallery; if *image* is given, write it to disk first.

        NOTE(review): the defaults are the strings 'None', not the None
        singleton; the comparison below relies on that.
        """
        print('upload button clicked')
        if path == 'None':
            path = self.path_selected
        else:
            cv2.imwrite(path, image)
        if path in self.paths:
            messagebox.showerror('Upload Error', '"'
                                 + path
                                 + '"' + ' is already uploaded.')
        else:
            self.paths.append(path)
            self.create_radio_button(path)
    def on_click_show_button(self, method):
        """Apply *method* to the selected image and preview it in a new window."""
        global file_name  # module-level record of the last shown file's basename
        print('showButton clicked')
        image = cv2.imread(self.paths[self.radio_var.get()])
        image = self.image_processing(image, method)
        file_name = os.path.basename(self.paths[self.radio_var.get()])
        name, ext = os.path.splitext(file_name)
        path = 'Data/images/' + name + '_' + method + ext
        # cv2.imwrite(path, image)
        self.open_image_window(path, image)
    def image_processing(self, image, method):
        """Return *image* transformed by *method* ('gray', 'binary', 'gblur', 'canny').

        Unknown methods print a warning and return the image unchanged.
        """
        if method == 'gray':
            image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
        elif method == 'binary':
            # Otsu threshold computed on the first channel only.
            ret2, image = cv2.threshold(image[:, :, 0], 0, 255, cv2.THRESH_OTSU)
        elif method == 'gblur':
            image = cv2.GaussianBlur(image, (9, 9), 0)
        elif method == 'canny':
            image = cv2.Canny(image, 100, 400)
        # elif method == 'Send_to_Directory':
        #     image = os.mkdir("Differece_images")
        else:
            print('method is wrong')
        return image
    def create_radio_button(self, path):
        """Add a 120px-tall thumbnail radio button for *path* to the gallery."""
        image = cv2.imread(path)
        # image = cv2.resize(image,(120,120))
        image = self.scale_to_height(image, 120)
        image_tk = self.to_tk_image(image)
        # NOTE(review): no reference to image_tk is kept on self; Tkinter
        # PhotoImages can be garbage-collected and render blank -- presumably
        # the mainloop() call below keeps the local alive. Verify.
        radio_button = tk.Radiobutton(self.frame3, image=image_tk,
                                      value=len(self.radio_handle),
                                      variable=self.radio_var)
        self.radio_var.set(0)
        self.radio_handle.append(radio_button)
        self.check_value.append(self.radio_var)
        radio_button.grid(row=(len(self.radio_handle) - 1) // 3,
                          column=(len(self.radio_handle) - 1) % 3)
        # NOTE(review): calling mainloop() inside an event handler nests
        # event loops; confirm this is intentional.
        self.root.mainloop()
    def open_image_window(self, path, image):
        """Show *image* (resized to <=300px tall) in a Toplevel with its own upload button."""
        if image.shape[0] > 300:
            image = self.scale_to_height(image, 300)
        img_win = tk.Toplevel(self.root)
        fname = os.path.basename(path)
        img_win.title(fname)
        img_canvas = tk.Canvas(img_win, width=image.shape[1],
                               height=image.shape[0])
        img_canvas.pack()
        image_tk = self.to_tk_image(image)
        img_canvas.create_image(0, 0, image=image_tk, anchor='nw')
        uploadButton2 = tk.Button(img_win, text='upload',
                                  command=lambda: self.on_click_upload_button(path, image))
        uploadButton2.pack()
        self.root.mainloop()
    def to_tk_image(self, image_bgr):
        """Convert an OpenCV BGR array to a Tkinter PhotoImage."""
        image_rgb = cv2.cvtColor(image_bgr, cv2.COLOR_BGR2RGB)
        image_pil = Image.fromarray(image_rgb)
        image_tk = ImageTk.PhotoImage(image_pil)
        return image_tk
    def scale_to_height(self, img, height):
        """Resize *img* to the given pixel *height*, preserving aspect ratio."""
        scale = height / img.shape[0]
        return cv2.resize(img, dsize=None, fx=scale, fy=scale)
class Sample_Button(Button):
    """Controller for the sample/template-image half of the GUI.

    NOTE(review): this is an almost line-for-line duplicate of ``Button``
    with a ``Sample_`` prefix, and it does not call ``Button.__init__`` --
    a candidate for refactoring, kept as-is here.
    """

    def __init__(self, root, frame6):
        self.root = root  # Tk root window
        self.frame6 = frame6  # frame holding the sample-image radio buttons
        self.Sample_radio_var = tk.IntVar()  # index of the selected sample
        self.Sample_path_selected = 'none'  # last path chosen in the dialog
        self.Sample_paths = []  # every uploaded sample path
        self.Sample_radio_handle = []  # radio-button widgets
        self.Sample_check_value = []  # per-upload IntVar references
    def on_click_select_button_Sample(self, Sample_fname_label):
        """Open a file dialog and show the chosen basename in *Sample_fname_label*."""
        print('select Sample button clicked')
        Sample_fileType = [('jpg/png file', ('*.jpg', '*.png'))]
        self.Sample_path_selected = filedialog.askopenfilename(filetypes=Sample_fileType)
        Sample_fname_label['text'] = os.path.basename(self.Sample_path_selected)
    def on_click_upload_button_Sample(self, Sample_path='None', Sample_image='None'):
        """Register *Sample_path*; if *Sample_image* is given, write it to disk first."""
        print('upload Sample button clicked')
        if Sample_path == 'None':
            Sample_path = self.Sample_path_selected
        else:
            cv2.imwrite(Sample_path, Sample_image)
        if Sample_path in self.Sample_paths:
            messagebox.showerror('Upload Error', '"'
                                 + Sample_path
                                 + '"' + ' Sample is already uploaded.')
        else:
            self.Sample_paths.append(Sample_path)
            self.create_Sample_radio_button(Sample_path)
    def on_click_show_button_Sample(self, Sample_method):
        """Apply *Sample_method* to the selected sample and preview it."""
        global Sample_file_name  # module-level record of the last shown sample
        print('show Sample Button clicked')
        Sample_image = cv2.imread(self.Sample_paths[self.Sample_radio_var.get()])
        Sample_image = self.Sample_image_processing(Sample_image, Sample_method)
        Sample_file_name = os.path.basename(self.Sample_paths[self.Sample_radio_var.get()])
        Sample_name, ext = os.path.splitext(Sample_file_name)
        Sample_path = 'Data/Sample_images/' + Sample_name + '_' + Sample_method + ext
        # cv2.imwrite(path, image)
        self.open_Sample_image_window(Sample_path, Sample_image)
    def Sample_image_processing(self, Sample_image, Sample_method):
        """Return the sample transformed by *Sample_method* (Sample_gray/.../Sample_canny)."""
        if Sample_method == 'Sample_gray':
            Sample_image = cv2.cvtColor(Sample_image, cv2.COLOR_RGB2GRAY)
        elif Sample_method == 'Sample_binary':
            # Otsu threshold computed on the first channel only.
            Sample_ret2, Sample_image = cv2.threshold(Sample_image[:, :, 0], 0, 255, cv2.THRESH_OTSU)
        elif Sample_method == 'Sample_gblur':
            Sample_image = cv2.GaussianBlur(Sample_image, (9, 9), 0)
        elif Sample_method == 'Sample_canny':
            Sample_image = cv2.Canny(Sample_image, 100, 400)
        # elif Sample_method == 'Send_to_Directory':
        #     Sample_image = os.mkdir("data/Sample_Differece_images")
        else:
            print('Sample method is wrong')
        return Sample_image
    def create_Sample_radio_button(self, Sample_path):
        """Add a 120px-tall thumbnail radio button for *Sample_path*."""
        Sample_image = cv2.imread(Sample_path)
        # image = cv2.resize(image,(120,120))
        Sample_image = self.scale_to_height_Sample(Sample_image, 120)
        Sample_image_tk = self.Sample_to_tk_image(Sample_image)
        # NOTE(review): as in Button.create_radio_button, no reference to the
        # PhotoImage is kept on self -- verify it is not garbage-collected.
        Sample_radio_button = tk.Radiobutton(self.frame6, image=Sample_image_tk,
                                             value=len(self.Sample_radio_handle),
                                             variable=self.Sample_radio_var)
        self.Sample_radio_var.set(0)
        self.Sample_radio_handle.append(Sample_radio_button)
        self.Sample_check_value.append(self.Sample_radio_var)
        Sample_radio_button.grid(row=(len(self.Sample_radio_handle) - 1) // 3,
                                 column=(len(self.Sample_radio_handle) - 1) % 3)
        self.root.mainloop()
    def open_Sample_image_window(self, Sample_path, Sample_image):
        """Show the sample (resized to <=300px tall) in a Toplevel window."""
        if Sample_image.shape[0] > 300:
            Sample_image = self.scale_to_height_Sample(Sample_image, 300)
        Sample_img_win = tk.Toplevel(self.root)
        Sample_fname = os.path.basename(Sample_path)
        Sample_img_win.title(Sample_fname)
        Sample_img_canvas = tk.Canvas(Sample_img_win, width=Sample_image.shape[1],
                                      height=Sample_image.shape[0])
        Sample_img_canvas.pack()
        Sample_image_tk = self.Sample_to_tk_image(Sample_image)
        Sample_img_canvas.create_image(0, 0, image=Sample_image_tk, anchor='nw')
        uploadSampleButton2 = tk.Button(Sample_img_win, text='Upload Sample',
                                        command=lambda: self.on_click_upload_button_Sample(Sample_path, Sample_image))
        uploadSampleButton2.pack()
        self.root.mainloop()
    def Sample_to_tk_image(self, Sample_image_bgr):
        """Convert an OpenCV BGR array to a Tkinter PhotoImage."""
        Sample_image_rgb = cv2.cvtColor(Sample_image_bgr, cv2.COLOR_BGR2RGB)
        Sample_image_pil = Image.fromarray(Sample_image_rgb)
        Sample_image_tk = ImageTk.PhotoImage(Sample_image_pil)
        return Sample_image_tk
    def scale_to_height_Sample(self, Sample_img, height):
        """Resize the sample to the given pixel *height*, preserving aspect ratio."""
        scale = height / Sample_img.shape[0]
        return cv2.resize(Sample_img, dsize=None, fx=scale, fy=scale)
class Difference_Button(Sample_Button, Button):
    """Template-matches every uploaded image against every sample image.

    Non-matching pairs are written to ``Data/Differece_images`` and every
    comparison is summarised in the label passed to
    :meth:`on_click_diff_per_button`.
    """

    def on_click_diff_per_button(self, diff_per):
        """Run template matching and append one summary line per pair to *diff_per*.

        :param diff_per: tk.Label whose ``text`` accumulates the match report.
        """
        # BUG FIX: the report label is now taken from the parameter (and
        # remembered for Reset) instead of the module-global
        # Difference_per_label the method used to reach for.
        self._report_label = diff_per
        threshold = 0.8
        resultsDirectory = 'Data/Differece_images'
        sourceDirectory = os.fsencode('Data/images')
        templateDirectory = os.fsencode('Data/Sample_images')
        detectedCount = 0
        for file in os.listdir(sourceDirectory):
            filename = os.fsdecode(file)
            if not (filename.endswith(".jpg") or filename.endswith(".png")):
                continue
            print(filename)
            img = cv2.imread('Data/images/' + filename)
            im_grayRef = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            for templateFile in os.listdir(templateDirectory):
                templateFilename = os.fsdecode(templateFile)
                print("_vs_" + templateFilename)
                # BUG FIX: previously re-tested `filename` here instead of the
                # template's own name, so non-image template files slipped through.
                if not (templateFilename.endswith(".jpg") or templateFilename.endswith(".png")):
                    continue
                Sample_image = cv2.imread('Data/Sample_images/' + templateFilename, 0)
                w, h = Sample_image.shape[::-1]
                score = cv2.matchTemplate(im_grayRef, Sample_image, cv2.TM_CCOEFF_NORMED)
                loc = np.where(score >= threshold)
                if len(loc[0]):
                    detectedCount = detectedCount + 1
                    for pt in zip(*loc[::-1]):
                        cv2.rectangle(img, pt, (pt[0] + w, pt[1] + h), (0, 0, 255), 2)
                    # BUG FIX: `score` is the full 2-D response map, so
                    # `score * 100 > 70` raised "truth value of an array is
                    # ambiguous"; compare the best match instead.
                    best = float(score.max()) * 100
                    if best > 70:
                        new_text = '{}_&_{} is Matched by {}'.format(filename, templateFilename, best)
                    else:
                        cv2.imwrite(resultsDirectory + '/diff_per_' + filename + '.jpg', img)
                        new_text = '{}_&_{} Not Matched by {}'.format(filename, templateFilename, best)
                    diff_per["text"] += "\n" + new_text

    def Reset(self):
        """Clear the match report.

        BUG FIX: the main script wires a Reset button to this method, which
        previously did not exist -- clicking it raised AttributeError.
        """
        label = getattr(self, '_report_label', None)
        if label is not None:
            label["text"] = '-----not selected-----'
if __name__ == '__main__':
    # Build the three-panel image-comparison GUI: a reference-image panel,
    # a sample/template panel, and a difference-report panel.
    root = tk.Tk()
    root.title('Image GUI')
    root.geometry('1280x960')
    #######################################Reference#########################################################
    os.makedirs('Data/images', exist_ok=True)
    pw_left = tk.Frame(root, relief='ridge', borderwidth=6)
    pw_left.pack(side='left', anchor='nw')
    pw_right = tk.Frame(root, relief='ridge', borderwidth=6)
    pw_right.pack(side='top', anchor='e')
    frame1 = tk.Frame(pw_left, bd=2, relief="ridge")
    frame1.pack(fill=tk.X)
    frame2 = tk.LabelFrame(pw_left, bd=2, relief="ridge", text='options')
    frame2.pack(fill=tk.X)
    frame3 = tk.LabelFrame(pw_right, bd=2, text='Uploaded images')
    frame3.pack(fill=tk.Y)
    # Controller for the reference-image panel.
    button = Button(root, frame3)
    label = tk.Label(frame1, text='File:')
    label.pack(fill=tk.X)
    file_name_label = tk.Label(frame1, text='-----not selected-----', width=50, bg='white')
    file_name_label.pack(fill=tk.X)
    select_button = tk.Button(frame1, text='select', command=lambda: button.on_click_select_button(file_name_label))
    select_button.pack(side='left', padx=5, pady=5)
    uploadButton = tk.Button(frame1, text='Upload',
                             command=lambda: button.on_click_upload_button())
    uploadButton.pack(side='left', padx=5, pady=5)
    # gray button
    grayscale_label = tk.Label(frame2, text='gray scale')
    grayscale_label.grid(row=0, column=0)
    gray_show = tk.Button(frame2, text='show',
                          command=lambda: button.on_click_show_button('gray'))
    gray_show.grid(row=0, column=1)
    # binary
    binary_label = tk.Label(frame2, text='binary')
    binary_label.grid(row=1, column=0)
    binary_show = tk.Button(frame2, text='show',
                            command=lambda: button.on_click_show_button('binary'))
    binary_show.grid(row=1, column=1)
    # gaussian blur
    gblur_label = tk.Label(frame2, text='Gaussian blur')
    gblur_label.grid(row=2, column=0)
    gblur_show = tk.Button(frame2, text='show',
                           command=lambda: button.on_click_show_button('gblur'))
    gblur_show.grid(row=2, column=1)
    # canny edge
    canny_label = tk.Label(frame2, text='canny edge')
    canny_label.grid(row=3, column=0)
    canny_show = tk.Button(frame2, text='show',
                           command=lambda: button.on_click_show_button('canny'))
    canny_show.grid(row=3, column=1)
    ######################################################Sample####################################################3333333#
    os.makedirs('Data/Sample_images', exist_ok=True)
    pw_Sample_right = tk.Frame(root, relief='ridge', borderwidth=6)
    pw_Sample_right.pack(side='top', anchor='e')
    frame4 = tk.Frame(pw_left, bd=2, relief="ridge")
    frame4.pack(fill=tk.X)
    frame5 = tk.LabelFrame(pw_left, bd=2, relief="ridge", text='options')
    frame5.pack(fill=tk.X)
    frame6 = tk.LabelFrame(pw_Sample_right, bd=2, text='Uploaded Sample images')
    frame6.pack(fill=tk.Y)
    # Controller for the sample/template panel.
    Sample_button = Sample_Button(root, frame6)
    Sample_label = tk.Label(frame4, text='Sample File:')
    Sample_label.pack(fill=tk.X)
    Sample_file_name_label = tk.Label(frame4, text='-----not selected-----', width=50, bg='white')
    Sample_file_name_label.pack(fill=tk.X)
    Sample_select_button = tk.Button(frame4, text='select',
                                     command=lambda: Sample_button.on_click_select_button_Sample(
                                         Sample_file_name_label))
    Sample_select_button.pack(side='left', padx=5, pady=5)
    Sample_uploadButton = tk.Button(frame4, text='Upload',
                                    command=lambda: Sample_button.on_click_upload_button_Sample())
    Sample_uploadButton.pack(side='left', padx=5, pady=5)
    Sample_grayscale_label = tk.Label(frame5, text='gray scale')
    Sample_grayscale_label.grid(row=0, column=0)
    Sample_gray_show = tk.Button(frame5, text='show',
                                 command=lambda: Sample_button.on_click_show_button_Sample('Sample_gray'))
    Sample_gray_show.grid(row=0, column=1)
    # binary
    Sample_binary_label = tk.Label(frame5, text='binary')
    Sample_binary_label.grid(row=1, column=0)
    Sample_binary_show = tk.Button(frame5, text='show',
                                   command=lambda: Sample_button.on_click_show_button_Sample('Sample_binary'))
    Sample_binary_show.grid(row=1, column=1)
    # gaussian blur
    Sample_gblur_label = tk.Label(frame5, text='Gaussian blur')
    Sample_gblur_label.grid(row=2, column=0)
    Sample_gblur_show = tk.Button(frame5, text='show',
                                  command=lambda: Sample_button.on_click_show_button_Sample('Sample_gblur'))
    Sample_gblur_show.grid(row=2, column=1)
    # canny edge
    Sample_canny_label = tk.Label(frame5, text='canny edge')
    Sample_canny_label.grid(row=3, column=0)
    Sample_canny_show = tk.Button(frame5, text='show',
                                  command=lambda: Sample_button.on_click_show_button_Sample('Sample_canny'))
    Sample_canny_show.grid(row=3, column=1)
    #############################################merged########################################################
    os.makedirs('Data/Differece_images', exist_ok=True)
    frame7 = tk.Frame(pw_left, bd=2, relief="ridge")
    frame7.pack()
    # Controller that compares uploaded images against the samples.
    difference_button = Difference_Button(root, frame7)
    Difference_per_label = tk.Label(frame7, text='-----not selected-----', width=50, bg='white', height='12')
    Difference_per_label.pack(fill=tk.X)
    # Diff_label = tk.Label(frame7, textvariable= 'diffvalue', width=40, bg='white', height = '5')
    # Diff_label.pack(fill=tk.X)
    Difference_button = tk.Button(frame7, text='Difference',
                                  command=lambda: difference_button.on_click_diff_per_button(Difference_per_label))
    Difference_button.pack(side='bottom', padx=5, pady=5)
    Reset_button = tk.Button(frame7, text='Reset',
                             command=lambda: difference_button.Reset())
    Reset_button.pack(side='bottom', padx=5, pady=5)
    root.mainloop()
1635856 | import conans
class ProjectConan(conans.ConanFile):
    """Conan recipe: depends on GoogleTest and generates cmake/virtualenv files."""

    generators = "cmake", "virtualenv"
    # BUG FIX: these were parenthesised single strings, not tuples -- adding
    # a second entry without inserting a comma would have silently
    # concatenated the strings into one invalid reference. Trailing commas
    # make them real one-element tuples (Conan accepts both forms).
    requires = (
        "gtest/1.10.0",
    )
    default_options = (
        "gtest:shared=False",
    )
| StarcoderdataPython |
5035207 | import os
import cv2
import numpy as np
from scipy.ndimage.morphology import binary_dilation
from flask import Flask, request, redirect, url_for, send_from_directory
from werkzeug import secure_filename
UPLOAD_FOLDER = '/Users/charleslai/Documents/Programming/other-projects/waldoBot/server/imgs'
UPLOAD_FOLDER = '/Users/dcadden/dev/hackny/python/waldoBot/server/imgs'
ALLOWED_EXTENSIONS = set(['png', 'jpg'])
app = Flask(__name__, static_url_path='')
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
def allowed_file(filename):
    """Return True if *filename* carries an allowed image extension.

    BUG FIX: the comparison is now case-insensitive, so uploads such as
    'PHOTO.JPG' are no longer rejected (ALLOWED_EXTENSIONS is lowercase).
    """
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
@app.route("/", methods=['POST'])
def hello():
    """Accept an uploaded image, dim everything except probable Waldo
    locations, and return the URL path of the processed result image.
    """
    try:
        if request.method == "POST":
            file = request.files['file']
            if file and allowed_file(file.filename):
                print("request has both a file, and is secure")
                filename = secure_filename(file.filename)
                print("file is secured")
                print("saving file to " + os.path.join(app.config['UPLOAD_FOLDER'], filename))
                file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
                print("File has been saved")
                print("Made it past the request with file=" + filename)
                # Read in file
                waldo = cv2.imread('imgs/' + filename)
                # Resize
                waldo = cv2.resize(waldo, None, fx=.2, fy=.2, interpolation=cv2.INTER_AREA)
                # Separate color channels
                waldo_float = waldo.astype(float)
                # Gaussian Blur to reduce noise
                # waldo_float = cv2.GaussianBlur(waldo_float, (3,3), 0)
                b, g, r = cv2.split(waldo_float)
                w = waldo_float.mean(2)
                # Create a convolution kernel representing a red and white shirt
                pattern = np.ones((24, 16), float)
                for i in range(2):  # BUG FIX: xrange is Python-2-only
                    pattern[i::4] = -1
                # Convolve with red less white to find Waldo's shirt
                v = cv2.filter2D(r - w, -1, pattern)
                # Create a mask to bring out probable locations of Waldo
                mask = (v >= v.max() - (v.max() / 3))
                mask = binary_dilation(mask, np.ones((48, 24)))
                # BUG FIX: in-place `uint8 -= float` raises a casting error on
                # modern numpy; compute out-of-place and cast back for imwrite.
                waldo = (waldo - .8 * waldo * ~mask[:, :, None]).astype(np.uint8)
                # Overwrite file with resulting file
                cv2.imwrite('result/' + filename, waldo)
                # Return url handle of new image
                return "result/" + filename
            else:
                return "Bad filename"
        else:
            return "Must use sick bars."
    except Exception as e:  # BUG FIX: `except Exception, e` is Python-2-only syntax
        print(e)  # BUG FIX: Python-2 print statement -> function call
        # Flask cannot serialize an exception object; return its message instead.
        return str(e)
@app.route('/result/<path:path>')
def send_js(path):
    """Serve a processed result image out of the local ``result`` directory."""
    return send_from_directory('result', path)
if __name__ == "__main__":
    # Run Flask's built-in development server (not suitable for production).
    app.run()
| StarcoderdataPython |
4903321 | <reponame>SimonSuster/allennlp
# pylint: disable=no-self-use,invalid-name,protected-access
from typing import List
import pytest
from allennlp.common.testing import AllenNlpTestCase
from allennlp.semparse import DomainLanguage, ExecutionError, ParsingError, predicate
class Arithmetic(DomainLanguage):
    """A toy integer-arithmetic language used to exercise ``DomainLanguage``.

    Every ``@predicate`` method becomes a grammar production; the start type
    is ``int`` and only the enumerated constants are allowed.
    """

    def __init__(self):
        super().__init__(start_types={int}, allowed_constants={
                # We unfortunately have to explicitly enumerate all allowed constants in the
                # grammar. Because we'll be inducing a grammar for this language for use with a
                # semantic parser, we need the grammar to be finite, which means we can't allow
                # arbitrary constants (you can't parameterize an infinite categorical
                # distribution). So our Arithmetic language will have to only operate on simple
                # numbers.
                '1': 1,
                '2': 2,
                '3': 3,
                '4': 4,
                '5': 5,
                '6': 6,
                '7': 7,
                '8': 8,
                '9': 9,
                '10': 10,
                '20': 20,
                '-5': -5,
                '-2': -2,
                })
    @predicate
    def add(self, num1: int, num2: int) -> int:
        """Return num1 + num2."""
        return num1 + num2
    @predicate
    def sum(self, numbers: List[int]) -> int:
        """Sum a list of ints (delegates to the builtin ``sum``; the method
        name does not shadow the builtin inside the function body)."""
        return sum(numbers)
    # Unfortunately, to make lists, we need to have some function with a fixed number of list
    # elements that we can predict. No variable number of arguments - that gives us an infinite
    # number of production rules in our grammar.
    @predicate
    def list1(self, num1: int) -> List[int]:
        """Build a one-element list."""
        return [num1]
    @predicate
    def list2(self, num1: int, num2: int) -> List[int]:
        """Build a two-element list."""
        return [num1, num2]
    @predicate
    def list3(self, num1: int, num2: int, num3: int) -> List[int]:
        """Build a three-element list."""
        return [num1, num2, num3]
    @predicate
    def list4(self, num1: int, num2: int, num3: int, num4: int) -> List[int]:
        """Build a four-element list."""
        return [num1, num2, num3, num4]
    @predicate
    def subtract(self, num1: int, num2: int) -> int:
        """Return num1 - num2."""
        return num1 - num2
    @predicate
    def power(self, num1: int, num2: int) -> int:
        """Return num1 ** num2."""
        return num1 ** num2
    @predicate
    def multiply(self, num1: int, num2: int) -> int:
        """Return num1 * num2."""
        return num1 * num2
    @predicate
    def divide(self, num1: int, num2: int) -> int:
        """Return floor division num1 // num2."""
        return num1 // num2
    @predicate
    def halve(self, num1: int) -> int:
        """Return num1 // 2 (floor division)."""
        return num1 // 2
    @predicate
    def three(self) -> int:
        """Zero-argument predicate returning the constant 3."""
        return 3
    # Deliberately undecorated: the tests verify that plain methods are NOT
    # treated as predicates by the language.
    def not_a_predicate(self) -> int:
        return 5
def check_productions_match(actual_rules: List[str], expected_right_sides: List[str]):
    """Assert that the right-hand sides of *actual_rules* match *expected_right_sides*.

    Comparison is order-insensitive (set equality). Each rule is expected in
    the form ``"lhs -> rhs"``.
    """
    observed = {rule.split(' -> ')[1] for rule in actual_rules}
    assert observed == set(expected_right_sides)
class DomainLanguageTest(AllenNlpTestCase):
    """End-to-end tests of ``DomainLanguage`` via the toy ``Arithmetic`` language:
    execution, grammar induction, and logical-form <-> action-sequence round trips.
    """

    def setUp(self):
        super().setUp()
        self.language = Arithmetic()
    def test_constant_logical_form(self):
        """Bare constants execute to their values; unknown constants raise."""
        assert self.language.execute('5') == 5
        assert self.language.execute('2') == 2
        assert self.language.execute('20') == 20
        assert self.language.execute('3') == 3
        with pytest.raises(ExecutionError, match='Unrecognized constant'):
            self.language.execute('"add"')
    def test_error_message_with_wrong_arguments(self):
        """Calling a function with missing arguments raises ExecutionError."""
        with pytest.raises(ExecutionError):
            self.language.execute('add')
        with pytest.raises(ExecutionError):
            self.language.execute('(add)')
        with pytest.raises(ExecutionError):
            self.language.execute('(add 2)')
    def test_not_all_functions_are_predicates(self):
        """Undecorated methods are not predicates and read as unknown constants."""
        # This should not execute to 5, but instead be treated as a constant.
        with pytest.raises(ExecutionError, match='Unrecognized constant'):
            self.language.execute('not_a_predicate')
    def test_basic_logical_form(self):
        assert self.language.execute('three') == 3
        assert self.language.execute('(add 2 3)') == 5
        assert self.language.execute('(subtract 2 3)') == -1
        assert self.language.execute('(halve 20)') == 10
    def test_list_types(self):
        """List-building predicates compose with sum, including nested calls."""
        assert self.language.execute('(sum (list1 2))') == 2
        assert self.language.execute('(sum (list2 2 3))') == 5
        assert self.language.execute('(sum (list4 2 10 -2 -5))') == 5
        assert self.language.execute('(sum (list4 2 three (halve 4) (add -5 -2)))') == 0
    def test_nested_logical_form(self):
        assert self.language.execute('(add 2 (subtract 4 2))') == 4
        assert self.language.execute('(halve (multiply (divide 9 3) (power 2 3)))') == 12
    def test_get_valid_actions(self):
        """The induced grammar contains exactly the expected non-terminals and productions."""
        valid_actions = self.language.get_valid_actions()
        assert set(valid_actions.keys()) == {
                '@start@',
                'int',
                'List[int]',
                '<int:int>',
                '<int,int:int>',
                '<List[int]:int>',
                '<int:List[int]>',
                '<int,int:List[int]>',
                '<int,int,int:List[int]>',
                '<int,int,int,int:List[int]>',
                }
        check_productions_match(valid_actions['@start@'],
                                ['int'])
        check_productions_match(valid_actions['int'],
                                ['[<int,int:int>, int, int]', '[<int:int>, int]',
                                 '[<List[int]:int>, List[int]]', 'three', '1',
                                 '2', '3', '4', '5', '6', '7', '8', '9', '10', '20', '-5', '-2'])
        check_productions_match(valid_actions['List[int]'],
                                ['[<int:List[int]>, int]',
                                 '[<int,int:List[int]>, int, int]',
                                 '[<int,int,int:List[int]>, int, int, int]',
                                 '[<int,int,int,int:List[int]>, int, int, int, int]'])
        check_productions_match(valid_actions['<int:int>'],
                                ['halve'])
        check_productions_match(valid_actions['<int,int:int>'],
                                ['add', 'subtract', 'multiply', 'divide', 'power'])
        check_productions_match(valid_actions['<List[int]:int>'],
                                ['sum'])
        check_productions_match(valid_actions['<int:List[int]>'],
                                ['list1'])
        check_productions_match(valid_actions['<int,int:List[int]>'],
                                ['list2'])
        check_productions_match(valid_actions['<int,int,int:List[int]>'],
                                ['list3'])
        check_productions_match(valid_actions['<int,int,int,int:List[int]>'],
                                ['list4'])
    def test_logical_form_to_action_sequence(self):
        """Logical forms compile to top-down, left-to-right production sequences."""
        action_sequence = self.language.logical_form_to_action_sequence('(add 2 3)')
        assert action_sequence == ['@start@ -> int',
                                   'int -> [<int,int:int>, int, int]',
                                   '<int,int:int> -> add',
                                   'int -> 2',
                                   'int -> 3']
        action_sequence = self.language.logical_form_to_action_sequence('(halve (subtract 8 three))')
        assert action_sequence == ['@start@ -> int',
                                   'int -> [<int:int>, int]',
                                   '<int:int> -> halve',
                                   'int -> [<int,int:int>, int, int]',
                                   '<int,int:int> -> subtract',
                                   'int -> 8',
                                   'int -> three']
        logical_form = '(halve (multiply (divide 9 three) (power 2 3)))'
        action_sequence = self.language.logical_form_to_action_sequence(logical_form)
        assert action_sequence == ['@start@ -> int',
                                   'int -> [<int:int>, int]',
                                   '<int:int> -> halve',
                                   'int -> [<int,int:int>, int, int]',
                                   '<int,int:int> -> multiply',
                                   'int -> [<int,int:int>, int, int]',
                                   '<int,int:int> -> divide',
                                   'int -> 9',
                                   'int -> three',
                                   'int -> [<int,int:int>, int, int]',
                                   '<int,int:int> -> power',
                                   'int -> 2',
                                   'int -> 3']
    def test_action_sequence_to_logical_form(self):
        """Compiling then decompiling an action sequence round-trips exactly."""
        logical_form = '(add 2 3)'
        action_sequence = self.language.logical_form_to_action_sequence(logical_form)
        recovered_logical_form = self.language.action_sequence_to_logical_form(action_sequence)
        assert recovered_logical_form == logical_form
        logical_form = '(halve (multiply (divide 9 three) (power 2 3)))'
        action_sequence = self.language.logical_form_to_action_sequence(logical_form)
        recovered_logical_form = self.language.action_sequence_to_logical_form(action_sequence)
        assert recovered_logical_form == logical_form
    def test_logical_form_parsing_fails_on_bad_inputs(self):
        # We don't catch all type inconsistencies in the code, but we _do_ catch some. If we add
        # more that we catch, this is a good place to test for them.
        with pytest.raises(ParsingError, match='Wrong number of arguments'):
            self.language.logical_form_to_action_sequence('(halve 2 3)')
        with pytest.raises(ParsingError, match='Wrong number of arguments'):
            self.language.logical_form_to_action_sequence('(add 3)')
        with pytest.raises(ParsingError, match='unallowed start type'):
            self.language.logical_form_to_action_sequence('add')
        with pytest.raises(ParsingError, match='Zero-arg function or constant'):
            self.language.logical_form_to_action_sequence('(sum (3 2))')
        with pytest.raises(ParsingError, match='did not have expected type'):
            self.language.logical_form_to_action_sequence('(sum (add 2 3))')
| StarcoderdataPython |
175743 | """
/llrws/__init__.py
Concerns all things LLR Web Suite.
"""
from flask import Flask
from flask_cors import CORS
from flask_restful import Api
from llrws.config import Config
from llrws.api.routes import initialize_routes
# Module-level WSGI application object; configured and wired up by create_app().
application = Flask(__name__)
def create_app(config_class=Config):
    """Configure and return the module-level Flask application.

    Blueprint imports stay inside the function body so that importing this
    package does not trigger circular imports at load time.
    """
    application.config.from_object(config_class)
    CORS(application)

    # Core page and error-handler blueprints.
    from llrws.main.routes import main
    from llrws.errors.handlers import errors
    for blueprint in (main, errors):
        application.register_blueprint(blueprint)

    # Mount the RESTful API under /api.
    from llrws.api.routes import api_bp
    initialize_routes(Api(api_bp))
    application.register_blueprint(api_bp, url_prefix="/api")
    return application
| StarcoderdataPython |
5014440 | <gh_stars>1-10
"""Eze's Languages module"""
from __future__ import annotations
import os
import pathlib
import re
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Callable
from pydash import py_
from eze.core.config import EzeConfig
from eze.core.enums import (
SourceType,
LICENSE_DENYLIST_CONFIG,
LICENSE_ALLOWLIST_CONFIG,
LICENSE_CHECK_CONFIG,
)
from eze.core.tool import ToolManager
from eze.plugins.tools.semgrep import SemGrepTool
from eze.plugins.tools.trufflehog import TruffleHogTool
from eze.utils.io import write_text
from eze.utils.print import pretty_print_table
from eze.utils.config import extract_embedded_run_type
from eze.utils.error import EzeConfigError
from eze.utils.log import log, log_debug, log_error
class LanguageRunnerMeta(ABC):
    """Base class for all language implementations.

    3 Stages of Eze test
    DISCOVER : RUN
    DISCOVER: USE EXISTING EZERC OR CREATE NEW
    - find
    - create_local_ezerc_config
    RUN:
    - run pre-tool language helper
    - run tools"""

    # Class-level metadata that concrete language runners override.
    LANGUAGE_NAME: str = "AbstractLanguage"
    SOURCE_TYPE: SourceType = None
    SHORT_DESCRIPTION: str = ""
    INSTALL_HELP: str = ""
    MORE_INFO: str = ""
    # Regex strings (by pattern name) used to discover this language's
    # files and folders; compiled into a LanguageDiscoveryVO in __init__.
    FILE_PATTERNS: dict = {}
    FOLDER_PATTERNS: dict = {}
    def __init__(self, config: dict = None):
        """constructor

        :param config: optional runner configuration; defaults to an empty dict.
        """
        self.discovery: LanguageDiscoveryVO = LanguageDiscoveryVO()
        self.discovery.language_name = self.LANGUAGE_NAME
        self.discovery.set_patterns(self.FILE_PATTERNS, self.FOLDER_PATTERNS)
        if config is None:
            config = {}
        self.config = config
    @classmethod
    def language_name(cls) -> str:
        """Returns the language name"""
        return cls.LANGUAGE_NAME
    @classmethod
    def source_type(cls) -> str:
        """Returns the sources supported by tool"""
        return cls.SOURCE_TYPE
    @classmethod
    def short_description(cls) -> str:
        """Returns short description of tool"""
        return cls.SHORT_DESCRIPTION
    @classmethod
    def more_info(cls) -> str:
        """Returns more info about tool"""
        return cls.MORE_INFO
    @classmethod
    def install_help(cls) -> str:
        """Returns self help instructions how to install the tool"""
        return cls.INSTALL_HELP
    @staticmethod
    @abstractmethod
    def check_installed() -> str:
        """Method for detecting if tool installed and ready to run scan, returns version installed"""
    @abstractmethod
    async def pre_test(self) -> list:
        """Method for running a pre test builds on project"""
        # AB#662: implement auto builds
    @abstractmethod
    def create_ezerc(self) -> dict:
        """Method for building a dynamic ezerc.toml fragment"""
class DefaultRunner(LanguageRunnerMeta):
    """Fallback language runner used when no concrete language is detected.

    Emits a generic SECRET + SAST profile (SemGrep and TruffleHog) since SCA
    and SBOM tooling cannot be chosen without knowing the language.
    """

    LANGUAGE_NAME: str = "default"
    SOURCE_TYPE: SourceType = SourceType.ALL
    SHORT_DESCRIPTION: str = "default scan profile"
    MORE_INFO: str = """-"""
    # Intentionally empty: the default runner never matches by pattern, it is
    # selected only when nothing else is discovered.
    FILE_PATTERNS: dict = {}
    FOLDER_PATTERNS: dict = {}
    @staticmethod
    def check_installed() -> str:
        """Method for detecting if tool installed and ready to run scan, returns version installed"""
        return "inbuilt"
    async def pre_test(self) -> list:
        """Method for running a pre test builds on project.

        NOTE(review): currently a no-op that implicitly returns None despite
        the ``-> list`` annotation -- confirm callers tolerate this.
        """
        # AB#662: implement auto builds
    def create_ezerc(self) -> dict:
        """Method for building a dynamic ezerc.toml fragment"""
        # The "fragment" value is written into the user's .ezerc.toml verbatim.
        fragment = {
            "fragment": f"""
[{self.LANGUAGE_NAME}]
# Eze was unable to find what language the codebase is written in
#
# defaulted to generic SECRET and SAST scanning
# for SCA and SBOM tooling please look at what is available in eze
# and manually configure
#
# eze tools list -t SBOM --include-source-type
# eze tools list -t SCA --include-source-type
#
tools = ['{SemGrepTool.TOOL_NAME}', '{TruffleHogTool.TOOL_NAME}']
[{self.LANGUAGE_NAME}.{SemGrepTool.TOOL_NAME}]
REPORT_FILE = "reports/semgrep-report.json"
CONFIGS = [
    "p/ci"
]
[{self.LANGUAGE_NAME}.{TruffleHogTool.TOOL_NAME}]
REPORT_FILE = "reports/truffleHog-report.json"
SOURCE = "."
IGNORED_FILES = [
    "node_modules/",
    "target/",
    "build/",
    "dist/",
    ".gradle",
    ".aws",
    ".idea",
    ".pytest_cache"
]
""",
            "message": """Eze was unable to find what language the codebase is written in
defaulted to generic SECRET and SAST scanning
for SCA and SBOM tooling please look at what is available in eze
and manually configure
eze tools list -t SBOM --include-source-type
eze tools list -t SCA --include-source-type""",
        }
        return fragment
class LanguageDiscoveryVO:
    """Language Discovery object.

    Records which files and folders in a codebase matched a language's
    configured regex patterns.
    """

    def __init__(self):
        """constructor"""
        self.is_discovered: bool = False
        # NOTE: annotation only — the attribute is not created until assigned by a caller
        self.language_name: str
        self.language_config: dict = {}
        self.folders: dict = {}
        self.files: dict = {}
        self.folder_patterns: dict = {}
        self.file_patterns: dict = {}

    def set_patterns(self, file_patterns: dict, folder_patterns: dict):
        """Compile and store the file/folder regex patterns to discover.

        :raises EzeConfigError: when a supplied pattern is not a valid regex
        """
        current_regex = None
        try:
            for file_type, current_regex in file_patterns.items():
                self.file_patterns[file_type] = re.compile(current_regex)
                self.files[file_type] = []
            for folder_type, current_regex in folder_patterns.items():
                self.folder_patterns[folder_type] = re.compile(current_regex)
                self.folders[folder_type] = []
        except re.error as err:
            # narrowed from a bare "except:" so unrelated programming errors aren't masked
            raise EzeConfigError(f"Unable to parse regex '{current_regex}'") from err

    def ingest_discovered_file(self, file_name: str) -> None:
        """Record file_name under every file pattern it matches."""
        for file_type, pattern in self.file_patterns.items():
            if pattern.match(file_name):
                self.is_discovered = True
                self.files[file_type].append(file_name)

    def ingest_discovered_folder(self, folder_name: str) -> None:
        """Record folder_name under every folder pattern it matches."""
        # BUG FIX: original iterated self.file_patterns here, so folder matches
        # were never recorded (or raised KeyError when a file pattern matched a
        # folder name, since self.folders only has folder-pattern keys).
        for folder_type, pattern in self.folder_patterns.items():
            if pattern.match(folder_name):
                self.is_discovered = True
                self.folders[folder_type].append(folder_name)
class LanguageManager:
    """Singleton Class for accessing all available Languages.

    Built from plugins that expose a ``get_languages()`` callable returning
    ``{name: LanguageRunnerMeta subclass}`` mappings.
    """

    _instance = None

    @staticmethod
    def get_instance() -> LanguageManager:
        """Get previously set languages config (logs an error and returns None if unset)."""
        if LanguageManager._instance is None:
            log_error("LanguageManager unable to get config before it is setup")
        return LanguageManager._instance

    @staticmethod
    def set_instance(plugins: dict) -> LanguageManager:
        """Set the global languages config from the given plugins."""
        LanguageManager._instance = LanguageManager(plugins)
        return LanguageManager._instance

    @staticmethod
    def reset_instance():
        """Reset the global languages config."""
        LanguageManager._instance = None

    def __init__(self, plugins: dict = None):
        """Build the language registry from plugins exposing a get_languages() callable."""
        if plugins is None:
            plugins = {}
        #
        self.languages = {}
        for plugin_name in plugins:
            plugin = plugins[plugin_name]
            if not hasattr(plugin, "get_languages") or not isinstance(plugin.get_languages, Callable):
                log_debug(f"'get_languages' function missing from plugin '{plugin_name}'")
                continue
            plugin_languages = plugin.get_languages()
            self._add_languages(plugin_languages)

    def _discover(self, root_path: str = None) -> dict:
        """Discover languages in codebase.

        Walks the tree, feeding every path to each registered runner's
        discovery object, and returns only the runners that matched.

        :return: dict of language_key -> discovered LanguageRunnerMeta instance
        """
        ignored_directories = [
            ".git",
            ".idea",
            "node_modules",
            ".gradle",
            "~",
            "__pycache__",
            ".pytest_cache",
            "target",
        ]
        if not root_path:
            root_path = Path.cwd()
        walk_dir = os.path.abspath(root_path)
        # +1 strips the path separator following the root prefix
        root_prefix = len(str(Path(root_path))) + 1
        tmp_languages = {}
        for language_key in self.languages:
            language: LanguageRunnerMeta = self.languages[language_key]()
            tmp_languages[language_key] = language
        for root, subdirs, files in os.walk(walk_dir):
            # Prune ignored directories in place so os.walk skips descending into them
            for ignored_directory in ignored_directories:
                if ignored_directory in subdirs:
                    subdirs.remove(ignored_directory)
            for subdir in subdirs:
                folder_path = os.path.join(root, subdir)[root_prefix:]
                for language_key in self.languages:
                    language: LanguageRunnerMeta = tmp_languages[language_key]
                    language.discovery.ingest_discovered_folder(folder_path)
            for filename in files:
                file_path = os.path.join(root, filename)[root_prefix:]
                for language_key in self.languages:
                    language: LanguageRunnerMeta = tmp_languages[language_key]
                    language.discovery.ingest_discovered_file(file_path)
        languages = {}
        for language_key in tmp_languages:
            language: LanguageRunnerMeta = tmp_languages[language_key]
            if language.discovery.is_discovered:
                languages[language_key] = language
        # Default to DefaultRunner when nothing was discovered.
        # BUG FIX: original tested `py_.values(languages) == 0`, which compares a
        # list to the int 0 and is always False, so the fallback never triggered.
        if not languages:
            languages[DefaultRunner.LANGUAGE_NAME] = DefaultRunner()
        return languages

    def create_local_ezerc_config(self, root_path: str = None) -> bool:
        """Create new local ezerc file from the discovered languages.

        :return: True on success (the config file was written)
        """
        languages: dict = self._discover(root_path)
        language_list = []
        eze_rc = f"""# Ezerc auto generated
# ===================================
# GLOBAL CONFIG
# ===================================
[global]
# LICENSE_CHECK, available modes:
# - PROPRIETARY : for commercial projects, check for non-commercial, strong-copyleft, and source-available licenses
# - PERMISSIVE : for permissive open source projects (aka MIT, LGPL), check for strong-copyleft licenses
# - OPENSOURCE : for copyleft open source projects (aka GPL), check for non-OSI or FsfLibre certified licenses
# - OFF : no license checks
# All modes will also warn on "unprofessional", "deprecated", and "permissive with conditions" licenses
LICENSE_CHECK = "{LICENSE_CHECK_CONFIG["default"]}"
# LICENSE_ALLOWLIST, {LICENSE_ALLOWLIST_CONFIG["help_text"]}
LICENSE_ALLOWLIST = []
# LICENSE_DENYLIST, {LICENSE_DENYLIST_CONFIG["help_text"]}
LICENSE_DENYLIST = []
# ===================================
# TOOL CONFIG
# ===================================
"""
        for language_key in languages:
            language: LanguageRunnerMeta = languages[language_key]
            output = language.create_ezerc()
            log(f"Found Language '{language_key}':")
            log(output["message"])
            log("\n")
            eze_rc += output["fragment"]
            eze_rc += "\n\n"
            language_list.append('"' + language_key + '"')
        eze_rc += f"""# ===================================
# REPORTER CONFIG
# ===================================
[json]
# Optional JSON_FILE
# By default set to eze_report.json
# REPORT_FILE: XXX-XXX
[bom]
# Optional JSON_FILE
# By default set to eze_report.json
# REPORT_FILE: XXX-XXX
[junit]
# Optional XML_FILE
# By default set to eze_junit_report.xml
# REPORT_FILE: XXX-XXX
[quality]
# Will exit when total number of vulnerabilities in all tools over VULNERABILITY_SEVERITY_THRESHOLD exceeds VULNERABILITY_COUNT_THRESHOLD
# [Optional] defaults to 0
# VULNERABILITY_COUNT_THRESHOLD = 0
# [Optional] defaults to "medium"
# VULNERABILITY_SEVERITY_THRESHOLD = "xxx"
#
# Set Explicit limits for each type of vulnerability
# [Optional] Will when errors of type over limit, not set by default
# VULNERABILITY_CRITICAL_SEVERITY_LIMIT = xxx
# VULNERABILITY_HIGH_SEVERITY_LIMIT = xxx
# VULNERABILITY_MEDIUM_SEVERITY_LIMIT = xxx
# VULNERABILITY_LOW_SEVERITY_LIMIT = xxx
# VULNERABILITY_NONE_SEVERITY_LIMIT = xxx
# VULNERABILITY_NA_SEVERITY_LIMIT = xxx
[console]
PRINT_SUMMARY_ONLY = false
PRINT_IGNORED = false
[scan]
reporters = ["console", "bom", "json", "junit", "quality"]
languages = [{",".join(language_list)}]
"""
        local_config_location = EzeConfig.get_local_config_filename()
        write_text(str(local_config_location), eze_rc)
        log(f"Successfully written configuration file to '{local_config_location}'")
        return True

    def print_languages_list(self):
        """List available languages as a table."""
        log(
            """Available Languages are:
======================="""
        )
        languages = []
        for current_language_name in self.languages:
            current_language_class: LanguageRunnerMeta = self.languages[current_language_name]
            current_language_type = current_language_class.source_type().name
            current_language_version = current_language_class.check_installed() or "Not Installed"
            current_language_description = current_language_class.short_description()
            entry = {
                "Name": current_language_name,
                "Version": current_language_version,
                "Source": current_language_type,
                "Description": current_language_description,
            }
            languages.append(entry)
        pretty_print_table(languages)

    def print_languages_help(self):
        """Print help for all Languages."""
        log(
            """Available Languages Help:
======================="""
        )
        for current_tool_name in self.languages:
            self.print_language_help(current_tool_name)

    def print_language_help(self, language: str):
        """Print out help for a single language: description, version/install, more info."""
        language_class: LanguageRunnerMeta = self.languages[language]
        language_description = language_class.short_description()
        log(
            f"""=================================
Language '{language}' Help
{language_description}
================================="""
        )
        language_version = language_class.check_installed()
        if language_version:
            log(f"Version: {language_version} Installed\n")
        else:
            log(
                """Language Install Instructions:
---------------------------------"""
            )
            log(language_class.install_help())
            log("")
        log(
            """Language More Info:
---------------------------------"""
        )
        log(language_class.more_info())

    def _add_languages(self, languages: dict):
        """Add new languages to the languages registry, skipping duplicates and invalid entries."""
        for language_name in languages:
            language = languages[language_name]
            if issubclass(language, LanguageRunnerMeta):
                # BUG FIX: original used hasattr(self.languages, language_name),
                # which checks *attributes* of the dict object, never its keys,
                # so duplicate detection could never trigger.
                if language_name not in self.languages:
                    log_debug(f"-- installing language '{language_name}'")
                    self.languages[language_name] = language
                else:
                    log_debug(f"-- skipping '{language_name}' already defined")
                    continue
                # TODO: else check public functions
            else:
                log_debug(f"-- skipping invalid language '{language_name}'")
                continue

    def get_language_config(self, language_name: str, scan_type: str = None, run_type: str = None):
        """
        Get Language Config, handle default config parameters
        :raises EzeConfigError
        """
        eze_config = EzeConfig.get_instance()
        language_config = eze_config.get_plugin_config(language_name, scan_type, run_type)
        # Warnings for corrupted config
        if language_name not in self.languages:
            error_message = f"[{language_name}] The ./ezerc config references unknown language plugin '{language_name}', run 'eze languages list' to see available languages"
            raise EzeConfigError(error_message)
        # Warnings for corrupted config
        if "tools" not in language_config:
            error_message = f"[{language_name}] The ./ezerc config missing required {language_name}.tools list, run 'eze housekeeping create-local-config' to recreate"
            raise EzeConfigError(error_message)
        return language_config

    def get_language(self, language_name: str, scan_type: str = None, run_type: str = None) -> LanguageRunnerMeta:
        """
        Gets a instance of a language, populated with it's configuration
        :raises EzeConfigError
        """
        [language_name, run_type] = extract_embedded_run_type(language_name, run_type)
        language_config = self.get_language_config(language_name, scan_type, run_type)
        language_class: LanguageRunnerMeta = self.languages[language_name]
        language_instance = language_class(language_config)
        return language_instance

    async def run_language(self, language_name: str, scan_type: str = None, run_type: str = None) -> list:
        """Runs a instance of a tool, populated with it's configuration.

        :return: list of scan results, one per tool configured for the language
        """
        [language_name, run_type] = extract_embedded_run_type(language_name, run_type)
        language_instance: LanguageRunnerMeta = self.get_language(language_name, scan_type, run_type)
        # get raw scan result
        tools = language_instance.config["tools"]
        results = []
        tool_manager = ToolManager.get_instance()
        for tool_name in tools:
            scan_result = await tool_manager.run_tool(tool_name, scan_type, None, language_name)
            results.append(scan_result)
        return results
| StarcoderdataPython |
5142739 | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import copy
import json
import os
import shutil
from maro.cli.process.utils.details import env_prepare, load_details
from maro.cli.utils.params import LocalPaths, ProcessRedisName
from maro.utils.logger import CliLogger
logger = CliLogger(name=__name__)
class ProcessExecutor:
    """Local-process job executor backed by Redis ticket queues.

    Job details live in the JOB_DETAILS hash; job names travel through the
    pending/killed ticket lists, which separate worker processes consume.
    """

    def __init__(self):
        self.redis_connection = env_prepare()

    def start_job(self, deployment_path: str):
        """Load a job deployment file and queue the job for execution."""
        job_details = load_details(deployment_path)
        self._push_pending_job(job_details)

    def _push_pending_job(self, job_details: dict):
        """Store job details in Redis and enqueue its name on the pending tickets list."""
        job_name = job_details["name"]
        # Push job details to redis
        self.redis_connection.hset(
            ProcessRedisName.JOB_DETAILS,
            job_name,
            json.dumps(job_details)
        )
        # Push job name to pending_job_tickets
        self.redis_connection.lpush(
            ProcessRedisName.PENDING_JOB_TICKETS,
            job_name
        )
        logger.info(f"Sending {job_name} into pending job tickets.")

    def stop_job(self, job_name: str):
        """Request a stop for a running or pending job; no-op (with error log) if unknown."""
        if not self.redis_connection.hexists(ProcessRedisName.JOB_DETAILS, job_name):
            logger.error(f"No such job '{job_name}' in Redis.")
            return
        # push job_name into kill_job_tickets
        self.redis_connection.lpush(
            ProcessRedisName.KILLED_JOB_TICKETS,
            job_name
        )
        logger.info(f"Sending {job_name} into killed job tickets.")

    def delete_job(self, job_name: str):
        """Stop a job and remove its Redis record and local log folder."""
        # Stop job for running and pending job.
        self.stop_job(job_name)
        # Rm job details in Redis
        self.redis_connection.hdel(ProcessRedisName.JOB_DETAILS, job_name)
        # Rm job's log folder (ignore_errors: folder may not exist for never-run jobs)
        job_folder = os.path.expanduser(f"{LocalPaths.MARO_PROCESS}/{job_name}")
        shutil.rmtree(job_folder, ignore_errors=True)
        logger.info(f"Remove local temporary log folder {job_folder}.")

    def get_job_logs(self, job_name):
        """Copy a job's log folder into the current working directory."""
        source_path = os.path.expanduser(f"{LocalPaths.MARO_PROCESS}/{job_name}")
        if not os.path.exists(source_path):
            logger.error(f"Cannot find the logs of {job_name}.")
            # BUG FIX: original fell through after logging, and copytree then
            # raised FileNotFoundError on the missing source path.
            return
        destination = os.path.join(os.getcwd(), job_name)
        if os.path.exists(destination):
            shutil.rmtree(destination)
        shutil.copytree(source_path, destination)
        logger.info(f"Dump logs in path: {destination}.")

    def list_job(self):
        """Log every known job with its derived status (running/pending/finish)."""
        # Get all jobs
        jobs = self.redis_connection.hgetall(ProcessRedisName.JOB_DETAILS)
        for job_name, job_details in jobs.items():
            job_name = job_name.decode()
            job_details = json.loads(job_details)
            if self.redis_connection.hexists(ProcessRedisName.RUNNING_JOB, job_name):
                job_details["job_status"] = "running"
            else:
                pending_jobs = self.redis_connection.lrange(ProcessRedisName.PENDING_JOB_TICKETS, 0, -1)
                pending_jobs = [pending_name.decode() for pending_name in pending_jobs]
                job_details["job_status"] = "pending" if job_name in pending_jobs else "finish"
            logger.info(job_details)

    def start_schedule(self, deployment_path: str):
        """Load a schedule deployment and queue each of its jobs individually."""
        schedule_detail = load_details(deployment_path)
        # push schedule details to Redis
        self.redis_connection.hset(
            ProcessRedisName.JOB_DETAILS,
            schedule_detail["name"],
            json.dumps(schedule_detail)
        )
        job_list = schedule_detail["job_names"]
        # switch schedule details into job details
        job_detail = copy.deepcopy(schedule_detail)
        del job_detail["job_names"]
        for job_name in job_list:
            job_detail["name"] = job_name
            self._push_pending_job(job_detail)

    def stop_schedule(self, schedule_name: str):
        """Stop every job belonging to a schedule; no-op (with error log) if unknown."""
        if self.redis_connection.hexists(ProcessRedisName.JOB_DETAILS, schedule_name):
            schedule_details = json.loads(self.redis_connection.hget(ProcessRedisName.JOB_DETAILS, schedule_name))
        else:
            logger.error(f"Cannot find {schedule_name} in Redis. Please check schedule name.")
            return
        job_list = schedule_details["job_names"]
        for job_name in job_list:
            self.stop_job(job_name)
| StarcoderdataPython |
5071412 | from django.contrib.auth import get_user_model
from rest_framework import serializers
from mime.mime.models import Mime
User = get_user_model()
class MimeSerializer(serializers.ModelSerializer):
    """Mime serializer: serializes Mime model instances for the REST API."""

    # Attach user to mime; ReadOnlyField means the owner is reported from
    # owner.username and can never be set by the client request.
    owner = serializers.ReadOnlyField(source="owner.username")

    class Meta:
        """
        Meta class for Mime serializer
        """

        model = Mime
        fields = ["id", "owner", "no_inf", "inf_name", "city", "estate"]
| StarcoderdataPython |
207463 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""List and compare most used OpenStack cloud resources."""
import argparse
import json
import subprocess
import sys
from rally.cli import cliutils
from rally.common import objects
from rally.common.plugin import discover
from rally import consts
from rally import osclients
class ResourceManager(object):
    """Base helper that enumerates the resources exposed by one OpenStack client.

    Subclasses set REQUIRED_SERVICE and define ``list_<plural>`` methods;
    ``get_resources`` aggregates all of them into ordered 2-tuple lists.
    """

    REQUIRED_SERVICE = None
    REPR_KEYS = ("id", "name", "tenant_id", "zone", "zoneName", "pool")

    def __init__(self, clients):
        self.clients = clients

    def is_available(self):
        """Return True when the cloud offers the service this manager needs."""
        if not self.REQUIRED_SERVICE:
            return True
        return self.REQUIRED_SERVICE in self.clients.services().values()

    @property
    def client(self):
        """Resolve the OpenStack client whose name matches this class, lowercased."""
        return getattr(self.clients, type(self).__name__.lower())()

    def get_resources(self):
        """Collect every resource reported by this manager's list_* methods."""
        collected = []
        cls = type(self).__name__.lower()
        lister_names = [attr for attr in dir(self) if attr.startswith("list_")]
        for lister_name in lister_names:
            resources = getattr(self, lister_name)() or []
            # "list_servers" -> "server": drop the prefix and the plural "s"
            resource_name = lister_name[5:-1]
            for res in resources:
                # NOTE(stpierre): It'd be nice if we could make this a
                # dict, but then we get ordering issues. So a list of
                # 2-tuples it must be.
                res_repr = []
                for key in self.REPR_KEYS + (resource_name,):
                    value = res.get(key) if isinstance(res, dict) else getattr(res, key, None)
                    if value:
                        res_repr.append((key, value))
                if not res_repr:
                    raise ValueError("Failed to represent resource %r" % res)
                res_repr.append(("class", cls))
                res_repr.append(("resource_name", resource_name))
                collected.append(res_repr)
        return collected
class Keystone(ResourceManager):
    """Lists identity resources (users, tenants/projects, roles)."""

    REQUIRED_SERVICE = consts.Service.KEYSTONE

    def list_users(self):
        return self.client.users.list()

    def list_tenants(self):
        # keystone v3 exposes "projects"; v2 calls them "tenants"
        if hasattr(self.client, "projects"):
            return self.client.projects.list()  # V3
        return self.client.tenants.list()  # V2

    def list_roles(self):
        return self.client.roles.list()
class Nova(ResourceManager):
    """Lists compute resources (servers, flavors, keypairs, networks, ...)."""

    REQUIRED_SERVICE = consts.Service.NOVA

    def list_flavors(self):
        return self.client.flavors.list()

    def list_floating_ip_pools(self):
        return self.client.floating_ip_pools.list()

    def list_floating_ips(self):
        return self.client.floating_ips.list()

    def list_images(self):
        return self.client.images.list()

    def list_keypairs(self):
        return self.client.keypairs.list()

    def list_networks(self):
        return self.client.networks.list()

    def list_security_groups(self):
        # all_tenants: list resources across every project, not just the current one
        return self.client.security_groups.list(
            search_opts={"all_tenants": True})

    def list_servers(self):
        # all_tenants: list resources across every project, not just the current one
        return self.client.servers.list(
            search_opts={"all_tenants": True})

    def list_services(self):
        return self.client.services.list()

    def list_availability_zones(self):
        return self.client.availability_zones.list()
class Neutron(ResourceManager):
    """Lists networking resources; lbaas listers return None when the
    extension is absent (get_resources treats falsy results as empty)."""

    REQUIRED_SERVICE = consts.Service.NEUTRON

    def has_extension(self, name):
        """Return True if the neutron deployment advertises extension alias `name`."""
        extensions = self.client.list_extensions().get("extensions", [])
        return any(ext.get("alias") == name for ext in extensions)

    def list_networks(self):
        return self.client.list_networks()["networks"]

    def list_subnets(self):
        return self.client.list_subnets()["subnets"]

    def list_routers(self):
        return self.client.list_routers()["routers"]

    def list_ports(self):
        return self.client.list_ports()["ports"]

    def list_floatingips(self):
        return self.client.list_floatingips()["floatingips"]

    def list_security_groups(self):
        return self.client.list_security_groups()["security_groups"]

    def list_health_monitors(self):
        if self.has_extension("lbaas"):
            return self.client.list_health_monitors()["health_monitors"]

    def list_pools(self):
        if self.has_extension("lbaas"):
            return self.client.list_pools()["pools"]

    def list_vips(self):
        if self.has_extension("lbaas"):
            return self.client.list_vips()["vips"]
class Glance(ResourceManager):
    """Lists image-service resources."""

    REQUIRED_SERVICE = consts.Service.GLANCE

    def list_images(self):
        return self.client.images.list()
class Heat(ResourceManager):
    """Lists orchestration resources (resource types, stacks)."""

    REQUIRED_SERVICE = consts.Service.HEAT

    def list_resource_types(self):
        return self.client.resource_types.list()

    def list_stacks(self):
        return self.client.stacks.list()
class Cinder(ResourceManager):
    """Lists block-storage resources (volumes, snapshots, backups, ...)."""

    REQUIRED_SERVICE = consts.Service.CINDER

    def list_availability_zones(self):
        return self.client.availability_zones.list()

    def list_backups(self):
        return self.client.backups.list()

    def list_volume_snapshots(self):
        return self.client.volume_snapshots.list()

    def list_volume_types(self):
        return self.client.volume_types.list()

    def list_volumes(self):
        # all_tenants: list volumes across every project, not just the current one
        return self.client.volumes.list(
            search_opts={"all_tenants": True})
class Senlin(ResourceManager):
    """Lists clustering resources (clusters, profiles)."""

    REQUIRED_SERVICE = consts.Service.SENLIN

    def list_clusters(self):
        return self.client.clusters()

    def list_profiles(self):
        return self.client.profiles()
class Watcher(ResourceManager):
    """Lists infrastructure-optimization resources (audits, goals, strategies, ...)."""

    REQUIRED_SERVICE = consts.Service.WATCHER

    # Overridden: watcher objects are represented by uuid/name
    # (presumably they lack the default id/tenant_id attributes — confirm against client)
    REPR_KEYS = ("uuid", "name")

    def list_audits(self):
        return self.client.audit.list()

    def list_audit_templates(self):
        return self.client.audit_template.list()

    def list_goals(self):
        return self.client.goal.list()

    def list_strategies(self):
        return self.client.strategy.list()

    def list_action_plans(self):
        return self.client.action_plan.list()
class CloudResources(object):
    """List and compare cloud resources.

    resources = CloudResources(auth_url=..., ...)
    saved_list = resources.list()
    # Do something with the cloud ...
    changes = resources.compare(saved_list)
    has_changed = any(changes)
    removed, added = changes
    """

    def __init__(self, **kwargs):
        self.clients = osclients.Clients(objects.Credential(**kwargs))

    def _deduplicate(self, lst):
        """Rename repeated items so every entry in the result is unique.

        _deduplicate(["a", "b", "c", "b", "b"]) ->
        ['a', 'b', 'c', 'b (duplicate 1)', 'b (duplicate 2)']
        """
        unique_items = []
        for item in lst:
            if item in unique_items:
                suffix = 1
                candidate = "%s (duplicate %i)" % (item, suffix)
                while candidate in unique_items:
                    suffix += 1
                    candidate = "%s (duplicate %i)" % (item, suffix)
                item = candidate
            unique_items.append(item)
        return unique_items

    def list(self):
        """Gather resources from every available ResourceManager, sorted and deduplicated."""
        gathered = []
        for manager_cls in discover.itersubclasses(ResourceManager):
            manager = manager_cls(self.clients)
            if manager.is_available():
                gathered.extend(manager.get_resources())
        return sorted(self._deduplicate(gathered))

    def compare(self, with_list):
        """Diff the current cloud state against a previously saved list.

        Each resource is a list of 2-tuples (or list of lists once round-tripped
        through JSON), so everything is normalized to tuples of tuples to be
        hashable for set arithmetic.

        :return: (sorted removed resources, sorted added resources)
        """
        baseline = set(tuple(tuple(pair) for pair in res) for res in with_list)
        current = set(tuple(tuple(pair) for pair in res) for res in self.list())
        return (sorted(baseline - current), sorted(current - baseline))
def _print_tabular_resources(resources, table_label):
    """Render a list of resource 2-tuple lists as a table via rally's cliutils."""
    cliutils.print_list(
        objs=[dict(r) for r in resources],
        fields=("class", "resource_name", "identifiers"),
        field_labels=("service", "resource type", "identifiers"),
        table_label=table_label,
        # Fold every key except the bookkeeping ones into one "identifiers" column.
        formatters={"identifiers":
                    lambda d: " ".join("%s:%s" % (k, v)
                                       for k, v in d.items()
                                       if k not in ("class", "resource_name"))}
    )
    print("")
def main():
    """CLI entry point: dump the current cloud resource list to JSON, or diff
    the cloud against a previously dumped list.

    Always exits 0 — differences are reported but deliberately never fail
    (see the gate-job comment below).
    """
    parser = argparse.ArgumentParser(
        description=("Save list of OpenStack cloud resources or compare "
                     "with previously saved list."))
    parser.add_argument("--credentials",
                        type=argparse.FileType("r"),
                        metavar="<path/to/credentials.json>",
                        help="cloud credentials in JSON format")
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("--dump-list",
                       type=argparse.FileType("w"),
                       metavar="<path/to/output/list.json>",
                       help="dump resources to given file in JSON format")
    group.add_argument("--compare-with-list",
                       type=argparse.FileType("r"),
                       metavar="<path/to/existent/list.json>",
                       help=("compare current resources with a list from "
                             "given JSON file"))
    args = parser.parse_args()
    if args.credentials:
        config = json.load(args.credentials)
    else:
        # Fall back to the active rally deployment's stored configuration.
        config = json.loads(subprocess.check_output(["rally", "deployment",
                                                     "config"]))
    # Flatten the admin credential block into top-level kwargs for CloudResources.
    config.update(config.pop("admin"))
    del config["type"]
    if "users" in config:
        del config["users"]
    resources = CloudResources(**config)
    if args.dump_list:
        resources_list = resources.list()
        json.dump(resources_list, args.dump_list)
    elif args.compare_with_list:
        given_list = json.load(args.compare_with_list)
        changes = resources.compare(with_list=given_list)
        removed, added = changes
        # filter out expected additions (default artifacts every cloud creates)
        expected = []
        for resource_tuple in added:
            resource = dict(resource_tuple)
            if ((resource["class"] == "keystone" and
                    resource["resource_name"] == "role" and
                    resource["name"] == "_member_") or
                    (resource["class"] == "nova" and
                     resource["resource_name"] == "security_group" and
                     resource["name"] == "default")):
                expected.append(resource_tuple)
        for resource in expected:
            added.remove(resource)
        if removed:
            _print_tabular_resources(removed, "Removed resources")
        if added:
            _print_tabular_resources(added, "Added resources (unexpected)")
        if expected:
            _print_tabular_resources(expected, "Added resources (expected)")
        if any(changes):
            return 0  # `1' will fail gate job
        return 0
    return 0


if __name__ == "__main__":
    sys.exit(main())
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.