id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
1669145 | <filename>cdk-app/scripts/processing_script/preprocessing.py<gh_stars>1-10
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import os
import pyspark
from pyspark.sql import SparkSession
from pyspark.sql.functions import *
from pyspark.sql.types import (
DoubleType,
StringType,
StructField,
StructType,
IntegerType
)
import boto3
client = boto3.client('s3')
def main():
    """Spark preprocessing job: split the input dataset into train/validation/test
    sets for both XGBoost (libsvm-style ``feature:value`` columns) and Linear
    Learner (plain values), writing all results to the output S3 bucket.
    """
    parser = argparse.ArgumentParser(description="app inputs and outputs")
    parser.add_argument("--s3_input_bucket", type=str, help="s3 input bucket")
    parser.add_argument("--s3_input_key_prefix", type=str, help="s3 input key prefix")
    parser.add_argument("--s3_output_bucket", type=str, help="s3 output bucket")
    args = parser.parse_args()
    # Listing all old files if any, to delete them and create a new dataset.
    response = client.list_objects_v2(Bucket = args.s3_output_bucket)
    # NOTE(review): assumes the 'Contents' key is present in the response — it is
    # absent when the bucket is empty; confirm before relying on this cleanup.
    file_keys = [ obj['Key'] for obj in response['Contents'] if (obj['Key'].find('Xgboost')!=-1 or obj['Key'].find('Linear')!=-1 or obj['Key'].find('folder')!=-1)]
    # Delete any old output files before running the job.
    if len(file_keys)!=0:
        for key in file_keys:
            client.delete_object(Bucket=args.s3_output_bucket, Key=key)
    spark = SparkSession.builder.appName("PySparkApp").getOrCreate()
    # This is needed to save RDDs, which is the only way to write nested
    # Dataframes into CSV format.
    spark.sparkContext._jsc.hadoopConfiguration().set("mapred.output.committer.class",
                                                      "org.apache.hadoop.mapred.FileOutputCommitter")
    # Do not write _SUCCESS marker files next to the CSV output.
    spark.sparkContext._jsc.hadoopConfiguration().set("mapreduce.fileoutputcommitter.marksuccessfuljobs", "false")
    # Defining the schema corresponding to the input data. The input data does
    # not contain the headers.
    schema = StructType([StructField("rings", IntegerType(), True),
                         StructField("sex", StringType(), True),
                         StructField("length", StringType(), True),
                         StructField("diameter", StringType(), True),
                         StructField("height", StringType(), True),
                         StructField("whole_weight", StringType(), True),
                         StructField("shucked_weight", StringType(), True),
                         StructField("viscera_weight", StringType(), True),
                         StructField("shell_weight", StringType(), True)])
    # Downloading the data from S3 into a Dataframe (space-separated, no header).
    total_df = spark.read.csv(('s3://' + os.path.join(args.s3_input_bucket,args.s3_input_key_prefix ,
                                                      '*')), header=False, schema=schema, sep=' ')
    # Split the overall dataset into 70-15-15 training, validation and testing.
    (xg_train_df, xg_validation_df, xg_test_df) = total_df.randomSplit([0.7, 0.15,0.15])
    # Write the train dataframe in CSV format and upload to S3.
    xg_train_df.coalesce(1).write.csv(('s3://' + os.path.join(args.s3_output_bucket, 'Xgboost', 'train')),sep=' ')
    # Write the validation dataframe in CSV format and upload to S3.
    xg_validation_df.coalesce(1).write.csv(('s3://' + os.path.join(args.s3_output_bucket, 'Xgboost', 'validation')),sep=' ')
    # Write the test dataframe in CSV format and upload to S3.
    xg_test_df.coalesce(1).write.csv(('s3://' + os.path.join(args.s3_output_bucket, 'Xgboost', 'test')),sep=' ')
    # UDF to split the <feature_number>:<feature_value> format and keep only the value.
    chop_value = udf(lambda x: x.split(":")[1], StringType())
    # Loop on all features to split, except the label column "rings".
    features=['sex','length','diameter','height','whole_weight','shucked_weight','viscera_weight','shell_weight']
    for feature in features:
        total_df=total_df.withColumn(feature,chop_value(total_df[feature]))
    # Split the overall dataset into 70-15-15 training, validation and testing.
    (ll_train_df, ll_validation_df, ll_test_df) = total_df.randomSplit([0.7, 0.15,0.15])
    # Write the train dataframe in CSV format and upload to S3.
    ll_train_df.coalesce(1).write.csv('s3://' + os.path.join(args.s3_output_bucket, 'Linear', 'train'))
    # Write the validation dataframe in CSV format and upload to S3.
    ll_validation_df.coalesce(1).write.csv('s3://' + os.path.join(args.s3_output_bucket, 'Linear', 'validation'))
    # Write the test dataframe in CSV format and upload to S3.
    ll_test_df.coalesce(1).write.csv('s3://' + os.path.join(args.s3_output_bucket, 'Linear', 'test'))
    # Delete any *_$folder$ marker files created by the S3 committer.
    response = client.list_objects_v2(Bucket = args.s3_output_bucket)
    file_keys = [ obj['Key'] for obj in response['Contents'] if (obj['Key'].find('folder')!=-1)]
    # Delete any marker files found.
    if len(file_keys)!=0:
        for key in file_keys:
            client.delete_object(Bucket=args.s3_output_bucket, Key=key)
if __name__ == "__main__":
main() | StarcoderdataPython |
4805796 | import struct
from pathlib import Path
from typing import IO
import numpy as np
_INDIANNESS: str = "<"
"""Based on 'ieee-le' of MATLAB (see ``src/main/matlab/dataset/wu2/generate_audio_files.m``)"""
_SAMPLE_TYPE: str = "f"
"""Based on 'single' of MATLAB (see ``src/main/matlab/dataset/wu2/generate_audio_files.m``)"""
_SAMPLE_BYTES: int = 4
"""Based on ``_SAMPLE_TYPE``"""
def get_file_length(file: Path) -> int:
    """Return how many audio samples are contained in the binary audio file ``file``.

    :param file: path to an existing raw sample file
    :raises ValueError: if the file size is not a whole multiple of the
        per-sample byte width ``_SAMPLE_BYTES``
    :return: the number of samples in the file
    """
    assert isinstance(file, Path) and file.exists()
    b: int = file.stat().st_size
    if b % _SAMPLE_BYTES != 0:
        raise ValueError("File size (" + str(b) + "B) is not a valid length")
    # Floor division keeps this in integer arithmetic; the size is known to be
    # an exact multiple here, so int(b / _SAMPLE_BYTES) was a lossy float detour.
    return b // _SAMPLE_BYTES
def _decode_bytes(b: bytes) -> np.ndarray:
    """Decode a byte stream of audio samples to a numpy array.

    Encoding is done by ``src/main/matlab/dataset/wu2/generate_audio_files.m``.

    :param b: raw sample bytes; length must be a multiple of ``_SAMPLE_BYTES``
    :return: a 1-D numpy array with one element per sample
    """
    assert isinstance(b, bytes)
    # Use the shared module constants instead of the hard-coded 4 / "f" so the
    # struct format cannot drift out of sync with _SAMPLE_BYTES/_SAMPLE_TYPE.
    n_samples: int = len(b) // _SAMPLE_BYTES
    return np.array(struct.unpack(_INDIANNESS + str(n_samples) + _SAMPLE_TYPE, b))
def _read_bytes(io: IO, i: int, n: int) -> bytes:
    """Read ``n`` bytes from stream ``io``, starting at absolute offset ``i``.

    :param io: The IO stream to read bytes from
    :param i: The position of the first byte to read
    :param n: The number of bytes to read
    :return: The bytes that were read
    """
    # Position the stream at the requested offset, then take the section.
    io.seek(i)
    section = io.read(n)
    return section
def read_samples(io: IO, i: int, n: int) -> np.ndarray:
    """Read a section of samples from a binary audio stream.

    :param io: The IO stream to read the samples from
    :param i: The index of the first sample to read
    :param n: The number of samples to read
    :return: The decoded samples as a numpy array
    """
    # Scale sample indices/counts to byte offsets, then decode in one step.
    return _decode_bytes(_read_bytes(io, i * _SAMPLE_BYTES, n * _SAMPLE_BYTES))
| StarcoderdataPython |
65494 | #------------------------------------------------------------------------------
# Copyright (c) 2013, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#------------------------------------------------------------------------------
from atom.api import Event, Typed, Unicode
from atom.datastructures.api import sortedmap
from .declarative_meta import DeclarativeMeta
from .expression_engine import ExpressionEngine
from .object import Object, flag_generator, flag_property
from enaml.compat import with_metaclass
def d_(member, readable=True, writable=True, final=True):
    """ Mark an Atom member as bindable from Enaml syntax.

    Parameters
    ----------
    member : Member
        The atom member to mark as bindable from Enaml syntax.

    readable : bool, optional
        Whether the member is readable from Enaml syntax. The member
        must be readable to use the '>>', ':=', and '::' operators.
        The default is True.

    writable : bool, optional
        Whether the member is writable from Enaml syntax. The member
        must be writable to use the '=', '<<', and ':=' operators.
        The default is True.

    final : bool, optional
        Whether or not the member can be redefined from Enaml syntax
        using the 'attr' keyword. The default is True and indicates
        that the member cannot be overridden.

    """
    # Lazily create the metadata dict, then record the declarative flags
    # in a single update.
    if member.metadata is None:
        member.metadata = {}
    member.metadata.update(
        d_member=True,
        d_readable=readable,
        d_writable=writable,
        d_final=final,
    )
    return member
def d_func(func):
    """ Mark a method as overridable from Enaml syntax.

    Parameters
    ----------
    func : FunctionType
        The function to tag as declarative.

    Returns
    -------
    result : func
        The original function tagged with the compiler metadata.

    """
    # Tag the function object in-place; the Enaml compiler looks for
    # this attribute when resolving declarative overrides.
    setattr(func, "_d_func", True)
    return func
#: The flag indicating that the Declarative object has been initialized.
INITIALIZED_FLAG = next(flag_generator)
class Declarative(with_metaclass(DeclarativeMeta, Object)):
    """ The most base class of the Enaml declarative objects.

    This class provides the core functionality required of declarative
    Enaml types. It can be used directly in a declarative Enaml object
    tree to store and react to state changes. It has no concept of a
    visual representation; that functionality is added by subclasses.

    """
    #: Export the 'name' attribute as a declarative member.
    name = d_(Unicode())

    #: An event fired when an object is initialized. It is triggered
    #: once during the object lifetime, at the end of the initialize
    #: method.
    initialized = d_(Event(), writable=False)

    #: A property which gets and sets the initialized flag. This should
    #: not be manipulated directly by user code.
    is_initialized = flag_property(INITIALIZED_FLAG)

    #: Storage space for the declarative runtime. This value should not
    #: be manipulated by user code.
    _d_storage = Typed(sortedmap, ())

    #: Storage space for the declarative engine. This value should not
    #: be manipulated by user code.
    _d_engine = Typed(ExpressionEngine)

    def initialize(self):
        """ Initialize this object all of its children recursively.

        This is called to give the objects in the tree the opportunity
        to initialize additional state which depends upon the object
        tree being fully built. It is the responsibility of external
        code to call this method at the appropriate time. This will
        emit the `initialized` signal after all of the children have
        been initialized.

        """
        # Iterate over a copy since the children add and remove
        # other children during initialization.
        for child in self.children[:]:
            if isinstance(child, Declarative):
                child.initialize()
        # Flag first, then fire the event, so handlers observe the
        # object as already initialized.
        self.is_initialized = True
        self.initialized()

    def destroy(self):
        """ An overridden destructor method for declarative cleanup.

        """
        self.is_initialized = False
        # Drop the runtime storage and expression engine before delegating
        # to the base destructor.
        del self._d_storage
        del self._d_engine
        super(Declarative, self).destroy()

    def child_added(self, child):
        """ An overridden child added event handler.

        This handler will automatically initialize a declarative child
        if this object itself has already been initialized.

        """
        super(Declarative, self).child_added(child)
        if isinstance(child, Declarative):
            if self.is_initialized and not child.is_initialized:
                child.initialize()
| StarcoderdataPython |
148419 | <filename>bert-sentiment/src/app.py
import flask
import torch
from flask import Flask, render_template, request
from utils import label_full_decoder
import sys
import config
import dataset
import engine
from model import BERTBaseUncased
from tokenizer import tokenizer
from werkzeug.serving import run_simple
from werkzeug.wsgi import DispatcherMiddleware
T = tokenizer.TweetTokenizer(
preserve_handles=True, preserve_hashes=True, preserve_case=False, preserve_url=False)
app = Flask(__name__,
static_folder='app_resources/static',
static_url_path='/sentimentanalyzer',
instance_relative_config=True,
template_folder='app_resources/templates/public')
MODEL = None
DEVICE = config.device
def preprocess(text):
    """Tokenize *text* and replace user mentions with the "mention_0" placeholder.

    Runs of consecutive mention tokens collapse into a single placeholder;
    all other tokens are kept as-is. Tokens and the processed token list are
    echoed to stderr for debugging.
    """
    tokens = T.tokenize(text)
    print(tokens, file=sys.stderr)
    ptokens = []
    for index, token in enumerate(tokens):
        if "@" not in token:
            ptokens.append(token)
        elif index == 0 or "@" not in tokens[index - 1]:
            # First mention in a run: emit the placeholder exactly once.
            ptokens.append("mention_0")
    print(ptokens, file=sys.stderr)
    return " ".join(ptokens)
def sentence_prediction(sentence):
    """Run the BERT sentiment model on a single *sentence* and return the
    first prediction.

    NOTE(review): the model weights are re-loaded from disk on every call,
    which is expensive; consider reusing the module-level MODEL instead.
    """
    sentence = preprocess(sentence)
    model_path = config.MODEL_PATH
    # Wrap the single sentence in the same dataset/dataloader machinery used
    # at validation time; target 0 is a dummy label.
    test_dataset = dataset.BERTDataset(
        review=[sentence],
        target=[0]
    )
    test_data_loader = torch.utils.data.DataLoader(
        test_dataset,
        batch_size=config.VALID_BATCH_SIZE,
        num_workers=3
    )
    device = config.device
    model = BERTBaseUncased()
    model.load_state_dict(torch.load(
        model_path, map_location=torch.device(device)))
    model.to(device)
    # predict_fn returns a pair; the second element is unpacked into an empty
    # list pattern, i.e. it is required to be an empty iterable and discarded.
    outputs, [] = engine.predict_fn(test_data_loader, model, device)
    print(outputs)
    return outputs[0]
@app.route("/sentimentanalyzer/predict", methods=['POST'])
def predict():
    """Flask endpoint: read 'sentence' from the POSTed form data, run the
    sentiment model, and return the decoded prediction as JSON."""
    print(request.form, file=sys.stderr)
    sentence = request.form['sentence']
    if sentence:
        print(sentence, file=sys.stderr)
        prediction = sentence_prediction(sentence)
        response = {}
        response["response"] = {
            'sentence': sentence,
            'prediction': label_full_decoder(prediction),
        }
        return flask.jsonify(response)
    else:
        # Empty form field: report the error instead of running the model.
        return flask.jsonify({"error": "empty text"})
@app.route("/sentimentanalyzer/")
def index():
    """Landing page."""
    return render_template("index.html")


@app.route("/sentimentanalyzer/demo")
def demo():
    """Interactive demo page."""
    return render_template("demo.html")


@app.route("/sentimentanalyzer/models")
def models():
    """Model description page."""
    return render_template("models.html")


@app.route("/sentimentanalyzer/about")
def about():
    """About page."""
    return render_template("about.html")
if __name__ == "__main__":
MODEL = BERTBaseUncased()
MODEL.load_state_dict(torch.load(
config.MODEL_PATH, map_location=torch.device(DEVICE)))
MODEL.eval()
app.run("0.0.0.0", port=1095, debug=True)
# host="http://cleopatra.ijs.si/sentimentanalyzer"
| StarcoderdataPython |
17080 | from django.core.mail import EmailMessage
from django.conf import settings
def send_email(name, date, email):
    """Send the OpenInfra Days Korea 2018 invitation-ticket confirmation mail.

    :param name: recipient display name, interpolated into the HTML body ({0})
    :param date: attendance date, interpolated into the HTML body ({1})
    :param email: destination address
    :return: the result of ``EmailMessage.send()``
    """
    # HTML body; kept verbatim because it is the rendered e-mail content.
    txt = """
<html>
<body>
<table cellpadding='0' cellspacing='0' width='100%' border='0'>
<tbody>
<tr>
<td style='word-wrap:break-word;font-size:0px;padding:0px;padding-bottom:10px' align='left'>
<div style='color:#000000;font-family:Spoqa Han Sans,sans-serif;font-size:20px;line-height:22px;letter-spacing:-0.8px;text-align:left'>
안녕하세요 <span style='color:#3832D8'>{0}</span> 님,
</div>
</td>
</tr>
<tr>
<td style='word-wrap:break-word;font-size:0px;padding:0px;padding-bottom:10px' align='left'>
<div style='color:#000000;font-family:Spoqa Han Sans,sans-serif;font-size:30px;line-height:1.3;letter-spacing:-1.1px; text-align:left'>
OpenInfra Days Korea 2018
</div>
</td>
</tr>
<tr>
<td style='word-wrap:break-word;font-size:0px;padding:0px;padding-bottom:30px' align='left'>
<div style='color:#000000;font-family:Spoqa Han Sans,sans-serif;font-size:20px;line-height:22px;letter-spacing:-0.8px;text-align:left'>
초청 티켓 등록이 완료되었습니다.
</div>
</td>
</tr>
<tr>
<td style='word-wrap:break-word;font-size:0px;padding:0px;padding-bottom:30px' align='left'>
<div style='color:#000000;font-family:Spoqa Han Sans,sans-serif;font-size:20px;line-height:22px;letter-spacing:-0.8px;text-align:left'>
참가 일자 : {1}
</div>
</td>
</tr>
<tr>
<td style='word-wrap:break-word;font-size:0px;padding:0px' align='left'>
<div style='color:#000000;font-family:Spoqa Han Sans,sans-serif;font-size:20px;line-height:22px;letter-spacing:-0.8px;text-align:left'>
<a href="http://invite.openinfradays.kr">티켓 확인</a>
</div>
</td>
</tr>
</tbody>
</table>
</body>
</html>
""".format(name, date)
    # Send as an HTML e-mail using the configured subject line.
    email = EmailMessage(settings.EMAIL_TITLE, txt, to=(email,))
    email.content_subtype = "html"
    return email.send()
| StarcoderdataPython |
25263 | <reponame>Holly-Jiang/QCTSA
class NeighborResult:
    """Mutable container for state accumulated while exploring neighbor solutions."""

    def __init__(self):
        # Candidate solutions gathered so far.
        self.solutions = []
        # The path chosen for the current candidate.
        self.choose_path = []
        # Counter of processed items.
        self.current_num = 0
        # Gates already solved for the current candidate.
        self.curr_solved_gates = []
| StarcoderdataPython |
1741893 | <filename>Desktop/cs61a/lab/lab02/lab02.py
"""Lab 2: Lambda Expressions and Higher Order Functions"""
# Lambda Functions
def lambda_curry2(func):
    """
    Returns a Curried version of a two-argument function FUNC.

    >>> from operator import add
    >>> curried_add = lambda_curry2(add)
    >>> add_three = curried_add(3)
    >>> add_three(5)
    8
    """
    # Curry with nested lambdas: the outer one captures the first argument,
    # the inner one applies func to both.
    return lambda y: lambda z: func(y, z)
| StarcoderdataPython |
124849 | import io
import unittest
from unittest.mock import patch
from kattis import k_trip2007
###############################################################################
class SampleInput(unittest.TestCase):
    '''Problem statement sample inputs and outputs'''

    def test_sample_input(self):
        '''Run and assert problem statement sample input and output.'''
        # Build the multi-case stdin: two identical 6-student cases followed
        # by the terminating "0" sentinel.
        inputs = []
        inputs.append('6')
        inputs.append('1 1 2 2 2 3')
        inputs.append('6')
        inputs.append('1 1 2 2 2 3')
        inputs.append('0')
        inputs = '\n'.join(inputs) + '\n'
        # Expected stdout for both cases; cases are separated by a blank line.
        outputs = []
        outputs.append('3')
        outputs.append('2 1')
        outputs.append('2 1')
        outputs.append('2 3')
        outputs.append('')
        outputs.append('3')
        outputs.append('2 1')
        outputs.append('2 1')
        outputs.append('2 3')
        outputs = '\n'.join(outputs) + '\n'
        # Patch stdin/stdout, run the solution, then verify the transcript
        # and that all of the input was consumed.
        with patch('sys.stdin', io.StringIO(inputs)) as stdin,\
                patch('sys.stdout', new_callable=io.StringIO) as stdout:
            k_trip2007.main()
            self.assertEqual(stdout.getvalue(), outputs)
            self.assertEqual(stdin.read(), '')
###############################################################################
if __name__ == '__main__':
unittest.main()
| StarcoderdataPython |
1747869 | <gh_stars>0
"""
Serving class that consumes images and outputs segmentation bitmap.
"""
import numpy as np
import torch
import mmcv
from mmcv.parallel import collate
from mmseg.datasets.pipelines import Compose
from mmseg.apis.inference import init_segmentor, inference_segmentor, LoadImage
from mmseg.ops import resize
from deploy_test import TfliteSegmentor
class LocalModelServer:
    """
    Encapsulates inference methods to serve model.
    Stores model to call inference on images.
    See test at main for use example.
    """
    def __init__(self, config_file: str, checkpoint_file: str):
        # Build the mmseg model once; it is pinned to the first CUDA device.
        self.model = init_segmentor(config_file, checkpoint_file, device='cuda:0')

    def infer(self, img: np.ndarray):
        """Run segmentation on *img* and return the inference result."""
        return inference_segmentor(self.model, img)

    def save_visualise(self, img: np.ndarray, result: np.ndarray, out_file: str):
        """Blend *result* over *img* at 50% opacity and write it to *out_file*."""
        self.model.show_result(img, result, out_file=out_file, opacity=0.5)

    def get_visualise(self, img: np.ndarray, result: np.ndarray):
        """Blend *result* over *img* at 50% opacity and return the image."""
        return self.model.show_result(img, result, opacity=0.5)
class TfliteModelServer:
    """Serves a TFLite-converted segmentation model through the mmseg test pipeline."""

    def __init__(self, config_file: str, checkpoint_file: str):
        self.cfg = mmcv.Config.fromfile(config_file)
        self.model = TfliteSegmentor(checkpoint_file, cfg=self.cfg, device_id=0)

    def infer(self, img: np.ndarray, opacity: float = 0.5):
        """Run the TFLite model on *img* and return the per-pixel label map,
        resized back to the input resolution.

        NOTE(review): *opacity* is unused in this method — confirm whether it
        can be dropped or was intended for visualisation.
        """
        # Reuse the configured test pipeline, swapping in direct image loading.
        test_pipeline = [LoadImage()] + self.cfg.data.test.pipeline[1:]
        test_pipeline = Compose(test_pipeline)
        data = dict(img=img)
        data = test_pipeline(data)
        data_img = collate([data['img']], samples_per_gpu=1)[0]
        # Resize to the fixed 384x512 input expected by the TFLite graph.
        data_img = resize(data_img, size=(384, 512), mode='bilinear',align_corners=True, warning=False)
        seg_pred = self.model.simple_test(data_img, data['img_metas'][0], is_resize=False)
        # Resize the prediction back to the original image size; nearest
        # interpolation keeps labels discrete.
        seg_pred = torch.from_numpy(seg_pred[0]).float().unsqueeze(0).unsqueeze(0)
        seg_pred = resize(seg_pred, size=tuple(img.shape[:2]), mode='nearest').squeeze().squeeze()
        seg = seg_pred.long().detach().cpu().numpy()
        return seg

    def save_visualise(self, img: np.ndarray, seg: np.ndarray, out_file: str, opacity: float = 0.5):
        """Overlay the palette-coloured *seg* on *img* and write the blend to *out_file*."""
        palette = np.array(self.cfg.PALETTE)
        color_seg = np.zeros((seg.shape[0], seg.shape[1], 3), dtype=np.uint8)
        for label, color in enumerate(palette):
            color_seg[seg == label, :] = color
        # Reverse the channel order before writing with mmcv.
        color_seg = color_seg[..., ::-1]
        vis = img * (1 - opacity) + color_seg * opacity
        vis = vis.astype(np.uint8)
        mmcv.imwrite(vis, out_file)
# for testing only
if __name__ == '__main__':
IMG_FILE='data/labelmefacade/tests/Picture2.png'
CONFIG_FILE='configs/ocrnet/ocrnet_hr18_512x1024_80k_labelmefacade.py'
CHECKPOINT_FILE='../bushierbrows.tflite'
if CHECKPOINT_FILE.endswith('tflite'):
server = TfliteModelServer(config_file=CONFIG_FILE, checkpoint_file=CHECKPOINT_FILE)
else:
server = LocalModelServer(config_file=CONFIG_FILE, checkpoint_file=CHECKPOINT_FILE)
img = mmcv.imread(IMG_FILE)
result = server.infer(img)
print(result)
out_file = IMG_FILE[:-4] + '_result.jpg'
server.save_visualise(img, result, out_file)
mmcv.imwrite(result[0], IMG_FILE[:-4] + '_result_bitmap.jpg') | StarcoderdataPython |
1600260 | <gh_stars>0
from .boilerplate import boilerplate
from .build import build
from .generate import generate
from .precompute import precompute
from .sources import sources
from .update import update
| StarcoderdataPython |
3218340 | """
Support for functionality to have conversations with AI-Speaker.
"""
import asyncio
import datetime
import json
import logging
import re
import subprocess
import warnings
import platform
from aiohttp.web import json_response
import async_timeout
import psutil
import requests
import voluptuous as vol
from homeassistant import core
from homeassistant.components import ais_cloud, ais_drives_service, conversation
import homeassistant.components.ais_dom.ais_global as ais_global
from homeassistant.components.blueprint import BlueprintInputs
from homeassistant.components.conversation.default_agent import (
DefaultAgent,
async_register,
)
import homeassistant.components.mqtt as mqtt
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_ENTITY_ID,
ATTR_UNIT_OF_MEASUREMENT,
SERVICE_CLOSE_COVER,
SERVICE_OPEN_COVER,
SERVICE_TOGGLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_DISARMING,
STATE_ALARM_TRIGGERED,
STATE_CLOSED,
STATE_CLOSING,
STATE_HOME,
STATE_IDLE,
STATE_LOCKED,
STATE_NOT_HOME,
STATE_OFF,
STATE_OK,
STATE_ON,
STATE_OPEN,
STATE_OPENING,
STATE_PAUSED,
STATE_PLAYING,
STATE_PROBLEM,
STATE_STANDBY,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
STATE_UNLOCKED,
)
from homeassistant.helpers import config_validation as cv, event, intent
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.loader import bind_hass
from homeassistant.util import dt as dt_util
from ..automation import AutomationConfig
from .ais_agent import AisAgent
aisCloudWS = None
ATTR_TEXT = "text"
DOMAIN = "ais_ai_service"
REGEX_TURN_COMMAND = re.compile(r"turn (?P<name>(?: |\w)+) (?P<command>\w+)")
SERVICE_PROCESS_SCHEMA = vol.Schema({vol.Required(ATTR_TEXT): cv.string})
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional("intents"): vol.Schema(
{cv.string: vol.All(cv.ensure_list, [cv.string])}
)
}
)
},
extra=vol.ALLOW_EXTRA,
)
INTENT_GET_TIME = "AisGetTime"
INTENT_GET_DATE = "AisGetDate"
INTENT_PLAY_RADIO = "AisPlayRadio"
INTENT_PLAY_PODCAST = "AisPlayPodcast"
INTENT_PLAY_YT_MUSIC = "AisPlayYtMusic"
INTENT_PLAY_SPOTIFY = "AisPlaySpotify"
INTENT_ASK_QUESTION = "AisAskQuestion"
INTENT_ASKWIKI_QUESTION = "AisAskWikiQuestion"
INTENT_CHANGE_CONTEXT = "AisChangeContext"
INTENT_GET_WEATHER = "AisGetWeather"
INTENT_GET_WEATHER_48 = "AisGetWeather48"
INTENT_STATUS = "AisStatusInfo"
INTENT_PERSON_STATUS = "AisPersonStatusInfo"
INTENT_TURN_ON = "AisTurnOn"
INTENT_TURN_OFF = "AisTurnOff"
INTENT_TOGGLE = "AisToggle"
INTENT_LAMPS_ON = "AisLampsOn"
INTENT_LAMPS_OFF = "AisLampsOff"
INTENT_SWITCHES_ON = "AisSwitchesOn"
INTENT_SWITCHES_OFF = "AisSwitchesOff"
INTENT_OPEN_COVER = "AisCoverOpen"
INTENT_CLOSE_COVER = "AisCoverClose"
INTENT_STOP = "AisStop"
INTENT_PLAY = "AisPlay"
INTENT_NEXT = "AisNext"
INTENT_PREV = "AisPrev"
INTENT_SCENE = "AisSceneActive"
INTENT_SAY_IT = "AisSayIt"
INTENT_CLIMATE_SET_TEMPERATURE = "AisClimateSetTemperature"
INTENT_CLIMATE_SET_PRESENT_MODE = "AisClimateSetPresentMode"
INTENT_CLIMATE_SET_ALL_ON = "AisClimateSetAllOn"
INTENT_CLIMATE_SET_ALL_OFF = "AisClimateSetAllOff"
INTENT_SPELL_STATUS = "AisSpellStatusInfo"
INTENT_RUN_AUTOMATION = "AisRunAutomation"
INTENT_ASK_GOOGLE = "AisAskGoogle"
REGEX_TYPE = type(re.compile(""))
_LOGGER = logging.getLogger(__name__)
GROUP_VIEWS = ["Pomoc", "Mój Dom", "Audio", "Ustawienia"]
CURR_GROUP_VIEW = None
# group entities in each group view, see main_ais_groups.yaml
GROUP_ENTITIES = []
CURR_GROUP = None
CURR_ENTITIE = None
CURR_ENTITIE_ENTERED = False
CURR_ENTITIE_SELECTED_ACTION = None
CURR_BUTTON_CODE = None
CURR_BUTTON_LONG_PRESS = False
CURR_REMOTE_MODE_IS_IN_AUDIO_MODE = False
CURR_ENTITIE_POSITION = None
PREV_CURR_GROUP = None
PREV_CURR_ENTITIE = None
ALL_SWITCHES = [
"input_boolean",
"automation",
"switch",
"light",
"media_player",
"script",
]
# ais-dom virtual keyboard
# kodowała to Asia Raczkowska w 2019 roku
VIRTUAL_KEYBOARD_MODE = [
"Litery",
"Wielkie litery",
"Cyfry",
"Znaki specjalne",
"Usuwanie",
]
CURR_VIRTUAL_KEYBOARD_MODE = None
VIRTUAL_KEYBOARD_LETTERS = [
"-",
"A",
"Ą",
"B",
"C",
"Ć",
"D",
"E",
"Ę",
"F",
"G",
"H",
"I",
"J",
"K",
"L",
"Ł",
"M",
"N",
"Ń",
"O",
"Ó",
"P",
"Q",
"R",
"S",
"Ś",
"T",
"U",
"V",
"W",
"X",
"Y",
"Z",
"Ź",
"Ż",
]
VIRTUAL_KEYBOARD_NUMBERS = ["-", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
VIRTUAL_KEYBOARD_SYMBOLS = [
"-",
" ",
"!",
'"',
"#",
"$",
"%",
"&",
"'",
"(",
")",
"*",
"+",
",",
"-",
"_",
".",
"/",
":",
";",
"<",
"=",
">",
"?",
"@",
"[",
"\\",
"]",
"^",
"{",
"|",
"}",
]
VIRTUAL_KEYBOARD_SYMBOLS_NAMES = [
"-",
"spacja",
"wykrzyknik",
"cudzysłów",
"hash",
"dolar",
"procent",
"symbol and",
"pojedynczy cudzysłów",
"nawias otwierający",
"nawias zamykający",
"gwiazdka",
"plus",
"przecinek",
"myślnik",
"podkreślenie dolne",
"kropka",
"ukośnik prawy",
"dwukropek",
"średnik",
"znak mniejszości",
"znak równości",
"znak większości",
"znak zapytania",
"małpa",
"kwadratowy nawias otwierający",
"ukośnik lewy",
"kwadratowy nawias zamykający",
"daszek",
"nawias klamrowy otwierający",
"kreska pionowa",
"nawias klamrowy zamykający",
]
VIRTUAL_KEYBOARD_DELETE = ["-", "ostatni znak", "ostatni wyraz", "całe pole"]
CURR_VIRTUAL_KEYBOARD_VALUE = None
CURR_VIRTUAL_KEY = None
# ais-dom virtual keyboard
G_INPUT_CURRENT_HOUR = None
G_INPUT_CURRENT_MINNUTE = None
def is_switch(entity_id):
    """Return True when *entity_id* belongs to a switch-like domain.

    An entity is switch-like when its id starts with one of the domain
    prefixes listed in ALL_SWITCHES.
    """
    # str.startswith accepts a tuple of prefixes, so a single call replaces
    # the manual loop (ALL_SWITCHES is a list, hence the tuple() conversion).
    return entity_id.startswith(tuple(ALL_SWITCHES))
@core.callback
@bind_hass
def async_register(hass, intent_type, utterances):
    """Register an intent.

    Registrations don't require conversations to be loaded. They will become
    active once the conversation component is loaded.
    """
    intents = hass.data.get(DOMAIN)
    if intents is None:
        intents = hass.data[DOMAIN] = {}
    matchers = intents.get(intent_type)
    if matchers is None:
        matchers = intents[intent_type] = []
    # Pre-compiled regex patterns are stored as-is; plain utterance strings
    # are turned into matchers first.
    matchers.extend(
        utterance if isinstance(utterance, REGEX_TYPE) else _create_matcher(utterance)
        for utterance in utterances
    )
def translate_state(state):
    """Translate a Home Assistant state object into a spoken Polish phrase.

    For binary sensors the wording depends on the sensor's ``device_class``
    attribute (e.g. a door that is "on" reads as "otwarte" / open). States
    without a dedicated translation are returned unchanged.

    :param state: an object exposing ``state``, ``domain`` and ``attributes``
    :return: the Polish description as a string
    """
    info_data = ""
    device_class = ""
    try:
        info_data = state.state
        domain = state.domain
        # Only binary sensors carry a device_class that changes the wording.
        if domain == "binary_sensor":
            device_class = state.attributes.get("device_class", "")
    except Exception as e:
        _LOGGER.error("translate_state: " + str(e))
    # Wording for binary sensors in the ON state, keyed by device_class.
    on_by_class = {
        "battery": "niski",
        "cold": "zimno",
        "connectivity": "podłączone",
        "door": "otwarte",
        "garage_door": "otwarte",
        "gas": "gaz wykryty",
        "heat": "gorąco",
        "light": "wykryto światło",
        "lock": "otwarte (odblokowane)",
        "moisture": "wilgoć wykrytą (mokra)",
        "motion": "wykrycie ruchu",
        "moving": "wykrycie ruchu",
        "occupancy": "zajęty",
        "opening": "otwarte",
        "plug": "podłączone",
        "power": "wykrycie zasilania",
        "presence": "obecny",
        "problem": "wykryty problem",
        "safety": "niebezpiecznie",
        "smoke": "dym wykrywany",
        "sound": "dźwięk wykryty",
        "vibration": "wykrycie wibracji",
        "window": "otwarte",
    }
    # Wording for binary sensors in the OFF state, keyed by device_class.
    off_by_class = {
        "battery": "normalny",
        "cold": "normalnie",
        "connectivity": "odłączone",
        "door": "zamknięte",
        "garage_door": "zamknięte",
        "gas": "brak gazu (czysto)",
        "heat": "normalnie",
        "light": "brak światła",
        "lock": "zamknięte (zablokowane)",
        # NOTE: the leading space is kept from the original wording.
        "moisture": " brak wilgoci (sucha)",
        "motion": "brak ruchu",
        "moving": "brak ruchu",
        "occupancy": "wolne",
        "opening": "zamknięte",
        "plug": "odłączone",
        "power": "brak zasilania",
        "presence": "nieobecny",
        "problem": "brak problemu (OK)",
        "safety": "bezpiecznie",
        "smoke": "brak dymu",
        "sound": "brak dźwięku",
        "vibration": "brak wibracji",
        "window": "zamknięte",
    }
    # Wording for every other recognized state value.
    generic_states = {
        STATE_HOME: "w domu",
        STATE_NOT_HOME: "poza domem",
        STATE_UNKNOWN: "status nieznany",
        STATE_OPEN: "otwarty",
        STATE_OPENING: "otwieranie",
        STATE_CLOSED: "zamknięty",
        STATE_CLOSING: "zamykanie",
        STATE_PAUSED: "pauza",
        STATE_PLAYING: "odtwarzanie",
        STATE_IDLE: "status bezczynny",
        STATE_STANDBY: "status bezczynny",
        STATE_ALARM_DISARMED: "status rozbrojony",
        STATE_ALARM_ARMED_HOME: "status uzbrojony w domu",
        STATE_ALARM_ARMED_AWAY: "status uzbrojony poza domem",
        STATE_ALARM_ARMED_NIGHT: "status uzbrojony noc",
        STATE_ALARM_ARMED_CUSTOM_BYPASS: "status uzbrojony własny",
        STATE_ALARM_ARMING: "alarm uzbrajanie",
        STATE_ALARM_DISARMING: "alarm rozbrajanie",
        STATE_ALARM_TRIGGERED: "alarm powiadomiony",
        STATE_LOCKED: "zamknięty",
        STATE_UNLOCKED: "otwarty",
        STATE_UNAVAILABLE: "niedostępny",
        STATE_OK: "ok",
        STATE_PROBLEM: "problem",
        "above_horizon": "powyżej horyzontu",
        "below_horizon": "poniżej horyzontu",
        "heat": "grzanie",
        "cleaning": "sprzątanie",
        "docked": "w stacji dokującej",
        "returning": "powrót do stacji dokującej",
    }
    if info_data == STATE_ON:
        return on_by_class.get(device_class, "włączone")
    if info_data == STATE_OFF:
        return off_by_class.get(device_class, "wyłączone")
    # Unrecognized states are passed through unchanged.
    return generic_states.get(info_data, info_data)
def get_next(arra, curr):
    """Return the item that follows *curr* in *arra*, wrapping to the first item.

    Entries equal to the empty option (ais_global.G_EMPTY_OPTION) are skipped
    entirely. When *curr* is absent or is the last real item, the first real
    item is returned; None is returned for an effectively empty list.
    """
    items = [a for a in arra if a != ais_global.G_EMPTY_OPTION]
    if not items:
        return None
    if curr in items:
        pos = items.index(curr)
        if pos + 1 < len(items):
            return items[pos + 1]
    # curr missing or last -> wrap around to the first real item.
    return items[0]
def get_prev(arra, curr):
    """Return the item that precedes *curr* in *arra*, wrapping to the last item.

    Entries equal to the empty option (ais_global.G_EMPTY_OPTION) are skipped
    entirely. When *curr* is absent or is the first real item, the last real
    item is returned; None is returned for an effectively empty list.
    """
    items = [a for a in arra if a != ais_global.G_EMPTY_OPTION]
    if not items:
        return None
    # Index 0 (curr first, or curr missing) yields -1, i.e. the last item —
    # Python's negative indexing gives the wrap-around for free.
    pos = items.index(curr) if curr in items else 0
    return items[pos - 1]
# Group views: Dom -> Audio -> Ustawienia -> Pomoc
def get_curr_group_view():
if CURR_GROUP_VIEW is None:
return GROUP_VIEWS[0]
return CURR_GROUP_VIEW
def say_curr_group_view(hass):
_say_it(hass, get_curr_group_view())
def set_curr_group_view():
    """Focus the current menu group view and clear group/entity selection."""
    global CURR_GROUP_VIEW
    global CURR_GROUP
    global CURR_ENTITIE
    global CURR_ENTITIE_ENTERED
    global CURR_ENTITIE_POSITION
    # dropping group/entity focus resets navigation to the view level
    CURR_GROUP = CURR_ENTITIE = CURR_ENTITIE_POSITION = None
    CURR_ENTITIE_ENTERED = False
    CURR_GROUP_VIEW = get_curr_group_view()
def set_next_group_view():
    """Move the focus to the next menu group view (wraps around)."""
    global CURR_GROUP_VIEW
    current_view = get_curr_group_view()
    CURR_GROUP_VIEW = get_next(GROUP_VIEWS, current_view)
    # clear group/entity focus for the freshly selected view
    set_curr_group_view()
def set_prev_group_view():
    """Move the focus to the previous menu group view (wraps around)."""
    global CURR_GROUP_VIEW
    current_view = get_curr_group_view()
    CURR_GROUP_VIEW = get_prev(GROUP_VIEWS, current_view)
    # clear group/entity focus for the freshly selected view
    set_curr_group_view()
# virtual keybord
# Group views: Litery -> Wielkie litery -> Cyfry -> Znaki specjalne -> Usuwanie
def get_curr_virtual_keyboard_mode():
    """Return the active virtual keyboard mode (first mode by default)."""
    if CURR_VIRTUAL_KEYBOARD_MODE is not None:
        return CURR_VIRTUAL_KEYBOARD_MODE
    return VIRTUAL_KEYBOARD_MODE[0]
def set_next_virtual_keyboard_mode():
    """Switch the virtual keyboard to the next mode and drop the focused key."""
    global CURR_VIRTUAL_KEYBOARD_MODE
    global CURR_VIRTUAL_KEY
    CURR_VIRTUAL_KEY = None
    current_mode = get_curr_virtual_keyboard_mode()
    CURR_VIRTUAL_KEYBOARD_MODE = get_next(VIRTUAL_KEYBOARD_MODE, current_mode)
def set_prev_virtual_keyboard_mode():
    """Switch the virtual keyboard to the previous mode and drop the focused key."""
    global CURR_VIRTUAL_KEYBOARD_MODE
    global CURR_VIRTUAL_KEY
    CURR_VIRTUAL_KEY = None
    current_mode = get_curr_virtual_keyboard_mode()
    CURR_VIRTUAL_KEYBOARD_MODE = get_prev(VIRTUAL_KEYBOARD_MODE, current_mode)
def say_curr_virtual_keyboard_mode(hass):
    """Read out the active virtual keyboard mode."""
    mode = get_curr_virtual_keyboard_mode()
    _say_it(hass, mode)
def get_curr_virtual_key():
    """Return the focused key, or the first key of the active keyboard mode."""
    if CURR_VIRTUAL_KEY is not None:
        return str(CURR_VIRTUAL_KEY)
    # map each keyboard mode to its key list
    keys_for_mode = {
        "Litery": VIRTUAL_KEYBOARD_LETTERS,
        "Wielkie litery": VIRTUAL_KEYBOARD_LETTERS,
        "Cyfry": VIRTUAL_KEYBOARD_NUMBERS,
        "Znaki specjalne": VIRTUAL_KEYBOARD_SYMBOLS,
        "Usuwanie": VIRTUAL_KEYBOARD_DELETE,
    }
    keys = keys_for_mode.get(get_curr_virtual_keyboard_mode())
    # unknown mode yields None, as before
    return keys[0] if keys is not None else None
def set_next_virtual_key():
    """Advance the focused key within the active keyboard mode (wraps)."""
    global CURR_VIRTUAL_KEY
    keys_for_mode = {
        "Litery": VIRTUAL_KEYBOARD_LETTERS,
        "Wielkie litery": VIRTUAL_KEYBOARD_LETTERS,
        "Cyfry": VIRTUAL_KEYBOARD_NUMBERS,
        "Znaki specjalne": VIRTUAL_KEYBOARD_SYMBOLS,
        "Usuwanie": VIRTUAL_KEYBOARD_DELETE,
    }
    keys = keys_for_mode.get(get_curr_virtual_keyboard_mode())
    if keys is not None:
        # unknown mode leaves the focused key untouched, as before
        CURR_VIRTUAL_KEY = get_next(keys, get_curr_virtual_key())
def set_prev_virtual_key():
    """Move the focused key backwards within the active keyboard mode (wraps)."""
    global CURR_VIRTUAL_KEY
    keys_for_mode = {
        "Litery": VIRTUAL_KEYBOARD_LETTERS,
        "Wielkie litery": VIRTUAL_KEYBOARD_LETTERS,
        "Cyfry": VIRTUAL_KEYBOARD_NUMBERS,
        "Znaki specjalne": VIRTUAL_KEYBOARD_SYMBOLS,
        "Usuwanie": VIRTUAL_KEYBOARD_DELETE,
    }
    keys = keys_for_mode.get(get_curr_virtual_keyboard_mode())
    if keys is not None:
        # unknown mode leaves the focused key untouched, as before
        CURR_VIRTUAL_KEY = get_prev(keys, get_curr_virtual_key())
def say_curr_virtual_key(hass):
    """Read out the focused virtual keyboard key."""
    key = get_curr_virtual_key()
    mode = get_curr_virtual_keyboard_mode()
    if mode == "Litery":
        text = key.lower()
    elif mode == "Znaki specjalne":
        # symbols are spoken by their Polish names, not the character itself
        symbol_idx = VIRTUAL_KEYBOARD_SYMBOLS.index(key)
        text = VIRTUAL_KEYBOARD_SYMBOLS_NAMES[symbol_idx]
    elif mode in ("Wielkie litery", "Cyfry", "Usuwanie"):
        text = key
    else:
        text = ""
    _say_it(hass, text)
def reset_virtual_keyboard(hass):
    """Reset keyboard mode/key/value and blank the edited input_text field."""
    global CURR_VIRTUAL_KEYBOARD_MODE
    global CURR_VIRTUAL_KEY
    global CURR_VIRTUAL_KEYBOARD_VALUE
    CURR_VIRTUAL_KEYBOARD_MODE = CURR_VIRTUAL_KEY = CURR_VIRTUAL_KEYBOARD_VALUE = None
    # clear the text stored in the currently focused input_text entity
    hass.services.call(
        "input_text", "set_value", {"entity_id": CURR_ENTITIE, "value": ""}
    )
def get_hour_to_say(h, m):
    """Return a Polish spoken-time message for hour *h* and minute *m*."""
    from datetime import time

    import babel.dates

    spoken = babel.dates.format_time(time(h, m), format="short", locale="pl")
    return "godzina: " + spoken
def set_time_hour_up(hass, entity_id):
    """Bump the edited hour by one (wrapping at 23) and read the time out."""
    global G_INPUT_CURRENT_HOUR
    global G_INPUT_CURRENT_MINNUTE
    if G_INPUT_CURRENT_HOUR is None:
        # first press: seed the editing buffer from the entity state
        time_attr = hass.states.get(entity_id).attributes
        G_INPUT_CURRENT_HOUR = time_attr.get("hour", 0)
        G_INPUT_CURRENT_MINNUTE = time_attr.get("minute", 0)
    G_INPUT_CURRENT_HOUR = 0 if G_INPUT_CURRENT_HOUR == 23 else G_INPUT_CURRENT_HOUR + 1
    _say_it(hass, get_hour_to_say(G_INPUT_CURRENT_HOUR, G_INPUT_CURRENT_MINNUTE))
def set_time_hour_down(hass, entity_id):
    """Lower the edited hour by one (wrapping at 0) and read the time out."""
    global G_INPUT_CURRENT_HOUR
    global G_INPUT_CURRENT_MINNUTE
    if G_INPUT_CURRENT_HOUR is None:
        # first press: seed the editing buffer from the entity state
        time_attr = hass.states.get(entity_id).attributes
        G_INPUT_CURRENT_HOUR = time_attr.get("hour", 0)
        G_INPUT_CURRENT_MINNUTE = time_attr.get("minute", 0)
    G_INPUT_CURRENT_HOUR = 23 if G_INPUT_CURRENT_HOUR == 0 else G_INPUT_CURRENT_HOUR - 1
    _say_it(hass, get_hour_to_say(G_INPUT_CURRENT_HOUR, G_INPUT_CURRENT_MINNUTE))
def set_time_minute_up(hass, entity_id):
    """Bump the edited minute by one (wrapping at 59) and read the time out."""
    global G_INPUT_CURRENT_HOUR
    global G_INPUT_CURRENT_MINNUTE
    if G_INPUT_CURRENT_HOUR is None:
        # first press: seed the editing buffer from the entity state
        time_attr = hass.states.get(entity_id).attributes
        G_INPUT_CURRENT_HOUR = time_attr.get("hour", 0)
        G_INPUT_CURRENT_MINNUTE = time_attr.get("minute", 0)
    G_INPUT_CURRENT_MINNUTE = (
        0 if G_INPUT_CURRENT_MINNUTE == 59 else G_INPUT_CURRENT_MINNUTE + 1
    )
    _say_it(hass, get_hour_to_say(G_INPUT_CURRENT_HOUR, G_INPUT_CURRENT_MINNUTE))
def set_time_minute_down(hass, entity_id):
    """Lower the edited minute by one (wrapping at 0) and read the time out."""
    global G_INPUT_CURRENT_HOUR
    global G_INPUT_CURRENT_MINNUTE
    if G_INPUT_CURRENT_HOUR is None:
        # first press: seed the editing buffer from the entity state
        time_attr = hass.states.get(entity_id).attributes
        G_INPUT_CURRENT_HOUR = time_attr.get("hour", 0)
        G_INPUT_CURRENT_MINNUTE = time_attr.get("minute", 0)
    G_INPUT_CURRENT_MINNUTE = (
        59 if G_INPUT_CURRENT_MINNUTE == 0 else G_INPUT_CURRENT_MINNUTE - 1
    )
    _say_it(hass, get_hour_to_say(G_INPUT_CURRENT_HOUR, G_INPUT_CURRENT_MINNUTE))
def remove_selected_action(key_code):
    """Drop the pending remote action unless *key_code* keeps it alive."""
    global CURR_ENTITIE_SELECTED_ACTION
    dpad_keys = (19, 20, 21, 22, 23)
    if key_code not in dpad_keys:
        # any non-dpad key cancels the pending action
        CURR_ENTITIE_SELECTED_ACTION = None
        return
    # the shuffle action survives only up/down/OK presses
    is_shuffle = CURR_ENTITIE_SELECTED_ACTION == ais_global.G_ACTION_SET_AUDIO_SHUFFLE
    if is_shuffle and key_code not in (19, 20, 23):
        CURR_ENTITIE_SELECTED_ACTION = None
# Groups in Groups views
def get_curr_group():
    """Return the focused group, defaulting to the first one in the view."""
    global CURR_GROUP
    if CURR_GROUP is None:
        view = get_curr_group_view()
        CURR_GROUP = next(
            (g for g in GROUP_ENTITIES if g["remote_group_view"] == view), None
        )
    return CURR_GROUP
def get_group_from_group(entity_id):
    """Focus the group with *entity_id*; keep the previous focus if not found."""
    global CURR_GROUP
    match = next((g for g in GROUP_ENTITIES if g["entity_id"] == entity_id), None)
    if match is not None:
        CURR_GROUP = match
    return CURR_GROUP
def get_curr_group_idx():
    """Return the index of the focused group in GROUP_ENTITIES."""
    curr_entity_id = get_curr_group()["entity_id"]
    for idx, group in enumerate(GROUP_ENTITIES):
        if group["entity_id"] == curr_entity_id:
            return idx
    # not found: len(GROUP_ENTITIES), matching the original fall-through value
    return len(GROUP_ENTITIES)
def say_curr_group(hass):
    """Read out the focused group's friendly name."""
    group = get_curr_group()
    _say_it(hass, group["friendly_name"])
def set_bookmarks_curr_group(hass):
    """Move the remote focus to the bookmarks group (group.ais_bookmarks)."""
    # plain iteration: the enumerate index in the original was never used
    for g in GROUP_ENTITIES:
        if g["entity_id"] == "group.ais_bookmarks":
            set_curr_group(hass, g)
            return
def set_favorites_curr_group(hass):
    """Move the remote focus to the favorites group (group.ais_favorites)."""
    # plain iteration: the enumerate index in the original was never used
    for g in GROUP_ENTITIES:
        if g["entity_id"] == "group.ais_favorites":
            set_curr_group(hass, g)
            return
def set_curr_group(hass, group):
    """Focus *group* (or the view default when None) and clear entity focus."""
    global CURR_GROUP_VIEW
    global CURR_GROUP
    global CURR_ENTITIE
    global CURR_ENTITIE_ENTERED
    global CURR_ENTITIE_POSITION
    # leaving the entity level: nothing is selected or entered any more
    CURR_ENTITIE = CURR_ENTITIE_POSITION = None
    CURR_ENTITIE_ENTERED = False
    if group is None:
        CURR_GROUP = get_curr_group()
        hass.states.async_set("binary_sensor.selected_entity", CURR_GROUP["entity_id"])
    else:
        CURR_GROUP_VIEW = group["remote_group_view"]
        CURR_GROUP = group
    # keep the mega audio player display context in sync
    audio_groups = (
        "group.radio_player",
        "group.podcast_player",
        "group.music_player",
        "group.ais_bookmarks",
        "group.ais_rss_news_remote",
        "group.local_audio",
        "sensor.ais_drives",
        "group.ais_favorites",
        "group.audiobooks_player",
    )
    if CURR_GROUP["entity_id"] in audio_groups:
        hass.states.async_set(
            "sensor.ais_player_mode", CURR_GROUP["entity_id"].replace("group.", "")
        )
def set_next_group(hass):
    """Move the focus to the next group inside the focused view (wraps)."""
    global CURR_GROUP
    first_match = None
    next_match = None
    found_current = False
    for candidate in GROUP_ENTITIES:
        if candidate["remote_group_view"] != get_curr_group_view():
            continue
        # the group right after the current one becomes the next focus
        if found_current and next_match is None:
            next_match = candidate
        if first_match is None:
            first_match = candidate
        if CURR_GROUP["entity_id"] == candidate["entity_id"]:
            found_current = True
    CURR_GROUP = next_match if next_match is not None else first_match
    # re-apply the selection to reset entity focus
    set_curr_group(hass, CURR_GROUP)
def set_prev_group(hass):
    """Move the focus to the previous group inside the focused view (wraps)."""
    global CURR_GROUP
    last_match = None
    prev_match = None
    found_current = False
    for candidate in GROUP_ENTITIES:
        if candidate["remote_group_view"] != get_curr_group_view():
            continue
        # remember the last group so we can wrap from the first one
        last_match = candidate
        if CURR_GROUP["entity_id"] == candidate["entity_id"]:
            found_current = True
        if not found_current:
            prev_match = candidate
    CURR_GROUP = prev_match if prev_match is not None else last_match
    # re-apply the selection to reset entity focus
    set_curr_group(hass, CURR_GROUP)
# entity in group
def get_curr_entity():
    """Return the focused entity, defaulting to the group's first entity."""
    global CURR_ENTITIE
    if CURR_ENTITIE is None:
        entities = GROUP_ENTITIES[get_curr_group_idx()]["entities"]
        if entities:
            CURR_ENTITIE = entities[0]
    return CURR_ENTITIE
def get_curr_entity_idx():
    """Return the index of the focused entity in its group (None if absent)."""
    target = get_curr_entity()
    for idx, item in enumerate(GROUP_ENTITIES[get_curr_group_idx()]["entities"]):
        if item == target:
            return idx
def set_curr_entity(hass, entity):
    """Focus *entity* (or the group default when None) and publish the selection."""
    global CURR_ENTITIE
    global CURR_ENTITIE_POSITION
    CURR_ENTITIE = get_curr_entity() if entity is None else entity
    CURR_ENTITIE_POSITION = None
    hass.states.async_set("binary_sensor.selected_entity", CURR_ENTITIE)
def set_next_entity(hass):
    """Focus the next entity in the group, with special music-search routing."""
    global CURR_ENTITIE
    # fixed transitions used by the music search mini-flow
    music_flow = {
        "input_text.ais_music_query": "sensor.youtubelist",
        "input_text.ais_spotify_query": "sensor.spotifysearchlist",
        "sensor.youtubelist": "input_select.ais_music_service",
        "sensor.spotifysearchlist": "sensor.spotifylist",
        "sensor.spotifylist": "input_select.ais_music_service",
    }
    if CURR_ENTITIE == "input_select.ais_music_service":
        # the query field depends on the selected music service
        state = hass.states.get("input_select.ais_music_service")
        if state.state == "Spotify":
            CURR_ENTITIE = "input_text.ais_spotify_query"
        else:
            CURR_ENTITIE = "input_text.ais_music_query"
    elif CURR_ENTITIE in music_flow:
        CURR_ENTITIE = music_flow[CURR_ENTITIE]
    else:
        # regular case: step to the next entity in the group, wrapping
        group_idx = get_curr_group_idx()
        entities = GROUP_ENTITIES[group_idx]["entities"]
        entity_idx = get_curr_entity_idx()
        entity_idx = 0 if entity_idx + 1 == len(entities) else entity_idx + 1
        CURR_ENTITIE = entities[entity_idx]
    # reset position/selection bookkeeping and announce the new focus
    set_curr_entity(hass, None)
    say_curr_entity(hass)
def set_prev_entity(hass):
    """Focus the previous entity in the group, with special music-search routing."""
    global CURR_ENTITIE
    # fixed reverse transitions used by the music search mini-flow
    music_flow = {
        "sensor.youtubelist": "input_text.ais_music_query",
        "input_text.ais_music_query": "input_select.ais_music_service",
        "sensor.spotifylist": "sensor.spotifysearchlist",
        "sensor.spotifysearchlist": "input_text.ais_spotify_query",
        "input_text.ais_spotify_query": "input_select.ais_music_service",
    }
    if CURR_ENTITIE == "input_select.ais_music_service":
        # the previous item depends on the selected music service
        state = hass.states.get("input_select.ais_music_service")
        if state.state == "Spotify":
            CURR_ENTITIE = "sensor.spotifylist"
        else:
            CURR_ENTITIE = "sensor.youtubelist"
    elif CURR_ENTITIE in music_flow:
        CURR_ENTITIE = music_flow[CURR_ENTITIE]
    else:
        # regular case: step back in the group, wrapping to the last entity
        entities = GROUP_ENTITIES[get_curr_group_idx()]["entities"]
        idx = get_curr_entity_idx()
        idx = len(entities) - 1 if idx == 0 else idx - 1
        CURR_ENTITIE = entities[idx]
    # reset position/selection bookkeeping and announce the new focus
    set_curr_entity(hass, None)
    say_curr_entity(hass)
def say_curr_entity(hass):
    """Read out the focused entity, with tailored messages for special entities.

    Fixes vs. the previous version: the input_datetime hour default was the
    string "00" (which would crash datetime.time() in get_hour_to_say), and
    two duplicated, unreachable elif branches in the sensor-list section
    were removed.
    """
    # check if we have a selected item
    entity_id = get_curr_entity()
    if entity_id is None:
        # empty group - explain what to do for the well-known groups
        if CURR_GROUP["entity_id"] == "group.all_ais_persons":
            _say_it(
                hass,
                "Brak informacji o osobach. W konfiguracji możesz dodać osoby, "
                "oraz urządzenia raportujące lokalizację osób.",
            )
        elif CURR_GROUP["entity_id"] == "group.all_automations":
            _say_it(
                hass,
                "Brak zdefiniowanych automatyzacji. Dodaj automatyzację w konfiguracji.",
            )
        else:
            _say_it(hass, "Brak pozycji")
        return
    state = hass.states.get(entity_id)
    if state is None:
        _say_it(hass, "Brak pozycji")
        return
    text = state.attributes.get("text")
    info_name = state.attributes.get("friendly_name")
    info_data = state.state
    info_unit = state.attributes.get("unit_of_measurement")
    if not text:
        text = ""
    # handle special cases...
    if entity_id == "sensor.aisknowledgeanswer":
        _say_it(hass, "Odpowiedź: " + text)
        return
    elif entity_id == "sensor.ais_drives":
        state = hass.states.get("sensor.ais_drives")
        if state.state is None or state.state == "":
            _say_it(hass, "dysk wewnętrzny")
        else:
            attr = state.attributes
            files = attr.get("files", [])
            info = ais_drives_service.get_pozycji_variety(len(files))
            _say_it(hass, info)
        return
    elif entity_id == "sensor.ais_secure_android_id_dom":
        _say_it(
            hass, info_name + " " + info_data + ". Aby przeliterować naciśnij 'OK'."
        )
        return
    elif entity_id == "sensor.ais_connect_iot_device_info":
        info = (
            "Instrukcja. Podłącz urządzenie do prądu. Upewnij się, że urządzenie znajduje się w zasięgu routera "
            "WiFi oraz bramki AIS dom. "
            "Uruchom tryb parowania, naciskając 4 razy szybko przycisk na urządzeniu, "
            "następnie poczekaj aż dioda na urządzeniu, zacznie pulsować. Gdy urządzenie jest w trybie parowania, "
            "to naciśnij OK na pilocie, aby rozpocząć wyszukiwanie urządzenia."
        )
        _say_it(hass, info)
        return
    elif entity_id == "input_boolean.ais_quiet_mode":
        state = hass.states.get("input_boolean.ais_quiet_mode").state
        info_value = " wyłączony. Naciśnij OK by włączyć. "
        if state == "on":
            info_value = " włączony. Naciśnij OK by wyłączyć. "
        _say_it(
            hass,
            info_name
            + info_value
            + " Gdy tryb nocny jest włączony to asystent w wybranych godzinach "
            "automatycznie zredukuje głośność odtwarzania audio.",
        )
        return
    elif entity_id == "input_boolean.ais_auto_update":
        state = hass.states.get("input_boolean.ais_auto_update").state
        info_value = (
            "Automatyczne aktualizacje wyłączone. Aktualizujesz system samodzielnie w "
            "dogodnym dla Ciebie czasie. Naciśnij OK by włączyć aktualizacje automatyczne."
        )
        if state == "on":
            info_value = (
                "Automatyczne aktualizacje włączone. Codziennie sprawdzimy i automatycznie "
                "zainstalujemy dostępne aktualizacje składowych systemu. "
                "Naciśnij OK by wyłączyć aktualizacje automatyczne. "
            )
        _say_it(hass, info_value)
        return
    elif entity_id == "input_select.ais_bookmark_last_played":
        _say_it(hass, info_name + " " + info_data.replace("Local;", ""))
        return
    elif entity_id == "sensor.ais_wifi_service_current_network_info":
        state = hass.states.get("sensor.ais_wifi_service_current_network_info")
        attr = state.attributes
        info = attr.get("description", "brak informacji o połączeniu")
        _say_it(hass, "Prędkość połączenia " + info)
        return
    elif entity_id.startswith("script."):
        _say_it(hass, info_name + " Naciśnij OK by uruchomić.")
        return
    elif entity_id.startswith("automation."):
        _say_it(hass, info_name + " Naciśnij OK by uruchomić.")
        return
    elif entity_id.startswith("input_datetime."):
        state = hass.states.get(entity_id)
        attr = state.attributes
        info_name = info_name + "; "
        # BUGFIX: hour default was the string "00"; use 0 like the minute default
        info_time = get_hour_to_say(attr.get("hour", 0), attr.get("minute", 0))
        _say_it(hass, info_name + info_time + ". Naciśnij OK by zmienić godzinę.")
        return
    elif entity_id.startswith("input_text."):
        if CURR_BUTTON_CODE == 4:
            if CURR_VIRTUAL_KEYBOARD_VALUE is None:
                _say_it(hass, "Nic nie wpisałeś")
            else:
                _say_it(hass, "Wpisałeś " + CURR_VIRTUAL_KEYBOARD_VALUE)
        else:
            _say_it(
                hass,
                info_name
                + " "
                + info_data
                + ". Naciśnij OK aby wpisać lub dyktować tekst",
            )
        return
    elif entity_id.startswith("input_select."):
        if CURR_BUTTON_CODE == 4:
            if info_data == ais_global.G_EMPTY_OPTION:
                _say_it(hass, "Brak wyboru")
            else:
                _say_it(hass, "Wybrałeś " + info_data)
        else:
            if info_data != ais_global.G_EMPTY_OPTION:
                _say_it(hass, info_name + " " + info_data + ". Naciśnij OK by zmienić.")
            else:
                _say_it(hass, info_name + " " + info_data + ". Naciśnij OK by wybrać.")
        return
    elif entity_id.startswith("sensor.") and entity_id.endswith("list"):
        # dynamic audio/news lists: the state is the selected index (-1 = none)
        info_name = ""
        if int(info_data) != -1:
            try:
                info_name = hass.states.get(entity_id).attributes.get(int(info_data))[
                    "title"
                ]
            except Exception:
                info_name = ""
        if CURR_BUTTON_CODE == 4:
            if int(info_data) == -1:
                _say_it(hass, "Brak wybranej pozycji ")
            else:
                _say_it(hass, "Lista na pozycji " + info_name)
        else:
            if entity_id == "sensor.radiolist":
                info = "Lista stacji radiowych "
            elif entity_id == "sensor.podcastlist":
                info = "Lista odcinków "
            elif entity_id == "sensor.spotifylist":
                info = "Lista utworów ze Spotify "
            elif entity_id == "sensor.youtubelist":
                info = "Lista utworów z YouTube "
            elif entity_id == "sensor.rssnewslist":
                info = "Lista artykułów "
            elif entity_id == "sensor.aisbookmarkslist":
                info = "Lista zakładek "
            elif entity_id == "sensor.aisfavoriteslist":
                info = "Lista ulubionych "
            elif entity_id == "sensor.podcastnamelist":
                info = "Lista audycji "
            # NOTE: duplicated (unreachable) aisfavoriteslist/aisbookmarkslist
            # branches that used to sit here were removed
            elif entity_id == "sensor.audiobookslist":
                info = "Lista książek "
            elif entity_id == "sensor.audiobookschapterslist":
                info = "Lista rozdziałów "
            else:
                info = "Pozycja "
            if CURR_ENTITIE_ENTERED:
                additional_info = ". Wybierz pozycję."
            elif int(info_data) != -1:
                additional_info = ". Naciśnij OK by zmienić."
            else:
                additional_info = ". Naciśnij OK by wybrać."
            _say_it(hass, info + info_name + additional_info)
        return
    # normal case
    # decode None
    if not info_name:
        info_name = ""
    info_data = translate_state(state)
    if not info_unit:
        info_unit = ""
    info = f"{info_name} {info_data} {info_unit}"
    _say_it(hass, info)
def get_curent_position(hass):
    """Return the focused entity's position, reading it from state when unset."""
    # NOTE: the misspelled name ("curent") is kept - it is the public API
    global CURR_ENTITIE_POSITION
    if CURR_ENTITIE_POSITION is not None:
        return CURR_ENTITIE_POSITION
    CURR_ENTITIE_POSITION = hass.states.get(CURR_ENTITIE).state
    return CURR_ENTITIE_POSITION
def commit_current_position(hass):
    """Commit the focused entity's pending position/value.

    Depending on the entity domain this selects an input_select option,
    sets an input_number/input_datetime value, or starts playback of the
    selected list item, announcing the result where appropriate.

    BUGFIX: the podcast-list branch previously compared CURR_ENTITIE with
    "sensor." - a dead branch (this code path requires the id to end with
    "list"); the intended entity is "sensor.podcastlist".
    """
    global CURR_ENTITIE_ENTERED
    if CURR_ENTITIE.startswith("input_select."):
        # force the change - to trigger the state change for automation
        position = get_curent_position(hass)
        state = hass.states.get(CURR_ENTITIE).state
        if position == state:
            # same option re-selected: refresh the dependent cloud lists
            if CURR_ENTITIE == "input_select.radio_type":
                hass.services.call(
                    "ais_cloud", "get_radio_names", {"radio_type": state}
                )
                return
            elif CURR_ENTITIE == "input_select.rss_news_category":
                hass.services.call(
                    "ais_cloud", "get_rss_news_channels", {"rss_news_category": state}
                )
                return
            elif CURR_ENTITIE == "input_select.rss_news_channel":
                hass.services.call(
                    "ais_cloud", "get_rss_news_items", {"rss_news_channel": state}
                )
                return
            elif CURR_ENTITIE == "input_select.podcast_type":
                hass.services.call(
                    "ais_cloud", "get_podcast_names", {"podcast_type": state}
                )
                return
        hass.services.call(
            "input_select",
            "select_option",
            {"entity_id": CURR_ENTITIE, "option": position},
        )
    elif CURR_ENTITIE.startswith("input_number."):
        hass.services.call(
            "input_number",
            "set_value",
            {"entity_id": CURR_ENTITIE, "value": get_curent_position(hass)},
        )
    elif CURR_ENTITIE.startswith("input_datetime."):
        hass.services.call(
            "input_datetime",
            "set_datetime",
            {
                "entity_id": CURR_ENTITIE,
                "time": str(G_INPUT_CURRENT_HOUR) + ":" + str(G_INPUT_CURRENT_MINNUTE),
            },
        )
        text = get_hour_to_say(G_INPUT_CURRENT_HOUR, G_INPUT_CURRENT_MINNUTE)
        _say_it(hass, "wpisana " + text)
        CURR_ENTITIE_ENTERED = False
    elif CURR_ENTITIE.startswith("sensor.") and CURR_ENTITIE.endswith("list"):
        # play/read the selected list item via the matching media source
        idx = hass.states.get(CURR_ENTITIE).state
        media_sources = {
            "sensor.radiolist": ais_global.G_AN_RADIO,
            "sensor.podcastlist": ais_global.G_AN_PODCAST,
            "sensor.spotifysearchlist": ais_global.G_AN_SPOTIFY_SEARCH,
            "sensor.spotifylist": ais_global.G_AN_SPOTIFY,
            "sensor.youtubelist": ais_global.G_AN_MUSIC,
            "sensor.rssnewslist": ais_global.G_AN_NEWS,
            "sensor.audiobookslist": ais_global.G_AN_AUDIOBOOK,
            "sensor.audiobookschapterslist": ais_global.G_AN_AUDIOBOOK_CHAPTER,
            "sensor.aisbookmarkslist": ais_global.G_AN_BOOKMARK,
            "sensor.aisfavoriteslist": ais_global.G_AN_FAVORITE,
            "sensor.podcastnamelist": ais_global.G_AN_PODCAST_NAME,
        }
        if CURR_ENTITIE in media_sources:
            hass.services.call(
                "ais_cloud",
                "play_audio",
                {"id": idx, "media_source": media_sources[CURR_ENTITIE]},
            )
    if CURR_ENTITIE == "input_select.ais_android_wifi_network":
        _say_it(hass, "wybrano wifi: " + get_curent_position(hass).split(";")[0])
    elif CURR_ENTITIE == "input_select.ais_music_service":
        _say_it(
            hass,
            "Wybrano " + position + ", napisz lub powiedz jakiej muzyki mam wyszukać",
        )
        state = hass.states.get(CURR_ENTITIE)
        # NOTE(review): if the service is neither YouTube nor Spotify the
        # query_input name below would be unbound - presumably those are the
        # only two options; confirm against the input_select definition
        if state.state == "YouTube":
            query_input = "input_text.ais_music_query"
        elif state.state == "Spotify":
            query_input = "input_text.ais_spotify_query"
        hass.services.call(
            "input_text", "set_value", {"entity_id": query_input, "value": ""}
        )
        reset_virtual_keyboard(hass)
        set_curr_entity(hass, query_input)
    else:
        _beep_it(hass, 33)
    # TODO - run the script for the item,
    # the automation on state should be executed only from app not from remote
# the automation on state should be executed only from app not from remote
def set_next_position(hass):
    """Step the focused entity's position forward and read the new value out."""
    global CURR_ENTITIE_POSITION
    CURR_ENTITIE_POSITION = get_curent_position(hass)
    state = hass.states.get(CURR_ENTITIE)
    attr = state.attributes
    if CURR_ENTITIE.startswith("input_select."):
        # the "-" placeholder option is always present and always first
        options = attr.get("options")
        if len(options) < 2:
            _say_it(hass, "brak pozycji")
        else:
            CURR_ENTITIE_POSITION = get_next(options, CURR_ENTITIE_POSITION)
            _say_it(hass, CURR_ENTITIE_POSITION)
    elif CURR_ENTITIE.startswith("sensor.") and CURR_ENTITIE.endswith("list"):
        if not attr:
            _say_it(hass, "brak pozycji")
        else:
            next_id = int(state.state) + 1
            if next_id == len(attr):
                next_id = 0
            _say_it(hass, attr.get(next_id)["name"])
            # publish the new list position
            hass.states.async_set(CURR_ENTITIE, next_id, attr)
    elif CURR_ENTITIE.startswith("input_number."):
        upper = float(attr.get("max"))
        step = float(attr.get("step"))
        current = float(CURR_ENTITIE_POSITION)
        # clamp at the configured maximum
        CURR_ENTITIE_POSITION = str(round(min(current + step, upper), 2))
        _say_it(hass, str(CURR_ENTITIE_POSITION))
def set_prev_position(hass):
    """Step the focused entity's position backwards and read the new value out."""
    global CURR_ENTITIE_POSITION
    CURR_ENTITIE_POSITION = get_curent_position(hass)
    state = hass.states.get(CURR_ENTITIE)
    attr = state.attributes
    if CURR_ENTITIE.startswith("input_select."):
        options = attr.get("options")
        if len(options) < 2:
            _say_it(hass, "brak pozycji")
        else:
            CURR_ENTITIE_POSITION = get_prev(options, CURR_ENTITIE_POSITION)
            _say_it(hass, CURR_ENTITIE_POSITION)
    elif CURR_ENTITIE.startswith("sensor.") and CURR_ENTITIE.endswith("list"):
        if not attr:
            _say_it(hass, "brak pozycji")
        else:
            prev_id = int(state.state) - 1
            if prev_id < 0:
                prev_id = len(attr) - 1
            _say_it(hass, attr.get(prev_id)["name"])
            # publish the new list position
            hass.states.async_set(CURR_ENTITIE, prev_id, attr)
    elif CURR_ENTITIE.startswith("input_number."):
        lower = float(attr.get("min"))
        step = float(attr.get("step"))
        current = float(CURR_ENTITIE_POSITION)
        # clamp at the configured minimum
        CURR_ENTITIE_POSITION = str(round(max(current - step, lower), 2))
        _say_it(hass, str(CURR_ENTITIE_POSITION))
def select_entity(hass, long_press):
    """Handle OK on the remote: descend view -> group -> entity, or act on it.

    Navigation descends one level per press (group view, then group, then
    entity). Once an entity is focused, OK either enters it (editable
    entities), toggles/executes it directly, or - when already entered -
    commits the pending position/value.
    """
    global CURR_ENTITIE_SELECTED_ACTION
    global G_INPUT_CURRENT_MINNUTE
    global G_INPUT_CURRENT_HOUR
    # on remote OK, select group view, group or entity
    global CURR_ENTITIE_ENTERED
    # OK on remote
    if CURR_GROUP_VIEW is None:
        # no group view was selected
        get_groups(hass)
        set_curr_group_view()
        say_curr_group_view(hass)
        return
    if CURR_GROUP is None:
        # no group is selected - we need to select the first one
        # from the group view
        set_curr_group(hass, None)
        say_curr_group(hass)
        return
    # group in group
    if CURR_GROUP["entity_id"] == "group.all_ais_devices":
        get_groups(hass)
        gg = CURR_GROUP["entities"]
        set_curr_group(hass, get_group_from_group(gg[0]))
        say_curr_group(hass)
        return
    if CURR_ENTITIE is None:
        # no entity is selected - we need to focus the first one
        set_curr_entity(hass, None)
        say_curr_entity(hass)
        CURR_ENTITIE_ENTERED = False
        return
    if CURR_ENTITIE == "sensor.ais_drives":
        # file browser: a pending delete action wins over opening the item
        if CURR_ENTITIE_SELECTED_ACTION == ais_global.G_ACTION_DELETE:
            hass.async_run_job(
                hass.services.call("ais_drives_service", "remote_delete_item")
            )
            CURR_ENTITIE_SELECTED_ACTION = None
            return
        else:
            hass.services.call("ais_drives_service", "remote_select_item")
            return
    elif CURR_ENTITIE.startswith("media_player."):
        # a pending shuffle action makes OK toggle shuffle instead of playback
        if CURR_ENTITIE_SELECTED_ACTION == ais_global.G_ACTION_SET_AUDIO_SHUFFLE:
            state = hass.states.get("media_player.wbudowany_glosnik")
            shuffle = state.attributes.get("shuffle", False)
            if shuffle:
                _say_it(hass, "Włączono odtwarzanie w kolejności.")
                hass.services.call(
                    "media_player",
                    "shuffle_set",
                    {"entity_id": CURR_ENTITIE, "shuffle": False},
                )
            else:
                _say_it(hass, "Włączono odtwarzanie losowe.")
                hass.services.call(
                    "media_player",
                    "shuffle_set",
                    {"entity_id": CURR_ENTITIE, "shuffle": True},
                )
            return
    if CURR_ENTITIE_ENTERED is False:
        # check if the entity option can be selected
        if can_entity_be_changed(hass, CURR_ENTITIE):
            if can_entity_be_entered(hass, CURR_ENTITIE):
                CURR_ENTITIE_ENTERED = True
                if CURR_ENTITIE.startswith("input_text."):
                    _say_it(hass, "Wpisywanie/dyktowanie tekstu włączone")
                    reset_virtual_keyboard(hass)
                elif CURR_ENTITIE.startswith("input_datetime."):
                    # force re-seeding of the time editing buffer
                    G_INPUT_CURRENT_MINNUTE = None
                    G_INPUT_CURRENT_HOUR = None
                    _say_it(
                        hass,
                        "OK, dostosuj godzinę strzałkami góra lub dół a minuty strzałkami lewo lub prawo."
                        " By zatwierdzić naciśnij 'OK'.",
                    )
                else:
                    set_next_position(hass)
                return
            else:
                # we will change this item directly
                if CURR_ENTITIE.startswith("media_player."):
                    # enter to media player
                    CURR_ENTITIE_ENTERED = True
                    # NOTE(review): no return here and CURR_ENTITIE_ENTERED is
                    # now True, so the "is True" block below also runs in this
                    # same call and may toggle play/pause a second time -
                    # confirm whether this double dispatch is intended
                    # play / pause on selected player
                    curr_state = hass.states.get(CURR_ENTITIE).state
                    if curr_state == "playing":
                        if long_press is True:
                            _say_it(hass, "stop")
                            hass.services.call(
                                "media_player",
                                "media_stop",
                                {"entity_id": CURR_ENTITIE},
                            )
                        else:
                            _say_it(hass, "pauza")
                            hass.services.call(
                                "media_player",
                                "media_pause",
                                {"entity_id": CURR_ENTITIE},
                            )
                    else:
                        _say_it(hass, "graj")
                        hass.services.call(
                            "media_player", "media_play", {"entity_id": CURR_ENTITIE}
                        )
                elif CURR_ENTITIE.startswith("input_boolean."):
                    curr_state = hass.states.get(CURR_ENTITIE).state
                    if curr_state == "on":
                        _say_it(hass, "ok, wyłączam")
                    if curr_state == "off":
                        _say_it(hass, "ok, włączam")
                    hass.services.call(
                        "input_boolean", "toggle", {"entity_id": CURR_ENTITIE}
                    )
                elif CURR_ENTITIE.startswith("switch."):
                    curr_state = hass.states.get(CURR_ENTITIE).state
                    if curr_state == "on":
                        _say_it(hass, "ok, wyłączam")
                    if curr_state == "off":
                        _say_it(hass, "ok, włączam")
                    if curr_state == "unavailable":
                        _say_it(hass, "przełącznik jest niedostępny")
                    hass.services.call("switch", "toggle", {"entity_id": CURR_ENTITIE})
                elif CURR_ENTITIE.startswith("light."):
                    curr_state = hass.states.get(CURR_ENTITIE).state
                    if curr_state == "on":
                        _say_it(hass, "ok, wyłączam")
                    elif curr_state == "off":
                        _say_it(hass, "ok, włączam")
                    elif curr_state == "unavailable":
                        _say_it(hass, "oświetlnie jest niedostępne")
                    hass.services.call("light", "toggle", {"entity_id": CURR_ENTITIE})
                elif CURR_ENTITIE.startswith("script."):
                    hass.services.call("script", CURR_ENTITIE.split(".")[1])
                elif CURR_ENTITIE.startswith("automation."):
                    _say_it(hass, "ok, uruchamiam")
                    hass.services.call(
                        "automation", "trigger", {"entity_id": CURR_ENTITIE}
                    )
        else:
            # do some special stuff for some entries
            if CURR_ENTITIE == "sensor.version_info":
                # get the info about upgrade
                state = hass.states.get(CURR_ENTITIE)
                reinstall_dom_app = state.attributes.get("reinstall_dom_app", False)
                reinstall_android_app = state.attributes.get(
                    "reinstall_android_app", False
                )
                reinstall_linux_apt = state.attributes.get("reinstall_linux_apt", False)
                if (
                    reinstall_dom_app is False
                    and reinstall_android_app is False
                    and reinstall_linux_apt is False
                ):
                    _say_it(hass, "Twoja wersja jest aktualna")
                else:
                    _say_it(
                        hass,
                        "Poczekaj na zakończenie aktualizacji i restart. Do usłyszenia.",
                    )
                    hass.services.call("ais_updater", "execute_upgrade")
            elif CURR_ENTITIE == "sensor.ais_secure_android_id_dom":
                # spelling
                state = hass.states.get("sensor.ais_secure_android_id_dom")
                dom_id = state.state.replace("dom-", "")
                dom_id = "; ".join(dom_id)
                _say_it(hass, dom_id)
                return
            elif CURR_ENTITIE == "sensor.ais_connect_iot_device_info":
                # start searching for the device
                hass.services.call("script", "ais_scan_iot_devices_in_network")
                return
            else:
                _say_it(hass, "Tej pozycji nie można zmieniać")
    if CURR_ENTITIE_ENTERED is True:
        # check if we can change this item
        if can_entity_be_changed(hass, CURR_ENTITIE):
            # these items can be controlled from remote
            # if we are here it means that the enter on the same item was
            # pressed twice, we should do something - to mange the item status
            if CURR_ENTITIE.startswith(("input_select.", "input_number.")):
                commit_current_position(hass)
            elif CURR_ENTITIE.startswith("sensor.") and CURR_ENTITIE.endswith("list"):
                if CURR_ENTITIE_SELECTED_ACTION == ais_global.G_ACTION_DELETE:
                    # delete
                    if CURR_ENTITIE == "sensor.aisfavoriteslist":
                        item_idx = hass.states.get("sensor.aisfavoriteslist").state
                        _say_it(hass, "OK usuwam tą pozycję z ulubionych.")
                        hass.async_run_job(
                            hass.services.call(
                                "ais_bookmarks", "delete_favorite", {"id": item_idx}
                            )
                        )
                    elif CURR_ENTITIE == "sensor.aisbookmarkslist":
                        item_idx = hass.states.get("sensor.aisbookmarkslist").state
                        hass.async_run_job(
                            hass.services.call(
                                "ais_bookmarks", "delete_bookmark", {"id": item_idx}
                            )
                        )
                        _say_it(hass, "OK. Usuwam tą zakładkę.")
                    # reset action
                    CURR_ENTITIE_SELECTED_ACTION = None
                    return
                #
                commit_current_position(hass)
            elif CURR_ENTITIE.startswith("media_player."):
                # play / pause on selected player
                curr_state = hass.states.get(CURR_ENTITIE).state
                if curr_state == "playing":
                    if long_press is True:
                        _say_it(hass, "stop")
                        hass.services.call(
                            "media_player", "media_stop", {"entity_id": CURR_ENTITIE}
                        )
                    else:
                        _say_it(hass, "pauza")
                        hass.services.call(
                            "media_player", "media_pause", {"entity_id": CURR_ENTITIE}
                        )
                else:
                    _say_it(hass, "graj")
                    hass.services.call(
                        "media_player", "media_play", {"entity_id": CURR_ENTITIE}
                    )
            elif CURR_ENTITIE.startswith("input_text."):
                type_to_input_text_from_virtual_keyboard(hass)
            elif CURR_ENTITIE.startswith("input_datetime."):
                commit_current_position(hass)
        else:
            # enter pressed on an item that cannot be changed
            _say_it(hass, "Tej pozycji nie można zmieniać")
def can_entity_be_changed(hass, entity):
    """Return True when *entity* can be changed from the remote control.

    The previous implementation inspected the global CURR_ENTITIE and ignored
    its *entity* argument; all call sites pass CURR_ENTITIE, so checking the
    argument is equivalent and makes the function self-contained.
    """
    changeable_prefixes = (
        "media_player.",
        "input_boolean.",
        "switch.",
        "script.",
        "light.",
        "input_text.",
        "input_select.",
        "input_number.",
        "automation.",
        "input_datetime.",
    )
    if entity.startswith(changeable_prefixes):
        return True
    # dynamic audio/news list sensors (sensor.*list) are adjustable too
    return entity.startswith("sensor.") and entity.endswith("list")
def can_entity_be_entered(hass, entity):
    """Return True if pressing OK should "enter" *entity* for editing.

    Bug fix: the original body ignored the ``entity`` parameter and
    tested the module global ``CURR_ENTITIE`` instead; the parameter is
    now used (all visible callers pass ``CURR_ENTITIE``).

    Args:
        hass: Home Assistant core object (unused, kept for signature parity).
        entity: entity_id string to inspect.
    """
    # these domains act immediately on OK and are never "entered"
    non_enterable_prefixes = (
        "media_player.",
        "input_boolean.",
        "switch.",
        "script.",
        "light.",
        "automation.",
        "group.",
    )
    return not entity.startswith(non_enterable_prefixes)
def set_on_dpad_down(hass, long_press):
    """Handle D-pad DOWN on the remote control.

    Offers extra options for the current selection: cycles the audio
    speed/shuffle sub-action for the media player, switches the virtual
    keyboard mode for text inputs, decreases the hour for datetime
    inputs, and arms the delete action for AIS lists and local files.
    Spoken feedback is given via TTS.

    Args:
        hass: Home Assistant core object.
        long_press: True when the key was held (not used in this handler).
    """
    global CURR_ENTITIE_SELECTED_ACTION
    if CURR_ENTITIE is not None:
        if CURR_ENTITIE.startswith("media_player."):
            # toggle between the two player sub-actions: speed <-> shuffle
            if (
                CURR_ENTITIE_SELECTED_ACTION is None
                or CURR_ENTITIE_SELECTED_ACTION == ais_global.G_ACTION_SET_AUDIO_SHUFFLE
            ):
                CURR_ENTITIE_SELECTED_ACTION = ais_global.G_ACTION_SET_AUDIO_SPEED
                state = hass.states.get("input_number.media_player_speed")
                l_speed_pl = ais_global.get_audio_speed_name(state.state)
                _say_it(
                    hass,
                    "Prędkość odtwarzania audio "
                    + l_speed_pl
                    + ". Przyśpiesz strzałką w prawo, zwolnij strzałką w lewo.",
                )
            elif CURR_ENTITIE_SELECTED_ACTION == ais_global.G_ACTION_SET_AUDIO_SPEED:
                CURR_ENTITIE_SELECTED_ACTION = ais_global.G_ACTION_SET_AUDIO_SHUFFLE
                state = hass.states.get("media_player.wbudowany_glosnik")
                shuffle = state.attributes.get("shuffle", False)
                if shuffle:
                    _say_it(
                        hass, "Odtwarzanie losowe włączone. Naciśnij OK by wyłączyć."
                    )
                else:
                    _say_it(
                        hass, "Odtwarzanie losowe wyłączone. Naciśnij OK by włączyć."
                    )
            return
        elif CURR_ENTITIE.startswith("input_text.") and CURR_ENTITIE_ENTERED:
            # text input: DOWN cycles the virtual keyboard mode backwards
            set_prev_virtual_keyboard_mode()
            say_curr_virtual_keyboard_mode(hass)
            return
        elif CURR_ENTITIE.startswith("input_datetime.") and CURR_ENTITIE_ENTERED:
            # datetime input: DOWN decreases the hour
            set_time_hour_down(hass, CURR_ENTITIE)
            return
        elif CURR_ENTITIE_ENTERED and CURR_ENTITIE == "sensor.aisfavoriteslist":
            # arm delete; user must confirm with OK (handled elsewhere)
            _say_it(hass, "Usuwanie. Naciśnij OK aby usunąć pozycję z ulubionych.")
            CURR_ENTITIE_SELECTED_ACTION = ais_global.G_ACTION_DELETE
            return
        elif CURR_ENTITIE_ENTERED and CURR_ENTITIE == "sensor.aisbookmarkslist":
            _say_it(hass, "Usuwanie. Naciśnij OK aby usunąć tą zakładkę.")
            CURR_ENTITIE_SELECTED_ACTION = ais_global.G_ACTION_DELETE
            return
        elif CURR_ENTITIE == "sensor.ais_drives":
            # file browser: delete is only offered on the internal disk
            path = hass.states.get(CURR_ENTITIE).state
            if path.startswith("/dysk-wewnętrzny"):
                _say_it(hass, "Usuwanie. Naciśnij OK aby usunąć tą pozycję.")
                CURR_ENTITIE_SELECTED_ACTION = ais_global.G_ACTION_DELETE
            else:
                _say_it(hass, "Wybrana pozycja nie ma dodatkowych opcji.")
            return
        else:
            _say_it(hass, "Wybrana pozycja nie ma dodatkowych opcji.")
def set_on_dpad_up(hass, long_press):
    """Handle D-pad UP on the remote control.

    Steps back through the media-player sub-actions (shuffle -> speed ->
    plain control), reads out what is currently playing, cycles the
    virtual keyboard mode forward for text inputs, and increases the
    hour for datetime inputs.

    Args:
        hass: Home Assistant core object.
        long_press: True when the key was held (not used in this handler).
    """
    global CURR_ENTITIE_SELECTED_ACTION
    if CURR_ENTITIE is not None:
        if CURR_ENTITIE_SELECTED_ACTION == ais_global.G_ACTION_SET_AUDIO_SHUFFLE:
            # shuffle sub-action -> speed sub-action
            CURR_ENTITIE_SELECTED_ACTION = ais_global.G_ACTION_SET_AUDIO_SPEED
            state = hass.states.get("input_number.media_player_speed")
            l_speed_pl = ais_global.get_audio_speed_name(state.state)
            _say_it(
                hass,
                "Prędkość odtwarzania audio "
                + l_speed_pl
                + ". Przyśpiesz strzałką w prawo zwolnij strzałką w lewo.",
            )
            return
        elif CURR_ENTITIE_SELECTED_ACTION == ais_global.G_ACTION_SET_AUDIO_SPEED:
            # speed sub-action -> plain player control
            CURR_ENTITIE_SELECTED_ACTION = None
            _say_it(hass, "Sterowanie odtwarzaczem")
        elif CURR_ENTITIE.startswith("media_player."):
            # info about audio
            state = hass.states.get("media_player.wbudowany_glosnik")
            text = "Odtwarzam " + state.attributes.get("media_title", "")
            audio_type_pl = ais_global.G_NAME_FOR_AUDIO_NATURE.get(
                state.attributes.get("source", ""), state.attributes.get("source", "")
            )
            text += " z " + audio_type_pl
            _say_it(hass, text)
            return
        elif CURR_ENTITIE.startswith("input_text.") and CURR_ENTITIE_ENTERED:
            set_next_virtual_keyboard_mode()
            say_curr_virtual_keyboard_mode(hass)
            return
        elif CURR_ENTITIE.startswith("input_datetime.") and CURR_ENTITIE_ENTERED:
            set_time_hour_up(hass, CURR_ENTITIE)
            return
        else:
            _say_it(hass, "Wybrana pozycja nie ma dodatkowych informacji.")
def set_focus_on_prev_entity(hass, long_press):
    """Handle 'previous' (left) on the joystick/remote.

    Media player: long press seeks to position 0; short press lowers
    playback speed when that sub-action is armed.  Otherwise the
    selection moves backwards through group views, groups, entity
    options or entities, depending on the current navigation state.

    Args:
        hass: Home Assistant core object.
        long_press: True when the key was held.
    """
    # prev on joystick
    if CURR_ENTITIE is not None:
        if CURR_ENTITIE.startswith("media_player."):
            if long_press:
                # seek back on remote
                hass.services.call(
                    "media_player",
                    "media_seek",
                    {"entity_id": CURR_ENTITIE, "seek_position": 0},
                )
                return
            else:
                if CURR_ENTITIE_SELECTED_ACTION == ais_global.G_ACTION_SET_AUDIO_SPEED:
                    # speed down on remote; clamp at the slider minimum
                    state = hass.states.get("input_number.media_player_speed")
                    _min = float(state.attributes.get("min"))
                    _step = float(state.attributes.get("step"))
                    _curr = round(max(float(state.state) - _step, _min), 2)
                    hass.services.call(
                        "ais_ai_service",
                        "publish_command_to_frame",
                        {"key": "setPlaybackSpeed", "val": _curr},
                    )
                    hass.services.call(
                        "input_number",
                        "set_value",
                        {
                            "entity_id": "input_number.media_player_speed",
                            "value": _curr,
                        },
                    )
                    _say_it(hass, ais_global.get_audio_speed_name(_curr))
                    return
    # no group is selected go to prev in the groups view menu
    if CURR_GROUP is None:
        set_prev_group_view()
        say_curr_group_view(hass)
        return
    # group is selected
    # check if the entity in the group is selected
    if CURR_ENTITIE is None:
        set_prev_group(hass)
        say_curr_group(hass)
        return
    # entity in the group is selected
    # check if the entity option can be selected
    if can_entity_be_changed(hass, CURR_ENTITIE) and CURR_ENTITIE_ENTERED is True:
        if CURR_ENTITIE.startswith("media_player."):
            hass.services.call(
                "media_player", "media_previous_track", {"entity_id": CURR_ENTITIE}
            )
            return
        elif CURR_ENTITIE.startswith("input_text.") and CURR_ENTITIE_ENTERED:
            # move selection on the virtual keyboard
            set_prev_virtual_key()
            say_curr_virtual_key(hass)
            return
        elif CURR_ENTITIE.startswith("input_datetime.") and CURR_ENTITIE_ENTERED:
            set_time_minute_down(hass, CURR_ENTITIE)
            return
        else:
            set_prev_position(hass)
    else:
        if CURR_ENTITIE.startswith("media_player.") and CURR_ENTITIE_ENTERED:
            hass.services.call(
                "media_player", "media_previous_track", {"entity_id": CURR_ENTITIE}
            )
            return
        elif CURR_ENTITIE == "sensor.ais_drives":
            # file browser handles its own navigation
            hass.services.call("ais_drives_service", "remote_prev_item")
            return
        else:
            # entity not selected or no way to change the entity, go to next one
            set_prev_entity(hass)
def set_focus_on_next_entity(hass, long_press):
    """Handle 'next' (right) on the joystick/remote.

    Media player: long press seeks to position 1; short press raises
    playback speed when that sub-action is armed.  Otherwise the
    selection moves forward through group views, groups, entity
    options or entities, mirroring set_focus_on_prev_entity().

    Args:
        hass: Home Assistant core object.
        long_press: True when the key was held.
    """
    # next on joystick
    if CURR_ENTITIE is not None:
        if CURR_ENTITIE.startswith("media_player."):
            if long_press:
                # seek next on remote
                hass.services.call(
                    "media_player",
                    "media_seek",
                    {"entity_id": CURR_ENTITIE, "seek_position": 1},
                )
                return
            else:
                if CURR_ENTITIE_SELECTED_ACTION == ais_global.G_ACTION_SET_AUDIO_SPEED:
                    # speed up on remote; clamp at the slider maximum
                    # NOTE(review): the two service calls below run in the
                    # opposite order to the prev-direction handler - confirm
                    # whether the ordering matters
                    state = hass.states.get("input_number.media_player_speed")
                    _max = float(state.attributes.get("max"))
                    _step = float(state.attributes.get("step"))
                    _curr = round(min(float(state.state) + _step, _max), 2)
                    hass.services.call(
                        "input_number",
                        "set_value",
                        {
                            "entity_id": "input_number.media_player_speed",
                            "value": _curr,
                        },
                    )
                    hass.services.call(
                        "ais_ai_service",
                        "publish_command_to_frame",
                        {"key": "setPlaybackSpeed", "val": _curr},
                    )
                    _say_it(hass, ais_global.get_audio_speed_name(_curr))
                    return
    # no group is selected go to next in the groups view menu
    if CURR_GROUP is None:
        set_next_group_view()
        say_curr_group_view(hass)
        return
    # group is selected
    # check if the entity in the group is selected
    if CURR_ENTITIE is None:
        set_next_group(hass)
        say_curr_group(hass)
        return
    # entity in the group is selected
    # check if the entity option can be selected
    if can_entity_be_changed(hass, CURR_ENTITIE) and CURR_ENTITIE_ENTERED is True:
        if CURR_ENTITIE.startswith("media_player."):
            hass.services.call(
                "media_player", "media_next_track", {"entity_id": CURR_ENTITIE}
            )
        elif CURR_ENTITIE.startswith("input_text.") and CURR_ENTITIE_ENTERED:
            # move selection on the virtual keyboard
            set_next_virtual_key()
            say_curr_virtual_key(hass)
        elif CURR_ENTITIE.startswith("input_datetime.") and CURR_ENTITIE_ENTERED:
            set_time_minute_up(hass, CURR_ENTITIE)
            return
        else:
            set_next_position(hass)
    else:
        if CURR_ENTITIE.startswith("media_player.") and CURR_ENTITIE_ENTERED is True:
            hass.services.call(
                "media_player", "media_next_track", {"entity_id": CURR_ENTITIE}
            )
        elif CURR_ENTITIE == "sensor.ais_drives":
            # file browser handles its own navigation
            hass.services.call("ais_drives_service", "remote_next_item")
        else:
            # entity not selected or no way to change the entity, go to next one
            set_next_entity(hass)
def go_up_in_menu(hass):
    """Handle 'back' on the remote - go one level up in the menu tree.

    Checked in order: leave the file browser, leave the audio player
    (restoring the context remembered by go_to_player), step out of a
    group, cancel an entered entity (restoring the committed text for
    input_text), or step back in the group-view menu.

    NOTE(review): only branches with an explicit ``return`` skip the
    final beep; the remaining branches fall through to ``_beep_it`` -
    this matches the original control flow and is preserved here.

    Fix: removed the dead no-op expression statement
    ``PREV_CURR_GROUP["friendly_name"]`` that preceded the _say_it call.
    """
    global CURR_ENTITIE_ENTERED, CURR_ENTITIE
    # check if the entity in the group is selected
    if CURR_ENTITIE is not None:
        # check if we are browsing files
        if CURR_ENTITIE == "sensor.ais_drives":
            # check if we can go up inside the folder tree
            state = hass.states.get("sensor.ais_drives")
            if state.state is not None and state.state != "":
                hass.services.call("ais_drives_service", "remote_cancel_item")
                return
            else:
                # at drive root - go up in the group menu
                set_curr_group(hass, None)
                say_curr_group(hass)
        elif CURR_ENTITIE == "media_player.wbudowany_glosnik":
            if PREV_CURR_GROUP is not None:
                # go back to the context remembered by go_to_player()
                set_curr_group(hass, PREV_CURR_GROUP)
                CURR_ENTITIE = None
                CURR_ENTITIE_ENTERED = False
                _say_it(hass, PREV_CURR_GROUP["friendly_name"])
            else:
                # no previous context - go home
                go_home(hass)
        elif not CURR_ENTITIE_ENTERED:
            # go up in the group menu
            # check if we have group in group
            if CURR_GROUP is not None:
                if CURR_GROUP["remote_group_view"].startswith("group."):
                    set_curr_group(hass, CURR_GROUP)
                    say_curr_group(hass)
                    return
            set_curr_group(hass, None)
            say_curr_group(hass)
        else:
            # leave the entered entity
            CURR_ENTITIE_ENTERED = False
            if CURR_ENTITIE.startswith("input_text."):
                # restore the committed keyboard value (or clear the field)
                if CURR_VIRTUAL_KEYBOARD_VALUE is None:
                    hass.services.call(
                        "input_text",
                        "set_value",
                        {"entity_id": CURR_ENTITIE, "value": ""},
                    )
                else:
                    hass.services.call(
                        "input_text",
                        "set_value",
                        {
                            "entity_id": CURR_ENTITIE,
                            "value": CURR_VIRTUAL_KEYBOARD_VALUE,
                        },
                    )
            say_curr_entity(hass)
            return
    # no entity is selected, check if the group is selected
    elif CURR_GROUP is not None:
        # go up in the group view menu
        # check if group in group
        if CURR_GROUP["remote_group_view"].startswith("group."):
            gg = get_group_from_group(CURR_GROUP["remote_group_view"])
            set_curr_group(hass, gg)
            say_curr_group(hass)
            return
        else:
            set_curr_group_view()
            say_curr_group_view(hass)
            return
    # can't go up, beep
    _beep_it(hass, 33)
def type_to_input_text(hass, key):
    """Append the character for ASCII code *key* to the keyboard buffer.

    Only active while an input_text entity is entered; otherwise a no-op.
    """
    global CURR_VIRTUAL_KEYBOARD_VALUE
    if not (CURR_ENTITIE.startswith("input_text.") and CURR_ENTITIE_ENTERED):
        return
    letter = chr(key)
    if CURR_VIRTUAL_KEYBOARD_VALUE is None:
        CURR_VIRTUAL_KEYBOARD_VALUE = letter
    else:
        CURR_VIRTUAL_KEYBOARD_VALUE += letter
    _say_it(hass, "wpisano: " + letter)
def type_to_input_text_from_virtual_keyboard(hass):
    """Commit the currently selected virtual-keyboard key.

    Appends the selected character to the keyboard buffer (or removes
    text when the keyboard is in delete mode) and reads back what was
    typed via TTS.
    """
    # add the letter to the virtual input
    global CURR_VIRTUAL_KEYBOARD_VALUE
    if CURR_VIRTUAL_KEYBOARD_VALUE is None:
        CURR_VIRTUAL_KEYBOARD_VALUE = ""
    if CURR_VIRTUAL_KEY is None:
        # nothing selected yet - prompt the user instead
        if get_curr_virtual_keyboard_mode() == "Usuwanie":
            _say_it(hass, "wybierz tryb usuwania")
        else:
            _say_it(hass, "wybierz znak do wpisania")
        return
    key = get_curr_virtual_key()
    km = get_curr_virtual_keyboard_mode()
    if km == "Litery":
        key = key.lower()
    if km == "Usuwanie":
        if key == "ostatni znak":
            # drop the last character
            text = CURR_VIRTUAL_KEYBOARD_VALUE[:-1]
        elif key == "<KEY>":
            # NOTE(review): "<KEY>" looks like a redacted placeholder for the
            # "delete last word" key label - confirm the intended literal
            text = CURR_VIRTUAL_KEYBOARD_VALUE.rsplit(" ", 1)[0]
        else:
            # any other delete key clears the whole buffer
            text = ""
    else:
        text = CURR_VIRTUAL_KEYBOARD_VALUE + key
    CURR_VIRTUAL_KEYBOARD_VALUE = text
    # build the spoken confirmation, depending on keyboard mode
    text = ""
    if km == "Litery":
        text = "wpisuję literę: " + key.lower()
    elif km == "Wielkie litery":
        text = "wpisuję wielką literę: " + key
    elif km == "Cyfry":
        text = "wpisuję cyfrę: " + key
    elif km == "Znaki specjalne":
        # speak the symbol's name, not the symbol itself
        idx = VIRTUAL_KEYBOARD_SYMBOLS.index(key)
        text = "" + VIRTUAL_KEYBOARD_SYMBOLS_NAMES[idx]
        text = "wpisuję znak: " + text
    elif km == "Usuwanie":
        text = "OK, usuwam " + key
    _say_it(hass, text)
def go_to_player(hass, say):
    """Switch the remote into audio-player control mode.

    Remembers the previous menu context (so 'back' can restore it),
    then selects the built-in speaker inside the audio player group.

    Args:
        hass: Home Assistant core object.
        say: when True, announce the mode change via TTS.
    """
    global CURR_REMOTE_MODE_IS_IN_AUDIO_MODE
    global PREV_CURR_GROUP, PREV_CURR_ENTITIE
    global CURR_ENTITIE_ENTERED
    CURR_REMOTE_MODE_IS_IN_AUDIO_MODE = True
    if not GROUP_ENTITIES:
        get_groups(hass)
    if CURR_ENTITIE != "media_player.wbudowany_glosnik":
        # remember where we came from so go_up_in_menu() can return there
        PREV_CURR_GROUP = CURR_GROUP
        PREV_CURR_ENTITIE = CURR_ENTITIE
    audio_group = next(
        (g for g in GROUP_ENTITIES if g["entity_id"] == "group.audio_player"), None
    )
    if audio_group is not None:
        set_curr_group(hass, audio_group)
        set_curr_entity(hass, "media_player.wbudowany_glosnik")
        CURR_ENTITIE_ENTERED = True
        if say:
            _say_it(hass, "Sterowanie odtwarzaczem")
def go_home(hass):
    """Return the remote to the top-level "Mój Dom" group view."""
    global CURR_REMOTE_MODE_IS_IN_AUDIO_MODE
    global CURR_GROUP_VIEW
    CURR_REMOTE_MODE_IS_IN_AUDIO_MODE = False
    if not GROUP_ENTITIES:
        get_groups(hass)
    CURR_GROUP_VIEW = "Mój Dom"
    # re-select the view so the group/entity pointers are reset
    set_curr_group_view()
    say_curr_group_view(hass)
def get_groups(hass):
    """Rebuild the global GROUP_ENTITIES menu used by the remote control.

    Scans all states: every ``group.*`` entity carrying a
    ``remote_group_view`` attribute becomes a menu item, while ordinary
    entities are bucketed by domain and attached (sorted, de-duplicated)
    to the matching ``group.all_ais_*`` menu item.

    Refactor: the twelve parallel accumulator lists, the twelve
    ``list(set(...))`` + ``sort`` pairs and the twelve-branch assignment
    chain were collapsed into a single bucket dict; behavior is
    unchanged.  Now also returns GROUP_ENTITIES (previously None) for
    convenience - existing callers that ignore the return are unaffected.
    """
    global GROUP_ENTITIES

    # entity-id buckets keyed by the menu group that should receive them
    buckets = {
        "group.all_ais_sensors": [],
        "group.all_ais_persons": [],
        "group.all_ais_automations": [],
        "group.all_ais_scenes": [],
        "group.all_ais_switches": [],
        "group.all_ais_lights": [],
        "group.all_ais_climates": [],
        "group.all_ais_covers": [],
        "group.all_ais_locks": [],
        "group.all_ais_vacuums": [],
        "group.all_ais_cameras": [],
        "group.all_ais_fans": [],
    }
    GROUP_ENTITIES = []

    def add_menu_item(l_entity):
        # copy the attributes that drive the remote menu into a plain dict
        l_group = {
            "friendly_name": l_entity.attributes.get("friendly_name"),
            "order": l_entity.attributes.get("order"),
            "entity_id": l_entity.entity_id,
            "entities": l_entity.attributes.get("entity_id"),
            "context_key_words": l_entity.attributes.get("context_key_words"),
            "context_answer": l_entity.attributes.get("context_answer"),
            "context_suffix": l_entity.attributes.get("context_suffix"),
            "remote_group_view": l_entity.attributes.get("remote_group_view"),
            "player_mode": l_entity.attributes.get("player_mode", ""),
        }
        GROUP_ENTITIES.append(l_group)

    for entity in hass.states.async_all():
        eid = entity.entity_id
        if eid.startswith("group."):
            # only groups marked for the remote view become menu items
            if entity.attributes.get("remote_group_view") is not None:
                add_menu_item(entity)
        elif eid.startswith("sensor."):
            # plain sensors without a device_class are skipped
            if entity.attributes.get("device_class", None) is not None:
                buckets["group.all_ais_sensors"].append(eid)
        elif eid.startswith("automation.") and not eid.startswith("automation.ais_"):
            # skip internal AIS automations
            buckets["group.all_ais_automations"].append(eid)
        elif eid.startswith("person."):
            buckets["group.all_ais_persons"].append(eid)
        elif eid.startswith("scene."):
            buckets["group.all_ais_scenes"].append(eid)
        elif eid.startswith("switch.") and eid != "switch.zigbee_tryb_parowania":
            # the Zigbee pairing switch is deliberately hidden
            buckets["group.all_ais_switches"].append(eid)
        elif eid.startswith("light."):
            buckets["group.all_ais_lights"].append(eid)
        elif eid.startswith("climate."):
            buckets["group.all_ais_climates"].append(eid)
        elif eid.startswith("cover."):
            buckets["group.all_ais_covers"].append(eid)
        elif eid.startswith("lock."):
            buckets["group.all_ais_locks"].append(eid)
        elif eid.startswith("vacuum."):
            buckets["group.all_ais_vacuums"].append(eid)
        elif eid.startswith("camera."):
            buckets["group.all_ais_cameras"].append(eid)
        elif eid.startswith("fan."):
            buckets["group.all_ais_fans"].append(eid)

    # menu order is driven by the groups' "order" attribute
    GROUP_ENTITIES.sort(key=lambda item: item["order"])
    for group in GROUP_ENTITIES:
        bucket = buckets.get(group["entity_id"])
        if bucket is not None:
            # de-duplicate and sort, matching the original list(set(...)).sort()
            group["entities"] = sorted(set(bucket))
    return GROUP_ENTITIES
# New-style communication with the frame application: one JSON request in, one JSON response out
async def async_process_json_from_frame(hass, json_req):
    """Handle one JSON request sent by the AIS frame application.

    Dispatches on ``json_req["topic"]``: speech commands run through the
    intent pipeline, media-player topics are forwarded as service calls,
    Wear OS registration creates a mobile_app entry, and events are
    re-fired on the HA bus.  Returns an aiohttp ``json_response`` with
    at least ``{"ais": "ok"}``; player topics also get the current
    player status attached.
    """
    res = {"ais": "ok"}
    topic = json_req["topic"]
    payload = json_req["payload"]
    ais_gate_client_id = json_req["ais_gate_client_id"]
    # optional flag; default to False when the frame did not send it
    hot_word_on = json_req.get("hot_word_on", False)
    if topic == "ais/player_auto_discovery":
        # AppDiscoveryMode on mobile is ON
        # TODO discovery AI Speaker
        pass
    if topic == "ais/speech_command":
        try:
            # TODO add info if the intent is media player type - to publish
            intent_resp = await _async_process(
                hass, payload, ais_gate_client_id, hot_word_on
            )
            resp_text = intent_resp.speech["plain"]["speech"]
            res = {"ais": "ok", "say_it": resp_text}
        except Exception as e:
            _LOGGER.error("intent_resp " + str(e))
    elif topic == "ais/media_player":
        # forward the action (payload holds the service name) to the speaker
        hass.async_run_job(
            hass.services.async_call(
                "media_player",
                payload,
                {ATTR_ENTITY_ID: "media_player.wbudowany_glosnik"},
            )
        )
    elif topic == "ais/register_wear_os":
        # 1. check pin
        pin = payload["ais_dom_pin"]
        if pin != ais_global.G_AIS_DOM_PIN:
            return json_response({"ais": "nok"})
        # 2. register device and return webhook
        import secrets

        from homeassistant.const import CONF_WEBHOOK_ID

        webhook_id = secrets.token_hex()
        payload[CONF_WEBHOOK_ID] = webhook_id
        payload["user_id"] = ""
        await hass.async_create_task(
            hass.config_entries.flow.async_init(
                "mobile_app", data=payload, context={"source": "registration"}
            )
        )
        res = {CONF_WEBHOOK_ID: payload[CONF_WEBHOOK_ID]}
    elif topic == "ais/event":
        # tag_scanned event
        hass.bus.async_fire(payload["event_type"], payload["event_data"])
    # add player status for some topics
    if topic in ("ais/player_status", "ais/player_auto_discovery", "ais/media_player"):
        attributes = hass.states.get("media_player.wbudowany_glosnik").attributes
        j_media_info = {
            "media_title": attributes.get("media_title", ""),
            "media_source": attributes.get("source", ""),
            "media_stream_image": attributes.get("media_stream_image", ""),
            "media_album_name": attributes.get("media_album_name", ""),
        }
        res["player_status"] = j_media_info
    res["gate_id"] = ais_global.get_sercure_android_id_dom()
    return json_response(res)
async def async_setup(hass, config):
"""Register the process service."""
global aisCloudWS
aisCloudWS = ais_cloud.AisCloudWS(hass)
warnings.filterwarnings("ignore", module="fuzzywuzzy")
config = config.get(DOMAIN, {})
intents = hass.data.get(DOMAIN)
if intents is None:
intents = hass.data[DOMAIN] = {}
for intent_type, utterances in config.get("intents", {}).items():
conf = intents.get(intent_type)
if conf is None:
conf = intents[intent_type] = []
conf.extend(_create_matcher(utterance) for utterance in utterances)
async def process(service):
"""Parse text into commands."""
text = service.data[ATTR_TEXT]
await _async_process(hass, text)
def process_code(service):
"""Parse remote code into action."""
text = json.loads(service.data.get(ATTR_TEXT))
_process_code(hass, text)
def say_it(service):
"""Info to the user."""
text = ""
pitch = None
rate = None
language = None
voice = None
path = None
if ATTR_TEXT in service.data:
text = service.data[ATTR_TEXT]
# TODO else:
# # check message template
# if "template_text" in service.data:
# tpl = template.Template(service.data["template_text"], hass)
# message = tpl.async_render()
# else:
# return
if "img" in service.data:
img = service.data["img"]
if img is not None:
if len(img) < 3:
img = None
else:
img = None
if "pitch" in service.data:
pitch = service.data["pitch"]
if "rate" in service.data:
rate = service.data["rate"]
if "language" in service.data:
language = service.data["language"]
if "voice" in service.data:
voice = service.data["voice"]
if "path" in service.data:
path = service.data["path"]
_say_it(
hass=hass,
message=text,
img=img,
pitch=pitch,
rate=rate,
language=language,
voice=voice,
path=path,
)
    def say_in_browser(service):
        """Speak in the browser - handled client-side by the ais-tts card; server side is a deliberate no-op."""
        pass
    def welcome_home(service):
        """Speak the welcome (or offline warning) message after startup.

        Also fetches Spotify featured playlists when that service is
        available, enables Android immersive mode on rooted devices,
        trims frontend panels for the ais_tts variant, and finally sets
        the global start-done flag.
        """
        # display favorites from Spotify only if Spotify is available
        if hass.services.has_service("ais_spotify_service", "get_favorites"):
            hass.services.call(
                "ais_spotify_service", "get_favorites", {"type": "featured-playlists"}
            )
        text = "Witaj w Domu. Powiedz proszę w czym mogę Ci pomóc?"
        if ais_global.G_OFFLINE_MODE:
            # no network at boot - warn instead of the normal greeting
            text = (
                "Uwaga, uruchomienie bez dostępu do sieci, część usług może nie działać poprawnie."
                "Sprawdź połączenie z Internetem."
            )
        _say_it(hass, text)
        # immersive full mode for all apps
        if ais_global.has_root():
            hass.services.call(
                "ais_shell_command",
                "execute_command",
                {
                    "command": "su -c 'settings put global policy_control "
                    "immersive.full=*'"
                },
            )
        if hass.services.has_service("ais_tts", "play_item"):
            # ais_tts - remove all panels
            if "lovelace-dom" in hass.data.get(
                hass.components.frontend.DATA_PANELS, {}
            ):
                hass.components.frontend.async_remove_panel("lovelace-dom")
            if "aisaudio" in hass.data.get(hass.components.frontend.DATA_PANELS, {}):
                hass.components.frontend.async_remove_panel("aisaudio")
            if "map" in hass.data.get(hass.components.frontend.DATA_PANELS, {}):
                hass.components.frontend.async_remove_panel("map")
            if "history" in hass.data.get(hass.components.frontend.DATA_PANELS, {}):
                hass.components.frontend.async_remove_panel("history")
            if "logbook" in hass.data.get(hass.components.frontend.DATA_PANELS, {}):
                hass.components.frontend.async_remove_panel("logbook")
        # set the flag to info that the AIS start part is done - this is needed to don't say some info before this flag
        ais_global.G_AIS_START_IS_DONE = True
    async def async_set_context(service):
        """Set the app/player context requested by the frame or UI.

        The context string selects a TV activity, a radio/podcast source
        origin or a music service; any other value is matched against
        the menu groups' ``context_key_words`` to jump straight to that
        menu group.
        """
        context = service.data[ATTR_TEXT]
        # re-fetch audio types in case of a network problem on start
        if ais_global.G_AIS_START_IS_DONE:
            if context == "radio":
                types = hass.states.get("input_select.radio_type").attributes.get(
                    "options", []
                )
                if len(types) < 2:
                    await hass.services.async_call("ais_cloud", "get_radio_types")
            # TODO for the rest of audio
        # --- TV contexts ---
        if context == "ais_tv":
            hass.states.async_set("sensor.ais_player_mode", "ais_tv")
        elif context == "ais_tv_on":
            hass.states.async_set("sensor.ais_tv_mode", "tv_on")
            hass.states.async_set("sensor.ais_tv_activity", "")
            _say_it(hass, "Sterowanie na monitorze")
            await _publish_command_to_frame(hass, "goToActivity", "ActivityMenu")
        elif context == "ais_tv_off":
            hass.states.async_set("sensor.ais_tv_mode", "tv_off")
            hass.states.async_set("sensor.ais_tv_activity", "")
            _say_it(hass, "Sterowanie bez monitora")
            await _publish_command_to_frame(
                hass, "goToActivity", "SplashScreenActivity"
            )
        elif context == "ais_tv_youtube":
            hass.states.async_set("sensor.ais_tv_activity", "youtube")
            _say_it(hass, "Odtwarzacz wideo")
            await _publish_command_to_frame(hass, "goToActivity", "ExoPlayerActivity")
        elif context == "ais_tv_spotify":
            hass.states.async_set("sensor.ais_tv_activity", "spotify")
            _say_it(hass, "Odtwarzacz Spotify")
            await _publish_command_to_frame(hass, "goToActivity", "SpotifyActivity")
        elif context == "ais_tv_cameras":
            hass.states.async_set("sensor.ais_tv_activity", "camera")
            _say_it(hass, "Podgląd z kamery")
        elif context == "ais_tv_show_camera":
            hass.states.async_set("sensor.ais_tv_activity", "camera")
            cam_id = service.data["entity_id"]
            cam_attr = hass.states.get(cam_id).attributes
            cam_name = cam_attr.get("friendly_name", "")
            _say_it(hass, "Podgląd z kamery " + cam_name)
            await _publish_command_to_frame(hass, "showCamera", cam_id)
        elif context == "ais_tv_settings":
            hass.states.async_set("sensor.ais_tv_activity", "settings")
            _say_it(hass, "Ustawienia aplikacji")
            await _publish_command_to_frame(hass, "goToActivity", "SettingsActivity")
        # --- radio / podcast source-origin contexts ---
        elif context == "radio_public":
            hass.states.async_set("sensor.ais_player_mode", "radio_player")
            hass.states.async_set("sensor.ais_radio_origin", "public")
            hass.states.async_set("sensor.radiolist", -1, {})
            atrr = hass.states.get("input_select.radio_type").attributes
            hass.states.async_set("input_select.radio_type", "-", atrr)
        elif context == "radio_private":
            hass.states.async_set("sensor.ais_player_mode", "radio_player")
            hass.states.async_set("sensor.ais_radio_origin", "private")
            hass.states.async_set("sensor.radiolist", -1, {})
            atrr = hass.states.get("input_select.radio_type").attributes
            hass.states.async_set("input_select.radio_type", "-", atrr)
        elif context == "radio_shared":
            hass.states.async_set("sensor.ais_player_mode", "radio_player")
            hass.states.async_set("sensor.ais_radio_origin", "shared")
            hass.states.async_set("sensor.radiolist", -1, {})
            atrr = hass.states.get("input_select.radio_type").attributes
            hass.states.async_set("input_select.radio_type", "-", atrr)
        elif context == "podcast_public":
            hass.states.async_set("sensor.ais_player_mode", "podcast_player")
            hass.states.async_set("sensor.ais_podcast_origin", "public")
            hass.states.async_set("sensor.podcastlist", -1, {})
            atrr = hass.states.get("input_select.podcast_type").attributes
            hass.states.async_set("input_select.podcast_type", "-", atrr)
        elif context == "podcast_private":
            hass.states.async_set("sensor.ais_player_mode", "podcast_player")
            hass.states.async_set("sensor.ais_podcast_origin", "private")
            hass.states.async_set("sensor.podcastlist", -1, {})
            atrr = hass.states.get("input_select.podcast_type").attributes
            hass.states.async_set("input_select.podcast_type", "-", atrr)
        elif context == "podcast_shared":
            hass.states.async_set("sensor.ais_player_mode", "podcast_player")
            hass.states.async_set("sensor.ais_podcast_origin", "shared")
            hass.states.async_set("sensor.podcastlist", -1, {})
            atrr = hass.states.get("input_select.podcast_type").attributes
            hass.states.async_set("input_select.podcast_type", "-", atrr)
        # --- music services ---
        elif context == "YouTube":
            hass.states.async_set("sensor.ais_player_mode", "music_player")
            await hass.services.async_call(
                "input_select",
                "select_option",
                {"entity_id": "input_select.ais_music_service", "option": "YouTube"},
            )
        elif context == "Spotify":
            hass.states.async_set("sensor.ais_player_mode", "music_player")
            await hass.services.async_call(
                "input_select",
                "select_option",
                {"entity_id": "input_select.ais_music_service", "option": "Spotify"},
            )
        elif context == "Radio":
            hass.states.async_set("sensor.ais_player_mode", "radio_player")
        elif context == "Podcast":
            hass.states.async_set("sensor.ais_player_mode", "podcast_player")
        else:
            # fallback: match the context against the menu groups' keywords
            for idx, menu in enumerate(GROUP_ENTITIES, start=0):
                context_key_words = menu["context_key_words"]
                if context_key_words is not None:
                    context_key_words = context_key_words.split(",")
                    if context in context_key_words:
                        set_curr_group(hass, menu)
                        set_curr_entity(hass, None)
                        if context == "spotify":
                            await hass.services.async_call(
                                "input_select",
                                "select_option",
                                {
                                    "entity_id": "input_select.ais_music_service",
                                    "option": "Spotify",
                                },
                            )
                        elif context == "youtube":
                            await hass.services.async_call(
                                "input_select",
                                "select_option",
                                {
                                    "entity_id": "input_select.ais_music_service",
                                    "option": "YouTube",
                                },
                            )
                        break
async def check_local_ip(service):
"""Set the local ip in app."""
ip = ais_global.get_my_global_ip()
hass.states.async_set(
"sensor.internal_ip_address",
ip,
{"friendly_name": "Lokalny adres IP", "icon": "mdi:access-point-network"},
)
async def publish_command_to_frame(service):
key = service.data["key"]
val = service.data["val"]
ip = "localhost"
if "ip" in service.data:
if service.data["ip"] is not None:
ip = service.data["ip"]
await _publish_command_to_frame(hass, key, val, ip)
    # old-style frame communication entry point, kept for backward compatibility
    def process_command_from_frame(service):
        """Forward a legacy frame command to the module-level handler."""
        _process_command_from_frame(hass, service)
# fix for the problem on box with remote
def prepare_remote_menu(service):
get_groups(hass)
# register context intent
for menu in GROUP_ENTITIES:
context_key_words = menu["context_key_words"]
if context_key_words is not None:
context_key_words = context_key_words.split(",")
async_register(hass, INTENT_CHANGE_CONTEXT, context_key_words)
def on_new_iot_device_selection(service):
iot = service.data["iot"].lower()
# the name according to the selected model
if "dom_" + ais_global.G_MODEL_SONOFF_S20 in iot:
info = "Inteligentne gniazdo"
elif "dom_" + ais_global.G_MODEL_SONOFF_B1 in iot:
info = "Żarówka"
elif "dom_" + ais_global.G_MODEL_SONOFF_TH in iot:
info = "Przełącznik z czujnikami"
elif "dom_" + ais_global.G_MODEL_SONOFF_SLAMPHER in iot:
info = "Oprawka"
elif "dom_" + ais_global.G_MODEL_SONOFF_TOUCH in iot:
info = "Przełącznik dotykowy"
elif "dom_" + ais_global.G_MODEL_SONOFF_POW in iot:
info = "Przełącznik z pomiarem mocy"
elif "dom_" + ais_global.G_MODEL_SONOFF_DUAL in iot:
info = "Przełącznik podwójny"
elif "dom_" + ais_global.G_MODEL_SONOFF_BASIC in iot:
info = "Przełącznik"
elif "dom_" + ais_global.G_MODEL_SONOFF_IFAN in iot:
info = "Wentylator sufitowy"
elif "dom_" + ais_global.G_MODEL_SONOFF_T11 in iot:
info = "Przełącznik dotykowy pojedynczy"
elif "dom_" + ais_global.G_MODEL_SONOFF_T12 in iot:
info = "Przełącznik dotykowy podwójny"
elif "dom_" + ais_global.G_MODEL_SONOFF_T13 in iot:
info = "Przełącznik dotykowy potrójny"
else:
info = "Nowe urządzenie"
hass.services.call(
"input_text",
"set_value",
{"entity_id": "input_text.ais_iot_device_name", "value": info},
)
# set the WIFI as an current WIFI (only if empty)
wifis = hass.states.get("input_select.ais_android_wifi_network")
if (
wifis.state == ais_global.G_EMPTY_OPTION
and ais_global.GLOBAL_MY_WIFI_SSID is not None
):
options = wifis.attributes.get("options")
for o in options:
if ais_global.GLOBAL_MY_WIFI_SSID in o:
hass.services.call(
"input_select",
"select_option",
{
"entity_id": "input_select.ais_android_wifi_network",
"option": o,
},
)
    async def async_mob_request(service):
        """POST a custom request to a registered mobile_app device.

        ``service.data`` must contain ``request`` and ``device_id`` (the
        device_name of a mobile_app config entry); optional ``data`` is
        forwarded in the payload.  Errors are logged, never raised.
        """
        from homeassistant.components.mobile_app.const import (
            ATTR_APP_DATA,
            ATTR_APP_ID,
            ATTR_APP_VERSION,
            ATTR_OS_VERSION,
            ATTR_PUSH_TOKEN,
            ATTR_PUSH_URL,
        )
        if "request" not in service.data:
            _LOGGER.error("No request in service.data")
            return
        if "device_id" not in service.data:
            _LOGGER.error("No device_id in service.data")
            return
        session = async_get_clientsession(hass)
        device_id = service.data["device_id"]
        entry_data = None
        data = {"request": service.data["request"]}
        if "data" in service.data:
            data["data"] = service.data["data"]
        else:
            data["data"] = {}
        # find the mobile_app config entry whose device_name matches
        for entry in hass.config_entries.async_entries("mobile_app"):
            if entry.data["device_name"] == device_id:
                entry_data = entry.data
        if entry_data is None:
            _LOGGER.error("No mob id from " + device_id)
            return
        app_data = entry_data[ATTR_APP_DATA]
        push_token = app_data[ATTR_PUSH_TOKEN]
        push_url = app_data[ATTR_PUSH_URL]
        data[ATTR_PUSH_TOKEN] = push_token
        reg_info = {
            ATTR_APP_ID: entry_data[ATTR_APP_ID],
            ATTR_APP_VERSION: entry_data[ATTR_APP_VERSION],
        }
        if ATTR_OS_VERSION in entry_data:
            reg_info[ATTR_OS_VERSION] = entry_data[ATTR_OS_VERSION]
        data["registration_info"] = reg_info
        try:
            # 10 s budget for the push-provider round trip
            with async_timeout.timeout(10):
                response = await session.post(push_url, json=data)
                result = await response.json()
                if response.status in [200, 201, 202]:
                    return
                fallback_error = result.get("errorMessage", "Unknown error")
                fallback_message = (
                    f"Internal server error, please try again later: {fallback_error}"
                )
                message = result.get("message", fallback_message)
                _LOGGER.error(message)
        except asyncio.TimeoutError:
            _LOGGER.error("Timeout sending notification to %s", push_url)
async def async_mob_notify(service):
    """Send a push notification to a registered mobile app.

    ``service.data`` keys: ``device_id`` and ``message`` are required;
    ``title``, ``image``, ``say``, ``priority``, ``notification_id``,
    ``data`` and ``click_action`` are optional and get the defaults below.
    """
    # Imported lazily so mobile_app is only touched when the service runs.
    from homeassistant.components.mobile_app.const import (
        ATTR_APP_DATA,
        ATTR_APP_ID,
        ATTR_APP_VERSION,
        ATTR_OS_VERSION,
        ATTR_PUSH_TOKEN,
        ATTR_PUSH_URL,
    )

    session = async_get_clientsession(hass)
    device_id = service.data["device_id"]
    # to allow notation with _
    device_id = device_id.replace("mobile_ais_dom_", "mobile_ais_dom-")
    entry_data = None
    # Build the notification payload, filling defaults for optional keys.
    data = {"message": service.data["message"]}
    if "title" in service.data:
        data["title"] = service.data["title"]
    else:
        data["title"] = "Powiadomienie z AI-Speaker"
    if "image" in service.data:
        data["image"] = service.data["image"]
    if "say" in service.data:
        data["say"] = service.data["say"]
    else:
        data["say"] = False
    if "priority" in service.data:
        data["priority"] = service.data["priority"]
    else:
        data["priority"] = "normal"
    if "notification_id" in service.data:
        data["notification_id"] = service.data["notification_id"]
    else:
        data["notification_id"] = 0
    if "data" in service.data:
        data["data"] = service.data["data"]
    else:
        data["data"] = {}
    if "click_action" in service.data:
        data["click_action"] = service.data["click_action"]
    else:
        data["click_action"] = ""
    # Legacy lookup: match the mobile_app entry by its device_name.
    for entry in hass.config_entries.async_entries("mobile_app"):
        if entry.data["device_name"] == device_id:
            entry_data = entry.data
    if entry_data is None:
        # new way - via device id
        dev_registry = await hass.helpers.device_registry.async_get_registry()
        device = dev_registry.async_get(device_id)
        if device is not None:
            for entry in hass.config_entries.async_entries("mobile_app"):
                if entry.data["device_name"] == device.name:
                    entry_data = entry.data
    if entry_data is None:
        _LOGGER.error("No mob id from " + device_id)
        return
    # Push endpoint and token registered by the app.
    app_data = entry_data[ATTR_APP_DATA]
    push_token = app_data[ATTR_PUSH_TOKEN]
    push_url = app_data[ATTR_PUSH_URL]
    data[ATTR_PUSH_TOKEN] = push_token
    reg_info = {
        ATTR_APP_ID: entry_data[ATTR_APP_ID],
        ATTR_APP_VERSION: entry_data[ATTR_APP_VERSION],
    }
    if ATTR_OS_VERSION in entry_data:
        reg_info[ATTR_OS_VERSION] = entry_data[ATTR_OS_VERSION]
    data["registration_info"] = reg_info
    try:
        with async_timeout.timeout(10):
            response = await session.post(push_url, json=data)
            result = await response.json()
        # 2xx family means the notification was accepted.
        if response.status in [200, 201, 202]:
            return
        fallback_error = result.get("errorMessage", "Unknown error")
        fallback_message = (
            f"Internal server error, please try again later: {fallback_error}"
        )
        message = result.get("message", fallback_message)
        _LOGGER.error(message)
    except asyncio.TimeoutError:
        _LOGGER.error("Timeout sending notification to %s", push_url)
    # await hass.services.async_call(
    #     "notify", device_id, {"message": message, "title": title, "data": data}
    # )
async def check_night_mode(service):
    """Apply or revert the night (quiet) mode theme and speaker volume.

    Invoked in two ways:
      * without ``timer`` in ``service.data`` -- after a settings change or
        at startup; decides day/night from the configured time window;
      * with ``timer: True`` -- once a minute; switches exactly at the
        configured start/stop minute.
    """
    # check the night / quiet mode
    timer = False
    if "timer" in service.data:
        timer = service.data["timer"]
    # TODO - check this fix for 'NoneType' object has no attribute 'state'
    quiet_mode = ""
    if hass is not None:
        if hass.states.get("input_boolean.ais_quiet_mode") is not None:
            quiet_mode = hass.states.get("input_boolean.ais_quiet_mode").state
    if quiet_mode == "":
        # Quiet-mode helper not available (yet) -- fall back to day theme.
        hass.async_add_job(
            hass.services.async_call("frontend", "set_theme", {"name": "ais"})
        )
        return

    def apply_night_mode():
        # Remember the current day volume, lower the speaker, switch theme.
        _LOGGER.info("Start Night ")
        ais_global.G_AIS_DAY_MEDIA_VOLUME_LEVEL = hass.states.get(
            "media_player.wbudowany_glosnik"
        ).attributes["volume_level"]
        # set volume as min from (0.2, curr_volume_level)
        vl = min(0.2, ais_global.G_AIS_DAY_MEDIA_VOLUME_LEVEL)
        hass.async_add_job(
            hass.services.async_call(
                "media_player",
                "volume_set",
                {"entity_id": "media_player.wbudowany_glosnik", "volume_level": vl},
            )
        )
        hass.async_add_job(
            hass.services.async_call("frontend", "set_theme", {"name": "night"})
        )

    def apply_day_mode():
        # Restore the remembered day volume (never below 0.1) and day theme.
        _LOGGER.info("Stop Night ")
        curr_volume_level = hass.states.get(
            "media_player.wbudowany_glosnik"
        ).attributes["volume_level"]
        # get volume level
        if ais_global.G_AIS_DAY_MEDIA_VOLUME_LEVEL is not None:
            vl = max(
                0.1, ais_global.G_AIS_DAY_MEDIA_VOLUME_LEVEL, curr_volume_level
            )
            hass.async_add_job(
                hass.services.async_call(
                    "media_player",
                    "volume_set",
                    {
                        "entity_id": "media_player.wbudowany_glosnik",
                        "volume_level": vl,
                    },
                )
            )
        hass.async_add_job(
            hass.services.async_call("frontend", "set_theme", {"name": "ais"})
        )

    if not timer:
        # call after change or on start
        quiet_mode_start_attr = hass.states.get(
            "input_datetime.ais_quiet_mode_start"
        ).attributes
        quiet_mode_stop_attr = hass.states.get(
            "input_datetime.ais_quiet_mode_stop"
        ).attributes
        # Seconds since midnight for "now" and for the configured window.
        th = datetime.datetime.now().hour * 60 * 60
        tm = datetime.datetime.now().minute * 60
        ts = th + tm
        qm_st = quiet_mode_start_attr["timestamp"]
        qm_et = quiet_mode_stop_attr["timestamp"]
        # if the times are equal and 0 we can set them as default
        if (qm_st == qm_et == 0) and quiet_mode == "on":
            hass.async_add_job(
                hass.services.async_call(
                    "input_datetime",
                    "set_datetime",
                    {
                        "entity_id": "input_datetime.ais_quiet_mode_start",
                        "time": "22:00",
                    },
                )
            )
            hass.async_add_job(
                hass.services.async_call(
                    "input_datetime",
                    "set_datetime",
                    {
                        "entity_id": "input_datetime.ais_quiet_mode_stop",
                        "time": "06:00",
                    },
                )
            )
        # if times are smaller than current time, this means that this time (hour and minute)
        # will be again tomorrow - add one day
        if int(qm_st) < int(ts):
            qm_st = int(qm_st) + 86400
        if int(qm_et) < int(ts):
            qm_et = int(qm_et) + 86400
        # if we are more close to night - apply day mode
        if (int(qm_st) > int(qm_et)) and quiet_mode == "on":
            apply_night_mode()
        else:
            apply_day_mode()
    if timer and quiet_mode == "on":
        # call from timer
        quiet_mode_start_attr = hass.states.get(
            "input_datetime.ais_quiet_mode_start"
        ).attributes
        quiet_mode_stop_attr = hass.states.get(
            "input_datetime.ais_quiet_mode_stop"
        ).attributes
        if quiet_mode_start_attr["timestamp"] != quiet_mode_stop_attr["timestamp"]:
            h = datetime.datetime.now().hour
            m = datetime.datetime.now().minute
            # Switch exactly at the configured boundary minute.
            if (
                quiet_mode_start_attr["hour"] == h
                and quiet_mode_start_attr["minute"] == m
            ):
                apply_night_mode()
            if (
                quiet_mode_stop_attr["hour"] == h
                and quiet_mode_stop_attr["minute"] == m
            ):
                apply_day_mode()
# register services
hass.services.async_register(DOMAIN, "process", process)
hass.services.async_register(DOMAIN, "process_code", process_code)
hass.services.async_register(DOMAIN, "say_it", say_it)
hass.services.async_register(DOMAIN, "say_in_browser", say_in_browser)
hass.services.async_register(DOMAIN, "welcome_home", welcome_home)
hass.services.async_register(
DOMAIN, "publish_command_to_frame", publish_command_to_frame
)
hass.services.async_register(
DOMAIN, "process_command_from_frame", process_command_from_frame
)
hass.services.async_register(DOMAIN, "prepare_remote_menu", prepare_remote_menu)
hass.services.async_register(
DOMAIN, "on_new_iot_device_selection", on_new_iot_device_selection
)
hass.services.async_register(DOMAIN, "set_context", async_set_context)
hass.services.async_register(DOMAIN, "check_local_ip", check_local_ip)
hass.services.async_register(DOMAIN, "check_night_mode", check_night_mode)
hass.services.async_register(DOMAIN, "mob_notify", async_mob_notify)
hass.services.async_register(DOMAIN, "mob_request", async_mob_request)
# register intents
hass.helpers.intent.async_register(GetTimeIntent())
hass.helpers.intent.async_register(GetDateIntent())
hass.helpers.intent.async_register(AisClimateSetTemperature())
hass.helpers.intent.async_register(AisClimateSetPresentMode())
hass.helpers.intent.async_register(AisClimateSetAllOn())
hass.helpers.intent.async_register(AisClimateSetAllOff())
hass.helpers.intent.async_register(TurnOnIntent())
hass.helpers.intent.async_register(TurnOffIntent())
hass.helpers.intent.async_register(ToggleIntent())
hass.helpers.intent.async_register(StatusIntent())
hass.helpers.intent.async_register(PersonStatusIntent())
hass.helpers.intent.async_register(PlayRadioIntent())
hass.helpers.intent.async_register(AisPlayPodcastIntent())
hass.helpers.intent.async_register(AisPlayYtMusicIntent())
hass.helpers.intent.async_register(AisPlaySpotifyIntent())
hass.helpers.intent.async_register(AskQuestionIntent())
hass.helpers.intent.async_register(AskWikiQuestionIntent())
hass.helpers.intent.async_register(ChangeContextIntent())
hass.helpers.intent.async_register(AisGetWeather())
hass.helpers.intent.async_register(AisGetWeather48())
hass.helpers.intent.async_register(AisLampsOn())
hass.helpers.intent.async_register(AisLampsOff())
hass.helpers.intent.async_register(AisSwitchesOn())
hass.helpers.intent.async_register(AisSwitchesOff())
hass.helpers.intent.async_register(AisOpenCover())
hass.helpers.intent.async_register(AisCloseCover())
hass.helpers.intent.async_register(AisStop())
hass.helpers.intent.async_register(AisPlay())
hass.helpers.intent.async_register(AisNext())
hass.helpers.intent.async_register(AisPrev())
hass.helpers.intent.async_register(AisSceneActive())
hass.helpers.intent.async_register(AisRunAutomation())
hass.helpers.intent.async_register(AisAskGoogle())
hass.helpers.intent.async_register(AisSayIt())
hass.helpers.intent.async_register(SpellStatusIntent())
async_register(hass, INTENT_GET_WEATHER, ["[aktualna] pogoda", "jaka jest pogoda"])
async_register(
hass,
INTENT_GET_WEATHER_48,
["prognoza pogody", "pogoda prognoza", "jaka będzie pogoda"],
)
async_register(
hass,
INTENT_CLIMATE_SET_TEMPERATURE,
[
"Ogrzewanie [w] {item} {temp} stopni[e]",
"Ogrzewanie [w] {item} temperatura {temp} stopni[e]",
],
)
async_register(hass, INTENT_CLIMATE_SET_PRESENT_MODE, ["Ogrzewanie tryb {item}"])
async_register(hass, INTENT_CLIMATE_SET_ALL_OFF, ["Wyłącz całe ogrzewanie"])
async_register(hass, INTENT_CLIMATE_SET_ALL_ON, ["Włącz całe ogrzewanie"])
async_register(
hass,
INTENT_LAMPS_ON,
[
"włącz światła",
"zapal światła",
"włącz wszystkie światła",
"zapal wszystkie światła",
],
)
async_register(
hass,
INTENT_LAMPS_OFF,
[
"zgaś światła",
"wyłącz światła",
"wyłącz wszystkie światła",
"zgaś wszystkie światła",
],
)
async_register(
hass, INTENT_SWITCHES_ON, ["włącz przełączniki", "włącz wszystkie przełączniki"]
)
async_register(
hass,
INTENT_SWITCHES_OFF,
["wyłącz przełączniki", "wyłącz wszystkie przełączniki"],
)
async_register(
hass,
INTENT_GET_TIME,
[
"która",
"która [jest] [teraz] godzina",
"którą mamy godzinę",
"jaki [jest] czas",
"[jaka] [jest] godzina",
],
)
async_register(
hass,
INTENT_GET_DATE,
[
"[jaka] [jest] data",
"jaki [mamy] [jest] [dzisiaj] dzień",
"co dzisiaj jest",
"co [mamy] [jest] dzisiaj",
],
)
async_register(
hass,
INTENT_PLAY_RADIO,
[
"Włącz radio",
"Radio {item}",
"Włącz radio {item}",
"Graj radio {item}",
"Graj {item} radio",
"Posłuchał bym radio {item}",
"Włącz stację radiową {item}",
],
)
async_register(
hass,
INTENT_PLAY_PODCAST,
[
"Podcast {item}",
"Włącz podcast {item}",
"Graj podcast {item}",
"Graj {item} podcast",
"Posłuchał bym podcast {item}",
],
)
async_register(
hass,
INTENT_PLAY_YT_MUSIC,
[
"Muzyka {item}",
"Włącz muzykę {item}",
"Graj muzykę {item}",
"Graj {item} muzykę",
"Posłuchał bym muzykę {item}",
"Włącz [z] [na] YouTube {item}",
"YouTube {item}",
],
)
async_register(hass, INTENT_PLAY_SPOTIFY, ["Spotify {item}"])
async_register(hass, INTENT_TURN_ON, ["Włącz {item}", "Zapal światło w {item}"])
async_register(hass, INTENT_TURN_OFF, ["Wyłącz {item}", "Zgaś Światło w {item}"])
async_register(hass, INTENT_TOGGLE, ["Przełącz {item}"])
async_register(
hass,
INTENT_STATUS,
[
"Jaka jest {item}",
"Jaki jest {item}",
"Jak jest {item}",
"Jakie jest {item}",
"[jaki] [ma] status {item}",
],
)
async_register(
hass,
INTENT_ASK_QUESTION,
[
"Co to jest {item}",
"Kto to jest {item}",
"Znajdź informację o {item}",
"Znajdź informacje o {item}",
"Wyszukaj informację o {item}",
"Wyszukaj informacje o {item}",
"Wyszukaj {item}",
"Kim jest {item}",
"Informacje o {item}",
"Czym jest {item}",
"Opowiedz mi o {intem}",
"Informację na temat {item}",
"Co wiesz o {item}",
"Co wiesz na temat {item}",
"Opowiedz o {item}",
"Kim są {item}",
"Kto to {item}",
],
)
async_register(hass, INTENT_SPELL_STATUS, ["Przeliteruj {item}", "Literuj {item}"])
async_register(
hass,
INTENT_ASKWIKI_QUESTION,
["Wikipedia {item}", "wiki {item}", "encyklopedia {item}"],
)
async_register(hass, INTENT_OPEN_COVER, ["Otwórz {item}", "Odsłoń {item}"])
async_register(hass, INTENT_CLOSE_COVER, ["Zamknij {item}", "Zasłoń {item}"])
async_register(
hass, INTENT_STOP, ["Stop", "Zatrzymaj", "Koniec", "Pauza", "Zaniechaj", "Stój"]
)
async_register(hass, INTENT_PLAY, ["Start", "Graj", "Odtwarzaj"])
async_register(hass, INTENT_SCENE, ["Scena {item}", "Aktywuj [scenę] {item}"])
async_register(
hass, INTENT_RUN_AUTOMATION, ["Uruchom {item}", "Automatyzacja {item}", "Jolka {item}"]
)
async_register(hass, INTENT_ASK_GOOGLE, ["Google {item}"])
async_register(
hass, INTENT_PERSON_STATUS, ["Gdzie jest {item}", "Lokalizacja {item}"]
)
async_register(
hass,
INTENT_NEXT,
["[włącz] następny", "[włącz] kolejny", "[graj] następny", "[graj] kolejny"],
)
async_register(
hass,
INTENT_PREV,
[
"[włącz] poprzedni",
"[włącz] wcześniejszy",
"[graj] poprzedni",
"[graj] wcześniejszy",
],
)
async_register(
hass,
INTENT_SAY_IT,
["Powiedz", "Mów", "Powiedz {item}", "Mów {item}", "Echo {item}"],
)
# initial status of the player
hass.states.async_set("sensor.ais_player_mode", "ais_favorites")
# sensors
hass.states.async_set("sensor.aisknowledgeanswer", "", {"text": ""})
hass.states.async_set(
"sensor.ais_wifi_service_current_network_info",
0,
{
"friendly_name": "Prędkość połączenia",
"unit_of_measurement": "MB",
"icon": "mdi:speedometer",
},
)
async def ais_run_each_minute(now):
await hass.services.async_call(
"ais_ai_service", "check_night_mode", {"timer": True}
)
async def ais_run_each_minute2(now):
await hass.services.async_call(
"ais_ai_service", "check_night_mode", {"timer": True}
)
time_now = datetime.datetime.now()
current_time = time_now.strftime("%H%M")
await hass.services.async_call(
"ais_shell_command", "set_clock_display_text", {"text": current_time + "0"}
)
# run each minute at first second
_dt = dt_util.utcnow()
if ais_global.has_front_clock():
event.async_track_utc_time_change(hass, ais_run_each_minute2, second=1)
else:
event.async_track_utc_time_change(hass, ais_run_each_minute, second=1)
# AIS agent
agent = AisAgent(hass)
conversation.async_set_agent(hass, agent)
return True
async def _publish_command_to_frame(hass, key, val, ip=None):
    """Send a command to the AIS Android frame over its local HTTP API.

    ``key`` selects the command; ``val`` carries its payload (for the Wi-Fi
    commands it is the ';'-separated option string from the selector).
    ``ip`` defaults to the local frame ("localhost").
    """
    # sent the command to the android frame via http
    if ip is None:
        ip = "localhost"
    url = ais_global.G_HTTP_REST_SERVICE_BASE_URL.format(ip)
    if key == "WifiConnectToSid":
        # val format: "<ssid>; <rssi info>; <capabilities>; <freq>; MAC: <bssid>"
        ssid = val.split(";")[0]
        if ssid is None or ssid == "-" or ssid == "":
            _say_it(hass, "Wybierz sieć WiFi z listy")
            return
        # TODO get password from file
        password = hass.states.get("input_text.ais_android_wifi_password").state
        if len(password.strip()) == 0:
            _say_it(hass, "ok, przełączam na sieć: " + ssid)
        else:
            _say_it(hass, "ok, łączę z siecią: " + ssid)
        wifi_type = val.split(";")[-3]
        bssid = val.split(";")[-1].replace("MAC:", "").strip()
        requests_json = {
            key: ssid,
            "ip": ip,
            "WifiNetworkPass": password,
            "WifiNetworkType": wifi_type,
            "bssid": bssid,
        }
    elif key == "WifiConnectTheDevice":
        iot = val.split(";")[0]
        if iot == ais_global.G_EMPTY_OPTION:
            _say_it(hass, "wybierz urządzenie które mam dołączyć")
            return
        # check if wifi is selected
        ssid = hass.states.get("input_select.ais_android_wifi_network").state.split(
            ";"
        )[0]
        if ssid == ais_global.G_EMPTY_OPTION:
            _say_it(hass, "wybierz wifi do której mam dołączyć urządzenie")
            return
        # take bssid
        bssid = val.split(";")[-1].replace("MAC:", "").strip()
        # check the frequency
        wifi_frequency_mhz = val.split(";")[-2]
        if not wifi_frequency_mhz.startswith("2.4"):
            # NOTE(review): there is no ``return`` here -- pairing continues
            # even after warning about a non-2.4 GHz network; confirm intended.
            _say_it(
                hass,
                "Urządzenia mogą pracować tylko w sieci 2.4 GHz, wybierz inną sieć.",
            )
        # check if name is selected, if not then add the device name
        name = hass.states.get("input_text.ais_iot_device_name").state
        # friendly name (32 chars max)
        if name == "":
            name = iot
        if len(name) > 32:
            _say_it(hass, "nazwa urządzenie może mieć maksymalnie 32 znaki")
            return
        _say_it(hass, "OK, dodajemy: " + name)
        password = hass.states.get("input_text.ais_iot_device_wifi_password").state
        # save the time when this was executed
        # to inform the user about new device
        import time

        ais_global.G_AIS_NEW_DEVICE_NAME = name
        ais_global.G_AIS_NEW_DEVICE_START_ADD_TIME = time.time()
        # NOTE(review): "bsssid" (three 's') differs from the "bssid" key used
        # in the WifiConnectToSid branch -- looks like a typo, but the Android
        # side may expect this exact key; verify before changing.
        requests_json = {
            key: iot,
            "ip": ip,
            "WifiNetworkPass": password,
            "WifiNetworkSsid": ssid,
            "IotName": name,
            "bsssid": bssid,
        }
    elif key == "showCamera":
        # Resolve the HA camera entity to its stream URL for the frame player.
        component = hass.data.get("camera")
        camera = component.get_entity(val)
        stream_source = await camera.stream_source()
        requests_json = {"showCamera": {"streamUrl": stream_source, "haCamId": val}}
    elif key == "WifiConnectionInfo":
        requests_json = {key: val, "ip": ip}
        # tunnel guard
        access = hass.states.get("input_boolean.ais_remote_access").state
        gate_id = ais_global.get_sercure_android_id_dom()
        if access == "on":
            try:
                # Probe the remote-access tunnel; 404 would mean it is down.
                # r = requests.get('http://httpbin.org/status/404', timeout=10)
                r = requests.get("http://" + gate_id + ".paczka.pro", timeout=10)
                if r.status_code == 404:
                    pass
                    # command = "pm2 restart tunnel || pm2 start /data/data/pl.sviete.dom/files/usr/bin/cloudflared" \
                    #     " --name tunnel --output /dev/null --error /dev/null" \
                    #     " --restart-delay=150000 -- --hostname http://{}.paczka.pro" \
                    #     " --url http://localhost:8180".format(gate_id)
                    # subprocess.Popen(
                    #     command,
                    #     shell=True,  # nosec
                    #     stdout=None,
                    #     stderr=None,
                    # )
            except Exception:
                pass
    else:
        # Generic pass-through command.
        requests_json = {key: val, "ip": ip}
    try:
        # Best effort -- the frame may be briefly unreachable; short timeout.
        requests.post(url + "/command", json=requests_json, timeout=2)
    except Exception:
        pass
def _wifi_rssi_to_info(rssi):
info = "moc nieznana"
if rssi > -31:
return "moc doskonała (" + str(rssi) + ")"
if rssi > -68:
return "moc bardzo dobra (" + str(rssi) + ")"
if rssi > -71:
return "moc dobra (" + str(rssi) + ")"
if rssi > -81:
return "moc słaba (" + str(rssi) + ")"
if rssi > -91:
return "moc bardzo słaba (" + str(rssi) + ")"
return info
def _wifi_frequency_info(mhz):
if str(mhz).startswith("2"):
return "2.4 GHz"
elif str(mhz).startswith("5"):
return "5 GHz"
return str(mhz)
def _publish_wifi_status(hass, service):
    """Parse a Wi-Fi scan payload and publish the networks to the selector.

    Stores the raw scan in ais_global and fills
    ``input_select.ais_android_wifi_network`` with one formatted option per
    non-empty SSID.  Returns the number of networks found (the empty
    placeholder option is not counted).
    """
    scan = json.loads(service.data["payload"])
    ais_global.GLOBAL_SCAN_WIFI_ANSWER = scan
    options = [ais_global.G_EMPTY_OPTION]
    for network in scan["ScanResult"]:
        if len(network["ssid"]) == 0:
            continue
        # "<ssid>; <rssi info>; <capabilities>; <band>; MAC: <bssid>"
        options.append(
            "; ".join(
                [
                    network["ssid"],
                    _wifi_rssi_to_info(network["rssi"]),
                    network["capabilities"],
                    _wifi_frequency_info(network["frequency_mhz"]),
                    "MAC: " + network["bssid"],
                ]
            )
        )
    hass.async_run_job(
        hass.services.call(
            "input_select",
            "set_options",
            {
                "entity_id": "input_select.ais_android_wifi_network",
                "options": options,
            },
        )
    )
    return len(options) - 1
def _process_command_from_frame(hass, service):
    """Dispatch a single message received from the AIS Android frame.

    The frame publishes messages on MQTT-style topics; ``service.data``
    carries ``topic`` plus a (usually JSON encoded) ``payload``.  Each
    recognised topic is handled here; anything else is republished on the
    internal MQTT bus.
    """
    # process the message from frame
    if "topic" not in service.data:
        return
    if service.data["topic"] == "ais/speech_command":
        # Forward a recognised speech command to the conversation agent.
        hass.async_run_job(
            hass.services.async_call(
                "conversation", "process", {"text": service.data["payload"]}
            )
        )
        return
    elif service.data["topic"] == "ais/key_command":
        # Remote-control key press.
        _process_code(hass, json.loads(service.data["payload"]))
        return
    elif service.data["topic"] == "ais/speech_text":
        _say_it(hass, service.data["payload"])
        return
    elif service.data["topic"] == "ais/speech_status":
        # AIS service.data["payload"] can be: START -> DONE/ERROR
        event_data = {"status": str(service.data["payload"])}
        hass.bus.fire("ais_speech_status", event_data)
        hass.states.async_set(
            "sensor.ais_speech_status", str(service.data["payload"]), {}
        )
        _LOGGER.debug("speech_status: " + str(service.data["payload"]))
        return
    elif service.data["topic"] == "ais/add_bookmark":
        try:
            bookmark = json.loads(service.data["payload"])
            hass.async_run_job(
                hass.services.call(
                    "ais_bookmarks",
                    "add_bookmark",
                    {
                        "attr": {
                            "media_title": bookmark["media_title"],
                            "source": bookmark["media_source"],
                            "media_position": bookmark["media_position"],
                            "media_content_id": bookmark["media_content_id"],
                            "media_stream_image": bookmark["media_stream_image"],
                        }
                    },
                )
            )
        except Exception as e:
            _LOGGER.info("problem to add_bookmark: " + str(e))
        return
    elif service.data["topic"] == "ais/player_speed":
        # Playback-speed reporting is currently disabled.
        # speed = json.loads(service.data["payload"])
        # _say_it(hass, "prędkość odtwarzania: " + str(speed["currentSpeed"]))
        return
    elif service.data["topic"] == "ais/wifi_scan_info":
        len_wifis = _publish_wifi_status(hass, service)
        info = "Mamy dostępne " + str(len_wifis) + " wifi."
        _say_it(hass, info)
        return
    elif service.data["topic"] == "ais/iot_scan_info":
        iot = json.loads(service.data["payload"])
        iot_names = [ais_global.G_EMPTY_OPTION]
        for item in iot["ScanResult"]:
            if len(item["ssid"]) > 0:
                iot_names.append(
                    item["ssid"]
                    + "; "
                    + _wifi_rssi_to_info(item["rssi"])
                    + "; "
                    + item["capabilities"]
                )
        hass.async_run_job(
            hass.services.async_call(
                "input_select",
                "set_options",
                {
                    "entity_id": "input_select.ais_iot_devices_in_network",
                    "options": iot_names,
                },
            )
        )
        if len(iot_names) == 1:
            info = "Nie znaleziono żadnego nowego urządzenia"
        elif len(iot_names) == 2:
            # Exactly one device found; ``item`` still holds the last scan
            # entry from the loop above, so report its model by name.
            if item["model"] == ais_global.G_MODEL_SONOFF_S20:
                info = "Znaleziono nowe inteligentne gniazdo"
            elif item["model"] == ais_global.G_MODEL_SONOFF_SLAMPHER:
                info = "Znaleziono nową oprawkę"
            elif item["model"] == ais_global.G_MODEL_SONOFF_TOUCH:
                info = "Znaleziono nowy przełącznik dotykowy"
            elif item["model"] == ais_global.G_MODEL_SONOFF_TH:
                info = "Znaleziono nowy przełącznik z czujnikami"
            elif item["model"] == ais_global.G_MODEL_SONOFF_B1:
                info = "Znaleziono nową żarówkę"
            elif item["model"] == ais_global.G_MODEL_SONOFF_POW:
                info = "Znaleziono nowy przełącznik z pomiarem mocy"
            elif item["model"] == ais_global.G_MODEL_SONOFF_DUAL:
                info = "Znaleziono nowy podwójny przełącznik"
            elif item["model"] == ais_global.G_MODEL_SONOFF_BASIC:
                info = "Znaleziono nowy przełącznik"
            elif item["model"] == ais_global.G_MODEL_SONOFF_IFAN:
                info = "Znaleziono nowy wentylator sufitowy"
            elif item["model"] == ais_global.G_MODEL_SONOFF_T11:
                info = "Znaleziono nowy przełącznik dotykowy pojedynczy"
            elif item["model"] == ais_global.G_MODEL_SONOFF_T12:
                info = "Znaleziono nowy przełącznik dotykowy podwójny"
            elif item["model"] == ais_global.G_MODEL_SONOFF_T13:
                info = "Znaleziono nowy przełącznik dotykowy potrójny"
            else:
                info = "Znaleziono nowe inteligentne urządzenie"
        else:
            info = "Znaleziono " + str(len(iot_names) - 1) + " nowe urządzenia"
        # check if we are doing this from remote
        if (
            len(iot_names) > 1
            and CURR_ENTITIE
            in (
                "sensor.ais_connect_iot_device_info",
                "script.ais_scan_iot_devices_in_network",
            )
            and CURR_BUTTON_CODE == 23
        ):
            info = (
                info
                + ". Sprawdź wszystkie parametry, naciśnij strzałkę w prawo, by przejść dalej. "
                "Na koniec uruchom: Dołącz nowe urządzenie."
            )
            # prepare form data
            set_curr_entity(hass, "script.ais_scan_iot_devices_in_network")
            hass.async_run_job(
                hass.services.async_call(
                    "input_select",
                    "select_next",
                    {"entity_id": "input_select.ais_iot_devices_in_network"},
                )
            )
        _say_it(hass, info)
        return
    elif service.data["topic"] == "ais/wifi_status_info":
        _publish_wifi_status(hass, service)
        return
    elif service.data["topic"] == "ais/ais_gate_req_answer":
        cci = json.loads(service.data["payload"])
        ais_global.set_ais_gate_req(cci["req_id"], cci["req_answer"])
        return
    elif service.data["topic"] == "ais/wifi_connection_info":
        # current connection info
        cci = json.loads(service.data["payload"])
        attr = {
            "friendly_name": "Prędkość połączenia",
            "unit_of_measurement": "MB",
            "icon": "mdi:speedometer",
        }
        desc = ""
        speed = 0
        if "ais_gate_id" in cci:
            pass
            # ais_global.G_AIS_SECURE_ANDROID_ID_DOM = cci["ais_gate_id"]
        if "pass" in cci:
            ais_global.set_my_wifi_pass(cci["pass"])
        if "ssid" in cci:
            ais_global.set_my_ssid(cci["ssid"])
            attr["ssid"] = cci["ssid"]
            if cci["ssid"] == "<unknown ssid>":
                desc += "brak informacji o połączeniu"
            else:
                desc += cci["ssid"]
        if "link_speed_mbps" in cci:
            desc += (
                "; prędkość: "
                + str(cci["link_speed_mbps"])
                + " megabitów na sekundę"
            )
            attr["link_speed_mbps"] = cci["link_speed_mbps"]
            speed = cci["link_speed_mbps"]
        if "rssi" in cci:
            desc += "; " + _wifi_rssi_to_info(cci["rssi"])
            attr["rssi"] = cci["rssi"]
        if "frequency_mhz" in cci:
            desc += "; " + _wifi_frequency_info(cci["frequency_mhz"])
            attr["frequency_mhz"] = cci["frequency_mhz"]
        attr["description"] = desc
        hass.states.async_set(
            "sensor.ais_wifi_service_current_network_info", speed, attr
        )
        return
    elif service.data["topic"] == "ais/wifi_state_change_info":
        # current connection info
        cci = json.loads(service.data["payload"])
        ais_global.set_my_ssid(cci["ssid"])
        # check if we are now online
        if ais_global.GLOBAL_MY_IP == "127.0.0.1":
            ais_global.set_global_my_ip(None)
            if ais_global.GLOBAL_MY_IP != "127.0.0.1":
                pass
                # if yes then try to reload the cloud and other components
                # TODO reload invalid components
        return
    elif service.data["topic"] == "ais/go_to_player":
        go_to_player(hass, False)
    elif service.data["topic"] == "ais/ip_state_change_info":
        pl = json.loads(service.data["payload"])
        ais_global.set_global_my_ip(pl["ip"])
        icon = "mdi:access-point-network"
        friendly_name = "Lokalny adres IP"
        if "type" in pl:
            # see android ConnectivityManager
            # BUG FIX: the original compared the *builtin* ``type`` with the
            # constants below, so no branch could ever match; compare the
            # value from the payload instead.
            net_type = str(pl["type"])
            if net_type == "-1":
                # TYPE_NONE
                icon = "mdi:lan-disconnect"
                friendly_name = "Lokalny adres IP - "
            elif net_type == "9":
                # TYPE_ETHERNET
                icon = "mdi:ethernet"
                friendly_name = "Lokalny adres IP (ethernet)"
            elif net_type == "1":
                # TYPE_WIFI
                icon = "mdi:wifi-strength-4-lock"
                friendly_name = "Lokalny adres IP (wifi)"
        hass.states.async_set(
            "sensor.internal_ip_address",
            pl["ip"],
            {"friendly_name": friendly_name, "icon": icon},
        )
    elif service.data["topic"] == "ais/player_status":
        # try to get current volume
        try:
            message = json.loads(service.data["payload"])
            ais_global.G_AIS_DAY_MEDIA_VOLUME_LEVEL = (
                message.get("currentVolume", 0) / 100
            )
        except Exception:
            _LOGGER.info(
                "ais_global.G_AIS_DAY_MEDIA_VOLUME_LEVEL: "
                + str(ais_global.G_AIS_DAY_MEDIA_VOLUME_LEVEL)
            )
        # Status from a remote gate arrives as an object; local status is
        # already a JSON string.
        if "ais_gate_client_id" in service.data:
            json_string = json.dumps(service.data["payload"])
        else:
            json_string = service.data["payload"]
        hass.async_run_job(
            hass.services.async_call(
                "media_player",
                "play_media",
                {
                    "entity_id": ais_global.G_LOCAL_EXO_PLAYER_ENTITY_ID,
                    "media_content_type": "exo_info",
                    "media_content_id": json_string,
                },
            )
        )
    elif service.data["topic"] == "ais/execute_script":
        hass.services.call(
            "ais_shell_command", "execute_script", {"script": service.data["payload"]}
        )
    elif service.data["topic"] == "ais/tts_voice":
        # this is done only once on start to set the voice on hass from android
        voice = service.data["payload"]
        set_voice = "Jola lokalnie"
        if voice == "pl-pl-x-oda-network":
            set_voice = "Jola online"
        elif voice == "pl-pl-x-oda#female_1-local":
            set_voice = "Celina"
        elif voice == "pl-pl-x-oda#female_2-local":
            set_voice = "Anżela"
        elif voice == "pl-pl-x-oda#female_3-local":
            set_voice = "Asia"
        elif voice == "pl-pl-x-oda#male_1-local":
            set_voice = "Sebastian"
        elif voice == "pl-pl-x-oda#male_2-local":
            set_voice = "Bartek"
        elif voice == "pl-pl-x-oda#male_3-local":
            set_voice = "Andrzej"
        current_voice = hass.states.get("input_select.assistant_voice").state
        if current_voice != set_voice:
            # we will inform the frame about change in EVENT_STATE_CHANGED listener
            hass.async_run_job(
                hass.services.async_call(
                    "input_select",
                    "select_option",
                    {"entity_id": "input_select.assistant_voice", "option": set_voice},
                )
            )
        else:
            # EVENT_STATE_CHANGED listener will not notice this change - publish info to frame about voice
            hass.services.call(
                "ais_ai_service",
                "publish_command_to_frame",
                {"key": "setTtsVoice", "val": voice},
            )
    elif service.data["topic"] == "ais/trim_memory":
        _LOGGER.warning("trim_memory " + str(service.data["payload"]))
        try:
            import os

            if str(service.data["payload"]) == "15":
                # TRIM_MEMORY_RUNNING_CRITICAL
                tot_m, used_m, free_m = map(
                    int, os.popen("free -t -m").readlines()[-1].split()[1:]
                )
                _LOGGER.warning(
                    "TRIM_MEMORY_RUNNING_CRITICAL, used memory: " + str(used_m)
                )
                # check if we can clear database
                if "dbUrl" in ais_global.G_DB_SETTINGS_INFO:
                    if ais_global.G_DB_SETTINGS_INFO["dbUrl"].startswith(
                        "sqlite:///:memory:"
                    ):
                        _LOGGER.warning("recorder -> purge keep_days: 0")
                        hass.services.call(
                            "recorder", "purge", {"keep_days": 0, "repack": True}
                        )
                else:
                    # try to kill some heavy process
                    # Get List of all running process sorted by Highest Memory Usage
                    list_of_proc_objects = []
                    for proc in psutil.process_iter():
                        try:
                            # Fetch process details as dict
                            pinfo = proc.as_dict(attrs=["pid", "name", "username"])
                            pinfo["vms"] = proc.memory_info().vms / (1024 * 1024)
                            list_of_proc_objects.append(pinfo)
                        except (
                            psutil.NoSuchProcess,
                            psutil.AccessDenied,
                            psutil.ZombieProcess,
                        ):
                            pass
                    # Sort list of dict by key vms i.e. memory usage
                    list_of_proc_objects = sorted(
                        list_of_proc_objects,
                        key=lambda proc_obj: proc_obj["vms"],
                        reverse=True,
                    )
                    # Log (do not yet kill) the top 5 processes by memory usage.
                    for elem in list_of_proc_objects[:5]:
                        _LOGGER.error("We should kill: " + str(elem))
        except Exception:
            # Best effort only -- trimming must never crash the dispatcher.
            pass
    elif service.data["topic"] == "ais/trim_storage":
        _LOGGER.warning("trim_storage " + str(service.data["payload"]))
        _LOGGER.warning("ACTION_DEVICE_STORAGE_LOW report form Android")
        # check if we can clear database
        if hass.services.has_service("recorder", "purge"):
            _LOGGER.warning("recorder -> purge keep_days: 0")
            hass.services.call("recorder", "purge", {"keep_days": 0, "repack": True})
        _LOGGER.warning("ais -> flush_logs")
        hass.services.call("ais_shell_command", "flush_logs")
    elif service.data["topic"] == "ais/sip_event":
        event_data = {"event": str(service.data["payload"])}
        hass.bus.fire("ais_sip_event", event_data)
        _LOGGER.info("sip_event " + str(event_data))
    else:
        # TODO process this without mqtt
        # player_status and speech_status
        mqtt.async_publish(hass, service.data["topic"], service.data["payload"], 2)
    # TODO
    return
def _post_message(
    message,
    hass,
    exclude_say_it=None,
    pitch=None,
    rate=None,
    language=None,
    voice=None,
    path=None,
):
    """Forward *message* to the local Android TTS endpoint.

    Unset keyword arguments fall back to the global TTS configuration
    (pitch, rate, voice), Polish language and an empty path.  A shortened
    copy of the text is also spoken in the browser via ais_ai_service.
    """
    payload = {
        "text": message,
        "pitch": ais_global.GLOBAL_TTS_PITCH if pitch is None else pitch,
        "rate": ais_global.GLOBAL_TTS_RATE if rate is None else rate,
        "language": "pl_PL" if language is None else language,
        "voice": ais_global.GLOBAL_TTS_VOICE if voice is None else voice,
        "path": "" if path is None else path,
    }
    # Browser speech is limited to ~250 chars; prefer to cut on a word
    # boundary just past the limit, falling back to a hard cut.
    browser_text = message
    if len(browser_text) > 250:
        cut = browser_text.find(" ", 250)
        browser_text = browser_text[:cut] if cut > 250 else browser_text[:250]
    hass.async_add_job(
        hass.services.async_call(
            "ais_ai_service", "say_in_browser", {"text": browser_text}
        )
    )
    # Best effort -- the frame may be unavailable; never raise to the caller.
    try:
        requests.post(
            ais_global.G_HTTP_REST_SERVICE_BASE_URL.format("127.0.0.1")
            + "/text_to_speech",
            json=payload,
            timeout=1,
        )
    except Exception:
        pass
def _beep_it(hass, tone):
    """Ask the Android frame to play a system tone.

    Tone codes follow android.media.ToneGenerator:
    https://android.googlesource.com/platform/frameworks/base/+/b267554/media/java/android/media/ToneGenerator.java
    """
    command = {"key": "tone", "val": tone}
    hass.services.call("ais_ai_service", "publish_command_to_frame", command)
def _say_it(
    hass,
    message,
    img=None,
    exclude_say_it=None,
    pitch=None,
    rate=None,
    language=None,
    voice=None,
    path=None,
):
    """Speak *message* on the panel and mirror it in the answer sensor."""
    # The TTS engine cannot read the degree symbol -- spell it out.
    message = message.replace("°C", "stopni Celsjusza")
    _post_message(
        message=message,
        hass=hass,
        exclude_say_it=exclude_say_it,
        pitch=pitch,
        rate=rate,
        language=language,
        voice=voice,
        path=path,
    )
    # Sensor attribute text is capped at ~2000 characters.
    tts_text = message[0:1999] + "..." if len(message) > 1999 else message + " "
    if img is not None:
        tts_text = tts_text + " \n\n" + ""
    # State values are capped at 100 characters; the full text lives in attrs.
    answer = message[0:100] + "..." if len(message) > 100 else message
    hass.states.async_set("sensor.aisknowledgeanswer", answer, {"text": tts_text})
def _create_matcher(utterance):
"""Create a regex that matches the utterance."""
# Split utterance into parts that are type: NORMAL, GROUP or OPTIONAL
# Pattern matches (GROUP|OPTIONAL): Change light to [the color] {item}
parts = re.split(r"({\w+}|\[[\w\s]+\] *)", utterance)
# Pattern to extract name from GROUP part. Matches {item}
group_matcher = re.compile(r"{(\w+)}")
# Pattern to extract text from OPTIONAL part. Matches [the color]
optional_matcher = re.compile(r"\[([\w ]+)\] *")
pattern = ["^"]
for part in parts:
group_match = group_matcher.match(part)
optional_match = optional_matcher.match(part)
# Normal part
if group_match is None and optional_match is None:
pattern.append(part)
continue
# Group part
if group_match is not None:
pattern.append(fr"(?P<{group_match.groups()[0]}>[\w ]+?)\s*")
# Optional part
elif optional_match is not None:
pattern.append(fr"(?:{optional_match.groups()[0]} *)?")
pattern.append("$")
return re.compile("".join(pattern), re.I)
def _process_code(hass, data):
    """Process a key code reported by the remote control.

    *data* carries "Action" (android.view.KeyEvent action: 0 = down,
    1 = up, 2 = multiple) and "KeyCode". Updates the remote-state
    globals, publishes the code to the web app and the event bus, then
    dispatches navigation/playback handling for the pressed button.
    """
    global CURR_BUTTON_CODE
    global CURR_BUTTON_LONG_PRESS
    global CURR_ENTITIE_ENTERED
    global CURR_REMOTE_MODE_IS_IN_AUDIO_MODE
    # ignore malformed payloads
    if "Action" not in data or "KeyCode" not in data:
        return
    action = data["Action"]
    code = data["KeyCode"]
    if "onDisplay" in data:
        # set the code in global variable
        CURR_BUTTON_CODE = code
        # show the code in web app
        hass.states.set("binary_sensor.ais_remote_button", code)
        event_data = {"action": action, "code": code, "long": CURR_BUTTON_LONG_PRESS}
        hass.bus.fire("ais_key_event", event_data)
        return
    # fix - when the mouse mode on remote is on, the remote is sending only the code 23 (OK) as key down (action 0)
    # to handle this we are ignoring the key up (action 1), and key down (action 0) is changing to key up (action 1)
    if code == 23:
        if action == 1:
            return
        else:
            action = 1
    # ACTION_DOWN = 0; ACTION_UP = 1;
    if action == 0:
        CURR_BUTTON_LONG_PRESS = False
        if "RepeatCount" in data:
            if data["RepeatCount"] > 0:
                CURR_BUTTON_LONG_PRESS = True
        # plain key-down is ignored; only auto-repeat (long press) falls through
        if CURR_BUTTON_LONG_PRESS is False:
            return
    elif action == 2:
        # ACTION_MULTIPLE = 2;
        _LOGGER.debug("long press on " + str(data))
        return
    elif action == 1:
        # ACTION_UP
        # to prevent up action after long press
        if CURR_BUTTON_LONG_PRESS is True:
            CURR_BUTTON_LONG_PRESS = False
    # set the code in global variable
    CURR_BUTTON_CODE = code
    # show the code in web app
    hass.states.set("binary_sensor.ais_remote_button", code)
    event_data = {"action": action, "code": code, "long": CURR_BUTTON_LONG_PRESS}
    hass.bus.fire("ais_key_event", event_data)
    # remove selected action
    remove_selected_action(code)
    # decode Key Events
    # codes according to android.view.KeyEvent
    if code == 93:
        # PG- -> KEYCODE_PAGE_DOWN
        set_bookmarks_curr_group(hass)
        set_curr_entity(hass, "sensor.aisbookmarkslist")
        CURR_ENTITIE_ENTERED = True
        say_curr_entity(hass)
    elif code == 92:
        # PG+ -> KEYCODE_PAGE_UP
        set_favorites_curr_group(hass)
        CURR_ENTITIE_ENTERED = True
        # go to bookmarks
        set_curr_entity(hass, "sensor.aisfavoriteslist")
        say_curr_entity(hass)
    elif code == 4:
        # Back arrow, go up in menu/groups -> KEYCODE_BACK
        # or go up in local folder structure
        go_up_in_menu(hass)
    elif code == 82:
        # Menu -> KEYCODE_MENU
        set_next_group_view()
        say_curr_group_view(hass)
    elif code == 164:
        # Mute -> KEYCODE_VOLUME_MUTE
        pass
    elif code == 71:
        # MIC DOWN -> KEYCODE_LEFT_BRACKET
        pass
    elif code == 72:
        # MIC UP -> KEYCODE_RIGHT_BRACKET
        pass
    elif code == 19:
        # Dpad up -> KEYCODE_DPAD_UP
        set_on_dpad_up(hass, CURR_BUTTON_LONG_PRESS)
        pass
    elif code == 20:
        # Dpad down -> KEYCODE_DPAD_DOWN
        set_on_dpad_down(hass, CURR_BUTTON_LONG_PRESS)
        pass
    elif code == 21:
        # Dpad left -> KEYCODE_DPAD_LEFT
        set_focus_on_prev_entity(hass, CURR_BUTTON_LONG_PRESS)
    elif code == 22:
        # Dpad right -> KEYCODE_DPAD_RIGHT
        set_focus_on_next_entity(hass, CURR_BUTTON_LONG_PRESS)
    elif code == 23:
        # Dpad center -> KEYCODE_DPAD_CENTER
        select_entity(hass, CURR_BUTTON_LONG_PRESS)
    elif code == 25:
        # Volume down -> KEYCODE_VOLUME_DOWN
        pass
    elif code == 24:
        # Volume up -> KEYCODE_VOLUME_UP
        pass
    # button to switch from dom to audio, 190 - legacy button_3, new 170 - tv
    elif code == 190 or code == 170:
        # go home -> KEYCODE_HOME
        if CURR_BUTTON_LONG_PRESS:
            go_to_player(hass, True)
        else:
            # toggle mode
            if CURR_REMOTE_MODE_IS_IN_AUDIO_MODE:
                go_home(hass)
            else:
                go_to_player(hass, True)
    # other code on text field
    else:
        type_to_input_text(hass, code)
def get_context_suffix(hass):
    """Return the spoken-command suffix for the currently selected group.

    For the generic "Muzyka" group the concrete music service name
    (taken from input_select.ais_music_service) is returned instead.
    """
    suffix = GROUP_ENTITIES[get_curr_group_idx()]["context_suffix"]
    if suffix != "Muzyka":
        return suffix
    return hass.states.get("input_select.ais_music_service").state
async def _async_process(hass, text, calling_client_id=None, hot_word_on=False):
    """Process a line of text spoken or typed by the user.

    Matching order:
      1. intents registered by the conversation component,
      2. user "Jolka" automations (matched by alias/description),
      3. AIS dom intents (with radio/podcast/climate fallbacks for
         unmatched turn on/off requests),
      4. AIS dom intents retried with the current UI context prefix,
      5. the AIS cloud as a last resort.

    Returns an intent.IntentResponse carrying the answer text.
    """
    global CURR_VIRTUAL_KEYBOARD_VALUE
    # clear text
    text = text.replace("&", "and")
    text = text.replace("-", " ").lower()
    # check if the text input is selected
    # binary_sensor.selected_entity / binary_sensor.ais_remote_button
    if CURR_ENTITIE_ENTERED and CURR_ENTITIE is not None:
        if CURR_ENTITIE.startswith("input_text."):
            await hass.services.async_call(
                "input_text", "set_value", {"entity_id": CURR_ENTITIE, "value": text}
            )
            # return response to the hass conversation
            ir = intent.IntentResponse()
            ir.async_set_speech("wpisano w pole tekst: " + text)
            CURR_VIRTUAL_KEYBOARD_VALUE = text
            ir.hass = hass
            return ir
    global CURR_BUTTON_CODE
    # s: handler success flag, m: answer text, m_org: saved answer for fallbacks
    s = False
    m = None
    m_org = None
    found_intent = None
    # async_initialize ha agent
    ha_agent = hass.data.get("ha_conversation_agent")
    if ha_agent is None:
        ha_agent = hass.data["ha_conversation_agent"] = DefaultAgent(hass)
        await ha_agent.async_initialize(hass.data.get("conversation_config"))
    # 1. first check the conversation intents
    conv_intents = hass.data.get("conversation", {})
    for intent_type, matchers in conv_intents.items():
        for matcher in matchers:
            match = matcher.match(text)
            if not match:
                continue
            response = await hass.helpers.intent.async_handle(
                "conversation",
                intent_type,
                {key: {"value": value} for key, value in match.groupdict().items()},
                text,
            )
            return response
    # 2. check the user automatons intents
    if ais_global.G_AUTOMATION_CONFIG is not None:
        automations = {
            state.entity_id: state.name
            for state in hass.states.async_all()
            if state.entity_id.startswith("automation")
            and not state.entity_id.startswith("automation.ais_")
        }
        for key, value in automations.items():
            if value.lower().startswith("jolka"):
                # get aliases
                all_commands = []
                for auto_config in ais_global.G_AUTOMATION_CONFIG:
                    if isinstance(auto_config, AutomationConfig):
                        auto_name = auto_config.get("alias", "").lower().strip()
                        if (
                            "description" in auto_config
                            and auto_name == value.lower().strip()
                        ):
                            all_commands = auto_config.get("description", "").split(";")
                    if isinstance(auto_config, BlueprintInputs):
                        blueprint_inputs = auto_config
                        raw_blueprint_inputs = blueprint_inputs.config_with_inputs
                        auto_name = raw_blueprint_inputs.get("alias", "").lower().strip()
                        if (
                            "description" in raw_blueprint_inputs
                            and auto_name == value.lower().strip()
                        ):
                            all_commands = raw_blueprint_inputs.get(
                                "description", ""
                            ).split(";")
                all_commands = [
                    each_string.strip().lower() for each_string in all_commands
                ]
                all_commands.append(
                    value.lower().replace("jolka", "", 1).replace(":", "").strip()
                )
                text_command = text.lower().replace("jolka", "", 1).replace(":", "").strip()
                if text_command in all_commands:
                    await hass.services.async_call(
                        "automation", "trigger", {ATTR_ENTITY_ID: key}
                    )
                    s = True
                    found_intent = "AUTO"
                    m = "DO_NOT_SAY OK"
                    break
    # 3. check the AIS dom intents
    if s is False:
        intents = hass.data.get(DOMAIN, {})
        try:
            for intent_type, matchers in intents.items():
                if found_intent is not None:
                    break
                for matcher in matchers:
                    match = matcher.match(text)
                    if match:
                        # we have a match
                        found_intent = intent_type
                        m, s = await hass.helpers.intent.async_handle(
                            DOMAIN,
                            intent_type,
                            {
                                key: {"value": value}
                                for key, value in match.groupdict().items()
                            },
                            text,
                        )
                        break
            # the item was match as INTENT_TURN_ON but we don't have such device - maybe it is radio or podcast???
            if s is False and found_intent == INTENT_TURN_ON:
                m_org = m
                m, s = await hass.helpers.intent.async_handle(
                    DOMAIN,
                    INTENT_PLAY_RADIO,
                    {key: {"value": value} for key, value in match.groupdict().items()},
                    text.replace("włącz", "włącz radio"),
                )
                if s is False:
                    m, s = await hass.helpers.intent.async_handle(
                        DOMAIN,
                        INTENT_PLAY_PODCAST,
                        {
                            key: {"value": value}
                            for key, value in match.groupdict().items()
                        },
                        text.replace("włącz", "włącz podcast"),
                    )
                    if s is False:
                        m = m_org
            # the item was match as INTENT_TURN_ON but we don't have such device - maybe it is climate???
            if s is False and found_intent == INTENT_TURN_ON and "ogrzewanie" in text:
                m_org = m
                m, s = await hass.helpers.intent.async_handle(
                    DOMAIN,
                    INTENT_CLIMATE_SET_ALL_ON,
                    {key: {"value": value} for key, value in match.groupdict().items()},
                    text,
                )
                if s is False:
                    m = m_org
            # the item was match as INTENT_TURN_OFF but we don't have such device - maybe it is climate???
            if s is False and found_intent == INTENT_TURN_OFF and "ogrzewanie" in text:
                m_org = m
                m, s = await hass.helpers.intent.async_handle(
                    DOMAIN,
                    INTENT_CLIMATE_SET_ALL_OFF,
                    {key: {"value": value} for key, value in match.groupdict().items()},
                    text,
                )
                if s is False:
                    m = m_org
            # 4. the was no match - try again but with current context
            # only if hot word is disabled
            if found_intent is None and hot_word_on is False:
                suffix = get_context_suffix(hass)
                if suffix is not None:
                    for intent_type, matchers in intents.items():
                        if found_intent is not None:
                            break
                        for matcher in matchers:
                            match = matcher.match(suffix + " " + text)
                            if match:
                                # we have a match
                                found_intent = intent_type
                                m, s = await hass.helpers.intent.async_handle(
                                    DOMAIN,
                                    intent_type,
                                    {
                                        key: {"value": value}
                                        for key, value in match.groupdict().items()
                                    },
                                    suffix + " " + text,
                                )
                                # reset the curr button code
                                # TODO the mic should send a button code too
                                # in this case we will know if the call source
                                CURR_BUTTON_CODE = 0
                                break
            # 5. ask cloud
            if s is False or found_intent is None:
                # no success - try to ask the cloud
                if m is None:
                    # no message / no match
                    m = "Nie rozumiem " + text
                # asking without the suffix
                if text != "":
                    ws_resp = aisCloudWS.ask(text, m)
                    m = ws_resp.text.split("---")[0]
                else:
                    m = "Co proszę? Nic nie słyszę!"
        except Exception as e:
            _LOGGER.info("_process: " + str(e))
            m = "Przepraszam, ale mam problem ze zrozumieniem: " + text
    # return response to the ais dom
    if m.startswith("DO_NOT_SAY"):
        m = m.replace("DO_NOT_SAY", "")
    else:
        _say_it(hass, m, exclude_say_it=calling_client_id)
    # return response to the hass conversation
    intent_resp = intent.IntentResponse()
    intent_resp.async_set_speech(m)
    intent_resp.hass = hass
    return intent_resp
@core.callback
def _match_entity(hass, name, domain=None):
    """Fuzzy-match *name* against entity friendly names.

    Returns the best-matching state object (fuzzy score >= 86) or None.
    When *domain* is given, only entity ids starting with it are
    considered.
    """
    from fuzzywuzzy import process as fuzzy_extract

    states = hass.states.async_all()
    if domain is None:
        candidates = {state.entity_id: state.name for state in states}
    else:
        candidates = {
            state.entity_id: state.name
            for state in states
            if state.entity_id.startswith(domain)
        }
    try:
        # extractOne returns (choice, score, key); None below the cutoff,
        # which makes the subscript raise and we fall through to None.
        matched_id = fuzzy_extract.extractOne(name, candidates, score_cutoff=86)[2]
    except Exception:
        return None
    return hass.states.get(matched_id)
class TurnOnIntent(intent.IntentHandler):
    """Handle turning item on intents."""

    intent_type = INTENT_TURN_ON
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Turn the matched device on and report the result in Polish."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        name = slots["item"]["value"]
        entity = _match_entity(hass, name)
        if not entity:
            return "Nie znajduję urządzenia do włączenia, o nazwie: " + name, False
        if not is_switch(entity.entity_id):
            return "Urządzenia " + name + " nie można włączyć", False
        assumed = entity.attributes.get(ATTR_ASSUMED_STATE, False)
        if assumed is False:
            # Real state is known - skip the service call for a no-op.
            if entity.state == "on":
                return "Urządzenie " + name + " jest już włączone", False
            if entity.state == "unavailable":
                return "Urządzenie " + name + " jest niedostępne", False
            assumed = True
        if assumed:
            await hass.services.async_call(
                core.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity.entity_id}
            )
            return f"OK, włączono {entity.name}", True
class TurnOffIntent(intent.IntentHandler):
    """Handle turning item off intents."""

    intent_type = INTENT_TURN_OFF
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Turn the matched device off and report the result in Polish."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        name = slots["item"]["value"]
        entity = _match_entity(hass, name)
        if not entity:
            return "Nie znajduję urządzenia do wyłączenia, o nazwie: " + name, False
        if not is_switch(entity.entity_id):
            return "Urządzenia " + name + " nie można wyłączyć", False
        assumed = entity.attributes.get(ATTR_ASSUMED_STATE, False)
        if assumed is False:
            # Real state is known - skip the service call for a no-op.
            if entity.state == "off":
                return f"Urządzenie {entity.name} jest już wyłączone", False
            if entity.state == "unavailable":
                return f"Urządzenie {entity.name} jest niedostępne", False
            assumed = True
        if assumed:
            await hass.services.async_call(
                core.DOMAIN,
                SERVICE_TURN_OFF,
                {ATTR_ENTITY_ID: entity.entity_id},
            )
            return f"OK, wyłączono {entity.name}", True
class ToggleIntent(intent.IntentHandler):
    """Handle toggle item intents."""

    intent_type = INTENT_TOGGLE
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Toggle the matched device and report the result in Polish.

        Returns (message, success); success is reported only when the
        toggle service was actually called.
        """
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        name = slots["item"]["value"]
        entity = _match_entity(hass, name)
        success = False
        if not entity:
            msg = f"Nie znajduję urządzenia do przełączenia, o nazwie: {name}"
        elif not hass.services.has_service(entity.domain, SERVICE_TOGGLE):
            msg = f"Urządzenia {entity.name} nie można przełączyć"
        elif entity.state == "unavailable":
            # Bug fix: an unavailable device was previously reported with
            # success=True, inconsistent with the TurnOn/TurnOff handlers.
            msg = f"Urządzenie {entity.name} jest niedostępne"
        else:
            await hass.services.async_call(
                entity.domain, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity.entity_id}
            )
            msg = f"OK, przełączono {entity.name}"
            success = True
        return msg, success
class StatusIntent(intent.IntentHandler):
    """Handle status item on intents."""

    intent_type = INTENT_STATUS
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Report the translated state (plus unit) of the matched entity."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        name = slots["item"]["value"]
        entity = _match_entity(hass, name)
        if not entity:
            return "Nie znajduję informacji o: " + name, False
        state = translate_state(entity)
        unit = entity.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        value = state if unit is None else f"{state} {unit}"
        return f"{entity.name}: {value}", True
class SpellStatusIntent(intent.IntentHandler):
    """Handle spell status item on intents."""

    intent_type = INTENT_SPELL_STATUS
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Spell out the entity state letter by letter ("; "-separated).

        When no entity matches, the requested name itself is spelled.
        """
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        name = slots["item"]["value"]
        entity = _match_entity(hass, name)
        if not entity:
            return "; ".join(name), True
        state = translate_state(entity)
        unit = entity.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        value = state if unit is None else f"{state} {unit}"
        return "; ".join(value), True
class PlayRadioIntent(intent.IntentHandler):
    """Handle PlayRadio intents."""

    intent_type = INTENT_PLAY_RADIO
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Resolve the station in the AIS cloud and start playback."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        try:
            station = slots["item"]["value"]
        except Exception:
            station = None
        if station is None:
            return "Powiedz jaką stację mam włączyć", False
        ws_resp = aisCloudWS.audio(
            station, ais_global.G_AN_RADIO, intent_obj.text_input
        )
        json_ws_resp = ws_resp.json()
        json_ws_resp["media_source"] = ais_global.G_AN_RADIO
        found_name = json_ws_resp["name"]
        # An all-whitespace name means the cloud found nothing.
        if len(found_name.replace(" ", "")) == 0:
            return "Niestety nie znajduję radia " + station, False
        await hass.services.async_call("ais_cloud", "play_audio", json_ws_resp)
        return "OK, gramy radio " + found_name, True
class AisPlayPodcastIntent(intent.IntentHandler):
    """Handle Podcast intents."""

    intent_type = INTENT_PLAY_PODCAST
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Look the podcast up in the AIS cloud and queue its episodes."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        item = slots["item"]["value"]
        if not item:
            return "Nie wiem jaką audycję chcesz posłuchać.", False
        ws_resp = aisCloudWS.audio(
            item, ais_global.G_AN_PODCAST, intent_obj.text_input
        )
        json_ws_resp = ws_resp.json()
        json_ws_resp["media_source"] = ais_global.G_AN_PODCAST
        found_name = json_ws_resp["name"]
        # An all-whitespace name means the cloud found nothing.
        if len(found_name.replace(" ", "")) == 0:
            return "Niestety nie znajduję podcasta " + item, False
        await hass.services.async_call("ais_cloud", "play_audio", json_ws_resp)
        return "OK, pobieram odcinki audycji " + item, True
class AisPlayYtMusicIntent(intent.IntentHandler):
    """Handle Music intents."""

    intent_type = INTENT_PLAY_YT_MUSIC
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Search YouTube for the requested music and switch the UI context."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        query = slots["item"]["value"]
        if not query:
            return "Nie wiem jaką muzykę mam szukać ", False
        await hass.services.async_call("ais_yt_service", "search", {"query": query})
        # Switch the panel context to YouTube.
        await hass.services.async_call(
            "ais_ai_service", "set_context", {"text": "YouTube"}
        )
        return "OK, szukam na YouTube " + query, True
class AisPlaySpotifyIntent(intent.IntentHandler):
    """Handle Music intents."""

    intent_type = INTENT_PLAY_SPOTIFY
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Search Spotify for the requested music (requires the integration)."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        query = slots["item"]["value"]
        # The Spotify integration is optional - explain how to add it.
        if not hass.services.has_service("ais_spotify_service", "search"):
            return (
                "Żeby odtwarzać muzykę z serwisu Spotify, dodaj integrację AIS Spotify. Więcej informacji "
                "znajdziesz w dokumentacji [Asystenta domowego](https://www.ai-speaker.com)"
            ), True
        if not query:
            return "Nie wiem jaką muzykę mam szukać ", False
        await hass.services.async_call(
            "ais_spotify_service", "search", {"query": query}
        )
        # Switch the panel context to Spotify.
        await hass.services.async_call(
            "ais_ai_service", "set_context", {"text": "Spotify"}
        )
        return "OK, szukam na Spotify " + query, True
class AskQuestionIntent(intent.IntentHandler):
    """Handle AskQuestion intents."""

    intent_type = INTENT_ASK_QUESTION
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Ask the knowledge service; the panel displays the full answer."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        question = slots["item"]["value"]
        if not question:
            return "Nie wiem o co zapytać", False
        from homeassistant.components import ais_knowledge_service

        answer = await ais_knowledge_service.async_process_ask(hass, question)
        # DO_NOT_SAY prefix: the answer is displayed, not spoken.
        return "DO_NOT_SAY " + answer, True
class AskWikiQuestionIntent(intent.IntentHandler):
    """Handle AskWikiQuestion intents."""

    intent_type = INTENT_ASKWIKI_QUESTION
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Ask the Wikipedia knowledge service; the panel shows the answer."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        question = slots["item"]["value"]
        if not question:
            return "Nie wiem o co zapytać", False
        from homeassistant.components import ais_knowledge_service

        answer = await ais_knowledge_service.async_process_ask_wiki(hass, question)
        # DO_NOT_SAY prefix: the answer is displayed, not spoken.
        return "DO_NOT_SAY " + answer, True
class ChangeContextIntent(intent.IntentHandler):
    """Handle ChangeContext intents."""

    intent_type = INTENT_CHANGE_CONTEXT

    async def async_handle(self, intent_obj):
        """Switch the remote-control context matching the spoken keyword."""
        hass = intent_obj.hass
        if len(GROUP_ENTITIES) == 0:
            get_groups(hass)
        text = intent_obj.text_input.lower()
        for menu in GROUP_ENTITIES:
            keywords = menu["context_key_words"]
            if keywords is None or text not in keywords.split(","):
                continue
            set_curr_group(hass, menu)
            set_curr_entity(hass, None)
            # Music contexts also switch the active music service.
            if text == "spotify":
                await hass.services.async_call(
                    "input_select",
                    "select_option",
                    {
                        "entity_id": "input_select.ais_music_service",
                        "option": "Spotify",
                    },
                )
            elif text == "youtube":
                await hass.services.async_call(
                    "input_select",
                    "select_option",
                    {
                        "entity_id": "input_select.ais_music_service",
                        "option": "YouTube",
                    },
                )
            return menu["context_answer"], True
        return "Nie znajduję odpowiedzi do kontekstu " + text, False
class GetTimeIntent(intent.IntentHandler):
    """Handle GetTimeIntent intents."""

    intent_type = INTENT_GET_TIME

    async def async_handle(self, intent_obj):
        """Say the current local time in Polish."""
        import babel.dates

        current = datetime.datetime.now()
        spoken = babel.dates.format_time(current, format="short", locale="pl")
        return "Jest " + spoken, True
class AisGetWeather(intent.IntentHandler):
    """Handle GetWeather intents."""

    intent_type = INTENT_GET_WEATHER

    async def async_handle(self, intent_obj):
        """Reverse-geocode the instance location and ask AIS for the weather."""
        answer = "niestety nie wiem jaka jest pogoda"
        address = ""
        try:
            # try to do reverse_geocode
            from geopy.geocoders import Nominatim

            geolocator = Nominatim(user_agent="AIS dom")
            location = geolocator.reverse(
                query=(
                    intent_obj.hass.config.latitude,
                    intent_obj.hass.config.longitude,
                ),
                exactly_one=True,
                timeout=5,
                language="pl",
                addressdetails=True,
                zoom=10,
            )
            parts = location.address.split(",")
            address = parts[0] + " " + parts[1]
            # ask AIS
            ws_resp = aisCloudWS.ask(
                "pogoda w miejscowości " + address,
                "niestety nie wiem jaka jest pogoda",
            )
            answer = ws_resp.text.split("---")[0]
        except Exception as e:
            _LOGGER.warning(
                "Handle the intent problem for location " + address + " " + str(e)
            )
        return answer, True
class AisGetWeather48(intent.IntentHandler):
    """Handle GetWeather48 intents."""

    intent_type = INTENT_GET_WEATHER_48

    async def async_handle(self, intent_obj):
        """Reverse-geocode the location and ask AIS for tomorrow's weather."""
        answer = "niestety nie wiem jaka będzie pogoda"
        address = ""
        try:
            # try to do reverse_geocode
            from geopy.geocoders import Nominatim

            geolocator = Nominatim(user_agent="AIS dom")
            location = geolocator.reverse(
                query=(
                    intent_obj.hass.config.latitude,
                    intent_obj.hass.config.longitude,
                ),
                exactly_one=True,
                timeout=5,
                language="pl",
                addressdetails=True,
                zoom=10,
            )
            parts = location.address.split(",")
            address = parts[0] + " " + parts[1]
            ws_resp = aisCloudWS.ask(
                "jaka będzie pogoda jutro w miejscowości " + address, answer
            )
            answer = ws_resp.text.split("---")[0]
        except Exception as e:
            _LOGGER.warning(
                "Handle the intent problem for location " + address + " " + str(e)
            )
        return answer, True
class AisLampsOn(intent.IntentHandler):
    """Handle AisLampsOn intents."""

    intent_type = INTENT_LAMPS_ON

    async def async_handle(self, intent_obj):
        """Turn every light in group.all_lights on."""
        await intent_obj.hass.services.async_call(
            "light", "turn_on", {"entity_id": "group.all_lights"}
        )
        return "ok", True
class AisLampsOff(intent.IntentHandler):
    """Handle AisLampsOff intents."""

    intent_type = INTENT_LAMPS_OFF

    async def async_handle(self, intent_obj):
        """Turn every light in group.all_lights off."""
        await intent_obj.hass.services.async_call(
            "light", "turn_off", {"entity_id": "group.all_lights"}
        )
        return "ok", True
class AisSwitchesOn(intent.IntentHandler):
    """Handle AisSwitchesOn intents."""

    intent_type = INTENT_SWITCHES_ON

    async def async_handle(self, intent_obj):
        """Turn every switch in group.all_switches on."""
        await intent_obj.hass.services.async_call(
            "switch", "turn_on", {"entity_id": "group.all_switches"}
        )
        return "ok", True
class AisSwitchesOff(intent.IntentHandler):
    """Handle AisSwitchesOff intents."""

    intent_type = INTENT_SWITCHES_OFF

    async def async_handle(self, intent_obj):
        """Turn every switch in group.all_switches off."""
        await intent_obj.hass.services.async_call(
            "switch", "turn_off", {"entity_id": "group.all_switches"}
        )
        return "ok", True
class GetDateIntent(intent.IntentHandler):
    """Handle GetDateIntent intents."""

    intent_type = INTENT_GET_DATE

    async def async_handle(self, intent_obj):
        """Say today's date in Polish."""
        import babel.dates

        today = datetime.datetime.now()
        spoken = babel.dates.format_date(today, format="full", locale="pl")
        return "Jest " + spoken, True
class AisOpenCover(intent.IntentHandler):
    """Handle AisOpenCover intents."""

    intent_type = INTENT_OPEN_COVER
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Open the matched cover and report the result in Polish."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        name = slots["item"]["value"]
        entity = _match_entity(hass, name)
        success = False
        if not entity:
            message = "Nie znajduję urządzenia do otwarcia, o nazwie: " + name
        elif entity.entity_id.startswith("cover."):
            # Bug fix: covers report "open"/"closed" states, not "on"/"off",
            # so the old `entity.state == "on"` guard could never match.
            if entity.state == "open":
                message = "Urządzenie " + name + " jest już otwarte"
            elif entity.state == "unavailable":
                message = "Urządzenie " + name + " jest niedostępne"
            else:
                await hass.services.async_call(
                    "cover",
                    SERVICE_OPEN_COVER,
                    {ATTR_ENTITY_ID: entity.entity_id},
                    blocking=True,
                )
                message = f"OK, otwieram {entity.name}"
                success = True
        else:
            message = "Urządzenia " + name + " nie można otworzyć"
        return message, success
class AisCloseCover(intent.IntentHandler):
    """Handle AisCloseCover intents."""

    intent_type = INTENT_CLOSE_COVER
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Close the matched cover and report the result in Polish."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        name = slots["item"]["value"]
        entity = _match_entity(hass, name)
        success = False
        if not entity:
            msg = "Nie znajduję urządzenia do zamknięcia, o nazwie: " + name
        elif entity.entity_id.startswith("cover."):
            # Bug fix: covers report "open"/"closed" states, not "on"/"off",
            # so the old `entity.state == "off"` guard could never match.
            if entity.state == "closed":
                msg = f"Urządzenie {entity.name} jest już zamknięte"
            elif entity.state == "unavailable":
                msg = f"Urządzenie {entity.name} jest niedostępne"
            else:
                await hass.services.async_call(
                    "cover",
                    SERVICE_CLOSE_COVER,
                    {ATTR_ENTITY_ID: entity.entity_id},
                    blocking=True,
                )
                msg = f"OK, zamykam {entity.name}"
                success = True
        else:
            msg = "Urządzenia " + name + " nie można zamknąć"
        return msg, success
class AisStop(intent.IntentHandler):
    """Handle AisStop intents."""

    intent_type = INTENT_STOP

    async def async_handle(self, intent_obj):
        """Stop playback on every media player."""
        await intent_obj.hass.services.async_call(
            "media_player", "media_stop", {"entity_id": "all"}
        )
        return "ok, stop", True
class AisPlay(intent.IntentHandler):
    """Handle AisPlay intents."""

    intent_type = INTENT_PLAY

    async def async_handle(self, intent_obj):
        """Resume playback on the built-in speaker."""
        await intent_obj.hass.services.async_call(
            "media_player",
            "media_play",
            {ATTR_ENTITY_ID: "media_player.wbudowany_glosnik"},
        )
        return "ok, gram", True
class AisNext(intent.IntentHandler):
    """Handle AisNext intents."""

    intent_type = INTENT_NEXT

    async def async_handle(self, intent_obj):
        """Skip to the next track on the built-in speaker."""
        await intent_obj.hass.services.async_call(
            "media_player",
            "media_next_track",
            {ATTR_ENTITY_ID: "media_player.wbudowany_glosnik"},
        )
        return "ok, następny", True
class AisPrev(intent.IntentHandler):
    """Handle AisPrev intents."""

    intent_type = INTENT_PREV

    async def async_handle(self, intent_obj):
        """Skip to the previous track on the built-in speaker."""
        await intent_obj.hass.services.async_call(
            "media_player",
            "media_previous_track",
            {ATTR_ENTITY_ID: "media_player.wbudowany_glosnik"},
        )
        return "ok, poprzedni", True
class AisSceneActive(intent.IntentHandler):
    """Handle AisSceneActive intents."""

    intent_type = INTENT_SCENE
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Activate the matched scene."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        name = slots["item"]["value"]
        entity = _match_entity(hass, name)
        if not entity:
            return "Nie znajduję sceny, o nazwie: " + name, False
        if not entity.entity_id.startswith("scene."):
            return name + " nie można aktywować", False
        await hass.services.async_call(
            "scene", "turn_on", {ATTR_ENTITY_ID: entity.entity_id}
        )
        return f"OK, aktywuję {entity.name}", True
class AisRunAutomation(intent.IntentHandler):
    """Handle AisRunAutomation intents."""

    intent_type = INTENT_RUN_AUTOMATION
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Trigger the matched automation."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        name = slots["item"]["value"]
        entity = _match_entity(hass, name)
        if not entity:
            return "Nie znajduję automatyzacji, o nazwie: " + name, False
        if not entity.entity_id.startswith("automation."):
            return name + " nie można uruchomić", False
        await hass.services.async_call(
            "automation", "trigger", {ATTR_ENTITY_ID: entity.entity_id}
        )
        return f"OK, uruchamiam {entity.name}", True
class AisAskGoogle(intent.IntentHandler):
    """Handle AisAskGoogle intents."""

    intent_type = INTENT_ASK_GOOGLE
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Forward the command to the AIS Google Home integration."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        command = slots["item"]["value"]
        # The Google Home integration is optional - explain how to add it.
        if not hass.services.has_service("ais_google_home", "command"):
            return (
                "Żeby wysyłać komendy do serwisu Google, dodaj integrację AIS Google Home. Więcej informacji "
                "znajdziesz w dokumentacji [Asystenta domowego]("
                "https://www.ai-speaker.com/docs/ais_app_ai_integration_google_home). "
            ), True
        await hass.services.async_call(
            "ais_google_home", "command", {"text": command}
        )
        return "", True
class AisSayIt(intent.IntentHandler):
    """Handle AisSayIt intents."""

    intent_type = INTENT_SAY_IT
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Repeat the requested text, or quip when nothing was given."""
        try:
            slots = self.async_validate_slots(intent_obj.slots)
            text = slots["item"]["value"]
        except Exception:
            text = None
        if text:
            return text, True
        import random

        answers = [
            "Nie wiem co mam powiedzieć?",
            "Ale co?",
            "Mówie mówie",
            "OK, dobra zaraz coś wymyślę...",
            "Mowa jest tylko srebrem",
            "To samo czy coś nowego?",
        ]
        return random.choice(answers), False
class AisClimateSetTemperature(intent.IntentHandler):
    """Handle AisClimateSetTemperature intents."""

    intent_type = INTENT_CLIMATE_SET_TEMPERATURE
    slot_schema = {"temp": cv.string, "item": cv.string}

    async def async_handle(self, intent_obj):
        """Set the target temperature on the matched climate device.

        The "temp" slot may contain both the temperature and extra words;
        the trailing number is extracted and the remainder is appended to
        the "item" slot before entity matching.
        """
        hass = intent_obj.hass
        # Bug fix: the original only set `text = None` on exception, leaving
        # `entity`, `name` and `temp` unbound and crashing with NameError
        # below; initialize them so a parse failure yields a spoken error.
        entity = None
        name = ""
        temp = None
        try:
            slots = self.async_validate_slots(intent_obj.slots)
            raw_temp = slots["temp"]["value"]
            name = slots["item"]["value"]
            # get name from temp
            digits = re.search(r"\d+$", raw_temp)
            if digits:
                temp = digits.group()
                name = name + "" + raw_temp.replace(temp, "")
            else:
                temp = raw_temp
            entity = _match_entity(hass, name)
        except Exception:
            entity = None
        success = False
        if not entity:
            msg = "Nie znajduję grzejnika, o nazwie: " + name
        elif entity.entity_id.startswith("climate."):
            # Skip the service call when the target is already set.
            attr = hass.states.get(entity.entity_id).attributes
            if attr.get("temperature") == temp:
                msg = "{} ma już ustawioną temperaturę {} {}".format(
                    entity.name, temp, "stopni"
                )
            else:
                await hass.services.async_call(
                    "climate",
                    "set_temperature",
                    {ATTR_ENTITY_ID: entity.entity_id, "temperature": temp},
                )
                msg = "OK, ustawiono temperaturę {} {} w {}".format(
                    temp, "stopni", entity.name
                )
                success = True
        else:
            msg = "Na urządzeniu " + name + " nie można zmieniać temperatury."
        return msg, success
class AisClimateSetPresentMode(intent.IntentHandler):
    """Handle AisClimateSetPresentMode intents."""

    intent_type = INTENT_CLIMATE_SET_PRESENT_MODE
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Map the spoken (Polish) phrase to a climate preset and apply it."""
        slots = self.async_validate_slots(intent_obj.slots)
        hass = intent_obj.hass
        mode = slots["item"]["value"]
        # Spoken phrase -> Home Assistant climate preset mode.
        phrase_to_preset = {
            # Device is in away mode
            "poza domem": "away",
            "za domem": "away",
            "domem": "away",
            # Device is in home mode - No preset is active
            "w domu": "none",
            "domu": "none",
            "dom": "none",
            # Device is running an energy-saving mode
            "eko": "eco",
            "eco": "eco",
            "oszczędzanie": "eco",
            "oszczędny": "eco",
            # Device turn all valve full up
            "podgrzanie": "boost",
            "podgrzewanie": "boost",
            # Device is in comfort mode
            "comfort": "comfort",
            "komfort": "comfort",
            "wygoda": "comfort",
            # Device is prepared for sleep
            "spanie": "sleep",
            "noc": "sleep",
            # Device is reacting to activity (e.g. movement sensors)
            "aktywność": "activity",
            "ruch": "activity",
        }
        present_mode = phrase_to_preset.get(mode, "")
        if present_mode != "":
            await hass.services.async_call(
                "climate",
                "set_preset_mode",
                {"entity_id": "all", "preset_mode": present_mode},
            )
            message = "ok, ogrzewanie w trybie " + mode
        else:
            message = "nie znajduje trybu ogrzewania " + mode
        return message, True
class AisClimateSetAllOn(intent.IntentHandler):
    """Handle AisClimateSetAllOn intents."""

    intent_type = INTENT_CLIMATE_SET_ALL_ON

    async def async_handle(self, intent_obj):
        """Switch every climate entity into heating mode."""
        hass = intent_obj.hass
        service_data = {"entity_id": "all", "hvac_mode": "heat"}
        await hass.services.async_call("climate", "set_hvac_mode", service_data)
        return "ok, całe ogrzewanie włączone", True
class AisClimateSetAllOff(intent.IntentHandler):
    """Handle AisClimateSetAllOff intents."""

    intent_type = INTENT_CLIMATE_SET_ALL_OFF

    async def async_handle(self, intent_obj):
        """Switch every climate entity off."""
        hass = intent_obj.hass
        service_data = {"entity_id": "all", "hvac_mode": "off"}
        await hass.services.async_call("climate", "set_hvac_mode", service_data)
        return "ok, całe ogrzewanie wyłączone", True
class PersonStatusIntent(intent.IntentHandler):
    """Handle person location status intents."""

    intent_type = INTENT_PERSON_STATUS
    slot_schema = {"item": cv.string}

    async def async_handle(self, intent_obj):
        """Report where the named person currently is (in Polish)."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        name = slots["item"]["value"]
        entity = _match_entity(hass, name, "person")
        if not entity:
            return "Nie znajduję lokalizacji: " + name, False
        # Try to resolve a human readable address from the geocoded-location
        # sensor that pairs with the person's device tracker.
        address = ""
        if "source" in entity.attributes:
            try:
                tracker_id = entity.attributes.get("source", "")
                sensor_id = (
                    tracker_id.replace("device_tracker", "sensor")
                    + "_geocoded_location"
                )
                address = hass.states.get(sensor_id).state
                if address != STATE_UNKNOWN:
                    address = ", ostatni przesłany adres to " + address
            except Exception:
                address = ""
        if entity.state == STATE_UNKNOWN:
            location = "lokalizacja nieznana"
        elif entity.state == STATE_HOME:
            location = "jest w domu"
        elif entity.state == STATE_NOT_HOME:
            location = "jest poza domem" + address
        else:
            location = "lokalizacja " + entity.state
        return "{}: {}".format(entity.name, location), True
| StarcoderdataPython |
1635651 | from thinsos.core import SOS
| StarcoderdataPython |
167811 | #!/usr/bin/env python3
import argparse
import glob
import os
import send2trash
script_info = ("""
Script to make a Manifest.csv file for importing fastq.gz files into a qiime 2 environment.
To Install:
Open Qiime2 conda environment
Install python package "send2trash" using: pip install send2trash
Put script in path and navigate to your working directory
python ./q2_manifest_maker.py --input_dir <data_directory>
Acceptable formats include:
<sampleid>.R1.fastq.gz
<sampleid>.R2.fastq.gz
or
<sampleid>_S6_L001_R1_001.fastq.gz
<sampleid>_S6_L001_R2_001.fastq.gz
""")
#Class Objects
class FormatError(Exception):
    '''Raised when a fastq filename follows neither supported naming format.'''
    pass
class Fasta_File_Meta:
    '''Metadata for one fastq.gz file, for importing into qiime2.

    Supported filename formats:
        Basic:    <sampleid>.R1.fastq.gz / <sampleid>.R2.fastq.gz
        Illumina: <sampleid>_S6_L001_R1_001.fastq.gz (R1/R2 in the 4th field)

    Attributes:
        absolute_path: the path exactly as supplied.
        filename: basename of the file.
        format: "Basic" or "Illumina".
        sample_id: sample identifier parsed from the filename.
        direction: "forward" (R1) or "reverse" (R2).

    Raises:
        FormatError: if the filename matches neither convention.
    '''

    def __init__(self, file_path):
        self.absolute_path = file_path
        path, file_name = os.path.split(file_path)
        self.filename = file_name
        # Bug fix: the original compared strings with `is` ("is \"R\"",
        # "is \"S\""), which relies on CPython string interning and is not a
        # guaranteed equality test. Use `==` instead. The `[:1]` slice and
        # length guards also avoid IndexError on filenames without a second
        # "."- or "_"-separated field.
        try:
            file_parts = file_name.split(".")
            if len(file_parts) > 1 and file_parts[1][:1] == "R":
                self.format = "Basic"
            else:
                raise ValueError
            self.sample_id = file_parts[0]
        except ValueError:
            file_parts = file_name.split("_")
            if len(file_parts) > 1 and file_parts[1][:1] == "S":
                self.format = "Illumina"
                self.sample_id = file_parts[0]
            else:
                self.format = "Unknown"
        if self.format == "Basic":
            # Basic format: the field after the sample id is R1/R2.
            if file_parts[1] == "R1":
                self.direction = "forward"
            elif file_parts[1] == "R2":
                self.direction = "reverse"
            else:
                raise FormatError("Files do not follow Illumina or Basic filename conventions.")
        if self.format == "Illumina":
            # Illumina format: the 4th "_"-separated field is R1/R2.
            if len(file_parts) > 3 and file_parts[3] == "R1":
                self.direction = "forward"
            elif len(file_parts) > 3 and file_parts[3] == "R2":
                self.direction = "reverse"
            else:
                raise FormatError("Files do not follow Illumina or Basic filename conventions.")
        if self.format == "Unknown":
            raise FormatError("Files do not follow Illumina or Basic filename conventions.")
#Global functions
def delete_file(file_in):
    """Send *file_in* to the trash; silently do nothing when it is absent."""
    if os.path.isfile(file_in):
        send2trash.send2trash(file_in)
def save_manifest_file(fasta_list):
    """Write Manifest.csv for qiime2, replacing any previous copy."""
    manifest_name = "Manifest.csv"
    delete_file(manifest_name)
    with open(manifest_name, "w") as writer:
        writer.write("sample-id,absolute-filepath,direction\n")
        for fasta in fasta_list:
            writer.write(
                "{},{},{}\n".format(
                    fasta.sample_id, fasta.absolute_path, fasta.direction
                )
            )
def assign_fasta_2_class(file_paths):
    """Wrap every path in *file_paths* in a Fasta_File_Meta object."""
    return [Fasta_File_Meta(path) for path in file_paths]
def get_file_list(directory):
    """Return absolute paths of every *.fastq.gz file inside *directory*."""
    dir_abs = os.path.abspath(directory)
    print("Making manifest file for fastq.gz files in " + dir_abs + "/*.fastq.gz")
    return [os.path.abspath(match) for match in glob.glob(dir_abs + "/*.fastq.gz")]
def get_args():
    """Parse command line options; --input_dir is mandatory."""
    parser = argparse.ArgumentParser(
        description='''Script to make a Manifest.csv file for importing fastq.gz files into a qiime 2 environment.'''
    )
    parser.add_argument(
        "--input_dir", help="Essential: Input directory for samples.", required=True
    )
    return parser.parse_args()
def main():
    '''Build Manifest.csv for every fastq.gz file under --input_dir.'''
    options = get_args()
    file_paths = get_file_list(options.input_dir)
    fasta_class_list = assign_fasta_2_class(file_paths)
    save_manifest_file(fasta_class_list)


if __name__ == "__main__":
    # Bug fix: the original called main() unconditionally, so merely
    # importing this module ran the whole manifest build.
    main()
| StarcoderdataPython |
3291364 | <gh_stars>10-100
from typing import Dict, List, Optional
from .const import TAG_ESCAPED, TAG_UNESCAPED
def _escape_tag(value: str):
    """Replace every raw IRCv3 tag character in *value* with its escape."""
    for raw, escaped in zip(TAG_UNESCAPED, TAG_ESCAPED):
        value = value.replace(raw, escaped)
    return value
def format(
        tags: Optional[Dict[str, str]],
        source: Optional[str],
        command: str,
        params: List[str]):
    """Assemble a raw IRC line from tags, source, command and parameters."""
    pieces: List[str] = []
    if tags:
        # Tags are emitted sorted; a falsy value means a bare key.
        rendered: List[str] = []
        for key in sorted(tags.keys()):
            value = tags[key]
            if value:
                rendered.append(f"{key}={_escape_tag(value)}")
            else:
                rendered.append(key)
        pieces.append("@" + ";".join(rendered))
    if source is not None:
        pieces.append(":" + source)
    pieces.append(command)
    if params:
        # Only the trailing parameter may contain spaces or start with ":".
        *leading, last = params
        for param in leading:
            if " " in param:
                raise ValueError("non last params cannot have spaces")
            elif param.startswith(":"):
                raise ValueError("non last params cannot start with colon")
        pieces.extend(leading)
        if (not last or
                " " in last or
                last.startswith(":")):
            last = ":" + last
        pieces.append(last)
    return " ".join(pieces)
| StarcoderdataPython |
199284 | <gh_stars>10-100
'''
Tests for dumpsegd
'''
import os
import sys
import unittest
from mock import patch
from testfixtures import OutputCapture
from ph5.utilities import dumpsegd
from ph5.core.tests.test_base import LogTestCase, TempDirTestCase
class TestDumpSEGD(TempDirTestCase, LogTestCase):
    """Integration test for the dumpsegd command line tool."""
    def test_main(self):
        """Run dumpsegd on a SmartSolo SEG-D fixture and compare its stdout
        against the pre-generated ``.header`` dump stored next to it."""
        # Path of the SEG-D fixture inside the ph5 test-data tree.
        segdfile = os.path.join(
            self.home,
            'ph5/test_data/segd/smartsolo/'
            '453005513.2.2021.05.08.20.06.00.000.E.segd')
        segdheaderfile = segdfile + '.header'
        with open(segdheaderfile, 'r') as headerFile:
            header = headerFile.read().split()
        # Invoke dumpsegd as if from the command line, capturing stdout.
        testargs = ['dumpsegd', segdfile]
        with patch.object(sys, 'argv', testargs):
            with OutputCapture() as out:
                dumpsegd.main()
        output = out.captured.split()
        # skip the filename lines
        self.assertEqual(output[2:], header[2:])
if __name__ == "__main__":
    # Allow running this test module directly with `python test_dumpsegd.py`.
    unittest.main()
| StarcoderdataPython |
class Solution:
    def findDisappearedNumbers(self, nums):
        """
        :type nums: List[int]
        :rtype: List[int]
        """
        # Every value in nums lies in [1, len(nums)]; the answer is the
        # ascending list of values from that range never seen in nums.
        seen = set(nums)
        return [value for value in range(1, len(nums) + 1) if value not in seen]
4817569 | #! /usr/bin/env python
''' Main program'''
import re
import os.path
import shutil
import json
import sys
from typing import Tuple
class Folder():
    """Value object pairing a folder's display name with its full path."""

    def __init__(self, name: str, path: str):
        self.path = path
        self.name = name
class File():
    """Value object describing a single file.

    name      - bare file name (no extension).
    extension - extension; may include a "." delimiter but that is discouraged.
    path      - full path of the file.
    """

    def __init__(self, name: str, extension: str, path: str):
        self.path = path
        self.extension = extension
        self.name = name
class FileRule():
    """Filter describing which files to act on and what to do with them.

    keywords    - substrings looked for in file names (e.g. "screenshot").
    extensions  - file extensions to match (e.g. "zip", "7z", "").
    whitelist   - exact file names to leave untouched (optional, e.g. "icon").
    action      - one of "COPY", "MOVE", "DELETE".
    destination - where matched files should end up.
    """

    def __init__(
        self,
        keywords: list[str],
        extensions: list[str],
        action: str,
        destination: str,
        whitelist: list[str] = None,
    ) -> None:
        self.destination = destination
        self.action = action
        self.whitelist = whitelist
        self.extensions = extensions
        self.keywords = keywords
class FolderTemplate():
    """Blueprint for generating a folder layout under one root folder.

    root_folder        - folder the template folders live under.
    folders            - names of the folders to create there.
    place_for_unwanted - where stray folders get moved (None = leave them).
    """

    def __init__(
        self,
        root_folder: str,
        folders: list[str],
        place_for_unwanted: str = None
    ) -> None:
        self.place_for_unwanted = place_for_unwanted
        self.folders = folders
        self.root_folder = root_folder

    @property
    def as_iter(self) -> list[str]:  # too much rust influence
        """Lazily produce each template folder joined onto the root path."""
        join = os.path.join
        return map(lambda name: join(self.root_folder, name), self.folders)
class Operation():
    """Pairs the folders to scan with the file rules applied to their contents."""

    def __init__(
        self,
        scan_sources: list[str],
        rules: list[FileRule]
    ) -> None:
        self.rules = rules
        self.scan_sources = scan_sources
class Token():
    """One pending filesystem action: apply *action* to *source*, sending it
    to *destination*. Paths are normalized and user-expanded on assignment."""

    def __init__(
        self,
        source: str,
        destination: str,
        action: str,
    ) -> None:
        self.source = source
        self.destination = destination
        self.action = action

    def __repr__(self) -> str:
        """Debugging representation showing validity, action and both paths."""
        return (
            f"Token {{is valid: '{self.is_valid()}' }}"
            f" {{ action: '{self.action}' }}"
            f" {{ dest: '{self.destination}' }}"
            f" {{ source: '{self.source}' }}"
        )

    @property
    def source(self):
        return self.__source

    @source.setter
    def source(self, source_path: str):
        # Normalize eagerly so the comparisons in is_valid() are reliable.
        self.__source = os.path.normpath(os.path.expanduser(source_path))

    @property
    def destination(self):
        return self.__destination

    @destination.setter
    def destination(self, dest_path: str):
        if dest_path is None:
            self.__destination = None
        else:
            self.__destination = os.path.normpath(os.path.expanduser(dest_path))

    def is_valid(self) -> bool:
        """A token is valid when:

        * the source exists;
        * moving it would not be a no-op (source folder != destination);
        * it would not touch this program or the folder containing it.
        """
        if not os.path.exists(self.source):
            return False
        if os.path.isdir(self.source):
            source_folder = self.source
            program_path = os.path.dirname(os.path.realpath(__file__))
        else:
            # Strip the filename to unveil the source folder.
            source_folder = os.path.dirname(self.source)
            program_path = os.path.realpath(__file__)
        return source_folder != self.destination and self.source != program_path
class Config():
    """Top-level sorting configuration: folder layouts plus file operations."""

    def __init__(
        self,
        folder_templates: list[FolderTemplate],
        operations: list[Operation]
    ) -> None:
        self.operations = operations
        self.folder_templates = folder_templates

    def export(self, file_path: str):
        """Serialize this Config (recursively, via __dict__) to pretty JSON."""
        with open(file_path, "w", encoding="UTF-8") as out_file:
            json.dump(self, out_file, indent=2, default=lambda o: o.__dict__)
        print(f"Successfully exported to {file_path}")
class Enforcer():
    '''Responsible for enforcing rules and configurations set up by the Config class.

    Typical flow: generate_folders() -> sort_folders() -> sort_files() ->
    enforce(). The first three only accumulate Token objects in self.tokens;
    enforce() is what actually touches the filesystem.
    '''
    def __init__(self, config: Config) -> None:
        self.config: Config = config
        # Pending move/copy/delete actions, consumed by enforce().
        self.tokens: list[Token] = []
        # Scratch state for the current sort_files() pass.
        self.files: list[File] = []
        self.folders: list[Folder] = []
        # Paths already scanned this pass (improved annotation: these are
        # folder path strings, not File objects).
        self.scanned_sources: list[str] = []
    def generate_folders(self):
        '''Generates folders when provided a list of folder templates.'''
        for folder_template in self.config.folder_templates:
            for folder in folder_template.as_iter:
                folder = os.path.expanduser(folder)
                if os.path.exists(folder):
                    print(f"INFO: Ignored folder (Already exists): '{folder}'.")
                    continue
                try:
                    os.makedirs(folder)
                    print(f"INFO: Created folder: '{folder}'")
                except Exception as err:
                    print(f"WARN: Could not create folder: '{folder}', {err}")
    def sort_folders(self):
        ''' Move folders not specified by the folder template to a specified folder.\n
        Folder templates that do not have a dedicated place for these folders are ignored.
        '''
        move_tokens: list[Token] = []
        for template in self.config.folder_templates:
            if template.place_for_unwanted is None:
                continue
            (_, scanned_folders) = scandir(template.root_folder)
            for folder in scanned_folders:
                # Anything not named by the template is "unwanted".
                if folder.name not in template.folders:
                    move_tokens.append(Token(
                        folder.path, template.place_for_unwanted, "MOVE"))
        self.tokens += move_tokens
    def sort_files(self):
        ''' Move files based on their extensions,
        key words and whitelist status to a specified location.
        '''
        for operation in self.config.operations:
            # Scan every source first so rules see the combined file list.
            for source in operation.scan_sources:
                self.scan_files(source)
            for rule in operation.rules:
                filtered_files = self.filter_files(rule)
                self.tokens += self.generate_file_move_tokens(rule, filtered_files)
            # Reset scratch state so the next operation starts clean.
            self.files = []
            self.folders = []
            self.scanned_sources = []
    def scan_files(self, path: str):
        '''A user can choose to scan multiple folders before enforcing a rule(s)'''
        if path in self.scanned_sources:
            print(f"WARN: Scanning operation ignored: Source '{path}' already scanned")
        else:
            (scanned_files, _) = scandir(path)
            self.files += scanned_files
            self.scanned_sources.append(path)
            print(f"INFO: Scanned {path}. {len(scanned_files)} files scanned")
    def filter_files(self, rule: FileRule) -> list[File]:
        ''' Filtering order: whitelist -> file extension -> key words '''
        filtered_files: list[File] = self.files
        if rule.whitelist is not None:
            filtered_files = filter_by_whitelist(filtered_files, rule.whitelist)
        if rule.extensions is not None:
            filtered_files = filter_by_extension(filtered_files, rule.extensions)
        if rule.keywords is not None:
            filtered_files = filter_by_key_word(filtered_files, rule.keywords)
        return filtered_files
    def generate_file_move_tokens(
            self,
            rule: FileRule,
            filtered_files: list[File]
    ) -> list[Token]:
        ''' Generates a list of MoveTokens given a file rule and a list of File objects'''
        template_list: list[Token] = []
        for file in filtered_files:
            template_list.append(Token(file.path, rule.destination, rule.action))
        return template_list
    def enforce(self) -> None:
        '''After we generate some tokens, we use them to sort files!'''
        if self.tokens == []:
            print("\nThere's nothing to do!")
            return
        for token in self.tokens:
            if not token.is_valid():
                print("Skipped invalid token...")
                continue
            if token.action == "DELETE":
                print("Deleting file not implemented...")
                continue
            # Create the destination folder on demand.
            if not os.path.exists(token.destination):
                os.makedirs(token.destination)
            # Resolve name clashes before moving/copying.
            src = check_and_rename_dupes(token.source, token.destination)
            if token.action == "MOVE":
                move(src, token.destination)
            elif token.action == "COPY":
                copy(src, token.destination)
            else:
                print(f"Action:'{token.action}' not implemented.")
        print("\nDone!")
def scandir(folder: str) -> Tuple[list[File], list[Folder]]:
    """Scan *folder* (user-expanded) and split its entries into files and folders."""
    folder = os.path.expanduser(folder)
    if not os.path.exists(folder):
        raise Exception(f"Path '{folder}' does not exist!")
    scanned_files = []
    scanned_folders = []
    for entry in os.scandir(folder):
        (name, extension) = os.path.splitext(os.path.basename(entry))
        path = os.path.abspath(entry)
        if entry.is_file():
            scanned_files.append(File(name, extension.strip("."), path))
        elif entry.is_dir():
            scanned_folders.append(Folder(name, path))
    return (scanned_files, scanned_folders)
def move(src: str, dest: str):
    """Move *src* into *dest*, reporting success or failure on stdout.

    Duplicate renaming and destination creation happen before this is called.
    """
    try:
        shutil.move(src, dest)
    except Exception as error:
        print(f"Move failed: {error}")
    else:
        print(f"Moved: {dest} <-- {src}")
def copy(src: str, dest: str):
    """Copy *src* into *dest*, reporting success or failure on stdout."""
    try:
        shutil.copy(src, dest)
    except Exception as error:
        print(f"Copy failed: {error}")
    else:
        print(f"Copied: {dest} <-- {src}")
def filter_by_whitelist(list_of_files: list[File], whitelist: list[str]) -> list[File]:
    """Yield only the File objects whose names are NOT on the whitelist."""
    return filter(lambda candidate: candidate.name not in whitelist, list_of_files)
def filter_by_extension(list_of_files: list[File], extensions: list[str]) -> list[File]:
    """Yield File objects whose extension matches any entry (leading dots ignored)."""
    wanted = [ext.strip(".") for ext in extensions]
    return filter(lambda candidate: candidate.extension in wanted, list_of_files)
def filter_by_key_word(list_of_files: list[File], words: list[str]) -> list[File]:
    """Yield File objects whose lowercased name matches any of the key words."""
    pattern = as_regex(words)
    return filter(lambda candidate: re.search(pattern, candidate.name.lower()), list_of_files)
def as_regex(list_of_key_words: list[str]) -> str:
    """Build an alternation regex that matches any of the key words.

    Each word is passed through re.escape (resolving the original TODO) so
    regex metacharacters in user-supplied keywords (e.g. "c++") are matched
    literally instead of producing a broken pattern or false matches.
    """
    return "|".join(re.escape(word) for word in list_of_key_words)
def create_file_rule(
    destination: str,
    extensions: list[str] = None,
    keywords: list[str] = None,
    whitelist: list[str] = None,
    action: str = "MOVE",
) -> FileRule:
    """Convenience constructor for FileRule.

    At least one of *extensions* / *keywords* must be given, otherwise the
    rule could never match anything.
    """
    assert not (extensions is None and keywords is None)
    return FileRule(
        destination=destination,
        extensions=extensions,
        keywords=keywords,
        whitelist=whitelist,
        action=action,
    )
# -------------------------!! Sloppy stuff, but works !!--------------------------
def check_and_rename_dupes(source: str, destination: str) -> str:
    """Rename *source* on disk when its basename already exists in *destination*.

    Appends " (N)" before the extension, picking the first free N. Returns the
    (possibly new) source path; a no-op when there is no name clash.
    """
    source_dir, base_name = os.path.split(source)
    if not os.path.exists(os.path.join(destination, base_name)):
        return source
    stem, extension = os.path.splitext(os.path.basename(base_name))
    generation = 1
    while True:
        candidate = f"{stem} ({generation}){extension}"
        if not os.path.exists(os.path.join(destination, candidate)):
            break
        generation += 1
    renamed_source = os.path.join(source_dir, candidate)
    os.rename(source, renamed_source)
    print(f"Renamed duplicate file: {source} -> {renamed_source}")
    return renamed_source
def load_config(config_path: str) -> Config:
    """Deserialize a Config object from the JSON file at *config_path*.

    Expected shape:
        {"folder_templates": [{...}], "operations":
            [{"scan_sources": [...], "rules": [{...}]}]}
    """
    with open(config_path, "r", encoding="UTF-8") as config_file:
        data = json.load(config_file)
    templates: list[FolderTemplate] = []
    operations: list[Operation] = []
    for template_data in data["folder_templates"]:
        templates.append(
            FolderTemplate(
                root_folder=template_data["root_folder"],
                folders=template_data["folders"],
                place_for_unwanted=template_data["place_for_unwanted"]
            )
        )
    for operation_data in data["operations"]:
        # Bug fix: the original annotation was `list(FileRule)` - a function
        # call, not a type - which any type checker rejects.
        file_rules: list[FileRule] = []
        for rule in operation_data["rules"]:
            file_rules.append(
                FileRule(
                    extensions=rule["extensions"],
                    keywords=rule["keywords"],
                    whitelist=rule["whitelist"],
                    destination=rule["destination"],
                    action=rule["action"]
                )
            )
        operations.append(
            Operation(
                scan_sources=operation_data["scan_sources"],
                rules=file_rules
            )
        )
    return Config(
        folder_templates=templates,
        operations=operations
    )
# -------------------------!! sloppy stuff ends here !!--------------------------
def main(argv):
    """Entry point: load the config named on the command line and enforce it."""
    print("Epic File Sorter by B0ney\n")
    try:
        config_path = argv[0]
        loaded_config: Config = load_config(config_path)
    except FileNotFoundError:
        print(f"ERROR: Invalid path {config_path}")
        return
    except IndexError:
        print("ERROR: \n You need to provide a config file! E.g. \"./configs/default.json\"")
        return
    enforcer = Enforcer(loaded_config)
    enforcer.generate_folders()
    enforcer.sort_folders()
    enforcer.sort_files()
    enforcer.enforce()
if __name__ == "__main__":
    main(sys.argv[1:])
    # Bug fix: the input() prompt used to sit at module level OUTSIDE this
    # guard, so merely importing the module blocked waiting for Enter. It now
    # only runs in script mode (kept to hold the console window open when the
    # script is launched by double-click).
    input("\nPress Enter to continue...")
| StarcoderdataPython |
16230 | <filename>examples/tensorboard/nested.py
import tensorboardX
# Write scalar summaries into three nested log directories
# ("foo", "foo/bar", "foo/bar/baz") with increasing step numbers,
# exercising nested-run discovery in the tensorboard integration.
with tensorboardX.SummaryWriter("foo") as w:
    w.add_scalar("a", 1.0, 1)
    w.add_scalar("a", 2.0, 2)
with tensorboardX.SummaryWriter("foo/bar") as w:
    w.add_scalar("a", 3.0, 3)
    w.add_scalar("a", 4.0, 4)
with tensorboardX.SummaryWriter("foo/bar/baz") as w:
    w.add_scalar("a", 5.0, 5)
    w.add_scalar("a", 6.0, 6)
| StarcoderdataPython |
3304456 | <gh_stars>0
lista = []
lista1 = []
# Read every line from stdin until EOF.
while True:
    try:
        lista.append(input())
    except EOFError:
        break
# Build a lowercased copy so the ordering below ignores case.
for i in lista:
    i = i.lower()
    lista1.append(i)
lista1.sort()
# Print the first original line whose lowercase form equals the
# lexicographically greatest one, preserving its original casing.
for i in lista:
    if lista1[-1] == i.lower():
        print (i)
        break
| StarcoderdataPython |
4813010 | import os
from setuptools import find_packages, setup
import versioneer
# Use the package README as the long description when it is present.
readMeFile = os.path.join(os.path.abspath(os.path.dirname(__file__)), "README.md")
if os.path.exists(readMeFile):
    with open(readMeFile, encoding="utf-8") as readMeFile:
        long_description = readMeFile.read()
else:
    long_description = ""
setup(
    name="docmaker",
    # Version and release commands are managed by versioneer from git tags.
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    author="<NAME>",
    author_email="<EMAIL>",
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    url="https://github.com/HurricaneLabs/docmaker",
    description="A PDF generator",
    long_description=long_description,
    install_requires=[
        "boto3",
        "defusedxml",
        "docx-mailmerge",
        "docxcompose",
        "falcon",
        "jinja2",
        "multipart",
        "python-dateutil",
        "python-frontmatter",
        # "pypandoc",
        # NOTE: pinned to a fork of pypandoc instead of the PyPI release.
        "pypandoc @ git+https://github.com/mcm/pypandoc#egg=pypandoc",
        "requests",
        "ruamel.yaml",
        "toposort",
        "werkzeug"
    ],
    entry_points={
        "console_scripts": [
            "docmaker = docmaker:main",
        ]
    },
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3 :: Only",
        "Development Status :: 5 - Production/Stable",
    ],
    bugtrack_url="https://github.com/HurricaneLabs/docmaker/issues",
)
| StarcoderdataPython |
58124 |
def clean_path(path):
    """
    Strip characters that are illegal in Windows file names from *path*.
    """
    illegal = '<>:"/\\|?*'
    return ''.join(char for char in path if char not in illegal)
| StarcoderdataPython |
3238404 | <filename>apprest/tests/unit/services/test_users.py
import json
from django.http import HttpRequest
from apprest.services.user import CalipsoUserServices
from apprest.tests.utils import CalipsoTestCase
class UserServiceTestCase(CalipsoTestCase):
    """Unit tests for CalipsoUserServices umbrella-session lookups."""
    def setUp(self):
        self.logger.debug('#################### setup test UserServiceTestCase ####')
        self.user_service = CalipsoUserServices()
    def test_lookup_existing_umbrella_hash(self):
        """A request carrying EAAHash/uid metadata (either as raw META keys or
        as HTTP_-prefixed header keys) should echo both values back."""
        self.logger.debug('#################### test_lookup_existing_umbrella_hash ####')
        eaa_hash = "b0744680-2aa3-4b12-8627-95d23e5e4af9"
        uid = "eibarz"
        request = HttpRequest()
        request.method = 'GET'
        # First variant: values set directly in request.META.
        request.META['EAAHash'] = eaa_hash
        request.META["uid"] = uid
        json_umbrella_meta = self.user_service.get_umbrella_session_hash(request)
        self.assertEqual(json_umbrella_meta.get('uid'), uid)
        self.assertEqual(json_umbrella_meta.get('EAAHash'), eaa_hash)
        # Second variant: same values via the HTTP_ header-style keys.
        del request.META['EAAHash']
        del request.META["uid"]
        request.META["HTTP_EAAHASH"] = eaa_hash
        request.META["HTTP_UID"] = uid
        json_umbrella_meta = self.user_service.get_umbrella_session_hash(request)
        self.assertEqual(json_umbrella_meta.get('uid'), uid)
        self.assertEqual(json_umbrella_meta.get('EAAHash'), eaa_hash)
    def test_lookup_none_existing_umbrella_hash(self):
        """Without any umbrella metadata the lookup should return None."""
        self.logger.debug('#################### test_lookup_existing_umbrella_hash ####')
        request = HttpRequest()
        request.method = 'GET'
        json_umbrella_meta = self.user_service.get_umbrella_session_hash(request)
        self.assertEqual(json_umbrella_meta, None)
| StarcoderdataPython |
3351543 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import numpy as np
import time
import unittest
from fvcore.common.config import CfgNode
from fvcore.common.history_buffer import HistoryBuffer
from fvcore.common.timer import Timer
class TestHistoryBuffer(unittest.TestCase):
    """Tests for fvcore's HistoryBuffer against numpy reference results."""
    def setUp(self) -> None:
        super().setUp()
        # Fixed seed so the randomized buffer lengths/values are reproducible.
        np.random.seed(42)
    @staticmethod
    def create_buffer_with_init(num_values: int, buffer_len: int = 1000000):
        """
        Return a HistoryBuffer of the given length filled with random numbers.
        Args:
            buffer_len: length of the created history buffer.
            num_values: number of random numbers added to the history buffer.
        """
        max_value = 1000
        values = np.random.randint(max_value, size=num_values)
        def create_buffer():
            buf = HistoryBuffer(buffer_len)
            for v in values:
                buf.update(v)
            return buf, values
        return create_buffer
    def test_buffer(self) -> None:
        """
        Test creation of HistoryBuffer and the methods provided in the class.
        """
        num_iters = 100
        for _ in range(num_iters):
            # Push gt_len values through a shorter buffer so older entries
            # are evicted; the buffer must retain exactly the tail.
            gt_len = 1000
            buffer_len = np.random.randint(1, gt_len)
            create_buffer = TestHistoryBuffer.create_buffer_with_init(
                gt_len, buffer_len
            )
            buf, gt = create_buffer()
            values, iterations = zip(*buf.values())
            self.assertEqual(len(values), buffer_len)
            self.assertEqual(len(iterations), buffer_len)
            self.assertTrue((values == gt[-buffer_len:]).all())
            # Iteration numbers must be the last buffer_len global indices.
            iterations_gt = np.arange(gt_len - buffer_len, gt_len)
            self.assertTrue(
                (iterations == iterations_gt).all(),
                ", ".join(str(x) for x in iterations),
            )
            # global_avg covers ALL values ever pushed, not just the window.
            self.assertAlmostEqual(buf.global_avg(), gt.mean())
            # Windowed statistics clamp the window to the buffer length.
            w = 100
            effective_w = min(w, buffer_len)
            self.assertAlmostEqual(
                buf.median(w),
                np.median(gt[-effective_w:]),
                None,
                " ".join(str(x) for x in gt[-effective_w:]),
            )
            self.assertAlmostEqual(
                buf.avg(w),
                np.mean(gt[-effective_w:]),
                None,
                " ".join(str(x) for x in gt[-effective_w:]),
            )
class TestTimer(unittest.TestCase):
    """Tests for fvcore's Timer: pause/resume/reset semantics."""
    def test_timer(self):
        """Elapsed seconds should track wall time only while running."""
        timer = Timer()
        time.sleep(0.5)
        self.assertTrue(0.99 > timer.seconds() >= 0.5)
        # While paused, elapsed time must not advance.
        timer.pause()
        time.sleep(0.5)
        self.assertTrue(0.99 > timer.seconds() >= 0.5)
        # After resume, time accumulates on top of the pre-pause total.
        timer.resume()
        time.sleep(0.5)
        self.assertTrue(1.49 > timer.seconds() >= 1.0)
        # Reset starts counting from zero again.
        timer.reset()
        self.assertTrue(0.49 > timer.seconds() >= 0)
class TestCfgNode(unittest.TestCase):
    """Tests for fvcore's CfgNode merging and attribute semantics."""
    @staticmethod
    def gen_default_cfg():
        # Baseline config the merge tests start from.
        cfg = CfgNode()
        cfg.KEY1 = "default"
        cfg.KEY2 = "default"
        cfg.EXPRESSION = [3.0]
        return cfg
    def test_merge_from_file(self):
        """
        Test merge_from_file function provided in the class.
        """
        import pkg_resources
        # YAML fixtures shipped alongside this test module.
        base_yaml = pkg_resources.resource_filename(
            __name__, "configs/base.yaml"
        )
        config_yaml = pkg_resources.resource_filename(
            __name__, "configs/config.yaml"
        )
        cfg = TestCfgNode.gen_default_cfg()
        cfg.merge_from_file(base_yaml)
        self.assertEqual(cfg.KEY1, "base")
        self.assertEqual(cfg.KEY2, "base")
        cfg = TestCfgNode.gen_default_cfg()
        with self.assertRaises(Exception):
            # config.yaml contains unsafe yaml tags,
            # test if an exception is thrown
            cfg.merge_from_file(config_yaml)
        # With allow_unsafe the same file must load; config.yaml inherits
        # from base.yaml, so KEY1 comes from base and KEY2 is overridden.
        cfg.merge_from_file(config_yaml, allow_unsafe=True)
        self.assertEqual(cfg.KEY1, "base")
        self.assertEqual(cfg.KEY2, "config")
        self.assertEqual(cfg.EXPRESSION, [1, 4, 9])
    def test_merge_from_list(self):
        """
        Test merge_from_list function provided in the class.
        """
        cfg = TestCfgNode.gen_default_cfg()
        cfg.merge_from_list(["KEY1", "list1", "KEY2", "list2"])
        self.assertEqual(cfg.KEY1, "list1")
        self.assertEqual(cfg.KEY2, "list2")
    def test_setattr(self):
        """
        Test __setattr__ function provided in the class.
        """
        cfg = TestCfgNode.gen_default_cfg()
        cfg.KEY1 = "new1"
        cfg.KEY3 = "new3"
        self.assertEqual(cfg.KEY1, "new1")
        self.assertEqual(cfg.KEY3, "new3")
        # Test computed attributes, which can be inserted regardless of whether
        # the CfgNode is frozen or not.
        cfg = TestCfgNode.gen_default_cfg()
        cfg.COMPUTED_1 = "computed1"
        self.assertEqual(cfg.COMPUTED_1, "computed1")
        cfg.freeze()
        cfg.COMPUTED_2 = "computed2"
        self.assertEqual(cfg.COMPUTED_2, "computed2")
        # Test computed attributes, which should be 'insert only' (could not be
        # updated).
        cfg = TestCfgNode.gen_default_cfg()
        cfg.COMPUTED_1 = "computed1"
        with self.assertRaises(KeyError) as err:
            cfg.COMPUTED_1 = "update_computed1"
        self.assertTrue(
            "Computed attributed 'COMPUTED_1' already exists"
            in str(err.exception)
        )
        # Resetting the same value should be safe:
        cfg.COMPUTED_1 = "computed1"
| StarcoderdataPython |
3277150 | """
SimplestCalcLib that contains basic math operations
"""
def add(first_num, second_num):
    """Return first_num + second_num (works for any types supporting +)."""
    total = first_num + second_num
    return total
def subtract(first_num, second_num):
    """Return first_num - second_num."""
    difference = first_num - second_num
    return difference
77152 | <gh_stars>1-10
# coding=utf-8
# Copyright 2022 The Fiddle-Config Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for fiddle.diff."""
import copy
import dataclasses
from typing import Any
from absl.testing import absltest
import fiddle as fdl
from fiddle import tagging
from fiddle import testing
from fiddle.experimental import daglish
from fiddle.experimental import diff
# Functions and classes that can be used to build Configs.
@dataclasses.dataclass
class SimpleClass:
    """Three-field dataclass used as a buildable target in the diff tests."""
    x: Any
    y: Any
    z: Any
@dataclasses.dataclass
class AnotherClass:
    """Four-field dataclass sharing x/y with SimpleClass, for re-alignment tests."""
    x: Any
    y: Any
    a: Any
    b: Any
def make_pair(first, second):
    """Bundle two values into a 2-tuple."""
    return first, second
def make_triple(first, second, third):
    """Bundle three values into a 3-tuple."""
    return first, second, third
def basic_fn(arg1, arg2, kwarg1=0, kwarg2=None):
    """Combine the arguments: a = arg1+arg2, b = arg2+kwarg1, c = kwarg2."""
    return {
        'a': arg1 + arg2,
        'b': arg2 + kwarg1,
        'c': kwarg2,
    }
class GreenTag(tagging.Tag):
    """Fiddle tag used by these tests to label tagged values (green variant)."""
class BlueTag(tagging.Tag):
    """Fiddle tag used by these tests to label tagged values (blue variant)."""
# Helper functions to make expected Paths easier to write (and read).
# Re-exported from fiddle's testing utilities for brevity in assertions below.
parse_path = testing.parse_path
parse_reference = testing.parse_reference
@dataclasses.dataclass(frozen=True)
class UnsupportedPathElement(daglish.PathElement):
    """Path element with no real code representation, for error-path tests."""
    # Renders as a placeholder token instead of real attribute-access code.
    code = property(lambda self: '<unsupported>')
    # Following this element is the identity: it returns the container itself.
    follow = lambda self, container: container
class DiffAlignmentTest(absltest.TestCase):
  """Tests for diff.DiffAlignment: construction, alignment, and error cases."""

  def test_constructor(self):
    """A freshly constructed alignment aligns nothing, not even the roots."""
    old = fdl.Config(make_pair, fdl.Config(SimpleClass, 1, 2, 3),
                     fdl.Config(basic_fn, 4, 5, 6))
    new = fdl.Config(make_pair, fdl.Config(basic_fn, 1, 2, 3, 4),
                     fdl.Partial(SimpleClass, z=12))
    empty_alignment = diff.DiffAlignment(old, new)
    # No values should be aligned (including the root objects `old` and `new`).
    self.assertEmpty(empty_alignment.aligned_values())
    self.assertEmpty(empty_alignment.aligned_value_ids())
    self.assertFalse(empty_alignment.is_old_value_aligned(old))
    self.assertFalse(empty_alignment.is_new_value_aligned(new))
    self.assertEqual(empty_alignment.old_name, 'old')
    self.assertEqual(empty_alignment.new_name, 'new')
    self.assertEqual(
        repr(empty_alignment),
        "<DiffAlignment from 'old' to 'new': 0 object(s) aligned>")
    self.assertEqual(
        str(empty_alignment), 'DiffAlignment:\n (no objects aligned)')

  def test_align(self):
    """Manually aligned pairs are retrievable in both directions."""
    old = fdl.Config(make_pair, fdl.Config(SimpleClass, 1, 2, [3, 4]),
                     fdl.Config(basic_fn, 5, 6, 7))
    new = fdl.Config(make_pair, fdl.Config(basic_fn, 1, 2, 3, 4),
                     fdl.Partial(SimpleClass, z=[12, 13]))
    alignment = diff.DiffAlignment(old, new)
    alignment.align(old, new)  # Same type, same __fn_or_cls__.
    alignment.align(old.first, new.first)  # Different __fn_or_cls__.
    alignment.align(old.first.z, new.second.z)  # Aligned lists.
    # Lookups work from old->new and new->old.
    self.assertIs(alignment.new_from_old(old), new)
    self.assertIs(alignment.old_from_new(new), old)
    self.assertIs(alignment.new_from_old(old.first), new.first)
    self.assertIs(alignment.old_from_new(new.first), old.first)
    self.assertIs(alignment.new_from_old(old.first.z), new.second.z)
    self.assertIs(alignment.old_from_new(new.second.z), old.first.z)
    with self.subTest('aligned_value_ids'):
      aligned_value_ids = alignment.aligned_value_ids()
      expected_aligned_value_ids = [
          diff.AlignedValueIds(id(old), id(new)),
          diff.AlignedValueIds(id(old.first), id(new.first)),
          diff.AlignedValueIds(id(old.first.z), id(new.second.z)),
      ]
      self.assertCountEqual(aligned_value_ids, expected_aligned_value_ids)
    with self.subTest('aligned_values'):
      aligned_values = alignment.aligned_values()
      expected_aligned_values = [
          diff.AlignedValues(old, new),
          diff.AlignedValues(old.first, new.first),
          diff.AlignedValues(old.first.z, new.second.z),
      ]
      # Sort both lists by id so the comparison is order-independent.
      aligned_values.sort(key=lambda p: id(p.old_value))
      expected_aligned_values.sort(key=lambda p: id(p.old_value))
      self.assertEqual(aligned_values, expected_aligned_values)
    with self.subTest('__repr__'):
      self.assertEqual(
          repr(alignment),
          "<DiffAlignment from 'old' to 'new': 3 object(s) aligned>")
    with self.subTest('__str__'):
      self.assertEqual(
          str(alignment), '\n'.join([
              'DiffAlignment:',
              ' old -> new',
              ' old.first -> new.first',
              ' old.first.z -> new.second.z',
          ]))

  def test_alignment_errors(self):
    """align() rejects mismatched types/lengths, duplicates, and non-memoizable values."""
    old = fdl.Config(make_pair, fdl.Config(SimpleClass, [1], [2], [3]),
                     fdl.Config(basic_fn, 4, 5, 6))
    new = fdl.Config(make_pair, fdl.Config(basic_fn, [1], [2], 3, 4),
                     fdl.Partial(SimpleClass, z=[12, 13]))
    alignment = diff.DiffAlignment(old, new)
    alignment.align(old.first.x, new.first.arg1)
    with self.subTest('type(old_value) != type(new_value)'):
      with self.assertRaisesRegex(diff.AlignmentError, '.* different types .*'):
        alignment.align(old.second, new.second)
    with self.subTest('old_value already aligned'):
      with self.assertRaisesRegex(
          diff.AlignmentError,
          'An alignment has already been added for old value .*'):
        alignment.align(old.first.x, new.first.arg2)
    with self.subTest('new_value already aligned'):
      with self.assertRaisesRegex(
          diff.AlignmentError,
          'An alignment has already been added for new value .*'):
        alignment.align(old.first.y, new.first.arg1)
    with self.subTest('len(old_value) != len(new_value)'):
      with self.assertRaisesRegex(diff.AlignmentError,
                                  '.* different lengths .*'):
        alignment.align(old.first.z, new.second.z)
    with self.subTest('non-memoizable old_value'):
      with self.assertRaisesRegex(
          diff.AlignmentError,
          'old_value=4 may not be aligned because it is not '
          'memoizable'):
        alignment.align(old.second.arg1, new.second.z)
    with self.subTest('non-memoizable new_value'):
      with self.assertRaisesRegex(
          diff.AlignmentError,
          'new_value=3 may not be aligned because it is not '
          'memoizable'):
        alignment.align(old.first.z, new.first.kwarg1)

  def test_align_by_id(self):
    """align_by_id aligns exactly the objects shared (same id) by old and new."""
    old = fdl.Config(make_pair, fdl.Config(SimpleClass, 1, 2, [3, 4]),
                     fdl.Config(basic_fn, 5, 6, 7))
    # `new` reuses old.first and old.first.z directly (shared objects).
    new = fdl.Config(make_pair, old.first,
                     fdl.Partial(SimpleClass, z=old.first.z))
    alignment = diff.align_by_id(old, new)
    self.assertCountEqual(alignment.aligned_values(), [
        diff.AlignedValues(old.first.z, new.second.z),
        diff.AlignedValues(old.first, new.first),
    ])

  def test_align_heuristically(self):
    """Heuristic alignment combines id-, path-, and equality-based matching."""
    c = fdl.Config(SimpleClass)  # Shared object (same id) in `old` and `new`
    d = fdl.Config(SimpleClass, x='bop')
    old = fdl.Config(
        make_triple,
        first=fdl.Config(SimpleClass, x=1, y=2, z=[3, 4]),
        second=fdl.Config(basic_fn, arg1=[5], arg2=5, kwarg1=c),
        third=[[1], 2])
    new = fdl.Config(
        make_triple,
        first=fdl.Config(basic_fn, arg1=1, arg2=c, kwarg1=3, kwarg2=4),
        second=fdl.Partial(basic_fn, arg1=[8], arg2=[3, 4], kwarg1=d),
        third=[[1, 2], 2, [3, 4]])
    alignment = diff.align_heuristically(old, new)
    self.assertCountEqual(
        alignment.aligned_values(),
        [
            # Values aligned by id:
            diff.AlignedValues(old.second.kwarg1, new.first.arg2),
            # Values aligned by path:
            diff.AlignedValues(old, new),
            diff.AlignedValues(old.first, new.first),
            diff.AlignedValues(old.second.arg1, new.second.arg1),
            # Values aligned by equality:
            diff.AlignedValues(old.first.z, new.second.arg2),
        ])
class ReferenceTest(absltest.TestCase):
  """Tests for diff.Reference."""

  def test_repr(self):
    """repr renders the root name followed by the path in code form."""
    reference = diff.Reference(
        'old', (daglish.Attr('foo'), daglish.Index(1), daglish.Key('bar')))
    self.assertEqual(repr(reference), "<Reference: old.foo[1]['bar']>")
class DiffTest(absltest.TestCase):
  """Tests for diff.Diff."""

  def test_str(self):
    """str(Diff) lists changes (sorted by path) and new shared values."""
    cfg_diff = diff.Diff(
        changes={
            parse_path('.foo[1]'):
                diff.ModifyValue(2),
            parse_path('.foo[2]'):
                diff.SetValue(parse_reference('old', '.bar')),
            parse_path('.bar.x'):
                diff.DeleteValue(),
            parse_path('.bar.y'):
                diff.ModifyValue(parse_reference('new_shared_values', '[0]')),
            parse_path('.bar.z'):
                diff.SetValue(
                    {'a': parse_reference('new_shared_values', '[0]')}),
        },
        new_shared_values=([1, 2, parse_reference('old', '.bar')],))
    expected_str = (
        'Diff(changes=[\n'
        ' .foo[1]: ModifyValue(new_value=2)\n'
        ' .foo[2]: SetValue(new_value=<Reference: old.bar>)\n'
        ' .bar.x: DeleteValue()\n'
        ' .bar.y: ModifyValue(new_value='
        '<Reference: '
        'new_shared_values[0]>)\n'
        ' .bar.z: SetValue(new_value='
        "{'a': <Reference: new_shared_values[0]>})\n"
        ' ],\n'
        ' new_shared_values=[\n'
        ' [1, 2, <Reference: old.bar>]\n'
        ' ])')
    self.assertEqual(str(cfg_diff), expected_str)
class DiffFromAlignmentBuilderTest(absltest.TestCase):
  """Tests for building a Diff from a DiffAlignment."""

  def check_diff(self,
                 old,
                 new,
                 expected_changes,
                 expected_new_shared_values=()):
    """Checks that building a Diff generates the expected values.

    Builds a diff using a heuristic alignment between `old` and `new`, and
    then checks that `diff.changes` and `diff.new_shared_values` have the
    indicated values.

    Args:
      old: The `old` value for the diff.
      new: The `new` value for the diff.
      expected_changes: Dictionary mapping string path representations to
        DiffOperations.  The keys are parsed using `parse_path`.
      expected_new_shared_values: Tuple of expected new shared values.
    """
    alignment = diff.align_heuristically(old, new)
    cfg_diff = diff.build_diff_from_alignment(alignment)
    self.assertEqual(
        cfg_diff.changes,
        dict([(parse_path(p), c) for (p, c) in expected_changes.items()]))
    self.assertEqual(cfg_diff.new_shared_values, expected_new_shared_values)

  def make_test_diff_builder(self):
    """Returns a DiffBuilder that can be used for testing."""
    c = fdl.Config(SimpleClass)  # Shared object (same id)
    old = fdl.Config(make_pair, fdl.Config(SimpleClass, 1, 2, [3, 4]),
                     fdl.Config(basic_fn, [5], [6, 7], c))
    new = fdl.Config(make_pair, fdl.Config(basic_fn, 1, c, 3, 4.0),
                     fdl.Partial(basic_fn, [8], 9, [3, 4]))
    # Manually align a fixed set of value pairs (rather than heuristically),
    # so the resulting diff is fully deterministic for the tests below.
    aligned_values = [
        diff.AlignedValues(old, new),
        diff.AlignedValues(old.first, new.first),
        diff.AlignedValues(old.second.arg1, new.second.arg1),
        diff.AlignedValues(old.second.kwarg1, new.first.arg2),
        diff.AlignedValues(old.first.z, new.second.kwarg1),
    ]
    alignment = diff.DiffAlignment(old, new)
    for aligned_value in aligned_values:
      alignment.align(aligned_value.old_value, aligned_value.new_value)
    return diff._DiffFromAlignmentBuilder(alignment)

  def test_modify_buildable_callable(self):
    """Changing a Config's callable shows up as a .__fn_or_cls__ change."""
    old = fdl.Config(AnotherClass, fdl.Config(SimpleClass, 1, 2), 3)
    new = copy.deepcopy(old)
    fdl.update_callable(new, SimpleClass)
    fdl.update_callable(new.x, AnotherClass)
    expected_changes = {
        '.__fn_or_cls__': diff.ModifyValue(SimpleClass),
        '.x.__fn_or_cls__': diff.ModifyValue(AnotherClass)
    }
    self.check_diff(old, new, expected_changes)

  def test_modify_buildable_argument(self):
    old = fdl.Config(SimpleClass, 1, fdl.Config(AnotherClass, 2, 3))
    new = copy.deepcopy(old)
    new.x = 11
    new.y.x = 22
    expected_changes = {
        '.x': diff.ModifyValue(11),
        '.y.x': diff.ModifyValue(22)
    }
    self.check_diff(old, new, expected_changes)

  def test_modify_sequence_element(self):
    old = fdl.Config(SimpleClass, [1, 2, [3]])
    new = copy.deepcopy(old)
    new.x[0] = 11
    new.x[2][0] = 33
    expected_changes = {
        '.x[0]': diff.ModifyValue(11),
        '.x[2][0]': diff.ModifyValue(33)
    }
    self.check_diff(old, new, expected_changes)

  def test_modify_dict_item(self):
    old = fdl.Config(SimpleClass, {'a': 2, 'b': 4, 'c': {'d': 7}})
    new = copy.deepcopy(old)
    new.x['a'] = 11
    new.x['c']['d'] = 33
    expected_changes = {
        ".x['a']": diff.ModifyValue(11),
        ".x['c']['d']": diff.ModifyValue(33)
    }
    self.check_diff(old, new, expected_changes)

  def test_set_buildable_argument(self):
    """Newly-assigned arguments appear as SetValue (vs ModifyValue)."""
    old = fdl.Config(SimpleClass, 1, fdl.Config(AnotherClass, 2, 3))
    new = copy.deepcopy(old)
    new.z = 11
    new.y.a = 22
    expected_changes = {'.z': diff.SetValue(11), '.y.a': diff.SetValue(22)}
    self.check_diff(old, new, expected_changes)

  def test_set_dict_item(self):
    old = fdl.Config(SimpleClass, {'a': 2, 'b': 4, 'c': {'d': 7}})
    new = copy.deepcopy(old)
    new.x['foo'] = 11
    new.x['c']['bar'] = 33
    expected_changes = {
        ".x['foo']": diff.SetValue(11),
        ".x['c']['bar']": diff.SetValue(33)
    }
    self.check_diff(old, new, expected_changes)

  def test_delete_buildable_argument(self):
    old = fdl.Config(SimpleClass, 1, fdl.Config(AnotherClass, 2, 3),
                     fdl.Config(SimpleClass, 4))
    new = copy.deepcopy(old)
    del new.x
    del new.y.x
    del new.z
    expected_changes = {
        '.x': diff.DeleteValue(),
        '.y.x': diff.DeleteValue(),
        '.z': diff.DeleteValue()
    }
    self.check_diff(old, new, expected_changes)

  def test_delete_dict_item(self):
    old = fdl.Config(SimpleClass, {'a': 2, 'b': {}, 'c': {'d': 7}})
    new = copy.deepcopy(old)
    del new.x['a']
    del new.x['b']
    del new.x['c']['d']
    expected_changes = {
        ".x['a']": diff.DeleteValue(),
        ".x['b']": diff.DeleteValue(),
        ".x['c']['d']": diff.DeleteValue()
    }
    self.check_diff(old, new, expected_changes)

  def test_add_shared_new_objects(self):
    """New values shared in several places become new_shared_values entries."""
    old = fdl.Config(
        SimpleClass,
        x=1,
        y=fdl.Config(SimpleClass, x=2, y=3, z=[12]),
        z=fdl.Config(SimpleClass, x=4))
    new = copy.deepcopy(old)
    # new.x, new.x[2], and new.y.y are each referenced from multiple places,
    # so they must be emitted once and referred to by Reference.
    new.x = [1, 2, [3, 4], new.y.z]
    new.y.x = new.x
    new.y.y = [99]
    new.z.y = fdl.Config(SimpleClass, new.x[2], new.y.y)
    expected_new_shared_values = (
        [3, 4],
        [
            1, 2,
            parse_reference('new_shared_values', '[0]'),
            parse_reference('old', '.y.z')
        ],
        [99],
    )
    expected_changes = {
        '.x':
            diff.ModifyValue(parse_reference('new_shared_values', '[1]')),
        '.y.x':
            diff.ModifyValue(parse_reference('new_shared_values', '[1]')),
        '.y.y':
            diff.ModifyValue(parse_reference('new_shared_values', '[2]')),
        '.z.y':
            diff.SetValue(
                fdl.Config(SimpleClass,
                           parse_reference('new_shared_values', '[0]'),
                           parse_reference('new_shared_values', '[2]'))),
    }
    self.check_diff(old, new, expected_changes, expected_new_shared_values)

  def test_multiple_modifications(self):
    """Full diff for the fixture built by make_test_diff_builder."""
    cfg_diff = self.make_test_diff_builder().build_diff()
    expected_changes = {
        '.first.__fn_or_cls__': diff.ModifyValue(basic_fn),
        '.first.x': diff.DeleteValue(),
        '.first.y': diff.DeleteValue(),
        '.first.z': diff.DeleteValue(),
        '.first.arg1': diff.SetValue(1),
        '.first.arg2': diff.SetValue(parse_reference('old', '.second.kwarg1')),
        '.first.kwarg1': diff.SetValue(3),
        '.first.kwarg2': diff.SetValue(4.0),
        '.second': diff.ModifyValue(
            fdl.Partial(basic_fn, parse_reference('old', '.second.arg1'),
                        9, parse_reference('old', '.first.z'))),
        '.second.arg1[0]': diff.ModifyValue(8)
    }  # pyformat: disable
    self.assertEqual(
        cfg_diff.changes,
        dict([(parse_path(p), c) for (p, c) in expected_changes.items()]))
    self.assertEqual(cfg_diff.new_shared_values, ())

  def test_replace_object_with_equal_value(self):
    """Equality-based alignment is blocked when it would break sharing."""
    c = SimpleClass(1, 2, 3)
    with self.subTest('with sharing'):
      old = fdl.Config(SimpleClass, x=c, y=[4, c, 5])
      new = copy.deepcopy(old)
      new.y[1] = SimpleClass(1, 2, 3)
      self.assertEqual(new.x, new.y[1])
      self.assertIsNot(new.x, new.y[1])
      # new.y[1] can't be aligned with old.y[1], since old.y[1] is the
      # same object as old.x, and new.x is not new.y[1]. So the diff generates
      # a new value.
      expected_changes = {'.y[1]': diff.ModifyValue(SimpleClass(1, 2, 3))}
      self.check_diff(old, new, expected_changes)
    with self.subTest('without sharing'):
      # But in this example, we change x=c to x=9, so now new.y[1] can be
      # aligned with old.y[1], and the diff contains no changes.
      old = fdl.Config(SimpleClass, x=9, y=[4, c, 5])
      new = copy.deepcopy(old)
      new.y[1] = SimpleClass(1, 2, 3)
      self.check_diff(old, new, {})

  def test_modify_tagged_values(self):
    """Tag-set and wrapped-value changes are reported via .tags/.value paths."""
    old = fdl.Config(
        SimpleClass,
        x=GreenTag.new([1]),
        y=GreenTag.new([5]),
        z=GreenTag.new(BlueTag.new([20])))
    new = fdl.Config(
        SimpleClass,
        x=BlueTag.new([1]),
        y=GreenTag.new([6]),
        z=BlueTag.new(GreenTag.new({1: 2})))
    expected_changes = {
        '.x.tags': diff.ModifyValue({BlueTag}),
        '.y.value[0]': diff.ModifyValue(6),
        '.z.tags': diff.ModifyValue({BlueTag}),
        '.z.value.tags': diff.ModifyValue({GreenTag}),
        '.z.value.value': diff.ModifyValue({1: 2}),
    }
    self.check_diff(old, new, expected_changes)

  def test_replace_value_with_tags(self):
    """Swapping a tagged value with its tag set is a plain ModifyValue."""
    tagged_value = BlueTag.new(5)
    self.check_diff(
        old=[tagged_value.tags],
        new=[tagged_value],
        expected_changes={'[0]': diff.ModifyValue(tagged_value)})
    self.check_diff(
        old=[tagged_value],
        new=[tagged_value.tags],
        expected_changes={'[0]': diff.ModifyValue(tagged_value.tags)})

  def test_shared_new_tags(self):
    """A tagged value used twice is emitted once via new_shared_values."""
    tagged_value = BlueTag.new([0])
    old = fdl.Config(SimpleClass)
    new = fdl.Config(SimpleClass, x=tagged_value, y=tagged_value)
    expected_changes = {
        '.x': diff.SetValue(parse_reference('new_shared_values', '[1]')),
        '.y': diff.SetValue(parse_reference('new_shared_values', '[1]'))
    }
    expected_new_shared_values = (
        [0],
        BlueTag.new(parse_reference('new_shared_values', '[0]')),
    )
    self.check_diff(old, new, expected_changes, expected_new_shared_values)

  def test_modify_root_tag(self):
    old = GreenTag.new([1])
    new = BlueTag.new([1])
    expected_changes = {
        '.tags': diff.ModifyValue({BlueTag}),
    }
    self.check_diff(old, new, expected_changes)

  def test_diff_from_alignment_builder_can_only_build_once(self):
    diff_builder = self.make_test_diff_builder()
    diff_builder.build_diff()
    with self.assertRaisesRegex(ValueError,
                                'build_diff should be called at most once'):
      diff_builder.build_diff()

  def test_aligned_or_equal(self):
    """aligned_or_equal is true for aligned pairs or (unaligned) equal values."""
    diff_builder = self.make_test_diff_builder()
    old = diff_builder.alignment.old
    new = diff_builder.alignment.new
    self.assertTrue(diff_builder.aligned_or_equal(old, new))
    self.assertTrue(diff_builder.aligned_or_equal(old.first, new.first))
    self.assertTrue(diff_builder.aligned_or_equal(old.first.x, new.first.arg1))
    self.assertTrue(
        diff_builder.aligned_or_equal(old.second.kwarg1, new.first.arg2))
    self.assertFalse(diff_builder.aligned_or_equal(old.second, new.second))
    self.assertFalse(diff_builder.aligned_or_equal(old.first.x, new.first.arg2))
    self.assertFalse(diff_builder.aligned_or_equal(old.second, new.second))
    self.assertFalse(
        diff_builder.aligned_or_equal(old.first.z[1], new.first.kwarg2))
class ResolveDiffReferencesTest(absltest.TestCase):
  """Tests for diff._resolve_diff_references.

  Resolution replaces Reference placeholders inside a Diff with the actual
  objects they point at (in `old` or in the diff's own new_shared_values).
  """

  def test_resolve_ref_from_change_to_old(self):
    old = fdl.Config(SimpleClass, x=[1])
    cfg_diff = diff.Diff(
        changes={parse_path('.z'): diff.SetValue(parse_reference('old', '.x'))})
    resolved_diff = diff._resolve_diff_references(cfg_diff, old)
    diff_z = resolved_diff.changes[parse_path('.z')]
    self.assertIsInstance(diff_z, diff.SetValue)
    # The reference must resolve to the *same object* as old.x, not a copy.
    self.assertIs(diff_z.new_value, old.x)

  def test_resolve_ref_from_change_to_new_shared_value(self):
    old = fdl.Config(SimpleClass, x=[1])
    changes = {
        parse_path('.z'):
            diff.SetValue(parse_reference('new_shared_values', '[0]'))
    }
    new_shared_values = ([1],)
    cfg_diff = diff.Diff(changes, new_shared_values)
    resolved_diff = diff._resolve_diff_references(cfg_diff, old)
    diff_z = resolved_diff.changes[parse_path('.z')]
    self.assertIsInstance(diff_z, diff.SetValue)
    self.assertIs(diff_z.new_value, resolved_diff.new_shared_values[0])

  def test_resolve_ref_from_new_shared_value_to_old(self):
    old = fdl.Config(SimpleClass, x=[1])
    changes = {
        parse_path('.z'):
            diff.SetValue(parse_reference('new_shared_values', '[0]')),
    }
    new_shared_values = ([parse_reference('old', '.x')],)
    cfg_diff = diff.Diff(changes, new_shared_values)
    resolved_diff = diff._resolve_diff_references(cfg_diff, old)
    diff_z = resolved_diff.changes[parse_path('.z')]
    self.assertIsInstance(diff_z, diff.SetValue)
    self.assertIs(diff_z.new_value, resolved_diff.new_shared_values[0])
    self.assertIs(resolved_diff.new_shared_values[0][0], old.x)

  def test_resolve_ref_from_new_shared_value_to_new_shared_value(self):
    """new_shared_values entries may reference earlier entries."""
    old = fdl.Config(SimpleClass, x=[1])
    changes = {
        parse_path('.z'):
            diff.SetValue([
                parse_reference('new_shared_values', '[0]'),
                parse_reference('new_shared_values', '[1]')
            ])
    }
    new_shared_values = ([1], [parse_reference('new_shared_values', '[0]')])
    cfg_diff = diff.Diff(changes, new_shared_values)
    resolved_diff = diff._resolve_diff_references(cfg_diff, old)
    diff_z = resolved_diff.changes[parse_path('.z')]
    self.assertIsInstance(diff_z, diff.SetValue)
    self.assertIs(diff_z.new_value[0], resolved_diff.new_shared_values[0])
    self.assertIs(diff_z.new_value[1], resolved_diff.new_shared_values[1])
    self.assertIs(resolved_diff.new_shared_values[1][0],
                  resolved_diff.new_shared_values[0])

  def test_resolve_diff_multiple_references(self):
    """Resolution handles many cross-linked references in one Diff."""
    old = [[1], {'x': [2], 'y': [3]}, fdl.Config(SimpleClass, z=4), [5]]
    cfg_diff = diff.Diff(
        changes={
            parse_path("[1]['x']"):
                diff.ModifyValue(parse_reference('old', "[1]['y']")),
            parse_path("[1]['y']"):
                diff.ModifyValue(parse_reference('old', "[1]['x']")),
            parse_path("[1]['z']"):
                diff.SetValue(parse_reference('old', '[2]')),
            parse_path('[2].x'):
                diff.SetValue(parse_reference('new_shared_values', '[0]')),
            parse_path('[2].y'):
                diff.SetValue(parse_reference('new_shared_values', '[0]')),
            parse_path('[2].z'):
                diff.ModifyValue(parse_reference('new_shared_values', '[1]')),
        },
        new_shared_values=([parse_reference('old', '[3]')], [
            parse_reference('old', '[0]'),
            parse_reference('new_shared_values', '[0]')
        ]),
    )
    resolved_diff = diff._resolve_diff_references(cfg_diff, old)
    diff_1_x = resolved_diff.changes[parse_path("[1]['x']")]
    self.assertIsInstance(diff_1_x, diff.ModifyValue)
    self.assertIs(diff_1_x.new_value, old[1]['y'])
    diff_1_y = resolved_diff.changes[parse_path("[1]['y']")]
    self.assertIsInstance(diff_1_y, diff.ModifyValue)
    self.assertIs(diff_1_y.new_value, old[1]['x'])
    diff_1_z = resolved_diff.changes[parse_path("[1]['z']")]
    self.assertIsInstance(diff_1_z, diff.SetValue)
    self.assertIs(diff_1_z.new_value, old[2])
    diff_2_x = resolved_diff.changes[parse_path('[2].x')]
    self.assertIsInstance(diff_2_x, diff.SetValue)
    self.assertIs(diff_2_x.new_value, resolved_diff.new_shared_values[0])
    diff_2_y = resolved_diff.changes[parse_path('[2].y')]
    self.assertIsInstance(diff_2_y, diff.SetValue)
    self.assertIs(diff_2_y.new_value, resolved_diff.new_shared_values[0])
    diff_2_z = resolved_diff.changes[parse_path('[2].z')]
    self.assertIsInstance(diff_2_z, diff.ModifyValue)
    self.assertIs(diff_2_z.new_value, resolved_diff.new_shared_values[1])

  def test_error_unexpected_reference_root(self):
    """Only 'old' and 'new_shared_values' are valid Reference roots."""
    old = fdl.Config(SimpleClass, x=[1])
    cfg_diff = diff.Diff(
        changes={parse_path('.z'): diff.SetValue(parse_reference('foo', '.x'))})
    with self.assertRaisesRegex(ValueError, 'Unexpected Reference.root'):
      diff._resolve_diff_references(cfg_diff, old)
class ApplyDiffTest(absltest.TestCase):
  """Tests for diff.apply_diff (in-place mutation of a structure)."""

  def test_delete_buildable_argument(self):
    old = fdl.Config(SimpleClass, x=5, y=2)
    cfg_diff = diff.Diff({parse_path('.x'): diff.DeleteValue()})
    diff.apply_diff(cfg_diff, old)
    self.assertEqual(old, fdl.Config(SimpleClass, y=2))

  def test_modify_buildable_argument(self):
    old = fdl.Config(SimpleClass, x=5, y=2)
    cfg_diff = diff.Diff({parse_path('.x'): diff.ModifyValue(6)})
    diff.apply_diff(cfg_diff, old)
    self.assertEqual(old, fdl.Config(SimpleClass, x=6, y=2))

  def test_set_buildable_argument(self):
    old = fdl.Config(SimpleClass, x=5, y=2)
    cfg_diff = diff.Diff({parse_path('.z'): diff.SetValue(6)})
    diff.apply_diff(cfg_diff, old)
    self.assertEqual(old, fdl.Config(SimpleClass, x=5, y=2, z=6))

  def test_modify_buildable_callable(self):
    """Changing __fn_or_cls__ plus the argument adjustments it requires."""
    old = fdl.Config(SimpleClass, x=5, z=2)
    cfg_diff = diff.Diff({
        parse_path('.__fn_or_cls__'): diff.ModifyValue(AnotherClass),
        parse_path('.z'): diff.DeleteValue(),
        parse_path('.a'): diff.SetValue(3)
    })
    diff.apply_diff(cfg_diff, old)
    self.assertEqual(old, fdl.Config(AnotherClass, x=5, a=3))

  def test_delete_dict_item(self):
    old = fdl.Config(SimpleClass, x={'1': 2})
    cfg_diff = diff.Diff({parse_path('.x["1"]'): diff.DeleteValue()})
    diff.apply_diff(cfg_diff, old)
    self.assertEqual(old, fdl.Config(SimpleClass, x={}))

  def test_modify_dict_item(self):
    old = fdl.Config(SimpleClass, x={'1': 2})
    cfg_diff = diff.Diff({parse_path('.x["1"]'): diff.ModifyValue(6)})
    diff.apply_diff(cfg_diff, old)
    self.assertEqual(old, fdl.Config(SimpleClass, x={'1': 6}))

  def test_set_dict_item(self):
    old = fdl.Config(SimpleClass, x={'1': 2})
    cfg_diff = diff.Diff({parse_path('.x["2"]'): diff.SetValue(6)})
    diff.apply_diff(cfg_diff, old)
    self.assertEqual(old, fdl.Config(SimpleClass, x={'1': 2, '2': 6}))

  def test_modify_list_item(self):
    old = fdl.Config(SimpleClass, x=[1, 2])
    cfg_diff = diff.Diff({parse_path('.x[0]'): diff.ModifyValue(8)})
    diff.apply_diff(cfg_diff, old)
    self.assertEqual(old, fdl.Config(SimpleClass, x=[8, 2]))

  def test_swap_siblings(self):
    """References let two values be exchanged atomically."""
    old = [fdl.Config(SimpleClass, 1), fdl.Config(basic_fn, 2)]
    cfg_diff = diff.Diff({
        parse_path('[0]'): diff.ModifyValue(parse_reference('old', '[1]')),
        parse_path('[1]'): diff.ModifyValue(parse_reference('old', '[0]'))
    })
    diff.apply_diff(cfg_diff, old)
    self.assertEqual(old, [fdl.Config(basic_fn, 2), fdl.Config(SimpleClass, 1)])

  def test_swap_child_and_parent(self):
    """A child can be promoted to parent (and vice versa) in one diff."""
    original_child = fdl.Config(AnotherClass)
    original_parent = fdl.Config(SimpleClass, x=original_child)
    old = [original_parent]
    cfg_diff = diff.Diff({
        parse_path('[0]'): diff.ModifyValue(parse_reference('old', '[0].x')),
        parse_path('[0].x'): diff.DeleteValue(),
        parse_path('[0].x.x'): diff.SetValue(parse_reference('old', '[0]'))
    })
    diff.apply_diff(cfg_diff, old)
    self.assertEqual(old, [fdl.Config(AnotherClass, x=fdl.Config(SimpleClass))])
    # Identity check: the original objects were reused, not rebuilt.
    self.assertIs(old[0], original_child)
    self.assertIs(old[0].x, original_parent)

  def test_apply_diff_with_multiple_references(self):
    old = [[1], {'x': [2], 'y': [3]}, fdl.Config(SimpleClass, z=4), [5]]
    cfg_diff = diff.Diff(
        changes={
            parse_path("[1]['x']"):
                diff.ModifyValue(parse_reference('old', "[1]['y']")),
            parse_path("[1]['y']"):
                diff.ModifyValue(parse_reference('old', "[1]['x']")),
            parse_path("[1]['z']"):
                diff.SetValue(parse_reference('old', '[2]')),
            parse_path('[2].x'):
                diff.SetValue(parse_reference('new_shared_values', '[0]')),
            parse_path('[2].z'):
                diff.ModifyValue(parse_reference('new_shared_values', '[1]')),
        },
        new_shared_values=(parse_reference('old', '[3]'), [
            parse_reference('old', '[0]'),
            parse_reference('new_shared_values', '[0]')
        ]),
    )
    # Manually apply the same changes described by the diff:
    new = copy.deepcopy(old)
    new[1]['x'], new[1]['y'] = new[1]['y'], new[1]['x']
    new[1]['z'] = new[2]
    new[2].x = new[3]
    new[2].z = [new[0], new[3]]
    diff.apply_diff(cfg_diff, old)
    self.assertEqual(old, new)

  def test_error_modify_root(self):
    old = fdl.Config(SimpleClass, x=[1, 2])
    cfg_diff = diff.Diff({(): diff.ModifyValue(8)})
    with self.assertRaisesRegex(
        ValueError, 'Modifying the root `structure` object is not supported'):
      diff.apply_diff(cfg_diff, old)

  def test_error_parent_does_not_exist(self):
    old = fdl.Config(SimpleClass, x=[1, 2])
    cfg_diff = diff.Diff({parse_path('.y[1]'): diff.ModifyValue(8)})
    with self.assertRaisesRegex(ValueError, 'parent does not exist'):
      diff.apply_diff(cfg_diff, old)

  def test_error_wrong_child_path_type(self):
    old = fdl.Config(SimpleClass, x=[1, 2])
    cfg_diff = diff.Diff({parse_path('.x.y'): diff.ModifyValue(8)})
    with self.assertRaisesRegex(ValueError, 'parent has unexpected type'):
      diff.apply_diff(cfg_diff, old)

  def test_error_delete_value_not_found(self):
    old = fdl.Config(SimpleClass, x=[1, 2])
    cfg_diff = diff.Diff({parse_path('.y'): diff.DeleteValue()})
    with self.assertRaisesRegex(ValueError, r'value not found\.'):
      diff.apply_diff(cfg_diff, old)

  def test_error_modify_value_not_found(self):
    old = fdl.Config(SimpleClass, x=[1, 2])
    cfg_diff = diff.Diff({parse_path('.y'): diff.ModifyValue(5)})
    with self.assertRaisesRegex(ValueError, 'value not found; use SetValue'):
      diff.apply_diff(cfg_diff, old)

  def test_error_set_value_already_has_value(self):
    old = fdl.Config(SimpleClass, x=[1, 2])
    cfg_diff = diff.Diff({parse_path('.x'): diff.SetValue(5)})
    with self.assertRaisesRegex(
        ValueError, 'already has a value; use ModifyValue to overwrite'):
      diff.apply_diff(cfg_diff, old)

  def test_error_multiple_errors(self):
    """All failures are collected and reported together, sorted by path."""
    old = fdl.Config(SimpleClass, x=[1, 2])
    cfg_diff = diff.Diff({
        parse_path('.y.z'): diff.SetValue(5),
        parse_path('.x.y'): diff.ModifyValue(3),
        parse_path('.x.z'): diff.DeleteValue()
    })
    with self.assertRaisesRegex(
        ValueError, '\n'.join([
            r'Unable to apply diff:',
            r' \* For <root>.x.y=ModifyValue\(new_value=3\): .*',
            r' \* For <root>.x.z=DeleteValue\(\): .*',
            r' \* For <root>.y.z=SetValue\(new_value=5\): .*',
        ])):
      diff.apply_diff(cfg_diff, old)

  def test_error_delete_index(self):
    old = fdl.Config(SimpleClass, x=[1, 2])
    cfg_diff = diff.Diff({parse_path('.x[0]'): diff.DeleteValue()})
    with self.assertRaisesRegex(ValueError,
                                'DeleteValue does not support Index'):
      diff.apply_diff(cfg_diff, old)

  def test_error_set_index(self):
    old = fdl.Config(SimpleClass, x=[1, 2])
    cfg_diff = diff.Diff({parse_path('.x[2]'): diff.SetValue(5)})
    with self.assertRaisesRegex(ValueError, 'SetValue does not support Index'):
      diff.apply_diff(cfg_diff, old)

  def test_error_modify_unsupported_path_elt(self):
    # Exception unreachable via public methods; test directly for coverage.
    with self.assertRaisesRegex(
        ValueError, 'ModifyValue does not support UnsupportedPathElement'):
      diff.ModifyValue(5).apply([], UnsupportedPathElement())

  def test_error_child_has_value_unsupported_path_elt(self):
    # Exception unreachable via public methods; test directly for coverage.
    with self.assertRaisesRegex(
        ValueError, 'Unsupported PathElement: UnsupportedPathElement'):
      diff._child_has_value([], UnsupportedPathElement())
# Run the test suite when executed as a script.
if __name__ == '__main__':
  absltest.main()
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
from . import file_utils
# Directory of fixture files exercised by the tests below.
TEST_DIR = os.path.join(
    os.path.dirname(__file__),
    'test_data'
)
class GetFilesTest(unittest.TestCase):
    """Tests for file_utils._get_file_paths."""

    def test_getfiles_ignores_dotfiles(self):
        # The predicate accepts everything, so any filtering observed must
        # come from _get_file_paths itself (dotfiles skipped).
        files = file_utils._get_file_paths(
            TEST_DIR, lambda x: True)
        assert 'dotfile_tag' not in str(files)
class GetPythonFilesTest(unittest.TestCase):
    """Tests for file_utils.get_python_files (App Engine lib/ handling)."""

    def test_finds_python_files(self):
        files = file_utils.get_python_files(TEST_DIR)
        assert 'test.py' in files[0]

    def test_excludes_appengine_lib_folders(self):
        # Files inside an App Engine vendored lib/ folder should be skipped.
        files = file_utils.get_python_files(TEST_DIR)
        assert 'should_be_ignored.py' not in str(files)

    def test_includes_appengine_lib_files(self):
        # ...but files merely *named* like libs are still included.
        files = file_utils.get_python_files(TEST_DIR)
        assert 'lib.py' in str(files)
        assert 'library.py' in str(files)

    def test_includes_appengine_folders(self):
        files = file_utils.get_python_files(TEST_DIR)
        assert 'gae_sample.py' in str(files)

    def test_includes_non_appengine_lib_folders(self):
        # lib/ folders outside App Engine apps are not vendored deps.
        files = file_utils.get_python_files(TEST_DIR)
        assert 'gcf_lib.py' in str(files)
class GetDriftYamlFilesTest(unittest.TestCase):
    """Tests for file_utils.get_drift_yaml_files (.drift-data.* discovery)."""

    def test_finds_yml_files(self):
        files = file_utils.get_drift_yaml_files(TEST_DIR)
        assert '.drift-data.yml' in str(files)

    def test_finds_yaml_files(self):
        files = file_utils.get_drift_yaml_files(TEST_DIR)
        assert '.drift-data.yaml' in str(files)

    def test_excludes_yml_files_with_wrong_name(self):
        # Only the exact .drift-data basename qualifies.
        files = file_utils.get_drift_yaml_files(TEST_DIR)
        assert '.bad-name.yml' not in str(files)
class GetRegionTagsTest(unittest.TestCase):
    """Tests for file_utils.get_region_tags (which files are scanned for tags)."""

    def test_finds_region_tags(self):
        region_tags = file_utils.get_region_tags(TEST_DIR)
        assert 'region_tag' in region_tags

    def test_includes_dockerfile_case_insensitive(self):
        region_tags = file_utils.get_region_tags(TEST_DIR)
        assert 'dockerfile_tag1' in region_tags
        assert 'dockerfile_tag_odd_casing' in region_tags

    def test_excludes_appengine_lib(self):
        # Vendored App Engine lib/ folders are not scanned for tags.
        region_tags = file_utils.get_region_tags(TEST_DIR)
        assert 'appengine_lib' not in region_tags

    def test_includes_block_comments(self):
        region_tags = file_utils.get_region_tags(TEST_DIR)
        assert 'gae_block_comment_tag' in region_tags

    def test_excludes_node_modules(self):
        region_tags = file_utils.get_region_tags(TEST_DIR)
        assert 'node_modules' not in region_tags

    def test_includes_appengine_html(self):
        region_tags = file_utils.get_region_tags(TEST_DIR)
        assert 'gae_html_1' in region_tags
        assert 'gae_html_2' in region_tags

    def test_includes_appengine_css(self):
        region_tags = file_utils.get_region_tags(TEST_DIR)
        assert 'gae_css_1' in region_tags
        assert 'gae_css_2' in region_tags

    def test_excludes_webapps_outside_of_appengine(self):
        # HTML/CSS only count when they are part of an App Engine app.
        region_tags = file_utils.get_region_tags(TEST_DIR)
        assert 'css_outside_gae' not in region_tags
        assert 'html_outside_gae' not in region_tags

    def test_includes_yml_and_yaml(self):
        region_tags = file_utils.get_region_tags(TEST_DIR)
        assert 'yml_tag' in region_tags
        assert 'yaml_tag' in region_tags

    def test_includes_node_config_json_files(self):
        region_tags = file_utils.get_region_tags(TEST_DIR)
        assert 'package_json' in region_tags
        assert 'config_json' in region_tags

    def test_includes_directories_with_node_modules_suffix(self):
        # Only a directory named exactly 'node_modules' is excluded.
        region_tags = file_utils.get_region_tags(TEST_DIR)
        assert 'not_really_node_modules' in region_tags
"""
Tests for salt.modules.boto3_route53
"""
import random
import string
import salt.loader
import salt.modules.boto3_route53 as boto3_route53
from salt.utils.versions import LooseVersion
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase, skipIf
# boto3 is an optional dependency; record availability for the skip guards
# and the __virtual__ check below instead of failing at import time.
try:
    import boto3

    HAS_BOTO3 = True
except ImportError:
    HAS_BOTO3 = False

# the boto3_route53 module relies on the connect_to_region() method
# which was added in boto 2.8.0
# https://github.com/boto/boto/commit/33ac26b416fbb48a60602542b4ce15dcc7029f12
REQUIRED_BOTO3_VERSION = "1.2.1"
def __virtual__():
    """
    Returns True/False boolean depending on if Boto3 is installed and correct
    version.
    """
    # Guard: boto3 must be importable at all.
    if not HAS_BOTO3:
        return False
    installed = LooseVersion(boto3.__version__)
    required = LooseVersion(REQUIRED_BOTO3_VERSION)
    if installed < required:
        # Salt convention: a (False, reason) tuple explains why loading failed.
        reason = "The boto3 module must be greater or equal to version {}".format(
            REQUIRED_BOTO3_VERSION
        )
        return (False, reason)
    return True
# Dummy connection parameters for the mocked boto3 session.
REGION = "us-east-1"
ACCESS_KEY = "<KEY>"
SECRET_KEY = "<KEY>"
CONN_PARAMETERS = {
    "region": REGION,
    "key": ACCESS_KEY,
    "keyid": SECRET_KEY,
    "profile": {},
}
# Canned response mimicking a paginated list_resource_record_sets() reply.
LIST_RESOURCE_RECORD_SETS_RETURN = {
    "IsTruncated": True,
    "MaxItems": "100",
    "NextRecordName": "blog3.saltstack.furniture.",
    "NextRecordType": "CNAME",
    "ResourceRecordSets": [
        {
            "Name": "blog.saltstack.furniture.",
            "ResourceRecords": [{"Value": "127.0.0.1"}],
            "TTL": 60,
            "Type": "A",
        },
        {
            "Name": "blog2.saltstack.furniture.",
            "ResourceRecords": [{"Value": "127.0.0.1"}],
            "TTL": 60,
            "Type": "A",
        },
    ],
}
@skipIf(HAS_BOTO3 is False, "The boto module must be installed.")
@skipIf(
    LooseVersion(boto3.__version__) < LooseVersion(REQUIRED_BOTO3_VERSION),
    "The boto3 module must be greater or equal to version {}".format(
        REQUIRED_BOTO3_VERSION
    ),
)
class Boto3Route53TestCase(TestCase, LoaderModuleMockMixin):
    """
    TestCase for salt.modules.boto3_route53 moodule
    """

    conn = None  # MagicMock standing in for the boto3 client; set in setUp.

    def setup_loader_modules(self):
        # Provide the module with the salt __utils__ dunder it expects.
        self.opts = salt.config.DEFAULT_MINION_OPTS.copy()
        utils = salt.loader.utils(
            self.opts,
            whitelist=["boto3", "args", "systemd", "path", "platform"],
            context={},
        )
        return {boto3_route53: {"__utils__": utils}}

    def setUp(self):
        super().setUp()
        boto3_route53.__init__(self.opts)
        del self.opts

        # Set up MagicMock to replace the boto3 session
        # connections keep getting cached from prior tests, can't find the
        # correct context object to clear it. So randomize the cache key, to prevent any
        # cache hits
        CONN_PARAMETERS["key"] = "".join(
            random.choice(string.ascii_lowercase + string.digits) for _ in range(50)
        )

        self.conn = MagicMock()
        self.addCleanup(delattr, self, "conn")
        # Patch boto3.session.Session so client() hands back our mock conn.
        self.patcher = patch("boto3.session.Session")
        self.addCleanup(self.patcher.stop)
        self.addCleanup(delattr, self, "patcher")
        mock_session = self.patcher.start()

        session_instance = mock_session.return_value
        session_instance.configure_mock(client=MagicMock(return_value=self.conn))
        self.paginator = MagicMock()
        self.addCleanup(delattr, self, "paginator")
        self.conn.configure_mock(get_paginator=MagicMock(return_value=self.paginator))

    def test_get_resource_records(self):
        """
        Test get_resource_records behaviour.
        """
        # The patch below is not neccesary per se,
        # as .exists returns positive as long as no exception is raised.
        with patch.object(
            self.conn,
            "list_resource_record_sets",
            return_value=LIST_RESOURCE_RECORD_SETS_RETURN,
        ):
            self.assertEqual(
                boto3_route53.get_resource_records(
                    HostedZoneId="Z2P70J7EXAMPLE",
                    StartRecordName="blog.saltstack.furniture.",
                    StartRecordType="A",
                    **CONN_PARAMETERS
                ),
                [
                    {
                        "Name": "blog.saltstack.furniture.",
                        "ResourceRecords": [{"Value": "127.0.0.1"}],
                        "TTL": 60,
                        "Type": "A",
                    }
                ],
            )
| StarcoderdataPython |
3263336 | import sys
import setuptools
# Refuse to run on interpreters older than the version we support.
if sys.version_info < (3, 7, 0):
    raise OSError(f'Streamlit requires Python 3.7 and above, but yours is {sys.version}')

# Long description comes from the README when present (e.g. sdist builds);
# fall back to an empty string so installs from bare checkouts still work.
try:
    with open('README.md', encoding='utf8') as fp:
        _long_description = fp.read()
except FileNotFoundError:
    _long_description = ''

setuptools.setup(
    name="streamlit-jina",
    version="0.1.8",
    author="<NAME>",
    author_email="<EMAIL>",
    license='Apache 2.0',
    url="https://github.com/jina-ai/streamlit-jina",
    download_url='https://github.com/jina-ai/streamlit-jina/tags',
    description="Streamlit component for Jina neural search",
    long_description=_long_description,
    long_description_content_type="text/markdown",
    packages=setuptools.find_packages(),
    include_package_data=True,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Intended Audience :: Education',
        'Intended Audience :: Science/Research',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Unix Shell',
        'Environment :: Console',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Topic :: Database :: Database Engines/Servers',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
        'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
        'Topic :: Scientific/Engineering :: Image Recognition',
        'Topic :: Multimedia :: Video',
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Mathematics',
        'Topic :: Software Development',
        'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    keywords='jina cloud-native neural-search query search index elastic neural-network encoding '
             'embedding serving docker container image video audio deep-learning streamlit frontend',
    # Bug fix: was ">3.7", which excludes Python 3.7 itself and contradicted
    # both the runtime check above and the 3.7 classifier.
    python_requires=">=3.7",
    install_requires=["streamlit >= 0.63"],
)
| StarcoderdataPython |
94444 | from nose.tools import assert_equal, assert_almost_equal, assert_true, \
assert_false, assert_raises, assert_is_instance
from stats import mean, median, mode, std, var
# mean tests
def test_mean1():
    """mean() on uniform, positive, negative and single-element lists."""
    cases = [
        ([0, 0, 0, 0], 0),
        ([0, 200], 100),
        ([0, -200], -100),
        ([0], 0),
    ]
    for data, expected in cases:
        assert_equal(mean(data), expected)
def test_floating_mean1():
    """mean() returns a float when the true mean is not integral."""
    assert_equal(mean([1, 2]), 1.5)
# median tests
def test_median1():
    """median() on odd/even lengths, negatives and single elements."""
    cases = [
        ([0, 0, 0, 0], 0),
        ([0, 0, 0, 1], 0),
        ([0, 0, 1, 0, 0], 0),
        ([0, 1, 2, 3, 4], 2),
        ([0, 1, -1, 2, 3], 1),
        ([0, 200], 100),
        ([0, -200], -100),
        ([0], 0),
    ]
    for data, expected in cases:
        assert_equal(median(data), expected)
def test_floating_median1():
    """median() of an even-length list is the mean of the middle pair."""
    # Bug fix: this test previously called mean() instead of median(),
    # so median's fractional-result path was never exercised.
    obs = median([1, 2])
    exp = 1.5
    assert_equal(obs, exp)
# FIXME Put Mode tests here
def test_std1():
    """std of two values symmetric about their mean is half their spread."""
    assert_equal(std([0.0, 2.0]), 1.0)
def test_std2():
    """std of an empty list is defined as zero."""
    assert_equal(std([]), 0.0)
def test_std3():
    """std scales linearly with the spread of the data."""
    assert_equal(std([0.0, 4.0]), 2.0)
def test_std4():
    """std is invariant under a constant shift of the data."""
    assert_equal(std([1.0, 3.0]), 1.0)
def test_std5():
    """std of identical values is zero."""
    assert_equal(std([1.0, 1.0, 1.0]), 0.0)
def test_std6():
    """std of an infinite value (1e500 overflows to inf) is NotImplemented."""
    assert_equal(std([1e500]), NotImplemented)
def test_std7():
    """std with one infinite element (1e4242 overflows) is NotImplemented."""
    assert_equal(std([0.0, 1e4242]), NotImplemented)
# FIXME Put Variance tests here
| StarcoderdataPython |
1742470 | """External Routes functions tests."""
# run these tests like:
#
# flask_env=production python -m unittest test_external_routes.py
from unittest import TestCase
from external_routes import search_board_games, update_mechanics, update_categories, add_game_to_db
from models import db, User, Game, Collection, Category, Mechanic
from sqlalchemy.exc import IntegrityError
import sys
from app import app # nopep8
# Safety guard: refuse to run against anything but the dedicated test
# database, since setUp() deletes every row of every table.
if app.config['SQLALCHEMY_DATABASE_URI'] != 'postgresql:///boardgames-test':
    print('Wrong database, BAD BAD BAD - use boardgames-test')
    print('source .env-test')
    sys.exit(1)

# Create all tables once for the whole test module.
db.create_all()
class ExternalRoutesTestCase(TestCase):
    """Test External Routes functions.

    NOTE(review): these tests hit the live board-game API via
    search_board_games / update_* / add_game_to_db — they require network
    access and are not hermetic.
    """

    def setUp(self):
        """Create test client, add sample data."""
        # Start every test from an empty database.
        User.query.delete()
        Game.query.delete()
        Collection.query.delete()
        Mechanic.query.delete()
        Category.query.delete()
        self.client = app.test_client()

    def tearDown(self):
        """Clean up any fouled transaction."""
        db.session.rollback()

    def test_search_board_games(self):
        """Searches API using arguments sent to function"""
        name = 'name'
        search_params = {name: 'lol'}
        games = search_board_games(search_params)
        self.assertGreater(len(games), 0)
        # Every hit should contain the search term in its name.
        self.assertIn(search_params[name], games['games'][0]['name'].lower())

    def test_update_mechanics(self):
        """Updates mechanics in database from api"""
        predicted_response = 'good_update'
        original_mechanics = Mechanic.query.all()
        actual_response = update_mechanics()
        updated_mechanics = Mechanic.query.all()
        # Table starts empty and must gain rows after the update.
        self.assertEqual(len(original_mechanics), 0)
        self.assertEqual(actual_response, predicted_response)
        self.assertGreater(len(updated_mechanics), len(original_mechanics))

    def test_update_categories(self):
        """Updates categories in database from api"""
        predicted_response = 'good_update'
        original_categories = Category.query.all()
        actual_response = update_categories()
        updated_categories = Category.query.all()
        self.assertEqual(len(original_categories), 0)
        self.assertEqual(actual_response, predicted_response)
        self.assertGreater(len(updated_categories), len(original_categories))

    def test_add_game_to_db(self):
        """using the api id of a game, adds game to database and returns it, if game already in database, just returns it"""
        id = 'TAAifFP590'
        update_categories()
        update_mechanics()
        first_query_all = Game.query.all()
        first_game_search = Game.query.filter(
            Game.api_id == id).first()
        added_game = add_game_to_db(id)
        second_query_all = Game.query.all()
        second_game_search = Game.query.filter(
            Game.api_id == id).first()
        # Absent before the call, present (exactly once) after it.
        self.assertEqual(first_game_search, None)
        self.assertEqual(len(first_query_all), 0)
        self.assertEqual(second_game_search, added_game)
        self.assertEqual(second_game_search.api_id, id)
        self.assertEqual(len(second_query_all), 1)

    def test_add_game_to_db_duplicate_game(self):
        # Adding the same game twice must not create a second row.
        id = 'TAAifFP590'
        add_game = add_game_to_db(id)
        query_all = Game.query.all()
        db.session.close()
        add_game_again = add_game_to_db(id)
        query_all_again = Game.query.all()
        self.assertEqual(len(query_all), len(query_all_again))
        self.assertEqual(add_game.name, add_game_again.name)

    def test_add_game_to_db_not_a_game(self):
        # An unknown API id yields the sentinel string 'bad_response'.
        id = 'ImnotagameIPromise'
        add_game = add_game_to_db(id)
        self.assertEqual(add_game, 'bad_response')
| StarcoderdataPython |
3203466 | from base64 import b64encode, b64decode
from bs4 import BeautifulSoup as soup
from bz2 import BZ2File
from collections import Counter, OrderedDict
from copy import deepcopy
from datetime import datetime as dt, timedelta
try:
from etk.extractors.date_extractor import DateExtractor
except OSError:
from spacy.cli import download
download('en_core_web_sm')
from etk.extractors.date_extractor import DateExtractor
from etk.extractors.spacy_ner_extractor import SpacyNerExtractor
from hashlib import sha256
from json import load, dump, loads, dumps
from math import sqrt, ceil, floor
from nltk import word_tokenize, pos_tag, ne_chunk, download as nltk_download
from nltk.corpus import stopwords
from numpy import array
from os import makedirs, listdir, rename, remove, chmod
from os.path import dirname, abspath, exists, join
from pandas import DataFrame
from pickle import load as pload, dump as pdump
from pprint import pprint
from random import choices, shuffle, seed
from regex import findall, sub, search, compile, match, DOTALL, MULTILINE, VERBOSE
from requests import get, post, head
from selenium.common.exceptions import TimeoutException
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.options import Options
from selenium.common.exceptions import WebDriverException
from shutil import rmtree
from sklearn.cluster import KMeans
from sys import stdout, exc_info
from tarfile import open as tar_open
from threading import Thread
from time import strftime, sleep, time
from traceback import print_exc, format_exc
from urllib.parse import urljoin, quote
from hashlib import md5
from xml.etree.cElementTree import iterparse
from wikipediaapi import Wikipedia
# --- constants ---------------------------------------------------------------
PATH_RESOURCES = join(dirname(__file__), 'resources')
PATH_LOG = join(PATH_RESOURCES, 'log_%s.txt')
PATH_ALL_TABLES = join(PATH_RESOURCES, 'all_tables.jsonl')
PATTERN_LOG = '[%s] %s\n'
SCRIPT_ADD_RENDER = """
function pathTo(element) {
if (element === document) return ""
var ix = 0
var siblings = element.parentNode.childNodes
for (var i = 0; i < siblings.length; i++) {
if (siblings[i] === element) return pathTo(element.parentNode) + '/' + element.tagName + '[' + (ix + 1) + ']'
if (siblings[i].nodeType === 1 && siblings[i].tagName === element.tagName) ix++
}
}
var removeElements = []
function addRender(subtree) {
var style = getComputedStyle(subtree)
if (subtree.tagName == "TR" && subtree.children.length == 0 || subtree.offsetWidth == undefined || style["display"] == "none" || subtree.tagName == "SUP" && subtree.className == "reference") {
removeElements.push(subtree)
return
}
var serialStyle = ""
for (let prop of style) {
if (prop[0] != "-") {
serialStyle += prop + ":" + style[prop].replace(/:/g, "") + "|"
}
}
serialStyle += "width:" + subtree.offsetWidth / document.body.offsetWidth + "|height:" + subtree.offsetHeight / document.body.offsetHeight
if (subtree.tagName == "TD" || subtree.tagName == "TH") {
serialStyle += "|colspan:" + subtree.colSpan + "|rowspan:" + subtree.rowSpan
}
subtree.setAttribute("data-computed-style", serialStyle)
subtree.setAttribute("data-xpath", pathTo(subtree).toLowerCase())
for (let child of subtree.children) addRender(child)
}
function preprocess() {
var elements = document.querySelectorAll(injected_script_selector)
for (let subtree of elements) addRender(subtree)
for (let elem of removeElements) elem.remove()
}
const injected_script_selector = arguments[0]
if (document.readyState == 'complete') {
preprocess()
} else {
window.onload = function(){preprocess()}
}
"""
# --- import directives -------------------------------------------------------
# Ensure the on-disk resources directory exists before anything writes to it.
makedirs(PATH_RESOURCES, exist_ok=True)
# Download the NLTK stopwords corpus on first run only (probe, then fetch).
try:
    stopwords.words("english")
except:
    nltk_download('stopwords')
# --- format ------------------------------------------------------------------
def date_stamp():
''' Return the current timestamp. '''
return strftime('%Y-%m-%d, %H:%M:%S')
def bytes_to_human(size, decimal_places=2):
    """Return *size* bytes as a human readable string, e.g. '1.50kB'."""
    units = ('', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    divisions = 0
    # Divide by 1024 at most once per unit, mirroring the unit ladder.
    while size >= 1024 and divisions < len(units):
        size /= 1024
        divisions += 1
    unit = units[min(divisions, len(units) - 1)]
    return f'{size:.{decimal_places}f}{unit}B'
def seconds_to_human(seconds):
    """Return *seconds* as a zero-padded 'HH:MM:SS'-style string."""
    delta = timedelta(seconds=int(seconds))
    return str(delta).zfill(8)
def hashed(text):
    """Return the hex md5 digest of *text*. Not for security-sensitive use."""
    digest = md5(text.encode())
    return digest.hexdigest()
def fname_escape(text):
    """Escape filename-unsafe characters as '_<codepoint>_' sequences."""
    # Escape the escape character first so decoding is unambiguous.
    protected = text.replace('_', '_95_')
    return sub(r'([^\w\s\.])', lambda m: f'_{ord(m.group())}_', protected)
def fname_unescape(text):
    """Inverse of fname_escape: decode '_<codepoint>_' runs to characters."""
    decode = lambda m: chr(int(m.group()[1:-1]))
    return sub(r'(_\d+_)', decode, text)
# --- log ---------------------------------------------------------------------
def log(log_name, text):
    ''' Logs the given text to the log specified, and prints it. '''
    # Prepend a timestamp, echo to stdout, and append to the named log file.
    text = PATTERN_LOG % (date_stamp(), text)
    print('[%s] %s' % (log_name, text), end='')
    with open(PATH_LOG % log_name, 'a', encoding='utf-8') as fp:
        fp.write(text)
def log_error():
    ''' Used inside an except sentence, logs the error to the error log. '''
    # format_exc() captures the traceback of the exception being handled.
    log('error', format_exc())
def cache(target, args, identifier=None, cache_life=3 * 24 * 3600):
    ''' Run the target function with the given args, and store it to a pickled
    cache folder using the given identifier or the name of the function. The
    next time it is executed, the cached output is returned unless cache_life
    time expires. '''
    if identifier == None: identifier = target.__name__
    # Strip characters that are illegal or risky in file names.
    identifier = sub(r'[/\\\*;\[\]\'\":=,<>]', '_', identifier)
    path = join(PATH_RESOURCES, f'.pickled/{identifier}.pk')
    makedirs(dirname(path), exist_ok=True)
    now = time()
    if exists(path):
        with open(path, 'rb') as fp:
            # Entries are stored as (save_time, value) pairs.
            save_time, value = pload(fp)
            if now - save_time <= cache_life:
                return value
    # Cache miss or expired entry: recompute and overwrite.
    res = target(*args)
    with open(path, 'wb') as fp:
        pdump((now, res), fp, protocol=3)
    return res
# --- network -----------------------------------------------------------------
def download_file(url, path=None, chunk_size=10**5):
    ''' Downloads a file keeping track of the progress. '''
    # Default the destination to the last path segment of the URL.
    if path == None: path = url.split('/')[-1]
    r = get(url, stream=True)
    # NOTE(review): assumes the server sends Content-Length; int(None)
    # would raise here for chunked responses — confirm callers' URLs.
    total_bytes = int(r.headers.get('content-length'))
    bytes_downloaded = 0
    start = time()
    print('Downloading %s (%s)' % (url, bytes_to_human(total_bytes)))
    with open(path, 'wb') as fp:
        for chunk in r.iter_content(chunk_size=chunk_size):
            if not chunk: continue
            fp.write(chunk)
            bytes_downloaded += len(chunk)
            # Render an in-place progress bar with speed and ETA.
            percent = bytes_downloaded / total_bytes
            bar = ('█' * int(percent * 32)).ljust(32)
            time_delta = time() - start
            eta = seconds_to_human((total_bytes - bytes_downloaded) * time_delta / bytes_downloaded)
            avg_speed = bytes_to_human(bytes_downloaded / time_delta).rjust(9)
            stdout.flush()
            stdout.write('\r %6.02f%% |%s| %s/s eta %s' % (100 * percent, bar, avg_speed, eta))
    print()
# Module-level singleton Firefox webdriver shared by the helpers below.
_driver = None


def get_driver(headless=True, disable_images=True, open_links_same_tab=False):
    ''' Returns a Firefox webdriver, and run one if there is no any active.

    The keyword flags are only honoured when the driver is first created;
    later calls return the cached instance unchanged. '''
    global _driver
    if _driver == None:
        opts = Options()
        # Disable the Flash plugin IPC.
        opts.set_preference('dom.ipc.plugins.enabled.libflashplayer.so', 'false')
        if open_links_same_tab:
            opts.set_preference('browser.link.open_newwindow.restriction', 0)
            opts.set_preference('browser.link.open_newwindow', 1)
        if headless: opts.set_headless()
        # 2 == block all image loading, which speeds up scraping.
        if disable_images: opts.set_preference('permissions.default.image', 2)
        _driver = Firefox(options=opts)
        _driver.set_page_load_timeout(15)
    return _driver
def close_driver():
    """Close the shared Firefox webdriver, if one is running.

    Also resets the module-level handle so that a subsequent call to
    get_driver() starts a fresh browser instead of returning the closed one.
    """
    global _driver
    if _driver is not None:
        print('Closing Firefox driver')
        _driver.close()
        # Bug fix: previously the cached handle was never cleared, so
        # get_driver() kept handing back an already-closed driver.
        _driver = None
def get_with_render(url, render_selector='table', headless=True, disable_images=True, open_links_same_tab=False):
    ''' Downloads a page and renders it to return the page source, the width,
    and the height in pixels. Elements on the subtree selected using
    render_selector contain a data-computed-style attribute and a data-xpath. '''
    driver = get_driver(headless, disable_images, open_links_same_tab)
    driver.get(url)
    # Inject the JS that annotates each element with computed style and xpath.
    driver.execute_script(SCRIPT_ADD_RENDER, render_selector)
    # Give the injected script a moment to finish before reading the DOM.
    sleep(.5)
    return driver.page_source
# --- vector ------------------------------------------------------------------
def vectors_average(vectors):
    ''' Average a list of mixed feature vectors. Numerical features use the
    arithmetic mean; categorical (str) features use the most common value.
    Empty vectors are ignored; an empty input yields an empty dict. '''
    non_empty = [vec for vec in vectors if len(vec)]
    averaged = {}
    if non_empty:
        for feat in non_empty[0]:
            values = [vec[feat] for vec in non_empty]
            if type(non_empty[0][feat]) == str:
                averaged[feat] = Counter(values).most_common(1)[0][0]
            else:
                averaged[feat] = sum(values) / len(values)
    return averaged
def vectors_weighted_average(vectors):
    ''' Given a list of tuples of type <weight, mixed feature vector>, returns
    the weighted average of all them. For numerical features, aritmetic average
    is used. For categorical ones, weighted frequencies are used to return the
    most common. If every weight is zero, a uniform weighting is used. '''
    if len(vectors) == 1: return vectors[0][1]
    res = {}
    total_weight = sum(w for w, _ in vectors)
    if total_weight == 0:
        # Bug fix: the original mutated vectors[n][0] in place, which raises
        # TypeError on the documented tuple input. Rebuild the list instead,
        # falling back to uniform weights.
        total_weight = len(vectors)
        vectors = [(1, fs) for _, fs in vectors]
    # Normalize weights so they sum to 1.
    vectors = [(w / total_weight, fs) for w, fs in vectors]
    for f in vectors[0][1]:
        if type(vectors[0][1][f]) == str:
            # Accumulate the weight carried by each categorical value and
            # keep the heaviest one.
            sum_feat = {}
            for weight, features in vectors:
                if features[f] in sum_feat:
                    sum_feat[features[f]] += weight
                else:
                    sum_feat[features[f]] = weight
            res[f] = max(sum_feat.items(), key=lambda v: v[1])[0]
        else:
            # Weighted arithmetic mean for numerical features.
            val = 0
            for weight, features in vectors:
                val += weight * features[f]
            res[f] = val
    return res
def vectors_difference(v1, v2, prefix=''):
    ''' Feature-wise difference of two mixed feature vectors: absolute value
    for numbers, Gower-style 0/1 mismatch for strings. Result keys get
    *prefix* prepended. '''
    diff = {}
    for feat, value in v1.items():
        if type(value) == str:
            diff[prefix + feat] = int(value != v2[feat])
        else:
            diff[prefix + feat] = abs(value - v2[feat])
    return diff
def vector_module(vector):
    ''' Euclidean norm of the numerical entries of a mixed feature vector;
    categorical (str) entries are ignored. '''
    square_sum = sum(v * v for v in vector.values() if type(v) != str)
    return sqrt(square_sum)
def binarize_categorical(vectors):
    ''' Given a 2-D list of mixed feature vectors, transform every categorical
    feature into a binary one, using the seen values of all the vectors. '''
    # Work on a copy so callers' cells are not mutated.
    vectors = deepcopy(vectors)
    # The first non-empty cell fixes which features are categorical.
    # NOTE(review): next() raises StopIteration if every cell is empty —
    # confirm callers never pass an all-empty table.
    cat_vector = next([k for k, v in cell.items() if type(v) == str] for row in vectors for cell in row if len(cell))
    for f in cat_vector:
        # One-hot encode feature f over all values observed in any cell.
        values = list(set(cell[f] for row in vectors for cell in row if len(cell)))
        for r, row in enumerate(vectors):
            for c, cell in enumerate(row):
                if len(cell) == 0: continue
                for v in values:
                    vectors[r][c][f'{f}-{v}'] = 1 if v == cell[f] else 0
                # Drop the original categorical feature.
                del vectors[r][c][f]
    return vectors
# --- parsing -----------------------------------------------------------------
# Shared extractor instance; constructing it is expensive, so do it once.
_find_dates_extractor = DateExtractor()


def find_dates(text):
    # Returns the first extracted date value, or None (implicitly) when
    # nothing is found or the extractor fails.
    # NOTE(review): the bare except also swallows KeyboardInterrupt/SystemExit.
    try:
        res = _find_dates_extractor.extract(text, prefer_language_date_order=False)
        if len(res): return res[0].value
    except:
        log('info', f'ETK DateExtractor raised an error on value {text}. Using RegEx fallback instead.')
# Shared spaCy NER extractor; the constructor parameter is unused by ETK.
_find_entities_extractor = SpacyNerExtractor('dummy_parameter')


def find_entities(text):
    # Returns a {entity_text: tag} dict; empty on extractor failure.
    # NOTE(review): the bare except also swallows KeyboardInterrupt/SystemExit.
    try:
        return {ext.value: ext.tag for ext in _find_entities_extractor.extract(text)}
    except:
        log('info', f'ETK SpacyNerExtractor raised an error on value {text}.')
        return {}
# --- math --------------------------------------------------------------------
def distinct(lst, uniqueness_function):
    ''' Stable filter: keep only the first element of *lst* for each distinct
    value of uniqueness_function.
    I.e.: `distinct([1, 5, 7, 9], lambda x: x % 3)` would return [1, 5, 9]. '''
    # A list (not a set) of seen keys preserves support for unhashable keys.
    seen_keys = []
    kept = []
    for element in lst:
        key = uniqueness_function(element)
        if key not in seen_keys:
            seen_keys.append(key)
            kept.append(element)
    return kept
12815 | <reponame>learning-nn/nn_from_scratch
import numpy
import numpy as np
# Toy two-layer dense network forward pass (no activation functions).
# A batch of 3 identical samples, each with 4 features.
inputs = [[1.2, 2.1, 3.4, 1.2],
          [1.2, 2.1, 3.4, 1.2],
          [1.2, 2.1, 3.4, 1.2]]
print(numpy.shape(inputs))

# Layer 1: 3 neurons, each with 4 weights (one per input feature).
weights = [[4.1, -4.5, 3.1, 2.3],
           [-4.1, 4.5, 2.1, 2.3],
           [4.1, 4.5, 3.1, -2.3]]
print(numpy.shape(weights))
biases = [1, 2, 3]

# Layer 2: 3 neurons, each taking the 3 outputs of layer 1.
weights2 = [[4.1, -4.5, 3.1],
            [-4.1, 4.5, 2.1],
            [4.1, 4.5, 3.1]]
biases2 = [1, 2, 3]

# Forward pass: X @ W.T + b for each layer in turn.
layer1_outputs = np.dot(inputs, np.array(weights).T) + biases
layer2_outputs = np.dot(layer1_outputs, np.array(weights2).T) + biases2
print(layer2_outputs)
| StarcoderdataPython |
3371394 | <gh_stars>1-10
from django.conf.urls import url
from django.conf.urls import include
from rest_framework.routers import DefaultRouter
from .views import (HelloApiView,HelloViewSet,UserProfileViewSet,LoginViewSet,
UserProfileFeedViewSet)
# The router auto-generates URL conf entries for the registered ViewSets.
router = DefaultRouter()
router.register('hello-viewset', HelloViewSet, base_name='hello-viewset')
# NOTE(review): 'base_name' was renamed to 'basename' in DRF 3.9+ — confirm
# the pinned djangorestframework version still accepts 'base_name'.
router.register('profile', UserProfileViewSet)
router.register('login', LoginViewSet, base_name='login')
router.register('feed', UserProfileFeedViewSet)

urlpatterns = [
    url(r'^hello-view/', HelloApiView.as_view()),
    # Delegate everything else to the router-generated URLs.
    url(r'', include(router.urls)),
]
156544 | <filename>analisis-de-algoritmos/algoritmos/binarySearch.py
#
# The Binary Search
#
# cretid: https://interactivepython.org/runestone/static/pythonds/SortSearch/TheBinarySearch.html
def binary_search(vector, item):
    """Return True if *item* occurs in the sorted list *vector*."""
    low = 0
    high = len(vector) - 1
    while low <= high:
        mid = (low + high) // 2
        if vector[mid] == item:
            return True
        if item < vector[mid]:
            high = mid - 1
        else:
            low = mid + 1
    return False
# Quick demo of the search above.
testlist = [0, 1, 2, 8, 13, 17, 19, 32, 42,]
# Bug fix: the Python 2 `print x` statements below were a SyntaxError
# under Python 3; converted to print() calls.
print(binary_search(testlist, 3))
print(binary_search(testlist, 13))
| StarcoderdataPython |
3234230 | <filename>bspump/random/source.py
import logging
import asyncio
import random
from ..abc.source import TriggerSource
L = logging.getLogger(__name__)
class RandomSource(TriggerSource):
    '''
    `RandomSource` is mostly meant for testing. It
    generates n (specified in `Config` as `number`, default is 1000) events per trigger fire.
    There can be 2 options of usage:
    a) User provides `choice` as an array of values to choose randomly;
    b) The random integer between `Config['lower_bound']` and `Config['upper_bound']`
    If `field` from `Config` is not an empty string, the random source generates dictionary
    event `{`field`: `generated value`}`. Otherwise it's a random number.
    '''

    ConfigDefaults = {
        'field': '',
        'number': 1000,
        'lower_bound': 0,
        'upper_bound': 1000,
        'event_idle_time': 0.01,
        'events_till_idle': 10000,
    }

    def __init__(self, app, pipeline, choice=None, id=None, config=None):
        super().__init__(app, pipeline, id=id, config=config)
        # Parse config values up front; Config stores raw strings.
        self.Number = int(self.Config['number'])
        self.LowerBound = int(self.Config['lower_bound'])
        self.UpperBound = int(self.Config['upper_bound'])
        self.EventIdleTime = float(self.Config['event_idle_time'])
        self.EventsTillIdle = int(self.Config['events_till_idle'])
        # Counts events emitted since the last idle sleep (see cycle()).
        self.EventCounter = 0
        self.Choice = None
        self.Field = None
        if choice is not None:
            self.Choice = choice
        if self.Config['field'] != '':
            self.Field = self.Config['field']

    def generate_random(self):
        '''
        Override this method to generate differently
        '''
        if self.Choice is not None:
            # Option (a): draw from the user-provided choices.
            if self.Field is not None:
                return {self.Field: random.choice(self.Choice)}
            return random.choice(self.Choice)
        else:
            # Option (b): uniform integer in [LowerBound, UpperBound].
            if self.Field is not None:
                return {self.Field: random.randint(self.LowerBound, self.UpperBound)}
            return random.randint(self.LowerBound, self.UpperBound)

    async def cycle(self):
        # Emit Number events, sleeping briefly every EventsTillIdle events
        # so other coroutines on the loop are not starved.
        for i in range(0, self.Number):
            event = self.generate_random()
            await self.process(event)
            if self.EventCounter >= self.EventsTillIdle:
                await asyncio.sleep(self.EventIdleTime)
                self.EventCounter = 0
            self.EventCounter += 1
| StarcoderdataPython |
1738397 | <reponame>uwase-diane/NewsAPI
class Article:
    '''
    Article class to define Article objects holding one news article's data.
    '''

    def __init__(self, urlToImage, title, description, url, publishedAt):
        # All fields are stored verbatim as returned by the news API.
        self.urlToImage = urlToImage
        self.title = title
        self.description = description
        self.url = url
        self.publishedAt = publishedAt
137985 | <gh_stars>10-100
from pydantic import BaseModel
class MyBase(BaseModel):
    """MyBase"""

    # The bare string after the field is an attribute docstring, picked up
    # by documentation tools (presumably intentional — confirm).
    field_on_base: str
    """Base Field"""
class MySubclass(MyBase):
    """MySubClass"""

    # Inherits field_on_base; adds one field of its own.
    field_on_subclass: str
    """Subclass field"""
| StarcoderdataPython |
3287681 | import warnings
import logging
from .base import Attack
from .base import call_decorator
from .saltandpepper import SaltAndPepperNoiseAttack
from .. import rng
class PointwiseAttack(Attack):
    """Starts with an adversarial and performs a binary search between
    the adversarial and the original for each dimension of the input
    individually.

    References
    ----------
    .. [1] <NAME>, <NAME>, <NAME>, <NAME>: "Towards the first
           adversarially robust neural network model on MNIST", ICLR (2019)
           https://arxiv.org/abs/1805.09190
    """

    @call_decorator
    def __call__(self, input_or_adv, label=None, unpack=True,
                 starting_point=None, initialization_attack=None):
        """Starts with an adversarial and performs a binary search between
        the adversarial and the original for each dimension of the input
        individually.

        Parameters
        ----------
        input_or_adv : `numpy.ndarray` or :class:`Adversarial`
            The original, unperturbed input as a `numpy.ndarray` or
            an :class:`Adversarial` instance.
        label : int
            The reference label of the original input. Must be passed
            if `a` is a `numpy.ndarray`, must not be passed if `a` is
            an :class:`Adversarial` instance.
        unpack : bool
            If true, returns the adversarial input, otherwise returns
            the Adversarial object.
        starting_point : `numpy.ndarray`
            Adversarial input to use as a starting point, in particular
            for targeted attacks.
        initialization_attack : :class:`Attack`
            Attack to use to find a starting point. Defaults to
            SaltAndPepperNoiseAttack.
        """
        a = input_or_adv
        del input_or_adv
        del label
        del unpack

        self._starting_point = starting_point
        self._initialization_attack = initialization_attack

        self.initialize_starting_point(a)

        if a.perturbed is None:
            warnings.warn(
                'Initialization failed. If the criterion is targeted,'
                ' it might be necessary to pass an explicit starting'
                ' point or targeted initialization attack.')
            return

        # Work on flattened views; reshape only when querying the model.
        shape = a.unperturbed.shape
        N = a.unperturbed.size
        original = a.unperturbed.reshape(-1)
        x = a.perturbed.copy().reshape(-1)
        assert original.dtype == x.dtype

        # Phase 1: greedily reset individual dimensions to their original
        # values as long as the input stays adversarial.
        while True:
            # draw random shuffling of all indices
            indices = list(range(N))
            rng.shuffle(indices)

            for index in indices:
                # change index
                old_value = x[index]
                new_value = original[index]
                if old_value == new_value:
                    continue
                x[index] = new_value

                # check if still adversarial
                _, is_adversarial = a.forward_one(x.reshape(shape))

                # if adversarial, restart from there
                if is_adversarial:
                    logging.info('Reset value to original -> new distance:'
                                 ' {}'.format(a.distance))
                    break

                # if not, undo change
                x[index] = old_value
            else:
                # no index was successful
                break

        # Phase 2: per-dimension binary search between the adversarial value
        # and the original value, repeated until no dimension improves.
        logging.info('Starting binary searches')
        while True:
            # draw random shuffling of all indices
            indices = list(range(N))
            rng.shuffle(indices)

            # whether that run through all values made any improvement
            improved = False

            logging.info('Starting new loop through all values')
            for index in indices:
                # change index
                old_value = x[index]
                original_value = original[index]
                if old_value == original_value:
                    continue
                x[index] = original_value

                # check if still adversarial
                _, is_adversarial = a.forward_one(x.reshape(shape))

                # if adversarial, no binary search needed
                if is_adversarial:  # pragma: no cover
                    logging.info('Reset value at {} to original ->'
                                 ' new distance: {}'.format(
                                     index, a.distance))
                    improved = True
                else:
                    # binary search
                    adv_value = old_value
                    non_adv_value = original_value
                    best_adv_value = self.binary_search(
                        a, x, index, adv_value, non_adv_value, shape)
                    if old_value != best_adv_value:
                        x[index] = best_adv_value
                        improved = True
                        logging.info('Set value at {} from {} to {}'
                                     ' (original has {}) ->'
                                     ' new distance: {}'.format(
                                         index, old_value, best_adv_value,
                                         original_value, a.distance))

            if not improved:
                # no improvement for any of the indices
                break

    def binary_search(self, a, x, index, adv_value, non_adv_value, shape):
        # 10 bisection steps between a known-adversarial and a known
        # non-adversarial value for one dimension; returns the closest
        # still-adversarial value found. Mutates x[index] as a side effect.
        for i in range(10):
            next_value = (adv_value + non_adv_value) / 2
            x[index] = next_value
            _, is_adversarial = a.forward_one(x.reshape(shape))
            if is_adversarial:
                adv_value = next_value
            else:
                non_adv_value = next_value
        return adv_value

    def initialize_starting_point(self, a):
        # Ensure `a` has some adversarial to start from: reuse a previous
        # one, use an explicit starting point, or run an init attack.
        starting_point = self._starting_point
        init_attack = self._initialization_attack

        if a.perturbed is not None:
            if starting_point is not None:  # pragma: no cover
                warnings.warn(
                    'Ignoring starting_point because the attack'
                    ' is applied to a previously found adversarial.')
            if init_attack is not None:  # pragma: no cover
                warnings.warn(
                    'Ignoring initialization_attack because the attack'
                    ' is applied to a previously found adversarial.')
            return

        if starting_point is not None:
            a.forward_one(starting_point)
            assert a.perturbed is not None, (
                'Invalid starting point provided. Please provide a starting point that is adversarial.')
            return

        if init_attack is None:
            init_attack = SaltAndPepperNoiseAttack
            logging.info(
                'Neither starting_point nor initialization_attack given.'
                ' Falling back to {} for initialization.'.format(
                    init_attack.__name__))

        if issubclass(init_attack, Attack):
            # instantiate if necessary
            init_attack = init_attack()

        init_attack(a)
1703721 | import random
from discord.ext import commands
from extras.constants import COMMAND_PREFIXES
# from extras.errors import MusicErros as errors
def get_prefix(client, message):
    """
    A callable Prefix for our client. This could be edited to allow per server prefixes.
    """
    prefixes = COMMAND_PREFIXES

    # Check to see if we are outside of a guild, e.g. DMs.
    if not message.guild:
        # Fixed fallback prefix for direct messages.
        return '?'

    # If we are in a guild, we allow for the user to mention us or use any of
    # the prefixes in our list.
    return commands.when_mentioned_or(*prefixes)(client, message)
# class Queue:
# '''
# Queue used in Music Cog.
# Attributes:
# - _queue (lst): Queue itself
# - is_empty (bool): True if queue is empty
# - current_track (str): Url of current track
# - upcoming (lst): List of all upcoming tracks
# - history (lst): List of all past tracks
# '''
# def __init__(self):
# self._queue = []
# self.position = 0
#
# @property
# def is_empty(self):
# '''True if queue is empty.'''
# return not self._queue
#
# @property
# def current_track(self):
# '''Returns the current track.'''
# if not self._queue:
# raise errors.QueueIsEmpty
#
# if self.position <= len(self._queue) - 1:
# return self._queue[self.position]
#
# return None
#
# @property
# def upcoming(self):
# '''Returns all upcoming items in a list.'''
# if self.is_empty:
# raise errors.QueueIsEmpty
#
# return self._queue[self.position + 1:]
#
# @property
# def history(self):
# '''Return all past items in a list.'''
# if self.is_empty:
# raise errors.QueueIsEmpty
#
# return self._queue[:self.position]
#
# def __len__(self):
# return len(self._queue)
#
# def add(self, *args):
# '''Adds new items to the end of the queue.'''
# self._queue.extend(args)
#
# def get_next_track(self):
# '''Returns the next track. None if already done.'''
# if self.is_empty:
# raise errors.QueueIsEmpty
#
# self.position += 1
#
# if self.position < 0 or self.position > len(self._queue) - 1:
# return None
#
# return self._queue[self.position]
#
# def shuffle(self):
# '''Shuffles the current reminder queue.'''
# if self.is_empty:
# raise errors.QueueIsEmpty
#
# upcoming = self.upcoming
# random.shuffle(upcoming)
# self._queue = self._queue[:self.position + 1]
# self._queue.extend(upcoming)
#
# def empty(self):
# '''
# Clears the Queue. (does NOT delete it, must be managed by garbege collector)
# '''
# self._queue.clear()
# self.position = 0
| StarcoderdataPython |
156974 | """Bundles all exceptions and warnings used in the package prodsim"""
class InvalidValue(Exception):
    """Raised when a value is not within the permissible range."""
    pass
class InvalidType(Exception):
    """Raised when a value has the wrong type."""
    pass
class MissingParameter(Exception):
    """Raised when a required parameter is missing."""
    pass
class MissingAttribute(Exception):
    """Raised when an attribute that has not been defined is used."""
    pass
class NotSupportedParameter(Exception):
    """Raised when a parameter that is not defined/supported is passed."""
    pass
class FileNotFound(Exception):
    """Raised when a file could not be found."""
    # NOTE(review): the builtin FileNotFoundError may be preferable here;
    # the custom name is kept for package API compatibility.
    pass
class InvalidFormat(Exception):
    """Raised when a parameter has the wrong format."""
class UndefinedFunction(Exception):
    """Raised when a function that is not defined is used."""
    pass
class UndefinedObject(Exception):
    """Raised when a referenced object is not defined."""
    pass
class InvalidFunction(Exception):
    """Raised when a function is not valid."""
    pass
class InvalidYield(Exception):
    """Raised when a generator function does not yield the correct types."""
class InvalidSignature(Exception):
    """Raised when a function signature is not as expected."""
    # NOTE(review): original docstring was truncated ("Raises when a
    # signature"); wording above inferred from the sibling BadSignature
    # warning -- confirm against call sites.
class ToManyArguments(Exception):
    # NOTE(review): class name typo ("ToMany" -> "TooMany") kept unchanged
    # for API compatibility; renaming would break callers.
    """Raised when too many arguments are passed."""
    pass
class MissingData(Exception):
    """Raised when required data is missing."""
    pass
class BlockedIdentifier(Exception):
    """Raised when an identifier is already blocked (in use)."""
    pass
class InfiniteLoop(Exception):
    """Raised when a function contains an infinite loop."""
class BadType(Warning):
    """Warning issued when a parameter has a bad type."""
    pass
class BadSignature(Warning):
    """Warning issued when an argument does not have the expected name."""
    pass
class BadYield(Warning):
    """Warning issued when a yield is possible but can lead to problems."""
    pass
class NotDefined(Warning):
    """Warning issued when an identifier that was not pre-defined is used."""
    pass
| StarcoderdataPython |
4826970 | <gh_stars>1-10
# templatefilters.py - common template expansion filters
#
# Copyright 2005-2008 <NAME> <<EMAIL>>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
import os
import re
import time
from .i18n import _
from .node import hex
from . import (
encoding,
error,
pycompat,
registrar,
smartset,
templateutil,
url,
util,
)
from .utils import (
cborutil,
dateutil,
stringutil,
)
urlerr = util.urlerr
urlreq = util.urlreq
# filters are callables like:
# fn(obj)
# with:
# obj - object to be filtered (text, date, list and so on)
filters = {}
templatefilter = registrar.templatefilter(filters)
@templatefilter(b'addbreaks', intype=bytes)
def addbreaks(text):
"""Any text. Add an XHTML "<br />" tag before the end of
every line except the last.
"""
return text.replace(b'\n', b'<br/>\n')
agescales = [
(b"year", 3600 * 24 * 365, b'Y'),
(b"month", 3600 * 24 * 30, b'M'),
(b"week", 3600 * 24 * 7, b'W'),
(b"day", 3600 * 24, b'd'),
(b"hour", 3600, b'h'),
(b"minute", 60, b'm'),
(b"second", 1, b's'),
]
@templatefilter(b'age', intype=templateutil.date)
def age(date, abbrev=False):
"""Date. Returns a human-readable date/time difference between the
given date/time and the current date/time.
"""
def plural(t, c):
if c == 1:
return t
return t + b"s"
def fmt(t, c, a):
if abbrev:
return b"%d%s" % (c, a)
return b"%d %s" % (c, plural(t, c))
now = time.time()
then = date[0]
future = False
if then > now:
future = True
delta = max(1, int(then - now))
if delta > agescales[0][1] * 30:
return b'in the distant future'
else:
delta = max(1, int(now - then))
if delta > agescales[0][1] * 2:
return dateutil.shortdate(date)
for t, s, a in agescales:
n = delta // s
if n >= 2 or s == 1:
if future:
return b'%s from now' % fmt(t, n, a)
return b'%s ago' % fmt(t, n, a)
@templatefilter(b'basename', intype=bytes)
def basename(path):
"""Any text. Treats the text as a path, and returns the last
component of the path after splitting by the path separator.
For example, "foo/bar/baz" becomes "baz" and "foo/bar//" becomes "".
"""
return os.path.basename(path)
def _tocborencodable(obj):
if isinstance(obj, smartset.abstractsmartset):
return list(obj)
return obj
@templatefilter(b'cbor')
def cbor(obj):
"""Any object. Serializes the object to CBOR bytes."""
# cborutil is stricter about type than json() filter
obj = pycompat.rapply(_tocborencodable, obj)
return b''.join(cborutil.streamencode(obj))
@templatefilter(b'commondir')
def commondir(filelist):
    """List of text. Treats each list item as file name with /
    as path separator and returns the longest common directory
    prefix shared by all list items.
    Returns the empty string if no common prefix exists.

    The list items are not normalized, i.e. "foo/../bar" is handled as
    file "bar" in the directory "foo/..". Leading slashes are ignored.

    For example, ["foo/bar/baz", "foo/baz/bar"] becomes "foo" and
    ["foo/bar", "baz"] becomes "".
    """

    def common(a, b):
        # Truncate both lists to the length of the shorter one so the
        # element-wise comparison below stays in bounds.
        # BUGFIX: the previous code assigned ``a = b[:len(a)]`` here,
        # which made ``a`` a full copy of ``b`` and caused the equality
        # check below to succeed even when no common prefix existed.
        if len(a) > len(b):
            a = a[: len(b)]
        elif len(b) > len(a):
            b = b[: len(a)]
        if a == b:
            return a
        for i in pycompat.xrange(len(a)):
            if a[i] != b[i]:
                return a[:i]
        return a

    try:
        if not filelist:
            return b""
        dirlist = [f.lstrip(b'/').split(b'/')[:-1] for f in filelist]
        if len(dirlist) == 1:
            return b'/'.join(dirlist[0])
        a = min(dirlist)
        b = max(dirlist)
        # The common prefix of a and b is shared with all
        # elements of the list since Python sorts lexicographical
        # and [1, x] after [1].
        return b'/'.join(common(a, b))
    except TypeError:
        raise error.ParseError(_(b'argument is not a list of text'))
@templatefilter(b'count')
def count(i):
"""List or text. Returns the length as an integer."""
try:
return len(i)
except TypeError:
raise error.ParseError(_(b'not countable'))
@templatefilter(b'dirname', intype=bytes)
def dirname(path):
"""Any text. Treats the text as a path, and strips the last
component of the path after splitting by the path separator.
"""
return os.path.dirname(path)
@templatefilter(b'domain', intype=bytes)
def domain(author):
    """Any text. Finds the first string that looks like an email
    address, and extracts just the domain component. Example:
    ``User <user@example.com>`` becomes ``example.com``.
    """
    # Everything after the first '@', truncated at a closing '>' when
    # one is present, is treated as the domain.
    _, at_sep, rest = author.partition(b'@')
    if not at_sep:
        return b''
    domainpart, _, _ = rest.partition(b'>')
    return domainpart
@templatefilter(b'email', intype=bytes)
def email(text):
"""Any text. Extracts the first string that looks like an email
address. Example: ``User <<EMAIL>>`` becomes
``<EMAIL>``.
"""
return stringutil.email(text)
@templatefilter(b'escape', intype=bytes)
def escape(text):
"""Any text. Replaces the special XML/XHTML characters "&", "<"
and ">" with XML entities, and filters out NUL characters.
"""
return url.escape(text.replace(b'\0', b''), True)
para_re = None
space_re = None
def fill(text, width, initindent=b'', hangindent=b''):
'''fill many paragraphs with optional indentation.'''
global para_re, space_re
if para_re is None:
para_re = re.compile(b'(\n\n|\n\\s*[-*]\\s*)', re.M)
space_re = re.compile(br' +')
def findparas():
start = 0
while True:
m = para_re.search(text, start)
if not m:
uctext = encoding.unifromlocal(text[start:])
w = len(uctext)
while w > 0 and uctext[w - 1].isspace():
w -= 1
yield (
encoding.unitolocal(uctext[:w]),
encoding.unitolocal(uctext[w:]),
)
break
yield text[start : m.start(0)], m.group(1)
start = m.end(1)
return b"".join(
[
stringutil.wrap(
space_re.sub(b' ', stringutil.wrap(para, width)),
width,
initindent,
hangindent,
)
+ rest
for para, rest in findparas()
]
)
@templatefilter(b'fill68', intype=bytes)
def fill68(text):
"""Any text. Wraps the text to fit in 68 columns."""
return fill(text, 68)
@templatefilter(b'fill76', intype=bytes)
def fill76(text):
"""Any text. Wraps the text to fit in 76 columns."""
return fill(text, 76)
@templatefilter(b'firstline', intype=bytes)
def firstline(text):
"""Any text. Returns the first line of text."""
try:
return text.splitlines(True)[0].rstrip(b'\r\n')
except IndexError:
return b''
@templatefilter(b'hex', intype=bytes)
def hexfilter(text):
"""Any text. Convert a binary Mercurial node identifier into
its long hexadecimal representation.
"""
return hex(text)
@templatefilter(b'hgdate', intype=templateutil.date)
def hgdate(text):
"""Date. Returns the date as a pair of numbers: "1157407993
25200" (Unix timestamp, timezone offset).
"""
return b"%d %d" % text
@templatefilter(b'isodate', intype=templateutil.date)
def isodate(text):
"""Date. Returns the date in ISO 8601 format: "2009-08-18 13:00
+0200".
"""
return dateutil.datestr(text, b'%Y-%m-%d %H:%M %1%2')
@templatefilter(b'isodatesec', intype=templateutil.date)
def isodatesec(text):
"""Date. Returns the date in ISO 8601 format, including
seconds: "2009-08-18 13:00:13 +0200". See also the rfc3339date
filter.
"""
return dateutil.datestr(text, b'%Y-%m-%d %H:%M:%S %1%2')
def indent(text, prefix, firstline=b''):
'''indent each non-empty line of text after first with prefix.'''
lines = text.splitlines()
num_lines = len(lines)
endswithnewline = text[-1:] == b'\n'
def indenter():
for i in pycompat.xrange(num_lines):
l = lines[i]
if l.strip():
yield prefix if i else firstline
yield l
if i < num_lines - 1 or endswithnewline:
yield b'\n'
return b"".join(indenter())
@templatefilter(b'json')
def json(obj, paranoid=True):
"""Any object. Serializes the object to a JSON formatted text."""
if obj is None:
return b'null'
elif obj is False:
return b'false'
elif obj is True:
return b'true'
elif isinstance(obj, (int, pycompat.long, float)):
return pycompat.bytestr(obj)
elif isinstance(obj, bytes):
return b'"%s"' % encoding.jsonescape(obj, paranoid=paranoid)
elif isinstance(obj, type(u'')):
raise error.ProgrammingError(
b'Mercurial only does output with bytes: %r' % obj
)
elif util.safehasattr(obj, b'keys'):
out = [
b'"%s": %s'
% (encoding.jsonescape(k, paranoid=paranoid), json(v, paranoid))
for k, v in sorted(pycompat.iteritems(obj))
]
return b'{' + b', '.join(out) + b'}'
elif util.safehasattr(obj, b'__iter__'):
out = [json(i, paranoid) for i in obj]
return b'[' + b', '.join(out) + b']'
raise error.ProgrammingError(b'cannot encode %r' % obj)
@templatefilter(b'lower', intype=bytes)
def lower(text):
"""Any text. Converts the text to lowercase."""
return encoding.lower(text)
@templatefilter(b'nonempty', intype=bytes)
def nonempty(text):
"""Any text. Returns '(none)' if the string is empty."""
return text or b"(none)"
@templatefilter(b'obfuscate', intype=bytes)
def obfuscate(text):
"""Any text. Returns the input text rendered as a sequence of
XML entities.
"""
text = pycompat.unicode(
text, pycompat.sysstr(encoding.encoding), r'replace'
)
return b''.join([b'&#%d;' % ord(c) for c in text])
@templatefilter(b'permissions', intype=bytes)
def permissions(flags):
if b"l" in flags:
return b"lrwxrwxrwx"
if b"x" in flags:
return b"-rwxr-xr-x"
return b"-rw-r--r--"
@templatefilter(b'person', intype=bytes)
def person(author):
"""Any text. Returns the name before an email address,
interpreting it as per RFC 5322.
"""
return stringutil.person(author)
@templatefilter(b'revescape', intype=bytes)
def revescape(text):
"""Any text. Escapes all "special" characters, except @.
Forward slashes are escaped twice to prevent web servers from prematurely
unescaping them. For example, "@foo bar/baz" becomes "@foo%20bar%252Fbaz".
"""
return urlreq.quote(text, safe=b'/@').replace(b'/', b'%252F')
@templatefilter(b'rfc3339date', intype=templateutil.date)
def rfc3339date(text):
"""Date. Returns a date using the Internet date format
specified in RFC 3339: "2009-08-18T13:00:13+02:00".
"""
return dateutil.datestr(text, b"%Y-%m-%dT%H:%M:%S%1:%2")
@templatefilter(b'rfc822date', intype=templateutil.date)
def rfc822date(text):
"""Date. Returns a date using the same format used in email
headers: "Tue, 18 Aug 2009 13:00:13 +0200".
"""
return dateutil.datestr(text, b"%a, %d %b %Y %H:%M:%S %1%2")
@templatefilter(b'short', intype=bytes)
def short(text):
"""Changeset hash. Returns the short form of a changeset hash,
i.e. a 12 hexadecimal digit string.
"""
return text[:12]
@templatefilter(b'shortbisect', intype=bytes)
def shortbisect(label):
"""Any text. Treats `label` as a bisection status, and
returns a single-character representing the status (G: good, B: bad,
S: skipped, U: untested, I: ignored). Returns single space if `text`
is not a valid bisection status.
"""
if label:
return label[0:1].upper()
return b' '
@templatefilter(b'shortdate', intype=templateutil.date)
def shortdate(text):
"""Date. Returns a date like "2006-09-18"."""
return dateutil.shortdate(text)
@templatefilter(b'slashpath', intype=bytes)
def slashpath(path):
"""Any text. Replaces the native path separator with slash."""
return util.pconvert(path)
@templatefilter(b'splitlines', intype=bytes)
def splitlines(text):
"""Any text. Split text into a list of lines."""
return templateutil.hybridlist(text.splitlines(), name=b'line')
@templatefilter(b'stringescape', intype=bytes)
def stringescape(text):
return stringutil.escapestr(text)
@templatefilter(b'stringify', intype=bytes)
def stringify(thing):
"""Any type. Turns the value into text by converting values into
text and concatenating them.
"""
return thing # coerced by the intype
@templatefilter(b'stripdir', intype=bytes)
def stripdir(text):
"""Treat the text as path and strip a directory level, if
possible. For example, "foo" and "foo/bar" becomes "foo".
"""
dir = os.path.dirname(text)
if dir == b"":
return os.path.basename(text)
else:
return dir
@templatefilter(b'tabindent', intype=bytes)
def tabindent(text):
"""Any text. Returns the text, with every non-empty line
except the first starting with a tab character.
"""
return indent(text, b'\t')
@templatefilter(b'upper', intype=bytes)
def upper(text):
"""Any text. Converts the text to uppercase."""
return encoding.upper(text)
@templatefilter(b'urlescape', intype=bytes)
def urlescape(text):
"""Any text. Escapes all "special" characters. For example,
"foo bar" becomes "foo%20bar".
"""
return urlreq.quote(text)
@templatefilter(b'user', intype=bytes)
def userfilter(text):
"""Any text. Returns a short representation of a user name or email
address."""
return stringutil.shortuser(text)
@templatefilter(b'emailuser', intype=bytes)
def emailuser(text):
"""Any text. Returns the user portion of an email address."""
return stringutil.emailuser(text)
@templatefilter(b'utf8', intype=bytes)
def utf8(text):
"""Any text. Converts from the local character encoding to UTF-8."""
return encoding.fromlocal(text)
@templatefilter(b'xmlescape', intype=bytes)
def xmlescape(text):
    """Any text. Escapes the XML special characters &, <, >, " and ',
    and replaces characters that are illegal in XML with spaces.
    """
    # BUGFIX: the replacement strings had been corrupted into no-ops
    # (e.g. ``replace(b'&', b'&')``); restore the actual XML entities.
    # '&' must be escaped first so the other entities are not re-escaped.
    text = (
        text.replace(b'&', b'&amp;')
        .replace(b'<', b'&lt;')
        .replace(b'>', b'&gt;')
        .replace(b'"', b'&quot;')
        .replace(b"'", b'&#39;')
    )  # &apos; invalid in HTML
    # Strip control characters that are not legal in XML 1.0.
    return re.sub(b'[\x00-\x08\x0B\x0C\x0E-\x1F]', b' ', text)
def websub(text, websubtable):
    """:websub: Any text. Only applies to hgweb. Applies the regular
    expression replacements defined in the websub section.
    """
    # An empty or None substitution table leaves the text untouched.
    if not websubtable:
        return text
    for pattern, replacement in websubtable:
        text = pattern.sub(replacement, text)
    return text
def loadfilter(ui, extname, registrarobj):
"""Load template filter from specified registrarobj"""
for name, func in pycompat.iteritems(registrarobj._table):
filters[name] = func
# tell hggettext to extract docstrings from these functions:
i18nfunctions = filters.values()
| StarcoderdataPython |
197544 | <gh_stars>10-100
import sys
import types
import uuid
from StringIO import StringIO
import json
from .outputhandlers.shellcolors import OutputHandler
from .. import unicodehelper
class BaseErrorBundle(object):
    """Accumulates errors, warnings and notices produced during
    validation and renders them as JSON or as a printed summary.

    Keyword Arguments:

    **determined**
        Whether the validator should continue after a tier fails
    **instant**
        If True, each message is printed as soon as it is reported
    """

    def __init__(self, determined=True, instant=False, *args, **kwargs):
        self.handler = None
        self.errors = []
        self.warnings = []
        self.notices = []
        self.ending_tier = self.tier = 1
        self.unfinished = False
        self.instant = instant
        self.determined = determined
        super(BaseErrorBundle, self).__init__(*args, **kwargs)

    def _message(type_, message_type):
        """Factory run at class-definition time to build the three
        reporter methods (``error``/``warning``/``notice``) below; it is
        deliberately not a normal instance method."""

        def wrap(self, *args, **kwargs):
            arg_len = len(args)
            message = {
                "uid": uuid.uuid4().hex,
                "id": kwargs.get("err_id") or args[0],
                "message": unicodehelper.decode(
                    kwargs.get(message_type) or args[1]),
                "description": unicodehelper.decode(
                    kwargs.get("description", args[2] if
                                              arg_len > 2 else None)),
                # Filename is never None.
                "file": kwargs.get("filename",
                                   args[3] if arg_len > 3 else ""),
                "line": kwargs.get("line",
                                   args[4] if arg_len > 4 else None),
                "column": kwargs.get("column",
                                     args[5] if arg_len > 5 else None),
                "tier": kwargs.get("tier", self.tier),
                "context": None,
            }
            destination = getattr(self, type_)

            # Don't show duplicate messages (same id at the same spot).
            if any(x["id"] == message["id"] and
                   x["file"] == message["file"] and
                   x["line"] == message["line"] and
                   x["column"] == message["column"] for x in destination):
                return self

            context = kwargs.get("context")
            if context is not None:
                if isinstance(context, tuple):
                    message["context"] = context
                else:
                    message["context"] = context.get_context(
                        line=message["line"], column=message["column"])

            # Append the message to the right stack.
            destination.append(message)

            # If instant mode is turned on, output the message immediately.
            if self.instant:
                self._print_message(type_, message, verbose=True)

            return self

        return wrap

    # And then all the real functions. Ahh, how clean!
    error = _message("errors", "error")
    warning = _message("warnings", "warning")
    notice = _message("notices", "notice")

    def set_tier(self, tier):
        """Update the current tier and, if needed, the ending tier."""
        self.tier = tier
        if tier > self.ending_tier:
            self.ending_tier = tier

    @property
    def message_count(self):
        """Total number of errors, warnings and notices collected."""
        return len(self.errors) + len(self.warnings) + len(self.notices)

    def failed(self, fail_on_warnings=True):
        """Returns a boolean value describing whether the validation
        succeeded or not."""
        return bool(self.errors) or (fail_on_warnings and bool(self.warnings))

    def render_json(self):
        """Return a JSON summary of the validation operation."""
        output = {"ending_tier": self.ending_tier,
                  "success": not self.failed(),
                  "messages": [],
                  "errors": len(self.errors),
                  "warnings": len(self.warnings),
                  "notices": len(self.notices)}

        # Copy messages to the JSON output, tagging each with its type.
        messages = output["messages"]
        for error in self.errors:
            error["type"] = "error"
            messages.append(error)
        for warning in self.warnings:
            warning["type"] = "warning"
            messages.append(warning)
        for notice in self.notices:
            notice["type"] = "notice"
            messages.append(notice)

        # BUGFIX: the base _extend_json used to return None, which made
        # dict.update() raise TypeError; guard against None-returning
        # overrides as well.
        output.update(self._extend_json() or {})

        # Output the JSON.
        return json.dumps(output, ensure_ascii=True)

    def _extend_json(self):
        """Override this method to extend the JSON produced by the bundle.

        Must return a dict of extra top-level keys; the base
        implementation contributes nothing.
        """
        return {}

    def print_summary(self, verbose=False, no_color=False):
        """Render a summary of the validation process so far and return
        it as a string."""
        buffer = StringIO()
        self.handler = OutputHandler(buffer, no_color)

        # Make a neat little printout.
        self.handler.write("\n<<GREEN>>Summary:").write("-" * 30)
        self.handler.write("%s Errors, %s Warnings, %s Notices" %
                (len(self.errors), len(self.warnings), len(self.notices)))

        if self.failed():
            self.handler.write("<<BLUE>>Test failed! Errors:")
            # Print out all the errors/warnings:
            for error in self.errors:
                self._print_message("<<RED>>Error:<<NORMAL>>\t",
                                    error, verbose)
            for warning in self.warnings:
                self._print_message("<<YELLOW>>Warning:<<NORMAL>> ",
                                    warning, verbose)
        else:
            self.handler.write("<<GREEN>>All tests succeeded!")

        if self.notices:
            for notice in self.notices:
                self._print_message(prefix="<<WHITE>>Notice:<<NORMAL>>\t",
                                    message=notice,
                                    verbose=verbose)

        self.handler.write("\n")
        if self.unfinished:
            self.handler.write("<<RED>>Validation terminated early")
            # BUGFIX: the two adjacent literals used to concatenate
            # without a space ("preventingthe") and misspelled "process".
            self.handler.write("Errors during validation are preventing "
                               "the validation process from completing.")
            self.handler.write("Use the <<YELLOW>>--determined<<NORMAL>> "
                               "flag to ignore these errors.")
            self.handler.write("\n")

        return buffer.getvalue()

    def _flatten_list(self, data):
        """Flatten nested lists of strings into one newline-joined
        string; None becomes the empty string."""
        if data is None:
            return ""
        if isinstance(data, types.StringTypes):
            return data
        elif isinstance(data, (list, tuple)):
            return "\n".join(map(self._flatten_list, data))

    def _print_message(self, prefix, message, verbose=True):
        """Render a single message dict through the output handler."""

        # Load up the standard output.
        output = ["\n", prefix, message["message"]]

        # We have some extra stuff for verbose mode.
        if verbose:
            verbose_output = []

            # Detailed problem description.
            if message["description"]:
                verbose_output.append(
                    self._flatten_list(message["description"]))

            # Show the user what tier we're on
            verbose_output.append("\tTier:\t%d" % message["tier"])

            # If file information is available, output that as well.
            files = message["file"]
            if files is not None and files != "":
                fmsg = "\tFile:\t%s"
                # Nested files (subpackes) are stored in a list.
                if type(files) is list:
                    if files[-1] == "":
                        files[-1] = "(none)"
                    verbose_output.append(fmsg % ' > '.join(files))
                else:
                    verbose_output.append(fmsg % files)

            # If there is a line number, that gets put on the end.
            if message["line"]:
                verbose_output.append("\tLine:\t%s" % message["line"])
            if message["column"] and message["column"] != 0:
                verbose_output.append("\tColumn:\t%d" % message["column"])

            if "context" in message and message["context"]:
                verbose_output.append("\tContext:")
                verbose_output.extend(
                    [("\t> %s" % ("-" * 20 if x is None else x)) for
                     x in message.get("context", [])])

            # Stick it in with the standard items.
            output.append("\n")
            output.append("\n".join(verbose_output))

        # Send the final output to the handler to be rendered.
        self.handler.write(u''.join(map(unicodehelper.decode, output)))

    def discard_unused_messages(self, ending_tier):
        """
        Delete messages from errors, warnings, and notices whose tier is
        greater than the ending tier.
        """
        for stack in (self.errors, self.warnings, self.notices):
            # BUGFIX: rebuild each list in place instead of calling
            # ``remove`` while iterating, which skipped the element
            # immediately following every removed message.
            stack[:] = [message for message in stack
                        if message["tier"] <= ending_tier]
| StarcoderdataPython |
130302 | <reponame>gpapaz/eve-wspace
# Eve W-Space
# Copyright 2014 <NAME> and contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from core.models import ConfigEntry
from Teamspeak.models import TeamspeakServer
from core.utils import get_config
def load_defaults():
    """Create and persist a default TeamspeakServer record.

    NOTE(review): host, credentials and ports are placeholder values
    ("baduser"/"badpass") -- presumably intended to be replaced with real
    settings after deployment; confirm against the admin workflow.
    """
    ts3 = TeamspeakServer.create("localhost", "baduser","badpass","10011", "9887")
    ts3.save()
| StarcoderdataPython |
141205 | # © 2019 Nokia
# Licensed under the BSD 3 Clause license
# SPDX-License-Identifier: BSD-3-Clause
import os
from setuptools import setup, find_packages
pkg_name = 'radish_rest'
def _packages():
    """Return the package list for setup(): every sub-package discovered
    under the top-level package, plus the top-level package itself.
    """
    # find_packages() only scans *below* pkg_name, so the top-level
    # package has to be appended explicitly afterwards.
    packages = [f'{pkg_name}.{sub_pkg_name}' for sub_pkg_name in
                find_packages(os.path.join(os.path.dirname(__file__), pkg_name))
                ]
    packages.append(pkg_name)
    return packages
setup(name=pkg_name,
version='0.1',
description='radish bdd config extension',
url='https://github.com/',
author='<NAME>, <NAME>',
author_email='<EMAIL>',
packages=_packages(),
package_data={},
include_package_data=True,
install_requires=['nose',
'requests'
],
zip_safe=False,
license='BSD 3-Clause License'
)
| StarcoderdataPython |
3261242 | # Copyright (C) 2021. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
from model.utils import fix_len_compatibility
# data parameters
train_filelist_path = 'resources/filelists/final6_merged_train.txt'
valid_filelist_path = 'resources/filelists/final6_merged_val.txt'
test_filelist_path = 'resources/filelists/final6_merged_test.txt'
cmudict_path = 'resources/cmu_dictionary'
n_feats = 80
add_blank = False
# multispeaker
n_speakers =2 #n_Speakers+1
gin_channels_spk = 80
# expressive
#n_emotions = 5# n_emotions + 1,
#gin_channels_emotion = 80
n_langs = 2
gin_channels_langs = 80
# encoder parameters
n_enc_channels = 192
filter_channels = 768
filter_channels_dp = 256
n_enc_layers = 6
enc_kernel = 3
enc_dropout = 0.1
n_heads = 2
window_size = 4
# decoder parameters
dec_dim = 64
beta_min = 0.05
beta_max = 20.0
pe_scale = 1000 # 1 for `grad-tts-old.pt` checkpoint
# training parameters
log_dir = '/srv/storage/multis<EMAIL>/software_project/akriukova/gradtts_model/logs_v/' # model dir path
test_size = 4
n_epochs = 10000
batch_size = 16
learning_rate = 1e-4
seed = 37
save_every = 1
out_size = fix_len_compatibility(2*22050//256)
language_id = {0:"fr", 1:"en"}
| StarcoderdataPython |
85598 | # -*- coding: utf-8 -*-
#
# Copyright © 2021–2022 <NAME> <<EMAIL>>
# Released under the MIT Licence
#
import pytest
from datetime import date
from pytcnz.squashnz.player import Player
from .test_playerbase import PLAYER
PLAYER = PLAYER | dict(
id=14,
squash_code="WNTHJXD",
points=3050,
dob="1-Sep-1999",
phone="043841234",
mobile="021234567",
email="<EMAIL>",
vaccinated="X",
vaccination_expiry="30-Dec-2099",
comment="player comment",
)
def make_player_data(**kwargs):
return PLAYER | kwargs
@pytest.fixture
def player_data():
return make_player_data()
def test_init_player(player_data):
Player(**player_data)
@pytest.fixture
def player(player_data):
return Player(**player_data)
@pytest.fixture(params=["gender", "name", "points"])
def attr_to_check(request):
return request.param
def test_init_player_missing_data(player_data, attr_to_check):
del player_data[attr_to_check]
with pytest.raises(TypeError):
Player(**player_data)
def test_player_age(player):
assert player.get_age(onday=date(2021, 10, 6)) == 22
def test_player_age_group(player):
assert (
player.get_age_group(onday=date(2021, 10, 6)) == Player.AgeGroup.Senior
)
def test_player_age_no_dob(player):
player = Player(**make_player_data(dob=None))
assert player.age is None
assert player.age_group == Player.AgeGroup.Unknown
def test_player_get_age_no_dob(player):
player = Player(**make_player_data(dob=None))
assert player.get_age() is None
assert player.get_age_group() == Player.AgeGroup.Unknown
def test_player_age_dob_emptystring(player):
player = Player(**make_player_data(dob=""))
assert player.age is None
assert player.age_group == Player.AgeGroup.Unknown
def test_player_get_age_dob_emptystring(player):
player = Player(**make_player_data(dob=""))
assert player.get_age() is None
assert player.get_age_group() == Player.AgeGroup.Unknown
def test_player_age_group_junior(player_data):
player = Player(**player_data | dict(grade="J1"))
assert player.age_group == Player.AgeGroup.Junior
def test_player_strict_invalid_phonenumber(player_data):
with pytest.raises(Player.InvalidPhoneNumber):
Player(strict=True, **player_data | dict(phone="041234567"))
def test_player_relaxed_invalid_phonenumber(player_data):
Player(strict=False, **player_data | dict(phone="041234567"))
@pytest.fixture(
params=[
("Mrs. <NAME>", "<NAME>"),
("Ms. <NAME>", "<NAME>"),
("<NAME>", "<NAME>"),
("Dr. <NAME>", "<NAME>"),
("Mr. <NAME>", "<NAME>"),
("<NAME>", "<NAME>"),
("<NAME>", "<NAME>"),
("Mrs <NAME>", "<NAME>"),
]
)
def name_and_cleaned_name(request):
return request.param
def test_name_cleaning(name_and_cleaned_name):
assert (
Player.get_name_cleaned(name_and_cleaned_name[0])
== name_and_cleaned_name[1]
)
@pytest.fixture(
params=[
("<NAME>", "Jane"),
("Ms. <NAME>", "Jane"),
("<NAME>", "Carry-Ann"),
("<NAME>", "John"),
("<NAME>", "John"),
("Dr. <NAME>", "John"),
]
)
def name_and_firstname(request):
return request.param
def test_first_name_extraction(name_and_firstname):
assert (
Player.get_first_name(name_and_firstname[0]) == name_and_firstname[1]
)
def test_equality_ignores_id(player_data):
p1 = Player(**player_data)
p2 = Player(**player_data | dict(id="foo"))
assert p1 == p2
def test_single_word_name(player_data):
p = Player(**player_data | dict(name="Bye"))
def test_vaccination_status(player):
assert player.is_vaccinated() == Player.VaccinationStatus.V
def test_vaccination_status_bool(player):
assert player.is_vaccinated()
def test_vaccination_status_no_expiry_bool(player_data):
p = Player(**player_data | dict(vaccination_expiry=""))
assert not p.is_vaccinated()
def test_vaccination_status_expired(player):
assert (
player.is_vaccinated(onday=date(2099, 12, 31))
== Player.VaccinationStatus.E
)
def test_vaccination_status_expired_bool(player):
assert not player.is_vaccinated(onday=date(2099, 12, 31))
def test_unvaccinated_player(player_data):
p = Player(**player_data | dict(vaccinated=""))
assert p.is_vaccinated() == Player.VaccinationStatus.N
def test_unvaccinated_player_bool(player_data):
p = Player(**player_data | dict(vaccinated=""))
assert not p.is_vaccinated()
| StarcoderdataPython |
3212929 | # -*- coding: utf-8 -*-
import io
import tqdm
import requests
from PIL import Image
BASE_URL = 'https://api.nosconecta.com.ar/'
PATH = 'eform/thumbnail/{}'
BASE_PARAMS = {
'resize': 'full',
'page': '0',
}
FOLDER_URL = 'https://ar.turecibo.com/bandeja.php?apiendpoint=/folders/{}/documents/available'
MAX_FAILED_REQUESTS = 3
class DocumentDownloader:
    """Downloads every page of a document as an image and bundles the
    pages into a single PDF file."""

    def __init__(self, doc_hash, filename=None):
        """``doc_hash`` is the server-side document identifier;
        ``filename`` defaults to ``"<doc_hash>.pdf"``."""
        self.doc_hash = doc_hash
        self.filename = filename if filename is not None else '{}.pdf'.format(doc_hash)
        self.url = BASE_URL + PATH.format(self.doc_hash)

    def download(self):
        """Fetch all pages and write them to ``self.filename`` as a PDF."""
        pages = self.get_pages()
        self.save_as_pdf(pages)

    def get_pages(self):
        """
        Downloads all the pages and returns them as an ordered list of
        images loaded in memory.

        Pages are requested sequentially; a JSON response is treated as
        an out-of-bounds marker, and fetching stops after
        MAX_FAILED_REQUESTS such responses.
        """
        session = requests.session()
        params = BASE_PARAMS.copy()
        pages = []
        page = 0
        req_failed = 0
        bar = tqdm.tqdm(desc=self.filename, unit='pages')
        try:
            while req_failed < MAX_FAILED_REQUESTS:
                page += 1
                params.update({'page': '{}'.format(page)})
                req = session.get(self.url, params=params)
                # BUGFIX: headers.get() may return None when the header is
                # absent, which used to raise AttributeError on startswith.
                content_type = req.headers.get('Content-Type', '')
                if content_type.startswith('application/json'):
                    # This is probably an error message, we are most likely
                    # out of bounds. Continue trying to get pages though.
                    req_failed += 1
                    continue
                bar.update(1)
                img = Image.open(io.BytesIO(req.content))
                pages.append(img)
        finally:
            # Always release the progress bar, even if a request raises.
            bar.close()
        return pages

    def save_as_pdf(self, pages):
        """Write the page images to ``self.filename`` as a multi-page PDF.

        Raises ValueError when no pages were downloaded (previously this
        crashed with an opaque IndexError).
        """
        if not pages:
            raise ValueError(
                'no pages were downloaded for {}'.format(self.doc_hash))
        first_page, rest = pages[0], pages[1:]
        first_page.save(
            self.filename, 'PDF', resolution=100.0,
            save_all=True, append_images=rest
        )
class FolderDownloader:
    """Lists the documents available in a folder and downloads each one."""

    def __init__(self, cookie, folder):
        # ``cookie`` is the raw session cookie header value; ``folder``
        # is the folder identifier interpolated into FOLDER_URL.
        self.cookie = cookie
        self.folder = folder

    def download(self):
        """Fetch the folder listing and download every document in it.

        Returns -1 when the listing response is not valid JSON (which
        usually means the session cookie is invalid); returns None on
        success.
        """
        url = FOLDER_URL.format(self.folder)
        data = dict(reload='1')
        headers = dict(cookie=self.cookie)
        req = requests.post(url, data=data, headers=headers)
        try:
            response = req.json()
        except ValueError:
            # requests raises a ValueError subclass (JSONDecodeError)
            # when the body is not JSON; the previous bare ``except``
            # also swallowed unrelated bugs such as NameError.
            print('Error, invalid cookie?')
            return -1
        categories = response.get('categorias', {})
        for document in categories.get('documentos', []):
            doc_hash = document.get('archivo')
            DocumentDownloader(doc_hash=doc_hash).download()
| StarcoderdataPython |
3256056 | <filename>comath/metric/metric.py
"""metric-related utility functions."""
import abc
class MovingMetricTracker(metaclass=abc.ABCMeta):
    """Abstract base class for objects that incrementally track a
    streaming (moving) metric.

    Subclasses must implement ``add_value`` and ``get_metric``.
    """

    def __init__(self, metric_name):
        # Human-readable label for the metric being tracked.
        self.metric_name = metric_name

    @abc.abstractmethod
    def add_value(self, value):
        """Incorporate a new value into the metric tracker.

        Arguments
        ---------
        value : float
            The new observation to fold into the running metric.
        """

    @abc.abstractmethod
    def get_metric(self):
        """Return the current value of the metric."""
class MovingAverageTracker(MovingMetricTracker):
    """Tracks the running (cumulative) average of the observed values."""

    def __init__(self, metric_name=None):
        MovingMetricTracker.__init__(
            self,
            metric_name=metric_name or 'Average'
        )
        # Running sum and count of all observed values.
        self.val_sum = 0
        self.n = 0

    def add_value(self, value):
        self.val_sum += value
        self.n += 1

    def get_metric(self):
        # Average of everything seen so far; 0 before any observation.
        if self.n == 0:
            return 0
        return self.val_sum / self.n
class MovingVarianceTracker(MovingMetricTracker):
    """Tracks a running sample variance using Welford's online
    algorithm:
    https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Online_algorithm
    """

    def __init__(self, metric_name=None):
        MovingMetricTracker.__init__(
            self,
            metric_name or 'Variance'
        )
        self.n = 0       # number of observations
        self.delta = 0   # last pre-update deviation from the mean
        self.delta2 = 0  # last post-update deviation from the mean
        self.mean = 0    # running mean
        self.m2k = 0     # running sum of squared deviations (M2)

    def add_value(self, value):
        # Standard Welford update; the order of these statements matters.
        self.n += 1
        self.delta = value - self.mean
        self.mean += self.delta / self.n
        self.delta2 = value - self.mean
        self.m2k += self.delta * self.delta2

    def get_metric(self):
        # Sample variance is undefined for fewer than two observations.
        return self.m2k / (self.n - 1) if self.n >= 2 else None
class MovingPrecisionTracker(MovingMetricTracker):
    """Tracks the fraction of truthy values among all added values."""

    def __init__(self, metric_name=None):
        MovingMetricTracker.__init__(
            self,
            metric_name or 'Precision'
        )
        self.true_count = 0  # truthy observations
        self.n = 0           # total observations

    def add_value(self, value):
        """Count ``value`` as a hit when truthy, a miss otherwise."""
        self.true_count += bool(value)
        self.n += 1

    def get_metric(self):
        """Return true_count / n, or 0 when nothing was added yet."""
        return self.true_count / self.n if self.n else 0
| StarcoderdataPython |
class BaseAnsiblerException(Exception):
    """Base class for all ansibler errors.

    Subclasses override the class attribute ``message``; a per-instance
    override can be passed as the ``message`` keyword argument.
    """

    message = "Error"

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args)
        # Store the override on the instance. The original assigned to
        # self.__class__.message, which mutated the *class* attribute and
        # leaked the override into every later instance of that class.
        self.message = kwargs.get("message", self.message)

    def __str__(self) -> str:
        return self.message
class CommandNotFound(BaseAnsiblerException):
    # Raised when a required external command is not available.
    message = "Command not found"


class RolesParseError(BaseAnsiblerException):
    # Raised when the default roles definition cannot be parsed.
    message = "Could not parse default roles"


class MetaYMLError(BaseAnsiblerException):
    # Raised for a malformed or missing meta/main.yml.
    message = "Invalid meta/main.yml"


class RoleMetadataError(BaseAnsiblerException):
    # Raised for invalid role metadata.
    message = "Role metadata error"
class MoleculeTestsNotFound(BaseAnsiblerException):
    # Raised when no molecule tests exist for a role.
    message = "Molecule tests not found"  # fixed typo: was "not foound"
class MoleculeTestParseError(BaseAnsiblerException):
    # Raised when a molecule test file cannot be parsed.
    message = "Could not parse molecule test file"


class NoPackageJsonError(BaseAnsiblerException):
    # Raised when the project lacks a package.json file.
    message = "No package.json"
| StarcoderdataPython |
3227675 | <gh_stars>10-100
class Material:
    """Simple surface-material description for rendering.

    All list-valued parameters default to a *fresh* list per instance.
    The original used mutable default arguments, so every Material built
    with defaults shared (and could silently corrupt) the same lists.
    """

    def __init__(self, diffuse=None, spec_weight=0., specular=None,
                 ambient=None, opacity=1., flat_shading=False, texture_file=None):
        # None sentinels replace the shared mutable defaults; the
        # effective default values are unchanged.
        self.diffuse = [.8, .8, .8] if diffuse is None else diffuse
        self.spec_weight = spec_weight
        self.specular = [0., 0., 0.] if specular is None else specular
        self.ambient = [0., 0., 0.] if ambient is None else ambient
        self.opacity = opacity
        self.flat_shading = flat_shading
        self.texture_file = texture_file
| StarcoderdataPython |
131004 | """
inheritance-diagram:: dfo.optimizer.direct
:parts: 1
"""
from misc.debug import DbgMsgOut, DbgMsg
from .base import BoxConstrainedOptimizer
from numpy import max, min, abs, array
import numpy as np
import heapq
__all__ = ['Cube', 'DIRECT']
class Cube(object):
    """A hyper-rectangle tracked by DIRECT: a center ``x``, the objective
    value ``f`` at that center, and a per-dimension subdivision ``depth``."""

    def __init__(self, x, f, depth):
        self.x = array(x)            # center point
        self.f = f                   # objective value at the center
        self.ndim = self.x.shape[0]  # dimensionality of the search space
        self.depth = depth           # per-axis subdivision counts

    def increase_depth(self, i=None):
        """Deepen axis ``i``, or every axis when ``i`` is None."""
        axes = [i] if i is not None else range(self.ndim)
        for axis in axes:
            self.depth[axis] += 1
class DIRECT(BoxConstrainedOptimizer):
    """DIRECT (DIvided RECTangles) box-constrained global optimizer.

    Keeps a heap (``pq_cache``) of hyper-rectangles (:class:`Cube`) keyed
    by a potential-optimality score and repeatedly subdivides the most
    promising ones along their least-subdivided axes.

    NOTE(review): indentation of this file was mangled in transit; the
    nesting below was reconstructed and should be checked against the
    original source.
    """

    def __init__(self, function, xlo=None, xhi=None, debug=0, fstop=None, maxiter=None):
        BoxConstrainedOptimizer.__init__(self, function, xlo, xhi, debug, fstop, maxiter, cache=True)
        self.pq_cache = None   # heap of (priority, insertion order, Cube)
        self.eps = 1e-2        # tolerance used in potential-optimality scoring
        self.K = 0             # assumed slope constant in the priority formula
        self.max_depth = 5     # maximum subdivisions per axis
        self.visited = []

    def check(self):
        """
        Checks the optimization algorithm's settings and raises an exception if
        something is wrong.
        """
        BoxConstrainedOptimizer.check(self)
        # if self.samplesize is None:
        #     raise Exception(DbgMsg("DIRECT", "The sample size should not be None."))

    def reset(self, x0):
        """
        Puts the optimizer in its initial state and sets the initial point to
        be the 1-dimensional array *x0*. The length of the array becomes the
        dimension of the optimization problem (:attr:`ndim` member). The shape
        of *x* must match that of *xlo* and *xhi*.
        """
        BoxConstrainedOptimizer.reset(self, x0)
        # Debug message
        if self.debug:
            DbgMsgOut("DIRECT", "Resetting DIRECT")

    def run(self):
        """
        Run the DIRECT algorithm.
        """
        # Debug message
        if self.debug:
            DbgMsgOut("CSOPT", "Starting a coordinate search run at i=" + str(self.niter))
        # Reset stop flag
        self.stop = False
        # Check
        self.check()
        # Start from the center of the normalized unit box.
        self.x = 0.5 * np.ones(shape=(self.ndim,))
        self.f = self.fun(self.denormalize(self.x))
        # pq: potentiality, f(center), depth
        self.pq_cache = [(self.f - 1.0 / 3.0 * self.K, 1, Cube(self.x, self.f, np.ones(shape=(self.ndim,))))]
        while not self.stop and self.pq_cache:
            # Pop the currently most promising hyper-rectangle.
            val, it, cube = heapq.heappop(self.pq_cache)
            self.update_cube(cube)
            x, depth = cube.x, cube.depth
            minimum_depth = min(depth)
            # print("depth: ", depth)
            if self.debug:
                DbgMsgOut("DIRECT", "Cube.f =" + str(cube.f))
            inc_index, better_index, same_index, worse_index = [], [], [], []
            for i in range(self.ndim):
                # try points with length of the maximum side of hyper-rectangle
                if depth[i] == minimum_depth:
                    # Probe one third to the left of the center along axis i.
                    x[i] -= (1 / 3)**depth[i]
                    improved = self.update_potential_rectangle(x, depth, i)
                    if improved == 0:
                        same_index.append(i)
                    elif improved > 0:
                        better_index.append(i)
                    else:
                        worse_index.append(i)
                    # Probe one third to the right of the center.
                    x[i] += 2 * (1 / 3)**depth[i]
                    improved = self.update_potential_rectangle(x, depth, i)
                    if improved == 0:
                        same_index.append(i)
                    elif improved > 0:
                        better_index.append(i)
                    else:
                        worse_index.append(i)
                    # Restore the center coordinate.
                    x[i] -= (1 / 3) ** depth[i]
                    inc_index.append(i)
            if better_index != [] and worse_index != []:
                # Decrease the size of the cube and save it in the cache
                for idx in inc_index:
                    cube.increase_depth(idx)
            self.niter += 1
            # Push the smaller cube into the cache centering at self.x
            heapq.heappush(self.pq_cache, (cube.f - 0.5**depth[0] * self.K, self.niter, cube))
            if self.debug:
                DbgMsgOut("DIRECT", "Iteration i=" + str(self.niter) + " fbest=" + str(self.f))

    def update_cube(self, cube):
        '''
        Deepen any axis of *cube* whose one-third-step neighbors were
        already evaluated (present in the cache).

        :param cube: class Cube object
        :return: None
        '''
        # print("update cube")
        x = cube.x
        depth = cube.depth
        for i in range(self.ndim):
            if self.cache.isVisited(x + (1.0 / 3.0)**depth[i]) or self.cache.isVisited(x - (1.0 / 3.0)**depth[i]):
                cube.increase_depth(i)
        # print("cube's depth: ", cube.depth)

    def update_potential_rectangle(self, x, depth, i):
        '''
        Check potentially Potential Hyper-rectangles.

        Evaluates the objective at *x*; when the value ties or improves on
        the incumbent (and the axis is not over-subdivided), pushes a new
        sub-cube and updates the incumbent.

        :param x: trial center point
        :param depth: per-axis subdivision counts of the parent cube
        :param i: axis being probed
        :return: 1 if improved, 0 if tied, -1 otherwise
        '''
        # print("x::::::::::", x)
        f = self.fun(self.denormalize(x))
        if depth[i] <= self.max_depth and f <= self.f:
            # build new cube with x_new, depth_new
            x_new = x.copy()
            depth_new = depth.copy()
            depth_new[i] += 1
            cube = Cube(x_new, f, depth_new)
            heapq.heappush(self.pq_cache, (f - self.K * 0.5**depth_new[i], self.niter, cube))
            if f < self.f:
                self.f = f
                self.x = x.copy()
                if self.debug:
                    DbgMsgOut("DIRECT", "Better centers found in iteration i=" + str(self.niter) + " fbest=" + str(self.f))
                return 1
            elif f == self.f:
                return 0
        return -1
| StarcoderdataPython |
3288493 | <gh_stars>0
from collections import deque
from .node import Node
class AhoCorasick(object):
    """Aho-Corasick multi-pattern matcher backed by a trie with failure links."""

    def __init__(self):
        '''
        Initialize an empty automaton with a root node of index 0.
        '''
        self.head = Node()
        self.head.fail = 0
        self.pattern = set()                 # all registered patterns
        self.idx = 1                         # next node index to assign
        self.aho_corasick = {0: self.head}   # node index -> Node

    def __call__(self, text):
        '''
        Run the Aho-Corasick algorithm over *text* using the prebuilt trie.

        :param text: text to match against
        :return: list of matches as (INT start, INT end, STRING keyword) tuples
        '''
        current = self.head
        ret = []
        for idx, char in enumerate(text):
            while True:
                # Walk failure links until a transition on ``char`` exists
                # or we are back at the root (index 0).  ``!= 0`` replaces
                # the original ``is not 0`` identity test, which only
                # worked due to CPython's small-int caching and emits a
                # SyntaxWarning on Python >= 3.8.
                if not current.goto(char) and current.idx != 0:
                    current = self.aho_corasick[current.fail]
                else:
                    child = current.goto(char)
                    break
            if child:
                current = child
                if child.output:
                    # Report the longest keyword ending at this position.
                    keyword = max(list(child.output), key=len)
                    start = idx - len(keyword) + 1
                    end = start + len(keyword)
                    ret.append((start, end, keyword))
        return ret

    def add_pattern(self, pattern):
        '''
        Insert a single pattern into the trie.

        :param pattern: pattern string to build into the trie
        :return: -
        '''
        self.pattern.add(pattern)
        current = self.head
        for char in pattern:
            if char not in current.children.keys():
                current.children[char] = Node(self.idx, char)
                self.aho_corasick[self.idx] = current.children[char]
                self.idx += 1
            current = current.children[char]
        current.output.add(pattern)

    def add_patterns(self, patterns):
        '''
        Insert several patterns at once.

        :param patterns: list of patterns, or a single space-separated string
        :return: -
        '''
        if type(patterns) is str:
            patterns = patterns.split()
        assert type(patterns) is list, "Please input list or str with space"
        for pattern in patterns:
            self.add_pattern(pattern)

    def _compute_fail_func(self):
        '''
        Compute the failure function for every node from the registered
        patterns, via breadth-first traversal of the trie.

        :return: -
        '''
        queue = deque()
        for node in self.head.children.values():
            queue.append(node)
        while queue:
            target = queue.popleft()
            for node in target.children.values():
                queue.append(node)
                idx = target.fail
                char = node.char
                current = self.aho_corasick[idx]
                # Same ``!= 0`` fix as in __call__ (was ``is not 0``).
                while not current.goto(char) and current.idx != 0:
                    new_idx = current.fail
                    current = self.aho_corasick[new_idx]
                if not current.goto(char):
                    node.fail = current.idx
                else:
                    node.fail = current.goto(char).idx
                node.set_output(self.aho_corasick[node.fail].output)

    def build(self):
        '''
        Finalize the automaton after all patterns have been added.

        :return: -
        '''
        self._compute_fail_func()
if __name__ == "__main__":
    # Smoke test: build a small automaton and run it over a sample text.
    aho = AhoCorasick()
    aho.add_pattern("hi")
    aho.add_pattern("this")
    aho.build()
    aho("this is my first aho-corasick implemented. and")
1773369 | from disasm import Types
from utils.ail_utils import ELF_utils, unify_int_list, dec_hex, get_loc
class lib32_helper(object):
    """
    Manage PC relative code for x86 32bit binaries

    NOTE(review): this module is Python 2 code (``dict.iteritems``,
    list-returning ``map``); indentation was reconstructed after being
    stripped in transit and should be checked against the original.
    """

    def __init__(self, instrs, funcs):
        """
        :param instrs: instruction list
        :param funcs: function list
        """
        self.instrs = instrs
        # Map each function's begin address to its end address, then keep
        # a list of (begin, end) pairs sorted by begin address.
        self.funcs = {f.func_begin_addr: f.func_end_addr for f in funcs if f.func_begin_addr != 0}
        self.funcs = sorted(self.funcs.iteritems(), key=lambda e: e[0])
        self.label = []          # addresses for which labels were generated
        self.sec = []            # section table, filled by section_collect
        self.curr_func = 0       # index into self.funcs during the scan
        self.curr_regs = set()   # Set of the register holding .got.plt address
        self.gotaddr = 0         # address of .got.plt, from gotplt.info
        self.section_collect()

    def match_get_pc_thunk(self, instr):
        """
        Check if insturction after pcthunk invocation

        :param instr: instruction tuple
        :return: True on success
        """
        return isinstance(instr[2], Types.Label) \
            and instr[0].upper().startswith('ADD') \
            and instr[2] == '$_GLOBAL_OFFSET_TABLE_'

    def v_exp(self, e):
        """
        Check if PC relative expression and transform using labels

        :param e: expression
        :return: transformed expression if matching, original one otherwise
        """
        if isinstance(e, (Types.BinOP_PLUS, Types.BinOP_MINUS)):
            r1, addr = e
            if r1.upper() in self.curr_regs:
                # Resolve the displacement relative to the .got.plt base.
                addr = -addr if isinstance(e, Types.BinOP_MINUS) else addr
                des = self.gotaddr + addr
                s = self.check_sec(des)
                if s is not None:
                    self.label.append(des)
                    return Types.Label('S_' + dec_hex(des))
        return e

    def scan(self):
        """
        Scan instruction list and modify PC relative code with labels
        """
        i = 0
        inlen = len(self.instrs) - 1
        while i < inlen:
            h1 = self.instrs[i]
            if get_loc(h1).loc_addr >= self.funcs[self.curr_func][1]:
                # It can be assumed that the base register is set only inside a single function
                self.curr_func += 1
                self.curr_regs.clear()
            if isinstance(h1, Types.TripleInstr) and (self.match_get_pc_thunk(h1) \
                    or (h1[0].upper() == 'MOV' and isinstance(h1[2], Types.RegClass) \
                    and h1[2].upper() in self.curr_regs and isinstance(h1[1], Types.RegClass))):
                # .got.plt address can also be copied to more than one register
                self.curr_regs.add(h1[1].upper())
            elif len(self.curr_regs) > 0:
                # Rewrite any operand that is an offset from a tracked register.
                if isinstance(h1, Types.DoubleInstr):
                    self.instrs[i] = Types.DoubleInstr((h1[0], self.v_exp(h1[1]), h1[2], h1[3]))
                elif not isinstance(h1, Types.SingleInstr):
                    if isinstance(h1, Types.TripleInstr):
                        self.instrs[i] = Types.TripleInstr((h1[0], self.v_exp(h1[1]), self.v_exp(h1[2]), h1[3], h1[4]))
                    elif isinstance(h1, Types.FourInstr):
                        self.instrs[i] = Types.FourInstr((h1[0], h1[1], self.v_exp(h1[2]), h1[3], h1[4], h1[5]))
                if isinstance(h1[1], Types.RegClass) and h1[1].upper() in self.curr_regs:
                    # Remove if overridden
                    self.curr_regs.remove(h1[1].upper())
            i += 1

    def traverse(self):
        """
        Analyze and modify instructions

        :return: list of generated labels
        """
        if ELF_utils.elf_32() and not ELF_utils.elf_arm():
            self.scan()
        return unify_int_list(self.label)

    def get_instrs(self):
        """
        Get instruction list
        """
        return self.instrs

    def section_collect(self):
        """
        Load sections information
        """
        with open('sections.info') as f:
            def mapper(l):
                items = l.split()
                return Types.Section(items[0], int(items[1], 16), int(items[3], 16))
            # Python 2 ``map`` returns a list here; under Python 3 this
            # would be a lazy iterator over a closed file.
            self.sec = map(mapper, f)
        with open('gotplt.info') as f:
            self.gotaddr = int(f.readline().split()[1], 16)

    def check_sec(self, addr):
        """
        Find the section an address belongs to

        :param addr: address
        :return: section object, None on failure
        """
        for h in self.sec:
            b = h.sec_begin_addr
            e = b + h.sec_size
            if b <= addr < e: return h
        return None
3397951 | <filename>bfassist/standalone/monitoring/realtimeround.py
#############################################################################
#
#
# Module of BFA that manages server statistics in realtime
#
#
#############################################################################
""" This module implements the real-time logging of in-game statistics specifically for one current bf round.
Dependencies:
2nd-party dependency numpy
bfassist <- (standalone.monitoring.)realtimeround
|
\-> bfa_logging
-> standalone -> monitoring
note:: Author(s): Mitch last-check: 08.07.2021 """
from datetime import datetime
from numpy import array, array2string
from bfassist.bfa_logging import log
from bfassist.standalone import Server
from bfassist.standalone.monitoring import RealTimePlayer, RealTimeVehicle, BfRound, BfServerSetting, BfPlayerRound, \
Player
# noinspection PyUnusedLocal
def __preload__(forClient: bool = True):
    """No-op module pre-load hook; ``forClient`` is unused here."""
    pass


# noinspection PyUnusedLocal
def __postload__(forClient: bool = True):
    """No-op module post-load hook; ``forClient`` is unused here."""
    pass
class RealTimeRound:
    """ A real time round is supposed to correspond to a bf round in dice::xmlns::bf and contains all relevant
    information being updated in realtime. BfRounds in online mode are actually instantiated from the bfxml module that
    is parsing the bf round information.

        :param server:              The Server the real time round takes place.
        :param eventDict:           Dictionary containing all events with their timestamp as key.
        :param livePlayers:         Dictionary of real time live players with their ids as keys.
        :param dcedPlayers:         Dictionary of real time players that left during a running round.
                                    Now using their keyhash as key.
        :param liveTicketsAxis:     Current ticket count of axis.
        :param liveTicketsAllies:   Current ticket count of allies.
        :param roundStats:          Roundstats object from serverstats module.
        :param roundStart:          The start of the round as datetime.
        :param liveRound:           Flag that determines if a round is live and should be sent to the master when
                                    finished.

        todo:: Disconnecting/Reconnecting players could be handled together with a henk-patch?...

    note:: Author(s): Mitch """

    def __init__(self, server: Server, eventDict: dict = None, livePlayers: dict = None, dcedPlayers: dict = None,
                 liveTicketsAxis: int = None, liveTicketsAllies: int = None, roundStats: BfRound = None,
                 roundStart: datetime = None, liveRound: bool = False):
        self.server = server
        # None-or-empty defaults: fresh containers per round instance.
        if eventDict:
            self.eventDict = eventDict
        else:
            self.eventDict = {}
        if livePlayers:
            self.livePlayers = livePlayers
        else:
            self.livePlayers = {}
        if dcedPlayers:
            self.dcedPlayers = dcedPlayers
        else:
            self.dcedPlayers = {}
        self.liveTicketsAxis = liveTicketsAxis
        self.liveTicketsAllies = liveTicketsAllies
        if roundStats:
            self.roundStats = roundStats
        else:
            self.roundStats = BfRound(self.server, BfServerSetting(), {}, datetime.now())
        if roundStart:
            self.roundStart = roundStart
        else:
            self.roundStart = self.roundStats.getStart()
        self.liveRound = liveRound

    # noinspection PyUnusedLocal
    def beginMedPack(self, player_id: int, player_location: array, medpack_status: int, healed_player: int):
        """Handle a beginMedPack event: update position and medpack status."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].updateMedPackStatus(medpack_status)

    # noinspection PyUnusedLocal
    def beginRepair(self, player_id: int, player_location: array, repair_status: int, vehicle_type: str):
        """Handle a beginRepair event: update position and repair status."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].updateRepairStatus(repair_status)

    # noinspection PyUnusedLocal
    def changePlayerName(self, player_id: int, player_location: array, name: str):
        """Handle a player rename event."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].updateName(name)

    # noinspection PyUnusedLocal
    def chat(self, player_id: int, player_location: array, team: int, text: str):
        """Handle a chat event; the message text itself is not stored here."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].updateTeam(team)

    def createPlayer(self, player_id: int, player_location: array, name: str, is_ai: int, team: int):
        """Create and register a RealTimePlayer for a newly joined player."""
        new_player = RealTimePlayer(player_id, name, self.roundStats.getRoundId(), position=player_location,
                                    is_ai=is_ai, team=team)
        self.livePlayers[player_id] = new_player
        return new_player

    def destroyPlayer(self, player_id: int, player_location: array):
        """Move a leaving player from livePlayers to dcedPlayers (by keyhash)."""
        player = self.livePlayers[player_id]
        player.updatePosition(player_location)
        self.livePlayers.pop(player_id)
        self.dcedPlayers[player.player.getKeyhash()] = player

    # noinspection PyUnusedLocal
    def destroyVehicle(self, player_id: int, player_location: array, vehicle: str, vehicle_pos: array):
        """Handle a vehicle destruction event; only the player position is tracked."""
        self.livePlayers[player_id].updatePosition(player_location)

    def disconnectPlayer(self, player_id: int, player_location: array):
        """Record the last known position of a disconnecting player."""
        self.livePlayers[player_id].updatePosition(player_location)

    def endMedPack(self, player_id: int, player_location: array, medpack_status: int):
        """Handle an endMedPack event: update position and medpack status."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].updateMedPackStatus(medpack_status)

    def endRepair(self, player_id: int, player_location: array, repair_status: int):
        """Handle an endRepair event: update position and repair status."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].updateRepairStatus(repair_status)

    # noinspection PyUnusedLocal
    def enterVehicle(self, player_id: int, player_location: array, vehicle_name: str, player_seat: str, is_default: int,
                     is_fake: int):
        """Attach a RealTimeVehicle (seating this player) to the player."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].updateVehicle(RealTimeVehicle(position=player_location, is_fake=is_fake,
                                                                  seats={player_id: self.livePlayers[player_id]}))

    # noinspection PyUnusedLocal
    def exitVehicle(self, player_id: int, player_location: array, vehicle_name: str, is_fake: int):
        """Detach the player from their current vehicle."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].updateVehicle(None)

    def pickupFlag(self, player_id: int, player_location: array):
        """Handle a flag pickup event."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].pickupFlag()

    def pickupKit(self, player_id: int, player_location: array, kit):
        """Handle a kit pickup event."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].pickupKit(kit)

    # noinspection PyUnusedLocal
    def playerKeyHash(self, player_id: int, player_keyhash: str):
        """Link the live player to the persistent Player via the server's playerLinks."""
        self.livePlayers[player_id].player = self.server.playerLinks[player_id]

    # noinspection PyUnusedLocal
    def radioMessage(self, player_id: int, player_location: array, message: int, broadcast: int):
        """Handle a radio message event; only the position is tracked."""
        self.livePlayers[player_id].updatePosition(player_location)

    def reSpawnEvent(self, player_id: int, player_location: array):
        """Handle a respawn event."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].spawn()

    def restartMap(self, tickets_team1: int, tickets_team2: int):
        """Reset the live ticket counts on map restart."""
        self.liveTicketsAxis = tickets_team1
        self.liveTicketsAllies = tickets_team2

    def roundInit(self, tickets_team1: int, tickets_team2: int):
        """Set the live ticket counts at round initialization."""
        self.liveTicketsAxis = tickets_team1
        self.liveTicketsAllies = tickets_team2

    def scoreEvent(self, player_id: int, player_location: array, score_type: str, victim_id: int, weapon: str):
        """Dispatch a score event to the appropriate player statistic update."""
        self.livePlayers[player_id].updatePosition(player_location)
        if score_type in ['Attack', 'Defence', 'FlagCapture', 'Objective', 'ObjectiveTK', 'Spawned', '(unknown)']:
            self.livePlayers[player_id].evaluate(score_type)
        elif score_type in ['Death', 'DeathNoMsg']:
            self.livePlayers[player_id].die()
        elif score_type == 'Kill':
            self.livePlayers[player_id].kill(victim_id, weapon)
        elif score_type == 'TK':
            self.livePlayers[player_id].teamKill(victim_id, weapon)

    def setTeam(self, player_id: int, player_location: array, team: int):
        """Handle a team change event."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].updateTeam(team)

    def spawnEvent(self, player_id: int, player_location: array):
        """Handle a spawn event."""
        self.livePlayers[player_id].updatePosition(player_location)
        self.livePlayers[player_id].spawn()

    @staticmethod
    def typeHintDcedPlayer():
        """Type hint for the dcedPlayers mapping (keyhash -> player hint)."""
        return {
            '__type__': dict,
            '__keys__': str,
            '__values__': RealTimePlayer.typeHint()
        }

    @classmethod
    def typeHint(cls):
        """Type hint for the JSON-serializable view of a real time round."""
        return {
            'dcedPlayers': cls.typeHintDcedPlayer(),
            'liveTicketsAxis': int,
            'liveTicketsAllies': int,
            'roundStart': str
        }

    def toLocalDict(self):
        """ Function for getting the real time round object as a dictionary of strings representing attributes and the
        values for the local bfa perspective so it can also be json serialised. (excluding event dict)

            :return:    Real time round as a dictionary.

        note:: Author(s): Mitch """

        return {
            'dcedPlayers': {player_keyhash: self.dcedPlayers[player_keyhash].toLocalDict()
                            for player_keyhash in self.dcedPlayers},
            'liveTicketsAxis': self.liveTicketsAxis,
            'liveTicketsAllies': self.liveTicketsAllies,
            'roundStart': str(self.roundStart)
        }
| StarcoderdataPython |
1715985 | class TestData:
CHROME_EXECUTABLE_PATH = "/Users/User/Desktop/selenium/selinium/python chromedriver/chromedriver"
FIREFOX_EXECUTABLE_PATH = "/Users/User/Desktop/selenium/selinium/python chromedriver/geckodriver"
BASE_URL = "https://app.hubspot.com/login"
"""https://app.hubspot.com/login"""
USER_NAME = "<EMAIL>"
PASSWORD = "<PASSWORD>"
LOGIN_PAGE_TITLE = "HubSpot Login" | StarcoderdataPython |
3304143 | import os
TEST_WEBGPU = os.environ.get("TEST_WEBGPU", "1") == "1"
TEST_WEBGL = os.environ.get("TEST_WEBGL", "1") == "1"
TEST_WEBASSEMBLY = os.environ.get("TEST_WEBASSEMBLY", "1") == "1"
TEST_FALLBACK = os.environ.get("TEST_FALLBACK", "1") == "1"
| StarcoderdataPython |
3348841 | <reponame>bopopescu/sage-5
r"""
Hasse diagrams of posets
"""
#*****************************************************************************
# Copyright (C) 2008 <NAME> <<EMAIL>>,
# <NAME> <<EMAIL>>
#
# Distributed under the terms of the GNU General Public License (GPL)
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# The full text of the GPL is available at:
#
# http://www.gnu.org/licenses/
#*****************************************************************************
from copy import copy
from sage.graphs.digraph import DiGraph
from sage.matrix.constructor import matrix
from sage.rings.integer_ring import ZZ
from sage.misc.misc import uniq
from sage.misc.lazy_attribute import lazy_attribute
from sage.misc.cachefunc import cached_method
class HasseDiagram(DiGraph):
    """
    The Hasse diagram of a poset. This is just a transitively-reduced,
    directed, acyclic graph without loops or multiple edges.

    .. note::

        We assume that ``range(n)`` is a linear extension of the poset.
        That is, ``range(n)`` is the vertex set and a topological sort of
        the digraph.

    This should not be called directly, use Poset instead; all type
    checking happens there.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,2],1:[3],2:[3],3:[]}); H
        Hasse diagram of a poset containing 4 elements
        sage: TestSuite(H).run()
    """
    # Hasse diagrams are immutable. This temporary hack enables the
    # __hash__ method of DiGraph
    _immutable = True
def _repr_(self):
r"""
TESTS::
sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
sage: H = HasseDiagram({0:[1,2],1:[3],2:[3],3:[]})
sage: H._repr_()
'Hasse diagram of a poset containing 4 elements'
"""
return "Hasse diagram of a poset containing %s elements"%self.order()
def linear_extension(self):
    r"""
    Return the canonical linear extension ``[0, 1, ..., n-1]``.

    TESTS::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,2],1:[3],2:[3],3:[]})
        sage: H.linear_extension()
        [0, 1, 2, 3]
    """
    # Recall: we assume range(n) is a linear extension.
    return range(len(self))
def linear_extensions(self):
    r"""
    Return all linear extensions, i.e. all topological sorts of the
    underlying digraph.

    TESTS::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,2],1:[3],2:[3],3:[]})
        sage: H.linear_extensions()
        [[0, 1, 2, 3], [0, 2, 1, 3]]
    """
    return self.topological_sort_generator()
def is_linear_extension(self, lin_ext=None):
    r"""
    TESTS::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,2],1:[3],2:[3],3:[]})
        sage: H.is_linear_extension(range(4))
        True
        sage: H.is_linear_extension([3,2,1,0])
        False
    """
    # The canonical extension is valid iff every cover goes upward in
    # the natural order; otherwise compare positions in ``lin_ext``.
    if lin_ext is None or lin_ext == range(len(self)):
        return all(x < y for x, y in self.cover_relations_iterator())
    return all(lin_ext.index(x) < lin_ext.index(y)
               for x, y in self.cover_relations_iterator())
# Could this be achieved by adding some options to
# GenericGraph.plot, and just overriding graphics_array_defaults?
def plot(self, label_elements=True, element_labels=None,
         label_font_size=12, label_font_color='black', layout="acyclic", **kwds):
    """
    Returns a Graphics object corresponding to the Hasse diagram.

    EXAMPLES::

        sage: uc = [[2,3], [], [1], [1], [1], [3,4]]
        sage: elm_lbls = Permutations(3).list()
        sage: P = Poset(uc,elm_lbls)
        sage: H = P._hasse_diagram
        sage: levels = H.level_sets()
        sage: heights = dict([[i, levels[i]] for i in range(len(levels))])
        sage: type(H.plot(label_elements=True))
        <class 'sage.plot.graphics.Graphics'>

    ::

        sage: P = Posets.SymmetricGroupBruhatIntervalPoset([1,2,3,4], [3,4,1,2])
        sage: P._hasse_diagram.plot()
    """
    # Set element_labels to default to the vertex set.
    if element_labels is None:
        element_labels = range(self.num_verts())
    # Create the underlying graph.
    graph = DiGraph(self)
    graph.relabel(element_labels)
    return graph.plot(layout=layout, **kwds)
def show(self, label_elements=True, element_labels=None,
         label_font_size=12, label_font_color='black',
         vertex_size=300, vertex_colors=None, **kwds):
    """
    Shows the Graphics object corresponding to the Hasse diagram.
    Optionally, it is labelled.

    INPUT:

    - ``label_elements`` - whether to display element
      labels

    - ``element_labels`` - a dictionary of element
      labels

    EXAMPLES::

        sage: uc = [[2,3], [], [1], [1], [1], [3,4]]
        sage: elm_lbls = Permutations(3).list()
        sage: P = Poset(uc,elm_lbls)
        sage: H = P._hasse_diagram
        sage: levels = H.level_sets()
        sage: heights = dict([[i, levels[i]] for i in range(len(levels))])
        sage: H.show(label_elements=True)
    """
    # Delegate to plot() and display the resulting Graphics object.
    self.plot(label_elements=label_elements, element_labels=element_labels,
              label_font_size=label_font_size, label_font_color=label_font_color,
              vertex_size=vertex_size, vertex_colors=vertex_colors).show(**kwds)
def cover_relations_iterator(self):
    r"""
    TESTS::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[2,3], 1:[3,4], 2:[5], 3:[5], 4:[5]})
        sage: list(H.cover_relations_iterator())
        [(0, 2), (0, 3), (1, 3), (1, 4), (2, 5), (3, 5), (4, 5)]
    """
    # Each edge of the Hasse diagram is a cover; drop the edge label.
    for lower, upper, _label in self.edge_iterator():
        yield (lower, upper)
def cover_relations(self, element=None):
    r"""
    TESTS::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[2,3], 1:[3,4], 2:[5], 3:[5], 4:[5]})
        sage: H.cover_relations()
        [(0, 2), (0, 3), (1, 3), (1, 4), (2, 5), (3, 5), (4, 5)]
    """
    # ``element`` is accepted for backward compatibility but unused.
    return list(self.cover_relations_iterator())
def is_lequal(self, i, j):
    """
    Returns True if i is less than or equal to j in the poset, and
    False otherwise.

    .. note::

        If the :meth:`lequal_matrix` has been computed, then this method is
        redefined to use the cached matrix (see :meth:`_alternate_is_lequal`).

    TESTS::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[2], 1:[2], 2:[3], 3:[4], 4:[]})
        sage: x,y,z = 0, 1, 4
        sage: H.is_lequal(x,y)
        False
        sage: H.is_lequal(y,x)
        False
        sage: H.is_lequal(x,z)
        True
        sage: H.is_lequal(y,z)
        True
        sage: H.is_lequal(z,z)
        True
    """
    # Equal elements compare as <=; otherwise i <= j only if j is
    # reachable from i (vertices are numbered along a linear extension,
    # so i < j is a necessary condition).
    if i == j:
        return True
    return i < j and j in self.breadth_first_search(i)
def is_less_than(self, x, y):
    r"""
    Returns True if ``x`` is less than or equal to ``y`` in the
    poset, and False otherwise.

    TESTS::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[2], 1:[2], 2:[3], 3:[4], 4:[]})
        sage: x,y,z = 0, 1, 4
        sage: H.is_less_than(x,y)
        False
        sage: H.is_less_than(y,x)
        False
        sage: H.is_less_than(x,z)
        True
        sage: H.is_less_than(y,z)
        True
        sage: H.is_less_than(z,z)
        False
    """
    # Strictly less means comparable but not equal.
    return x != y and self.is_lequal(x, y)
def is_gequal(self, x, y):
    r"""
    Returns ``True`` if ``x`` is greater than or equal to ``y``, and
    ``False`` otherwise.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: Q = HasseDiagram({0:[2], 1:[2], 2:[3], 3:[4], 4:[]})
        sage: x,y,z = 0,1,4
        sage: Q.is_gequal(x,y)
        False
        sage: Q.is_gequal(y,x)
        False
        sage: Q.is_gequal(x,z)
        False
        sage: Q.is_gequal(z,x)
        True
        sage: Q.is_gequal(z,y)
        True
        sage: Q.is_gequal(z,z)
        True
    """
    # x >= y is exactly y <= x.
    return self.is_lequal(y, x)
def is_greater_than(self, x, y):
    """
    Returns ``True`` if ``x`` is greater than but not equal to
    ``y``, and ``False`` otherwise.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: Q = HasseDiagram({0:[2], 1:[2], 2:[3], 3:[4], 4:[]})
        sage: x,y,z = 0,1,4
        sage: Q.is_greater_than(x,y)
        False
        sage: Q.is_greater_than(y,x)
        False
        sage: Q.is_greater_than(x,z)
        False
        sage: Q.is_greater_than(z,x)
        True
        sage: Q.is_greater_than(z,y)
        True
        sage: Q.is_greater_than(z,z)
        False
    """
    # x > y is exactly y < x.
    return self.is_less_than(y, x)
def minimal_elements(self):
    """
    Returns a list of the minimal elements of the poset.

    EXAMPLES::

        sage: P = Poset({0:[3],1:[3],2:[3],3:[4],4:[]})
        sage: P(0) in P.minimal_elements()
        True
        sage: P(1) in P.minimal_elements()
        True
        sage: P(2) in P.minimal_elements()
        True
    """
    # Minimal elements are exactly the vertices with no lower covers,
    # i.e. with in-degree zero.
    return [v for v, d in self.in_degree(labels=True).items() if d == 0]
def maximal_elements(self):
    """
    Returns a list of the maximal elements of the poset.

    EXAMPLES::

        sage: P = Poset({0:[3],1:[3],2:[3],3:[4],4:[]})
        sage: P.maximal_elements()
        [4]
    """
    outdegs = self.out_degree(labels=True)
    # ``items`` instead of the Python-2-only ``iteritems`` so this also
    # runs under Python 3, matching minimal_elements; results identical.
    return [x for x, d in outdegs.items() if d == 0]
def bottom(self):
    """
    Returns the bottom element of the poset, if it exists.

    EXAMPLES::

        sage: P = Poset({0:[3],1:[3],2:[3],3:[4],4:[]})
        sage: P.bottom() is None
        True
        sage: Q = Poset({0:[1],1:[]})
        sage: Q.bottom()
        0
    """
    # A bottom exists exactly when the minimal element is unique.
    mins = self.minimal_elements()
    return mins[0] if len(mins) == 1 else None
def has_bottom(self):
    """
    Returns True if the poset has a unique minimal element.

    EXAMPLES::

        sage: P = Poset({0:[3],1:[3],2:[3],3:[4],4:[]})
        sage: P.has_bottom()
        False
        sage: Q = Poset({0:[1],1:[]})
        sage: Q.has_bottom()
        True
    """
    # bottom() already encodes uniqueness of the minimal element.
    return self.bottom() is not None
def top(self):
    """
    Returns the top element of the poset, if it exists.

    EXAMPLES::

        sage: P = Poset({0:[3],1:[3],2:[3],3:[4,5],4:[],5:[]})
        sage: P.top() is None
        True
        sage: Q = Poset({0:[1],1:[]})
        sage: Q.top()
        1
    """
    # A top exists exactly when the maximal element is unique.
    maxes = self.maximal_elements()
    return maxes[0] if len(maxes) == 1 else None
def has_top(self):
    """
    Returns ``True`` if the poset contains a unique maximal element, and
    ``False`` otherwise.

    EXAMPLES::

        sage: P = Poset({0:[3],1:[3],2:[3],3:[4,5],4:[],5:[]})
        sage: P.has_top()
        False
        sage: Q = Poset({0:[1],1:[]})
        sage: Q.has_top()
        True
    """
    # top() already encodes uniqueness of the maximal element.
    return self.top() is not None
def is_bounded(self):
    """
    Returns True if the poset contains a unique maximal element and a
    unique minimal element, and False otherwise.

    EXAMPLES::

        sage: P = Poset({0:[3],1:[3],2:[3],3:[4,5],4:[],5:[]})
        sage: P.is_bounded()
        False
        sage: Q = Poset({0:[1],1:[]})
        sage: Q.is_bounded()
        True
    """
    # Bounded means both a top and a bottom element exist.
    return self.has_top() and self.has_bottom()
def is_chain(self):
    """
    Returns True if the poset is totally ordered, and False otherwise.

    EXAMPLES::

        sage: L = Poset({0:[1],1:[2],2:[3],3:[4]})
        sage: L.is_chain()
        True

    ::

        sage: V = Poset({0:[1,2]})
        sage: V.is_chain()
        False
    """
    # The old implementation removed one 0 from the out-degree list and
    # tested whether a single distinct value remained; that wrongly
    # returned True for antichains (all out-degrees 0) and raised
    # ValueError on the empty poset.  The Hasse diagram is acyclic, so
    # it is a single chain iff every vertex has at most one upper and
    # at most one lower cover, with exactly one maximal and exactly one
    # minimal element.
    if self.num_verts() == 0:
        return True
    outdegs = self.out_degree()
    indegs = self.in_degree()
    return (outdegs.count(0) == 1 and all(d <= 1 for d in outdegs)
            and indegs.count(0) == 1 and all(d <= 1 for d in indegs))
def dual(self):
    """
    Returns a poset that is dual to the given poset.

    EXAMPLES::

        sage: P = Posets.IntegerPartitions(4)
        sage: H = P._hasse_diagram; H
        Hasse diagram of a poset containing 5 elements
        sage: H.dual()
        Hasse diagram of a poset containing 5 elements

    TESTS::

        sage: H = Posets.IntegerPartitions(4)._hasse_diagram
        sage: H.is_isomorphic( H.dual().dual() )
        True
        sage: H.is_isomorphic( H.dual() )
        False
    """
    # Reverse all covers, then renumber so that range(n) is again a
    # linear extension of the (dual) poset.
    H = HasseDiagram(self.reverse())
    H.relabel(perm=range(H.num_verts() - 1, -1, -1), inplace=True)
    return H
def interval(self, x, y):
    """
    Return the list of elements ``z`` with ``x <= z <= y``, ordered
    by the linear extension of ``self``.

    INPUT:

    - ``x`` -- any element of the poset
    - ``y`` -- any element of the poset

    EXAMPLES::

        sage: uc = [[1,3,2],[4],[4,5,6],[6],[7],[7],[7],[]]
        sage: dag = DiGraph(dict(zip(range(len(uc)),uc)))
        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram(dag)
        sage: I = set([2,5,6,4,7])
        sage: I == set(H.interval(2,7))
        True
    """
    # Vertices are assumed to be labelled 0..n-1 along a linear
    # extension, so every z in [x, y] has x <= z_label <= y_label.
    upper = min(y + 1, self.order())
    return [z for z in range(x, upper)
            if self.is_lequal(x, z) and self.is_lequal(z, y)]
def closed_interval(self, x, y):
    """
    Return the list of elements ``z`` with ``x <= z <= y``, ordered
    by the linear extension of ``self``.  Alias of :meth:`interval`.

    EXAMPLES::

        sage: uc = [[1,3,2],[4],[4,5,6],[6],[7],[7],[7],[]]
        sage: dag = DiGraph(dict(zip(range(len(uc)),uc)))
        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram(dag)
        sage: set([2,5,6,4,7]) == set(H.closed_interval(2,7))
        True
    """
    # "Closed interval" and "interval" mean the same thing here.
    return self.interval(x, y)
def open_interval(self, x, y):
    """
    Return the list of elements `z` such that `x < z < y`, ordered by
    the linear extension of ``self``.

    EXAMPLES::

        sage: uc = [[1,3,2],[4],[4,5,6],[6],[7],[7],[7],[]]
        sage: dag = DiGraph(dict(zip(range(len(uc)),uc)))
        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram(dag)
        sage: set([5,6,4]) == set(H.open_interval(2,7))
        True
        sage: H.open_interval(7,2)
        []
    """
    # Drop the endpoints of the closed interval; slicing an empty list
    # is still empty, which also covers the "x not <= y" case.
    return self.interval(x, y)[1:-1]
def rank_function(self):
    r"""
    Return a rank function of the poset, or ``None`` if none exists.

    A *rank function* of a poset `P` maps elements of `P` to integers
    so that `r(x) = r(y) + 1` whenever `x` covers `y`; it is normalized
    so its smallest value is `0` (per connected component).

    EXAMPLES::

        sage: P = Poset([[1,3,2],[4],[4,5,6],[6],[7],[7],[7],[]])
        sage: P.rank_function() is not None
        True
        sage: P = Poset(([1,2,3,4,5],[[1,2],[2,3],[3,4],[1,5],[5,4]]), facade = True)
        sage: P.rank_function() is not None
        False

    TESTS::

        sage: Q = Poset([[1,2],[4],[3],[4],[]])
        sage: Q.rank_function() is None
        True

    test for ticket :trac:`14006`::

        sage: H = Poset()._hasse_diagram
        sage: s = dumps(H)
        sage: f = H.rank_function()
        sage: s = dumps(H)
    """
    rank_dict = self._rank_dict
    if rank_dict is None:
        # The poset is not ranked.
        return None
    # The rank function is just a lookup into the cached rank table.
    return rank_dict.__getitem__
@lazy_attribute
def _rank_dict(self):
    r"""
    Build the rank dictionary of the poset if it exists, i.e. a
    dictionary ``d`` with ``d[v] = self.rank_function()(v)``; return
    ``None`` when the poset is not ranked.

    A *rank function* of a poset `P` maps elements of `P` to integers
    so that `r(x) = r(y) + 1` whenever `x` covers `y`; it is
    normalized so its smallest value is `0`, separately for each
    connected component.

    EXAMPLES::

        sage: H = Poset()._hasse_diagram
        sage: H._rank_dict
        {}
        sage: H = Poset([[1,3,2],[4],[4,5,6],[6],[7],[7],[7],[]])._hasse_diagram
        sage: H._rank_dict
        {0: 0, 1: 1, 2: 1, 3: 2, 4: 2, 5: 1, 6: 2, 7: 3}
        sage: H = Poset(([1,2,3,4,5],[[1,2],[2,3],[3,4],[1,5],[5,4]]))._hasse_diagram
        sage: H._rank_dict is None
        True
    """
    rank_fcn = {}  # rank_fcn maps each vertex to its (tentative) rank.
    not_found = set(self.vertices())  # vertices not yet assigned to a component
    while not_found:
        # Seed a new connected component from an arbitrary vertex.
        y = not_found.pop()
        rank_fcn[y] = ZZ.zero() # We set some vertex to have rank 0
        component = set([y])
        queue = set([y])
        # Breadth-first propagation of ranks along cover relations,
        # in both directions (+1 upward, -1 downward).
        while queue: # look at the neighbors of y and set the ranks;
                     # then look at the neighbors of the neighbors ...
            y = queue.pop()
            for x in self.neighbors_out(y):
                if x not in rank_fcn:
                    rank_fcn[x] = rank_fcn[y] + 1
                    queue.add(x)
                    component.add(x)
            for x in self.neighbors_in(y):
                if x not in rank_fcn:
                    rank_fcn[x] = rank_fcn[y] - 1
                    queue.add(x)
                    component.add(x)
                elif rank_fcn[x] != rank_fcn[y] - 1:
                    # Inconsistent rank found: every edge is eventually
                    # checked here (as an in-edge of its upper end), so
                    # this detects all non-ranked posets.
                    return None
        # Normalize the ranks of vertices in the connected component
        # so that smallest is 0:
        m = min(rank_fcn[j] for j in component)
        for j in component:
            rank_fcn[j] -= m
        not_found.difference_update(component)
    #now, all ranks are set.
    return rank_fcn
def rank(self, element=None):
    r"""
    Return the rank of ``element``, or of the whole poset when
    ``element`` is ``None`` (the length of its longest chain).

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,3,2],1:[4],2:[4,5,6],3:[6],4:[7],5:[7],6:[7],7:[]})
        sage: H.rank(5)
        2
        sage: H.rank()
        3
        sage: Q = HasseDiagram({0:[1,2],1:[3],2:[],3:[]})
        sage: Q.rank()
        2
        sage: Q.rank(1)
        1
    """
    if element is not None:
        return self.rank_function()(element)
    # The rank of the poset equals the number of level sets minus one.
    return len(self.level_sets()) - 1
def is_ranked(self):
    r"""
    Return ``True`` if the poset is ranked, ``False`` otherwise.

    A poset is *ranked* if it admits a rank function; see
    :meth:`~rank_function` and :meth:`~is_graded`.

    EXAMPLES::

        sage: P = Poset([[1],[2],[3],[4],[]])
        sage: P.is_ranked()
        True
        sage: Q = Poset([[1,5],[2,6],[3],[4],[],[6,3],[4]])
        sage: Q.is_ranked()
        False
    """
    # rank_function() returns either None or a bound method (which is
    # always truthy), so a None-test matches the original bool() test.
    return self.rank_function() is not None
def is_graded(self):
    r"""
    Return ``True`` if the poset is graded, ``False`` otherwise.

    Here "graded" is used synonymously with "ranked": the poset admits
    a rank function; see :meth:`~rank_function` and :meth:`~is_ranked`.

    EXAMPLES::

        sage: P = Poset([[1],[2],[3],[4],[]])
        sage: P.is_graded()
        True
        sage: Q = Poset([[1,5],[2,6],[3],[4],[],[6,3],[4]])
        sage: Q.is_graded()
        False
    """
    # Delegates entirely to is_ranked().
    return self.is_ranked()
def covers(self, x, y):
    """
    Return ``True`` if ``y`` covers ``x``, ``False`` otherwise.

    EXAMPLES::

        sage: Q = Poset([[1,5],[2,6],[3],[4],[],[6,3],[4]])
        sage: Q.covers(Q(1),Q(6))
        True
        sage: Q.covers(Q(1),Q(4))
        False
    """
    # In a Hasse diagram, cover relations are exactly the edges.
    return self.has_edge(x, y)
def upper_covers_iterator(self, element):
    r"""
    Iterate over the elements that cover ``element``.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,3,2],1:[4],2:[4,5,6],3:[6],4:[7],5:[7],6:[7],7:[]})
        sage: list(H.upper_covers_iterator(0))
        [1, 2, 3]
        sage: list(H.upper_covers_iterator(7))
        []
    """
    # Upper covers are the out-neighbors in the Hasse diagram.
    for successor in self.neighbor_out_iterator(element):
        yield successor
def lower_covers_iterator(self, element):
    r"""
    Iterate over the elements that are covered by ``element``.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,3,2],1:[4],2:[4,5,6],3:[6],4:[7],5:[7],6:[7],7:[]})
        sage: list(H.lower_covers_iterator(0))
        []
        sage: list(H.lower_covers_iterator(4))
        [1, 2]
    """
    # Lower covers are the in-neighbors in the Hasse diagram.
    for predecessor in self.neighbor_in_iterator(element):
        yield predecessor
def cardinality(self):
    r"""
    Return the number of elements in the poset.

    EXAMPLES::

        sage: Poset([[1,2,3],[4],[4],[4],[]]).cardinality()
        5

    TESTS:

    This method was once named ``size()``, which would have shadowed
    the same-named digraph method; trac #8735 renamed it.  ``size()``
    on a Hasse diagram therefore still counts edges::

        sage: L = Posets.BooleanLattice(5)
        sage: H = L.hasse_diagram()
        sage: H.size() == H.num_edges()
        True
    """
    # The number of poset elements equals the number of vertices.
    return self.order()
def mobius_function(self,i,j): # dumb algorithm
    r"""
    Return the value of the M\"obius function of the poset on the
    elements ``i`` and ``j``.

    Values are memoized in ``self._mobius_function_values`` (created
    lazily on first use) and computed by the recursive definition
    `\mu(i, j) = -\sum_{i \le k < j} \mu(i, k)`.

    EXAMPLES::

        sage: P = Poset([[1,2,3],[4],[4],[4],[]])
        sage: H = P._hasse_diagram
        sage: H.mobius_function(0,4)
        2
    """
    try:
        return self._mobius_function_values[(i,j)]
    except AttributeError:
        # First call ever: create the memo table and retry.
        self._mobius_function_values = {}
        return self.mobius_function(i,j)
    except KeyError:
        # Value not memoized yet: compute it from the definition.
        if i == j:
            self._mobius_function_values[(i,j)] = 1
        elif i > j:
            # Labels follow a linear extension, so i > j implies
            # i is not <= j in the poset.
            self._mobius_function_values[(i,j)] = 0
        else:
            ci = self.closed_interval(i,j)
            if len(ci) == 0:
                self._mobius_function_values[(i,j)] = 0
            else:
                # mu(i, j) = -sum of mu(i, k) over k in [i, j).
                self._mobius_function_values[(i,j)] = \
                    -sum([self.mobius_function(i,k) for k in ci[:-1]])
        return self._mobius_function_values[(i,j)]
def mobius_function_matrix(self):
    r"""
    Return the matrix of the Mobius function of this poset.

    This returns the sparse matrix over `\ZZ` whose ``(x, y)`` entry
    is the value of the M\"obius function of ``self`` evaluated on
    ``x`` and ``y``, and redefines :meth:`mobius_function` to use it.

    .. NOTE::

        The result is cached in :meth:`_mobius_function_matrix`.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,3,2],1:[4],2:[4,5,6],3:[6],4:[7],5:[7],6:[7],7:[]})
        sage: H.mobius_function_matrix()
        [ 1 -1 -1 -1  1  0  1  0]
        [ 0  1  0  0 -1  0  0  0]
        [ 0  0  1  0 -1 -1 -1  2]
        [ 0  0  0  1  0  0 -1  0]
        [ 0  0  0  0  1  0  0 -1]
        [ 0  0  0  0  0  1  0 -1]
        [ 0  0  0  0  0  0  1 -1]
        [ 0  0  0  0  0  0  0  1]

    TESTS::

        sage: H.mobius_function_matrix().is_immutable()
        True
        sage: hasattr(H,'_mobius_function_matrix')
        True
        sage: H.mobius_function == H._mobius_function_from_matrix
        True
    """
    if not hasattr(self,'_mobius_function_matrix'):
        # The Mobius matrix is the inverse of the 0/1 "less or equal"
        # matrix, coerced back into ZZ (the inverse is computed over QQ).
        self._mobius_function_matrix = self.lequal_matrix().inverse().change_ring(ZZ)
        self._mobius_function_matrix.set_immutable()
        # From now on mobius_function() is a plain matrix lookup
        # instead of the recursive computation.
        self.mobius_function = self._mobius_function_from_matrix
    return self._mobius_function_matrix
# Redefine self.mobius_function
def _mobius_function_from_matrix(self, i,j):
r"""
Returns the value of the M\"obius function of the poset
on the elements ``i`` and ``j``.
EXAMPLES::
sage: P = Poset([[1,2,3],[4],[4],[4],[]])
sage: H = P._hasse_diagram
sage: H.mobius_function(0,4) # indirect doctest
2
sage: for u,v in P.cover_relations_iterator():
... if P.mobius_function(u,v) != -1:
... print "Bug in mobius_function!"
This uses ``self._mobius_function_matrix``, as computed by
:meth:`mobius_function_matrix`.
"""
return self._mobius_function_matrix[i,j]
@cached_method
def coxeter_transformation(self):
    r"""
    Return the matrix of the Auslander-Reiten translation acting on
    the Grothendieck group of the derived category of modules on the
    poset, in the basis of simple modules.

    EXAMPLES::

        sage: M = Posets.PentagonPoset()._hasse_diagram.coxeter_transformation(); M
        [ 0  0  0  0 -1]
        [ 0  0  0  1 -1]
        [ 0  1  0  0 -1]
        [-1  1  1  0 -1]
        [-1  1  0  1 -1]

    TESTS::

        sage: M = Posets.PentagonPoset()._hasse_diagram.coxeter_transformation()
        sage: M**8 == 1
        True
    """
    leq = self.lequal_matrix()
    mobius_t = self.mobius_function_matrix().transpose()
    # Unary minus binds after '*', so this is -(leq * mobius_t),
    # exactly as in the original one-liner.
    return -(leq * mobius_t)
def order_filter(self, elements):
    """
    Return the order filter generated by a list of elements.

    `I` is an order filter if, for any `x` in `I` and `y` with
    `y \ge x`, `y` is also in `I`.

    EXAMPLES::

        sage: H = Posets.BooleanLattice(4)._hasse_diagram
        sage: H.order_filter([3,8])
        [3, 7, 8, 9, 10, 11, 12, 13, 14, 15]
    """
    reachable = []
    for start in elements:
        # Everything reachable along upward cover relations belongs
        # to the filter generated by ``start``.
        reachable.extend(self.breadth_first_search(start))
    return uniq(reachable)
def principal_order_filter(self, i):
    """
    Return the order filter generated by the single element ``i``.

    EXAMPLES::

        sage: H = Posets.BooleanLattice(4)._hasse_diagram
        sage: H.principal_order_filter(2)
        [2, 3, 6, 7, 10, 11, 14, 15]
    """
    # A principal filter is the filter generated by a singleton.
    return self.order_filter([i])
def order_ideal(self, elements):
    """
    Return the order ideal generated by a list of elements.

    `I` is an order ideal if, for any `x` in `I` and `y` with
    `y \le x`, `y` is also in `I`.

    EXAMPLES::

        sage: H = Posets.BooleanLattice(4)._hasse_diagram
        sage: H.order_ideal([7,10])
        [0, 1, 2, 3, 4, 5, 6, 7, 8, 10]
    """
    # Search downward by reversing a copy of the Hasse diagram.
    reversed_graph = copy(self).reverse()
    reachable = []
    for start in elements:
        reachable.extend(reversed_graph.breadth_first_search(start))
    return uniq(reachable)
def principal_order_ideal(self, i):
    """
    Return the order ideal generated by the single element `i`.

    EXAMPLES::

        sage: H = Posets.BooleanLattice(4)._hasse_diagram
        sage: H.principal_order_ideal(6)
        [0, 2, 4, 6]
    """
    # A principal ideal is the ideal generated by a singleton.
    return self.order_ideal([i])
@lazy_attribute
def _leq_matrix(self):
    r"""
    Compute a sparse matrix whose ``(i,j)`` entry is 1 if ``i`` is
    less than or equal to ``j`` in the poset and 0 otherwise; as a
    side effect, rebind :meth:`is_lequal` to the fast matrix-backed
    :meth:`_alternate_is_lequal`.

    EXAMPLES::

        sage: P = Poset([[1,3,2],[4],[4,5,6],[6],[7],[7],[7],[]])
        sage: H = P._hasse_diagram
        sage: H._leq_matrix
        [1 1 1 1 1 1 1 1]
        [0 1 0 1 0 0 0 1]
        [0 0 1 1 1 0 1 1]
        [0 0 0 1 0 0 0 1]
        [0 0 0 0 1 0 0 1]
        [0 0 0 0 0 1 1 1]
        [0 0 0 0 0 0 1 1]
        [0 0 0 0 0 0 0 1]
    """
    # Create the matrix: BFS from i along upward covers visits exactly
    # the elements v with i <= v (including i itself).
    n = self.order()
    D = {}
    for i in range(n):
        for v in self.breadth_first_search(i):
            D[(i,v)] = 1
    M = matrix(ZZ, n, n, D, sparse=True)
    M.set_immutable()
    # Redefine self.is_lequal to use this cached matrix from now on.
    self.is_lequal = self._alternate_is_lequal
    # Return the matrix
    return M
def lequal_matrix(self):
    """
    Return the matrix whose ``(i,j)`` entry is 1 if ``i`` is less than
    or equal to ``j`` in the poset, and 0 otherwise.  Accessing it
    also rebinds ``is_lequal`` to the matrix-backed implementation.

    EXAMPLES::

        sage: P = Poset([[1,3,2],[4],[4,5,6],[6],[7],[7],[7],[]])
        sage: H = P._hasse_diagram
        sage: H.lequal_matrix()[0, 7]
        1

    TESTS::

        sage: H.lequal_matrix().is_immutable()
        True
    """
    # The heavy lifting (and the is_lequal rebinding) happens in the
    # lazy attribute _leq_matrix.
    return self._leq_matrix
def _alternate_is_lequal(self,i,j):
r"""
Returns ``True`` if ``i`` is less than or equal to ``j`` in
``self``, and ``False`` otherwise.
.. NOTE::
If the :meth:`lequal_matrix` has been computed, then
:meth:`is_lequal` is redefined to use the cached matrix.
EXAMPLES::
sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
sage: H = HasseDiagram({0:[2], 1:[2], 2:[3], 3:[4], 4:[]})
sage: H.lequal_matrix()
[1 0 1 1 1]
[0 1 1 1 1]
[0 0 1 1 1]
[0 0 0 1 1]
[0 0 0 0 1]
sage: x,y,z = 0, 1, 4
sage: H._alternate_is_lequal(x,y)
False
sage: H._alternate_is_lequal(y,x)
False
sage: H._alternate_is_lequal(x,z)
True
sage: H._alternate_is_lequal(y,z)
True
sage: H._alternate_is_lequal(z,z)
True
"""
return bool(self._leq_matrix[i,j])
@lazy_attribute
def _meet(self):
    r"""
    Compute the matrix of meets of ``self``: the ``(x,y)`` entry is
    the meet of ``x`` and ``y``, following the Freese-Jezek-Nation
    algorithm (see :meth:`meet_matrix`).

    Raises ``ValueError`` when the poset has no bottom element or when
    some pair has no meet (i.e. ``self`` is not a meet-semilattice).

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,3,2],1:[4],2:[4,5,6],3:[6],4:[7],5:[7],6:[7],7:[]})
        sage: H._meet
        [0 0 0 0 0 0 0 0]
        [0 1 0 0 1 0 0 1]
        [0 0 2 0 2 2 2 2]
        [0 0 0 3 0 0 3 3]
        [0 1 2 0 4 2 2 4]
        [0 0 2 0 2 5 2 5]
        [0 0 2 3 2 2 6 6]
        [0 1 2 3 4 5 6 7]

    TESTS::

        sage: H = HasseDiagram({0:[2,3],1:[2,3]})
        sage: H.meet_matrix()
        Traceback (most recent call last):
        ...
        ValueError: Not a meet-semilattice: no bottom element.
        sage: H = HasseDiagram({0:[1,2],1:[3,4],2:[3,4]})
        sage: H.meet_matrix()
        Traceback (most recent call last):
        ...
        ValueError: No meet for x=...
    """
    n = self.cardinality()
    meet = [[0 for x in range(n)] for x in range(n)]
    # Work on a mutable copy of the <= matrix; force the diagonal
    # (it is already 1, but this keeps the invariant explicit).
    le = copy(self.lequal_matrix())
    for i in range(n): le[i,i] = 1
    # Vertex 0 must be the bottom element (labels follow a linear
    # extension), otherwise no meet-semilattice is possible.
    if not all([le[0,x]==1 for x in range(n)]):
        raise ValueError("Not a meet-semilattice: no bottom element.")
    # lc[x] lists the lower covers of x (sources of incoming edges).
    lc = [[y[0] for y in self.incoming_edges([x])] for x in range(n)]
    # Process elements in linear-extension order: the meet of y < x
    # with x is the largest of {meet(y, z) : z lower cover of x}.
    for x in range(n): # x=x_k
        meet[x][x] = x
        for y in range(x):
            T = []
            for z in lc[x]:
                T.append(meet[y][z]) # T = {x_i \wedge z : z>-x_k}
            # Pick the numerically largest candidate ...
            q = T[0]
            for z in T:
                if z>q: q = z
            # ... and verify it dominates every candidate; otherwise
            # x and y have no meet.
            for z in T:
                if not le[z,q]:
                    raise ValueError("No meet for x=%s y=%s"%(x,y))
            meet[x][y] = q
            meet[y][x] = q
    return matrix(ZZ,meet)
def meet_matrix(self):
    r"""
    Return the matrix of meets of ``self``: the ``(x,y)`` entry is
    the meet of ``x`` and ``y``.

    The computation is modelled after the Freese-Jezek-Nation
    algorithm (p217); see also page 140 of [Gec81]_.

    .. NOTE::

        The matrix is computed and cached by the lazy attribute
        ``_meet``; delete that attribute to force a recomputation.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,3,2],1:[4],2:[4,5,6],3:[6],4:[7],5:[7],6:[7],7:[]})
        sage: H.meet_matrix()[7, 5]
        5

    REFERENCE:

    .. [Gec81] Fundamentals of Computation Theory
        Proceedings of the 1981 International Fct-Conference
        Szeged, Hungaria, August 24-28, vol 117
        Springer-Verlag, 1981

    TESTS::

        sage: H = HasseDiagram({0:[2,3],1:[2,3]})
        sage: H.meet_matrix()
        Traceback (most recent call last):
        ...
        ValueError: Not a meet-semilattice: no bottom element.
    """
    # All work (and error raising) happens in the lazy attribute.
    return self._meet
def is_meet_semilattice(self):
    r"""
    Return ``True`` if ``self`` has a meet operation, ``False``
    otherwise.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,2],1:[3],2:[3],3:[]})
        sage: H.is_meet_semilattice()
        True
        sage: H = HasseDiagram({0:[2,3],1:[2,3]})
        sage: H.is_meet_semilattice()
        False
    """
    # meet_matrix() raises ValueError exactly when no meet exists.
    try:
        self.meet_matrix()
    except ValueError:
        return False
    return True
@lazy_attribute
def _join(self):
    r"""
    Compute a matrix whose ``(x,y)`` entry is the join of ``x`` and
    ``y`` in ``self``, by running the meet algorithm on the dual
    (indices are flipped via ``n-1-i``).

    Raises ``ValueError`` when the poset has no top element or when
    some pair has no join (i.e. ``self`` is not a join-semilattice).

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,3,2],1:[4],2:[4,5,6],3:[6],4:[7],5:[7],6:[7],7:[]})
        sage: H.join_matrix() # indirect doctest
        [0 1 2 3 4 5 6 7]
        [1 1 4 7 4 7 7 7]
        [2 4 2 6 4 5 6 7]
        [3 7 6 3 7 7 6 7]
        [4 4 4 7 4 7 7 7]
        [5 7 5 7 7 5 7 7]
        [6 7 6 6 7 7 6 7]
        [7 7 7 7 7 7 7 7]

    TESTS::

        sage: H = HasseDiagram({0:[2,3],1:[2,3]})
        sage: H.join_matrix()
        Traceback (most recent call last):
        ...
        ValueError: Not a join-semilattice: no top element.
        sage: H = HasseDiagram({0:[2,3],1:[2,3],2:[4],3:[4]})
        sage: H.join_matrix()
        Traceback (most recent call last):
        ...
        ValueError: No join for x=...
    """
    n = self.cardinality()
    join = [[0 for x in range(n)] for x in range(n)]
    # Mutable copy of the <= matrix with an explicit diagonal.
    le = copy(self.lequal_matrix())
    for i in range(n): le[i,i] = 1
    # Vertex n-1 must be the top element, otherwise no join-semilattice.
    if not all([le[x,n-1]==1 for x in range(n)]):
        raise ValueError("Not a join-semilattice: no top element.")
    # uc[x] lists the upper covers of the dual element of x; the whole
    # computation runs in the dual poset (index i maps to n-1-i).
    uc = [sorted([n-1-y[1] for y in self.outgoing_edges([x])]) for
         x in reversed(range(n))]
    for x in range(n): # x=x_k
        join[x][x] = x
        for y in range(x):
            T = []
            for z in uc[x]:
                T.append(join[y][z]) # T = {x_i \vee z : z>-x_k}
            # Pick the numerically largest candidate in the dual order ...
            q = T[0]
            for z in T:
                if z>q: q = z
            # ... and verify it dominates every candidate (translated
            # back to the original order); otherwise no join exists.
            for z in T:
                if not le[n-1-q,n-1-z]:
                    raise ValueError("No join for x=%s y=%s"%(x,y))
            join[x][y] = q
            join[y][x] = q
    # Translate the dual meet table back into a join table.
    return matrix(ZZ,[[n-1-join[n-1-x][n-1-y] for y in range(n)] for x in range(n)])
def join_matrix(self):
    r"""
    Return the matrix of joins of ``self``: the ``(x,y)`` entry is
    the join of ``x`` and ``y``.

    The computation is modelled after the Freese-Jezek-Nation
    algorithm (p217); see also page 140 of [Gec81]_.

    .. note::

        The matrix is computed and cached by the lazy attribute
        ``_join``; delete that attribute to force a recomputation.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,3,2],1:[4],2:[4,5,6],3:[6],4:[7],5:[7],6:[7],7:[]})
        sage: H.join_matrix()[1, 2]
        4

    TESTS::

        sage: H = HasseDiagram({0:[2,3],1:[2,3]})
        sage: H.join_matrix()
        Traceback (most recent call last):
        ...
        ValueError: Not a join-semilattice: no top element.
    """
    # All work (and error raising) happens in the lazy attribute.
    return self._join
def is_join_semilattice(self):
    r"""
    Return ``True`` if ``self`` has a join operation, ``False``
    otherwise.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,3,2],1:[4],2:[4,5,6],3:[6],4:[7],5:[7],6:[7],7:[]})
        sage: H.is_join_semilattice()
        True
        sage: H = HasseDiagram({0:[2,3],1:[2,3]})
        sage: H.is_join_semilattice()
        False
    """
    # join_matrix() raises ValueError exactly when no join exists.
    try:
        self.join_matrix()
    except ValueError:
        return False
    return True
def is_distributive_lattice(self): # still a dumb algorithm...
    r"""
    Return ``True`` if ``self`` is the Hasse diagram of a
    distributive lattice, ``False`` otherwise.

    Checks the identity `x \wedge (y \vee z) = (x \wedge y) \vee
    (x \wedge z)` by brute force over all triples.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,2],1:[3],2:[3]})
        sage: H.is_distributive_lattice()
        True
        sage: H = HasseDiagram({0:[1,2,3],1:[4],2:[4],3:[4]})
        sage: H.is_distributive_lattice()
        False
    """
    # A poset that is not even a lattice cannot be distributive.
    try:
        jn = self.join_matrix()
        mt = self.meet_matrix()
    except ValueError:
        return False
    n = jn.ncols()
    rng = range(n)
    return all(mt[x][jn[y][z]] == jn[mt[x][y]][mt[x][z]]
               for x in rng for y in rng for z in rng)
def is_complemented_lattice(self):
    r"""
    Return ``True`` if ``self`` is the Hasse diagram of a
    complemented lattice, ``False`` otherwise.

    An element ``y`` is a complement of ``x`` when ``x`` meet ``y``
    is the bottom and ``x`` join ``y`` is the top element.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,2,3],1:[4],2:[4],3:[4]})
        sage: H.is_complemented_lattice()
        True
        sage: H = HasseDiagram({0:[1,2],1:[3],2:[3],3:[4]})
        sage: H.is_complemented_lattice()
        False
    """
    # Delegate the complement search to complements() instead of
    # duplicating its double loop here with a -1 sentinel: the lattice
    # is complemented iff every element has some complement (None
    # marks "no complement" in complements()).
    try:
        comps = self.complements()
    except ValueError:
        # Not a (meet- and join-) lattice at all.
        return False
    return all(c is not None for c in comps)
def complements(self):
    r"""
    Return a list ``l`` such that ``l[i]`` is a complement of ``i``
    in ``self`` (or ``None`` when ``i`` has no complement).

    A complement of ``x`` is an element ``y`` such that the meet of
    ``x`` and ``y`` is the bottom element of ``self`` and the join of
    ``x`` and ``y`` is the top element of ``self``.

    EXAMPLES::

        sage: from sage.combinat.posets.hasse_diagram import HasseDiagram
        sage: H = HasseDiagram({0:[1,2,3],1:[4],2:[4],3:[4]})
        sage: H.complements()
        [4, 3, 3, 2, 0]
        sage: H = HasseDiagram({0:[1,2],1:[3],2:[3],3:[4]})
        sage: H.complements()
        [4, None, None, None, 0]
    """
    jn = self.join_matrix()
    mt = self.meet_matrix()
    n = self.cardinality()
    result = [None] * n
    # Scan unordered pairs (a, b) with a <= b; when several complements
    # exist, the last pair found wins, as in the original loop order.
    for a in range(n):
        for b in range(a, n):
            if jn[a][b] == n - 1 and mt[a][b] == 0:
                result[a] = b
                result[b] = a
    return result
def antichains_iterator(self):
    r"""
    Return an iterator over the antichains of the poset.

    .. note::

        The algorithm is based on Freese-Jezek-Nation p. 226.
        It does a depth first search through the set of all
        antichains organized in a prefix tree.

    EXAMPLES::

        sage: P = posets.PentagonPoset()
        sage: H = P._hasse_diagram
        sage: list(H.antichains_iterator())
        [[], [4], [3], [2], [1], [1, 3], [1, 2], [0]]
        sage: H = HasseDiagram({0:[1],1:[2],2:[3],3:[4]})
        sage: list(H.antichains_iterator())
        [[], [4], [3], [2], [1], [0]]

    TESTS::

        sage: H = Poset()._hasse_diagram
        sage: list(H.antichains_iterator())
        [[]]
    """
    # Complexity note:
    # antichains_queues never grows longer than self.cardinality().
    # Indeed, if a appears before b in antichains_queues, then
    # the largest element of a is strictly smaller than that of b.
    #
    # NOTE(review): the initial queue relies on ``range`` supporting
    # ``pop`` (i.e. Python 2 ``range`` returning a list); under
    # Python 3 this would need ``list(range(...))`` -- confirm the
    # target interpreter version.
    antichains_queues = [([], range(self.cardinality()-1,-1,-1))]
    leq = self.lequal_matrix()
    while antichains_queues:
        (antichain, queue) = antichains_queues.pop()
        # Invariant:
        # - the elements of antichain are independent
        # - the elements of queue are independent from those of antichain
        yield antichain
        while queue:
            # Extend the current antichain by the largest queued element
            # and keep only queue elements incomparable with it.
            x = queue.pop()
            new_antichain = antichain + [x]
            new_queue = [t for t in queue if not (leq[t,x] or leq[x,t])]
            antichains_queues.append((new_antichain, new_queue))
def are_incomparable(self, i, j):
    """
    Return whether ``i`` and ``j`` are incomparable in the poset.

    INPUT:

    - ``i``, ``j`` -- vertices of this Hasse diagram

    EXAMPLES::

        sage: P = posets.PentagonPoset()
        sage: H = P._hasse_diagram
        sage: H.are_incomparable(1,2)
        True
    """
    mat = self._leq_matrix
    # De Morgan: incomparable means neither i <= j nor j <= i.
    return not (mat[i, j] or mat[j, i])
def are_comparable(self, i, j):
    """
    Return whether ``i`` and ``j`` are comparable in the poset.

    INPUT:

    - ``i``, ``j`` -- vertices of this Hasse diagram

    EXAMPLES::

        sage: P = posets.PentagonPoset()
        sage: H = P._hasse_diagram
        sage: H.are_comparable(1,2)
        False
    """
    mat = self._leq_matrix
    # Comparable means i <= j or j <= i holds.
    return bool(mat[i, j] or mat[j, i])
def antichains(self, element_class = list):
    """
    Return all antichains of ``self``, organized as a prefix tree.

    INPUT:

    - ``element_class`` -- (default: list) an iterable type

    EXAMPLES::

        sage: P = posets.PentagonPoset()
        sage: H = P._hasse_diagram
        sage: A = H.antichains()
        sage: list(A)
        [[], [0], [1], [1, 2], [1, 3], [2], [3], [4]]
        sage: A.cardinality()
        8
        sage: [1,3] in A
        True
        sage: [1,4] in A
        False

    TESTS::

        sage: A = Poset()._hasse_diagram.antichains()
        sage: list(A)
        [[]]
        sage: TestSuite(A).run()
    """
    from sage.combinat.subsets_pairwise import PairwiseCompatibleSubsets
    # An antichain is a set of pairwise-incomparable vertices.
    return PairwiseCompatibleSubsets(
        self.vertices(),
        self.are_incomparable,
        element_class=element_class)
def chains(self, element_class = list):
    """
    Return all chains of ``self``, organized as a prefix tree.

    INPUT:

    - ``element_class`` -- (default: list) an iterable type

    EXAMPLES::

        sage: P = posets.PentagonPoset()
        sage: H = P._hasse_diagram
        sage: A = H.chains()
        sage: A.cardinality()
        20
        sage: [1,3] in A
        False
        sage: [1,4] in A
        True

    .. seealso:: :meth:`antichains`
    """
    from sage.combinat.subsets_pairwise import PairwiseCompatibleSubsets
    # A chain is a set of pairwise-comparable vertices.
    return PairwiseCompatibleSubsets(
        self.vertices(),
        self.are_comparable,
        element_class=element_class)
| StarcoderdataPython |
104827 | #!/usr/bin/env python
import os
import re
from collections import OrderedDict
from functools import reduce
import pandas as pd
from unidecode import unidecode
def get_place_names(data_dir):
    """Collect unique place names from all ``.xlsx`` files in *data_dir*.

    Reads the 'Naissances', 'Mariages' and 'Décès' sheets of every
    workbook, extracts the place columns, de-duplicates them (accent-
    and case-insensitively via reduce_places) and writes the sorted
    result to ``data/interim/places.txt``, one place per line.
    """
    places = []
    for fn in os.listdir(data_dir):
        if not fn.endswith('.xlsx'):
            continue
        print('.', fn)
        fn = os.path.join(data_dir, fn)
        # BUG FIX: the original guarded each sheet with ``if df.any():``,
        # which raises AttributeError when get_sheet() returns None
        # (missing sheet) and a "truth value is ambiguous" ValueError
        # otherwise, because DataFrame.any() returns a Series.  The
        # intended check is simply "did the sheet load?".
        df = get_sheet(fn, 'Naissances')
        if df is not None:
            places.extend(get_places(df, ['father\'s ', 'mother\'s ']))
        df = get_sheet(fn, 'Mariages')
        if df is not None:
            places.extend(get_places(df, ['groom\'s ', 'bride\'s ']))
        df = get_sheet(fn, 'Décès')
        if df is not None:
            places.extend(get_places(df, ['']))
    # De-duplicate exact matches first, then fold near-duplicates
    # (accents/case/punctuation) and sort by normalized key.
    places = list(set(places))
    places_dict = reduce(reduce_places, places, dict())
    places_dict = OrderedDict(sorted(places_dict.items()))
    places = list(places_dict.values())
    with open('data/interim/places.txt', 'w') as f:
        f.writelines('\n'.join(places))
        # No explicit close: the with-statement closes the file.
def get_sheet(fn, sheet_name):
    """Load one sheet of an Excel workbook as a DataFrame, or None.

    Column names are lower-cased and rows that are entirely empty are
    dropped.  Returns None when the sheet cannot be read (pandas
    raises ValueError for a missing sheet name).
    """
    try:
        sheet = pd.read_excel(fn, sheet_name=sheet_name)
        sheet.columns = map(str.lower, sheet.columns)
        sheet.dropna(how='all', inplace=True)
        return sheet
    except ValueError:
        return None
def get_places(df, keys):
    """Extract place names from the domicile/birthplace columns of *df*.

    *keys* are role prefixes (e.g. "father's ") prepended to each
    column name.  Returns a flat list of merged place strings.
    """
    # For each role, the relevant column groups: current domicile,
    # birthplace (locality + region), previous domicile (likewise).
    column_groups = []
    for key in keys:
        column_groups.append(['{}domicile'.format(key)])
        column_groups.append([
            '{}birthplace (locality)'.format(key),
            '{}birthplace (region or département)'.format(key)])
        column_groups.append([
            '{}previous domicile (locality)'.format(key),
            '{}previous domicile (region or département)'.format(key)])
    places = []
    for columns in column_groups:
        places.extend(get_unique(df, columns))
    return places
def get_unique(df, columns):
    """Return the unique merged place strings found in *columns* of *df*.

    Returns an empty list when any requested column is missing.
    """
    # Bail out when the frame does not contain all requested columns.
    if not set(columns).issubset(df.columns):
        return []
    subset = df[columns].dropna(how='all').astype('str')
    # Each row becomes one "locality, region"-style string.
    merged = subset.apply(merge_place_names, axis=1)
    return merged.unique().tolist()
def merge_place_names(names):
    """Join the non-empty, non-'nan' parts of *names* with ', '.

    *names* is either a single string (returned stripped) or an
    iterable of strings, e.g. a DataFrame row whose NaNs were
    stringified to 'nan' by astype('str').
    """
    if isinstance(names, str):
        return names.strip()
    stripped = (part.strip() for part in names)
    return ', '.join(part for part in stripped if part and part != 'nan')
def reduce_places(acc, item):
    """Reducer that folds *item* into *acc* keyed by a normalized form.

    The key is the ASCII-transliterated, lower-cased name with runs of
    non-word characters collapsed to '_' and outer underscores
    stripped; the first spelling seen for a key is kept.
    """
    normalized = re.sub(r'\W+', '_', unidecode(item).lower())
    normalized = re.sub(r'^_|_$', '', normalized)
    # setdefault keeps the first spelling, exactly like the original
    # "if key not in acc" guard.
    acc.setdefault(normalized, item)
    return acc
if __name__ == '__main__':
    # Script entry point: process the raw Excel exports under data/raw.
    get_place_names('data/raw')
| StarcoderdataPython |
57845 | constants.physical_constants["electron-triton mass ratio"] | StarcoderdataPython |
1642232 | <filename>cengal/RequestCache.py
#!/usr/bin/env python
# coding=utf-8
# Copyright © 2016 ButenkoMS. All rights reserved. Contacts: <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import json
"""
Module Docstring
Docstrings: http://www.python.org/dev/peps/pep-0257/
"""
__author__ = "ButenkoMS <<EMAIL>>"
__copyright__ = "Copyright © 2016 ButenkoMS. All rights reserved. Contacts: <<EMAIL>>"
__credits__ = ["ButenkoMS <<EMAIL>>", ]
__license__ = "Apache License, Version 2.0"
__version__ = "1.0.0"
__maintainer__ = "ButenkoMS <<EMAIL>>"
__email__ = "<EMAIL>"
# __status__ = "Prototype"
__status__ = "Development"
# __status__ = "Production"
class RequestCache:
# TODO: add GC which will clean lists every X seconds (really - every Y hours)
def __init__(self, itemsQntLimit, timeLimitInSeconds=None, clock_function=None):
    """Create a bounded, optionally time-limited request cache.

    :param itemsQntLimit: maximum number of cached requests; when the
        limit is reached, the oldest request in the history is evicted.
    :param timeLimitInSeconds: entry lifetime:
        - number: limit in seconds (remember that the accuracy of the
          server clock is approximately 1 second);
        - 0 (zero): cache is not used (try_to_get_data_for_request()
          will return None on every request);
        - None: cache entries never expire (no time limit at all).
    :param clock_function: callable returning the current time as a
        float; defaults to time.time (injectable for testing).
    """
    super().__init__()
    self._clock_function = clock_function or time.time
    self._itemsQntLimit = itemsQntLimit
    self._timeLimitInSeconds = timeLimitInSeconds
    # Maps request -> (data, time of last change per _clock_function).
    self._requestsAndData = {}
    # Request access history, oldest first: [request0, ..., requestN];
    # its length is kept within _itemsQntLimit by the eviction logic.
    self._requestsHistory = []
    # Becomes True whenever cached data actually changes.
    self.isWasChanged = False
def put_new_request(self, request, data):
self._move_request_to_the_end_of_history(request)
if request in self._requestsAndData:
dataAndTime = self._requestsAndData[request]
is_was_changed = False
if isinstance(data, type(dataAndTime[0])):
if data != dataAndTime[0]:
is_was_changed = True
else:
is_was_changed = True
if is_was_changed:
self._requestsAndData[request] = (data, self._clock_function())
self.isWasChanged = True
else:
if len(self._requestsAndData) >= self._itemsQntLimit:
forDeleting = self._requestsHistory[0]
if forDeleting in self._requestsAndData:
del self._requestsAndData[forDeleting]
del self._requestsHistory[0]
self._requestsAndData[request] = (data, self._clock_function())
self.isWasChanged = True
def put_new_request_or_renew_it(self, request, data):
self._move_request_to_the_end_of_history(request)
if request in self._requestsAndData:
dataAndTime = self._requestsAndData[request]
self._requestsAndData[request] = (data, self._clock_function())
self.isWasChanged = True
else:
if len(self._requestsAndData) >= self._itemsQntLimit:
forDeleting = self._requestsHistory[0]
if forDeleting in self._requestsAndData:
del self._requestsAndData[forDeleting]
del self._requestsHistory[0]
self._requestsAndData[request] = (data, self._clock_function())
self.isWasChanged = True
def try_to_get_raw_data_for_request(self, request):
if request in self._requestsAndData:
dataAndTime = self._requestsAndData[request]
return dataAndTime[0]
else:
return None
def try_to_get_raw_data_with_time_for_request(self, request):
if request in self._requestsAndData:
dataAndTime = self._requestsAndData[request]
return dataAndTime
else:
return None
def try_to_get_data_for_request(self, request):
if request in self._requestsAndData:
dataAndTime = self._requestsAndData[request]
lastChangingTime = dataAndTime[1]
tLimit = self._timeLimitInSeconds
if (tLimit is not None) and ((self._clock_function() - lastChangingTime) >= tLimit):
del self._requestsAndData[request]
if request in self._requestsHistory:
self._requestsHistory.remove(request)
return None
else:
self._move_request_to_the_end_of_history(request)
return dataAndTime[0]
else:
return None
def try_to_get_data_for_request_and_renew_it(self, request):
if request in self._requestsAndData:
dataAndTime = self._requestsAndData[request]
lastChangingTime = dataAndTime[1]
tLimit = self._timeLimitInSeconds
if (tLimit is not None) and ((self._clock_function() - lastChangingTime) >= tLimit):
del self._requestsAndData[request]
if request in self._requestsHistory:
self._requestsHistory.remove(request)
return None
else:
self._move_request_to_the_end_of_history(request)
self._requestsAndData[request] = (dataAndTime[0], self._clock_function())
self.isWasChanged = True
return dataAndTime[0]
else:
return None
def try_to_remove_request(self, request):
if request in self._requestsAndData:
del self._requestsAndData[request]
if request in self._requestsHistory:
self._requestsHistory.remove(request)
def _move_request_to_the_end_of_history(self, request):
if request in self._requestsAndData:
if request in self._requestsHistory:
self._requestsHistory.remove(request)
self._requestsHistory.append(request)
def update(self, anotherRequestCache):
if type(anotherRequestCache) == RequestCache:
self._itemsQntLimit += anotherRequestCache._itemsQntLimit
self._requestsAndData.update(anotherRequestCache._requestsAndData)
self.isWasChanged = True
# Do not do this:
# self._requestsHistory += anotherRequestCache._requestsHistory
def clear(self):
self._requestsAndData.clear()
# self._requestsHistory.clear() # Python 3.3+ only so can't be used under PyPy yet.
del self._requestsHistory[:]
self.isWasChanged = True
def get_state(self):
reqAndDat = []
for item in self._requestsAndData.items():
reqAndDat.append(item)
data = (self._itemsQntLimit
, self._timeLimitInSeconds
, reqAndDat
, self._requestsHistory)
return json.dumps(data)
def set_state(self, state):
data = json.loads(state)
self._itemsQntLimit = data[0]
self._timeLimitInSeconds = data[1]
for item in data[2]:
self._requestsAndData[item[0]] = item[1]
self._requestsHistory = data[3]
self.isWasChanged = True
| StarcoderdataPython |
1644354 | <reponame>charlesxin97/ToolFinder_binder<filename>binary_classifier/DNN.py
import torch.nn as nn
import torch
from collections import OrderedDict
class FFN(nn.Module):
    """Feed-forward network: stacked Linear+ReLU layers (with dropout after
    the hidden layers at positions 1, 3 and 5) followed by a sigmoid output."""

    def __init__(self, layer_arch, input_size, output_size, bias=True):
        """
        :param layer_arch: list of hidden-layer widths, in order.
        :param input_size: number of input features.
        :param output_size: number of output units (sigmoid-activated).
        :param bias: whether the Linear layers carry bias terms.
        """
        super(FFN, self).__init__()
        self.layer_arch = layer_arch
        self.input_size = input_size
        self.output_size = output_size
        self.bias = bias
        self.build_model()

    def build_model(self):
        """Assemble ``self.model`` as an ordered sequence of named layers."""
        layers = []
        in_features = self.input_size
        for idx, width in enumerate(self.layer_arch):
            layers.append(("dense_" + str(idx), nn.Linear(in_features, width, bias=self.bias)))
            layers.append(("nonlinear_" + str(idx), nn.ReLU()))
            # Regularize only after selected hidden layers.
            if idx in (1, 3, 5):
                layers.append(("dropout_" + str(idx), nn.Dropout()))
            in_features = width
        layers.append(("dense_final", nn.Linear(in_features, self.output_size, bias=self.bias)))
        layers.append(("act_final", nn.Sigmoid()))
        self.model = nn.Sequential(OrderedDict(layers))

    def forward(self, inputs):
        """Run the assembled model on a batch of inputs."""
        return self.model(inputs)
1711823 | import argparse
import numpy as np
import math
def is_pos_def(x):
    """Return True when every eigenvalue of matrix ``x`` is strictly positive
    (positive definiteness test via the eigenvalue criterion)."""
    eigenvalues = np.linalg.eigvals(x)
    return np.all(eigenvalues > 0)
def get_multigaussian_pdf(_mean, _cov, _cov_i, num_variable, Y_variable):
    """
    Evaluate the multivariate Gaussian PDF for every sample.

    Parameters
    ----------
    _mean: numpy array
        N x num_variable (N: sample size), per-sample mean vectors
    _cov: numpy array
        num_variable x num_variable, covariance matrix
    _cov_i: numpy array
        num_variable x num_variable, inverse of the covariance matrix
    num_variable: integer
        number of variables
    Y_variable: numpy array
        N x num_variable, realized values of the random variables

    Returns
    -------
    numpy array of float
        likelihood of every sample

    Raises
    ------
    NotImplementedError
        The denominator overflows to infinity for large dimensionality
        (roughly > 200 variables), which this implementation cannot handle.
    """
    # Determinant recovered from slogdet for numerical stability.
    determinant = np.exp(np.linalg.slogdet(_cov)[1])
    residual = Y_variable - _mean
    # Per-row quadratic form: (y - mu)^T Sigma^{-1} (y - mu).
    mahalanobis_sq = np.einsum('ij,ij->i', residual @ _cov_i, residual)
    numerator = np.exp(-.5 * mahalanobis_sq)
    denominator = np.sqrt(np.power(2 * math.pi, num_variable) * determinant)
    if math.isinf(denominator):
        raise NotImplementedError(f"The current approach suffers from infinity large denominator when number "
                                  f"of variables is large (e.g. > 200)")
    return numerator / denominator
def flatten(nested):
    """Flatten one level of nesting: [[a, b], [c]] -> [a, b, c].

    Defined as a proper function instead of a lambda assignment (PEP 8 E731).
    """
    return [item for sublist in nested for item in sublist]
def str2bool(v):
    """
    Argparse type helper that converts a string to a boolean.
    Source: https://stackoverflow.com/questions/15008758/parsing-boolean-values-with-argparse
    """
    if isinstance(v, bool):
        return v
    normalized = v.lower()
    if normalized in ('yes', 'true', 't', 'y', '1'):
        return True
    if normalized in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')
1726339 | <gh_stars>0
class Relation:
    """A directed diplomatic relation between two parties.

    Registers itself in both parties' ``diplomatic_relations`` lists on
    construction. ``description`` uses ``*`` as a placeholder for the
    giver's official name and ``^`` for the receiver's.
    """

    def __init__(self, giver, receiver):
        self.name = ""
        self.description = ""
        self.giver = giver
        self.receiver = receiver
        giver.diplomatic_relations.append(self)
        receiver.diplomatic_relations.append(self)

    def describe_relation(self):
        """Print the description with both name placeholders substituted.

        Bugfix: the original referenced the bare names ``giver``/``receiver``
        (NameError at runtime) instead of the instance attributes.
        """
        print(self.description.replace("*", self.giver.official_name)
                              .replace("^", self.receiver.official_name))

    def get_other_side(self, wisher):
        """Return the party on the opposite side of the relation from
        ``wisher`` (assumes ``wisher`` is one of the two parties)."""
        if wisher == self.giver:
            return self.receiver
        else:
            return self.giver
| StarcoderdataPython |
76694 | <reponame>SOFIE-project/IAA
import pytest
import requests
import jwt
import nacl.signing
import nacl.encoding
privateKeyHex = '<KEY>'
publicKeyHex = 'E390CF3B5B93E921C45ED978737D89F61B8CAFF9DE76BFA5F63DA20386BCCA3B'
class TestJWTwithPoP:
    """Integration tests for the JWT proof-of-possession endpoint.

    Requires a server listening on http://localhost:9000; both requests
    are expected to be refused with HTTP 403.
    """

    def test_valid_bearer_get(self):
        # A bearer token alone (without proof-of-possession) must be rejected.
        token = "<KEY>"
        request_headers = {'Authorization': 'Bearer ' + token, 'Accept': 'application/json'}
        reply = requests.get("http://localhost:9000/secure/jwt-pop", headers=request_headers)
        print(reply.text)
        assert reply.status_code == 403

    def test_dublicate_challenge(self):
        # Re-using the same challenge (replay) must also be rejected.
        token = "<KEY>"
        request_headers = {'Authorization': 'Bearer ' + token, 'Accept': 'application/json'}
        reply = requests.get("http://localhost:9000/secure/jwt-pop", headers=request_headers)
        print(reply.text)
        assert reply.status_code == 403
| StarcoderdataPython |
1684533 | <reponame>kraj/intel-iot-refkit<filename>meta-iotqa/lib/oeqa/runtime/multimedia/vaapi/test_vaapi_present.py
'''
This test suite checks whether VA-API support is present on the target.
'''
from oeqa.oetest import oeRuntimeTest
class VAAPITest(oeRuntimeTest):
    """Runtime check that the VA-API stack is usable on the target device."""

    def test_vaapi_present(self):
        # `vainfo` exits non-zero when no VA-API driver is available.
        exit_code, cmd_output = self.target.run("vainfo")
        self.assertEqual(exit_code, 0, msg="Error messages: VAAPI not Present %s" % cmd_output)
| StarcoderdataPython |
94240 | # -*- coding: utf-8 -*-
"""Call back view for OAuth2 authentication."""
from django import http
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.core.handlers.wsgi import WSGIRequest
from django.shortcuts import redirect, reverse
from django.utils.translation import ugettext, ugettext_lazy as _
from ontask.core import SessionPayload
from ontask.core.permissions import is_instructor
from ontask.oauth import services
@user_passes_test(is_instructor)
def callback(request: WSGIRequest) -> http.HttpResponse:
    """Handle the OAuth2 redirect from the Canvas server.

    The callback is supposed to carry the token; it is processed and stored
    by ``services.process_callback``, after which the user is redirected to
    the page previously stashed in the session.

    :param request: Request object
    :return: Redirection to the stored page
    """
    payload = SessionPayload(request.session)
    if payload is None:
        # No stashed payload: this callback was not initiated by us.
        messages.error(
            request,
            _('Incorrect Canvas callback invocation.'))
        return redirect('action:index')

    # The provider reports failures through an "error" query parameter.
    error_string = request.GET.get('error')
    if error_string:
        messages.error(
            request,
            ugettext('Error in OAuth2 step 1 ({0})').format(error_string))
        return redirect('action:index')

    status = services.process_callback(request, payload)
    if status:
        messages.error(request, status)
        return redirect('action:index')

    return redirect(
        request.session.get(services.return_url_key, reverse('action:index')))
| StarcoderdataPython |
3215984 | <gh_stars>100-1000
# -*- coding: utf-8 -*-
#
# Copyright 2017 Ricequant, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import defaultdict
from rqalpha.utils.i18n import gettext as _
from rqalpha.const import ORDER_TYPE, SIDE, BAR_STATUS
from rqalpha.model.trade import Trade
from rqalpha.environment import Environment
from rqalpha.events import EVENT
class Matcher(object):
    """Bar-driven order matcher for the simulated exchange.

    Each bar, ``update()`` is fed the fresh bar data, after which
    ``match()`` tries to fill every open order, enforcing limit-up/down
    price rules and a per-instrument volume cap for the bar.
    """

    def __init__(self,
                 deal_price_decider,
                 bar_limit=True,
                 volume_percent=0.25):
        """
        :param deal_price_decider: callable(bar) -> fill price for that bar.
        :param bar_limit: when True, orders are skipped/rejected on bars
            that hit the limit-up/limit-down price.
        :param volume_percent: fraction of a bar's traded volume available
            for fills (shared by all orders on the same instrument).
        """
        self._board = None                 # current bar_dict (order_book_id -> bar)
        self._turnover = defaultdict(int)  # volume already filled this bar, per instrument
        self._calendar_dt = None
        self._trading_dt = None
        self._deal_price_decider = deal_price_decider
        self._volume_percent = volume_percent
        self._bar_limit = bar_limit

    def update(self, calendar_dt, trading_dt, bar_dict):
        """Advance to a new bar: swap in bar data and reset per-bar turnover."""
        self._board = bar_dict
        self._turnover.clear()
        self._calendar_dt = calendar_dt
        self._trading_dt = trading_dt

    def match(self, open_orders):
        """Attempt to fill each (account, order) pair against the current bar.

        Orders are rejected on missing market data or out-of-band limit
        prices; market orders that cannot trade within the volume cap are
        cancelled for the remainder. Every fill is published as an
        EVENT.TRADE event on the environment's event bus.
        """
        for account, order in open_orders:
            slippage_decider = account.slippage_decider
            commission_decider = account.commission_decider
            tax_decider = account.tax_decider
            bar = self._board[order.order_book_id]
            bar_status = bar._bar_status
            if bar_status == BAR_STATUS.ERROR:
                # No usable market data for this bar; distinguish "listed
                # today" from plain missing data in the rejection reason.
                listed_date = bar.instrument.listed_date.date()
                if listed_date == self._trading_dt.date():
                    reason = _("Order Cancelled: current security [{order_book_id}] can not be traded in listed date [{listed_date}]").format(
                        order_book_id=order.order_book_id,
                        listed_date=listed_date,
                    )
                else:
                    reason = _("Order Cancelled: current bar [{order_book_id}] miss market data.").format(
                        order_book_id=order.order_book_id)
                order._mark_rejected(reason)
                continue
            deal_price = self._deal_price_decider(bar)
            if order.type == ORDER_TYPE.LIMIT:
                # Reject limit prices outside the allowed daily band.
                if order.price > bar.limit_up:
                    reason = _(
                        "Order Rejected: limit order price {limit_price} is higher than limit up {limit_up}."
                    ).format(
                        limit_price=order.price,
                        limit_up=bar.limit_up
                    )
                    order._mark_rejected(reason)
                    continue
                if order.price < bar.limit_down:
                    reason = _(
                        "Order Rejected: limit order price {limit_price} is lower than limit down {limit_down}."
                    ).format(
                        limit_price=order.price,
                        limit_down=bar.limit_down
                    )
                    order._mark_rejected(reason)
                    continue
                # Price has not crossed the limit yet: leave the order open.
                if order.side == SIDE.BUY and order.price < deal_price:
                    continue
                if order.side == SIDE.SELL and order.price > deal_price:
                    continue
            else:
                # Market orders cannot trade into a locked limit-up/down bar.
                if self._bar_limit and order.side == SIDE.BUY and bar_status == BAR_STATUS.LIMIT_UP:
                    reason = _(
                        "Order Cancelled: current bar [{order_book_id}] reach the limit_up price."
                    ).format(order_book_id=order.order_book_id)
                    order._mark_rejected(reason)
                    continue
                elif self._bar_limit and order.side == SIDE.SELL and bar_status == BAR_STATUS.LIMIT_DOWN:
                    reason = _(
                        "Order Cancelled: current bar [{order_book_id}] reach the limit_down price."
                    ).format(order_book_id=order.order_book_id)
                    order._mark_rejected(reason)
                    continue
            if self._bar_limit:
                # Limit orders on a locked bar are skipped (kept open), not rejected.
                if order.side == SIDE.BUY and bar_status == BAR_STATUS.LIMIT_UP:
                    continue
                if order.side == SIDE.SELL and bar_status == BAR_STATUS.LIMIT_DOWN:
                    continue
            # Remaining volume available this bar, rounded down to whole lots.
            volume_limit = round(bar.volume * self._volume_percent) - self._turnover[order.order_book_id]
            round_lot = bar.instrument.round_lot
            volume_limit = (volume_limit // round_lot) * round_lot
            if volume_limit <= 0:
                if order.type == ORDER_TYPE.MARKET:
                    reason = _('Order Cancelled: market order {order_book_id} volume {order_volume}'
                               ' due to volume limit').format(
                        order_book_id=order.order_book_id,
                        order_volume=order.quantity
                    )
                    order._mark_cancelled(reason)
                continue
            unfilled = order.unfilled_quantity
            fill = min(unfilled, volume_limit)
            ct_amount = account.portfolio.positions[order.order_book_id]._cal_close_today_amount(fill, order.side)
            price = slippage_decider.get_trade_price(order, deal_price)
            trade = Trade.__from_create__(order=order, calendar_dt=self._calendar_dt, trading_dt=self._trading_dt,
                                          price=price, amount=fill, close_today_amount=ct_amount)
            trade._commission = commission_decider.get_commission(trade)
            trade._tax = tax_decider.get_tax(trade)
            order._fill(trade)
            self._turnover[order.order_book_id] += fill
            Environment.get_instance().event_bus.publish_event(EVENT.TRADE, account, trade)
            if order.type == ORDER_TYPE.MARKET and order.unfilled_quantity != 0:
                # Partially filled market order: cancel the remainder.
                reason = _(
                    "Order Cancelled: market order {order_book_id} volume {order_volume} is"
                    " larger than 25 percent of current bar volume, fill {filled_volume} actually"
                ).format(
                    order_book_id=order.order_book_id,
                    order_volume=order.quantity,
                    filled_volume=order.filled_quantity
                )
                order._mark_cancelled(reason)
3247765 | <gh_stars>10-100
import torch
from torch import nn
from torch.nn import functional as F
from torch import optim
import torchvision
from matplotlib import pyplot as plot
from utils import plot_image, plot_curve, one_hot, save_data
# step 1: load the data
batch_size = 512
# step1. load dataset
# NOTE(review): Normalize uses the fixed constants (0.1307,)/(0.3081,) —
# presumably the dataset's mean/std; confirm if the dataset changes.
train_loader = torch.utils.data.DataLoader(
    torchvision.datasets.MNIST('mnist_data', train=True, download=True,
                               transform=torchvision.transforms.Compose([
                                   torchvision.transforms.ToTensor(),
                                   torchvision.transforms.Normalize(
                                       (0.1307,), (0.3081,))
                               ])),
    batch_size=batch_size, shuffle=True)
test_loader = torch.utils.data.DataLoader(
    torchvision.datasets.MNIST('mnist_data/', train=False, download=True,
                               transform=torchvision.transforms.Compose([
                                   torchvision.transforms.ToTensor(),
                                   torchvision.transforms.Normalize(
                                       (0.1307,), (0.3081,))
                               ])),
    batch_size=batch_size, shuffle=False)
# Peek at one batch to sanity-check shapes and value ranges.
x,y = next(iter(train_loader))
print(x.shape,y.shape,x.min(),y.min())
# plot_image(x,y,'sample')
# step 2: build the network
class Net(nn.Module):
    """Three-layer fully-connected classifier for flattened 28x28 MNIST images."""

    def __init__(self):
        super(Net, self).__init__()
        # xw+b
        self.fc1 = nn.Linear(28*28, 256)
        self.fc2 = nn.Linear(256, 64)
        self.fc3 = nn.Linear(64, 10)

    def forward(self, x):
        """Compute raw class scores.

        :param x: tensor of shape [b, 28*28] (flattened images).
        :return: tensor of shape [b, 10].
        """
        # h1 = relu(x w1 + b1)
        x = F.relu(self.fc1(x))
        # h2 = relu(h1 w2 + b2)
        x = F.relu(self.fc2(x))
        # h3 = h2 w3 + b3 (no activation; scores are fed to MSE loss directly)
        x = self.fc3(x)
        return x

    def backward(self, x):
        # NOTE(review): the original duplicated forward()'s body here; gradient
        # computation is handled by autograd and nn.Module defines no
        # user-overridable `backward`. Kept only for backward compatibility,
        # delegating to forward() instead of duplicating the code.
        return self.forward(x)
# step 3: train the network
net = Net()
optimizer = optim.SGD(net.parameters(), lr=0.01, momentum=0.9)
train_loss = []
for epoch in range(10):
    for idx,(x,y) in enumerate(train_loader):
        # Flatten each image to a [b, 784] vector.
        x = x.view(x.size(0),28*28)
        # =>[b,10]
        out = net(x)
        # MSE against one-hot targets (instead of the usual cross-entropy).
        y_onehot = one_hot(y)
        loss = F.mse_loss(out,y_onehot)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        train_loss.append(loss.item())
        if idx %10 ==0:
            print(epoch,idx,loss.item())
# step 4: visualize and evaluate results
save_data(train_loss,'train_loss.csv')
plot_curve(train_loss)
# Accuracy on the held-out test set.
total_correct = 0
for x,y in test_loader:
    x = x.view(x.size(0),28*28)
    out = net(x)
    pred = out.argmax(dim = 1)
    correct = pred.eq(y).sum().float().item()
    total_correct+=correct
total_num = len(test_loader.dataset)
acc = total_correct / total_num
# NOTE(review): "acuccy" is a typo of "accuracy" in the printed label.
print("acuccy",acc)
# Show predictions for one test batch.
x,y = next(iter(test_loader))
out = net(x.view(x.size(0),28*28))
pred = out.argmax(dim = 1)
plot_image(x,pred,"test")
| StarcoderdataPython |
4815282 | from typing import Dict, List, NamedTuple, Optional, Tuple
from app import db
from app.models import Project, Schedule, User, Team
from sqlalchemy import func
class WeekProject(NamedTuple):
    """Composite dictionary key: an ordinal week number paired with a project id."""
    week: int
    project_id: int
class WeekUser(NamedTuple):
    """Composite dictionary key: an ordinal week number paired with a user id."""
    week: int
    user_id: int
def set_schedule(
    user_id: int, project_id: int, week: int, hours: int
) -> Optional[Schedule]:
    """
    Logs the number of hours a given user plans to work on a given project for
    a given week.

    NOTE(review): despite the original docstring's claim that existing values
    are overridden, an existing schedule for this project/week is returned
    unchanged — `hours` is NOT updated in that case. Also note the existence
    check filters on project and week only (not on user); confirm whether
    that is intended.

    :param user_id: The ID of the user that will be logged for these hours.
    :param project_id: The ID of the project on which the user will work.
    :param week: The week number, where week 0 is the week starting on the 1st
        of January 1AD.
    :param hours: The number of hours to be logged for that week.
    :returns: The schedule created if none existed for that week/project
        combination, the existing schedule if it was already present, or None
        if either the user_id or project_id did not correspond to a user or
        project, or if the user's team is not part of the project.
    """
    session = db.get_session()
    user = session.query(User).filter(User.id == user_id).one_or_none()
    project = session.query(Project).filter(Project.id == project_id).one_or_none()
    if not user or not project:
        return None
    # The user's team must own the project to log hours on it.
    if project not in user.team.projects:
        return None
    schedule = (
        session.query(Schedule)
        .filter(Schedule.project_id == project_id, Schedule.week == week)
        .one_or_none()
    )
    if schedule:
        # Existing entry wins; the new `hours` value is discarded.
        return schedule
    schedule = Schedule(user=user, project=project, week=week, hours=hours)
    session.add(schedule)
    session.commit()
    return schedule
def get_schedule(user_id: int, project_id: int, week: int) -> Optional[Schedule]:
    """
    Look up the schedule row matching a user, project and week.

    :param user_id: ID of the user for which to fetch the schedule.
    :param project_id: ID of the project for which to fetch the schedule.
    :param week: The week for which to fetch the schedule.
    :returns: The matching schedule, or None when no such row exists.
    """
    session = db.get_session()
    query = session.query(Schedule).filter(
        Schedule.user_id == user_id,
        Schedule.project_id == project_id,
        Schedule.week == week,
    )
    return query.one_or_none()
def get_user_schedules(
    user_id: int, start: int, end: int
) -> Dict[WeekProject, Schedule]:
    """
    Fetch every schedule of a user whose week falls in [start, end].

    :param user_id: ID of the user for which to get schedules.
    :param start: First week of the range, inclusive.
    :param end: Last week of the range, inclusive.
    :returns: Mapping of (week, project_id) -> schedule; the pair is unique
        per user, so entries cannot collide.
    """
    session = db.get_session()
    rows = (
        session.query(Schedule)
        .filter(
            Schedule.user_id == user_id,
            Schedule.week >= start,
            Schedule.week <= end,
        )
        .all()
    )
    return {WeekProject(row.week, row.project_id): row for row in rows}
def get_project_schedules(project_id: int) -> Dict[WeekUser, Schedule]:
    """
    Fetch all schedules logged against one project.

    :param project_id: The ID of the project for which to get schedules.
    :returns: Mapping of (week, user_id) -> schedule.
    """
    session = db.get_session()
    rows = session.query(Schedule).filter(Schedule.project_id == project_id).all()
    return {WeekUser(row.week, row.user_id): row for row in rows}
def get_project_week_schedule(project_id: int, week: int) -> Dict[int, Schedule]:
    """
    Fetch, for one project and one week, the schedule of every user who
    logged hours on that project.

    :param project_id: The ID of the project being searched.
    :param week: The week for which to search for schedules.
    :returns: Mapping of user_id -> schedule for that project/week.
    """
    session = db.get_session()
    rows = (
        session.query(Schedule)
        .filter(Schedule.project_id == project_id, Schedule.week == week)
        .all()
    )
    return {row.user_id: row for row in rows}
def get_team_schedules(team_id: int, start: int, end: int) -> List[Schedule]:
    """
    Fetch all schedules for projects owned by a team, restricted to weeks
    in [start, end].

    :param team_id: The ID of the team whose project schedules are fetched.
    :param start: Start week of the filter, inclusive, as an ordinal ISO week date.
    :param end: End week of the filter, inclusive, as an ordinal ISO week date.
    :returns: A list of all matching schedules for said team.
    """
    session = db.get_session()
    query = (
        session.query(Schedule)
        .join(Project)
        .filter(
            Project.team_id == team_id,
            Schedule.week >= start,
            Schedule.week <= end,
        )
    )
    return query.all()
def get_team_summary_schedule(
    team_id: int, start: int, end: int, period: float
) -> List[Tuple]:
    """
    Get statistics on the schedules for a given team, with limits on dates, and
    a period over which averages and other similar statistics are calculated.

    :param team_id: The ID for the team to search for
    :param start: Start week of the filter, inclusive, as an ordinal ISO week date
    :param end: End week of the filter, inclusive, as an ordinal ISO week date
    :param period: Divisor (e.g. number of weeks) applied to the summed hours
        to obtain the average.
    :returns: The summary of the schedule as a list of 3-tuples. Each tuple
        contains the average number of hours worked by a given user on a given
        project, the user name, and the project name. (NOTE(review): the query
        selects names, not the IDs the original docstring claimed.)
    """
    session = db.get_session()
    results = (
        session.query(func.sum(Schedule.hours) / period, User.name, Project.name)
        .filter(Project.team_id == team_id)
        .filter(Schedule.week >= start)
        .filter(Schedule.week <= end)
        .group_by(Schedule.user_id, Schedule.project_id)
        .join(User)
        .join(Project)
        .all()
    )
    return results
| StarcoderdataPython |
104624 | from collections import OrderedDict, defaultdict
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
import time
import torch
from FClip.line_parsing import OneStageLineParsing
from FClip.config import M
from FClip.losses import ce_loss, sigmoid_l1_loss, focal_loss, l12loss
from FClip.nms import structure_nms_torch
class FClip(nn.Module):
    """One-stage line detection model.

    A backbone produces stacked feature maps whose channels are partitioned
    into per-task "heads" (line-center map/offset, line length, line angle,
    plus optional junction/line maps). Each *_head method slices its channels
    out of the backbone output using the cumulative offsets in self.head_off,
    computes the configured loss, and returns (prediction, weighted loss).
    """

    def __init__(self, backbone):
        super(FClip, self).__init__()
        self.backbone = backbone
        self.M_dic = M.to_dict()  # model/head configuration as a plain dict
        self._get_head_size()

    def _get_head_size(self):
        """Precompute cumulative channel offsets (self.head_off) of every head
        in the backbone output, in the order given by M_dic['head']['order']."""
        head_size = []
        for h in self.M_dic['head']['order']:
            head_size.append([self.M_dic['head'][h]['head_size']])
        self.head_off = np.cumsum([sum(h) for h in head_size])

    def lcmap_head(self, output, target):
        """Line-center classification head.

        Returns the per-pixel "is line center" probability (softmax channel 1)
        and the weighted focal/CE loss, per the configuration.
        """
        name = "lcmap"
        _, batch, row, col = output.shape
        order = self.M_dic['head']['order']
        offidx = order.index(name)
        s = 0 if offidx == 0 else self.head_off[offidx-1]
        pred = output[s: self.head_off[offidx]].reshape(self.M_dic['head'][name]['head_size'], batch, row, col)
        if self.M_dic['head'][name]['loss'] == "Focal_loss":
            alpha = self.M_dic['head'][name]['focal_alpha']
            loss = focal_loss(pred, target, alpha)
        elif self.M_dic['head'][name]['loss'] == "CE":
            loss = ce_loss(pred, target, None)
        else:
            raise NotImplementedError
        weight = self.M_dic['head'][name]['loss_weight']
        return pred.permute(1, 0, 2, 3).softmax(1)[:, 1], loss * weight

    def lcoff_head(self, output, target, mask):
        """Line-center sub-pixel offset head: 2 channels regressed with a
        masked sigmoid-L1 loss; predictions are shifted into [-0.5, 0.5)."""
        name = 'lcoff'
        _, batch, row, col = output.shape
        order = self.M_dic['head']['order']
        offidx = order.index(name)
        s = 0 if offidx == 0 else self.head_off[offidx - 1]
        pred = output[s: self.head_off[offidx]].reshape(self.M_dic['head'][name]['head_size'], batch, row, col)
        loss = sum(
            sigmoid_l1_loss(pred[j], target[j], offset=-0.5, mask=mask)
            for j in range(2)
        )
        weight = self.M_dic['head'][name]['loss_weight']
        return pred.permute(1, 0, 2, 3).sigmoid() - 0.5, loss * weight

    def lleng_head(self, output, target, mask):
        """Line length regression head (single channel, masked loss);
        prediction is squashed/clamped into [0, 1] per the configured loss."""
        name = 'lleng'
        _, batch, row, col = output.shape
        order = self.M_dic['head']['order']
        offidx = order.index(name)
        s = 0 if offidx == 0 else self.head_off[offidx - 1]
        pred = output[s: self.head_off[offidx]].reshape(batch, row, col)
        if self.M_dic['head'][name]['loss'] == "sigmoid_L1":
            loss = sigmoid_l1_loss(pred, target, mask=mask)
            pred = pred.sigmoid()
        elif self.M_dic['head'][name]['loss'] == "L1":
            loss = l12loss(pred, target, mask=mask)
            pred = pred.clamp(0., 1.)
        else:
            raise NotImplementedError
        weight = self.M_dic['head'][name]['loss_weight']
        return pred, loss * weight

    def angle_head(self, output, target, mask):
        """Line angle regression head (single channel, masked loss);
        same loss options and output range handling as lleng_head."""
        name = 'angle'
        _, batch, row, col = output.shape
        order = self.M_dic['head']['order']
        offidx = order.index(name)
        s = 0 if offidx == 0 else self.head_off[offidx - 1]
        pred = output[s: self.head_off[offidx]].reshape(batch, row, col)
        if self.M_dic['head'][name]['loss'] == "sigmoid_L1":
            loss = sigmoid_l1_loss(pred, target, mask=mask)
            pred = pred.sigmoid()
        elif self.M_dic['head'][name]['loss'] == "L1":
            loss = l12loss(pred, target, mask=mask)
            pred = pred.clamp(0., 1.)
        else:
            raise NotImplementedError
        weight = self.M_dic['head'][name]['loss_weight']
        return pred, loss * weight

    def jmap_head(self, output, target, n_jtyp):
        """Junction classification head, one sub-head per junction type
        (n_jtyp); losses are summed over types."""
        name = "jmap"
        _, batch, row, col = output.shape
        order = self.M_dic['head']['order']
        offidx = order.index(name)
        s = 0 if offidx == 0 else self.head_off[offidx - 1]
        pred = output[s: self.head_off[offidx]].reshape(n_jtyp, self.M_dic['head'][name]['head_size'], batch, row, col)
        if self.M_dic['head'][name]['loss'] == "Focal_loss":
            alpha = self.M_dic['head'][name]['focal_alpha']
            loss = sum(
                focal_loss(pred[i], target[i], alpha) for i in range(n_jtyp)
            )
        elif self.M_dic['head'][name]['loss'] == "CE":
            loss = sum(
                ce_loss(pred[i], target[i], None) for i in range(n_jtyp)
            )
        else:
            raise NotImplementedError
        weight = self.M_dic['head'][name]['loss_weight']
        return pred.permute(2, 0, 1, 3, 4).softmax(2)[:, :, 1], loss * weight

    def joff_head(self, output, target, n_jtyp, mask):
        """Junction sub-pixel offset head: 2 channels per junction type,
        masked sigmoid-L1 loss summed over types and channels."""
        name = "joff"
        _, batch, row, col = output.shape
        order = self.M_dic['head']['order']
        offidx = order.index(name)
        s = 0 if offidx == 0 else self.head_off[offidx - 1]
        pred = output[s: self.head_off[offidx]].reshape(
            n_jtyp, self.M_dic['head'][name]['head_size'], batch, row, col)
        loss = sum(
            sigmoid_l1_loss(pred[i, j], target[i, j], scale=1.0, offset=-0.5, mask=mask[i])
            for i in range(n_jtyp)
            for j in range(2)
        )
        weight = self.M_dic['head'][name]['loss_weight']
        return pred.permute(2, 0, 1, 3, 4).sigmoid() - 0.5, loss * weight

    def lmap_head(self, output, target):
        """Dense line-map head: per-pixel BCE-with-logits, averaged over the
        spatial dimensions (one loss value per batch element)."""
        name = "lmap"
        _, batch, row, col = output.shape
        order = self.M_dic['head']['order']
        offidx = order.index(name)
        s = 0 if offidx == 0 else self.head_off[offidx - 1]
        pred = output[s: self.head_off[offidx]].reshape(batch, row, col)
        loss = (
            F.binary_cross_entropy_with_logits(pred, target, reduction="none")
            .mean(2)
            .mean(1)
        )
        weight = self.M_dic['head'][name]['loss_weight']
        return pred.sigmoid(), loss * weight

    def forward(self, input_dict, isTest=False):
        """Dispatch to inference (isTest=True) or train/val forward pass."""
        if isTest:
            return self.test_forward(input_dict)
        else:
            return self.trainval_forward(input_dict)

    def test_forward(self, input_dict):
        """Inference pass: compute head maps from the first stack, parse them
        into line segments (with optional structural NMS), and return the
        heatmaps plus per-stage timing info."""
        extra_info = {
            'time_front': 0.0,
            'time_stack0': 0.0,
            'time_stack1': 0.0,
            'time_backbone': 0.0,
        }
        extra_info['time_backbone'] = time.time()
        image = input_dict["image"]
        outputs, feature, backbone_time = self.backbone(image)
        extra_info['time_front'] = backbone_time['time_front']
        extra_info['time_stack0'] = backbone_time['time_stack0']
        extra_info['time_stack1'] = backbone_time['time_stack1']
        extra_info['time_backbone'] = time.time() - extra_info['time_backbone']
        output = outputs[0]
        heatmap = {}
        # Slice each head's channels via the precomputed offsets.
        heatmap["lcmap"] = output[:, 0: self.head_off[0]].softmax(1)[:, 1]
        heatmap["lcoff"] = output[:, self.head_off[0]: self.head_off[1]].sigmoid() - 0.5
        heatmap["lleng"] = output[:, self.head_off[1]: self.head_off[2]].sigmoid()
        heatmap["angle"] = output[:, self.head_off[2]: self.head_off[3]].sigmoid()
        parsing = True
        if parsing:
            lines, scores = [], []
            for k in range(output.shape[0]):
                # Decode center/offset/length/angle maps into line segments.
                line, score = OneStageLineParsing.fclip_torch(
                    lcmap=heatmap["lcmap"][k],
                    lcoff=heatmap["lcoff"][k],
                    lleng=heatmap["lleng"][k],
                    angle=heatmap["angle"][k],
                    delta=M.delta,
                    resolution=M.resolution
                )
                if M.s_nms > 0:
                    line, score = structure_nms_torch(line, score, M.s_nms)
                lines.append(line[None])
                scores.append(score[None])
            heatmap["lines"] = torch.cat(lines)
            heatmap["score"] = torch.cat(scores)
        return {'heatmaps': heatmap, 'extra_info': extra_info}

    def trainval_forward(self, input_dict):
        """Training/validation pass: run every backbone stack through the
        heads, collect per-stack losses, and (optionally) keep the first
        stack's heatmaps for evaluation."""
        image = input_dict["image"]
        outputs, feature, backbone_time = self.backbone(image)
        result = {"feature": feature}
        batch, channel, row, col = outputs[0].shape
        T = input_dict["target"].copy()
        n_jtyp = 1
        T["lcoff"] = T["lcoff"].permute(1, 0, 2, 3)
        losses = []
        accuracy = []
        for stack, output in enumerate(outputs):
            # Move channels first so heads can slice along dim 0.
            output = output.transpose(0, 1).reshape([-1, batch, row, col]).contiguous()
            L = OrderedDict()
            Acc = OrderedDict()
            heatmap = {}
            lcmap, L["lcmap"] = self.lcmap_head(output, T["lcmap"])
            lcoff, L["lcoff"] = self.lcoff_head(output, T["lcoff"], mask=T["lcmap"])
            heatmap["lcmap"] = lcmap
            heatmap["lcoff"] = lcoff
            lleng, L["lleng"] = self.lleng_head(output, T["lleng"], mask=T["lcmap"])
            angle, L["angle"] = self.angle_head(output, T["angle"], mask=T["lcmap"])
            heatmap["lleng"] = lleng
            heatmap["angle"] = angle
            losses.append(L)
            accuracy.append(Acc)
            if stack == 0 and input_dict["do_evaluation"]:
                result["heatmaps"] = heatmap
        result["losses"] = losses
        result["accuracy"] = accuracy
        return result
| StarcoderdataPython |
110922 | <gh_stars>0
import urllib, urllib3, json
from datetime import datetime
from core.art.modelsART import ARTSubResult
from django.db import connection, transaction, DatabaseError
from core.settings import defaultDatetimeFormat
import logging
_logger = logging.getLogger('bigpandamon-error')
def getJobReport(guid, lfn, scope):
    """
    Download and parse the jobReport.json attached to a job's log tarball.

    :param guid: GUID of the log file, passed to the file browser.
    :param lfn: logical file name of the log.
    :param scope: Rucio scope of the log file.
    :return: the parsed jobReport.json content, or -2 when the file browser
        reply is empty or unparsable (pre-existing sentinel, kept for
        compatibility with callers).
    """
    # This is deployment specific because memory monitoring is intended to work in ATLAS
    filebrowserURL = "http://bigpanda.cern.ch/filebrowser/"
    http = urllib3.PoolManager()
    resp = http.request('GET', filebrowserURL,
                        fields={'guid': guid, 'lfn': lfn, 'scope': scope, 'json': 1})
    if not resp or len(resp.data) == 0:
        return -2
    try:
        data = json.loads(resp.data)
        HOSTNAME = data['HOSTNAME']
        tardir = data['tardir']
        MEDIA_URL = data['MEDIA_URL']
        dirprefix = data['dirprefix']
        files = [f for f in data['files'] if 'jobReport.json' in f['name']]
    except (ValueError, KeyError, TypeError):
        # Malformed or incomplete file browser reply (was a bare `except:`,
        # narrowed to the decoding/lookup errors actually expected here).
        return -2
    urlBase = "http://" + HOSTNAME + "/" + MEDIA_URL + dirprefix + "/" + tardir
    for f in files:
        url = urlBase + "/" + f['name']
        response = http.request('GET', url)
        data = json.loads(response.data)
    # NOTE(review): when no jobReport.json is listed, this returns the file
    # browser listing itself (pre-existing behaviour, kept for compatibility).
    return data
def getARTjobSubResults(data):
    """Extract the ART sub-results section from a parsed jobReport.json.

    Normalizes the 'result' entry in place so that every item is a dict of
    the form {'name': ..., 'result': ...} (older reports stored a plain list
    of result values).

    :param data: parsed jobReport.json (any type is tolerated)
    :return: the 'art' section of *data*, or {} when absent
    """
    if not (isinstance(data, dict) and 'art' in data):
        return {}
    art = data['art']
    # protection of json format change from list to list of dicts
    if 'result' in art and isinstance(art['result'], list):
        normalized = []
        for item in art['result']:
            if isinstance(item, dict):
                normalized.append({'name': item.get('name', ''),
                                   'result': item.get('result', item)})
            else:
                normalized.append({'name': '', 'result': item})
        art['result'] = normalized
    return art
def subresults_getter(url_params_str):
    """
    A function for getting ART jobs sub results in multithreading mode
    :param url_params_str: URL query-string fragment whose last '='-separated
        token is the PanDA job id (e.g. '&guid=...&pandaid=12345')
    :return: dictionary with sub-results, {pandaid: subresults_dict};
        {} when the logs could not be downloaded or parsed
    """
    base_url = "http://bigpanda.cern.ch/filebrowser/?json=1"
    subresults_dict = {}
    # the pandaid is assumed to be the value of the last URL parameter
    pandaidstr = url_params_str.split('=')[-1]
    try:
        pandaid = int(pandaidstr)
    except:
        _logger.exception('Exception was caught while transforming pandaid from str to int.')
        raise
    print('Loading {}'.format(base_url+url_params_str))
    http = urllib3.PoolManager()
    resp = http.request('GET', base_url + url_params_str)
    if resp and len(resp.data) > 0:
        try:
            data = json.loads(resp.data)
            HOSTNAME = data['HOSTNAME']
            tardir = data['tardir']
            MEDIA_URL = data['MEDIA_URL']
            dirprefix = data['dirprefix']
            files = data['files']
            # only the jobReport.json files inside the log tarball are of interest
            files = [f for f in files if 'jobReport.json' in f['name']]
        except:
            _logger.exception('Exception was caught while seeking jobReport.json in logs for PanDA job: {}'.format(str(pandaid)))
            return {}
    else:
        # NOTE(review): logger.exception here runs outside any except block, so
        # there is no active exception to record -- logger.error looks intended
        _logger.exception('Exception was caught while downloading logs using Rucio for PanDA job: {}'.format(str(pandaid)))
        return {}
    urlBase = "http://" + HOSTNAME + "/" + MEDIA_URL + dirprefix + "/" + tardir
    # iterate all matches; the last jobReport.json carrying an 'art' section wins
    for f in files:
        url = urlBase + "/" + f['name']
        response = http.request('GET', url)
        data = json.loads(response.data)
        if isinstance(data, dict) and 'art' in data:
            subresults_dict = data['art']
            # protection of json format change from list to list of dicts
            if 'result' in subresults_dict and isinstance(subresults_dict['result'], list):
                resultlist = []
                for r in subresults_dict['result']:
                    if not isinstance(r, dict):
                        resultlist.append({'name': '', 'result': r})
                    else:
                        resultlist.append({'name': r['name'] if 'name' in r else '', 'result': r['result'] if 'result' in r else r})
                subresults_dict['result'] = resultlist
    print('ART Results for {} is {}'.format(str(pandaid), str(subresults_dict)))
    # clean up ART test logs from media/filebrowser/ where guid is folder name
    # guid = None
    # try:
    #     guid = url_params_str.split('=')[1].split('&')[0]
    # except:
    #     _logger.exception('Exception was caught while getting GUID by parsing URL params str: {}'.format(url_params_str))
    #     pass
    # if guid is not None:
    #     urlClean = "http://" + HOSTNAME + '/filebrowser/delete/?json=1&guid=' + guid
    #     http.request('GET', urlClean)
    return {pandaid: subresults_dict}
def save_subresults(subResultsDict):
    """
    A function to save subresults of ART jobs to the special table - ART_SUBRESULT
    :param subResultsDict: dict mapping pandaid -> sub-results dict
    :return: True on success, False when the transaction failed
    """
    try:
        # all rows are committed atomically: either every sub-result is saved
        # or none is
        with transaction.atomic():
            # items() instead of the Python-2-only iteritems() keeps this
            # working on both Python 2 and 3
            for pandaid, data in subResultsDict.items():
                row = ARTSubResult(pandaid=pandaid,
                                   subresult=data)
                row.save()
    except DatabaseError as e:
        # `print e.message` was Python-2-only syntax and e.message is
        # deprecated; record the failure through the module logger instead
        _logger.exception('Failed to save ART sub-results: {}'.format(e))
        return False
    return True
def lock_nqueuedjobs(cur, nrows):
    """
    Function to lock first N rows for futher processing
    :param cur: open DB cursor
    :param nrows: number of rows to lock
    :return: lock_time string used to mark the locked rows
    """
    lock_time = datetime.now().strftime(defaultDatetimeFormat)
    # NOTE: values are interpolated directly into the SQL text; lock_time is
    # generated locally and nrows is forced to int, so no untrusted input
    # reaches the query.
    lquery = """UPDATE atlas_pandabigmon.art_results_queue
        SET IS_LOCKED = 1,
            LOCK_TIME = to_date('%s', 'YYYY-MM-DD HH24:MI:SS')
        WHERE rownum <= %i AND IS_LOCKED = 0""" % (lock_time, int(nrows))
    try:
        cur.execute(lquery)
    except DatabaseError as e:
        # `print e.message` was Python-2-only syntax; log through the module logger
        _logger.exception('Failed to lock queued ART jobs: {}'.format(e))
        raise
    return lock_time
def delete_queuedjobs(cur, lock_time):
    """
    A function to delete processed jobs from ART_RESULTS_QUEUE
    :param cur: open DB cursor
    :param lock_time: timestamp string ('YYYY-MM-DD HH24:MI:SS') previously
        returned by lock_nqueuedjobs, identifying the rows to delete
    :return: True when the DELETE statement executed
    """
    # lock_time is produced locally by lock_nqueuedjobs, so the direct string
    # interpolation does not expose the query to untrusted input
    dquery = """DELETE FROM atlas_pandabigmon.art_results_queue
        WHERE IS_LOCKED = 1
            AND LOCK_TIME = to_date('%s', 'YYYY-MM-DD HH24:MI:SS')""" % (lock_time)
    try:
        cur.execute(dquery)
    except DatabaseError as e:
        # `print e.message` was Python-2-only syntax; log through the module logger
        _logger.exception('Failed to delete queued ART jobs: {}'.format(e))
        raise
    return True
def clear_queue(cur):
    """
    A function to delete all locked jobs from ART_RESULTS_QUEUE, regardless
    of their lock time.
    :param cur: open DB cursor
    :return: True when the DELETE statement executed
    """
    cquery = """DELETE FROM atlas_pandabigmon.art_results_queue
        WHERE IS_LOCKED = 1"""
    try:
        cur.execute(cquery)
    except DatabaseError as e:
        # `print e.message` was Python-2-only syntax; log through the module logger
        _logger.exception('Failed to clear ART results queue: {}'.format(e))
        raise
    return True
1622635 | from setuptools import setup
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory."""
    import os
    # the context manager guarantees the handle is closed promptly (the
    # original leaked it until garbage collection)
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
# Package metadata for pdtweak; executed by pip/setuptools at install time.
# The long description is read from README.md via the read() helper above.
setup(name='pdtweak',
      version='0.1.1',
      description='pandas utility functions',
      long_description=read('README.md'),
      long_description_content_type='text/markdown',
      url='http://github.com/kmedian/pdtweak',
      author='<NAME>',
      author_email='<EMAIL>',
      license='MIT',
      packages=['pdtweak'],
      # minimum versions required at install time
      install_requires=[
          'setuptools>=40.0.0',
          'pandas>=0.25.3'],
      python_requires='>=3.5',
      zip_safe=False)
| StarcoderdataPython |
164568 | <filename>triflow/core/compilers.py
#!/usr/bin/env python
# coding=utf8
from functools import partial
import numpy as np
from scipy.sparse import csc_matrix
from sympy import lambdify
def theano_compiler(model):
    """Take a triflow model and return optimized theano routines.
    Parameters
    ----------
    model: triflow.Model:
        Model to compile
    Returns
    -------
    (theano function, theano_function):
        Optimized routine that compute the evolution equations and their
        jacobian matrix.
    """
    from theano import tensor as T
    from theano.ifelse import ifelse
    import theano.sparse as ths
    from theano import function

    # wrappers mapping sympy Min/Max/Heaviside onto theano ops, with plain
    # Python fallbacks when both operands are scalars
    def th_Min(a, b):
        if isinstance(a, T.TensorVariable) or isinstance(b, T.TensorVariable):
            return T.where(a < b, a, b)
        return min(a, b)

    def th_Max(a, b):
        if isinstance(a, T.TensorVariable) or isinstance(b, T.TensorVariable):
            return T.where(a < b, b, a)
        return max(a, b)

    def th_Heaviside(a):
        if isinstance(a, T.TensorVariable):
            # BUG FIX: this branch returned T.where(a < 0, 1, 1), i.e. 1
            # everywhere; the Heaviside step is 0 for a < 0, consistent with
            # the scalar branch below.
            return T.where(a < 0, 0, 1)
        return 0 if a < 0 else 1

    # symbolic placeholder vectors for every model argument
    mapargs = {arg: T.vector(arg)
               for arg, sarg
               in zip(model._args, model._symbolic_args)}
    to_feed = mapargs.copy()
    # uniform grid spacing derived from the coordinate vector
    x_th = mapargs['x']
    N = x_th.size
    L = x_th[-1] - x_th[0]
    dx = L / (N - 1)
    to_feed['dx'] = dx
    periodic = T.scalar("periodic", dtype="int32")
    middle_point = int((model._window_range - 1) / 2)
    th_args = [mapargs[key]
               for key
               in [*model._indep_vars,
                   *model._dep_vars,
                   *model._help_funcs,
                   *model._pars]] + [periodic]
    # pad each unknown according to the boundary condition (periodic wrap vs
    # edge replication) and expose its shifted views (U_m1, U, U_p1, ...)
    map_extended = {}
    for (varname, discretisation_tree) in \
            model._symb_vars_with_spatial_diff_order.items():
        pad_left, pad_right = model._bounds
        th_arg = mapargs[varname]
        per_extended_var = T.concatenate([th_arg[pad_left:],
                                          th_arg,
                                          th_arg[:pad_right]])
        edge_extended_var = T.concatenate([[th_arg[0]] * middle_point,
                                           th_arg,
                                           [th_arg[-1]] * middle_point])
        extended_var = ifelse(periodic,
                              per_extended_var,
                              edge_extended_var)
        map_extended[varname] = extended_var
        for order in range(pad_left, pad_right + 1):
            if order != 0:
                var = ("{}_{}{}").format(varname,
                                         'm' if order < 0 else 'p',
                                         np.abs(order))
            else:
                var = varname
            new_var = extended_var[order - pad_left:
                                   extended_var.size +
                                   order - pad_right]
            to_feed[var] = new_var
    # evaluate the symbolic evolution equations on the shifted views
    F = lambdify((model._symbolic_args),
                 expr=model.F_array.tolist(),
                 modules=[T, {"Max": th_Max,
                              "Min": th_Min,
                              "Heaviside": th_Heaviside}])(
        *[to_feed[key]
          for key
          in model._args]
    )
    F = T.concatenate(F, axis=0).reshape((model._nvar, N)).T
    F = T.stack(F).flatten()
    J = lambdify((model._symbolic_args),
                 expr=model.J_array.tolist(),
                 modules=[T, {"Max": th_Max,
                              "Min": th_Min,
                              "Heaviside": th_Heaviside}])(
        *[to_feed[key]
          for key
          in model._args]
    )
    # normalize scalar / constant jacobian entries into full-length columns
    J = [j if j != 0 else T.constant(0.)
         for j in J]
    J = [j if not isinstance(j, (int, float)) else T.constant(j)
         for j in J]
    J = T.stack([T.repeat(j, N) if j.ndim == 0 else j
                 for j in J])
    J = J[model._sparse_indices[0]].T.squeeze()
    # build the (row, col) indexing of the banded jacobian, honoring the
    # same periodic / edge-replicated boundary handling as above
    i = T.arange(N).dimshuffle([0, 'x'])
    idx = T.arange(N * model._nvar).reshape((N, model._nvar)).T
    edge_extended_idx = T.concatenate([T.repeat(idx[:, :1],
                                                middle_point,
                                                axis=1),
                                       idx,
                                       T.repeat(idx[:, -1:],
                                                middle_point,
                                                axis=1)],
                                      axis=1).T.flatten()
    per_extended_idx = T.concatenate([idx[:, -middle_point:],
                                      idx,
                                      idx[:, :middle_point]],
                                     axis=1).T.flatten()
    extended_idx = ifelse(periodic,
                          per_extended_idx,
                          edge_extended_idx)
    rows = T.tile(T.arange(model._nvar),
                  model._window_range * model._nvar) + i * model._nvar
    cols = T.repeat(T.arange(model._window_range * model._nvar),
                    model._nvar) + i * model._nvar
    rows = rows[:, model._sparse_indices].reshape(J.shape).flatten()
    cols = extended_idx[cols][:, model._sparse_indices] \
        .reshape(J.shape).flatten()
    # sort by column so the data matches CSC storage order
    permutation = T.argsort(cols)
    J = J.flatten()[permutation]
    rows = rows[permutation]
    cols = cols[permutation]
    # column pointer array (indptr) from the per-column entry counts
    count = T.zeros((N * model._nvar + 1,), dtype=int)
    uq, cnt = T.extra_ops.Unique(False, False, True)(cols)
    count = T.set_subtensor(count[uq + 1], cnt)
    indptr = T.cumsum(count)
    shape = T.stack([N * model._nvar, N * model._nvar])
    sparse_J = ths.CSC(J, rows, indptr, shape)
    F_theano_function = function(inputs=th_args,
                                 outputs=F,
                                 on_unused_input='ignore',
                                 allow_input_downcast=True)
    J_theano_function = function(inputs=th_args,
                                 outputs=sparse_J,
                                 on_unused_input='ignore',
                                 allow_input_downcast=True)
    return F_theano_function, J_theano_function
def numpy_compiler(model):
    """Take a triflow model and return optimized numpy routines.
    Parameters
    ----------
    model: triflow.Model:
        Model to compile
    Returns
    -------
    (numpy function, numpy function):
        Optimized routine that compute the evolution equations and their
        jacobian matrix.
    """
    # lambdify's numpy printer maps sympy Min/Max onto np.amin/np.amax called
    # with a single tuple of operands, hence the one-argument wrappers
    def np_Min(args):
        a, b = args
        return np.where(a < b, a, b)

    def np_Max(args):
        a, b = args
        return np.where(a < b, b, a)

    def np_Heaviside(a):
        # BUG FIX: this returned np.where(a < 0, 1, 1), i.e. 1 everywhere;
        # the Heaviside step is 0 for a < 0 (matching theano_compiler's
        # scalar fallback `0 if a < 0 else 1`).
        return np.where(a < 0, 0, 1)

    f_func = lambdify((model._symbolic_args),
                      expr=model.F_array.tolist(),
                      modules=[{"amax": np_Max,
                                "amin": np_Min,
                                "Heaviside": np_Heaviside},
                               "numpy"])
    j_func = lambdify((model._symbolic_args),
                      expr=model._J_sparse_array.tolist(),
                      modules=[{"amax": np_Max,
                                "amin": np_Min,
                                "Heaviside": np_Heaviside},
                               "numpy"])
    # bind the model and the lambdified kernels into the public signatures
    compute_F = partial(compute_F_numpy, model, f_func)
    compute_J = partial(compute_J_numpy, model, j_func)
    return compute_F, compute_J
def init_computation_numpy(model, *input_args):
    """Prepare the padded fields shared by compute_F_numpy / compute_J_numpy.

    *input_args* is expected to carry, in order, the model's independent
    variables, dependent variables, helper functions, parameters and a final
    'periodic' flag.  Returns (args, map_extended, N, middle_point, periodic)
    where map_extended also contains the shifted views (U_m1, U_p1, ...) of
    every unknown.
    """
    mapargs = {key: input_args[i]
               for i, key
               in enumerate([*model._indep_vars,
                             *model._dep_vars,
                             *model._help_funcs,
                             *[*model._pars, "periodic"]])}
    # uniform grid spacing derived from the coordinate vector
    x = mapargs["x"]
    N = x.size
    L = x[-1] - x[0]
    dx = L / (N - 1)
    periodic = mapargs["periodic"]
    middle_point = int((model._window_range - 1) / 2)
    args = [mapargs[key]
            for key
            in [*model._indep_vars,
                *model._dep_vars,
                *model._help_funcs,
                *model._pars]] + [periodic]
    mapargs['dx'] = dx
    map_extended = mapargs.copy()
    # pad each unknown (periodic wrap vs edge replication) and expose its
    # shifted views under suffixed names, e.g. U_m1 / U / U_p1
    for (varname, discretisation_tree) in \
            model._symb_vars_with_spatial_diff_order.items():
        pad_left, pad_right = model._bounds
        arg = mapargs[varname]
        if periodic:
            extended_var = np.concatenate([arg[pad_left:],
                                           arg,
                                           arg[:pad_right]])
        else:
            extended_var = np.concatenate([[arg[0]] * middle_point,
                                           arg,
                                           [arg[-1]] * middle_point])
        map_extended[varname] = extended_var
        for order in range(pad_left, pad_right + 1):
            if order != 0:
                var = ("{}_{}{}").format(varname,
                                         'm' if order < 0 else 'p',
                                         np.abs(order))
            else:
                var = varname
            new_var = extended_var[order - pad_left:
                                   extended_var.size +
                                   order - pad_right]
            map_extended[var] = new_var
    return args, map_extended, N, middle_point, periodic
def compute_F_numpy(model, f_func, *input_args):
    """Evaluate the discretized evolution equations F of a triflow model.

    *f_func* is the lambdified numpy kernel produced by numpy_compiler;
    *input_args* follows the layout expected by init_computation_numpy.
    Returns a flat array with the per-variable equations interleaved
    point by point.
    """
    _, extended_fields, npoints, _, _ = \
        init_computation_numpy(model, *input_args)
    raw = f_func(*(extended_fields[name] for name in model._args))
    # interleave the nvar equation blocks so values are grouped per grid point
    per_point = np.concatenate(raw, axis=0).reshape((model._nvar, npoints)).T
    return np.stack(per_point).flatten()
def compute_J_numpy(model, j_func, *input_args):
    """Evaluate the sparse jacobian of a triflow model as a scipy CSC matrix.

    *j_func* is the lambdified numpy kernel produced by numpy_compiler;
    *input_args* follows the layout expected by init_computation_numpy.
    """
    args, map_extended, N, middle_point, periodic = \
        init_computation_numpy(model, *input_args)
    J = j_func(*[map_extended[key]
                 for key
                 in model._args])
    # broadcast scalar jacobian entries into full-length columns
    J = np.stack([np.repeat(j, N) if len(
        np.array(j).shape) == 0 else j for j in J])
    J = J.T.squeeze()
    i = np.arange(N)[:, None]
    idx = np.arange(N * model._nvar).reshape((N, model._nvar)).T
    # extend the unknown indices with the same boundary handling (periodic
    # wrap vs edge replication) used when padding the fields
    if periodic:
        extended_idx = np.concatenate([idx[:, -middle_point:],
                                       idx,
                                       idx[:, :middle_point]],
                                      axis=1).T.flatten()
    else:
        extended_idx = np.concatenate([np.repeat(idx[:, :1],
                                                 middle_point,
                                                 axis=1),
                                       idx,
                                       np.repeat(idx[:, -1:],
                                                 middle_point,
                                                 axis=1)],
                                      axis=1).T.flatten()
    # (row, col) coordinates of every non-zero of the banded jacobian
    rows = np.tile(np.arange(model._nvar),
                   model._window_range * model._nvar) + i * model._nvar
    cols = np.repeat(np.arange(model._window_range * model._nvar),
                     model._nvar) + i * model._nvar
    rows = rows[:, model._sparse_indices].reshape(J.shape)
    cols = extended_idx[cols][:, model._sparse_indices].reshape(J.shape)
    rows = rows.flatten()
    cols = cols.flatten()
    sparse_J = csc_matrix((J.flatten(), (rows, cols)),
                          shape=(N * model._nvar, N * model._nvar))
    return sparse_J
| StarcoderdataPython |
133287 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import contextlib
import re
import numpy as np
import proto.framework_pb2 as framework_pb2
from . import core
import unique_name
# Public API of this module.
__all__ = [
    'Block',
    'Variable',
    'Program',
    'Operator',
    'default_startup_program',
    'default_main_program',
    'program_guard',
    'switch_startup_program',
    'switch_main_program',
    'get_var',
]
# Well-known variable-name markers exported by the C++ core.
EMPTY_VAR_NAME = core.kEmptyVarName()
TEMP_VAR_NAME = core.kTempVarName()
GRAD_VAR_SUFFIX = core.kGradVarSuffix()   # appended by grad_var_name()
ZERO_VAR_SUFFIX = core.kZeroVarSuffix()
def grad_var_name(var_name):
    """Return the name of the gradient variable paired with *var_name*."""
    return "{}{}".format(var_name, GRAD_VAR_SUFFIX)
def convert_np_dtype_to_dtype_(np_dtype):
    """
    Convert the data type in numpy to the data type in Paddle
    Args:
        np_dtype(np.dtype): the data type in numpy
    Returns(core.VarDesc.VarType): the data type in Paddle
    Raises:
        ValueError: when numpy has no Paddle counterpart for the dtype
    """
    dtype = np.dtype(np_dtype)
    # lookup table keyed by canonical numpy dtype objects
    np_to_paddle = {
        np.dtype('float32'): core.VarDesc.VarType.FP32,
        np.dtype('float64'): core.VarDesc.VarType.FP64,
        np.dtype('float16'): core.VarDesc.VarType.FP16,
        np.dtype('int32'): core.VarDesc.VarType.INT32,
        np.dtype('int16'): core.VarDesc.VarType.INT16,
        np.dtype('int64'): core.VarDesc.VarType.INT64,
        np.dtype('bool'): core.VarDesc.VarType.BOOL,
        np.dtype('uint8'): core.VarDesc.VarType.UINT8,
    }
    if dtype not in np_to_paddle:
        raise ValueError("Not supported numpy dtype " + str(dtype))
    return np_to_paddle[dtype]
def dtype_is_floating(dtype):
    """
    Check the data type is floating or not.
    Args:
        dtype(np.dtype|core.VarDesc.VarType): data type.
            Could be numpy format or Paddle format
    Returns(bool): True if data type is a float value
    """
    paddle_dtype = dtype
    # normalize numpy dtypes into the Paddle enum first
    if not isinstance(paddle_dtype, core.VarDesc.VarType):
        paddle_dtype = convert_np_dtype_to_dtype_(paddle_dtype)
    floating_types = (core.VarDesc.VarType.FP16,
                      core.VarDesc.VarType.FP32,
                      core.VarDesc.VarType.FP64)
    return paddle_dtype in floating_types
def _debug_string_(proto, throw_on_error=True):
"""
Get the debug string of a protobuf message. The message could be not
initialized.
Args:
proto(google.protobuf.message.Message): The protobuf message
throw_on_error(bool): True if raise an error when the protobuf message
is not initialized.
Returns(str): The debug string of the protobuf message
"""
error_fields = list()
if not proto.IsInitialized(error_fields) and throw_on_error:
raise ValueError("{0} are not initialized.\nThe message is {1}:\n".
format(error_fields, proto))
return proto.__str__()
class Variable(object):
    """
    Python variable. Every input and output of an operator is a variable. Every
    variable belongs to a block. The variable has a name and two variables in
    different blocks could have the same name.
    There are many kinds of variables. Please reference the framework.proto for
    details.
    Notes: The constructor of Variable should not be invoked directly. Please
    use `Block.create_var` to create a variable.
    >>> cur_program = Program()
    >>> cur_block = cur_program.current_block()
    >>> new_variable = cur_block.create_var(
    >>>                    name="X", shape=[-1, 23, 48], dtype='float32')
    Args:
        block(Block): The associated block. It will be passed by
            `Block.create_var` automatically.
        type(core.VarDesc.VarType): Variable type. Please reference the
            framework.proto for details.
        shape(tuple|list|None): The shape of variable. -1 means the batch size.
            Some kinds of variable do not contain shape, just set it to None.
        dtype(np.dtype|core.VarDesc.VarType|str): The data type of variable.
        lod_level(int): The level of lod tensor. 0 means it is not a time
            series data.
        capacity(int): The capacity of Channel variable. Ignored
            for other types.
        persistable(bool): True if the variable should be saved as check point.
            Defaults to False.
        stop_gradient(bool): True if the variable will stop to calculate
            gradients when backward. Defaults to False.
    """

    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
                 name=None,
                 shape=None,
                 dtype=None,
                 lod_level=None,
                 capacity=None,
                 persistable=None,
                 error_clip=None,
                 stop_gradient=False,
                 is_data=False,
                 **kwargs):
        self.block = block
        self.error_clip = error_clip
        if name is None:
            name = unique_name.generate('_generated_var')
        # Reuse the C++ VarDesc when a variable of this name already exists
        # in the block; every supplied attribute must then match the existing
        # one, otherwise a ValueError is raised below.
        is_new_var = False
        self.desc = self.block.desc.find_var(name)
        if self.desc is None:
            self.desc = self.block.desc.var(name)
            is_new_var = True
        if is_new_var:
            self.desc.set_type(type)
        elif self.desc.type() != type:
            raise ValueError("Variable {0} has been created before. The "
                             "previous type is {1}; the new type is {2}. They"
                             " are not matched".format(self.name,
                                                       self.desc.type(), type))
        if shape is not None:
            if is_new_var:
                self.desc.set_shape(shape)
            else:
                old_shape = self.shape
                shape = tuple(shape)
                if shape != old_shape:
                    raise ValueError(
                        "Variable {0} has been created before. the previous "
                        "shape is {1}; the new shape is {2}. They are not "
                        "matched.".format(self.name, old_shape, shape))
        if dtype is not None:
            if not isinstance(dtype, core.VarDesc.VarType):
                dtype = convert_np_dtype_to_dtype_(dtype)
            if is_new_var:
                self.desc.set_dtype(dtype)
            else:
                old_dtype = self.dtype
                if dtype != old_dtype:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous data type is {1}; the new "
                                     "data type is {2}. They are not "
                                     "matched.".format(self.name, old_dtype,
                                                       dtype))
        if lod_level is not None:
            if is_new_var:
                self.desc.set_lod_level(lod_level)
            else:
                if lod_level != self.lod_level:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous lod_level is {1}; the new "
                                     "lod_level is {2}. They are not "
                                     "matched".format(self.name, self.lod_level,
                                                      lod_level))
        if persistable is not None:
            if is_new_var:
                self.desc.set_persistable(persistable)
            else:
                if persistable != self.persistable:
                    raise ValueError(
                        "Variable {0} has been created before."
                        "The previous persistable is {1}; the new "
                        "persistable is {2}. They are not matched".format(
                            self.name, self.persistable, persistable))
        if capacity is not None:
            if is_new_var:
                self.desc.set_capacity(capacity)
            else:
                # TODO(abhinavarora) : Compare with set capacity once,
                # get_capacity is implemented
                pass
        # register this variable with its block
        self.block.vars[name] = self
        self.op = None  # the operator that outputs this variable, if any
        self.stop_gradient = stop_gradient
        self.is_data = is_data

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Get debug string.
        Args:
            throw_on_error(bool): True if raise an exception when self is not
                intialized.
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when with_details is True
        Returns(str): The debug string.
        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        res_str = _debug_string_(proto, throw_on_error)
        if with_details:
            additional_attr = ("error_clip", "stop_gradient")
            for attr_name in additional_attr:
                res_str += "%s: %s\n" % (attr_name,
                                         str(getattr(self, attr_name)))
        return res_str

    __repr__ = __str__

    def set_desc(self, input):
        self.desc = input

    # The properties below simply proxy the underlying C++ VarDesc.
    @property
    def persistable(self):
        return self.desc.persistable()

    @persistable.setter
    def persistable(self, p):
        self.desc.set_persistable(p)

    @property
    def name(self):
        return self.desc.name()

    @name.setter
    def name(self, new_name):
        self.desc.set_name(new_name)

    @property
    def shape(self):
        # convert to tuple, make it as same as numpy API.
        return tuple(self.desc.shape())

    @property
    def dtype(self):
        return self.desc.dtype()

    @property
    def lod_level(self):
        return self.desc.lod_level()

    @property
    def type(self):
        return self.desc.type()

    def set_error_clip(self, error_clip):
        self.error_clip = error_clip
def get_all_op_protos():
    """
    Get all registered op proto from PaddlePaddle C++ end.
    Returns(list): list of OpProto
    """
    serialized_protos = core.get_all_op_protos()
    return [framework_pb2.OpProto.FromString(str(pbstr))
            for pbstr in serialized_protos]
class OpProtoHolder(object):
    """
    A global variable to hold all OpProtos from C++ as a map
    """

    @classmethod
    def instance(cls):
        """Return the process-wide singleton, creating it on first use."""
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        assert not hasattr(
            self.__class__,
            '_instance'), 'Please use `instance()` to get OpProtoHolder object!'
        # op type name -> OpProto
        self.op_proto_map = {proto.type: proto
                             for proto in get_all_op_protos()}

    def get_op_proto(self, type):
        """
        Get OpProto by a type string.
        Args:
            type(str): The type that operator registered in C++ side.
        Returns(framework_pb2.OpProto): The OpProto
        Raises:
            ValueError: when no operator of this type was registered
        """
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]
class Operator(object):
    """
    Python Operator class. The operator represents the build in instructions in a
    Block. Users can use the build in instructions to describe their neural
    network.
    """

    def __init__(self,
                 block,
                 desc,
                 type=None,
                 inputs=None,
                 outputs=None,
                 attrs=None):
        """
        Constructor.
        Notes: The constructor of operator should not be invoked directly. Use
        Block.append_op or Block.prepend_op instead.
        >>> cur_program = Program()
        >>> cur_block = cur_program.current_block()
        >>> # var1 += var2 + var3
        >>> cur_block.append_op(type="sum",
        >>>                     inputs={"X": [var1, var2, var3]},
        >>>                     outputs={"Out": [var1]})
        Args:
            block(Block): The block has the current operator.
            desc(core.OpDesc): The protobuf description.
            type(str): The type of operator.
            inputs(dict): The input dictionary. Key is the input parameter name.
                Value is a list of variables.
            outputs(dict): The output dictionary which has the same format with
                inputs.
            attrs(dict): The attributes dictionary. Key is attribute name. Value
                is the attribute value. The attribute type should be as same as
                the type registered in C++
        """
        self.block = block
        self.desc = desc
        self.attrs = attrs
        # a non-empty desc means it was already filled in (e.g. cloned from
        # C++); nothing further to do
        if len(self.desc.type()) != 0:
            return
        if type is None:
            raise ValueError(
                "`type` to initilized an Operator can not be None.")
        self.desc.set_type(type)
        proto = OpProtoHolder.instance().get_op_proto(type)

        def find_name(var_list, name):
            # True when `name` is present in var_list with a non-None value
            for var_name in var_list:
                if var_list[var_name] is not None and var_name == name:
                    return True
            return False

        if inputs is not None:
            # validate the provided inputs against the registered proto:
            # every non-dispensable input must be present, and only
            # duplicable inputs may carry more than one argument
            for in_proto in proto.inputs:
                found = find_name(inputs, in_proto.name)
                assert found or in_proto.dispensable, "Input {} not found".format(
                    in_proto.name)
                if found:
                    in_args = inputs[in_proto.name]
                    if not isinstance(in_args, list):
                        in_args = [in_args]
                    if not in_proto.duplicable and len(in_args) > 1:
                        raise ValueError(
                            "Input %s expects only one input, but %d are given."
                            % (in_proto.name, len(in_args)))
                    in_arg_names = []
                    for arg in in_args:
                        # arguments may be given either as names or Variables
                        if isinstance(arg, basestring):
                            in_arg_names.append(arg)
                        else:
                            in_arg_names.append(arg.name)
                    self.desc.set_input(in_proto.name, in_arg_names)
                else:
                    self.desc.set_input(in_proto.name, [])

        if outputs is not None:
            # outputs must match the proto exactly (no missing, no extras)
            given = set()
            need = set()
            for n in outputs:
                given.add(n)
            for m in proto.outputs:
                need.add(m.name)
            if not given == need:
                raise ValueError(("Incorrect setting for output(s) of "
                                  "operator \"%s\". Need: [%s] Given: [%s]") %
                                 (type, ", ".join(str(e) for e in need),
                                  ", ".join(str(e) for e in given)))
            for out_proto in proto.outputs:
                out_args = outputs[out_proto.name]
                if not isinstance(out_args, list):
                    out_args = [out_args]
                if not out_proto.duplicable and len(out_args) > 1:
                    raise ValueError(
                        "Output %s expects only one output, but %d are given." %
                        (out_proto.name, len(out_args)))
                out_arg_names = []
                for arg in out_args:
                    out_arg_names.append(arg.name)
                    # record this operator as the producer of the output var
                    arg.op = self
                self.desc.set_output(out_proto.name, out_arg_names)

        if attrs is not None:
            if not isinstance(attrs, dict):
                raise TypeError("'attrs' should be a dict.")
            for attr in proto.attrs:
                attr_name = attr.name
                if (attr_name not in attrs) or (attrs[attr_name] is None):
                    continue
                # Block / desc attributes need dedicated setters
                if isinstance(attrs[attr_name], Block):
                    self.desc.set_block_attr(attr_name, attrs[attr_name].desc)
                elif isinstance(attrs[attr_name], core.BlockDesc) or \
                        isinstance(attrs[attr_name], core.ProgramDesc):
                    self.desc.set_serialized_attr(
                        attr_name, attrs[attr_name].serialize_to_string())
                else:
                    self.desc.set_attr(attr_name, attrs[attr_name])

        self.desc.check_attrs()
        # operators without a compute kernel skip type/shape inference
        no_kernel_op_set = {
            'feed', 'fetch', 'save', 'load', 'recurrent', 'go',
            'rnn_memory_helper_grad', 'conditional_block', 'while', 'send',
            'recv', 'listen_and_serv', 'parallel_do', 'save_combine',
            'load_combine', 'ncclInit', 'channel_create', 'channel_close',
            'channel_send', 'channel_recv', 'select', 'gen_nccl_id'
        }
        if type not in no_kernel_op_set:
            self.desc.infer_var_type(self.block.desc)
            self.desc.infer_shape(self.block.desc)

    def to_string(self, throw_on_error):
        """
        To debug string.
        Args:
            throw_on_error(bool): raise exception when self is not initialized
                when throw_on_error is True
        Returns(str): The debug string.
        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def __str__(self):
        return self.to_string(True)

    __repr__ = __str__

    @property
    def type(self):
        return self.desc.type()

    def input(self, name):
        """
        Get input arguments by the input parameter name
        Args:
            name(str): The input parameter name
        Returns(list): return the list of argument names associated with the
            specific parameter name.
        """
        return self.desc.input(name)

    def rename_input(self, old_name, new_name):
        self.desc.rename_input(old_name, new_name)

    def rename_output(self, old_name, new_name):
        self.desc.rename_output(old_name, new_name)

    @property
    def input_names(self):
        """
        Get all input parameter names
        Returns(list): return a list of input parameter names
        """
        return self.desc.input_names()

    @property
    def input_arg_names(self):
        return self.desc.input_arg_names()

    @property
    def output_arg_names(self):
        return self.desc.output_arg_names()

    def output(self, name):
        """
        Get output arguments by the output parameter name
        Args:
            name(str): The output parameter name
        Returns(list): return the list of argument names associated with the
            specific parameter name.
        """
        return self.desc.output(name)

    @property
    def output_names(self):
        """
        Get all output parameter names
        Returns(list): return a list of output parameter names
        """
        return self.desc.output_names()

    @property
    def idx(self):
        """
        Return the array index of current operator.
        Returns(int): The array index in block.ops array
        Raises:
            ValueError: when the operator is not found.
        """
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
        raise ValueError(
            "Can't find op itself in it's block. It could be a bug of Paddle.")

    def has_attr(self, name):
        """
        operator has the attribute with name or not.
        Args:
            name(str): the attribute name
        Returns(bool): True if has this attribute.
        """
        return self.desc.has_attr(name)

    def attr_type(self, name):
        """
        Get the type of attribute by attribute name
        Args:
            name(str): the attribute name
        Returns(core.AttrType): the attribute type
        """
        return self.desc.attr_type(name)

    @property
    def attr_names(self):
        """
        Get all attribute names
        Returns(list): The list of attribute name
        """
        return self.desc.attr_names()

    def attr(self, name):
        """
        Get attribute by name
        Args:
            name(str): the attribute name
        Returns(bool|int|str|float|list): The attribute value. The return value
            can be any valid attribute type.
        """
        return self.desc.attr(name)

    def block_attr(self, name):
        """
        Get the block attribute by name
        Args:
            name(str): the attribute name
        Returns(int): the block index
        """
        return self.desc.block_attr(name)

    def all_attrs(self):
        """
        Get the attribute dict
        Returns(dict): The Operator's attribute dict
        """
        attr_names = self.attr_names
        attr_map = {}
        for n in attr_names:
            # sub_block is stored as a block index, not a plain attribute
            if n == 'sub_block':
                attr_map[n] = self.block_attr(n)
            else:
                attr_map[n] = self.attr(n)
        return attr_map
class Block(object):
def __init__(self, program, idx):
self.desc = program.desc.block(idx)
self.vars = collections.OrderedDict() # var_name --> var
self.ops = list() # operator list
self.program = program
self.removed_vars = collections.OrderedDict()
def __str__(self):
return self.to_string(True)
def to_string(self, throw_on_error, with_details=False):
"""
To debug string.
Args:
throw_on_error(bool): raise exception when self is not initialized
when throw_on_error is True
with_details(bool): more details about variables and parameters
(e.g. trainable, optimize_attr, ...) will be printed when with_details is True
Returns(str): The debug string.
"""
assert isinstance(throw_on_error, bool) and isinstance(with_details,
bool)
if with_details:
re_add_indent = re.compile(r"\n(.)")
res_str = "blocks {\n idx: %d\n parent_idx: %d" % (
self.idx, self.parent_idx)
for var in self.vars.itervalues():
res_str += "\n vars {\n %s }" % re_add_indent.sub(
r"\n \1", var.to_string(throw_on_error, with_details))
for op in self.ops:
res_str += "\n ops {\n %s }" % re_add_indent.sub(
r"\n \1", op.to_string(throw_on_error))
res_str += "\n}"
else:
protostr = self.desc.serialize_to_string()
proto = framework_pb2.BlockDesc.FromString(str(protostr))
res_str = _debug_string_(proto, throw_on_error)
return res_str
__repr__ = __str__
@property
def parent_idx(self):
return self.desc.parent
@property
def forward_block_idx(self):
return self.desc.get_forward_block_idx()
def set_forward_block_idx(self, idx):
self.desc.set_forward_block_idx(idx)
@property
def idx(self):
return self.desc.id
def var(self, name):
if not isinstance(name, basestring):
raise TypeError()
v = self.vars.get(name, None)
if v is None:
raise ValueError("var %s not in this block" % name)
return v
def var_recursive(self, name):
frontier = list()
visited = set()
frontier.append(self)
prog = self.program
while len(frontier) != 0: # BFS
cur = frontier[0]
frontier = frontier[1:]
if id(cur) in visited:
continue
if cur.has_var(name):
return cur.var(name)
if cur.parent_idx != -1:
frontier.append(prog.block(cur.parent_idx))
if cur.forward_block_idx != -1:
frontier.append(prog.block(cur.forward_block_idx))
visited.add(id(cur))
raise ValueError("Var {0} is not found recursively".format(name))
def all_parameters(self):
return list(self.iter_parameters())
def iter_parameters(self):
return (item[1] for item in self.vars.iteritems()
if isinstance(item[1], Parameter))
def create_var(self, *args, **kwargs):
var = Variable(block=self, *args, **kwargs)
if 'initializer' in kwargs:
kwargs['initializer'](var, self)
return var
def has_var(self, name):
return name in self.vars
def rename_var(self, name, new_name):
"""
Rename variable in vars and ops' inputs and outputs
"""
if not self.has_var(name):
raise ValueError("var %s is not in current" % name)
v = self.var(name)
if type(v) == Parameter:
var_type = "Parameter"
stop_gradient = v.stop_gradient
trainable = v.trainable
optimize_attr = v.optimize_attr
regularizer = v.regularizer
gradient_clip_attr = v.gradient_clip_attr
error_clip = v.error_clip
elif type(v) == Variable:
var_type = "Variable"
error_clip = v.error_clip
stop_gradient = v.stop_gradient
else:
raise ValueError("unsupported var type: %s", type(v))
orig_var_type = v.type
self.desc.rename_var(name, new_name)
# NOTE: v is destroyed by C++ after calling rename_var.
d = self.desc.find_var(new_name)
if var_type == "Parameter":
var = Parameter(
self,
d.shape(),
d.dtype(),
type=orig_var_type,
name=new_name,
stop_gradient=stop_gradient,
trainable=trainable,
optimize_attr=optimize_attr,
regularizer=regularizer,
gradient_clip_attr=gradient_clip_attr,
error_clip=error_clip)
elif var_type == "Variable":
var = Variable(
self,
type=orig_var_type,
name=new_name,
error_clip=error_clip,
stop_gradient=stop_gradient)
# rename the python side, sync_with_cpp will only add
# new vars/ops to python side.
self.vars[new_name] = var
del self.vars[name]
self.sync_with_cpp()
    def remove_var(self, name):
        """Remove variable *name* from both the C++ desc and the python dict."""
        self.sync_with_cpp()
        self.desc.remove_var(name)
        del self.vars[name]
    def create_parameter(self, *args, **kwargs):
        """Create a Parameter. Parameters always live in the program's
        *global* block, regardless of which block this is called on."""
        global_block = self.program.global_block()
        param = Parameter(global_block, *args, **kwargs)
        if 'initializer' in kwargs:
            # NOTE(review): the initializer receives *self* (the calling
            # block), not global_block — confirm this is intentional.
            kwargs['initializer'](param, self)
        return param
    def append_op(self, *args, **kwargs):
        """Append a new Operator to the end of this block (C++ and python)."""
        op_desc = self.desc.append_op()
        op = Operator(block=self, desc=op_desc, *args, **kwargs)
        self.ops.append(op)
        return op
    def insert_op(self, index, *args, **kwargs):
        """Insert a new Operator at *index*, syncing with C++ first so the
        python-side op list matches the desc before insertion."""
        self.sync_with_cpp()
        op_desc = self.desc.insert_op(index)
        op = Operator(block=self, desc=op_desc, *args, **kwargs)
        self.ops.insert(index, op)
        return op
    def remove_op(self, index):
        """Remove the Operator at *index* from both C++ and python sides."""
        self.sync_with_cpp()
        self.desc.remove_op(index, index + 1)
        del self.ops[index]
    def slice_ops(self, start, end):
        """Return the python-side operators in ``[start, end)``."""
        return self.ops[start:end]
    def prepend_op(self, *args, **kwargs):
        """Insert a new Operator at the head of this block."""
        op_desc = self.desc.prepend_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.insert(0, op)
        return op
def sync_with_cpp(self):
"""
Sync from the desc on the c++ end.
This method is used to synchronize the c++ desc instance generated by backward.
"""
# sync variables from cpp
for var in self.desc.all_vars():
if not self.has_var(var.name()):
self.create_var(name=var.name(), desc=var, type=var.type())
# sync variables removed from c++ end
for var in self.vars.keys():
if not self.desc.find_var(var):
self.vars.pop(var)
# sync operators from cpp
ops_in_cpp = []
for op_idx in range(0, self.desc.op_size()):
ops_in_cpp.append(self.desc.op(op_idx))
if len(self.ops) != 0:
first_op_in_python = self.ops[0].desc
last_op_in_python = self.ops[len(self.ops) - 1].desc
start_index = None
end_index = None
for index in range(len(ops_in_cpp)):
if first_op_in_python == ops_in_cpp[index]:
start_index = index
if last_op_in_python == ops_in_cpp[index]:
end_index = index
assert start_index is not None
assert end_index is not None
assert start_index <= end_index
else:
start_index = 0
end_index = -1
# sync ops append to the head of cpp_ops
for index in range((start_index - 1 - 1), -1, -1):
op_desc = ops_in_cpp[index]
op = Operator(self, op_desc)
self.ops.insert(0, op)
# sync ops append to the end of cpp_ops
for index in range((end_index + 1), len(ops_in_cpp)):
op_desc = ops_in_cpp[index]
op = Operator(self, op_desc)
self.ops.append(op)
# sync ops removed from c++ end
if end_index != -1 and end_index < len(self.ops):
ops_in_cpp_index = 0
ops_in_python_index = 0
while ops_in_python_index < len(
self.ops) and ops_in_cpp_index < len(ops_in_cpp):
if self.ops[ops_in_python_index].desc != ops_in_cpp[
ops_in_cpp_index]:
del self.ops[ops_in_python_index]
else:
ops_in_cpp_index += 1
ops_in_python_index += 1
assert len(self.ops) == len(ops_in_cpp)
for index in range(len(self.ops)):
assert self.ops[index].desc == ops_in_cpp[index]
    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from the other block.
        For every Parameter in *other*, the same-named Variable in this block
        is re-wrapped as a Parameter carrying the source's optimizer
        attributes (trainable, regularizer, clipping, ...).
        Args:
            other(Block): the other block
        Raises:
            TypeError: if *other* is not a Block.
            ValueError: if a parameter of *other* has no counterpart here.
        Returns:
            None
        """
        if not isinstance(other, Block):
            raise TypeError("copy_param_info_from should be invoked with Block")
        for p in other.iter_parameters():
            assert isinstance(p, Parameter)
            v = self.vars.get(p.name, None)
            if v is None:
                raise ValueError("copy_param_info_from should be invoked with "
                                 "same topology")
            assert isinstance(v, Variable)
            # Shape/dtype/type/lod come from the local variable; optimizer
            # attributes come from the source parameter.
            new_p = Parameter(
                block=self,
                shape=v.shape,
                dtype=v.dtype,
                type=v.type,
                lod_level=v.lod_level,
                stop_gradient=p.stop_gradient,
                trainable=p.trainable,
                optimize_attr=p.optimize_attr,
                regularizer=p.regularizer,
                gradient_clip_attr=p.gradient_clip_attr,
                error_clip=p.error_clip,
                name=v.name)
            self.vars[new_p.name] = new_p
    def clone_variable(self, var):
        """
        Clone a variable into current block.
        Args:
            var: the variable to be cloned.
        Returns:
            The new variable cloned from 'var' in current block.
        """
        assert isinstance(var, Variable)
        ret_var = None
        # make STEP_SCOPES var can be safely cloned.
        if var.type == core.VarDesc.VarType.STEP_SCOPES:
            ret_var = self.create_var(
                name=var.name, persistable=var.persistable, type=var.type)
        elif var.type == core.VarDesc.VarType.SELECTED_ROWS:
            # SELECTED_ROWS variables carry no lod_level.
            ret_var = self.create_var(
                name=var.name,
                shape=var.shape,
                dtype=var.dtype,
                type=var.type,
                persistable=True,
                is_data=var.is_data)
        else:
            ret_var = self.create_var(
                name=var.name,
                shape=var.shape,
                dtype=var.dtype,
                type=var.type,
                lod_level=var.lod_level,
                persistable=True,
                is_data=var.is_data)
        return ret_var
class Program(object):
    """Python wrapper around a C++ ProgramDesc plus its Block wrappers.

    Block 0 is always the global block; ``current_block_idx`` tracks the
    block new operators are appended to.
    """
    def __init__(self):
        self.desc = core.ProgramDesc()
        self.blocks = [Block(self, 0)]
        self.current_block_idx = 0
        self._seed = 0
    def __str__(self):
        return self.to_string(True)
    def to_string(self, throw_on_error, with_details=False):
        """
        To debug string.
        Args:
            throw_on_error(bool): raise exception when self is not initialized
            when throw_on_error is True
            with_details(bool): more details about variables and parameters
            (e.g. trainable, optimize_attr, ...) will be printed when with_details is True
        Returns(str): The debug string.
        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            # Detailed path: concatenate each block's own debug string.
            res_str = ""
            for block in self.blocks:
                res_str += block.to_string(throw_on_error, with_details)
        else:
            # Plain path: round-trip through the serialized protobuf.
            protostr = self.desc.serialize_to_string()
            proto = framework_pb2.ProgramDesc.FromString(str(protostr))
            res_str = _debug_string_(proto, throw_on_error)
        return res_str
    def get_desc(self):
        """Return the underlying C++ ProgramDesc."""
        return self.desc
    def clone(self, for_test=False):
        """Clone the Program object
        Set for_test to False when we want to clone the program for training.
        Set for_test to True when we want to clone the program for testing.
        Args:
            for_test(bool): Some operators, such as batch_norm and drop_out ops,
                behave differently in training and testing. If for_test is True,
                the is_test attributes in these operators will be set to True for
                testing purposes, otherwise, they remain unchanged.
        Returns(Program):
            The cloned Program object.
        """
        if for_test:
            p = self.inference_optimize()
        else:
            p = Program()
            p.desc = core.ProgramDesc(self.desc)
            p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
            p.sync_with_cpp()
        # Parameter/data flags are python-side info, so copy them explicitly.
        p.copy_param_info_from(self)
        p.copy_data_info_from(self)
        return p
    def prune(self, targets):
        """Prune this program down to the sub-graph needed to compute
        *targets* (Operators, or Variables resolved to their producing op).
        Returns a new Program."""
        if not isinstance(targets, list):
            targets = [targets]
        targets_idx = []
        for t in targets:
            if not isinstance(t, Operator):
                if isinstance(t, Variable):
                    # After transpiler processing, the op that output this
                    # variable maybe has been changed, so t.op is not reliable
                    # and we need to find the current op that generate this
                    # variable here.
                    t.op = None
                    global_block = self.global_block()
                    for idx, op in enumerate(global_block.ops):
                        if t.name in op.output_arg_names:
                            t.op = op
                            break
                    t = t.op
                    if t is None:
                        raise ValueError(
                            "The target variable must have an "
                            "associated operator that generates it.")
                else:
                    raise ValueError("All targets of prune() can only be "
                                     "Variable or Operator.")
            targets_idx.append([t.block.idx, t.idx])
        res = Program()
        res.desc = core.prune(self.desc, targets_idx)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res
    def inference_optimize(self):
        """Return a copy of this program with every op's ``is_test``
        attribute forced to True (for inference)."""
        # this is an alternative implement before
        # core.inference_optimize being fixed.
        res = Program()
        res.desc = core.ProgramDesc(self.desc)
        for i in xrange(res.desc.num_blocks()):
            block = res.desc.block(i)
            for j in xrange(block.op_size()):
                op = block.op(j)
                if op.has_attr('is_test'):
                    op.set_attr('is_test', True)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res
    @staticmethod
    def parse_from_string(binary_str):
        """Deserialize a Program from a binary ProgramDesc string."""
        p = Program()
        p.desc = core.ProgramDesc(binary_str)
        p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
        p.sync_with_cpp()
        return p
    @property
    def random_seed(self):
        """Random seed used by ops in this program (0 means 'not fixed')."""
        return self._seed
    @property
    def num_blocks(self):
        """Number of blocks in the underlying desc."""
        return self.desc.num_blocks()
    @random_seed.setter
    def random_seed(self, seed):
        if not isinstance(seed, int):
            raise ValueError("Seed must be a integer.")
        self._seed = seed
    def __repr__(self):
        return str(self)
    def global_block(self):
        """Block 0: where parameters and feed/fetch variables live."""
        return self.blocks[0]
    def block(self, index):
        """Return the Block at *index*."""
        return self.blocks[index]
    def current_block(self):
        """The block new operators are currently appended to."""
        return self.blocks[self.current_block_idx]
    def create_block(self, parent_idx=None):
        """Append a new block (child of *parent_idx*, or of the current
        block) and make it current. Returns the new Block."""
        new_block_idx = len(self.blocks)
        parent = self.current_block() if parent_idx is None else self.block(
            parent_idx)
        self.desc.append_block(parent.desc)
        self.current_block_idx = new_block_idx
        self.blocks.append(Block(self, self.current_block_idx))
        return self.current_block()
    def rollback(self):
        """Make the parent of the current block current again."""
        self.current_block_idx = self.current_block().parent_idx
    def sync_with_cpp(self):
        """Mirror any blocks added on the C++ side, then sync each block."""
        for block_idx in range(len(self.blocks), self.desc.num_blocks()):
            self.blocks.append(Block(self, block_idx))
        for block in self.blocks:
            block.sync_with_cpp()
    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from other program.
        Args:
            other(Program): Other program
        Returns:
            None
        """
        if not isinstance(other, Program):
            raise TypeError("copy_param_info_from should be invoked with "
                            "Program")
        if len(self.blocks) != len(other.blocks):
            raise ValueError("copy_param_info_from should be invoked with two "
                             "program, with represent the same topology")
        self.global_block().copy_param_info_from(other.global_block())
    def copy_data_info_from(self, other):
        """
        Copy the information of data variables from other program.
        Args:
            other(Program): Other program
        Returns:
            None
        """
        if not isinstance(other, Program):
            # BUGFIX: these two error messages previously named
            # copy_param_info_from, which made failures misleading.
            raise TypeError("copy_data_info_from should be invoked with "
                            "Program")
        if len(self.blocks) != len(other.blocks):
            raise ValueError("copy_data_info_from should be invoked with two "
                             "program, with represent the same topology")
        for var in other.global_block().vars.itervalues():
            if var.is_data:
                self.global_block().var(var.name).is_data = True
    def list_vars(self):
        """Yield every variable of every block in this program."""
        for each_block in self.blocks:
            for each_var in each_block.vars.itervalues():
                yield each_var
class Parameter(Variable):
    """A trainable Variable.

    Always persistable; adds the optimizer-facing attributes (trainable
    flag, per-parameter optimize_attr, regularizer, gradient clipping and
    model-average switch) on top of the plain Variable.
    """
    def __init__(self, block, shape, dtype, **kwargs):
        # Validate before touching the base class.
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
        if len(shape) == 0:
            raise ValueError("Parameter shape cannot be empty")
        for dim in shape:
            if dim < 0:
                raise ValueError("Parameter shape should not be related with "
                                 "batch-size")
        Variable.__init__(
            self, block, persistable=True, shape=shape, dtype=dtype, **kwargs)
        # Optimizer-facing attributes, each overridable through kwargs.
        self.trainable = kwargs.get('trainable', True)
        self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})
        self.regularizer = kwargs.get('regularizer', None)
        self.gradient_clip_attr = kwargs.get('gradient_clip_attr', None)
        self.do_model_average = kwargs.get('do_model_average', None)
    def __str__(self):
        return self.to_string(True)
    def to_string(self, throw_on_error, with_details=False):
        """
        To debug string.
        Args:
            throw_on_error(bool): raise exception when self is not initialized
            when throw_on_error is True
            with_details(bool): also print the parameter-specific attributes
            (trainable, optimize_attr, ...) after the Variable description
        Returns(str): The debug string.
        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        # Base description first; detail flag is forwarded unchanged.
        res_str = Variable.to_string(self, throw_on_error, with_details)
        if with_details:
            for attr_name in ("trainable", "optimize_attr", "regularizer",
                              "gradient_clip_attr", "do_model_average"):
                res_str += "%s: %s\n" % (attr_name,
                                         str(getattr(self, attr_name)))
        return res_str
    __repr__ = __str__
# program is a global instance: a main program (the trainable graph) and a
# startup program (one-shot initialization ops), both module-level singletons.
_main_program_ = Program()
_startup_program_ = Program()
def default_startup_program():
    """
    Get default startup program. In startup program, Paddle will initialize
    parameters, initialize nccl handle, etc.
    Returns:
        Program: startup program
    """
    return _startup_program_
def default_main_program():
    """
    Get default main program. The main program is used for training or testing.
    Returns:
        Program: main program
    """
    return _main_program_
def switch_main_program(program):
    """
    Switch the main program to a new program.
    Args:
        program(Program): The new main program
    Returns:
        Program: The previous main program (so callers can restore it)
    """
    global _main_program_
    prev_program = _main_program_
    _main_program_ = program
    return prev_program
def switch_startup_program(program):
    """
    Switch the startup program to a new program.
    Args:
        program(Program): The new startup program
    Returns:
        Program: The previous startup program (so callers can restore it)
    """
    global _startup_program_
    prev_program = _startup_program_
    _startup_program_ = program
    return prev_program
@contextlib.contextmanager
def program_guard(main_program, startup_program=None):
    """
    Switch program with `with` statement.
    Examples:
        >>> with program_guard(Program()):
        >>>     data = fluid.layers.data(...)
        >>>     hidden = fluid.layers.fc(...)
    Args:
        main_program(Program): New main program inside `with` statement
        startup_program(Program): New startup program inside `with` statement.
            None means do not change startup program.
    Returns:
        None
    """
    if not isinstance(main_program, Program):
        raise TypeError("main_program should be Program")
    main_program = switch_main_program(main_program)
    if startup_program is not None:
        if not isinstance(startup_program, Program):
            raise TypeError("startup_program should be Program")
        startup_program = switch_startup_program(startup_program)
    # BUGFIX: restore the previous programs even if the `with` body raises;
    # without try/finally an exception left the globals pointing at the
    # temporary programs.
    try:
        yield
    finally:
        switch_main_program(main_program)
        if startup_program is not None:
            switch_startup_program(startup_program)
def get_var(name, program=None):
    """
    Get a variable by name from the global block of a program.
    Args:
        name(str): name of the variable
        program(Program|None): program object.
        If None, default_global_program() will be used.
    Returns:
        Variable
    """
    if program is None:
        program = default_main_program()
    # NOTE(review): asserts are stripped under `python -O`; raising TypeError
    # would be more robust input validation — confirm callers don't rely on
    # AssertionError before changing.
    assert isinstance(name, str)
    assert isinstance(program, Program)
    return program.global_block().var(name)
| StarcoderdataPython |
1611290 | <reponame>daniele-mc/HacktoberFest2020-4
from operator import ixor
from functools import reduce
def xop(n=4, start=3):
    """Return the XOR of the n-term arithmetic sequence start, start+2, ...

    Equivalent to start ^ (start+2) ^ ... ^ (start+2*(n-1)).
    Generalized: with n == 0 the result is 0 (the XOR identity) instead of
    raising; the manual list-building loop is replaced by range()/reduce().
    """
    return reduce(ixor, range(start, start + 2 * n, 2), 0)
print(xop())
| StarcoderdataPython |
3359602 | import json
import unittest
from os import path
import xarray as xr
from granule_ingester.processors import TileSummarizingProcessor
from granule_ingester.processors.reading_processors import GridMultiVariableReadingProcessor
from granule_ingester.processors.reading_processors.GridReadingProcessor import GridReadingProcessor
from nexusproto import DataTile_pb2 as nexusproto
class TestTileSummarizingProcessor(unittest.TestCase):
    def test_standard_name_exists_01(self):
        """
        Test that the standard_name attribute exists in a
        Tile.TileSummary object after being processed with
        TileSummarizingProcessor
        """
        reading_processor = GridReadingProcessor(
            variable='analysed_sst',
            latitude='lat',
            longitude='lon',
            time='time',
            tile='tile'
        )
        relative_path = '../granules/20050101120000-NCEI-L4_GHRSST-SSTblend-AVHRR_OI-GLOB-v02.0-fv02.0.nc'
        granule_path = path.join(path.dirname(__file__), relative_path)
        tile_summary = nexusproto.TileSummary()
        tile_summary.granule = granule_path
        tile_summary.data_var_name = json.dumps('analysed_sst')
        input_tile = nexusproto.NexusTile()
        input_tile.summary.CopyFrom(tile_summary)
        dims = {
            'lat': slice(0, 30),
            'lon': slice(0, 30),
            'time': slice(0, 1),
            'tile': slice(10, 11),
        }
        with xr.open_dataset(granule_path, decode_cf=True) as ds:
            output_tile = reading_processor._generate_tile(ds, dims, input_tile)
            tile_summary_processor = TileSummarizingProcessor('test')
            new_tile = tile_summary_processor.process(tile=output_tile, dataset=ds)
            self.assertEqual('"sea_surface_temperature"', new_tile.summary.standard_name, f'wrong new_tile.summary.standard_name')
    def test_hls_single_var01(self):
        """
        Test a single-variable HLS granule: standard_name should be JSON null
        and the summary statistics should match the expected band mean.
        """
        # BUGFIX(dead code): a list of all 11 bands was built here and then
        # immediately overwritten; keep only the single-band list in use.
        input_var_list = ['B01']
        reading_processor = GridReadingProcessor(input_var_list, 'lat', 'lon', time='time', tile='tile')
        granule_path = path.join(path.dirname(__file__), '../granules/HLS.S30.T11SPC.2020001.v1.4.hdf.nc')
        tile_summary = nexusproto.TileSummary()
        tile_summary.granule = granule_path
        tile_summary.data_var_name = json.dumps(input_var_list)
        input_tile = nexusproto.NexusTile()
        input_tile.summary.CopyFrom(tile_summary)
        dimensions_to_slices = {
            'time': slice(0, 1),
            'lat': slice(0, 30),
            'lon': slice(0, 30),
            'tile': slice(10, 11),
        }
        with xr.open_dataset(granule_path, decode_cf=True) as ds:
            output_tile = reading_processor._generate_tile(ds, dimensions_to_slices, input_tile)
            tile_summary_processor = TileSummarizingProcessor('test')
            new_tile = tile_summary_processor.process(tile=output_tile, dataset=ds)
            self.assertEqual('null', new_tile.summary.standard_name, f'wrong new_tile.summary.standard_name')
            self.assertEqual(None, json.loads(new_tile.summary.standard_name), f'unable to convert new_tile.summary.standard_name from JSON')
            self.assertTrue(abs(new_tile.summary.stats.mean - 0.26137) < 0.001, f'mean value is not close expected: 0.26137. actual: {new_tile.summary.stats.mean}')
    def test_hls_multiple_var_01(self):
        """
        Test a multi-variable HLS granule (bands B01-B11): standard_name
        should be a JSON list of nulls and the combined mean should match.
        """
        input_var_list = [f'B{k:02d}' for k in range(1, 12)]
        reading_processor = GridMultiVariableReadingProcessor(input_var_list, 'lat', 'lon', time='time', tile='tile')
        granule_path = path.join(path.dirname(__file__), '../granules/HLS.S30.T11SPC.2020001.v1.4.hdf.nc')
        tile_summary = nexusproto.TileSummary()
        tile_summary.granule = granule_path
        tile_summary.data_var_name = json.dumps(input_var_list)
        input_tile = nexusproto.NexusTile()
        input_tile.summary.CopyFrom(tile_summary)
        dimensions_to_slices = {
            'time': slice(0, 1),
            'lat': slice(0, 30),
            'lon': slice(0, 30),
            'tile': slice(10, 11),
        }
        with xr.open_dataset(granule_path, decode_cf=True) as ds:
            output_tile = reading_processor._generate_tile(ds, dimensions_to_slices, input_tile)
            tile_summary_processor = TileSummarizingProcessor('test')
            new_tile = tile_summary_processor.process(tile=output_tile, dataset=ds)
            self.assertEqual('[null, null, null, null, null, null, null, null, null, null, null]', new_tile.summary.standard_name, f'wrong new_tile.summary.standard_name')
            self.assertEqual([None for _ in range(11)], json.loads(new_tile.summary.standard_name), f'unable to convert new_tile.summary.standard_name from JSON')
            self.assertTrue(abs(new_tile.summary.stats.mean - 0.26523) < 0.001, f'mean value is not close expected: 0.26523. actual: {new_tile.summary.stats.mean}')
80668 | from setuptools import setup, Extension
import numpy as np
from Cython.Build import cythonize
from Cython.Distutils import build_ext
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
# Obtain the numpy include directory. This logic works across numpy versions.
try:
numpy_include = np.get_include()
except AttributeError:
numpy_include = np.get_numpy_include()
# extensions
ext_args = dict(
include_dirs=[numpy_include],
language='c++',
)
ext_modules = [
Extension(
"nms_cpu",
sources=["src/nms_cpu.cpp"],
**ext_args
),
Extension(
"soft_nms_cpu",
sources=["src/soft_nms_cpu.pyx"],
**ext_args
),
]
setup(
name='nms',
ext_modules=cythonize(ext_modules),
# inject our custom trigger
cmdclass={'build_ext': BuildExtension},
) | StarcoderdataPython |
178032 | <reponame>tykling/qwiic_exporter
# type: ignore
"""qwiic_exporter.py test suite.
Runs with pytest and tox.
"""
import logging
from qwiic_exporter import QwiicExporter
def test_get_sensor_signatures():
    """Make sure the get_sensor_signatures() method returns the expected signatures for known sensors.

    The expected lists enumerate every non-empty subset of each sensor's
    column signatures, largest subsets first — ordering is significant.
    """
    qwe = QwiicExporter()
    for name, data in qwe.sensors.items():
        if name == "ICM-20948 IMU":
            assert qwe.get_subsensor_signatures(data) == [
                ["aX,aY,aZ", "gX,gY,gZ", "mX,mY,mZ", "imu_degC"],
                ["aX,aY,aZ", "gX,gY,gZ", "mX,mY,mZ"],
                ["aX,aY,aZ", "gX,gY,gZ", "imu_degC"],
                ["aX,aY,aZ", "mX,mY,mZ", "imu_degC"],
                ["gX,gY,gZ", "mX,mY,mZ", "imu_degC"],
                ["aX,aY,aZ", "gX,gY,gZ"],
                ["aX,aY,aZ", "mX,mY,mZ"],
                ["aX,aY,aZ", "imu_degC"],
                ["gX,gY,gZ", "mX,mY,mZ"],
                ["gX,gY,gZ", "imu_degC"],
                ["mX,mY,mZ", "imu_degC"],
                ["aX,aY,aZ"],
                ["gX,gY,gZ"],
                ["mX,mY,mZ"],
                ["imu_degC"],
            ]
        elif name == "BME280 atmospheric sensor":
            assert qwe.get_subsensor_signatures(data) == [
                ["pressure_Pa", "humidity_%", "altitude_m", "temp_degC"],
                ["pressure_Pa", "humidity_%", "altitude_m"],
                ["pressure_Pa", "humidity_%", "temp_degC"],
                ["pressure_Pa", "altitude_m", "temp_degC"],
                ["humidity_%", "altitude_m", "temp_degC"],
                ["pressure_Pa", "humidity_%"],
                ["pressure_Pa", "altitude_m"],
                ["pressure_Pa", "temp_degC"],
                ["humidity_%", "altitude_m"],
                ["humidity_%", "temp_degC"],
                ["altitude_m", "temp_degC"],
                ["pressure_Pa"],
                ["humidity_%"],
                ["altitude_m"],
                ["temp_degC"],
            ]
        elif name == "VCNL4040 proximity sensor":
            assert qwe.get_subsensor_signatures(data) == [
                ["prox(no unit)", "ambient_lux"],
                ["prox(no unit)"],
                ["ambient_lux"],
            ]
        elif name == "OpenLog Artemis":
            assert qwe.get_subsensor_signatures(data) == [
                ["output_Hz", "count"],
                ["output_Hz"],
                ["count"],
            ]
        elif name == "CCS811 air quality sensor":
            assert qwe.get_subsensor_signatures(data) == [
                ["tvoc_ppb", "co2_ppm"],
                ["tvoc_ppb"],
                ["co2_ppm"],
            ]
        elif name == "MS8607 PHT sensor":
            assert qwe.get_subsensor_signatures(data) == [
                ["humidity_%", "hPa", "degC"],
                ["humidity_%", "hPa"],
                ["humidity_%", "degC"],
                ["hPa", "degC"],
                ["humidity_%"],
                ["hPa"],
                ["degC"],
            ]
        else:
            # Any sensor added to qwe.sensors without a branch above fails
            # here, forcing this test to be updated alongside new sensors.
            assert (
                qwe.get_subsensor_signatures(data) is False
            ), "Unknown sensor, cannot test get_sensor_signatures()"
def test_parse_sensor_config():
    """Make sure the parse_sensor_config() function does the right thing with various headerlines.

    TODO: Also check gaugeindex here maybe.
    """
    qwe = QwiicExporter()
    # the full lineup: every supported sensor present in the header
    qwe.parse_sensor_config(
        headerline="rtcDate,rtcTime,aX,aY,aZ,gX,gY,gZ,mX,mY,mZ,imu_degC,tvoc_ppb,co2_ppm,prox(no unit),ambient_lux,pressure_Pa,humidity_%,altitude_m,temp_degC,output_Hz,count,"
    )
    assert qwe.sensorconfig == [
        ("ICM-20948 IMU", ["Accelerometer", "Gyro", "Magnetometer", "Temperature"]),
        ("CCS811 air quality sensor", ["TVOC", "CO2"]),
        ("VCNL4040 proximity sensor", ["Proximity", "Ambient Light"]),
        (
            "BME280 atmospheric sensor",
            [
                "Pressure",
                "Humidity",
                "Altitude",
                "Temperature",
            ],
        ),
        ("OpenLog Artemis", ["Frequency", "Counter"]),
    ]
    # every IMU metric must have been registered as a prometheus collector
    for metric in qwe.sensors["ICM-20948 IMU"]["Accelerometer"]:
        assert metric[1] in qwe.registry._names_to_collectors
    for metric in qwe.sensors["ICM-20948 IMU"]["Gyro"]:
        assert metric[1] in qwe.registry._names_to_collectors
    for metric in qwe.sensors["ICM-20948 IMU"]["Magnetometer"]:
        assert metric[1] in qwe.registry._names_to_collectors
    for metric in qwe.sensors["ICM-20948 IMU"]["Temperature"]:
        assert metric[1] in qwe.registry._names_to_collectors
    # partial lineup: only some subsensors enabled per sensor
    qwe.parse_sensor_config(
        headerline="rtcDate,rtcTime,aX,aY,aZ,gX,gY,gZ,co2_ppm,prox(no unit),ambient_lux,pressure_Pa,humidity_%,altitude_m,output_Hz,"
    )
    assert qwe.sensorconfig == [
        ("ICM-20948 IMU", ["Accelerometer", "Gyro"]),
        ("CCS811 air quality sensor", ["CO2"]),
        ("VCNL4040 proximity sensor", ["Proximity", "Ambient Light"]),
        (
            "BME280 atmospheric sensor",
            [
                "Pressure",
                "Humidity",
                "Altitude",
            ],
        ),
        ("OpenLog Artemis", ["Frequency"]),
    ]
    # duplicate sensors: repeated column groups yield repeated config entries
    qwe.parse_sensor_config(
        headerline="rtcDate,rtcTime,aX,aY,aZ,gX,gY,gZ,mX,mY,mZ,imu_degC,tvoc_ppb,co2_ppm,prox(no unit),ambient_lux,prox(no unit),ambient_lux,prox(no unit),ambient_lux,pressure_Pa,humidity_%,altitude_m,temp_degC,tvoc_ppb,co2_ppm,output_Hz,"
    )
    assert qwe.sensorconfig == [
        ("ICM-20948 IMU", ["Accelerometer", "Gyro", "Magnetometer", "Temperature"]),
        ("CCS811 air quality sensor", ["TVOC", "CO2"]),
        ("VCNL4040 proximity sensor", ["Proximity", "Ambient Light"]),
        ("VCNL4040 proximity sensor", ["Proximity", "Ambient Light"]),
        ("VCNL4040 proximity sensor", ["Proximity", "Ambient Light"]),
        (
            "BME280 atmospheric sensor",
            [
                "Pressure",
                "Humidity",
                "Altitude",
                "Temperature",
            ],
        ),
        ("CCS811 air quality sensor", ["TVOC", "CO2"]),
        ("OpenLog Artemis", ["Frequency"]),
    ]
    # nothing except hz enabled
    qwe.parse_sensor_config(headerline="rtcDate,rtcTime,output_Hz,")
    assert qwe.sensorconfig == [("OpenLog Artemis", ["Frequency"])]
class MockSerial:
    """Stand-in for a serial port: accepts writes and discards them."""
    def write(self, data):
        """Swallow *data*; the exporter only needs write() to exist."""
        return None
def test_ingest_data():
    """Make sure the ingest_data method works as expected.

    Feeds one full data row matching the configured header and checks that
    gauge labels and scaled values land in the prometheus registry.
    """
    qwe = QwiicExporter()
    qwe.serial = MockSerial()
    qwe.parse_sensor_config(
        headerline="rtcDate,rtcTime,aX,aY,aZ,gX,gY,gZ,mX,mY,mZ,imu_degC,tvoc_ppb,co2_ppm,prox(no unit),ambient_lux,pressure_Pa,humidity_%,altitude_m,temp_degC,output_Hz,count,"
    )
    qwe.ingest_data(
        data="01/07/2000,16:18:45.54,-638.67,153.32,782.23,-1.69,1.47,-0.42,21.45,37.80,-5.85,9.77,2,417,20,0,99500.64,53.06,152.98,6.32,1.00,2523,"
    )
    # check labels
    assert list(
        qwe.registry._names_to_collectors["qwiic_accelerometer_x_gs"]._samples()
    )[0][1] == {
        "sensor": "ICM-20948 IMU",
        "sensorindex": "1",
        "subsensor": "Accelerometer",
    }
    # check values (accelerometer milli-g inputs appear scaled by 1/1000)
    assert (
        list(qwe.registry._names_to_collectors["qwiic_accelerometer_x_gs"]._samples())[
            0
        ][2]
        == -0.63867
    )
    assert (
        list(qwe.registry._names_to_collectors["qwiic_accelerometer_y_gs"]._samples())[
            0
        ][2]
        == 0.15331999999999998
    )
    assert (
        list(qwe.registry._names_to_collectors["qwiic_accelerometer_z_gs"]._samples())[
            0
        ][2]
        == 0.78223
    )
    assert (
        list(qwe.registry._names_to_collectors["qwiic_output_hertz"]._samples())[0][2]
        == 1.00
    )
    assert (
        list(qwe.registry._names_to_collectors["qwiic_measurements_total"]._samples())[
            0
        ][2]
        == 2523
    )
def test_ingest_data_wrong_metric_count(caplog):
    """Make sure the ingest_data() method does what it is supposed to when given a line of data with fewer elements than expected."""
    caplog.set_level(logging.DEBUG)
    qwe = QwiicExporter()
    qwe.serial = MockSerial()
    qwe.parse_sensor_config(
        headerline="rtcDate,rtcTime,aX,aY,aZ,gX,gY,gZ,mX,mY,mZ,imu_degC,tvoc_ppb,co2_ppm,prox(no unit),ambient_lux,pressure_Pa,humidity_%,altitude_m,temp_degC,output_Hz,count,"
    )
    # data row is missing the trailing "count" field on purpose
    qwe.ingest_data(
        data="01/07/2000,16:18:45.54,-638.67,153.32,782.23,-1.69,1.47,-0.42,21.45,37.80,-5.85,9.77,2,417,20,0,99500.64,53.06,152.98,6.32,1.00,"
    )
    assert "Gauge index is out of sync" in caplog.text
| StarcoderdataPython |
1747904 | # Script to create an env_var by calling the http endpoint localhost:8080/conf/env/{env_var_name}/{env_var_value}
import os
from flask import Flask
app = Flask(__name__)
@app.route("/env/<name>/<var>")
def set_env_var(name, var):
    """Set environment variable *name* to *var* and echo the stored value."""
    # SECURITY NOTE(review): this lets any HTTP client set arbitrary
    # environment variables in this process — do not expose beyond localhost.
    os.environ[name] = str(var)
    return os.environ.get(name)
if __name__=='__main__':
    # debug=True enables the Werkzeug reloader/debugger; development only.
    app.run(debug=True, port=8080)
| StarcoderdataPython |
3334744 | <reponame>jattoabdul/vanhack-cms
from app.repositories.base_repo import BaseRepo
from app.models.student import Student
from uuid import uuid4
from sqlalchemy.sql.expression import or_
class StudentRepo(BaseRepo):
    """Data-access layer for the Student model, built on BaseRepo."""
    def __init__(self):
        BaseRepo.__init__(self, Student)
    def new_user(self, first_name, last_name, email, password, is_verified, is_premium):
        """Create, persist and return a new Student row."""
        user = Student(
            first_name=first_name, last_name=last_name, email=email, password=password,
            is_verified=is_verified, is_premium=is_premium
        )
        user.save()
        return user
    @staticmethod
    def refresh_auth_key(user):
        """Rotate *user*'s auth_key to a fresh UUID4, persist and return the user."""
        auth_key = str(uuid4())
        user.auth_key = auth_key
        user.save()
        return user
    def name_or_email_like(self, query_keyword):
        """Paginated, case-insensitive substring search over email,
        first name and last name."""
        query_keyword = f'%{query_keyword}%'
        return self._model.query.filter(or_((Student.email.ilike(query_keyword)), (Student.first_name.ilike(query_keyword)),
                                            (Student.last_name.ilike(query_keyword)))).paginate(error_out=False)
| StarcoderdataPython |
172206 | # --------------
#Importing header files
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# NOTE(review): `path` is injected by the hosting environment, not defined
# in this file — confirm before running standalone.
data = pd.read_csv(path)
data['Rating'].hist()
# Ratings above 5 are invalid on a 5-star scale; drop them and re-plot.
data = data[data['Rating']<=5]
data['Rating'].hist()
#Code starts here
#Code ends here
# --------------
# code starts here
# Missing-value count and rate per column, before dropping rows.
total_null = data.isnull().sum()
percent_null = (total_null/data.isnull().count())
missing_data = pd.concat((total_null,percent_null),axis =1,keys=['Total','Percent'])
print(missing_data)
# Drop every row containing any NaN, then recompute the same table.
data = data.dropna(axis=0)
total_null_1 = data.isnull().sum()
# BUGFIX: this previously divided the stale pre-drop counts (total_null),
# so the "after" table reported wrong percentages.
percent_null_1 = (total_null_1/data.isnull().count())
missing_data_1 = pd.concat((total_null_1,percent_null_1),axis =1,keys=['Total','Percent'])
print(missing_data_1)
# code ends here
# --------------
#Code starts here
# Rating distribution per app category as box plots.
sns.catplot(x='Category',y='Rating',data=data,kind='box',height=10)
plt.xticks(rotation=90)
plt.title('Rating vs Category [BoxPlot]')
#Code ends here
# --------------
#Importing header files
from sklearn.preprocessing import MinMaxScaler, LabelEncoder
#Code starts here
print(data['Installs'].value_counts())
# Strip non-word characters (commas, '+') so the counts parse as ints.
# NOTE(review): relies on pandas' legacy regex=True default for
# Series.str.replace — pin regex=True explicitly on modern pandas.
data['Installs'] = data['Installs'].str.replace('\W', '')
print(data['Installs'].value_counts())
data['Installs'] = data['Installs'].astype('int')
# Label-encode install counts before regressing against rating.
le = LabelEncoder()
data['Installs'] = le.fit_transform(data['Installs'])
sns.regplot(x=data['Installs'],y=data['Rating'],data=data)
plt.title('Rating vs Installs [RegPlot')
#Code ends here
# --------------
#Code starts here
print(data['Price'].value_counts())
# Remove the '$' prefix so prices can be treated as floats.
data['Price'] = data['Price'].str.replace('$','')
data['Price'] = data['Price'].astype('float')
print(data['Price'].head())
sns.regplot(x='Price',y='Rating',data=data)
plt.title('Rating vs Price [RegPlot')
#Code ends here
# --------------
#Code starts here
print(data['Genres'].value_counts())
# Keep only the primary genre (text before the first ';').
# IMPROVED: comprehension replaces the manual append loop; commented-out
# debug prints removed.
data['Genres'] = [x.split(';')[0] for x in data['Genres']]
# Mean rating per primary genre.
gr_mean = data[['Rating','Genres']].groupby(['Genres'],as_index=False).mean()
print(gr_mean.describe())
gr_mean = gr_mean.sort_values('Rating')
print(gr_mean.head(1))  # lowest-rated genre
print(gr_mean.tail(1))  # highest-rated genre
#Code ends here
# --------------
#Code starts here
print(data['Last Updated'])
data['Last Updated'] = pd.to_datetime(data['Last Updated'])
# Days since each app's last update, measured from the newest update date.
max_date = data['Last Updated'].max()
data['Last Updated Days'] = (max_date - data['Last Updated']).dt.days
sns.regplot(x='Last Updated Days',y='Rating',data=data)
plt.title('Rating vs Last Updated [RegPlot]')
#Code ends here
| StarcoderdataPython |
3336014 | ###################### LEVELING #####################
lvl=0
strn=10
vit=10
dex=10
inte=10
fth=10
stm=10
mge=10
lvl=0
levels=0
freelvls=30
stattochange=0
while freelvls>0:
print("allocate stats")
print("Unallocated levels: "+ str(freelvls))
print("level: "+ str(lvl))
print("1 Vitality:"+str(vit))
print("2 Stamina:"+str(stm))
print("3 Strength:"+str(strn))
print("4 Dexterity:"+str(dex))
print("5 Intelegence:"+str(inte))
print("6 Faith:"+str(fth))
print("7 Magicality:"+str(mge))
try:
stattochange=int(input("Stat: "))
except:
print("Please use a vaild number")
if stattochange>7 or stattochange<=0:
print("not a stat")
if 0<stattochange<=7:
try:
levels=int(input("Level how much: "))
except:
print("Please use a vaild number")
if levels>freelvls:
print("Insufficient Unallocated levels")
if levels<0:
print("Unable to remove levels")
if 0<levels<=freelvls:
if stattochange==1:
vit+=levels
if stattochange==2:
stm+=levels
if stattochange==3:
strn+=levels
if stattochange==4:
dex+=levels
if stattochange==5:
inte+=levels
if stattochange==6:
fth+=levels
if stattochange==7:
mge+=levels
lvl+=levels
freelvls-=levels
###################### WEAPONS #####################
weapon=0
print("(1) Knight Sword (2) Axe (3) Fencing sword (4) Staff (5) Chime")
while weapon==0:
    beginning_weapon=input("Begining weapon: ")
    # NOTE(review): any non-empty input exits the loop — choices 1-5 are not
    # validated; the "0" prefix appears to build a fixed-width code string.
    weapon="0"+str(beginning_weapon)
###################### armor #####################
armor=00
# Final character code: stats + weapon + armor concatenated as digits.
print(str(vit)+str(stm)+str(strn)+str(dex)+str(inte)+str(fth)+str(mge)+str(weapon)+str(armor)+"00")
| StarcoderdataPython |
1756720 | <reponame>legumeinfo/CoNekT
import os
from tempfile import mkstemp
from flask import request, flash, url_for
from conekt.extensions import admin_required
from werkzeug.exceptions import abort
from werkzeug.utils import redirect
from conekt.controllers.admin.controls import admin_controls
from conekt.forms.admin.add_species import AddSpeciesForm
from conekt.models.sequences import Sequence
from conekt.models.species import Species
@admin_controls.route('/add/species', methods=['POST'])
@admin_required
def add_species():
    """
    Adds a species to the species table and adds sequences for that species to
    the sequence table based on the fasta file provided.

    Expects form fields: code, name, data_type, color, highlight, description,
    plus an uploaded FASTA file (optionally gzip-compressed).

    :return: Redirect to admin panel interface
    """
    form = AddSpeciesForm(request.form)

    if request.method == 'POST' and form.validate():
        # Add species (or return id of existing species)
        species_id = Species.add(request.form.get('code'),
                                 request.form.get('name'),
                                 data_type=request.form.get('data_type'),
                                 color='#' + request.form.get('color'),
                                 highlight='#' + request.form.get('highlight'),
                                 description=request.form.get('description'))

        # Add Sequences: spool the upload to a temp file so the FASTA parser
        # can read it from disk.
        fd, temp_path = mkstemp()

        fasta_data = request.files[form.fasta.name].read()
        print(request.files[form.fasta.name].content_type)
        # Detect gzip uploads from the browser-reported content type.
        compressed = 'gzip' in request.files[form.fasta.name].content_type

        with open(temp_path, 'wb') as fasta_writer:
            fasta_writer.write(fasta_data)

        sequence_count = Sequence.add_from_fasta(temp_path, species_id, compressed=compressed)

        # Clean up the temp file (mkstemp's fd must be closed explicitly).
        os.close(fd)
        os.remove(temp_path)

        flash('Added species %s with %d sequences' % (request.form.get('name'), sequence_count), 'success')

        return redirect(url_for('admin.index'))
    else:
        # Route only accepts POST, so reaching here normally means validation failed.
        if not form.validate():
            flash('Unable to validate data, potentially missing fields', 'danger')
            return redirect(url_for('admin.index'))
        else:
            abort(405)
| StarcoderdataPython |
1730080 | <gh_stars>1-10
import numpy as np
import pytest
from ..sim_utils import Sim
PIXEL_SCALE = 0.263
@pytest.mark.parametrize('gal_type', ['exp'])
@pytest.mark.parametrize('psf_type', ['gauss', 'ps'])
@pytest.mark.parametrize('homogenize_psf', [False, True])
@pytest.mark.parametrize('n_coadd_psf', [1, 2, 3])
def test_sim_seeding_reproduce(gal_type, psf_type, homogenize_psf, n_coadd_psf):
    """Two sims built with identical seeds must yield identical observations."""
    kwargs = dict(
        gal_type=gal_type,
        psf_type=psf_type,
        homogenize_psf=homogenize_psf,
        n_coadd_psf=n_coadd_psf,
        n_coadd=10,
        scale=PIXEL_SCALE,
    )
    first = Sim(rng=np.random.RandomState(seed=10), **kwargs).get_mbobs()
    second = Sim(rng=np.random.RandomState(seed=10), **kwargs).get_mbobs()

    obs_a, obs_b = first[0][0], second[0][0]
    assert np.array_equal(obs_a.image, obs_b.image)
    assert np.array_equal(obs_a.noise, obs_b.noise)
    assert np.array_equal(obs_a.psf.image, obs_b.psf.image)
@pytest.mark.parametrize('gal_type', ['exp'])
@pytest.mark.parametrize('psf_type', ['ps', 'gauss'])
@pytest.mark.parametrize('homogenize_psf', [False, True])
@pytest.mark.parametrize('n_coadd_psf', [1, 2, 3])
def test_sim_seeding_not_reproduce(gal_type, psf_type, homogenize_psf, n_coadd_psf):
    """Different seeds must change the image and noise (and, for non-Gaussian
    PSF types, the PSF image as well)."""
    kwargs = dict(
        gal_type=gal_type,
        psf_type=psf_type,
        homogenize_psf=homogenize_psf,
        n_coadd_psf=n_coadd_psf,
        n_coadd=10,
        scale=PIXEL_SCALE,
    )
    obs_a = Sim(rng=np.random.RandomState(seed=10), **kwargs).get_mbobs()[0][0]
    obs_b = Sim(rng=np.random.RandomState(seed=24357), **kwargs).get_mbobs()[0][0]

    assert not np.array_equal(obs_a.image, obs_b.image)
    assert not np.array_equal(obs_a.noise, obs_b.noise)
    if psf_type != 'gauss':
        # The analytic Gaussian PSF does not depend on the seed.
        assert not np.array_equal(obs_a.psf.image, obs_b.psf.image)
@pytest.mark.parametrize('gal_type', ['exp'])
@pytest.mark.parametrize('psf_type', ['gauss', 'ps'])
@pytest.mark.parametrize('homogenize_psf', [False, True])
@pytest.mark.parametrize('n_coadd_psf', [1, 2, 3])
def test_sim_seeding_shears(gal_type, psf_type, homogenize_psf, n_coadd_psf):
    """With the same seed, opposite shears change only the image -- noise and
    PSF stay identical."""
    def make_obs(g1):
        sim = Sim(
            rng=np.random.RandomState(seed=10),
            gal_type=gal_type,
            psf_type=psf_type,
            homogenize_psf=homogenize_psf,
            n_coadd_psf=n_coadd_psf,
            n_coadd=10,
            g1=g1,
            scale=PIXEL_SCALE)
        return sim.get_mbobs()[0][0]

    plus = make_obs(0.02)
    minus = make_obs(-0.02)

    assert not np.array_equal(plus.image, minus.image)
    assert np.array_equal(plus.noise, minus.noise)
    assert np.array_equal(plus.psf.image, minus.psf.image)
| StarcoderdataPython |
4836654 | import timm
import torch
from timm.data import resolve_data_config
from timm.data.transforms_factory import create_transform
from torchvision import datasets
import numpy as np
import os
# Number of target classes for each supported dataset.
NUM_CLASSES_DICT = {'imagenette':10,'imagenet':1000,'flower102':102,'cifar':10,'cifar100':100,'svhn':10}
def get_model(model_name,dataset_name,model_dir):
    #build model and load weights
    '''
    INPUT:
    model_name      str, model name. Should contain one of
                    ('resnetv2_50x1_bit_distilled', 'vit_base_patch16_224', 'resmlp_24_distilled_224')
    dataset_name    str, dataset name. One of
                    ('imagenette','imagenet','cifar','cifar100','svhn','flower102')
    model_dir       str, the directory of model checkpoints

    OUTPUT:
    model           torch.nn.Module, the PyTorch model with weights loaded
    '''
    # Pick the timm backbone whose name appears as a substring of model_name.
    if 'resnetv2_50x1_bit_distilled' in model_name:
        model = timm.create_model('resnetv2_50x1_bit_distilled', pretrained=True)
    elif 'vit_base_patch16_224' in model_name:
        model = timm.create_model('vit_base_patch16_224', pretrained=True)
    elif 'resmlp_24_distilled_224' in model_name:
        model = timm.create_model('resmlp_24_distilled_224', pretrained=True)
    # modify classification head and load model weight
    if dataset_name in ['cifar','imagenette','svhn','flower102','cifar100']:
        # Replace the ImageNet head with one sized for the target dataset.
        model.reset_classifier(num_classes=NUM_CLASSES_DICT[dataset_name])
        #model = torch.nn.DataParallel(model)
        checkpoint_name = model_name + '_{}.pth'.format(dataset_name)
        checkpoint = torch.load(os.path.join(model_dir,checkpoint_name))
        pretrained_dict = checkpoint['model_state_dict']
        # The model was trained and saved with DataParallel, so strip the 'module.' prefix from weight dict keys.
        # An alternative is to comment out the line below and create a DataParallel instance yourself, e.g., model = torch.nn.DataParallel(model)
        # note: using DataParallel might result in subtle issues with timm (e.g., timm.data.resolve_data_config), since the instance becomes the DataParallel wrapper instead of timm.models
        pretrained_dict = {key.replace("module.", ""): value for key, value in pretrained_dict.items()}
        model.load_state_dict(pretrained_dict)
    elif dataset_name == 'imagenet' and 'masked' in model_name: #override the pretrained ImageNet weights when masked model training is used
        checkpoint_name = model_name + '_imagenet.pth'
        checkpoint = torch.load(os.path.join(model_dir,checkpoint_name))
        model.load_state_dict(checkpoint['state_dict'])
    return model
def get_data_loader(dataset_name,data_dir,model,batch_size=1,num_img=-1,train=False):
    # get the data loader (possibly only a subset of the dataset)
    '''
    INPUT:
    dataset_name    str, dataset name. One of
                    ('imagenette','imagenet','cifar','cifar100','svhn','flower102')
    data_dir        str, the directory of data
    model           torch.nn.Module / timm.models, the built model returned by get_model(),
                    which carries a default_cfg used for data preprocessing
    batch_size      int, batch size. Default 1 for per-example inference-time evaluation;
                    in practice a larger batch size is preferred
    num_img         int, number of images for a random subset; if num_img<0 the whole dataset is used
    train           bool, whether to return the training split

    OUTPUT:
    loader          the PyTorch data loader
    len(dataset)    the size of the (sub)dataset
    config          data preprocessing configuration dict
    '''
    # get dataset
    if dataset_name in ['imagenette','imagenet','flower102']:
        #high resolution images; use the default image preprocessing (all three models use 224x224 inputs)
        config = resolve_data_config({}, model=model)
        print(config)
        ds_transforms = create_transform(**config)
        split = 'train' if train else 'val'
        dataset_ = datasets.ImageFolder(os.path.join(data_dir,split),ds_transforms)
    elif dataset_name in ['cifar','cifar100','svhn']:
        #low resolution images; resize them to 224x224 without cropping (crop_pct=1)
        config = resolve_data_config({'crop_pct':1}, model=model)
        ds_transforms = create_transform(**config)
        if dataset_name == 'cifar':
            dataset_ = datasets.CIFAR10(root=data_dir, train=train, download=True, transform=ds_transforms)
        elif dataset_name == 'cifar100':
            dataset_ = datasets.CIFAR100(root=data_dir, train=train, download=True, transform=ds_transforms)
        elif dataset_name == 'svhn':
            split = 'train' if train else 'test'
            dataset_ = datasets.SVHN(root=data_dir, split=split, download=True, transform=ds_transforms)

    # select a random set of test images (when num_img>0)
    np.random.seed(233333333)#fixed seed so the image subset is reproducible across runs
    idxs=np.arange(len(dataset_))
    np.random.shuffle(idxs)
    if num_img>0:
        idxs=idxs[:num_img]
    dataset = torch.utils.data.Subset(dataset_, idxs)
    loader = torch.utils.data.DataLoader(dataset, batch_size=batch_size,shuffle=train,num_workers=2)
    return loader,len(dataset),config
| StarcoderdataPython |
1792584 | <gh_stars>0
import requests as rqt
import datetime as dts
import yagmail
# import sched
import time
from keep_alive import keep_alive
# s = sched.scheduler(time.time, time.sleep)
# user credentials
# NOTE(review): "<EMAIL>" / "<PASSWORD>" are redacted placeholders -- supply real
# credentials (ideally from environment variables rather than source code).
MAIL = "<EMAIL>"
PASSCODE = "<PASSWORD>"

# Observer coordinates used for the "ISS overhead" check.
MY_LAT = 12.971599
MY_LNG = 77.594566
MY_LOC = (MY_LAT, MY_LNG)
# print(MY_LOC)

# Last known ISS position; updated by iss_nearby() and read by mailer().
iss_location = ()
def iss_nearby():
    """Return True when the ISS is within +/-5 degrees of MY_LOC, else False.

    Side effect: updates the module-level ``iss_location`` tuple so that
    mailer() can include the position in the notification e-mail.
    """
    global iss_location
    # Get ISS location
    iss_api = rqt.get("http://api.open-notify.org/iss-now.json")
    iss_api.raise_for_status()  # fail loudly on a bad response, consistent with night_sky()
    data_iss = iss_api.json()['iss_position']
    iss_lat = float(data_iss['latitude'])
    iss_lng = float(data_iss['longitude'])
    iss_location = (iss_lat, iss_lng)
    print(iss_location)

    # A 5-degree box around our position counts as "overhead".
    if MY_LAT - 5 <= iss_lat <= MY_LAT + 5 and MY_LNG - 5 <= iss_lng <= MY_LNG + 5:
        print("bingo\n ISS is above you")
        return True
    # BUG FIX: previously fell through and returned None implicitly.
    return False
def night_sky():
    """Return True when it is currently dark at MY_LOC (before sunrise or
    after sunset), else False."""
    # formatted=0 asks sunrise-sunset.org for ISO-8601 timestamps (in UTC).
    location = {"lat": MY_LAT, "lng": MY_LNG, "formatted": 0}

    # Getting current time
    now = dts.datetime.now()
    current_hr = now.hour

    sn_api = rqt.get("https://api.sunrise-sunset.org/json", params=location)
    sn_api.raise_for_status()
    data = sn_api.json()['results']
    sunrise = data['sunrise']
    sunset = data['sunset']
    # Extract the hour from "YYYY-MM-DDTHH:MM:SS+00:00".
    morning = int(sunrise.split("T")[1].split(":")[0])
    evening = int(sunset.split("T")[1].split(":")[0])

    # NOTE(review): the API hours are UTC while datetime.now() is local time;
    # the comparison only lines up when the host clock runs in UTC -- confirm
    # the deployment timezone.
    if current_hr >= evening or current_hr <= morning:
        print("It's Dark out there")
        return True
    # BUG FIX: previously returned None implicitly during daylight.
    return False
# Mail to ppl
def mailer():
    """E-mail a notification when the ISS is overhead and the sky is dark.

    Does nothing (and opens no SMTP connection) when either condition fails.
    """
    # Check the conditions first so an SMTP connection is only opened when
    # there is actually something to send (the original connected every call).
    if not (iss_nearby() and night_sky()):
        return
    with yagmail.SMTP(user=MAIL,
                      password=PASSCODE,  # BUG FIX: was the unresolved placeholder `<PASSWORD>` (a syntax error)
                      host='smtp.mail.yahoo.com',
                      port=587,
                      smtp_ssl=False,
                      smtp_starttls=True) as server:
        subject = 'ISS is nearby'
        html = "<h1>ISS is above head</h1>"
        body = f"Peep out the night sky,<br>ISS Location:{iss_location}"
        # NOTE(review): "<EMAIL>" is a redacted placeholder recipient -- set a real address.
        server.send(to="<EMAIL>",
                    subject=subject,
                    contents=[html, body])
# s.enter(delay=60, priority=1, action=mailer)
# Start the keep-alive web server so the hosting platform doesn't idle the process.
keep_alive()

# Call the function
# Poll once a minute forever; mailer() itself decides whether anything is sent.
while True:
    print("Running...")
    mailer()
    time.sleep(60)
| StarcoderdataPython |
4811508 | <filename>progress/1130_wordcloud.py
#DataFrame을 dictionary형태로 변환
#Positive WordCloud
# Convert keyword-frequency DataFrames to dictionaries and render one
# word cloud per sentiment (positive / negative).
import pandas as pd
from wordcloud import WordCloud
import matplotlib.pyplot as plt
# %matplotlib inline  -- IPython magic; only valid inside a notebook cell,
# kept here as a comment so the file parses as plain Python.


def _show_keyword_cloud(csv_path):
    """Read a keyword-frequency CSV and display it as a word cloud."""
    data = pd.read_csv(csv_path)
    # Map each Korean keyword to its frequency.
    cloud_dic = data.set_index('Korean (ko)').to_dict()['Frequency']
    # BUG FIX: the original called `wordcloud.generate_from_frequencies(...)`
    # on a name that was never defined (only the WordCloud class was imported),
    # raising NameError. Instantiate WordCloud explicitly first.
    # NOTE(review): Korean text requires `font_path` pointing at a Korean font
    # to render correctly -- confirm the right font path for this environment.
    wc = WordCloud(background_color='white', width=800, height=800)
    keyword = wc.generate_from_frequencies(cloud_dic)
    array = keyword.to_array()
    plt.figure(figsize=(10, 10))
    plt.imshow(array, interpolation='bilinear')
    plt.axis('off')
    plt.show()


# Positive word cloud
_show_keyword_cloud("./Keyword_Dataset/Positive_Keyword.csv")
# Negative word cloud
_show_keyword_cloud("./Keyword_Dataset/Negative_Keyword.csv")
# TODO from the original notebook: mask the cloud with the Desktop/virus image.
| StarcoderdataPython |
74227 | """Run the Celery jobs."""
import os
from app import celery, create_app
import app.tasks
# Build the Flask app and push an application context so that Celery tasks
# (registered by the `app.tasks` import above) can reach Flask extensions
# such as the database and config at run time.
flask_app = create_app(os.getenv('FLASK_CONFIG') or 'default')
flask_app.app_context().push()
1765983 | <filename>image_demo.py
#! /usr/bin/env python
# coding=utf-8
#================================================================
# Copyright (C) 2019 * Ltd. All rights reserved.
#
# Editor : VIM
# File name : image_demo.py
# Author : YunYang1994
# Created date: 2019-01-20 16:06:06
# Description :
#
#================================================================
import os
import shutil
import cv2
import numpy as np
import core.utils as utils
import tensorflow as tf
from PIL import Image
# Tensor names to pull from the frozen graph: the input placeholder plus the
# three YOLOv3 prediction heads (small / medium / large anchor scales).
return_elements = ["input/input_data:0", "pred_sbbox/concat_2:0", "pred_mbbox/concat_2:0", "pred_lbbox/concat_2:0"]
pb_file = "./yolov3_mark.pb"
image_path = "./docs/images/test_images"
result_path = "./docs/images/test_results1"
num_classes = 8
input_size = 416
graph = tf.Graph()

return_tensors = utils.read_pb_return_tensors(graph, pb_file, return_elements)

# Start from a clean output directory.
if os.path.exists(result_path): shutil.rmtree(result_path)
os.mkdir(result_path)

img_list = os.listdir(image_path)

with tf.Session(graph=graph) as sess:
    for filename in img_list:
        original_image = cv2.imread(os.path.join(image_path, filename))
        # OpenCV loads BGR; the network expects RGB.
        original_image = cv2.cvtColor(original_image, cv2.COLOR_BGR2RGB)
        original_image_size = original_image.shape[:2]
        # Resize/pad to the fixed network input size and add a batch axis.
        image_data = utils.image_preporcess(np.copy(original_image), [input_size, input_size])
        image_data = image_data[np.newaxis, ...]

        pred_sbbox, pred_mbbox, pred_lbbox = sess.run(
            [return_tensors[1], return_tensors[2], return_tensors[3]],
            feed_dict={ return_tensors[0]: image_data})

        # Flatten all three scales into rows of (x, y, w, h, conf) + class scores.
        pred_bbox = np.concatenate([np.reshape(pred_sbbox, (-1, 5 + num_classes)),
                                    np.reshape(pred_mbbox, (-1, 5 + num_classes)),
                                    np.reshape(pred_lbbox, (-1, 5 + num_classes))], axis=0)

        # Score threshold 0.3, then non-maximum suppression at IoU 0.45.
        bboxes = utils.postprocess_boxes(pred_bbox, original_image_size, input_size, 0.3)
        bboxes = utils.nms(bboxes, 0.45, method='nms')
        image = utils.draw_bbox_with_contrast(original_image, bboxes)
        # image = Image.fromarray(image)
        # image.show()
        # Convert back to BGR for cv2.imwrite.
        image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
        cv2.imwrite(os.path.join(result_path, filename), image)
        print("Saved to %s.\n" % os.path.join(result_path, filename))
1779033 | <filename>locale/pot/api/plotting/_autosummary/pyvista-themes-_SliderStyleConfig-slider_width-1.py
import pyvista
pyvista.global_theme.slider_styles.modern.slider_width = 0.04
| StarcoderdataPython |
4822441 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2021 The TARTRL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import gym
from gym.utils import seeding
import numpy as np
from gym import spaces
from matplotlib import colors
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from .maze import trans1, trans2
from .maze import pos_type
from .maze_utils import generate_maze, parse_map
class MazeConfig:
    """Parameter bundle describing how a maze should be generated."""

    def __init__(self, start_num=1, width=32, height=32, complexity=0.2, density=0.2, maze_type='RandomMaze',
                 max_distance=None):
        # Generator selection and board geometry.
        self.maze_type = maze_type
        self.width = width
        self.height = height
        # Obstacle-layout knobs consumed by the maze generator.
        self.complexity = complexity
        self.density = density
        # Number of start positions (agents) to place.
        self.start_num = start_num
        # Optional cap on the start-to-goal distance (None = unbounded).
        self.max_distance = max_distance
class MazeEnv(gym.Env):
    """Gym environment for navigating a (possibly multi-start) grid maze.

    Observations are integer grids (cell codes from ``pos_type`` plus 6 for
    the visited trace); actions move the agent in 4 (Von Neumann) or
    8 (Moore) directions.

    NOTE(review): the code mixes a single-agent attribute ``self.pos_now``
    (used by step/_get_full_obs_v0) with a multi-agent list ``self.pos_nows``
    (set by reset/used by render). ``pos_now`` is never initialized by
    ``__init__``/``reset``, so ``step()`` relies on some external caller
    setting it first -- confirm intended usage.
    """
    metadata = {'render.modes': ['human', 'rgb_array']}

    def __init__(self, maze_config, max_step, pob_size=1, action_type='VonNeumann_4', obs_type='full', show_trace=False,
                 show=False, seed=None):
        """Build the maze from ``maze_config`` and set up spaces/rendering state."""
        self.seed(seed)
        self.maze_config = maze_config
        self.maze, self.start_poses, self.goal_poses = generate_maze(self.maze_config)
        self.maze_data = np.array(self.maze.get_maze())
        self.maze_size = self.maze_data.shape
        self.max_step = max_step      # episode length cap
        self.step_now = 0
        self.show_trace = show_trace  # overlay visited cells when rendering
        self.traces = []
        self.action_type = action_type
        self.obs_type = obs_type
        self.show = show              # live matplotlib display vs. buffered frames
        self.pos_nows = None
        # Action space: 0: Up, 1: Down, 2: Left, 3: Right
        if self.action_type == 'VonNeumann_4':  # Von Neumann neighborhood
            self.num_actions = 4
        elif action_type == 'Moore_8':  # Moore neighborhood
            self.num_actions = 8
        else:
            raise TypeError('Action type must be either \'VonNeumann\' or \'Moore\'')
        # One set of move actions per agent (start position).
        self.action_space = spaces.Discrete(self.num_actions * self.maze_config.start_num)
        self.all_actions = list(range(self.action_space.n))
        self.pob_size = pob_size  # half-width of the partial-observation window
        low_obs = 0   # Lowest integer in observation
        high_obs = 6  # Highest integer in observation
        if self.obs_type == 'full':
            self.observation_space = spaces.Box(low=low_obs,
                                                high=high_obs,
                                                shape=self.maze_size,
                                                dtype=np.float32)
        elif self.obs_type == 'partial':
            self.observation_space = spaces.Box(low=low_obs,
                                                high=high_obs,
                                                shape=(self.pob_size * 2 + 1, self.pob_size * 2 + 1),
                                                dtype=np.float32)
        else:
            raise TypeError('Observation type must be either \'full\' or \'partial\'')
        # Colormap: order of color is, free space, wall, agent, food, poison
        self.cmap = colors.ListedColormap(['white', 'black', 'blue', 'green', 'red', 'gray'])
        self.bounds = [pos_type['blank'], pos_type['block'], pos_type['agent'], pos_type['goal'], 4, 5,
                       6]  # values for each color
        self.norm = colors.BoundaryNorm(self.bounds, self.cmap.N)
        self.ax_imgs = []  # For generating videos

    def load_map(self, map_path):
        """Overwrite start/goal positions from a map file parsed by parse_map()."""
        self.start_poses, self.goal_poses = parse_map(map_path, self.maze)

    def set_state(self, state):
        """Restore the environment from ``(full_map, [(start, goal), ...])``."""
        full_map, pairs = state
        self.maze.maze = full_map
        self.start_poses = []
        self.goal_poses = []
        for pair in pairs:
            # Coerce coordinates to plain ints (they may arrive as numpy scalars).
            self.start_poses.append([int(p) for p in pair[0]])
            self.goal_poses.append([int(p) for p in pair[1]])

    def reset(self, maze_config=None, new_map=True, new_start=True, new_goal=True, show=None, show_trace=None):
        """Reset the episode; optionally regenerate the maze and/or start-goal pairs.

        Passing ``maze_config`` rebuilds the maze from scratch and suppresses
        the ``new_map`` regeneration path.
        """
        if show is not None:
            self.show = show
        if show_trace is not None:
            self.show_trace = show_trace
        if maze_config is not None:
            new_map = False
            self.maze, self.start_poses, self.goal_poses = generate_maze(maze_config)
        if new_map:
            self.start_poses, self.goal_poses = self.maze.generate_maze()
        else:
            if new_goal and new_start:
                self.start_poses, self.goal_poses = self.maze.reset_starts_goals(self.maze_config.start_num)
            else:
                if new_goal:
                    self.goal_poses = self.maze.reset_goals(self.maze_config.start_num)
                if new_start:
                    self.start_poses = self.maze.reset_starts(self.maze_config.start_num)
        # print(self.start_pos)
        # Drop any buffered animation frames from the previous episode.
        if hasattr(self, 'ani_obs'):
            self.ani_obs = []
            self.ani_obs_p = []
        self.maze_data = np.array(self.maze.get_maze())
        self.pos_nows = self.start_poses
        self.ax_imgs = []
        self.traces = self.start_poses
        self.step_now = 0
        return self._get_obs()

    def step(self, action):
        """Apply one action; returns (obs, reward, done, info).

        Rewards: +1 goal, -0.1 wall bump, -0.01 per move (encourages short paths).
        NOTE(review): operates on the single-agent ``self.pos_now`` -- see class note.
        """
        info = {}
        pre_pos = self.pos_now
        self.pos_now = self._next_pos(self.pos_now, action)
        self.traces.append(self.pos_now)
        if self._goal_test(self.pos_now):  # Goal check
            reward = +1
            done = True
        elif self.pos_now == pre_pos:  # Hit wall
            reward = -0.1
            done = False
        else:  # Moved, small negative reward to encourage shorest path
            reward = -0.01
            done = False
        # Additional info
        self.step_now += 1
        if self.step_now >= self.max_step:
            done = True
        return self._get_obs(), reward, done, info

    def seed(self, seed=None):
        """Seed gym's RNG plus the global numpy/random generators (no-op for None)."""
        if seed is None:
            return
        self.np_random, seed = seeding.np_random(seed)
        np.random.seed(seed)
        random.seed(seed)

    def _get_obs(self):
        """Dispatch to the full or partial observation according to obs_type."""
        if self.obs_type == 'full':
            return self._get_full_obs()
        elif self.obs_type == 'partial':
            return self._get_partial_obs(self.pob_size)

    def _get_full_obs_v0(self):
        """Return a 2D array representation of maze."""
        obs = np.array(self.maze_data)
        # Set goal positions
        # for goal in self.goal_poses:
        #     print(goal[0],goal[1],pos_type['goal'])
        # exit()
        for goal in self.goal_poses:
            obs[goal[0]][goal[1]] = pos_type['goal']  # 3: goal
        # Set current position
        # Come after painting goal positions, avoid invisible within multi-goal regions
        obs[self.pos_now[0]][self.pos_now[1]] = pos_type['agent']  # 2: agent
        return obs

    def _get_full_obs(self):
        """Return the raw maze grid plus the list of (start, goal) pairs."""
        # return (maze_size,3) observation, first dim for maze, second dim for start, third dim for goal
        # return np.stack([self.maze_data, starts_map, goals_map])
        pairs = list(zip(self.start_poses, self.goal_poses))
        return self.maze_data, pairs

    def _get_partial_obs(self, size=1):
        """Get partial observable window according to Moore neighborhood"""
        # Get maze with indicated location of current position and goal positions
        maze = self._get_full_obs_v0()
        pos = np.array(self.pos_nows[0])

        under_offset = np.min(pos - size)
        over_offset = np.min(len(maze) - (pos + size + 1))
        offset = np.min([under_offset, over_offset])

        if offset < 0:  # Need padding: pad with wall cells (value 1)
            maze = np.pad(maze, np.abs(offset), 'constant', constant_values=1)
            pos += np.abs(offset)
        return maze[pos[0] - size: pos[0] + size + 1, pos[1] - size: pos[1] + size + 1]

    def _goal_test(self, pos):
        """Return True if current state is a goal state."""
        # Goal positions may be stored as lists or tuples; compare accordingly.
        if type(self.goal_poses[0]) == list:
            return list(pos) in self.goal_poses
        elif type(self.goal_poses[0]) == tuple:
            return tuple(pos) in self.goal_poses

    def _next_pos(self, pos, action):
        """Return the next state from a given state by taking a given action."""
        # Transition table to define movement for each action
        if self.action_type == 'VonNeumann_4':
            transitions = trans1
        elif self.action_type == 'Moore_8':
            transitions = trans2
        new_state = [pos[0] + transitions[action][0], pos[1] + transitions[action][1]]
        if self.maze_data[new_state[0]][new_state[1]] == 1:  # Hit wall, stay there
            return pos
        else:  # Valid move for 0, 2, 3, 4
            return new_state

    def render(self, mode='human', close=False):
        """Draw the maze either live (show=True) or into a frame buffer for video."""
        if close:
            plt.close()
            return

        obs = self._get_full_obs_v0()
        partial_obs = self._get_partial_obs(self.pob_size)

        # For rendering traces: Only for visualization, does not affect the observation data
        if self.show_trace:
            obs[tuple(list(zip(*self.traces[:-1])))] = 6
        # Repaint goals and all agents on top of the trace overlay.
        for goal in self.goal_poses:
            obs[goal[0]][goal[1]] = 3  # 3: goal
        for pos_now in self.pos_nows:
            obs[pos_now[0]][pos_now[1]] = 2  # 2: agent
        if self.show:
            # Create Figure for rendering
            if not hasattr(self, 'fig'):  # initialize figure and plotting axes
                self.fig, (self.ax_full, self.ax_partial) = plt.subplots(nrows=1, ncols=2)
                self.ax_full.axis('off')
                self.ax_partial.axis('off')
                self.fig.show()
            # Only create the image the first time
            if not hasattr(self, 'ax_full_img'):
                self.ax_full_img = self.ax_full.imshow(obs, cmap=self.cmap, norm=self.norm, animated=True)
            if not hasattr(self, 'ax_partial_img'):
                self.ax_partial_img = self.ax_partial.imshow(partial_obs, cmap=self.cmap, norm=self.norm, animated=True)
            # Update the image data for efficient live video
            self.ax_full_img.set_data(obs)
            self.ax_partial_img.set_data(partial_obs)
            plt.draw()
            # Update the figure display immediately
            self.fig.canvas.draw()
            return self.fig
        else:
            # Buffer frames for later assembly by _get_video().
            if not hasattr(self, 'ani_obs'):
                self.ani_obs = [obs]
                self.ani_obs_p = [partial_obs]
            else:
                self.ani_obs.append(obs)
                self.ani_obs_p.append(partial_obs)

    def _get_video(self, interval=200, gif_path=None):
        """Assemble buffered frames into a matplotlib animation; optionally save a GIF."""
        if self.show:
            # TODO: Find a way to create animations without slowing down the live display
            print('Warning: Generating an Animation when live_display=True not yet supported.')
        if not hasattr(self, 'fig'):  # initialize figure and plotting axes
            self.fig, (self.ax_full, self.ax_partial) = plt.subplots(nrows=1, ncols=2)
            self.ax_full.axis('off')
            self.ax_partial.axis('off')
        self.fig.set_dpi(100)
        for obs, partial_obs in zip(self.ani_obs, self.ani_obs_p):
            # Create a new image each time to allow an animation to be created
            self.ax_full_img = self.ax_full.imshow(obs, cmap=self.cmap, norm=self.norm, animated=True)
            self.ax_partial_img = self.ax_partial.imshow(partial_obs, cmap=self.cmap, norm=self.norm, animated=True)
            # Put in AxesImage buffer for video generation
            self.ax_imgs.append([self.ax_full_img, self.ax_partial_img])  # List of axes to update figure frame
        anim = animation.ArtistAnimation(self.fig, self.ax_imgs, interval=interval)
        if gif_path is not None:
            anim.save(gif_path, writer='imagemagick', fps=10)
        return anim

    def get_maze(self):
        """Return the underlying maze grid."""
        return self.maze.maze

    def get_goals(self):
        """Return the list of goal positions."""
        return self.goal_poses

    def get_starts(self):
        """Return the list of start positions."""
        return self.start_poses
82205 | from tabulate import tabulate
# Build a 4x4 board of "o" cells.
# BUG FIX: the original `row = ["o"] * 4; board = [row] * 4` stored four
# references to the SAME row list, so `board[1][1] = "x"` marked column 1 in
# every row. Build four independent rows instead.
board = [["o"] * 4 for _ in range(4)]

# Mark a single cell.
board[1][1] = "x"

print(tabulate(board))
3312486 | <reponame>narumiruna/inference-template
from abc import ABCMeta, abstractmethod
class Hook(metaclass=ABCMeta):
@abstractmethod
def __call__(self, frame):
raise NotImplementedError
@abstractmethod
def begin(self):
raise NotImplementedError
@abstractmethod
def end(self):
raise NotImplementedError
@property
def name(self):
return self.__class__.__name__
| StarcoderdataPython |
193727 | <reponame>onedata/oneclient-pkg
import sys
from subprocess import STDOUT, check_call, check_output
# Target distribution name (e.g. a Debian/Ubuntu codename) from the CLI.
dist = sys.argv[1]

# get package
# List all built packages and sort descending so the newest version sorts first.
packages = check_output(['ls', '/root/pkg']).split()
packages = sorted(packages, reverse=True)

# NOTE(review): under Python 3, check_output() returns bytes, so the str
# startswith()/`in` tests below would raise -- this script appears to target Python 2.
oneclient_package = [path for path in packages
                     if path.startswith('oneclient-base')
                     and (dist in path)
                     and not path.startswith('oneclient-base-debuginfo')][0]

onedatafs_py2_package = [path for path in packages
                         if path.startswith('python-onedatafs')
                         and (dist in path)][0]

onedatafs_py3_package = [path for path in packages
                         if path.startswith('python3-onedatafs')
                         and (dist in path)][0]

# install oneclient package
check_call(['sh', '-c', 'apt -y install /root/pkg/{package}'.format(package=oneclient_package)
            ], stderr=STDOUT)

# install onedatafs Python2 package
check_call(['sh', '-c', 'apt -y install /root/pkg/{package}'.format(package=onedatafs_py2_package)
            ], stderr=STDOUT)

# install onedatafs Python3 package
check_call(['sh', '-c', 'apt -y install /root/pkg/{package}'.format(package=onedatafs_py3_package)
            ], stderr=STDOUT)

# validate oneclient package installation (check_call raises on non-zero exit)
check_call(['/usr/bin/oneclient', '--help'])

# validate onedatafs Python2 package installation
check_call(['python2', '-c', 'from onedatafs import OnedataFS'])

# validate onedatafs Python3 package installation
check_call(['python3', '-c', 'from onedatafs import OnedataFS'])

sys.exit(0)
| StarcoderdataPython |
1630361 | import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import copy
import math
try:
from transformers.modeling_bert import BertConfig, BertEncoder, BertModel
except:
from transformers.models.bert.modeling_bert import BertConfig, BertEncoder, BertModel
class LSTM(nn.Module):
    """Plain-LSTM knowledge-tracing model.

    Embeds four categorical features (interaction, test id, question id,
    knowledge tag) plus the continuous features listed in ``args.cont_col``,
    projects them to ``hidden_dim``, runs an LSTM, and emits a per-timestep
    correctness probability of shape (batch, seq_len).
    """

    def __init__(self, args):
        super(LSTM, self).__init__()
        self.args = args
        self.device = args.device

        self.hidden_dim = self.args.hidden_dim
        self.n_layers = self.args.n_layers

        # Embedding
        # interaction: answer correctness (1, 2) plus padding (0)
        self.embedding_interaction = nn.Embedding(3, self.hidden_dim // 3)
        self.embedding_test = nn.Embedding(self.args.n_test + 1, self.hidden_dim // 3)
        self.embedding_question = nn.Embedding(self.args.n_questions + 1, self.hidden_dim // 3)
        self.embedding_tag = nn.Embedding(self.args.n_tag + 1, self.hidden_dim // 3)
        # BUG FIX: this line used to rebind `self.embedding_tag` with a table
        # sized by n_grade, silently clobbering the tag embedding above and
        # crashing for any tag id larger than n_grade. Bind it to its own
        # attribute instead (unused by forward(), kept for subclass/feature use).
        self.embedding_grade = nn.Embedding(self.args.n_grade + 1, self.hidden_dim // 3)

        # embedding combination projection (4 categorical embeddings concatenated)
        self.cate_proj = nn.Sequential(nn.Linear((self.hidden_dim // 3) * 4, self.hidden_dim),
                                       nn.LayerNorm(self.hidden_dim))
        # Continuous features -> hidden_dim
        self.embedding_cont = nn.Sequential(nn.Linear(self.args.n_cont, self.hidden_dim),
                                            nn.LayerNorm(self.hidden_dim))
        # Fuse categorical + continuous streams back down to hidden_dim.
        self.comb_proj = nn.Sequential(nn.ReLU(),
                                       nn.Linear(self.args.hidden_dim * 2, self.args.hidden_dim),
                                       nn.LayerNorm(self.args.hidden_dim))

        self.lstm = nn.LSTM(self.hidden_dim,
                            self.hidden_dim,
                            self.n_layers,
                            batch_first=True)

        # Fully connected layer
        self.fc = nn.Linear(self.hidden_dim, 1)
        self.activation = nn.Sigmoid()

    def init_hidden(self, batch_size):
        """Return zeroed (h, c) LSTM states on the configured device."""
        h = torch.zeros(
            self.n_layers,
            batch_size,
            self.hidden_dim)
        h = h.to(self.device)

        c = torch.zeros(
            self.n_layers,
            batch_size,
            self.hidden_dim)
        c = c.to(self.device)

        return (h, c)

    def forward(self, input):
        """Compute correctness probabilities.

        ``input`` is a dict of (batch, seq_len) tensors: long tensors for
        "interaction", "testId", "assessmentItemID", "KnowledgeTag" and float
        tensors for every column in ``args.cont_col``.
        """
        batch_size = input["interaction"].size(0)

        # Embed each categorical feature and concatenate along the feature axis.
        embed_interaction = self.embedding_interaction(input["interaction"])
        embed_test = self.embedding_test(input["testId"])
        embed_question = self.embedding_question(input["assessmentItemID"])
        embed_tag = self.embedding_tag(input["KnowledgeTag"])

        embed_cate = torch.cat([embed_interaction,
                                embed_test,
                                embed_question,
                                embed_tag], 2)
        embed_cate = self.cate_proj(embed_cate)

        # Stack continuous columns into (batch, seq_len, n_cont) and project.
        cont = torch.cat([input[c].unsqueeze(2) for c in self.args.cont_col], 2)
        embed_cont = self.embedding_cont(cont)

        X = self.comb_proj(torch.cat([embed_cate, embed_cont], 2))

        hidden = self.init_hidden(batch_size)
        out, hidden = self.lstm(X, hidden)
        out = out.contiguous().view(batch_size, -1, self.hidden_dim)

        out = self.fc(out)
        preds = self.activation(out).view(batch_size, -1)

        return preds
class LSTMATTN(nn.Module):
    """LSTM encoder followed by a single BERT self-attention layer.
    Categorical features are embedded and projected; continuous features are
    batch-normalised and projected; both are fused, run through an LSTM, and
    re-weighted by one BertEncoder layer before a sigmoid prediction head.
    """
    def __init__(self, args):
        super(LSTMATTN, self).__init__()
        self.args = args
        self.device = args.device
        self.hidden_dim = self.args.hidden_dim
        self.n_layers = self.args.n_layers
        self.n_heads = self.args.n_heads
        self.drop_out = self.args.drop_out
        # Embedding
        # `interaction` is derived from `correct`: correct -> 1 or 2, padding -> 0.
        self.embedding_interaction = nn.Embedding(3, self.hidden_dim//3)
        # 13*2+1: presumably 13 problem numbers x 2 answer states + padding — TODO confirm.
        self.embedding_problem_interaction = nn.Embedding(13*2+1, self.hidden_dim//3)
        self.embedding_test = nn.Embedding(self.args.n_test + 1, self.hidden_dim//3)
        self.embedding_question = nn.Embedding(self.args.n_questions + 1, self.hidden_dim//3)
        self.embedding_tag = nn.Embedding(self.args.n_tag + 1, self.hidden_dim//3)
        self.embedding_grade = nn.Embedding(self.args.n_grade + 1, self.hidden_dim//3)
        # Shared table for every categorical column without a dedicated embedding.
        self.embedding_other = nn.Embedding(self.args.n_other + 1, self.hidden_dim//3)
        # embedding combination projection
        # +1 accounts for the interaction embedding on top of args.cate_col.
        self.cate_proj = nn.Sequential(nn.Linear((self.hidden_dim//3)*(len(self.args.cate_col)+1), self.hidden_dim), nn.LayerNorm(self.hidden_dim))
        self.bn_cont = nn.BatchNorm1d(self.args.n_cont)
        self.embedding_cont = nn.Sequential(
            nn.Linear(self.args.n_cont, self.hidden_dim),
            nn.LayerNorm(self.hidden_dim))
        # Fuses the categorical and continuous projections (hence hidden_dim*2 in).
        self.comb_proj = nn.Sequential(
            nn.Dropout(0.3),
            nn.Linear(self.hidden_dim*2, self.hidden_dim),
            nn.LayerNorm(self.hidden_dim))
        self.lstm = nn.LSTM(self.hidden_dim,
                            self.hidden_dim,
                            self.n_layers,
                            batch_first=True)
        self.config = BertConfig(
            3, # not used
            hidden_size=self.hidden_dim,
            num_hidden_layers=1,
            num_attention_heads=self.n_heads,
            intermediate_size=self.hidden_dim,
            hidden_dropout_prob=self.drop_out,
            attention_probs_dropout_prob=self.drop_out,
        )
        self.attn = BertEncoder(self.config)
        # Fully connected layer
        self.fc = nn.Linear(self.hidden_dim, 1)
        self.activation = nn.Sigmoid()
    def init_hidden(self, batch_size):
        """Return zero-initialised (h, c) LSTM states on self.device."""
        h = torch.zeros(
            self.n_layers,
            batch_size,
            self.hidden_dim)
        h = h.to(self.device)
        c = torch.zeros(
            self.n_layers,
            batch_size,
            self.hidden_dim)
        c = c.to(self.device)
        return (h, c)
    def forward(self, input):
        """Predict per-timestep correctness probabilities.
        `input` is a dict of (batch, seq) tensors; "mask" marks valid steps.
        Returns a (batch, seq) probability tensor, or the raw last-step
        encoder output when args.loss_type == 'arcface'.
        """
        # Categorical Variable Embedding
        be_concat = []
        if "interaction" in input :
            embed_interaction = self.embedding_interaction(input["interaction"])
            be_concat.append(embed_interaction)
            batch_size = input["interaction"].size(0)
        if "problem_interaction" in input :
            embed_problem_interaction = self.embedding_problem_interaction(input["problem_interaction"])
            be_concat.append(embed_problem_interaction)
            batch_size = input["problem_interaction"].size(0)
        # NOTE(review): batch_size is only bound when "interaction" or
        # "problem_interaction" is present — confirm callers always supply one.
        if "testId" in input :
            embed_test = self.embedding_test(input["testId"])
            be_concat.append(embed_test)
        if "assessmentItemID" in input :
            embed_question = self.embedding_question(input["assessmentItemID"])
            be_concat.append(embed_question)
        if "KnowledgeTag" in input :
            embed_tag = self.embedding_tag(input["KnowledgeTag"])
            be_concat.append(embed_tag)
        if "grade" in input :
            embed_grade = self.embedding_grade(input["grade"])
            be_concat.append(embed_grade)
        # Categorical Variable Embedding (other embedding at one embedding function)
        for c in self.args.cate_col :
            if c not in ['assessmentItemID', 'testId', 'KnowledgeTag', 'grade']:
                be_concat.append(self.embedding_other(input[c]))
        embed_cate = torch.cat(be_concat, 2)
        embed = self.cate_proj(embed_cate)
        # continuous variable embedding
        # batch normalization
        # NOTE(review): when args.n_cont == 0, `embed` stays a tensor and the
        # torch.cat(embed, 2) / hidden_dim*2 comb_proj below would fail; the
        # model appears to assume n_cont > 0 — confirm.
        if self.args.n_cont > 0 :
            cont = torch.cat([input[c].unsqueeze(2) for c in self.args.cont_col], 2)
            cont = self.bn_cont(cont.view(-1,cont.size(-1))).view(batch_size,-1,cont.size(-1))
            embed_cont = self.embedding_cont(cont)
            embed = [embed, embed_cont]
        # Running LSTM
        X = self.comb_proj(torch.cat(embed,2))
        hidden = self.init_hidden(batch_size)
        out, hidden = self.lstm(X, hidden)
        out = out.contiguous().view(batch_size, -1, self.hidden_dim)
        # BERT-style additive attention mask: 0 for valid steps, -10000 for padding.
        extended_attention_mask = input["mask"].unsqueeze(1).unsqueeze(2)
        extended_attention_mask = extended_attention_mask.to(dtype=torch.float32)
        extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
        head_mask = [None] * self.n_layers
        encoded_layers = self.attn(out, extended_attention_mask, head_mask=head_mask)
        sequence_output = encoded_layers[-1]
        if self.args.loss_type == 'arcface' :
            # For arcface training, return the raw last-step representation.
            sequence_output = sequence_output[:,-1].contiguous().view(batch_size, -1)
            return sequence_output
        out = self.fc(sequence_output)
        preds = self.activation(out).view(batch_size, -1)
        return preds
class Bert(nn.Module):
    """BERT encoder over fixed categorical + continuous sequence features.
    Unlike LSTMATTN, the feature set is hard-coded (interaction, testId,
    assessmentItemID, KnowledgeTag, grade) and the continuous features are
    projected without batch normalisation.
    """
    def __init__(self, args):
        super(Bert, self).__init__()
        self.args = args
        self.device = args.device
        # Defining some parameters
        self.hidden_dim = self.args.hidden_dim
        self.n_layers = self.args.n_layers
        # Embedding
        # `interaction` is derived from `correct`: correct -> 1 or 2, padding -> 0.
        self.embedding_interaction = nn.Embedding(3, self.hidden_dim//4)
        self.embedding_test = nn.Embedding(self.args.n_test + 1, self.hidden_dim//4)
        self.embedding_question = nn.Embedding(self.args.n_questions + 1, self.hidden_dim//4)
        self.embedding_tag = nn.Embedding(self.args.n_tag + 1, self.hidden_dim//4)
        self.embedding_grade = nn.Embedding(self.args.n_grade + 1, self.hidden_dim//4)
        # embedding combination projection
        # *5: the five categorical embeddings above are concatenated.
        self.cate_proj = nn.Sequential(nn.Linear((self.hidden_dim//4)*5, self.hidden_dim), nn.LayerNorm(self.hidden_dim))
        self.embedding_cont = nn.Sequential(nn.Linear(self.args.n_cont, self.hidden_dim), nn.LayerNorm(self.hidden_dim))
        self.comb_proj = nn.Sequential(nn.ReLU(),
                                       nn.Linear(self.args.hidden_dim*2, self.args.hidden_dim),
                                       nn.LayerNorm(self.args.hidden_dim))
        # Bert config
        self.config = BertConfig(
            3, # not used
            hidden_size=self.hidden_dim,
            num_hidden_layers=self.args.n_layers,
            num_attention_heads=self.args.n_heads,
            max_position_embeddings=self.args.max_seq_len
        )
        # Defining the layers
        # Bert Layer
        self.encoder = BertModel(self.config)
        # Fully connected layer
        self.fc = nn.Linear(self.args.hidden_dim, 1)
        self.activation = nn.Sigmoid()
    def forward(self, input):
        """Predict per-timestep correctness probabilities (batch, seq)."""
        batch_size = input["interaction"].size(0)
        # Embedding
        embed_interaction = self.embedding_interaction(input["interaction"])
        embed_test = self.embedding_test(input["testId"])
        embed_question = self.embedding_question(input["assessmentItemID"])
        embed_tag = self.embedding_tag(input["KnowledgeTag"])
        embed_grade = self.embedding_grade(input["grade"])
        embed_cate = torch.cat([embed_interaction,
                                embed_test,
                                embed_question,
                                embed_tag,
                                embed_grade], 2)
        embed_cate = self.cate_proj(embed_cate)
        # Continuous features are projected directly (no BatchNorm here,
        # unlike LSTMATTN) — presumably intentional; confirm.
        cont = torch.cat([input[c].unsqueeze(2) for c in self.args.cont_col], 2)
        embed_cont = self.embedding_cont(cont)
        X = self.comb_proj(torch.cat([embed_cate, embed_cont],2))
        # Bert
        encoded_layers = self.encoder(inputs_embeds=X, attention_mask=input["mask"])
        out = encoded_layers[0]
        out = out.contiguous().view(batch_size, -1, self.hidden_dim)
        out = self.fc(out)
        preds = self.activation(out).view(batch_size, -1)
        return preds
class FFN(nn.Module):
    """Two-layer position-wise feed-forward block: linear -> ReLU -> linear -> dropout.

    Input and output widths are both `state_size`, so the block can be used
    inside residual connections.
    """
    def __init__(self, state_size=200):
        super(FFN, self).__init__()
        self.state_size = state_size
        self.lr1 = nn.Linear(state_size, state_size)
        self.relu = nn.ReLU()
        self.lr2 = nn.Linear(state_size, state_size)
        self.dropout = nn.Dropout(0.2)
    def forward(self, x):
        """Return dropout(lr2(relu(lr1(x))))."""
        hidden = self.relu(self.lr1(x))
        return self.dropout(self.lr2(hidden))
def future_mask(seq_length):
    """Return a (seq_length, seq_length) bool tensor, True strictly above the diagonal.

    Used as an attention mask to block attending to future positions.
    """
    upper_triangle = np.triu(np.ones((seq_length, seq_length)), k=1)
    return torch.from_numpy(upper_triangle.astype("bool"))
class SAKT(nn.Module):
    """Self-Attentive Knowledge Tracing (SAKT).

    Past interactions act as attention keys/values while upcoming exercises
    act as queries; a causal mask prevents attending to future positions.
    """
    def __init__(self, n_skill, max_seq=400, embed_dim=256):  # HDKIM 100
        # BUG FIX: the original called super(SAKTModel, self).__init__(), but no
        # class named SAKTModel exists, raising NameError on instantiation.
        super(SAKT, self).__init__()
        self.n_skill = n_skill
        self.embed_dim = embed_dim
        # Interaction ids: skill x correctness (2 * n_skill) + one padding slot.
        self.embedding = nn.Embedding(2 * n_skill + 1, embed_dim)
        self.pos_embedding = nn.Embedding(max_seq - 1, embed_dim)
        self.e_embedding = nn.Embedding(n_skill + 1, embed_dim)
        self.multi_att = nn.MultiheadAttention(
            embed_dim=embed_dim, num_heads=8, dropout=0.2
        )
        # NOTE(review): self.dropout is never applied in forward — confirm intent.
        self.dropout = nn.Dropout(0.2)
        self.layer_normal = nn.LayerNorm(embed_dim)
        self.ffn = FFN(embed_dim)
        self.pred = nn.Linear(embed_dim, 1)
        self.activation = nn.Sigmoid()
    def forward(self, x, question_ids):
        """Return (predictions, attention weights).

        Args:
            x: past interaction ids, shape (bs, s_len).
            question_ids: upcoming exercise ids, shape (bs, s_len).
        Returns:
            (bs, s_len) probability tensor and the attention weight tensor.
        """
        device = x.device
        x = self.embedding(x)
        pos_id = torch.arange(x.size(1)).unsqueeze(0).to(device)
        pos_x = self.pos_embedding(pos_id)
        x = x + pos_x
        e = self.e_embedding(question_ids)
        # nn.MultiheadAttention expects (s_len, bs, embed).
        x = x.permute(1, 0, 2)  # x: [bs, s_len, embed] => [s_len, bs, embed]
        e = e.permute(1, 0, 2)
        att_mask = future_mask(x.size(0)).to(device)
        # Exercises (e) query the interaction history (x).
        att_output, att_weight = self.multi_att(e, x, x, attn_mask=att_mask)
        att_output = self.layer_normal(att_output + e)
        att_output = att_output.permute(
            1, 0, 2
        )  # att_output: [s_len, bs, embed] => [bs, s_len, embed]
        x = self.ffn(att_output)
        x = self.layer_normal(x + att_output)
        x = self.pred(x)
        x = self.activation(x)
        return x.squeeze(-1), att_weight
class Feed_Forward_block(nn.Module):
    """Position-wise feed-forward block: out = ReLU(x @ W1 + b1) @ W2 + b2."""
    def __init__(self, dim_ff):
        super().__init__()
        self.layer1 = nn.Linear(in_features=dim_ff, out_features=dim_ff)
        self.layer2 = nn.Linear(in_features=dim_ff, out_features=dim_ff)
    def forward(self, ffn_in):
        """Apply the two linear layers with a ReLU in between."""
        activated = F.relu(self.layer1(ffn_in))
        return self.layer2(activated)
class LastQuery(nn.Module):
    """'Last query' transformer encoder followed by an LSTM.

    Only the final timestep is used as the attention query (Riiid "last query"
    trick), so attention costs O(seq_len) instead of O(seq_len^2); the attended
    sequence then runs through an LSTM and a sigmoid prediction head.
    """
    def __init__(self, args):
        super(LastQuery, self).__init__()
        self.args = args
        self.device = args.device
        self.hidden_dim = self.args.hidden_dim
        # Embedding
        # `interaction` is derived from `correct`: correct -> 1 or 2, padding -> 0.
        self.embedding_interaction = nn.Embedding(3, self.hidden_dim//3)
        # 13*2+1: presumably 13 problem numbers x 2 answer states + padding — TODO confirm.
        self.embedding_problem_interaction = nn.Embedding(13*2+1, self.hidden_dim//3)
        self.embedding_test = nn.Embedding(self.args.n_test + 1, self.hidden_dim//3)
        self.embedding_question = nn.Embedding(self.args.n_questions + 1, self.hidden_dim//3)
        self.embedding_tag = nn.Embedding(self.args.n_tag + 1, self.hidden_dim//3)
        self.embedding_grade = nn.Embedding(self.args.n_grade + 1, self.hidden_dim//3)
        # Shared table for categorical columns without a dedicated embedding.
        self.embedding_other = nn.Embedding(self.args.n_other + 1, self.hidden_dim//3)
        # Positional embedding is unused by the last-query solution but kept so
        # existing checkpoints keep loading.
        self.embedding_position = nn.Embedding(self.args.max_seq_len, self.hidden_dim)
        self.cate_proj = nn.Sequential(
            nn.Linear((self.hidden_dim//3)*(len(self.args.cate_col)+1), self.hidden_dim),
            nn.LayerNorm(self.hidden_dim))
        self.bn_cont = nn.BatchNorm1d(self.args.n_cont)
        self.embedding_cont = nn.Sequential(
            nn.Linear(self.args.n_cont, self.hidden_dim),
            nn.LayerNorm(self.hidden_dim))
        # embedding combination projection (categorical + continuous halves).
        self.comb_proj = nn.Sequential(
            nn.Linear(self.hidden_dim*2, self.hidden_dim),
            nn.LayerNorm(self.hidden_dim))
        # Encoder
        self.query = nn.Linear(
            in_features=self.hidden_dim, out_features=self.hidden_dim
        )
        self.key = nn.Linear(
            in_features=self.hidden_dim, out_features=self.hidden_dim
        )
        self.value = nn.Linear(
            in_features=self.hidden_dim, out_features=self.hidden_dim
        )
        self.attn = nn.MultiheadAttention(
            embed_dim=self.hidden_dim, num_heads=self.args.n_heads
        )
        # Not needed for last-query attention; kept in case future edits use it.
        self.mask = None
        self.ffn = Feed_Forward_block(self.hidden_dim)
        self.ln1 = nn.LayerNorm(self.hidden_dim)
        self.ln2 = nn.LayerNorm(self.hidden_dim)
        # LSTM
        self.lstm = nn.LSTM(
            self.hidden_dim, self.hidden_dim, self.args.n_layers, batch_first=True
        )
        # Fully connected layer
        self.fc = nn.Linear(self.hidden_dim, 1)
        self.activation = nn.Sigmoid()
    def get_pos(self, seq_len):
        """Return position ids (1, seq_len) for the positional embedding."""
        return torch.arange(seq_len).unsqueeze(0)
    def init_hidden(self, batch_size):
        """Return zero-initialised (h, c) LSTM states on self.device."""
        h = torch.zeros(self.args.n_layers, batch_size, self.args.hidden_dim)
        h = h.to(self.device)
        c = torch.zeros(self.args.n_layers, batch_size, self.args.hidden_dim)
        c = c.to(self.device)
        return (h, c)
    def get_mask(self, seq_len, mask, batch_size):
        """Build an additive attention mask (-inf on padded positions), repeated
        once per attention head as nn.MultiheadAttention expects."""
        new_mask = torch.zeros_like(mask)
        new_mask[mask == 0] = 1
        new_mask[mask != 0] = 0
        mask = new_mask
        mask = mask.repeat(1, self.args.n_heads).view(batch_size*self.args.n_heads, -1, seq_len)
        return mask.masked_fill(mask==1, float('-inf'))
    def forward(self, input):
        """Predict per-timestep correctness probabilities.

        `input` is a dict of (batch, seq) tensors; "mask" marks valid steps.
        Returns a (batch, seq) tensor of probabilities.
        """
        # "mask" is always present and fixes the batch/sequence geometry, so
        # derive the sizes from it instead of from optional feature keys.
        mask = input["mask"]
        batch_size = mask.size(0)
        seq_len = mask.size(1)
        # Categorical Variable Embedding
        be_concat = []
        if "interaction" in input :
            be_concat.append(self.embedding_interaction(input["interaction"]))
        if "problem_interaction" in input :
            be_concat.append(self.embedding_problem_interaction(input["problem_interaction"]))
        if "testId" in input :
            be_concat.append(self.embedding_test(input["testId"]))
        if "assessmentItemID" in input :
            be_concat.append(self.embedding_question(input["assessmentItemID"]))
        if "KnowledgeTag" in input :
            be_concat.append(self.embedding_tag(input["KnowledgeTag"]))
        if "grade" in input :
            be_concat.append(self.embedding_grade(input["grade"]))
        # Remaining categorical columns share one embedding table.
        for c in self.args.cate_col :
            if c not in ['assessmentItemID', 'testId', 'KnowledgeTag', 'grade']:
                be_concat.append(self.embedding_other(input[c]))
        embed_cate = torch.cat(be_concat, 2)
        embed = self.cate_proj(embed_cate)
        # Continuous variable embedding with batch normalization.
        # NOTE(review): when args.n_cont == 0, `embed` stays a tensor and the
        # cat/comb_proj below would fail; the model appears to assume n_cont > 0.
        if self.args.n_cont > 0 :
            cont = torch.cat([input[c].unsqueeze(2) for c in self.args.cont_col], 2)
            cont = self.bn_cont(cont.view(-1,cont.size(-1))).view(batch_size,-1,cont.size(-1))
            embed_cont = self.embedding_cont(cont)
            embed = [embed, embed_cont]
        embed = self.comb_proj(torch.cat(embed,2))
        # Positional embedding is deliberately skipped (keetar's solution).
        ####################### ENCODER #####################
        # Only the last timestep is used as the query.
        q = self.query(embed)[:, -1:, :].permute(1, 0, 2)
        k = self.key(embed).permute(1, 0, 2)
        v = self.value(embed).permute(1, 0, 2)
        ## attention (last query only)
        # BUG FIX: the original referenced undefined locals `seq_len` and `mask`
        # here (NameError); both are now derived from input["mask"] above.
        self.mask = self.get_mask(seq_len, mask, batch_size).to(self.device)
        # NOTE(review): the computed mask is not passed to self.attn — with a
        # single last-step query there is no future position to hide, but
        # padding positions are still attended to; confirm this is intended.
        out, _ = self.attn(q, k, v)
        ## residual + layer norm
        out = out.permute(1, 0, 2)
        out = embed + out
        out = self.ln1(out)
        ## feed forward network
        out = self.ffn(out)
        ## residual + layer norm
        out = embed + out
        out = self.ln2(out)
        ###################### LSTM #####################
        hidden = self.init_hidden(batch_size)
        out, hidden = self.lstm(out, hidden)
        ###################### DNN #####################
        out = out.contiguous().view(batch_size, -1, self.hidden_dim)
        out = self.fc(out)
        preds = self.activation(out).view(batch_size, -1)
        return preds
class PositionalEncoding(nn.Module):
    """Sinusoidal positional encoding with a learnable scale, added to the input.

    The precomputed table `pe` has shape (max_len, 1, d_model) so it broadcasts
    over the batch dimension of a (seq_len, batch, d_model) input.
    """
    def __init__(self, d_model, dropout=0.1, max_len=1000):
        super(PositionalEncoding, self).__init__()
        self.dropout = nn.Dropout(p=dropout)
        self.scale = nn.Parameter(torch.ones(1))
        positions = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        # Geometric frequency progression, as in "Attention Is All You Need".
        frequencies = torch.exp(torch.arange(
            0, d_model, 2).float() * (-math.log(10000.0) / d_model))
        table = torch.zeros(max_len, d_model)
        table[:, 0::2] = torch.sin(positions * frequencies)
        table[:, 1::2] = torch.cos(positions * frequencies)
        self.register_buffer('pe', table.unsqueeze(0).transpose(0, 1))
    def forward(self, x):
        """Add scaled positional encodings for the first x.size(0) steps, then dropout."""
        encoded = x + self.scale * self.pe[:x.size(0), :]
        return self.dropout(encoded)
class Saint(nn.Module):
    """SAINT-style encoder/decoder transformer for knowledge tracing.

    Exercise-side features feed the transformer encoder; response-side features
    (interactions) feed the decoder. Causal masks are applied on both sides.
    """
    def __init__(self, args):
        super(Saint, self).__init__()
        self.args = args
        self.device = args.device
        self.hidden_dim = self.args.hidden_dim
        # self.dropout = self.args.dropout
        self.dropout = 0.
        ### Embedding
        # ENCODER embedding
        self.embedding_test = nn.Embedding(self.args.n_test + 1, self.hidden_dim//3)
        self.embedding_question = nn.Embedding(self.args.n_questions + 1, self.hidden_dim//3)
        self.embedding_tag = nn.Embedding(self.args.n_tag + 1, self.hidden_dim//3)
        self.embedding_grade = nn.Embedding(self.args.n_grade + 1, self.hidden_dim//3)
        # max(..., 1) keeps the layers constructible when there are no
        # continuous encoder features.
        self.bn_cont_e = nn.BatchNorm1d(max(self.args.n_cont_e,1))
        self.embedding_cont_e = nn.Sequential(
            nn.Linear(max(self.args.n_cont_e,1), self.hidden_dim),
            nn.LayerNorm(self.hidden_dim))
        # c is 0/1: whether a continuous block contributes to the projection input.
        c = min(self.args.n_cont_e,1)
        # encoder combination projection
        self.enc_comb_proj = nn.Sequential(
            nn.Linear(self.hidden_dim * c+(self.hidden_dim//3)*len(self.args.cate_col_e),
                      self.hidden_dim),
            nn.LayerNorm(self.hidden_dim))
        # DECODER embedding
        # `interaction` is derived from `correct`: correct -> 1 or 2, padding -> 0.
        self.embedding_interaction = nn.Embedding(3, self.hidden_dim//3)
        self.embedding_problem_interaction = nn.Embedding(13*2+1, self.hidden_dim//3)
        self.embedding_other = nn.Embedding(self.args.n_other + 1, self.hidden_dim//3)
        self.bn_cont_d = nn.BatchNorm1d(max(self.args.n_cont_d,1))
        self.embedding_cont_d = nn.Sequential(
            nn.Linear(max(self.args.n_cont_d,1), self.hidden_dim),
            nn.LayerNorm(self.hidden_dim))
        # decoder combination projection (+1 for the interaction embedding)
        c = min(self.args.n_cont_d,1)
        self.dec_comb_proj = nn.Linear(self.hidden_dim*c+(self.hidden_dim//3)*(len(self.args.cate_col_d)+1),
                                       self.hidden_dim)
        # Positional encoding
        self.pos_encoder = PositionalEncoding(self.hidden_dim, self.dropout, self.args.max_seq_len)
        self.pos_decoder = PositionalEncoding(self.hidden_dim, self.dropout, self.args.max_seq_len)
        self.transformer = nn.Transformer(
            d_model=self.hidden_dim,
            nhead=self.args.n_heads,
            num_encoder_layers=self.args.n_layers,
            num_decoder_layers=self.args.n_layers,
            dim_feedforward=self.hidden_dim,
            dropout=self.dropout,
            activation='relu')
        self.fc = nn.Linear(self.hidden_dim, 1)
        self.activation = nn.Sigmoid()
        # Lazily-built causal masks, cached until seq_len changes.
        self.enc_mask = None
        self.dec_mask = None
        self.enc_dec_mask = None
    def get_mask(self, seq_len, mask=None, batch_size=None):
        """Return an additive causal mask (-inf strictly above the diagonal)."""
        mask = torch.from_numpy(np.triu(np.ones((seq_len, seq_len)), k=1))
        return mask.masked_fill(mask==1, float('-inf'))
    def forward(self, input):
        """Predict per-timestep correctness probabilities (batch, seq)."""
        # "mask" is always present, so derive the geometry from it rather than
        # from an optional feature key (the original only bound batch_size /
        # seq_len inside the KnowledgeTag encoder branch).
        batch_size = input["mask"].size(0)
        seq_len = input["mask"].size(1)
        # ENCODER: exercise-side categorical features
        be_concat = []
        if "testId" in input and "testId" in self.args.cate_col_e:
            embed_test = self.embedding_test(input["testId"])
            be_concat.append(embed_test)
        if "assessmentItemID" in input and "assessmentItemID" in self.args.cate_col_e:
            embed_question = self.embedding_question(input["assessmentItemID"])
            be_concat.append(embed_question)
        if "KnowledgeTag" in input and "KnowledgeTag" in self.args.cate_col_e:
            embed_tag = self.embedding_tag(input["KnowledgeTag"])
            be_concat.append(embed_tag)
        if "grade" in input and "grade" in self.args.cate_col_e:
            embed_grade = self.embedding_grade(input["grade"])
            be_concat.append(embed_grade)
        for c in self.args.cate_col_e :
            if c not in ['assessmentItemID', 'testId', 'KnowledgeTag', 'grade']:
                be_concat.append(self.embedding_other(input[c]))
        if self.args.n_cont_e > 0 :
            cont = torch.cat([input[c].unsqueeze(2) for c in self.args.cont_col_e], 2)
            cont = self.bn_cont_e(cont.view(-1,cont.size(-1))).view(batch_size,-1,cont.size(-1))
            embed_cont_e = self.embedding_cont_e(cont)
            be_concat.append(embed_cont_e)
        embed_enc = torch.cat(be_concat, 2)
        embed_enc = self.enc_comb_proj(embed_enc)
        # DECODER: response-side features
        be_concat = []
        if "testId" in input and "testId" in self.args.cate_col_d:
            embed_test = self.embedding_test(input["testId"])
            be_concat.append(embed_test)
        if "assessmentItemID" in input and "assessmentItemID" in self.args.cate_col_d:
            embed_question = self.embedding_question(input["assessmentItemID"])
            be_concat.append(embed_question)
        # BUG FIX: the original tested "assessmentItemID" in cate_col_d for the
        # two branches below (copy-paste), so KnowledgeTag/grade inclusion was
        # keyed on the wrong column name.
        if "KnowledgeTag" in input and "KnowledgeTag" in self.args.cate_col_d:
            embed_tag = self.embedding_tag(input["KnowledgeTag"])
            be_concat.append(embed_tag)
        if "grade" in input and "grade" in self.args.cate_col_d:
            embed_grade = self.embedding_grade(input["grade"])
            be_concat.append(embed_grade)
        if "interaction" in input :
            embed_interaction = self.embedding_interaction(input["interaction"])
            be_concat.append(embed_interaction)
        if "problem_interaction" in input :
            embed_problem_interaction = self.embedding_problem_interaction(input["problem_interaction"])
            be_concat.append(embed_problem_interaction)
        for c in self.args.cate_col_d :
            if c not in ['assessmentItemID', 'testId', 'KnowledgeTag', 'grade']:
                be_concat.append(self.embedding_other(input[c]))
        if self.args.n_cont_d > 0 :
            cont = torch.cat([input[c].unsqueeze(2) for c in self.args.cont_col_d], 2)
            cont = self.bn_cont_d(cont.view(-1,cont.size(-1))).view(batch_size,-1,cont.size(-1))
            embed_cont_d = self.embedding_cont_d(cont)
            be_concat.append(embed_cont_d)
        embed_dec = torch.cat(be_concat, 2)
        embed_dec = self.dec_comb_proj(embed_dec)
        # ATTENTION MASKs — all three are the same square causal mask; kept
        # separate in case they ever need to diverge.
        if self.enc_mask is None or self.enc_mask.size(0) != seq_len:
            self.enc_mask = self.get_mask(seq_len).to(self.device)
        if self.dec_mask is None or self.dec_mask.size(0) != seq_len:
            self.dec_mask = self.get_mask(seq_len).to(self.device)
        if self.enc_dec_mask is None or self.enc_dec_mask.size(0) != seq_len:
            self.enc_dec_mask = self.get_mask(seq_len).to(self.device)
        # nn.Transformer expects (seq, batch, dim).
        embed_enc = embed_enc.permute(1, 0, 2)
        embed_dec = embed_dec.permute(1, 0, 2)
        # Positional encoding
        embed_enc = self.pos_encoder(embed_enc)
        embed_dec = self.pos_decoder(embed_dec)
        # Padding mask; currently unused (see commented kwargs below).
        mask = input["mask"]
        mask = mask.eq(0)
        out = self.transformer(embed_enc, embed_dec,
                               src_mask = self.enc_mask,
                               tgt_mask = self.dec_mask,
                               memory_mask = self.enc_dec_mask,
                               # src_key_padding_mask = mask,
                               # tgt_key_padding_mask = mask,
                               # memory_key_padding_mask = mask,
                               )
        out = out.permute(1, 0, 2)
        out = out.contiguous().view(batch_size, -1, self.hidden_dim)
        out = self.fc(out)
        preds = self.activation(out).view(batch_size, -1)
        return preds
4808171 | <gh_stars>1-10
import subprocess
import shlex
import os
from rlpython.utils.argument_parser import (
ReplArgumentParserError,
ReplArgumentParser,
)
class ShellRuntime:
    """Executes '!'-prefixed shell command lines on behalf of a repl,
    with `cd` handled as a built-in."""
    def __init__(self, repl):
        self.repl = repl
        self._old_pwd = os.getcwd()
    def validate_source(self, raw_source):
        """Return True when the source can be tokenised by shlex."""
        try:
            shlex.split(raw_source)
        except Exception:
            return False
        return True
    def change_directory(self, command_line):
        """Built-in `cd`; returns a shell-style exit code (0 ok, 1 error)."""
        argument_parser = ReplArgumentParser(repl=self.repl, prog='cd')
        argument_parser.add_argument('directory', nargs='?')
        arguments = argument_parser.parse_args(command_line[1:])
        target = arguments.directory
        if target == '-':
            # `cd -` jumps back to the previously visited directory.
            target = self._old_pwd
        else:
            # No argument means $HOME, like a real shell.
            target = os.path.abspath(target or os.environ.get('HOME', '/home'))
        self._old_pwd = os.getcwd()
        try:
            os.chdir(target)
        except FileNotFoundError:
            self.repl.write_error(
                'cd: {}: No such file or directory\n'.format(target),
            )
            return 1
        self.repl.write('{}\n'.format(target))
        return 0
    def run(self, raw_source):
        """Run one command line (leading '!' stripped); returns its exit code."""
        argv = shlex.split(raw_source[1:])
        # Built-in command: cd
        if argv[0] == 'cd':
            try:
                return self.change_directory(argv)
            except ReplArgumentParserError:
                return 1
        # External command
        try:
            output = subprocess.check_output(
                argv,
                stderr=subprocess.PIPE,
                shell=False,
            ).decode()
            exit_code = 0
        except FileNotFoundError:
            output = ''
            exit_code = 1
            self.repl.write_error(
                '{}: command not found\n'.format(argv[0]),
            )
        except subprocess.CalledProcessError as exception:
            # Surface both streams of the failed command.
            output = '{}{}'.format(
                exception.output.decode(),
                exception.stderr.decode(),
            )
            exit_code = exception.returncode
        if output:
            self.repl.write(output)
        return exit_code
| StarcoderdataPython |
4804931 | <reponame>zihen/quart-restplus
# -*- coding: utf-8 -*-
import pytest
import quart_restplus as restplus
from quart import url_for, Blueprint
from quart.routing import BuildError
async def test_default_apidoc_on_root(app, client):
    """The Swagger UI page is mounted at the API root by default."""
    restplus.Api(app, version='1.0')
    async with app.test_request_context():
        assert url_for('doc') == url_for('root')
        response = await client.get(url_for('doc'))
        assert response.status_code == 200
        assert response.content_type == 'text/html; charset=utf-8'
async def test_default_apidoc_on_root_lazy(app, client):
    """Doc-on-root also holds when the Api is bound later via init_app()."""
    api = restplus.Api(version='1.0')
    api.init_app(app)
    async with app.test_request_context():
        assert url_for('doc') == url_for('root')
        response = await client.get(url_for('doc'))
        assert response.status_code == 200
        assert response.content_type == 'text/html; charset=utf-8'
async def test_default_apidoc_on_root_with_blueprint(app, client):
    """Doc-on-root also holds when the Api lives on a url-prefixed blueprint."""
    blueprint = Blueprint('api', __name__, url_prefix='/api')
    restplus.Api(blueprint, version='1.0')
    app.register_blueprint(blueprint)
    async with app.test_request_context():
        assert url_for('api.doc') == url_for('api.root')
        response = await client.get(url_for('api.doc'))
        assert response.status_code == 200
        assert response.content_type == 'text/html; charset=utf-8'
async def test_apidoc_with_custom_validator(app, client):
    """SWAGGER_VALIDATOR_URL is forwarded into the rendered Swagger UI page."""
    app.config['SWAGGER_VALIDATOR_URL'] = 'http://somewhere.com/validator'
    restplus.Api(app, version='1.0')
    async with app.test_request_context():
        response = await client.get(url_for('doc'))
        data = await response.get_data()
        assert response.status_code == 200
        assert response.content_type == 'text/html; charset=utf-8'
        assert 'validatorUrl: "http://somewhere.com/validator" || null,' in data.decode()
async def test_apidoc_doc_expansion_parameter(app, client):
    """SWAGGER_UI_DOC_EXPANSION drives the docExpansion setting ('none' default)."""
    restplus.Api(app)
    async with app.test_request_context():
        response = await client.get(url_for('doc'))
        assert 'docExpansion: "none"' in (await response.get_data(False))
        app.config['SWAGGER_UI_DOC_EXPANSION'] = 'list'
        response = await client.get(url_for('doc'))
        assert 'docExpansion: "list"' in (await response.get_data(False))
        app.config['SWAGGER_UI_DOC_EXPANSION'] = 'full'
        response = await client.get(url_for('doc'))
        assert 'docExpansion: "full"' in (await response.get_data(False))
async def test_apidoc_doc_display_operation_id(app, client):
    """SWAGGER_UI_OPERATION_ID toggles displayOperationId (false by default)."""
    restplus.Api(app)
    async with app.test_request_context():
        response = await client.get(url_for('doc'))
        assert 'displayOperationId: false' in (await response.get_data(False))
        app.config['SWAGGER_UI_OPERATION_ID'] = False
        response = await client.get(url_for('doc'))
        assert 'displayOperationId: false' in (await response.get_data(False))
        app.config['SWAGGER_UI_OPERATION_ID'] = True
        response = await client.get(url_for('doc'))
        assert 'displayOperationId: true' in (await response.get_data(False))
async def test_apidoc_doc_display_request_duration(app, client):
    """SWAGGER_UI_REQUEST_DURATION toggles displayRequestDuration (false default)."""
    restplus.Api(app)
    async with app.test_request_context():
        response = await client.get(url_for('doc'))
        assert 'displayRequestDuration: false' in (await response.get_data(False))
        app.config['SWAGGER_UI_REQUEST_DURATION'] = False
        response = await client.get(url_for('doc'))
        assert 'displayRequestDuration: false' in (await response.get_data(False))
        app.config['SWAGGER_UI_REQUEST_DURATION'] = True
        response = await client.get(url_for('doc'))
        assert 'displayRequestDuration: true' in (await response.get_data(False))
async def test_custom_apidoc_url(app, client):
    """A custom doc='/doc/' path serves the UI there and leaves the root empty."""
    restplus.Api(app, version='1.0', doc='/doc/')
    async with app.test_request_context():
        doc_url = url_for('doc')
        root_url = url_for('root')
        assert doc_url != root_url
        response = await client.get(root_url)
        assert response.status_code == 404
        assert doc_url == '/doc/'
        response = await client.get(doc_url)
        assert response.status_code == 200
        assert response.content_type == 'text/html; charset=utf-8'
async def test_custom_api_prefix(app):
    """The Api prefix is reflected in the root endpoint URL."""
    prefix = '/api'
    api = restplus.Api(app, prefix=prefix)
    api.namespace('resource')
    async with app.test_request_context():
        assert url_for('root') == prefix
async def test_custom_apidoc_page(app, client):
    """@api.documentation replaces the default doc page with custom content."""
    api = restplus.Api(app, version='1.0')
    content = 'My Custom API Doc'
    @api.documentation
    def api_doc():
        return content
    async with app.test_request_context():
        response = await client.get(url_for('doc'))
        assert response.status_code == 200
        assert (await response.get_data(False)) == content
async def test_custom_apidoc_page_lazy(app, client):
    """Custom doc page also works when the Api is registered via a blueprint."""
    blueprint = Blueprint('api', __name__, url_prefix='/api')
    api = restplus.Api(blueprint, version='1.0')
    content = 'My Custom API Doc'
    @api.documentation
    def api_doc():
        return content
    app.register_blueprint(blueprint)
    async with app.test_request_context():
        response = await client.get(url_for('api.doc'))
        assert response.status_code == 200
        assert (await response.get_data(False)) == content
async def test_disabled_apidoc(app, client):
    """doc=False removes the doc endpoint entirely and the root serves nothing."""
    restplus.Api(app, version='1.0', doc=False)
    async with app.test_request_context():
        with pytest.raises(BuildError):
            url_for('doc')
        response = await client.get(url_for('root'))
        assert response.status_code == 404
| StarcoderdataPython |
31456 | from typing import Iterable, Optional
from django import VERSION
from django.db.models.base import Model
from django.db.models.fields.related import ManyToManyField
from django.db.models.fields.reverse_related import ManyToOneRel
from django.db.models.manager import Manager
from django.db.models.query import QuerySet
def invalidate_onetomany(objs: Iterable[Model], prefetch_keys: Iterable[str]):
    """
    Invalidate one-to-many caches. These are remote `ForeignKey` and
    `ManyToManyField` fields fetched with `prefetch_related()`.
    """
    # Django 1.x and 2.x store prefetched relations in the same per-instance dict.
    if VERSION[0] not in (1, 2):
        return
    for obj in objs:
        if not hasattr(obj, '_prefetched_objects_cache'):
            continue
        cache = obj._prefetched_objects_cache
        for key in prefetch_keys:
            if key in cache:
                del cache[key]
def invalidate_manytoone(objs: Iterable[Model], field_names: Iterable[str]):
    """
    Invalidate many-to-one caches. These are `ForeignKey` and
    `OneToOneField` fields fetched with `select_related()` or
    `prefetch_related()`.
    """
    if VERSION[0] == 1:
        # Django 1.x caches related objects as `_<field>_cache` attributes.
        for obj in objs:
            for field_name in field_names:
                if not is_fk_cached(obj=obj, field_name=field_name):
                    continue
                del obj.__dict__[f'_{field_name}_cache']
    elif VERSION[0] == 2:
        # Django 2.x moved the cache into obj._state.fields_cache.
        for obj in objs:
            for field_name in field_names:
                if not is_fk_cached(obj=obj, field_name=field_name):
                    continue
                del obj._state.fields_cache[field_name]
def get_prefetch_cache_key(relation: Manager) -> str:
    'Return a key used in the prefetched cache for a relation.'
    try:
        # Works on ManyToMany
        return relation.prefetch_cache_name
    except AttributeError:
        # Is a ForeignKey (OneToMany)
        rel_field = relation.field.remote_field  # type: ManyToOneRel
        if rel_field.related_name:
            # An explicit related_name always wins.
            return rel_field.related_name
        if VERSION[0] == 1:
            return rel_field.name
        elif VERSION[0] == 2:
            # Django 2.x default reverse accessor is "<field>_set".
            return f'{rel_field.name}_set'
def init_prefetch_cache(obj: Model):
    'Init a prefetch cache on the model.'
    # Same cache attribute on Django 1.x and 2.x; never clobber an existing one.
    if VERSION[0] not in (1, 2):
        return
    if not hasattr(obj, '_prefetched_objects_cache'):
        obj._prefetched_objects_cache = {}
def is_query_prefetched(relation: Manager) -> bool:
    'Return `True` if the relation is prefetched.'
    if VERSION[0] == 1 or VERSION[0] == 2:
        # The instance the relation manager is bound to.
        obj = relation.instance
        if not hasattr(obj, '_prefetched_objects_cache'):
            return False
        prefetch_cache_key = get_prefetch_cache_key(relation=relation)
        return prefetch_cache_key in obj._prefetched_objects_cache
    # Unknown Django version: report nothing as prefetched.
    return False
def set_prefetch_cache(
        relation: Manager, queryset: QuerySet, override: bool = True):
    'Set prefetch cache on a `Model` for a relation.'
    # Respect an existing cache entry unless the caller wants to override it.
    if is_query_prefetched(relation=relation) and not override:
        return
    obj = relation.instance
    init_prefetch_cache(obj=obj)
    if VERSION[0] == 1 or VERSION[0] == 2:
        key = get_prefetch_cache_key(relation=relation)
        obj._prefetched_objects_cache[key] = queryset
def is_queryresult_loaded(qs: QuerySet) -> bool:
    'Return `True` if the query is loaded, `False` otherwise.'
    # Both supported Django versions keep fetched rows in qs._result_cache.
    if VERSION[0] in (1, 2):
        return qs._result_cache is not None
    return False
def set_queryresult(qs: QuerySet, result: list, override: bool = True):
    'Set result on a previously setup query.'
    if VERSION[0] not in (1, 2):
        return
    # Only replace an already-loaded result when the caller asked for it.
    if override or not is_queryresult_loaded(qs=qs):
        qs._result_cache = result
def get_queryresult(qs: QuerySet) -> Optional[list]:
    'Return the cached query result of the passed `QuerySet`.'
    if VERSION[0] == 1 or VERSION[0] == 2:
        # None means the queryset has not been evaluated yet.
        return qs._result_cache
def is_fk_cached(obj: Model, field_name: str) -> bool:
    'Return `True` if the `ForeignKey` field on the object is cached.'
    if VERSION[0] == 1:
        # Django 1.x stores the related object as a `_<field>_cache` attribute.
        return hasattr(obj, f'_{field_name}_cache')
    elif VERSION[0] == 2:
        # Django 2.x keeps a fields_cache dict on obj._state.
        if getattr(obj, '_state', None) is None or \
                getattr(obj._state, 'fields_cache', None) is None:
            return False
        return field_name in obj._state.fields_cache
    return False
def set_fk_cache(
        obj: Model, field_name: str, value: Model, override: bool = True):
    """
    Set a cache on the `obj` for a `ForeignKey` field, override when
    requested.
    """
    if not override and is_fk_cached(obj=obj, field_name=field_name):
        return
    if VERSION[0] == 1:
        setattr(obj, f'_{field_name}_cache', value)
    elif VERSION[0] == 2:
        # NOTE(review): assigning a plain dict to obj._state and then setting
        # obj._state.fields_cache as an *attribute* on that dict would raise
        # AttributeError; in practice Django models always carry a ModelState
        # `_state`, so this fallback branch appears to be dead — confirm.
        if getattr(obj, '_state', None) is None:
            obj._state = dict()
        if getattr(obj._state, 'fields_cache', None) is None:
            obj._state.fields_cache = dict()
        obj._state.fields_cache[field_name] = value
def del_fk_cache(obj: Model, field_name: str):
    'Delete a cached `ForeignKey` on the `Model`.'
    # Nothing to do when the field is not cached at all.
    if not is_fk_cached(obj=obj, field_name=field_name):
        return
    if VERSION[0] == 1:
        # Django 1.x: related object lives in a `_<field>_cache` attribute.
        delattr(obj, f'_{field_name}_cache')
    elif VERSION[0] == 2:
        # BUG FIX: the original did `del obj._state.fields_cache`, wiping the
        # whole cache dict (every cached relation) instead of just this
        # field's entry — mirror set_fk_cache, which writes
        # `fields_cache[field_name]`.
        del obj._state.fields_cache[field_name]
# Preserve the stock implementation so the patched version can delegate to it.
_old_m2m_savedata = ManyToManyField.save_form_data
def _save_m2m_form_data(
        self: ManyToManyField, instance: Model, data: QuerySet):
    """Save M2M form data, then prime the prefetch cache with the saved queryset."""
    _old_m2m_savedata(self=self, instance=instance, data=data)
    set_prefetch_cache(
        relation=getattr(instance, self.name), queryset=data, override=True)
# Monkey-patch every ManyToManyField so form saves keep the cache warm.
ManyToManyField.save_form_data = _save_m2m_form_data
| StarcoderdataPython |
3343437 | <filename>scripts/study_case/ID_4/torch_geometric/nn/models/autoencoder.py
import math
import random
import torch
import numpy as np
from sklearn.metrics import roc_auc_score, average_precision_score
from scripts.study_case.ID_4.torch_geometric.utils import to_undirected
from ..inits import reset
# Numerical floor used to keep log() arguments strictly positive.
EPS = 1e-15
# Upper clamp for predicted log-variances to avoid exp() overflow.
MAX_LOGVAR = 10
def negative_sampling(pos_edge_index, num_nodes):
    """Sample one negative (non-existing) edge per positive edge.

    Each edge (r, c) is encoded as the integer r * num_nodes + c. Distinct
    random codes are drawn, and any code colliding with a positive edge is
    re-drawn until no collisions remain.

    Args:
        pos_edge_index (LongTensor): positive edges, shape [2, E].
        num_nodes (int): number of nodes in the graph.

    Returns:
        LongTensor: negative edges, shape [2, E], on pos_edge_index's device.
    """
    # Linearised ids of the positive edges; sampling happens on the CPU.
    idx = (pos_edge_index[0] * num_nodes + pos_edge_index[1])
    idx = idx.to(torch.device('cpu'))
    rng = range(num_nodes**2)
    perm = torch.tensor(random.sample(rng, idx.size(0)))
    mask = torch.from_numpy(np.isin(perm, idx).astype(np.uint8))
    # Positions in `perm` that collided with a positive edge.
    rest = mask.nonzero().view(-1)
    while rest.numel() > 0:  # pragma: no cover
        tmp = torch.tensor(random.sample(rng, rest.size(0)))
        mask = torch.from_numpy(np.isin(tmp, idx).astype(np.uint8))
        perm[rest] = tmp
        # BUG FIX: the original reset `rest` to indices into `tmp`
        # (mask.nonzero()), which are NOT positions in `perm`, so later
        # rewrites targeted the wrong slots. Keep only the still-colliding
        # subset of the original positions.
        rest = rest[mask.nonzero().view(-1)]
    # BUG FIX: `/` is true division on modern PyTorch and would yield floats;
    # node indices require integer floor division.
    row, col = perm // num_nodes, perm % num_nodes
    return torch.stack([row, col], dim=0).to(pos_edge_index.device)
class InnerProductDecoder(torch.nn.Module):
    r"""Inner-product decoder from the `"Variational Graph Auto-Encoders"
    <https://arxiv.org/abs/1611.07308>`_ paper,

    .. math::
        \sigma(\mathbf{Z}\mathbf{Z}^{\top})

    where :math:`\mathbf{Z} \in \mathbb{R}^{N \times d}` is the latent space
    produced by the encoder. A node pair is scored by the dot product of its
    two latent vectors."""

    def forward(self, z, edge_index, sigmoid=True):
        r"""Return edge probabilities (or raw scores) for the node pairs
        listed in :obj:`edge_index`.

        Args:
            z (Tensor): The latent space :math:`\mathbf{Z}`.
            sigmoid (bool, optional): If set to :obj:`False`, skip the
                logistic sigmoid and return raw dot products.
                (default: :obj:`True`)
        """
        src, dst = edge_index
        logits = (z[src] * z[dst]).sum(dim=1)
        if sigmoid:
            return torch.sigmoid(logits)
        return logits

    def forward_all(self, z, sigmoid=True):
        r"""Decode :obj:`z` into a dense probabilistic adjacency matrix.

        Args:
            z (Tensor): The latent space :math:`\mathbf{Z}`.
            sigmoid (bool, optional): If set to :obj:`False`, skip the
                logistic sigmoid and return raw scores.
                (default: :obj:`True`)
        """
        scores = z @ z.t()
        if sigmoid:
            return torch.sigmoid(scores)
        return scores
class GAE(torch.nn.Module):
    r"""The Graph Auto-Encoder model from the
    `"Variational Graph Auto-Encoders" <https://arxiv.org/abs/1611.07308>`_
    paper based on user-defined encoder and decoder models.
    Args:
        encoder (Module): The encoder module.
        decoder (Module, optional): The decoder module. If set to :obj:`None`,
            will default to the
            :class:`torch_geometric.nn.models.InnerProductDecoder`.
            (default: :obj:`None`)
    """
    def __init__(self, encoder, decoder=None):
        super(GAE, self).__init__()
        self.encoder = encoder
        self.decoder = InnerProductDecoder() if decoder is None else decoder
        # Call through the class (not `self.reset_parameters()`) so a
        # subclass override is not invoked before the subclass's own
        # attributes (e.g. ARGA's discriminator) have been assigned.
        GAE.reset_parameters(self)
    def reset_parameters(self):
        # `reset` re-initializes the given module's parameters in place.
        reset(self.encoder)
        reset(self.decoder)
    def encode(self, *args, **kwargs):
        r"""Runs the encoder and computes node-wise latent variables."""
        return self.encoder(*args, **kwargs)
    def decode(self, *args, **kwargs):
        r"""Runs the decoder and computes edge probabilties."""
        return self.decoder(*args, **kwargs)
    def split_edges(self, data, val_ratio=0.05, test_ratio=0.1):
        r"""Splits the edges of a :obj:`torch_geometric.data.Data` object
        into positve and negative train/val/test edges (mutates and returns
        :obj:`data`; :obj:`data.edge_index` is consumed and cleared).
        Args:
            data (Data): The data object.
            val_ratio (float, optional): The ratio of positive validation
                edges. (default: :obj:`0.05`)
            test_ratio (float, optional): The ratio of positive test
                edges. (default: :obj:`0.1`)
        """
        assert 'batch' not in data  # No batch-mode.
        row, col = data.edge_index
        data.edge_index = None
        # Return upper triangular portion: keep each undirected edge once as
        # its (min, max) orientation so the split works on unique edges.
        mask = row < col
        row, col = row[mask], col[mask]
        # Number of positive validation / test edges to carve out.
        n_v = int(math.floor(val_ratio * row.size(0)))
        n_t = int(math.floor(test_ratio * row.size(0)))
        # Positive edges: shuffle once, then slice contiguous chunks for
        # val / test / train.
        perm = torch.randperm(row.size(0))
        row, col = row[perm], col[perm]
        r, c = row[:n_v], col[:n_v]
        data.val_pos_edge_index = torch.stack([r, c], dim=0)
        r, c = row[n_v:n_v + n_t], col[n_v:n_v + n_t]
        data.test_pos_edge_index = torch.stack([r, c], dim=0)
        r, c = row[n_v + n_t:], col[n_v + n_t:]
        data.train_pos_edge_index = torch.stack([r, c], dim=0)
        # Training edges are restored to both directions.
        data.train_pos_edge_index = to_undirected(data.train_pos_edge_index)
        # Negative edges: start from a full upper-triangular mask and clear
        # every positive edge; remaining 1-entries mark absent (negative)
        # candidate edges.
        num_nodes = data.num_nodes
        neg_adj_mask = torch.ones(num_nodes, num_nodes, dtype=torch.uint8)
        neg_adj_mask = neg_adj_mask.triu(diagonal=1)
        neg_adj_mask[row, col] = 0
        neg_row, neg_col = neg_adj_mask.nonzero().t()
        # Sample n_v + n_t negatives (or all candidates if fewer exist).
        perm = random.sample(
            range(neg_row.size(0)), min(n_v + n_t, neg_row.size(0)))
        perm = torch.tensor(perm)
        perm = perm.to(torch.long)
        neg_row, neg_col = neg_row[perm], neg_col[perm]
        # Remove the sampled val/test negatives from the training mask so
        # they can never be drawn again as training negatives.
        neg_adj_mask[neg_row, neg_col] = 0
        data.train_neg_adj_mask = neg_adj_mask
        row, col = neg_row[:n_v], neg_col[:n_v]
        data.val_neg_edge_index = torch.stack([row, col], dim=0)
        row, col = neg_row[n_v:n_v + n_t], neg_col[n_v:n_v + n_t]
        data.test_neg_edge_index = torch.stack([row, col], dim=0)
        return data
    def recon_loss(self, z, pos_edge_index):
        r"""Given latent variables :obj:`z`, computes the binary cross
        entropy loss for positive edges :obj:`pos_edge_index` and negative
        sampled edges.
        Args:
            z (Tensor): The latent space :math:`\mathbf{Z}`.
            pos_edge_index (LongTensor): The positive edges to train against.
        """
        # EPS keeps log() away from -inf when a predicted probability hits 0.
        pos_loss = -torch.log(
            self.decoder(z, pos_edge_index, sigmoid=True) + EPS).mean()
        # Draw one negative edge per positive edge for a balanced loss.
        neg_edge_index = negative_sampling(pos_edge_index, z.size(0))
        neg_loss = -torch.log(
            1 - self.decoder(z, neg_edge_index, sigmoid=True) + EPS).mean()
        return pos_loss + neg_loss
    def test(self, z, pos_edge_index, neg_edge_index):
        r"""Given latent variables :obj:`z`, positive edges
        :obj:`pos_edge_index` and negative edges :obj:`neg_edge_index`,
        computes area under the ROC curve (AUC) and average precision (AP)
        scores.
        Args:
            z (Tensor): The latent space :math:`\mathbf{Z}`.
            pos_edge_index (LongTensor): The positive edges to evaluate
                against.
            neg_edge_index (LongTensor): The negative edges to evaluate
                against.
        """
        # Ground truth: 1 for every positive edge, 0 for every negative one.
        pos_y = z.new_ones(pos_edge_index.size(1))
        neg_y = z.new_zeros(neg_edge_index.size(1))
        y = torch.cat([pos_y, neg_y], dim=0)
        # Decoded probabilities serve directly as ranking scores.
        pos_pred = self.decoder(z, pos_edge_index, sigmoid=True)
        neg_pred = self.decoder(z, neg_edge_index, sigmoid=True)
        pred = torch.cat([pos_pred, neg_pred], dim=0)
        # sklearn metrics want NumPy arrays on the CPU.
        y, pred = y.detach().cpu().numpy(), pred.detach().cpu().numpy()
        return roc_auc_score(y, pred), average_precision_score(y, pred)
class VGAE(GAE):
    r"""The Variational Graph Auto-Encoder model from the
    `"Variational Graph Auto-Encoders" <https://arxiv.org/abs/1611.07308>`_
    paper.
    Args:
        encoder (Module): The encoder module to compute :math:`\mu` and
            :math:`\log\sigma^2`.
        decoder (Module, optional): The decoder module. If set to :obj:`None`,
            will default to the
            :class:`torch_geometric.nn.models.InnerProductDecoder`.
            (default: :obj:`None`)
    """
    def __init__(self, encoder, decoder=None):
        super(VGAE, self).__init__(encoder, decoder)
    def reparametrize(self, mu, logvar):
        # Reparameterization trick: z = mu + sigma * eps with eps ~ N(0, I)
        # during training; deterministic mean at eval time.
        # NOTE(review): torch.exp(logvar) is used as the std here (i.e.
        # `logvar` is effectively treated as a log-std), matching the
        # upstream implementation — confirm against the encoder's output.
        if self.training:
            return mu + torch.randn_like(logvar) * torch.exp(logvar)
        else:
            return mu
    def encode(self, *args, **kwargs):
        """Runs the encoder, caches :math:`\\mu` / :math:`\\log\\sigma^2` and
        returns reparametrized latent variables."""
        self.__mu__, self.__logvar__ = self.encoder(*args, **kwargs)
        z = self.reparametrize(self.__mu__, self.__logvar__)
        return z
    def kl_loss(self, mu=None, logvar=None):
        r"""Computes the KL loss, either for the passed arguments :obj:`mu`
        and :obj:`logvar`, or based on latent variables from last encoding.
        Args:
            mu (Tensor, optional): The latent space for :math:`\mu`. If set to
                :obj:`None`, uses the last computation of :math:`mu`.
                (default: :obj:`None`)
            logvar (Tensor, optional): The latent space for
                :math:`\log\sigma^2`. If set to :obj:`None`, uses the last
                computation of :math:`\log\sigma^2`.(default: :obj:`None`)
        """
        mu = self.__mu__ if mu is None else mu
        # BUG FIX: honor an explicitly passed `logvar`; the original always
        # overwrote it with the cached value, silently ignoring the argument.
        logvar = self.__logvar__ if logvar is None else logvar
        # BUG FIX: the closed-form Gaussian KL term is exp(logvar) (= sigma^2),
        # not log(logvar) — the latter yields NaNs whenever logvar <= 0.
        return -0.5 * torch.mean(
            torch.sum(1 + logvar - mu**2 - logvar.exp(), dim=1))
class ARGA(GAE):
    r"""The Adversarially Regularized Graph Auto-Encoder model from the
    `"Adversarially Regularized Graph Autoencoder for Graph Embedding"
    <https://arxiv.org/abs/1802.04407>`_ paper.
    Args:
        encoder (Module): The encoder module.
        discriminator (Module): The discriminator module.
        decoder (Module, optional): The decoder module. If set to :obj:`None`,
            will default to the
            :class:`torch_geometric.nn.models.InnerProductDecoder`.
            (default: :obj:`None`)
    """
    def __init__(self, encoder, discriminator, decoder=None):
        # BUG FIX: `nn.Module.__setattr__` raises "cannot assign module
        # before Module.__init__() call" when a sub-module is assigned
        # before the parent constructor has run, so super().__init__ must
        # come first. (GAE.__init__ calls GAE.reset_parameters via the
        # class, so the discriminator is not touched prematurely.)
        super(ARGA, self).__init__(encoder, decoder)
        self.discriminator = discriminator
        reset(self.discriminator)
    def reset_parameters(self):
        # Re-initialize encoder/decoder, then the discriminator.
        super(ARGA, self).reset_parameters()
        reset(self.discriminator)
    def reg_loss(self, z):
        r"""Computes the regularization loss of the encoder, i.e. how well
        the discriminator is fooled into labelling :obj:`z` as "real".
        Args:
            z (Tensor): The latent space :math:`\mathbf{Z}`.
        """
        real = torch.sigmoid(self.discriminator(z))
        real_loss = -torch.log(real + EPS).mean()
        return real_loss
    def discriminator_loss(self, z):
        r"""Computes the loss of the discriminator: classify Gaussian noise
        as real and (detached) encoder output as fake.
        Args:
            z (Tensor): The latent space :math:`\mathbf{Z}`.
        """
        real = torch.sigmoid(self.discriminator(torch.randn_like(z)))
        # Detach so discriminator updates do not backprop into the encoder.
        fake = torch.sigmoid(self.discriminator(z.detach()))
        real_loss = -torch.log(real + EPS).mean()
        fake_loss = -torch.log(1 - fake + EPS).mean()
        return real_loss + fake_loss
class ARGVA(ARGA):
    r"""The Adversarially Regularized Variational Graph Auto-Encoder model from
    the `"Adversarially Regularized Graph Autoencoder for Graph Embedding"
    <https://arxiv.org/abs/1802.04407>`_ paper.
    Args:
        encoder (Module): The encoder module to compute :math:`\mu` and
            :math:`\log\sigma^2`.
        discriminator (Module): The discriminator module.
        decoder (Module, optional): The decoder module. If set to :obj:`None`,
            will default to the
            :class:`torch_geometric.nn.models.InnerProductDecoder`.
            (default: :obj:`None`)
    """
    def __init__(self, encoder, discriminator, decoder=None):
        super(ARGVA, self).__init__(encoder, discriminator, decoder)
        # Compose (rather than inherit) a VGAE and delegate all variational
        # behaviour to it; ARGA supplies the adversarial regularization.
        self.VGAE = VGAE(encoder, decoder)
    @property
    def __mu__(self):
        # Expose the composed VGAE's cached mean so code paths that read
        # `self.__mu__` keep working. (Names with two trailing underscores
        # are not name-mangled, so the property is reachable.)
        return self.VGAE.__mu__
    @property
    def __logvar__(self):
        # Same delegation for the cached log-variance.
        return self.VGAE.__logvar__
    def reparametrize(self, mu, logvar):
        # Delegate sampling to the composed VGAE.
        return self.VGAE.reparametrize(mu, logvar)
    def encode(self, *args, **kwargs):
        """Runs the composed VGAE's encoder and samples latent variables."""
        return self.VGAE.encode(*args, **kwargs)
    def kl_loss(self, mu=None, logvar=None):
        # Delegate the KL term to the composed VGAE.
        return self.VGAE.kl_loss(mu, logvar)
| StarcoderdataPython |
1737134 | from typing import Any, List, Optional
from ...exceptions import InvalidEnvelopeExpressionError
class SchemaValidationError(Exception):
    """Raised when serialization fails JSON Schema validation.

    Carries the detailed context reported by the validator so callers can
    build rich error responses or logs.
    """

    def __init__(
        self,
        message: str,
        validation_message: Optional[str] = None,
        name: Optional[str] = None,
        path: Optional[List] = None,
        value: Optional[Any] = None,
        definition: Optional[Any] = None,
        rule: Optional[str] = None,
        rule_definition: Optional[Any] = None,
    ):
        """
        Parameters
        ----------
        message : str
            Powertools formatted error message
        validation_message : str, optional
            Human-readable description of what is wrong, e.g.
            ``data.property[index] must be smaller than or equal to 42``
        name : str, optional
            Name of the offending path in the data structure, e.g.
            ``data.property[index]``
        path : List, optional
            The same path as an array, e.g. ``['data', 'property', 'index']``
        value : Any, optional
            The invalid value
        definition : Any, optional
            The full rule definition, e.g. ``42``
        rule : str, optional
            The rule the data is breaking, e.g. ``maximum``
        rule_definition : Any, optional
            The specific rule definition, e.g. ``42``
        """
        super().__init__(message)
        # Stash every validator detail on the instance for introspection.
        (self.message, self.validation_message, self.name, self.path,
         self.value, self.definition, self.rule, self.rule_definition) = (
            message, validation_message, name, path,
            value, definition, rule, rule_definition)
class InvalidSchemaFormatError(Exception):
    """Raised when the supplied JSON Schema document itself is malformed."""
__all__ = ["SchemaValidationError", "InvalidSchemaFormatError", "InvalidEnvelopeExpressionError"]
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.