hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringdate 2015-01-01 00:00:47 2022-03-31 23:42:18 ⌀ | max_issues_repo_issues_event_max_datetime stringdate 2015-01-01 17:43:30 2022-03-31 23:59:58 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ae1ad8c506c36a888f234786efecf582422e3003 | 35 | py | Python | src/artifice/scraper/supervisor/__init__.py | artifice-project/artifice-scraper | f224a0da22162fd479d6b9f9095ff5cae4723716 | [
"MIT"
] | null | null | null | src/artifice/scraper/supervisor/__init__.py | artifice-project/artifice-scraper | f224a0da22162fd479d6b9f9095ff5cae4723716 | [
"MIT"
] | 5 | 2019-09-18T19:17:14.000Z | 2021-03-20T01:46:06.000Z | src/artifice/scraper/supervisor/__init__.py | artifice-project/artifice-scraper | f224a0da22162fd479d6b9f9095ff5cae4723716 | [
"MIT"
] | null | null | null | from .supervisor import Supervisor
| 17.5 | 34 | 0.857143 |
ae1b1c2f48b9a90d658a39990474e0ffceef271d | 366 | py | Python | Entradas/migrations/0012_auto_20200521_1931.py | ToniIvars/Blog | c2d1674c2c1fdf51749f4b014795b507ed93b45e | [
"MIT"
] | null | null | null | Entradas/migrations/0012_auto_20200521_1931.py | ToniIvars/Blog | c2d1674c2c1fdf51749f4b014795b507ed93b45e | [
"MIT"
] | 4 | 2021-03-30T13:26:38.000Z | 2021-06-10T19:20:56.000Z | Entradas/migrations/0012_auto_20200521_1931.py | ToniIvars/Blog | c2d1674c2c1fdf51749f4b014795b507ed93b45e | [
"MIT"
] | null | null | null | # Generated by Django 3.0.5 on 2020-05-21 17:31
from django.db import migrations
class Migration(migrations.Migration):
    """Rename field ``comentario`` to ``cuerpo`` on the ``comentarios`` model."""
    dependencies = [
        ('Entradas', '0011_comentarios'),
    ]
    operations = [
        migrations.RenameField(
            model_name='comentarios',
            old_name='comentario',
            new_name='cuerpo',
        ),
    ]
| 19.263158 | 47 | 0.587432 |
ae1dba2c9332b1aaf3dca98391c5242cc59d4eab | 322 | py | Python | jd/api/rest/ComJdQlBasicWsGlscGlscBasicSecondaryWSGetAssortByFidRequest.py | jof2jc/jd | 691bf22c68ed88fb3fb32bfb43dd6da75024994a | [
"MIT"
] | null | null | null | jd/api/rest/ComJdQlBasicWsGlscGlscBasicSecondaryWSGetAssortByFidRequest.py | jof2jc/jd | 691bf22c68ed88fb3fb32bfb43dd6da75024994a | [
"MIT"
] | null | null | null | jd/api/rest/ComJdQlBasicWsGlscGlscBasicSecondaryWSGetAssortByFidRequest.py | jof2jc/jd | 691bf22c68ed88fb3fb32bfb43dd6da75024994a | [
"MIT"
] | null | null | null | from jd.api.base import RestApi
class ComJdQlBasicWsGlscGlscBasicSecondaryWSGetAssortByFidRequest(RestApi):
    """Request wrapper for the JD ``getAssortByFid`` REST endpoint."""
    def __init__(self,domain,port=80):
        """Initialise the underlying RestApi with the target host and port."""
        RestApi.__init__(self,domain, port)
        self.assFid = None  # assortment parent id; set by the caller before sending
    def getapiname(self):
        """Return the fully-qualified JD API method name for this request."""
        return 'jingdong.com.jd.ql.basic.ws.glsc.GlscBasicSecondaryWS.getAssortByFid'
| 20.125 | 80 | 0.776398 |
ae209fc837cb7fa92d358e927f5a60ae96f43be3 | 682 | py | Python | tensorflow_gnn/tools/generate_training_data_test.py | mattdangerw/gnn | f39d3ea0d8fc6e51cf58814873fc1502c12554ae | [
"Apache-2.0"
] | 611 | 2021-11-18T06:04:10.000Z | 2022-03-29T11:46:42.000Z | tensorflow_gnn/tools/generate_training_data_test.py | mattdangerw/gnn | f39d3ea0d8fc6e51cf58814873fc1502c12554ae | [
"Apache-2.0"
] | 25 | 2021-11-18T17:21:12.000Z | 2022-03-31T06:36:55.000Z | tensorflow_gnn/tools/generate_training_data_test.py | mattdangerw/gnn | f39d3ea0d8fc6e51cf58814873fc1502c12554ae | [
"Apache-2.0"
] | 52 | 2021-11-18T23:12:30.000Z | 2022-03-27T06:31:08.000Z | """Unit tests for generate training data test."""
from os import path
from absl import flags
import tensorflow as tf
from tensorflow_gnn.tools import generate_training_data
from tensorflow_gnn.utils import test_utils
FLAGS = flags.FLAGS
class GenerateDataTest(tf.test.TestCase):
    """Smoke test: generate_training_data should write an output file."""
    def test_generate_training_data(self):
        """Generate 64 examples from the MPNN schema and check the file exists."""
        schema_filename = test_utils.get_resource("examples/schemas/mpnn.pbtxt")
        output_filename = path.join(FLAGS.test_tmpdir, "examples.tfrecords")
        generate_training_data.generate_training_data(
            schema_filename, output_filename, "tfrecord", 64)
        self.assertTrue(path.exists(output_filename))
if __name__ == "__main__":
    tf.test.main()
| 26.230769 | 76 | 0.781525 |
ae2166a391abaacff03859c883ab005463fa8d39 | 561 | py | Python | vsenvs.py | KaoruShiga/geister_rl | a0dbf6bd7f79b0366727664da6d9f1cf3060190e | [
"MIT"
] | 8 | 2021-03-12T00:06:44.000Z | 2022-01-15T20:09:51.000Z | vsenvs.py | KaoruShiga/geister_rl | a0dbf6bd7f79b0366727664da6d9f1cf3060190e | [
"MIT"
] | null | null | null | vsenvs.py | KaoruShiga/geister_rl | a0dbf6bd7f79b0366727664da6d9f1cf3060190e | [
"MIT"
] | 1 | 2021-10-04T07:42:01.000Z | 2021-10-04T07:42:01.000Z | import random as rnd
import numpy as np
from random_agent import RandomAgent
from geister2 import Geister2
from vsenv import VsEnv
class VsEnvs(VsEnv):
    """Environment that randomly picks one opponent from several agents."""
    # Resetting
    def on_episode_begin(self, init_red0):
        # Re-sample which opponent to face at the start of every episode so
        # the learner sees a mix of the provided agents over training.
        self._opponent = rnd.choice(self._opponents)
        return super().on_episode_begin(init_red0=init_red0)
    def __init__(self, opponents, game=Geister2(), seed=0):
        # NOTE(review): the ``game=Geister2()`` default is evaluated once at
        # import time and shared by every instance -- confirm this is intended.
        self._opponents = opponents
        opp = rnd.choice(opponents)
        return super().__init__(opponent=opp, game=game, seed=seed)
| 29.526316 | 67 | 0.716578 |
ae219f0c0bca31c2d0339c1392885c6f7d746211 | 2,007 | py | Python | src/util/utils.py | purpleposeidon/texture-atlas-generator | 68864bf42a8bb38b4cdbd883f776b32816084c8e | [
"Unlicense",
"MIT"
] | null | null | null | src/util/utils.py | purpleposeidon/texture-atlas-generator | 68864bf42a8bb38b4cdbd883f776b32816084c8e | [
"Unlicense",
"MIT"
] | null | null | null | src/util/utils.py | purpleposeidon/texture-atlas-generator | 68864bf42a8bb38b4cdbd883f776b32816084c8e | [
"Unlicense",
"MIT"
] | null | null | null | import os.path
import shutil
from data_parsers.json_parser import JsonParser
from data_parsers.xml_parser import XmlParser
from data_parsers.parser import ParserError
from packing_algorithms.ratcliff.texture_packer_ratcliff import TexturePackerRatcliff
from packing_algorithms.maxrects.texture_packer_maxrects import TexturePackerMaxRects
from packing_algorithms.maxrects.texture_packer_maxrects import FreeRectChoiceHeuristicEnum
def get_parser(parser_type):
    """Return a data-parser instance for the given format name.

    parser_type -- 'xml' or 'json'; any other value raises ParserError.
    """
    if parser_type == 'json':
        return JsonParser()
    if parser_type == 'xml':
        return XmlParser()
    raise ParserError('Unknown parser_type encountered %s' % parser_type)
def get_maxrects_heuristic(heuristic):
    """Map a heuristic name to its FreeRectChoiceHeuristicEnum member.

    Raises NotImplementedError for an unrecognised name.
    """
    name_to_member = {
        'shortside': FreeRectChoiceHeuristicEnum.RectBestShortSideFit,
        'longside': FreeRectChoiceHeuristicEnum.RectBestLongSideFit,
        'area': FreeRectChoiceHeuristicEnum.RectBestAreaFit,
        'bottomleft': FreeRectChoiceHeuristicEnum.RectBottomLeftRule,
        'contactpoint': FreeRectChoiceHeuristicEnum.RectContactPointRule,
    }
    try:
        return name_to_member[heuristic]
    except KeyError:
        raise NotImplementedError('Unknown heuristic enum encountered')
def get_packer(algorithm_type, size=0, heuristic=""):
    """Build a texture packer for the requested algorithm.

    algorithm_type -- 'ratcliff' or 'maxrects'; otherwise NotImplementedError.
    size           -- square atlas dimension used by the maxrects packer.
    heuristic      -- maxrects free-rectangle selection heuristic name.
    """
    if algorithm_type == 'ratcliff':
        return TexturePackerRatcliff()
    if algorithm_type == 'maxrects':
        dim = int(size)
        return TexturePackerMaxRects(get_maxrects_heuristic(heuristic), dim, dim)
    raise NotImplementedError('%s is unknown or not implemented yet.' % (algorithm_type))
def get_atlas_path(resource_path):
    """Return the path of the 'atlases' directory inside *resource_path*."""
    atlases_dir = os.path.join(resource_path, 'atlases')
    return atlases_dir
def get_color(color_text):
    """Parse a comma-separated color string (e.g. "255,0,0") into an int tuple.

    Fixes a Python-3 bug: the original relied on ``map()`` returning a list,
    but on Python 3 ``map()`` is a lazy iterator, so the subsequent
    ``len()``/slice calls raised TypeError. The redundant full slice
    (``[:len(color_list)]``) is dropped as well.
    """
    return tuple(int(component) for component in color_text.split(','))
def clear_atlas_dir(directory):
    """Recreate *directory* empty: delete it if present, then mkdir it."""
    if os.path.isdir(directory):
        shutil.rmtree(directory)
    os.mkdir(directory)
| 34.016949 | 93 | 0.757349 |
ae21aa10b6eac30d1836dbd0c8245d129f6fe3ff | 1,038 | py | Python | myslice/web/rest/confirm.py | loicbaron/myslice2 | 32af9462cc9e5654a6e3036978ae74b0a03a2698 | [
"MIT"
] | null | null | null | myslice/web/rest/confirm.py | loicbaron/myslice2 | 32af9462cc9e5654a6e3036978ae74b0a03a2698 | [
"MIT"
] | 1 | 2020-06-02T12:30:07.000Z | 2020-06-02T12:30:07.000Z | myslice/web/rest/confirm.py | loicbaron/myslice2 | 32af9462cc9e5654a6e3036978ae74b0a03a2698 | [
"MIT"
] | 1 | 2018-10-29T16:11:26.000Z | 2018-10-29T16:11:26.000Z | import json
import logging
import rethinkdb as r
from tornado import gen, escape
from myslice.db.activity import Event
from myslice.lib.util import myJSONEncoder
from myslice.web.rest import Api
logger = logging.getLogger('myslice.rest.confirm')
class ConfirmHandler(Api):
    """REST handler that confirms an e-mail address via an activity-event id."""
    @gen.coroutine
    def get(self, id):
        """
        GET /confirm/id

        it allows to confirm an email address using the event id
        :return:
        """
        try:
            # Look up the activity event referenced by the confirmation link.
            ev = yield r.table('activity').get(id).run(self.application.dbconnection)
            # NOTE(review): r.get() yields a single document (dict), so
            # len(ev) counts its fields -- confirm this check really means
            # "exactly one field" and not "exactly one row".
            if len(ev) != 1:
                raise ValueError("event id is not valid")
            event = Event(ev)
            event.setPending()
            # NOTE(review): `dispatch` is not imported anywhere in this
            # module; this call would raise NameError at runtime. Confirm the
            # missing import (e.g. from myslice.db import dispatch).
            dispatch(self.application.dbconnection, event)
            event.logInfo("Event is pending, a manager will validate your request")
            self.finish(json.dumps({"result": ["your email is confirmed"]}, cls=myJSONEncoder))
        except Exception as e:
            # Any failure above (bad id, DB error, NameError) is reported to
            # the user as an invalid confirmation link.
            self.userError("This link is not valid")
            return
| 31.454545 | 95 | 0.628131 |
ae21fa093e46f99cba061fc2247a9c451b1f519b | 218 | py | Python | FullContact/utils.py | KamalAwasthi/FullContact | fa2e9f29079064b015848d980ddbb8da51f323c9 | [
"Apache-2.0"
] | 2 | 2018-05-31T16:21:06.000Z | 2019-11-28T11:58:12.000Z | FullContact/utils.py | KamalAwasthi/FullContact | fa2e9f29079064b015848d980ddbb8da51f323c9 | [
"Apache-2.0"
] | null | null | null | FullContact/utils.py | KamalAwasthi/FullContact | fa2e9f29079064b015848d980ddbb8da51f323c9 | [
"Apache-2.0"
] | 2 | 2018-02-12T16:37:08.000Z | 2019-11-28T11:58:24.000Z | import requests
def getContacts(email):
    """Look up a person profile by e-mail via the FullContact v2 API.

    Returns the raw JSON response body as text.
    """
    response = requests.get(
        'https://api.fullcontact.com/v2/person.json',
        params={'email': email},
        headers={"X-FullContact-APIKey": "841831f8eef0a46f"},
    )
    return response.text
| 31.142857 | 133 | 0.738532 |
ae22121e986bc6059cb536b9769429d2efd4c361 | 1,665 | py | Python | python/advent_of_code/y2015/day01.py | stonecharioteer/advent-of-code | c18e47e378e82f82b77558a114e7d7c3a43c8429 | [
"MIT"
] | null | null | null | python/advent_of_code/y2015/day01.py | stonecharioteer/advent-of-code | c18e47e378e82f82b77558a114e7d7c3a43c8429 | [
"MIT"
] | null | null | null | python/advent_of_code/y2015/day01.py | stonecharioteer/advent-of-code | c18e47e378e82f82b77558a114e7d7c3a43c8429 | [
"MIT"
] | null | null | null | """--- Day 1: Not Quite Lisp ---
Santa was hoping for a white Christmas, but his weather machine's "snow" function is powered by stars, and he's fresh out! To save Christmas, he needs you to collect fifty stars by December 25th.
Collect stars by helping Santa solve puzzles. Two puzzles will be made available on each day in the Advent calendar; the second puzzle is unlocked when you complete the first. Each puzzle grants one star. Good luck!
Here's an easy puzzle to warm you up.
Santa is trying to deliver presents in a large apartment building, but he can't find the right floor - the directions he got are a little confusing. He starts on the ground floor (floor 0) and then follows the instructions one character at a time.
An opening parenthesis, (, means he should go up one floor, and a closing parenthesis, ), means he should go down one floor.
The apartment building is very tall, and the basement is very deep; he will never find the top or bottom floors.
For example:
(()) and ()() both result in floor 0.
((( and (()(()( both result in floor 3.
))((((( also results in floor 3.
()) and ))( both result in floor -1 (the first basement level).
))) and )())()) both result in floor -3.
To what floor do the instructions take Santa?"""
from typing import TextIO, Tuple
def run(inp: TextIO) -> Tuple[int, int]:
    """Follow the parenthesis instructions read from *inp*.

    Returns ``(final_floor, first_basement_position)``: the floor Santa ends
    on, and the 1-based index of the first instruction that takes him to
    floor -1 (``None`` if he never enters the basement). Characters other
    than '(' and ')' are ignored.
    """
    instructions = inp.read()
    level = 0
    first_basement = None
    for position, step in enumerate(instructions, start=1):
        if step == "(":
            level += 1
        elif step == ")":
            level -= 1
        if level == -1 and first_basement is None:
            first_basement = position
    return level, first_basement
| 42.692308 | 247 | 0.684685 |
ae2625e0bfcb85513b735f8abfbccb014e1bc0b8 | 875 | py | Python | setup.py | nbgallery/ipylogging | fa54a7ace0262398b5d7a9dd3ec6938248a70752 | [
"MIT"
] | 1 | 2021-10-18T22:12:37.000Z | 2021-10-18T22:12:37.000Z | setup.py | nbgallery/ipylogging | fa54a7ace0262398b5d7a9dd3ec6938248a70752 | [
"MIT"
] | null | null | null | setup.py | nbgallery/ipylogging | fa54a7ace0262398b5d7a9dd3ec6938248a70752 | [
"MIT"
] | null | null | null | # vim: expandtab tabstop=4 shiftwidth=4
from setuptools import setup
# read the contents of your README file
from os import path
# Resolve README.md relative to this file so builds work from any cwd.
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md'), 'r') as f:
    long_description = f.read()
setup(
    name='ipylogging',
    version='2020.342.1',
    author='Bill Allen',
    author_email='photo.allen@gmail.com',
    description='Easy log messages in Jupyter notebooks.',
    # The README is reused as the PyPI long description.
    long_description=long_description,
    long_description_content_type='text/markdown',
    license='MIT',
    keywords='logging logger logs ipython jupyter notebook messages'.split(),
    url='https://github.com/nbgallery/ipylogging',
    packages=['ipylogging'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Topic :: Utilities',
        'License :: OSI Approved :: MIT License'
    ]
)
| 30.172414 | 77 | 0.691429 |
ae288231dc020ec00eec037bd175a4539730e6b8 | 2,594 | py | Python | utils/i18n.py | minsukkahng/pokr.kr | 169475778c998b4198ac7d6a1cebbc3c389e41b8 | [
"Apache-2.0"
] | 76 | 2015-01-19T12:39:43.000Z | 2021-10-14T06:10:25.000Z | utils/i18n.py | minsukkahng/pokr.kr | 169475778c998b4198ac7d6a1cebbc3c389e41b8 | [
"Apache-2.0"
] | 22 | 2015-01-03T01:00:53.000Z | 2019-09-14T11:55:06.000Z | utils/i18n.py | minsukkahng/pokr.kr | 169475778c998b4198ac7d6a1cebbc3c389e41b8 | [
"Apache-2.0"
] | 28 | 2015-01-14T15:45:00.000Z | 2020-06-03T13:29:41.000Z | from babel import Locale
from flask import current_app as cur_app, request
from flask.ext.babel import Babel, get_locale
from functools import wraps
from popong_nlp.utils.translit import translit
__all__ = ['PopongBabel']
class PopongBabel(Babel):
    """Flask-Babel extension preconfigured with host-based locale selection."""
    def init_app(self, app):
        """Attach Babel to *app* and install locale helpers and Jinja filters."""
        super(PopongBabel, self).init_app(app)
        # Select the locale from the request host's subdomain (localeselector).
        self.localeselector(localeselector)
        # shortcuts
        app.babel = self
        # Supported locales: compiled translations plus the English source.
        app.LOCALES = self.list_translations() + [Locale('en')]
        # cmd-line locale option
        if hasattr(app, 'locale') and getattr(app, 'locale') in app.LOCALES:
            app.babel.force_locale(app.locale)
        # jinja filters
        app.jinja_env.filters['translit'] = filter_translit
        app.jinja_env.globals.update(translit=filter_translit)
        # context processor
        app.context_processor(inject_locales)
    def force_locale(self, locale):
        """Pin the selected locale, overriding host-based selection."""
        self.locale_selector_func = lambda: locale
class InvalidLocaleError(Exception):
    """Raised when a locale is not in the application's supported set."""
    pass
class NotInAppContextError(Exception):
    """Raised when a helper requiring the Flask app context runs outside one."""
    pass
def babel_context(f):
    """Decorator: require the current Flask app to be set up with PopongBabel.

    The wrapped function raises NotInAppContextError when ``app.babel`` /
    ``app.LOCALES`` are missing (i.e. ``PopongBabel.init_app`` has not run).

    Fixes two bugs in the original:
    * ``@wraps`` was applied to ``babel_context`` itself -- ``functools.wraps``
      takes the *wrapped* function as an argument, so the decorator was a
      no-op that merely clobbered the target's metadata and never installed
      the guard.
    * the inner wrapper called ``f`` but discarded its return value.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        if not hasattr(cur_app, 'babel') or not hasattr(cur_app, 'LOCALES'):
            raise NotInAppContextError()
        return f(*args, **kwargs)
    return decorated
@babel_context
def is_valid_locale(locale):
    """Return True when *locale* is one of the app's supported locales."""
    return locale in cur_app.LOCALES
def assert_valid_locale(locale):
    """Raise InvalidLocaleError unless *locale* is supported by the app."""
    if not is_valid_locale(locale):
        raise InvalidLocaleError()
def host(locale=None):
    """Return the request host rewritten to carry *locale* as its subdomain.

    An existing locale subdomain on the request host is replaced; otherwise
    the locale is prepended to the host as-is.
    """
    assert_valid_locale(locale)
    parts = request.host.split('.', 1)
    if len(parts) < 2 or not is_valid_locale(parts[0]):
        base_host = request.host
    else:
        base_host = parts[1]
    return '{locale}.{host}'.format(locale=locale, host=base_host)
@babel_context
def localeselector():
    """Pick the locale from the request host's leading subdomain.

    Falls back to the Babel default locale when the subdomain is not a
    supported locale.
    """
    locale = request.host.split('.', 1)[0]
    if not is_valid_locale(locale):
        locale = cur_app.babel.default_locale
    return locale
@babel_context
def inject_locales():
    """Template context processor: expose per-locale links for the current URL."""
    # TODO: caching
    locale_links = {
        locale: request.url.replace(request.host, host(locale))
        for locale in cur_app.LOCALES
    }
    return dict(locale_links=locale_links,
                locale=str(get_locale()))
def filter_translit(*args, **kwargs):
    """Jinja filter: transliterate a Korean string into the active locale.

    Called with one positional argument it transliterates immediately (the
    string passes through unchanged for the 'ko' locale). Called with no
    positional arguments it returns a one-argument filter that curries the
    ``type`` keyword. More than one positional argument is an error.
    """
    locale = str(get_locale())
    _type = kwargs.get('type')
    if len(args) == 1:
        string = args[0]
        # Korean text is already native for the 'ko' locale; pass it through.
        return translit(string, 'ko', locale, _type) if locale != 'ko' else string
    elif args:
        raise Exception('filter_translit() only accepts one or zero argument')
    else:
        return lambda x: filter_translit(x, type=_type)
| 24.018519 | 82 | 0.662298 |
ae28fbfcfc5475fc99a477407eec02fb25989dcb | 5,240 | py | Python | Model/lookalike-model/lookalike_model/application/pipeline/top_n_similarity_table_generator.py | sanjaynirmal/blue-marlin | 725d614e941e5de76562d354edf11ac18897f242 | [
"Apache-2.0"
] | 1 | 2020-03-06T09:41:49.000Z | 2020-03-06T09:41:49.000Z | Model/lookalike-model/lookalike_model/application/pipeline/top_n_similarity_table_generator.py | sanjaynirmal/blue-marlin | 725d614e941e5de76562d354edf11ac18897f242 | [
"Apache-2.0"
] | null | null | null | Model/lookalike-model/lookalike_model/application/pipeline/top_n_similarity_table_generator.py | sanjaynirmal/blue-marlin | 725d614e941e5de76562d354edf11ac18897f242 | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.html
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
import argparse
import pyspark.sql.functions as fn
from pyspark import SparkContext
from pyspark.sql import HiveContext
from pyspark.sql.types import FloatType, StringType, StructType, StructField, ArrayType, MapType, StructType
# from rest_client import predict, str_to_intlist
import requests
import json
import argparse
from pyspark.sql.functions import udf
from math import sqrt
import time
import numpy as np
import itertools
import heapq
'''
This process generates the top-n-similarity table.
spark-submit --master yarn --num-executors 20 --executor-cores 5 --executor-memory 16G --driver-memory 16G --conf spark.driver.maxResultSize=5g --conf spark.hadoop.hive.exec.dynamic.partition=true --conf spark.hadoop.hive.exec.dynamic.partition.mode=nonstrict top_n_similarity_table_generator.py config.yml
The top-n-similarity table is
|user| top-N-similarity|top-n-users
|:-------------| :------------: |
|user-1-did| [similarity-score-11, similarity-score-12, similarity-score-13] |[user-did-1, user-did-2, user-did-3]|
|user-2-did| [similarity-score-21, similarity-score-22, similarity-score-23] |[user-did-10, user-did-20, user-did-30]|
|user-3-did| [similarity-score-31, similarity-score-32, similarity-score-33] |[user-did-23, user-did-87, user-did-45]|
'''
def __save_as_table(df, table_name, hive_context, create_table):
    """Persist *df* as Hive table *table_name* via a temporary view.

    When *create_table* is truthy the existing table is dropped and recreated
    from the DataFrame's contents.
    """
    if create_table:
        command = """
            DROP TABLE IF EXISTS {}
            """.format(table_name)
        hive_context.sql(command)
        # Expose the DataFrame to SQL, then materialise it as a real table.
        df.createOrReplaceTempView("r907_temp_table")
        command = """
            CREATE TABLE IF NOT EXISTS {} as select * from r907_temp_table
            """.format(table_name)
        hive_context.sql(command)
def run(sc, hive_context, cfg):
    """Build the top-N user-similarity table.

    For each block of users (selected by alpha_did_bucket) the block's score
    vectors are collected and broadcast; every user's running top-N list is
    merged with the dot products against that block via a Spark UDF, and the
    final (did, top_n_user_score) pairs are saved to Hive.
    """
    score_vector_alpha_table = cfg['score_vector_rebucketing']['score_vector_alpha_table']
    similarity_table = cfg['top_n_similarity']['similarity_table']
    N = cfg['top_n_similarity']['top_n']
    command = "SELECT did, score_vector FROM {}".format(score_vector_alpha_table)
    # |0004f3b4731abafa9ac54d04cb88782ed61d30531262decd799d91beb6d6246a|0 |
    # [0.24231663, 0.20828941, 0.0]|
    df = hive_context.sql(command)
    # Start every user with an empty running top-N list.
    df = df.withColumn('top_n_user_score', fn.array())
    alpha_bucket_size = cfg['score_vector_rebucketing']['alpha_did_bucket_size']
    alpha_bucket_step = cfg['top_n_similarity']['alpha_did_bucket_step']
    first_round = True  # NOTE(review): never used below -- leftover?
    for start_bucket in range(0, alpha_bucket_size,alpha_bucket_step):
        # NOTE(review): the BETWEEN upper bound uses alpha_bucket_size, not
        # alpha_bucket_step, so consecutive iterations query overlapping
        # ranges -- confirm whether `start_bucket + alpha_bucket_step - 1`
        # was intended.
        command = "SELECT did, did_bucket, score_vector, alpha_did_bucket FROM {} WHERE alpha_did_bucket BETWEEN {} AND {}".format(score_vector_alpha_table,
                                                                                                                                  start_bucket, start_bucket + alpha_bucket_size - 1)
        df_user = hive_context.sql(command)
        # Collect this block's (did, score_vector) pairs and broadcast them
        # so every executor can score against the whole block.
        block_user = df_user.select('did', 'score_vector').collect()
        block_user = ([_['did'] for _ in block_user], [_['score_vector'] for _ in block_user])
        block_user_broadcast = sc.broadcast(block_user)
        def calculate_similarity(user_score_vector, top_n_user_score):
            # Dot products of this user's vector against the whole block,
            # merged with the running list; keep only the N highest scores.
            user_score_vector = np.array(user_score_vector)
            dids, other_score_vectors = block_user_broadcast.value
            other_score_vectors = np.array(other_score_vectors)
            product = np.matmul(user_score_vector, other_score_vectors.transpose()).tolist()
            user_score_s = list(itertools.izip(dids, product))  # izip: Python 2
            user_score_s.extend(top_n_user_score)
            user_score_s = heapq.nlargest(N, user_score_s, key=lambda x: x[1])
            return user_score_s
        elements_type = StructType([StructField('did', StringType(), False), StructField('score', FloatType(), False)])
        df = df.withColumn('top_n_user_score', udf(calculate_similarity, ArrayType(elements_type))(df.score_vector, df.top_n_user_score))
    __save_as_table(df.select('did', 'top_n_user_score'), similarity_table, hive_context, True)
if __name__ == "__main__":
    start = time.time()
    parser = argparse.ArgumentParser(description=" ")
    parser.add_argument('config_file')
    args = parser.parse_args()
    # Load the YAML job configuration.
    with open(args.config_file, 'r') as yml_file:
        cfg = yaml.safe_load(yml_file)
    sc = SparkContext.getOrCreate()
    sc.setLogLevel('INFO')
    hive_context = HiveContext(sc)
    run(sc=sc, hive_context=hive_context, cfg=cfg)
    sc.stop()
    end = time.time()
    print('Runtime of the program is:', (end - start))
| 40.620155 | 306 | 0.717748 |
ae294288f339abaa44909776daf88e26d1673f50 | 1,056 | py | Python | lib/auth.py | p4lsec/autoshoppr | a0dba3060e26008c2d441358ff7f4a909ba4fcab | [
"MIT"
] | null | null | null | lib/auth.py | p4lsec/autoshoppr | a0dba3060e26008c2d441358ff7f4a909ba4fcab | [
"MIT"
] | null | null | null | lib/auth.py | p4lsec/autoshoppr | a0dba3060e26008c2d441358ff7f4a909ba4fcab | [
"MIT"
] | null | null | null | from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import pickle
import configparser
class AmazonLogin:
    """Selenium helper that restores an Amazon session from pickled cookies."""
    def __init__(self, driver=None):
        # Account landing page used to verify the logged-in state.
        self.url = "https://www.amazon.com/your-account"
        if driver is not None:
            self.driver = driver
        else:
            self.driver = webdriver.Chrome()
        # Shared explicit-wait helper (10 second timeout).
        self.wait = WebDriverWait(self.driver, 10)
    def login(self):
        """Open the account page with saved cookies and click 'Login & security'."""
        try:
            self.driver.get(self.url)
            self.load_cookies()
            self.driver.find_element_by_xpath("//*[contains(text(), 'Login & security')]").click()
            config = configparser.ConfigParser()
            config.read('shoppr.conf')
        except:
            # NOTE(review): bare except, and the message mentions the cart in
            # a login flow -- looks copy-pasted from another handler; confirm.
            raise Exception("Could not add to cart")
    def load_cookies(self):
        """Load the cookies pickled in amazon.pkl into the driver session."""
        cookies = pickle.load(open("amazon.pkl", "rb"))
        for cookie in cookies:
            self.driver.add_cookie(cookie)
ae29895c6324b4119860a3e674198d1b40dd9964 | 1,317 | py | Python | Verulean/days/aoc15.py | BasedJellyfish11/Advent-of-Code-2021 | 9ed84902958c99c341ec2444d5db561c84348911 | [
"MIT"
] | 3 | 2021-12-03T22:40:17.000Z | 2021-12-23T21:17:16.000Z | Verulean/days/aoc15.py | BasedJellyfish11/Advent-of-Code-2021 | 9ed84902958c99c341ec2444d5db561c84348911 | [
"MIT"
] | null | null | null | Verulean/days/aoc15.py | BasedJellyfish11/Advent-of-Code-2021 | 9ed84902958c99c341ec2444d5db561c84348911 | [
"MIT"
] | null | null | null | import numpy as np
import heapq
class PriorityQueue(list):
    """A list maintained as a binary min-heap via the heapq module."""
    def push(self, value):
        """Insert *value* while keeping the heap invariant."""
        return heapq.heappush(self, value)
    def pop(self):
        """Remove and return the smallest element."""
        return heapq.heappop(self)
def neighbors(i, j):
    """Return the 4-neighborhood of (i, j): up, down, left, right."""
    deltas = ((-1, 0), (1, 0), (0, -1), (0, 1))
    return tuple((i + di, j + dj) for di, dj in deltas)
def numpy_dijkstra(costs):
    """Dijkstra over a 2-D cost grid from the top-left to the bottom-right cell.

    Returns the minimal total cost of the cells *entered* along the path
    (the start cell is free) as an int, or None when the goal is unreachable.
    """
    rows, cols = costs.shape
    goal = (rows - 1, cols - 1)
    dist = np.full_like(costs, np.inf)
    dist[0, 0] = 0
    frontier = [(0, (0, 0))]
    while frontier:
        cost_so_far, cell = heapq.heappop(frontier)
        if cell == goal:
            return int(dist[goal])
        r, c = cell
        # The four orthogonal neighbors, clipped to the grid bounds.
        for step in ((r - 1, c), (r + 1, c), (r, c - 1), (r, c + 1)):
            if not (0 <= step[0] < rows and 0 <= step[1] < cols):
                continue
            candidate = cost_so_far + costs[step]
            if candidate < dist[step]:
                dist[step] = candidate
                heapq.heappush(frontier, (candidate, step))
def expand_block(block, M, N):
    """Tile *block* M-by-N times, raising each tile by its taxicab tile
    distance and wrapping the values into 1..9 (AoC 2021 day 15 part 2)."""
    rows, cols = block.shape
    tile_increment = np.add.outer(np.arange(M), np.arange(N))
    tile_increment = np.repeat(np.repeat(tile_increment, rows, axis=0), cols, axis=1)
    return ((np.tile(block, (M, N)) + tile_increment - 1) % 9) + 1
def solve(data):
    """Solve both parts of AoC 2021 day 15 from an iterable of digit strings."""
    grid = np.array([[float(ch) for ch in row] for row in data])
    part_a = numpy_dijkstra(grid)
    part_b = numpy_dijkstra(expand_block(grid, 5, 5))
    return part_a, part_b
ae2d0af0f1b9daeb6ad913a0cc22fcfa911b9c6b | 5,291 | py | Python | pypy/module/_minimal_curses/fficurses.py | microvm/pypy-mu | 6b03fbe93052d0eb3a4c67152c987c16837b3484 | [
"Apache-2.0",
"OpenSSL"
] | 34 | 2015-07-09T04:53:27.000Z | 2021-07-19T05:22:27.000Z | pypy/module/_minimal_curses/fficurses.py | microvm/pypy-mu | 6b03fbe93052d0eb3a4c67152c987c16837b3484 | [
"Apache-2.0",
"OpenSSL"
] | 6 | 2015-05-30T17:20:45.000Z | 2017-06-12T14:29:23.000Z | pypy/module/_minimal_curses/fficurses.py | microvm/pypy-mu | 6b03fbe93052d0eb3a4c67152c987c16837b3484 | [
"Apache-2.0",
"OpenSSL"
] | 11 | 2015-09-07T14:26:08.000Z | 2020-04-10T07:20:41.000Z | """ The ffi for rpython, need to be imported for side effects
"""
from rpython.rtyper.lltypesystem import rffi
from rpython.rtyper.lltypesystem import lltype
from rpython.rtyper.tool import rffi_platform
from rpython.rtyper.extfunc import register_external
from pypy.module._minimal_curses import interp_curses
from rpython.translator.tool.cbuild import ExternalCompilationInfo
# We cannot trust ncurses5-config, it's broken in various ways in
# various versions. For example it might not list -ltinfo even though
# it's needed, or --cflags might be completely empty. On Ubuntu 10.04
# it gives -I/usr/include/ncurses, which doesn't exist at all. Crap.
def try_cflags():
    """Yield candidate header configurations for locating curses.h/term.h."""
    yield ExternalCompilationInfo(includes=['curses.h', 'term.h'])
    # Some distros place the headers under /usr/include/ncurses.
    yield ExternalCompilationInfo(includes=['curses.h', 'term.h'],
                                  include_dirs=['/usr/include/ncurses'])
    yield ExternalCompilationInfo(includes=['ncurses/curses.h',
                                            'ncurses/term.h'])
def try_ldflags():
    """Yield candidate linker configurations for the curses/ncurses libraries."""
    yield ExternalCompilationInfo(libraries=['curses'])
    # Some systems need the separate tinfo library for terminfo symbols.
    yield ExternalCompilationInfo(libraries=['curses', 'tinfo'])
    yield ExternalCompilationInfo(libraries=['ncurses'])
    yield ExternalCompilationInfo(libraries=['ncurses'],
                                  library_dirs=['/usr/lib64'])
def try_tools():
    """Yield ECIs derived from pkg-config / ncurses5-config, when available."""
    try:
        yield ExternalCompilationInfo.from_pkg_config("ncurses")
    except Exception:
        # Tool missing or broken (see module comment): fall through.
        pass
    try:
        yield ExternalCompilationInfo.from_config_tool("ncurses5-config")
    except Exception:
        pass
def try_eci():
    """Yield every candidate ECI: tool-derived first, then cflags x ldflags."""
    for eci in try_tools():
        yield eci.merge(ExternalCompilationInfo(includes=['curses.h',
                                                          'term.h']))
    for eci1 in try_cflags():
        for eci2 in try_ldflags():
            yield eci1.merge(eci2)
def guess_eci():
    """Return the first candidate ECI under which setupterm() is available.

    Raises ImportError when no candidate configuration works.
    """
    for eci in try_eci():
        class CConfig:
            _compilation_info_ = eci
            HAS = rffi_platform.Has("setupterm")
        if rffi_platform.configure(CConfig)['HAS']:
            return eci
    raise ImportError("failed to guess where ncurses is installed. "
                      "You might need to install libncurses5-dev or similar.")
eci = guess_eci()
INT = rffi.INT
INTP = lltype.Ptr(lltype.Array(INT, hints={'nolength':True}))
c_setupterm = rffi.llexternal('setupterm', [rffi.CCHARP, INT, INTP], INT,
compilation_info=eci)
c_tigetstr = rffi.llexternal('tigetstr', [rffi.CCHARP], rffi.CCHARP,
compilation_info=eci)
c_tparm = rffi.llexternal('tparm', [rffi.CCHARP, INT, INT, INT, INT, INT,
INT, INT, INT, INT], rffi.CCHARP,
compilation_info=eci)
ERR = rffi.CConstant('ERR', lltype.Signed)
OK = rffi.CConstant('OK', lltype.Signed)
def curses_setupterm(term, fd):
    """Invoke C setupterm(); translate failures into interp_curses.curses_error."""
    # Out-parameter for setupterm's error code; raw-allocated, freed below.
    intp = lltype.malloc(INTP.TO, 1, flavor='raw')
    err = rffi.cast(lltype.Signed, c_setupterm(term, fd, intp))
    try:
        if err == ERR:
            errret = rffi.cast(lltype.Signed, intp[0])
            if errret == 0:
                msg = "setupterm: could not find terminal"
            elif errret == -1:
                msg = "setupterm: could not find terminfo database"
            else:
                msg = "setupterm: unknown error"
            raise interp_curses.curses_error(msg)
        # Record success so later tigetstr/tparm calls are permitted.
        interp_curses.module_info.setupterm_called = True
    finally:
        lltype.free(intp, flavor='raw')
def curses_setupterm_null_llimpl(fd):
    """Low-level impl: call setupterm with a NULL terminal name."""
    curses_setupterm(lltype.nullptr(rffi.CCHARP.TO), fd)
def curses_setupterm_llimpl(term, fd):
    """Low-level impl: call setupterm with an RPython string terminal name."""
    ll_s = rffi.str2charp(term)
    try:
        curses_setupterm(ll_s, fd)
    finally:
        rffi.free_charp(ll_s)
# Register the low-level implementations for the interpreter-level stubs.
register_external(interp_curses._curses_setupterm_null,
                  [int], llimpl=curses_setupterm_null_llimpl,
                  export_name='_curses.setupterm_null')
register_external(interp_curses._curses_setupterm,
                  [str, int], llimpl=curses_setupterm_llimpl,
                  export_name='_curses.setupterm')
def check_setup_invoked():
    """Raise curses_error unless setupterm() has been called successfully."""
    if not interp_curses.module_info.setupterm_called:
        raise interp_curses.curses_error("must call (at least) setupterm() first")
def tigetstr_llimpl(cap):
    """Low-level impl of tigetstr(cap): return the capability string.

    Raises interp_curses.TermError when the capability is absent or not a
    string capability.
    """
    check_setup_invoked()
    ll_cap = rffi.str2charp(cap)
    try:
        ll_res = c_tigetstr(ll_cap)
        num = lltype.cast_ptr_to_int(ll_res)
        # tigetstr returns NULL (0) or (char *)-1 for missing/non-string caps.
        if num == 0 or num == -1:
            raise interp_curses.TermError()
        res = rffi.charp2str(ll_res)
        return res
    finally:
        rffi.free_charp(ll_cap)
register_external(interp_curses._curses_tigetstr, [str], str,
                  export_name='_curses.tigetstr', llimpl=tigetstr_llimpl)
def tparm_llimpl(s, args):
    """Low-level impl of tparm(s, args): substitute up to nine int parameters."""
    check_setup_invoked()
    # tparm always takes nine int parameters; missing ones default to 0.
    l = [0, 0, 0, 0, 0, 0, 0, 0, 0]
    for i in range(min(len(args), 9)):
        l[i] = args[i]
    ll_s = rffi.str2charp(s)
    # XXX nasty trick stolen from CPython
    ll_res = c_tparm(ll_s, l[0], l[1], l[2], l[3], l[4], l[5], l[6],
                     l[7], l[8])
    rffi.free_charp(ll_s)
    res = rffi.charp2str(ll_res)
    return res
register_external(interp_curses._curses_tparm, [str, [int]], str,
                  export_name='_curses.tparm', llimpl=tparm_llimpl)
| 36.743056 | 82 | 0.634096 |
ae2e72786b0e755905085b12bef4f3ce69f9d8fc | 34 | py | Python | pycalc.py | erhuabushuo/pycalc | a46b85aaafe37ad7cca95ac0198d9bfea985b598 | [
"MIT"
] | null | null | null | pycalc.py | erhuabushuo/pycalc | a46b85aaafe37ad7cca95ac0198d9bfea985b598 | [
"MIT"
] | null | null | null | pycalc.py | erhuabushuo/pycalc | a46b85aaafe37ad7cca95ac0198d9bfea985b598 | [
"MIT"
] | null | null | null | import calcpy
calcpy.calculcate() | 11.333333 | 19 | 0.823529 |
ae2f2e86a4f028f1e691a235394a547ef477d257 | 470 | py | Python | peripherals/cardreader.py | sparkoo/payterm | a8c783583017e65a6c2549a831a7dfa44367dbd1 | [
"WTFPL"
] | null | null | null | peripherals/cardreader.py | sparkoo/payterm | a8c783583017e65a6c2549a831a7dfa44367dbd1 | [
"WTFPL"
] | 4 | 2020-02-26T21:56:57.000Z | 2020-03-01T11:37:39.000Z | peripherals/cardreader.py | sparkoo/payterm | a8c783583017e65a6c2549a831a7dfa44367dbd1 | [
"WTFPL"
] | null | null | null | import RPi.GPIO as GPIO
import time
from mfrc522 import SimpleMFRC522
import importlib.util
spec = importlib.util.spec_from_file_location("conn", "lib/conn.py")
conn = importlib.util.module_from_spec(spec)
spec.loader.exec_module(conn)
def readCard():
    # Block until an RFID card is presented to the MFRC522 reader and
    # return its text payload with surrounding whitespace stripped.
    try:
        time.sleep(1)  # brief settle delay before talking to the reader
        reader = SimpleMFRC522()
        cardid, text = reader.read()  # blocks until a card is scanned
        print(cardid)
        print(text)
        return text.strip()
    finally:
        # Always release the GPIO pins, even if the read fails.
        GPIO.cleanup()


# Register readCard with the connection helper under the "cardreader" name.
conn.writeConn("cardreader", readCard)
| 20.434783 | 68 | 0.723404 |
ae313f7b22dd8a45cb53e8bfba694df52241d4b5 | 1,310 | py | Python | exercises/development/intermediate/exercise_5.py | littlekign/comp-think.github.io | 21bce306c7672b6355a6fdaf260824542dbca595 | [
"CC0-1.0",
"CC-BY-4.0"
] | 40 | 2019-01-25T11:14:30.000Z | 2021-12-05T15:04:11.000Z | exercises/development/intermediate/exercise_5.py | littlekign/comp-think.github.io | 21bce306c7672b6355a6fdaf260824542dbca595 | [
"CC0-1.0",
"CC-BY-4.0"
] | 1 | 2020-11-08T15:18:58.000Z | 2020-11-19T22:44:28.000Z | exercises/development/intermediate/exercise_5.py | littlekign/comp-think.github.io | 21bce306c7672b6355a6fdaf260824542dbca595 | [
"CC0-1.0",
"CC-BY-4.0"
] | 19 | 2019-12-28T16:06:01.000Z | 2021-12-14T15:52:44.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Silvio Peroni <essepuntato@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any purpose
# with or without fee is hereby granted, provided that the above copyright notice
# and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
# OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
# DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
# SOFTWARE.
from collections import deque
# Test case for the function
def test_do_it(queue, number, expected):
    """Return True when do_it(queue, number) equals *expected*, else False."""
    # Directly return the comparison instead of the redundant
    # `if ...: return True / else: return False` pattern.
    return do_it(queue, number) == expected
# Code of the function
def do_it(queue, number):
    """Pop *number* items from the left of *queue* and return the queue.

    When *number* exceeds the queue's length, the queue is left untouched
    and None is returned.
    """
    if number > len(queue):
        return None
    for _ in range(number):
        queue.popleft()
    return queue
# Tests
# Both calls are expected to print True.
print(test_do_it(deque(["a", "b"]), 3, None))
print(test_do_it(deque(["a", "b", "c", "d", "e"]), 3, deque(["d", "e"])))
| 33.589744 | 84 | 0.70458 |
ae31a4d08aeb572fffda497f8fd188e49b5508e5 | 149 | py | Python | modules/2.79/bpy/types/TextCharacterFormat.py | cmbasnett/fake-bpy-module | acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55 | [
"MIT"
] | null | null | null | modules/2.79/bpy/types/TextCharacterFormat.py | cmbasnett/fake-bpy-module | acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55 | [
"MIT"
] | null | null | null | modules/2.79/bpy/types/TextCharacterFormat.py | cmbasnett/fake-bpy-module | acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55 | [
"MIT"
] | null | null | null | class TextCharacterFormat:
material_index = None
use_bold = None
use_italic = None
use_small_caps = None
use_underline = None
| 14.9 | 26 | 0.691275 |
ae3401171f8e9d1d9a120271065ad3caf42b8ad2 | 38 | py | Python | tests/__init__.py | masasin/latexipy | 1f888a44f2077a5c0ef63216616cd24c279e44d0 | [
"MIT"
] | 144 | 2017-08-24T08:58:58.000Z | 2021-04-18T10:38:44.000Z | tests/__init__.py | masasin/latexipy | 1f888a44f2077a5c0ef63216616cd24c279e44d0 | [
"MIT"
] | 424 | 2017-09-04T16:21:10.000Z | 2022-03-28T02:23:25.000Z | tests/__init__.py | masasin/latexipy | 1f888a44f2077a5c0ef63216616cd24c279e44d0 | [
"MIT"
] | 15 | 2017-08-26T08:05:55.000Z | 2019-05-13T22:29:44.000Z | '''Unit test package for latexipy.'''
| 19 | 37 | 0.684211 |
ae340b92aadfe682e4f7ba5a3b3a05872dd322d2 | 192 | py | Python | pyravendb/data/counters.py | CDuPlooy/ravendb-python-client | dbe51ee8eea166e0d9e60897ab480dd9a693366b | [
"MIT"
] | 19 | 2019-02-16T14:39:38.000Z | 2022-03-23T12:27:00.000Z | pyravendb/data/counters.py | CDuPlooy/ravendb-python-client | dbe51ee8eea166e0d9e60897ab480dd9a693366b | [
"MIT"
] | 24 | 2018-10-21T07:31:21.000Z | 2022-03-27T17:27:29.000Z | pyravendb/data/counters.py | CDuPlooy/ravendb-python-client | dbe51ee8eea166e0d9e60897ab480dd9a693366b | [
"MIT"
] | 14 | 2018-08-14T07:58:46.000Z | 2022-01-05T12:20:08.000Z | from enum import Enum
class CounterOperationType(Enum):
    """Kinds of counter operations, with their string wire values."""

    none = "None"
    increment = "Increment"
    delete = "Delete"
    get = "Get"

    def __str__(self):
        # Serialize as the plain value (e.g. "Increment") rather than the
        # default "CounterOperationType.increment" representation.
        return self.value
| 16 | 33 | 0.625 |
ae3425a0e350725139bf2c51d7938fab7269b9d6 | 516 | py | Python | src/lib/spaces/orientedplane.py | Wombatlord/PygamePong | d56b1529fe095e6a30b27b6039d9d52105ad900d | [
"MIT"
] | null | null | null | src/lib/spaces/orientedplane.py | Wombatlord/PygamePong | d56b1529fe095e6a30b27b6039d9d52105ad900d | [
"MIT"
] | 2 | 2021-02-19T05:05:43.000Z | 2021-02-20T02:16:53.000Z | src/lib/spaces/orientedplane.py | Wombatlord/PygamePong | d56b1529fe095e6a30b27b6039d9d52105ad900d | [
"MIT"
] | 1 | 2020-08-13T10:14:46.000Z | 2020-08-13T10:14:46.000Z | from src.lib.spaces.vector import Vector
class OrientedPlane:
    """A plane described by its normal vector (normalised on construction)."""

    def __init__(self, normal: Vector) -> None:
        self.normal = normal.normalise()

    def reflect(self, initialVector: Vector):
        """Reflect *initialVector* off the plane.

        Only vectors travelling into the plane (negative component along
        the normal) are reflected; all others are returned unchanged
        (as the sum with a zero vector, matching the original contract).
        """
        projection: float = initialVector.dot(self.normal)
        if projection >= 0:
            # Travelling away from (or along) the plane: no bounce.
            return initialVector + Vector(0, 0)
        # Cancel the normal component twice over to mirror the vector.
        bounce = self.normal.scale(projection).scale(-2)
        return initialVector + bounce
| 30.352941 | 70 | 0.660853 |
ae344aa9b51e47e1afbcdf4afc821fdaead42258 | 715 | py | Python | core/migrations/0003_alter_carro_chassi_alter_carro_montadora.py | montalvas/django05 | 199f2ba1c757d899a78f8fc40742081bc74a4187 | [
"MIT"
] | null | null | null | core/migrations/0003_alter_carro_chassi_alter_carro_montadora.py | montalvas/django05 | 199f2ba1c757d899a78f8fc40742081bc74a4187 | [
"MIT"
] | null | null | null | core/migrations/0003_alter_carro_chassi_alter_carro_montadora.py | montalvas/django05 | 199f2ba1c757d899a78f8fc40742081bc74a4187 | [
"MIT"
] | null | null | null | # Generated by Django 4.0.2 on 2022-02-02 19:01
import core.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: changes the on_delete behaviour of
    # Carro.chassi (SET with a callable default) and Carro.montadora
    # (SET_DEFAULT with pk 1).

    dependencies = [
        ('core', '0002_carro_motoristas'),
    ]

    operations = [
        migrations.AlterField(
            model_name='carro',
            name='chassi',
            field=models.OneToOneField(on_delete=models.SET(core.models.set_default_chassi), to='core.chassi'),
        ),
        migrations.AlterField(
            model_name='carro',
            name='montadora',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.SET_DEFAULT, to='core.montadora'),
        ),
    ]
| 27.5 | 117 | 0.633566 |
ae371956e205306769109642792d7cbf72cc52de | 156 | py | Python | snake/__init__.py | lparolari/snake | ceaaec051584be768c9541fb106234e6de2b4900 | [
"MIT"
] | 1 | 2020-11-02T11:04:49.000Z | 2020-11-02T11:04:49.000Z | snake/__init__.py | lparolari/snake | ceaaec051584be768c9541fb106234e6de2b4900 | [
"MIT"
] | 112 | 2019-09-24T20:08:23.000Z | 2021-02-08T00:36:07.000Z | snake/__init__.py | lparolari/snake | ceaaec051584be768c9541fb106234e6de2b4900 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Top-level package for snake."""
__author__ = """Luca Parolari"""
__email__ = 'luca.parolari23@gmail.com'
__version__ = '0.2.2'
| 19.5 | 39 | 0.641026 |
ae371c01a5249a7ea65891e859df84f39ceed04c | 1,357 | py | Python | UI for prediction/prediction_file.py | berfin-t/HeartAttackPrediction | a9acbd0356f3c3e4100b1964862242f6afe7da3b | [
"Apache-2.0"
] | null | null | null | UI for prediction/prediction_file.py | berfin-t/HeartAttackPrediction | a9acbd0356f3c3e4100b1964862242f6afe7da3b | [
"Apache-2.0"
] | null | null | null | UI for prediction/prediction_file.py | berfin-t/HeartAttackPrediction | a9acbd0356f3c3e4100b1964862242f6afe7da3b | [
"Apache-2.0"
] | null | null | null | import pickle
import os
import sys
import pandas as pd
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
import warnings
warnings.filterwarnings("ignore", message="Reloaded modules: <module_name>")
def train():
    """Fit a logistic-regression model on heart.csv and persist it.

    Reads the dataset from the working directory, holds out 20% for
    evaluation, pickles the fitted model to 'svc.pkl', then reports the
    hold-out accuracy via test().
    """
    dataset = pd.read_csv('heart.csv')
    labels = dataset["target"]
    features = dataset.drop('target', axis=1)
    X_train, X_test, Y_train, Y_test = train_test_split(
        features, labels, test_size=0.20, random_state=0)
    from sklearn.linear_model import LogisticRegression
    classifier = LogisticRegression(solver='liblinear')
    fitted_model = classifier.fit(X_train, Y_train.values.ravel())
    with open('svc.pkl', 'wb') as model_file:
        pickle.dump(fitted_model, model_file)
    test(X_test, Y_test)
def test(X_test, Y_test):
    """Load the pickled model from 'svc.pkl' and print its accuracy on the hold-out set."""
    with open('svc.pkl', 'rb') as model_file:
        classifier = pickle.load(model_file)
        predictions = classifier.predict(X_test)
        print(accuracy_score(Y_test, predictions))
def find_data_file(filename):
    """Resolve *filename* next to the running program.

    When frozen into an executable (e.g. by PyInstaller, which sets
    sys.frozen), resolve relative to the executable; otherwise relative
    to this source file.
    """
    frozen = getattr(sys, "frozen", False)
    base_dir = os.path.dirname(sys.executable if frozen else __file__)
    return os.path.join(base_dir, filename)
def check_input(data) -> int:
    """Predict the class for one sample.

    *data* is a mapping of feature name to value for a single patient; it
    is wrapped in a one-row DataFrame and fed to the pickled model found
    via find_data_file('svc.pkl'). Returns the first (only) prediction.
    """
    sample = pd.DataFrame(data=data, index=[0])
    with open(find_data_file('svc.pkl'), 'rb') as model_file:
        classifier = pickle.load(model_file)
        prediction = classifier.predict(sample)
        return prediction[0]
# Train (and persist) the model when run as a script.
if __name__=='__main__':
    train()
| 26.096154 | 95 | 0.664702 |
ae386b890d023c6368568c73f1d37d4dc2112c5f | 1,432 | py | Python | bot/models.py | xammi/nash_dom_bot | 9d5dfc7e0120d56c95e020e7e20505b973a5d402 | [
"MIT"
] | null | null | null | bot/models.py | xammi/nash_dom_bot | 9d5dfc7e0120d56c95e020e7e20505b973a5d402 | [
"MIT"
] | null | null | null | bot/models.py | xammi/nash_dom_bot | 9d5dfc7e0120d56c95e020e7e20505b973a5d402 | [
"MIT"
] | null | null | null | from django.db import models
from django.db.models import CASCADE
class House(models.Model):
    # A residential building, linked to its residents' Telegram chat.

    class Meta:
        verbose_name = 'Дом'
        verbose_name_plural = 'дома'

    address = models.CharField(verbose_name='Адрес дома', max_length=255)
    tg_chat_id = models.BigIntegerField(verbose_name='ID чата жильцов')

    def __str__(self):
        return f'Дом #{self.id}'
class HouseCell(models.Model):
    # A section of a house: one floor of one entrance, covering the flats
    # numbered from min_flat to max_flat.

    class Meta:
        verbose_name = 'Пролёт дома'
        verbose_name_plural = 'пролёты дома'

    house = models.ForeignKey('bot.House', verbose_name='Дом', on_delete=CASCADE)
    entry = models.IntegerField(verbose_name='Номер подъезда')
    floor = models.IntegerField(verbose_name='Номер этажа')
    min_flat = models.IntegerField(verbose_name='Квартира от')
    max_flat = models.IntegerField(verbose_name='Квартира до')

    def __str__(self):
        return f'Пролёт #{self.id}'
class Resident(models.Model):
    # A Telegram user living in a specific flat within a house section.

    class Meta:
        verbose_name = 'Житель'
        verbose_name_plural = 'жители'

    cell = models.ForeignKey('bot.HouseCell', verbose_name='Пролёт', on_delete=CASCADE)
    flat = models.IntegerField(verbose_name='Номер квартиры')
    tg_id = models.BigIntegerField(verbose_name='ID в телеграме')
    # NOTE(review): created/updated have no auto_now_add/auto_now, so callers
    # must set them explicitly -- confirm this is intentional.
    created = models.DateTimeField(verbose_name='Дата создания')
    updated = models.DateTimeField(verbose_name='Дата обновления')

    def __str__(self):
        return f'Житель #{self.id}'
| 31.822222 | 87 | 0.703212 |
ae3968619345b1a7bf80058788bc082425067214 | 3,592 | py | Python | tf_ops/genCompileScript.py | chenzhutian/MCCNN | e28ca4a2deeecbfd1c8939ca666fcc010554fcbb | [
"MIT"
] | 90 | 2018-07-05T13:43:43.000Z | 2022-01-21T08:23:06.000Z | tf_ops/genCompileScript.py | DylanWusee/MCCNN | 13c2afb81aa231779b2be564ae31931b1d82e3fa | [
"MIT"
] | 9 | 2018-11-08T14:22:59.000Z | 2022-03-13T08:35:15.000Z | tf_ops/genCompileScript.py | DylanWusee/MCCNN | 13c2afb81aa231779b2be564ae31931b1d82e3fa | [
"MIT"
] | 12 | 2018-11-09T09:31:46.000Z | 2021-06-21T01:23:11.000Z | '''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
\file genCompileScript.py
\brief Python script to generate the compile script for unix systems.
\copyright Copyright (c) 2018 Visual Computing group of Ulm University,
Germany. See the LICENSE file at the top-level directory of
this distribution.
\author pedro hermosilla (pedro-1.hermosilla-casajus@uni-ulm.de)
'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
import argparse
import tensorflow as tf
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Generate the compile script for the MCCNN operations.')
parser.add_argument('--cudaFolder', required=True, help='Path to the CUDA folder')
parser.add_argument('--MLPSize', default=8, type=int, help='Size of the MLPs (default 8)')
parser.add_argument('--debugInfo', action='store_true', help='Print debug information during execution (default: False)')
args = parser.parse_args()
debugString = " -DPRINT_CONV_INFO" if args.debugInfo else ""
with open("compile.sh", "w") as myCompileScript:
myCompileScript.write(args.cudaFolder+"/bin/nvcc -DBLOCK_MLP_SIZE="+str(args.MLPSize)+debugString+" -std=c++11 aabb_gpu.cu -o aabb_gpu.cu.o -c -O2 -DGOOGLE_CUDA=1 -x cu -Xcompiler -fPIC\n")
myCompileScript.write(args.cudaFolder+"/bin/nvcc -DBLOCK_MLP_SIZE="+str(args.MLPSize)+debugString+" -std=c++11 sort_gpu.cu -o sort_gpu.cu.o -c -O2 -DGOOGLE_CUDA=1 -x cu -Xcompiler -fPIC\n")
myCompileScript.write(args.cudaFolder+"/bin/nvcc -DBLOCK_MLP_SIZE="+str(args.MLPSize)+debugString+" -std=c++11 find_neighbors.cu -o find_neighbors.cu.o -c -O2 -DGOOGLE_CUDA=1 -x cu -Xcompiler -fPIC\n")
myCompileScript.write(args.cudaFolder+"/bin/nvcc -DBLOCK_MLP_SIZE="+str(args.MLPSize)+debugString+" -std=c++11 compute_pdf.cu -o compute_pdf.cu.o -c -O2 -DGOOGLE_CUDA=1 -x cu -Xcompiler -fPIC\n")
myCompileScript.write(args.cudaFolder+"/bin/nvcc -DBLOCK_MLP_SIZE="+str(args.MLPSize)+debugString+" -std=c++11 poisson_sampling.cu -o poisson_sampling.cu.o -c -O2 -DGOOGLE_CUDA=1 -x cu -Xcompiler -fPIC\n")
myCompileScript.write(args.cudaFolder+"/bin/nvcc -DBLOCK_MLP_SIZE="+str(args.MLPSize)+debugString+" -std=c++11 spatial_conv.cu -o spatial_conv.cu.o -c -O2 -DGOOGLE_CUDA=1 -x cu -Xcompiler -fPIC\n")
tensorflowInclude = tf.sysconfig.get_include()
tensorflowLib = tf.sysconfig.get_lib()
myCompileScript.write("g++ -std=c++11 -DBLOCK_MLP_SIZE="+str(args.MLPSize)+debugString+" spatial_conv.cc poisson_sampling.cc compute_pdf.cc "\
"find_neighbors.cc sort_gpu.cc aabb_gpu.cc spatial_conv.cu.o poisson_sampling.cu.o compute_pdf.cu.o "\
"find_neighbors.cu.o sort_gpu.cu.o aabb_gpu.cu.o -o MCConv.so -shared -fPIC -I"+tensorflowInclude+" -I"+tensorflowInclude+"/external/nsync/public "\
"-I"+args.cudaFolder+"/include -lcudart -L "+args.cudaFolder+"/lib64/ -L"+tensorflowLib+" -ltensorflow_framework -O2 -D_GLIBCXX_USE_CXX11_ABI=0\n")
with open("MCConvModuleSrc", "r") as mySrcPyScript:
with open("MCConvModule.py", "w") as myDestPyScript:
for line in mySrcPyScript:
myDestPyScript.write(line)
myDestPyScript.write("\n")
myDestPyScript.write("\n")
myDestPyScript.write("def get_block_size():\n")
myDestPyScript.write(" return "+str(args.MLPSize)+"\n")
myDestPyScript.write("\n")
| 71.84 | 214 | 0.65618 |
ae39d5bb797b8ed6a1c3f37606a273b2c5c79dbb | 8,326 | py | Python | tests/test_optimization.py | davidusb-geek/emhass | 5d6a5ad45c26b819c6bc1cb0e8943940d7fc8f17 | [
"MIT"
] | 17 | 2021-09-12T22:32:09.000Z | 2022-03-17T17:45:29.000Z | tests/test_optimization.py | davidusb-geek/emhass | 5d6a5ad45c26b819c6bc1cb0e8943940d7fc8f17 | [
"MIT"
] | 1 | 2021-12-22T21:10:04.000Z | 2021-12-22T21:10:04.000Z | tests/test_optimization.py | davidusb-geek/emhass | 5d6a5ad45c26b819c6bc1cb0e8943940d7fc8f17 | [
"MIT"
] | 2 | 2021-11-03T10:29:05.000Z | 2021-11-19T12:08:24.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import pandas as pd
import numpy as np
import pathlib
import pickle
from datetime import datetime, timezone
from emhass.retrieve_hass import retrieve_hass
from emhass.optimization import optimization
from emhass.forecast import forecast
from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger
# the root folder
root = str(get_root(__file__, num_parent=2))
# create logger
logger, ch = get_logger(__name__, root, save_to_file=False)
class TestOptimization(unittest.TestCase):
    """Tests for the emhass optimization class (perfect, day-ahead and naive-MPC runs)."""

    def setUp(self):
        """Load cached Home Assistant data and build forecast/optimization fixtures.

        Uses the pickled test data under data/test_df_final.pkl instead of a
        live Home Assistant instance (get_data_from_file=True).
        """
        get_data_from_file = True
        params = None
        retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(pathlib.Path(root+'/config_emhass.yaml'), use_secrets=False)
        self.retrieve_hass_conf, self.optim_conf, self.plant_conf = \
            retrieve_hass_conf, optim_conf, plant_conf
        self.rh = retrieve_hass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'],
                                self.retrieve_hass_conf['freq'], self.retrieve_hass_conf['time_zone'],
                                params, root, logger)
        if get_data_from_file:
            with open(pathlib.Path(root+'/data/test_df_final.pkl'), 'rb') as inp:
                self.rh.df_final, self.days_list, self.var_list = pickle.load(inp)
        else:
            self.days_list = get_days_list(self.retrieve_hass_conf['days_to_retrieve'])
            self.var_list = [self.retrieve_hass_conf['var_load'], self.retrieve_hass_conf['var_PV']]
            self.rh.get_data(self.days_list, self.var_list,
                             minimal_response=False, significant_changes_only=False)
        self.rh.prepare_data(self.retrieve_hass_conf['var_load'], load_negative = self.retrieve_hass_conf['load_negative'],
                             set_zero_min = self.retrieve_hass_conf['set_zero_min'],
                             var_replace_zero = self.retrieve_hass_conf['var_replace_zero'],
                             var_interp = self.retrieve_hass_conf['var_interp'])
        self.df_input_data = self.rh.df_final.copy()
        self.fcst = forecast(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
                             params, root, logger, get_data_from_file=get_data_from_file)
        self.df_weather = self.fcst.get_weather_forecast(method=optim_conf['weather_forecast_method'])
        self.P_PV_forecast = self.fcst.get_power_from_weather(self.df_weather)
        self.P_load_forecast = self.fcst.get_load_forecast(method=optim_conf['load_forecast_method'])
        self.df_input_data_dayahead = pd.concat([self.P_PV_forecast, self.P_load_forecast], axis=1)
        self.df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast']
        self.costfun = 'profit'
        self.opt = optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
                                self.fcst.var_load_cost, self.fcst.var_prod_price,
                                self.costfun, root, logger)
        self.df_input_data = self.fcst.get_load_cost_forecast(self.df_input_data)
        self.df_input_data = self.fcst.get_prod_price_forecast(self.df_input_data)
        self.input_data_dict = {
            'retrieve_hass_conf': retrieve_hass_conf,
        }

    def test_perform_perfect_forecast_optim(self):
        """Perfect-forecast optimization returns a tz-aware DataFrame with the cost column."""
        self.opt_res = self.opt.perform_perfect_forecast_optim(self.df_input_data, self.days_list)
        self.assertIsInstance(self.opt_res, type(pd.DataFrame()))
        self.assertIsInstance(self.opt_res.index, pd.core.indexes.datetimes.DatetimeIndex)
        self.assertIsInstance(self.opt_res.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
        self.assertTrue('cost_fun_'+self.costfun in self.opt_res.columns)

    def test_perform_dayahead_forecast_optim(self):
        """Day-ahead optimization: result shape, deferrable-load energy and battery columns."""
        self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead)
        self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead)
        self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim(
            self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast)
        self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame()))
        self.assertIsInstance(self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex)
        self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
        self.assertTrue('cost_fun_'+self.costfun in self.opt_res_dayahead.columns)
        # Deferrable energy must equal nominal power times the configured hours.
        self.assertTrue(self.opt_res_dayahead['P_deferrable0'].sum()*(
            self.retrieve_hass_conf['freq'].seconds/3600) == self.optim_conf['P_deferrable_nom'][0]*self.optim_conf['def_total_hours'][0])
        # Testing estimation of the current index
        now_precise = datetime.now(self.input_data_dict['retrieve_hass_conf']['time_zone']).replace(second=0, microsecond=0)
        idx_closest = self.opt_res_dayahead.index.get_indexer([now_precise], method='ffill')[0]
        idx_closest = self.opt_res_dayahead.index.get_indexer([now_precise], method='nearest')[0]
        # Test the battery
        self.optim_conf.update({'set_use_battery': True})
        self.opt = optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
                                self.fcst.var_load_cost, self.fcst.var_prod_price,
                                self.costfun, root, logger)
        self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim(
            self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast)
        self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame()))
        self.assertTrue('P_batt' in self.opt_res_dayahead.columns)
        self.assertTrue('SOC_opt' in self.opt_res_dayahead.columns)
        self.assertAlmostEqual(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt'], self.plant_conf['SOCtarget'])
        # Test table conversion
        opt_res = pd.read_csv(root+'/data/opt_res_latest.csv', index_col='timestamp')
        cost_cols = [i for i in opt_res.columns if 'cost_' in i]
        table = opt_res[cost_cols].reset_index().sum(numeric_only=True).to_frame(name='Cost Totals').reset_index()

    def test_perform_naive_mpc_optim(self):
        """Naive MPC: battery SOC targets and deferrable energy over a short horizon."""
        self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead)
        self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead)
        # Test the battery
        self.optim_conf.update({'set_use_battery': True})
        self.opt = optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
                                self.fcst.var_load_cost, self.fcst.var_prod_price,
                                self.costfun, root, logger)
        prediction_horizon = 10
        soc_init = 0.4
        soc_final = 0.6
        def_total_hours = [2, 3]
        self.opt_res_dayahead = self.opt.perform_naive_mpc_optim(
            self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast, prediction_horizon,
            soc_init=soc_init, soc_final=soc_final, def_total_hours=def_total_hours)
        self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame()))
        self.assertTrue('P_batt' in self.opt_res_dayahead.columns)
        self.assertTrue('SOC_opt' in self.opt_res_dayahead.columns)
        self.assertTrue(np.abs(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt']-soc_final)<1e-3)
        term1 = self.optim_conf['P_deferrable_nom'][0]*def_total_hours[0]
        term2 = self.opt_res_dayahead['P_deferrable0'].sum()*(self.retrieve_hass_conf['freq'].seconds/3600)
        self.assertTrue(np.abs(term1-term2)<1e-3)
        # Second run: discharging from a higher initial SOC to a lower target.
        soc_init = 0.8
        soc_final = 0.5
        self.opt_res_dayahead = self.opt.perform_naive_mpc_optim(
            self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast, prediction_horizon,
            soc_init=soc_init, soc_final=soc_final, def_total_hours=def_total_hours)
        self.assertAlmostEqual(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt'], soc_final)
if __name__ == '__main__':
    # NOTE(review): unittest.main() calls sys.exit() by default, so the two
    # cleanup lines below are normally unreachable; consider
    # unittest.main(exit=False) if the handler cleanup is required.
    unittest.main()
    ch.close()
    logger.removeHandler(ch)
| 60.773723 | 138 | 0.700336 |
ae3a515565662ff474b5546eb89caaad693236c2 | 223 | py | Python | leetcode_runner/models.py | fbjorn/leetcode-runner | 38569e68a3ec2e420ed54aa509c236748f5d55dc | [
"MIT"
] | null | null | null | leetcode_runner/models.py | fbjorn/leetcode-runner | 38569e68a3ec2e420ed54aa509c236748f5d55dc | [
"MIT"
] | null | null | null | leetcode_runner/models.py | fbjorn/leetcode-runner | 38569e68a3ec2e420ed54aa509c236748f5d55dc | [
"MIT"
] | null | null | null | class Args:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
class TestCase:
def __init__(self, args: Args, answer):
self.args = args
self.answer = answer
| 20.272727 | 43 | 0.591928 |
ae3a5afb8c080bcd642ec9b461aca11065494bcb | 4,555 | py | Python | experiments/counters.py | TenantBase/django-experiments | b75cf11159da4f4c75d9798dff3ddfd1ca454261 | [
"MIT"
] | null | null | null | experiments/counters.py | TenantBase/django-experiments | b75cf11159da4f4c75d9798dff3ddfd1ca454261 | [
"MIT"
] | 1 | 2019-05-29T00:00:15.000Z | 2019-05-29T00:00:15.000Z | experiments/counters.py | TenantBase/django-experiments | b75cf11159da4f4c75d9798dff3ddfd1ca454261 | [
"MIT"
] | null | null | null | from django.conf import settings
from django.utils.functional import cached_property
import redis
from redis.sentinel import Sentinel
from redis.exceptions import ConnectionError, ResponseError
COUNTER_CACHE_KEY = 'experiments:participants:%s'
COUNTER_FREQ_CACHE_KEY = 'experiments:freq:%s'
class Counters(object):
    """Redis-backed per-experiment participant counters.

    Each experiment key maps to two Redis hashes: one keeping the number of
    events per participant (COUNTER_CACHE_KEY), and a histogram of how many
    participants share each event count (COUNTER_FREQ_CACHE_KEY). All Redis
    failures are swallowed so that counter bookkeeping never takes down the
    host application.
    """

    @cached_property
    def _redis(self):
        """Lazily build the Redis client, via Sentinel when configured."""
        if getattr(settings, 'EXPERIMENTS_REDIS_SENTINELS', None):
            sentinel = Sentinel(settings.EXPERIMENTS_REDIS_SENTINELS, socket_timeout=settings.EXPERIMENTS_REDIS_SENTINELS_TIMEOUT)
            host, port = sentinel.discover_master(settings.EXPERIMENTS_REDIS_MASTER_NAME)
        else:
            host = getattr(settings, 'EXPERIMENTS_REDIS_HOST', 'localhost')
            port = getattr(settings, 'EXPERIMENTS_REDIS_PORT', 6379)
        password = getattr(settings, 'EXPERIMENTS_REDIS_PASSWORD', None)
        db = getattr(settings, 'EXPERIMENTS_REDIS_DB', 0)
        return redis.Redis(host=host, port=port, password=password, db=db)

    def increment(self, key, participant_identifier, count=1):
        """Add *count* events for a participant and keep the histogram in sync."""
        if count == 0:
            return

        try:
            cache_key = COUNTER_CACHE_KEY % key
            freq_cache_key = COUNTER_FREQ_CACHE_KEY % key
            new_value = self._redis.hincrby(cache_key, participant_identifier, count)

            # Maintain histogram of per-user counts
            if new_value > count:
                # The participant existed before: move them out of their old bucket.
                self._redis.hincrby(freq_cache_key, new_value - count, -1)
            self._redis.hincrby(freq_cache_key, new_value, 1)
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully
            pass

    def clear(self, key, participant_identifier):
        """Remove a participant's count and fix up the histogram."""
        try:
            # Remove the direct entry
            cache_key = COUNTER_CACHE_KEY % key
            pipe = self._redis.pipeline()
            freq, _ = pipe.hget(cache_key, participant_identifier).hdel(cache_key, participant_identifier).execute()

            # Handle cases where the cache_key isn't found gracefully.
            if freq is None:
                return

            # Remove from the histogram
            freq_cache_key = COUNTER_FREQ_CACHE_KEY % key
            self._redis.hincrby(freq_cache_key, freq, -1)
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully
            pass

    def get(self, key):
        """Return the number of distinct participants counted under *key*."""
        try:
            cache_key = COUNTER_CACHE_KEY % key
            return self._redis.hlen(cache_key)
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully
            return 0

    def get_frequency(self, key, participant_identifier):
        """Return one participant's event count (0 when absent or on failure)."""
        try:
            cache_key = COUNTER_CACHE_KEY % key
            freq = self._redis.hget(cache_key, participant_identifier)
            return int(freq) if freq else 0
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully
            return 0

    def get_frequencies(self, key):
        """Return {event_count: participant_count} for *key* ({} on failure)."""
        try:
            freq_cache_key = COUNTER_FREQ_CACHE_KEY % key
            # In some cases when there are concurrent updates going on, there can
            # briefly be a negative result for some frequency count. We discard these
            # as they shouldn't really affect the result, and they are about to become
            # zero anyway.
            return dict((int(k), int(v)) for (k, v) in self._redis.hgetall(freq_cache_key).items() if int(v) > 0)
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully.
            # Fixed: previously returned tuple() here while the success path
            # returns a dict, which crashed callers that iterate .items().
            return {}

    def reset(self, key):
        """Delete both hashes for *key*. Returns True on success."""
        try:
            cache_key = COUNTER_CACHE_KEY % key
            self._redis.delete(cache_key)
            freq_cache_key = COUNTER_FREQ_CACHE_KEY % key
            self._redis.delete(freq_cache_key)
            return True
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully
            return False

    def reset_pattern(self, pattern_key):
        """Like reset(), but *pattern_key* may contain a Redis glob pattern."""
        try:
            cache_key = COUNTER_CACHE_KEY % pattern_key
            for key in self._redis.keys(cache_key):
                self._redis.delete(key)
            freq_cache_key = COUNTER_FREQ_CACHE_KEY % pattern_key
            for key in self._redis.keys(freq_cache_key):
                self._redis.delete(key)
            return True
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully
            return False
| 38.931624 | 130 | 0.639517 |
ae3bab1dfe4bf59579d4fb381bd53583200e99c5 | 447 | py | Python | irl_gym/envs/env_utils.py | uidilr/irl_gym | 3352cb9189f3d5076a116db6678207e186ff4fc6 | [
"MIT"
] | 1 | 2020-12-29T11:04:56.000Z | 2020-12-29T11:04:56.000Z | irl_gym/envs/env_utils.py | uidilr/irl_gym | 3352cb9189f3d5076a116db6678207e186ff4fc6 | [
"MIT"
] | null | null | null | irl_gym/envs/env_utils.py | uidilr/irl_gym | 3352cb9189f3d5076a116db6678207e186ff4fc6 | [
"MIT"
] | null | null | null | import os
ENV_ASSET_DIR = os.path.join(os.path.dirname(__file__), 'assets')
def get_asset_xml(xml_name):
    # Return the absolute path of an asset XML file bundled under envs/assets.
    return os.path.join(ENV_ASSET_DIR, xml_name)
def test_env(env, T=100):
    """Roll out up to *T* random-action steps in *env*, printing each transition.

    Stops early as soon as the environment reports done.
    """
    action_space = env.action_space
    env.reset()
    for step_idx in range(T):
        obs, reward, done, info = env.step(action_space.sample())
        print(f'---T={step_idx}---')
        print('rew:', reward)
        print('obs:', obs)
        env.render()
        if done:
            break
| 20.318182 | 65 | 0.568233 |
ae3bbabd4550be0f4670cf95d502fca83a0b0369 | 1,483 | py | Python | example/resnet/convert_resnet_pytorch.py | leonskim/webdnn | f97c798c9a659fe953f9dc8c8537b8917e4be7a2 | [
"MIT"
] | 1 | 2021-04-09T15:55:35.000Z | 2021-04-09T15:55:35.000Z | example/resnet/convert_resnet_pytorch.py | leonskim/webdnn | f97c798c9a659fe953f9dc8c8537b8917e4be7a2 | [
"MIT"
] | null | null | null | example/resnet/convert_resnet_pytorch.py | leonskim/webdnn | f97c798c9a659fe953f9dc8c8537b8917e4be7a2 | [
"MIT"
] | null | null | null | """
Example of converting ResNet-50 PyTorch model
"""
import argparse
import os
import torch, torchvision
import numpy as np
from webdnn.backend import generate_descriptor, backend_names
from webdnn.frontend.pytorch import PyTorchConverter
from webdnn.util import console
def generate_graph():
    # Trace a pretrained ResNet-50 with a dummy 1x3x224x224 input and
    # convert it into a WebDNN graph.
    model = torchvision.models.resnet50(pretrained=True)
    dummy_input = torch.autograd.Variable(torch.zeros(1, 3, 224, 224))
    graph = PyTorchConverter().convert(model, dummy_input)
    return graph
def main():
    """Parse CLI options and write a graph descriptor for every requested backend.

    Failures for individual backends are logged and collected; if any backend
    failed, the last exception is re-raised after all backends were attempted.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--backend", default=",".join(backend_names))
    parser.add_argument("--encoding")
    parser.add_argument('--out', '-o', default='output_pytorch',
                        help='Directory to output the graph descriptor')

    # Build the graph before argument parsing, matching the original flow.
    graph = generate_graph()

    args = parser.parse_args()
    os.makedirs(args.out, exist_ok=True)

    failures = []
    for backend in args.backend.split(","):
        try:
            descriptor_data = generate_descriptor(backend, graph, constant_encoder_name=args.encoding)
            descriptor_data.save(args.out)
        except Exception as ex:
            failures.append(ex)
            console.error(f"Failed generating descriptor for backend {backend}: {str(ex)}\n")

    if failures:
        raise failures[-1]
# Script entry point.
if __name__ == "__main__":
    main()
| 28.519231 | 102 | 0.697235 |
ae3bbe9c1d9612593b0ae91960ae41d57309e29d | 436 | py | Python | test/test_getter_setter.py | msztolcman/fileperms | 8a99c5bb981265c18228b58cf44419c032d8d895 | [
"MIT"
] | null | null | null | test/test_getter_setter.py | msztolcman/fileperms | 8a99c5bb981265c18228b58cf44419c032d8d895 | [
"MIT"
] | null | null | null | test/test_getter_setter.py | msztolcman/fileperms | 8a99c5bb981265c18228b58cf44419c032d8d895 | [
"MIT"
] | null | null | null | from fileperms import Permission, Permissions
class TestGetterSetter:
def test_empty(self):
prm = Permissions()
for item in Permission:
assert prm.get(item) == False
def test_other(self):
prm = Permissions()
for item in Permission:
prm.set(item, True)
assert prm.get(item) == True
prm.set(item, False)
assert prm.get(item) == False
| 24.222222 | 45 | 0.577982 |
ae3c1e0a31bb35705167a39456828a45fbe7fc2b | 157 | py | Python | lessons/Test_and_function_programming_in_Python/project/tests/test_sum_benchmark.py | johnklee/oo_dp_lesson | 06814a88b86b38435e0ed8f305ce9e50c1aac1f6 | [
"MIT"
] | null | null | null | lessons/Test_and_function_programming_in_Python/project/tests/test_sum_benchmark.py | johnklee/oo_dp_lesson | 06814a88b86b38435e0ed8f305ce9e50c1aac1f6 | [
"MIT"
] | 7 | 2021-06-07T03:52:37.000Z | 2022-03-14T11:07:31.000Z | lessons/Test_and_function_programming_in_Python/project/tests/test_sum_benchmark.py | johnklee/oo_dp_lesson | 06814a88b86b38435e0ed8f305ce9e50c1aac1f6 | [
"MIT"
] | null | null | null | from my_sum import sum
def test_sum_benchmark(benchmark):
    """Benchmark summing a list of one hundred 1s and sanity-check the result."""
    ones = [1] * 100
    total = benchmark(sum, ones)
    assert total == 100
| 22.428571 | 43 | 0.751592 |
ae3d3e28bf5a8518622d4a9ff1865444e5e3583f | 1,889 | py | Python | Project_1-Alien_Invasion/settings.py | Vandeilsonln/Python-Crash-Course | 39b4f421504618f947672304a8e97edf7bc7f13d | [
"MIT"
] | null | null | null | Project_1-Alien_Invasion/settings.py | Vandeilsonln/Python-Crash-Course | 39b4f421504618f947672304a8e97edf7bc7f13d | [
"MIT"
] | null | null | null | Project_1-Alien_Invasion/settings.py | Vandeilsonln/Python-Crash-Course | 39b4f421504618f947672304a8e97edf7bc7f13d | [
"MIT"
] | null | null | null | import pygame
class Settings():
    """Store all of the tunable settings for Alien Invasion."""

    def __init__(self):
        """Initialize static settings, then the per-game dynamic ones."""
        # Screen Settings.
        self.screen_width = 1080
        self.screen_height = 630
        self.bg_image = pygame.image.load('Project_1-Alien_Invasion/_images/background_stars_moving.jpg')
        self.bg_moving_speed = 0.3
        self.bg_initial_position = -1705

        # Ship Settings
        self.ship_speed_factor = 1.2
        self.ship_limit = 2

        # Bullet settings
        self.bullet_speed_factor = 3
        self.bullet_width = 4
        self.bullet_height = 15
        self.bullet_color = 130, 60, 60
        self.bullets_allowed = 5

        # Alien Settings
        self.alien_speed_factor = 1
        self.fleet_drop_speed = 30
        # fleet direction of 1 represents right; -1 represents left
        self.fleet_direction = 1

        # How quickly the game speeds up
        self.speedup_scale = 1.15
        # How quickly the alien values increase
        self.score_scale = 1.4

        self.initialize_dynamic_settings()

    def initialize_dynamic_settings(self):
        """Initialize settings that change throughout the game."""
        self.ship_speed_factor = 1.5
        self.bullet_speed_factor = 3
        self.alien_speed_factor = 1.1
        self.bg_moving_speed = 0.3
        # Fleet_direction of 1 represents right | -1 represents left.
        self.fleet_direction = 1
        # Scoring
        self.alien_points = 50

    def increase_speed(self):
        # Increase speed settings and alien point values
        self.ship_speed_factor *= self.speedup_scale
        self.bullet_speed_factor *= self.speedup_scale
        self.alien_speed_factor *= self.speedup_scale
        # The background scrolls faster than the ships to keep parallax visible.
        self.bg_moving_speed *= (self.speedup_scale * 1.4)
        self.alien_points = int(self.alien_points * self.score_scale)
ae3d486c599d0d195d2c989a9d3e670a0c3383e1 | 431 | py | Python | connect.py | XtremeCurling/nextbus2pg | 4a7b32ecbc5232c3a7e4a81152aea87b2c80d517 | [
"MIT"
] | null | null | null | connect.py | XtremeCurling/nextbus2pg | 4a7b32ecbc5232c3a7e4a81152aea87b2c80d517 | [
"MIT"
] | 4 | 2018-04-03T21:12:24.000Z | 2018-05-13T22:53:43.000Z | connect.py | XtremeCurling/nextbus2pg | 4a7b32ecbc5232c3a7e4a81152aea87b2c80d517 | [
"MIT"
] | null | null | null | import urllib
import psycopg2
import psycopg2.extras
def pgconnect(pghost, pgdb, pguser):
    """Open and return a psycopg2 connection to a PostgreSQL database.

    The connection is put in autocommit mode (no explicit commit() calls
    needed) and the UUID adapter is registered globally.
    """
    conn = psycopg2.connect(host=pghost, dbname=pgdb, user=pguser)
    # Autocommit spares the caller repetitive connection.commit() calls.
    conn.autocommit = True
    # Make psycopg2 understand UUID columns everywhere.
    psycopg2.extras.register_uuid()
    return conn
| 28.733333 | 78 | 0.74942 |
ae3e733e97f3939f4c5a55b9fab69488409a8357 | 1,153 | py | Python | app/main/views/letter_jobs.py | karlchillmaid/notifications-admin | 9ef6da4ef9e2fa97b7debb4b573cb035a5cb8880 | [
"MIT"
] | null | null | null | app/main/views/letter_jobs.py | karlchillmaid/notifications-admin | 9ef6da4ef9e2fa97b7debb4b573cb035a5cb8880 | [
"MIT"
] | null | null | null | app/main/views/letter_jobs.py | karlchillmaid/notifications-admin | 9ef6da4ef9e2fa97b7debb4b573cb035a5cb8880 | [
"MIT"
] | null | null | null | from flask import redirect, render_template, request, session, url_for
from flask_login import login_required
from app import letter_jobs_client
from app.main import main
from app.utils import user_is_platform_admin
@main.route("/letter-jobs", methods=['GET', 'POST'])
@login_required
@user_is_platform_admin
def letter_jobs():
    """List letter jobs and let a platform admin re-send selected ones.

    POST: send the checked jobs, stash the outcome message and job ids in
    the session, then redirect back (post/redirect/get pattern).
    GET: render the list, marking any just-sent jobs as 'sending'.
    """
    letter_jobs_list = letter_jobs_client.get_letter_jobs()
    if request.method == 'POST':
        # Read the checkbox selection once instead of calling getlist twice.
        job_ids = request.form.getlist('job_id')
        if job_ids:
            session['job_ids'] = job_ids
            response = letter_jobs_client.send_letter_jobs(job_ids)
            msg = response['response']
        else:
            msg = 'No jobs selected'
        session['msg'] = msg
        return redirect(url_for('main.letter_jobs'))
    msg = session.pop('msg', None)
    job_ids = session.pop('job_ids', None)
    if job_ids:
        for job_id in job_ids:
            # Robustness: skip ids no longer present in the refreshed list
            # instead of crashing with IndexError on `[...][0]`.
            job = next((j for j in letter_jobs_list if job_id == j['id']), None)
            if job is not None:
                job['sending'] = 'sending'
    return render_template('views/letter-jobs.html', letter_jobs_list=letter_jobs_list, message=msg)
| 31.162162 | 100 | 0.666956 |
ae3ea51dd07df4bb77e861ac50689fed8f983f65 | 909 | py | Python | dev-test/1_euler/srayan/euler-2.py | sgango/Y1-Project | 89205600552ede6f8da29231cfa52a3538ae8df4 | [
"BSD-2-Clause"
] | 2 | 2020-09-23T13:27:26.000Z | 2021-09-14T14:15:30.000Z | dev-test/1_euler/srayan/euler-2.py | sgango/Y1-Project | 89205600552ede6f8da29231cfa52a3538ae8df4 | [
"BSD-2-Clause"
] | 1 | 2020-06-18T14:02:59.000Z | 2020-06-18T14:02:59.000Z | dev-test/1_euler/srayan/euler-2.py | sgango/Y1-Project | 89205600552ede6f8da29231cfa52a3538ae8df4 | [
"BSD-2-Clause"
] | null | null | null | """
Adapting Euler method to handle 2nd order ODEs
Srayan Gangopadhyay
2020-05-16
"""
import numpy as np
import matplotlib.pyplot as plt
"""
y' = dy/dx
For a function of form y'' = f(x, y, y')
Define y' = v so y'' = v'
"""
def func(y, v, x):
    """Right-hand side of v' = f(x, y, v) for the 2nd-order ODE y'' = x + y' - 3y."""
    return x + v - 3 * y
# PARAMETERS
y0 = 1 # initial condition y(x=0)
v0 = -2 # initial slope y'(x=0)
delta = 0.01 # step size
end = 4 # x-value to stop integration
steps = int(end/delta) + 1 # number of steps (+1 so that x=end is included)
x = np.linspace(0, end, steps) # array of x-values (discrete grid)
y = np.zeros(steps) # solution array, filled by the loop below
v = np.zeros(steps) # array for v = y', integrated alongside y
y[0] = y0 # inserting initial values
v[0] = v0
# INTEGRATING: explicit (forward) Euler on the coupled first-order system
# v' = func(y, v, x) and y' = v; both updates use the previous step's values.
for i in range(1, steps):
    v[i] = v[i-1] + (delta*func(y[i-1], v[i-1], x[i-1]))
    y[i] = y[i-1] + (delta*v[i-1])
plt.plot(x, y, label='Approx. soln (Euler)')
plt.plot(x, y, 'o') # same data again, drawn as point markers
plt.xlabel('x')
plt.ylabel('y')
plt.legend()
plt.show()
| 21.139535 | 67 | 0.59516 |
ae3ecabbefa60b62d05ffb5c99a5c524d7526637 | 73 | py | Python | task3/mask_r_cnn/mark_cvppp.py | HenryLiangzy/COMP9517_Group | 83be7304bee47d52781ea71f06838cd148dbd0bd | [
"Apache-2.0"
] | null | null | null | task3/mask_r_cnn/mark_cvppp.py | HenryLiangzy/COMP9517_Group | 83be7304bee47d52781ea71f06838cd148dbd0bd | [
"Apache-2.0"
] | null | null | null | task3/mask_r_cnn/mark_cvppp.py | HenryLiangzy/COMP9517_Group | 83be7304bee47d52781ea71f06838cd148dbd0bd | [
"Apache-2.0"
] | null | null | null | import cv2
import os
import glob
import numpy as np
def mark(img):
| 9.125 | 18 | 0.712329 |
ae3f83f14ff4a0be7289a02711f0b034c72507db | 3,022 | py | Python | dss_sm_so/tests/test_backends.py | MobileCloudNetworking/dssaas | 87b6f7d60ecc397a88326a955b2ddfd3d73205d1 | [
"Apache-2.0"
] | null | null | null | dss_sm_so/tests/test_backends.py | MobileCloudNetworking/dssaas | 87b6f7d60ecc397a88326a955b2ddfd3d73205d1 | [
"Apache-2.0"
] | null | null | null | dss_sm_so/tests/test_backends.py | MobileCloudNetworking/dssaas | 87b6f7d60ecc397a88326a955b2ddfd3d73205d1 | [
"Apache-2.0"
] | 1 | 2018-10-09T06:28:36.000Z | 2018-10-09T06:28:36.000Z | __author__ = 'florian'
import unittest
from occi.backend import ActionBackend, KindBackend
from sm.sm.backends import ServiceBackend
from mock import patch
from sm.sm.so_manager import SOManager
from occi.core_model import Kind
from occi.core_model import Resource
@patch('mcn.sm.so_manager.CONFIG')
@patch('mcn.sm.so_manager.LOG')
class TestBackendsConstruction(unittest.TestCase):
    """Construction test for ServiceBackend.

    The class-level patches replace the module-global CONFIG and LOG in
    mcn.sm.so_manager for every test method.
    """
    def setUp(self):
        pass
    @patch('os.system')
    @patch('mcn.sm.so_manager.SOManager', spec='mcn.sm.so_manager.SOManager')
    def test_init_for_sanity(self, mock_som, mock_os, mock_log, mock_config):
        # Mock argument order: innermost decorator first (SOManager, then
        # os.system), then the class-level patches bottom-up (LOG, CONFIG).
        mock_os.return_value = 0
        self.service_backend = ServiceBackend()
        # Test that service_backend contains a SOManager instance
        self.assertEqual(self.service_backend.som.__class__, SOManager)
        # assertInstance should work there
        # self.assertIsInstance(self.service_backend.som, SOManager)
        # print type(self.service_backend.som)
class TestBackendsMethods(unittest.TestCase):
    """CRUD-method tests for ServiceBackend.

    Each test patches one SOManager method and checks that the matching
    ServiceBackend call forwards the OCCI entity and extras to it.
    """
    def setUp(self):
        # Build a minimal OCCI Kind/Resource pair used as the test entity.
        kind = Kind('http://schemas.mobile-cloud-networking.eu/occi/sm#',
                    'myservice',
                    title='Test Service',
                    attributes={'mcn.test.attribute1': 'immutable'},
                    related=[Resource.kind],
                    actions=[])
        self.test_entity = Resource('my-id', kind, None)
        # Manually started patchers (stopped in tearDown) so the patches
        # stay active across the whole test body, not just a decorator scope.
        self.patcher_system = patch('os.system', return_value=0)
        self.patcher_system.start()
        self.patcher_config = patch('mcn.sm.so_manager.CONFIG')
        self.patcher_config.start()
        self.patcher_log = patch('mcn.sm.so_manager.LOG')
        self.patcher_log.start()
    # Check why service backend cannot be created there with a mock (mock not taken into account)
    @patch('mcn.sm.so_manager.SOManager.deploy')
    def test_create_for_sanity(self, mock_deploy):
        """create() must delegate to SOManager.deploy with entity + extras."""
        self.service_backend = ServiceBackend()
        self.service_backend.create(self.test_entity, None)
        mock_deploy.assert_called_once_with(self.test_entity, None)
    @patch('mcn.sm.so_manager.SOManager.so_details')
    def test_retrieve_for_sanity(self, mock_so_details):
        """retrieve() must delegate to SOManager.so_details."""
        service_backend = ServiceBackend()
        service_backend.retrieve(self.test_entity, None)
        mock_so_details.assert_called_once_with(self.test_entity, None)
    @patch('mcn.sm.so_manager.SOManager.dispose')
    def test_delete_for_sanity(self, mock_dispose):
        """delete() must delegate to SOManager.dispose."""
        service_backend = ServiceBackend()
        service_backend.delete(self.test_entity, None)
        mock_dispose.assert_called_once_with(self.test_entity, None)
    # def testNotImplemented(self):
    #     service_backend = ServiceBackend()
    #     # self.assertRaises(NotImplementedError, service_backend.update(None, None, None))
    #     self.assertRaises(NotImplementedError, service_backend.replace(None, None, None))
    def tearDown(self):
        self.patcher_config.stop()
        self.patcher_log.stop()
        self.patcher_system.stop()
| 38.74359 | 101 | 0.697551 |
ae41aa44d40af6f5bd17f7b224b76b24b0631ba4 | 4,738 | py | Python | demo/q0w_demo_analyzer/core/fonts.py | YourNorth/rezak-summarizator | 3ab2f4bf1044ea9654b4084a39030987e4b8bfe8 | [
"MIT"
] | 3 | 2020-03-28T16:48:10.000Z | 2020-12-01T17:18:55.000Z | demo/q0w_demo_analyzer/core/fonts.py | YourNorth/rezak-summarizator | 3ab2f4bf1044ea9654b4084a39030987e4b8bfe8 | [
"MIT"
] | 31 | 2020-03-20T17:53:08.000Z | 2021-03-10T11:48:11.000Z | demo/q0w_demo_analyzer/core/fonts.py | YourNorth/rezak-summarizator | 3ab2f4bf1044ea9654b4084a39030987e4b8bfe8 | [
"MIT"
] | 1 | 2020-03-20T05:01:16.000Z | 2020-03-20T05:01:16.000Z | TESTPHRASE = 'Lorem ipsum'
# ANSI COLORS
# ====== FAMILY ===== #
end = '\33[0m'
bold = '\33[1m'
italic = '\33[3m'
underline = '\33[4m'
blink = '\33[5m'
blink2 = '\33[6m'
selected = '\33[7m'
# ====== COLOR ====== #
# greyscale
black = '\33[97m'
grey = '\33[90m'
grey2 = '\33[37m'
white = '\33[30m'
# less saturation
red = '\33[91m'
yellow = '\33[33m'
green = '\33[32m'
beige = '\33[36m'
blue = '\33[94m'
violet = '\33[35m'
# more saturation
red2 = '\33[31m'
yellow2 = '\33[93m'
green2 = '\33[92m'
beige2 = '\33[96m'
blue2 = '\33[34m'
violet2 = '\33[95m'
# === BACKGROUND ==== #
# greyscale
blackbg = '\33[107m'
greybg = '\33[100m'
greybg2 = '\33[47m'
whitebg = '\33[40m'
# less saturation
redbg = '\33[101m'
yellowbg = '\33[43m'
greenbg = '\33[42m'
beigebg = '\33[46m'
bluebg = '\33[104m'
violetbg = '\33[45m'
# more saturation
redbg2 = '\33[41m'
yellowbg2 = '\33[103m'
greenbg2 = '\33[102m'
beigebg2 = '\33[106m'
bluebg2 = '\33[44m'
violetbg2 = '\33[105m'
backs = [whitebg, greybg, greybg2, blackbg, redbg, redbg2, yellowbg, yellowbg2, greenbg, greenbg2, beigebg, beigebg2,
bluebg,
bluebg2, violetbg, violetbg2]
simples = [white, grey, grey2, black, red, red2, yellow, yellow2, green, green2, beige, beige2, blue,
blue2, violet, violet2]
# TODO: lists => dict with pairs; bg, sm => invert value (bg <=> sm)
def bg(simple_color):
    """Return the background code paired with *simple_color* (parallel lists)."""
    position = simples.index(simple_color)
    return backs[position]
def sm(back_color):
    """Inverse of bg(): return the foreground code paired with *back_color*."""
    position = backs.index(back_color)
    return simples[position]
def enhance(color):
    """Return the more saturated variant of *color* (next slot in its list).

    Codes at odd indices beyond the greyscale block are already the
    saturated variant and come back unchanged, as does any unknown code.
    """
    palette = simples if color in simples else backs
    if color not in palette:
        return color
    ind = palette.index(color)
    if ind % 2 == 1 and ind > 2:
        return color
    return palette[ind + 1]
def paint(value, content_color=beige, next_color=end, total=False):
    """Wrap *value* in ANSI codes: content_color before, next_color after.

    With total=True, every known color code except content_color is first
    stripped out of the text itself.
    """
    text = str(value)
    if total:
        for code in simples + backs + [end]:
            if code != content_color:
                text = text.replace(code, '')
    return content_color + text + next_color
def family():
    """Print one demo line per text-style (SGR "family") escape code."""
    print('bold:     | %s' % bold + TESTPHRASE + end)
    print('italic:   | %s' % italic + TESTPHRASE + end)
    print('url:      | %s' % underline + TESTPHRASE + end)
    print('blink:    | %s' % blink + TESTPHRASE + end)
    print('blink2:   | %s' % blink2 + TESTPHRASE + end)
    print('selected: | %s' % selected + TESTPHRASE + end)
def color():
    """Print one demo line per foreground color code."""
    print('black:   | %s' % black + TESTPHRASE + end)
    print('grey:    | %s' % grey + TESTPHRASE + end)
    print('grey2:   | %s' % grey2 + TESTPHRASE + end)
    print('white:   | %s' % white + TESTPHRASE + end)
    print('red:     | %s' % red + TESTPHRASE + end)
    print('red2:    | %s' % red2 + TESTPHRASE + end)
    print('yellow:  | %s' % yellow + TESTPHRASE + end)
    print('yellow2: | %s' % yellow2 + TESTPHRASE + end)
    print('green:   | %s' % green + TESTPHRASE + end)
    print('green2:  | %s' % green2 + TESTPHRASE + end)
    print('beige:   | %s' % beige + TESTPHRASE + end)
    print('beige2:  | %s' % beige2 + TESTPHRASE + end)
    print('blue:    | %s' % blue + TESTPHRASE + end)
    print('blue2:   | %s' % blue2 + TESTPHRASE + end)
    print('violet:  | %s' % violet + TESTPHRASE + end)
    print('violet2: | %s' % violet2 + TESTPHRASE + end)
def background():
    """Print one demo line per background color code.

    A contrasting foreground (grey/black/white) is added where the default
    foreground would be unreadable on that background.
    """
    print('blackbg:   | %s' % blackbg + TESTPHRASE + end)
    print('greybg:    | %s' % greybg + TESTPHRASE + end)
    print('greybg2:   | %s' % greybg2 + grey + TESTPHRASE + end)
    print('whitebg:   | %s' % whitebg + black + TESTPHRASE + end)
    print('redbg:     | %s' % redbg + white + TESTPHRASE + end)
    print('redbg2:    | %s' % redbg2 + white + TESTPHRASE + end)
    print('yellowbg:  | %s' % yellowbg + white + TESTPHRASE + end)
    print('yellowbg2: | %s' % yellowbg2 + grey2 + TESTPHRASE + end)
    print('greenbg:   | %s' % greenbg + white + TESTPHRASE + end)
    print('greenbg2:  | %s' % greenbg2 + white + TESTPHRASE + end)
    print('beigebg:   | %s' % beigebg + white + TESTPHRASE + end)
    print('beigebg2:  | %s' % beigebg2 + white + TESTPHRASE + end)
    print('bluebg:    | %s' % bluebg + white + TESTPHRASE + end)
    print('bluebg2:   | %s' % bluebg2 + white + TESTPHRASE + end)
    print('violetbg:  | %s' % violetbg + white + TESTPHRASE + end)
    print('violetbg2: | %s' % violetbg2 + white + TESTPHRASE + end)
# Demo entry point: exercise the helpers, then dump all three palettes.
if __name__ == "__main__":
    if blackbg in backs:
        print(bg(simple_color=red2) + black + TESTPHRASE + end)
        print(sm(back_color=beigebg) + TESTPHRASE + end)
        print(paint(value=TESTPHRASE, content_color=red2))
        print(enhance(color=violet) + TESTPHRASE + end)
        print(enhance(color=whitebg) + TESTPHRASE + end)
        family()
        color()
        background()
| 31.798658 | 117 | 0.58358 |
ae41b46656d025e136cbbd3d68dd912515307e97 | 1,370 | py | Python | setup.py | eduk8s/prototype-cli | 74443dafb08e5b65f48ea3b9a7a03a803f79437a | [
"Apache-2.0"
] | 1 | 2019-12-30T02:52:56.000Z | 2019-12-30T02:52:56.000Z | setup.py | eduk8s/prototype-cli | 74443dafb08e5b65f48ea3b9a7a03a803f79437a | [
"Apache-2.0"
] | null | null | null | setup.py | eduk8s/prototype-cli | 74443dafb08e5b65f48ea3b9a7a03a803f79437a | [
"Apache-2.0"
] | null | null | null | import sys
import os
from setuptools import setup
# NOTE(review): file handle is never closed (fine for a short-lived setup run).
long_description = open("README.rst").read()
classifiers = [
    "Development Status :: 3 - Alpha",
    "License :: OSI Approved :: Apache Software License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
]
setup_kwargs = dict(
    name="eduk8s-cli",
    version="0.1.0",
    description="Command line client for eduk8s.",
    long_description=long_description,
    url="https://github.com/eduk8s/eduk8s-cli",
    author="Graham Dumpleton",
    author_email="Graham.Dumpleton@gmail.com",
    license="Apache License, Version 2.0",
    python_requires=">=3.7.0",
    classifiers=classifiers,
    keywords="eduk8s kubernetes",
    packages=["eduk8s", "eduk8s.cli", "eduk8s.kube",],
    package_dir={"eduk8s": "src/eduk8s"},
    # NOTE(review): "eduks.crds" looks like a typo for "eduk8s.crds", and
    # neither name appears in `packages` above -- this package_data entry is
    # likely a no-op; confirm against the package layout.
    package_data={"eduks.crds": ["session.yaml", "workshop.yaml"],},
    entry_points={
        "console_scripts": ["eduk8s = eduk8s.cli:main"],
        "eduk8s_cli_plugins": [
            "workshop = eduk8s.cli.workshop",
            "session = eduk8s.cli.session",
            "install = eduk8s.cli.install",
        ],
    },
    install_requires=[
        "click",
        "requests",
        "rstr",
        "PyYaml",
        "kopf==0.23.2",
        "openshift==0.10.1",
    ],
)
setup(**setup_kwargs)
| 27.4 | 68 | 0.607299 |
ae444d42c02b963c853a9f963e814c548f5a9dae | 1,652 | py | Python | torchsupport/training/score_supervised.py | bobelly/torchsupport | 5aa0a04f20c193ec99310f5d6a3375d2e95e740d | [
"MIT"
] | 18 | 2019-05-02T16:32:15.000Z | 2021-04-16T09:33:54.000Z | torchsupport/training/score_supervised.py | bobelly/torchsupport | 5aa0a04f20c193ec99310f5d6a3375d2e95e740d | [
"MIT"
] | 5 | 2019-10-14T13:46:49.000Z | 2021-06-08T11:48:34.000Z | torchsupport/training/score_supervised.py | bobelly/torchsupport | 5aa0a04f20c193ec99310f5d6a3375d2e95e740d | [
"MIT"
] | 12 | 2019-05-12T21:34:24.000Z | 2021-07-15T14:14:16.000Z | import random
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as func
from torchsupport.data.io import netwrite, to_device, make_differentiable
from torchsupport.training.energy import DenoisingScoreTraining
from torchsupport.training.samplers import AnnealedLangevin
class ScoreSupervisedTraining(DenoisingScoreTraining):
    """Denoising score matching with an auxiliary classification head.

    The score network is expected to return a (score, logits) pair; the
    training loss is the parent's score-matching loss plus cross-entropy
    on the logits.
    """
    def logit_energy(self, logits):
        # Energy of the joint EBM view: -logsumexp over class logits.
        return -logits.logsumexp(dim=-1)
    def create_score(self):
        # Wrap self.score so samplers see only the score, not the logits.
        def _score(data, sigma, *args):
            score, logits = self.score(data, sigma, *args)
            return score
        return _score
    def classifier_loss(self, logits, labels):
        """Cross-entropy loss of the classification head."""
        return func.cross_entropy(logits, labels)
    def sample(self):
        """Draw samples via annealed Langevin dynamics over the sigma ladder."""
        self.score.eval()
        with torch.no_grad():
            # Noise levels: sigma * factor**idx for idx in [0, n_sigma).
            integrator = AnnealedLangevin([
                self.sigma * self.factor ** idx for idx in range(self.n_sigma)
            ])
            prep = to_device(self.prepare_sample(), self.device)
            data, *args = self.data_key(prep)
            result = integrator.integrate(
                self.create_score(),
                data, *args
            ).detach()
        # Restore training mode after sampling.
        self.score.train()
        return to_device((result, data, *args), self.device)
    def run_energy(self, data):
        """Forward pass: returns everything energy_loss() needs, plus labels."""
        data, labels = data
        data, *args = self.data_key(data)
        noisy, sigma = self.noise(data)
        score, logits = self.score(noisy, sigma, *args)
        return score, data, noisy, sigma, logits, labels
    def energy_loss(self, score, data, noisy, sigma, logits, labels):
        """Parent score-matching loss plus the classifier cross-entropy."""
        energy = super().energy_loss(score, data, noisy, sigma)
        classifier = self.classifier_loss(logits, labels)
        self.current_losses["classifier"] = float(classifier)
        return energy + classifier
| 30.592593 | 73 | 0.70339 |
ae44d99a916d3dc0c3e4c682ab78f7a52d9f0c8b | 332 | py | Python | src/modeling/models/timer.py | NovaSBE-DSKC/predict-campaing-sucess-rate | fec339aee7c883f55d64130eb69e490f765ee27d | [
"MIT"
] | null | null | null | src/modeling/models/timer.py | NovaSBE-DSKC/predict-campaing-sucess-rate | fec339aee7c883f55d64130eb69e490f765ee27d | [
"MIT"
] | null | null | null | src/modeling/models/timer.py | NovaSBE-DSKC/predict-campaing-sucess-rate | fec339aee7c883f55d64130eb69e490f765ee27d | [
"MIT"
] | null | null | null | import time
class Timer():
    """Simple wall-clock timer: started on construction, reported by end()."""

    def __init__(self):
        # Wall-clock timestamp taken when the timer is created.
        self.start = time.time()

    def end(self):
        """Print the elapsed time as "Xm Ys" and return it in seconds.

        Bug fix: the original assigned ``self.end = time.time()``, which
        replaced this bound method with a float and made any second call
        fail with ``TypeError: 'float' object is not callable``. A local
        variable is used instead, so end() may be called repeatedly.
        """
        elapsed_time = time.time() - self.start
        minutes = int(elapsed_time // 60)
        seconds = elapsed_time % 60
        print("Elapsed time: {}m {}s".format(minutes, round(seconds)))
        return elapsed_time
| 22.133333 | 70 | 0.581325 |
ae490aaf317fe81f8776bee9c9b05dfe568d8efd | 3,538 | py | Python | tests/system/workspace_factory.py | davetcoleman/catkin_tools | 3dd28ffab0e48775b14c6bab5a7b8b974cdd126c | [
"Apache-2.0"
] | null | null | null | tests/system/workspace_factory.py | davetcoleman/catkin_tools | 3dd28ffab0e48775b14c6bab5a7b8b974cdd126c | [
"Apache-2.0"
] | null | null | null | tests/system/workspace_factory.py | davetcoleman/catkin_tools | 3dd28ffab0e48775b14c6bab5a7b8b974cdd126c | [
"Apache-2.0"
] | null | null | null | import os
import shutil
from ..utils import temporary_directory
class workspace_factory(temporary_directory):
    """Context manager yielding a WorkspaceFactory rooted in a temp directory.

    Extends temporary_directory: on __enter__ the temp dir is created and a
    WorkspaceFactory pointing at it (with the given source space name) is
    returned instead of the bare path.
    """
    def __init__(self, source_space='src', prefix=''):
        super(workspace_factory, self).__init__(prefix=prefix)
        self.source_space = source_space
    def __enter__(self):
        # Parent __enter__ creates and returns the temporary directory path.
        self.temporary_directory = super(workspace_factory, self).__enter__()
        self.workspace_factory = WorkspaceFactory(self.temporary_directory, self.source_space)
        return self.workspace_factory
    def __exit__(self, exc_type, exc_value, traceback):
        # Delegate cleanup (temp dir removal) to the parent context manager.
        super(workspace_factory, self).__exit__(exc_type, exc_value, traceback)
class WorkspaceFactory(object):
    """Generates a catkin workspace of stub cmake packages for tests.

    Packages are registered via add_package() and materialized on disk
    (package.xml + minimal CMakeLists.txt) by build().
    """
    def __init__(self, workspace, source_space):
        self.workspace = workspace
        # Source space lives inside the workspace directory.
        self.source_space = os.path.join(self.workspace, source_space)
        self.packages = {}
    class Package(object):
        # Lightweight record of a package's name and dependency lists.
        def __init__(self, name, depends, build_depends, run_depends, test_depends):
            self.name = name
            # `depends` contributes to both build and run dependencies.
            self.build_depends = (build_depends or []) + (depends or [])
            self.run_depends = (run_depends or []) + (depends or [])
            self.test_depends = (test_depends or [])
    def add_package(self, pkg_name, depends=None, build_depends=None, run_depends=None, test_depends=None):
        """Register a package to be written out by build()."""
        self.packages[pkg_name] = self.Package(pkg_name, depends, build_depends, run_depends, test_depends)
    def build(self):
        """Write every registered package into the source space on disk."""
        cwd = os.getcwd()
        if not os.path.isdir(self.workspace):
            if os.path.exists(self.workspace):
                raise RuntimeError("Cannot build workspace in '{0}' because it is a file".format(self.workspace))
            os.makedirs(self.workspace)
        if os.path.exists(self.source_space):
            print("WARNING: source space given to WorkspaceFactory exists, clearing before build()'ing")
            self.clear()
        os.makedirs(self.source_space)
        try:
            os.chdir(self.source_space)
            for name, pkg in self.packages.items():
                pkg_dir = os.path.join(self.source_space, name)
                os.makedirs(pkg_dir)
                pkg_xml_path = os.path.join(pkg_dir, 'package.xml')
                pkg_xml = """\
<?xml version="1.0"?>
<package>
  <name>{name}</name>
  <version>0.0.0</version>
  <description>
    Description for {name}
  </description>
  <maintainer email="person@email.com">Firstname Lastname</maintainer>
  <license>MIT</license>
"""
                pkg_xml += '\n'.join(
                    ['  <build_depend>{0}</build_depend>'.format(x) for x in pkg.build_depends] +
                    ['  <run_depend>{0}</run_depend>'.format(x) for x in pkg.run_depends] +
                    ['  <test_depend>{0}</test_depend>'.format(x) for x in pkg.test_depends]
                )
                pkg_xml += """
  <export>
    <build_type>cmake</build_type>
  </export>
</package>
"""
                with open(pkg_xml_path, 'w') as f:
                    f.write(pkg_xml.format(name=name))
                cmakelists_txt_path = os.path.join(pkg_dir, 'CMakeLists.txt')
                cmakelists_txt = """\
cmake_minimum_required(VERSION 2.8.3)
project({name})
add_custom_target(install)
"""
                with open(cmakelists_txt_path, 'w') as f:
                    f.write(cmakelists_txt.format(name=name, find_package=' '.join(pkg.build_depends)))
        finally:
            # Always restore the caller's working directory.
            os.chdir(cwd)
    def clear(self):
        """Delete the whole workspace tree if it exists."""
        if os.path.exists(self.workspace):
            shutil.rmtree(self.workspace)
| 37.242105 | 113 | 0.623233 |
ae4a47fd92f1a12f864fae2ce0feac13263ca7ac | 1,019 | py | Python | setup.py | thermokarst-forks/q2-plugin-template | 0583ed514a7476ae75fd7a052043e0aec2faecb9 | [
"BSD-3-Clause"
] | 5 | 2021-05-10T14:23:11.000Z | 2022-03-04T14:37:15.000Z | setup.py | thermokarst-forks/q2-plugin-template | 0583ed514a7476ae75fd7a052043e0aec2faecb9 | [
"BSD-3-Clause"
] | 2 | 2021-05-12T15:08:31.000Z | 2021-07-13T13:57:24.000Z | setup.py | thermokarst-forks/q2-plugin-template | 0583ed514a7476ae75fd7a052043e0aec2faecb9 | [
"BSD-3-Clause"
] | 3 | 2021-05-12T15:02:12.000Z | 2022-02-09T13:33:19.000Z | # ----------------------------------------------------------------------------
# Copyright (c) 2021, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from setuptools import find_packages, setup
import versioneer
setup(
    name='q2-plugin-name',
    # Version and build commands are managed by versioneer (from VCS tags).
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    license='BSD-3-Clause',
    packages=find_packages(),
    author="Michal Ziemski",
    author_email="ziemski.michal@gmail.com",
    description=("This is a template for building a new QIIME 2 plugin."),
    url="https://github.com/bokulich-lab/q2-plugin-template",
    # Registers the plugin object so the `qiime2` framework can discover it.
    entry_points={
        'qiime2.plugins':
        ['q2-plugin-name=q2_plugin_name.plugin_setup:plugin']
    },
    # Ship the citations file alongside the package code.
    package_data={
        'q2_plugin_name': [
            'citations.bib'
        ],
    },
    zip_safe=False,
)
| 29.970588 | 78 | 0.570167 |
ae4a834438c1be65dcec72110a53d1ee4b52eb26 | 7,722 | py | Python | zero/recommendation_algorithm.py | Akulen/mangaki-zero | 5eb2de06b8684ed948b8b903e9f567f06c35e3ef | [
"MIT"
] | null | null | null | zero/recommendation_algorithm.py | Akulen/mangaki-zero | 5eb2de06b8684ed948b8b903e9f567f06c35e3ef | [
"MIT"
] | null | null | null | zero/recommendation_algorithm.py | Akulen/mangaki-zero | 5eb2de06b8684ed948b8b903e9f567f06c35e3ef | [
"MIT"
] | null | null | null | from zero.side import SideInformation
from zero.chrono import Chrono
from collections import defaultdict
from itertools import product
import numpy as np
import pickle
import os.path
import logging
class RecommendationAlgorithmFactory:
def __init__(self):
self.algorithm_registry = {}
self.algorithm_factory = {}
self.logger = logging.getLogger(__name__ + '.' +
self.__class__.__name__)
self.initialized = False
self.size = 0
def initialize(self):
# FIXME: make it less complicated and go for a commonly used design
# pattern.
# Behind the hood, it's called in `utils.__init__.py` which triggers
# the `algos.__init__.py`
# which in turn triggers registration on this instance.
# Then, once it reach `recommendation_algorithm` file, it's good to go.
self.logger.debug('Recommendation algorithm factory initialized.'
'{} algorithms available in the factory.'
.format(len(self.algorithm_registry)))
self.initialized = True
def register(self, name, klass, default_kwargs):
self.algorithm_registry[name] = klass
self.algorithm_factory[name] = default_kwargs
self.logger.debug('Registered {} as a recommendation algorithm'.format(
name))
class RecommendationAlgorithm:
factory = RecommendationAlgorithmFactory()
def __init__(self, verbose_level=1):
self.verbose_level = verbose_level
self.chrono = Chrono(self.verbose_level)
self.nb_users = None
self.nb_works = None
self.size = 0 # For backup files
self.metrics = {category: defaultdict(list)
for category in {'train', 'test'}}
self.dataset = None
self.X_train = None
self.y_train = None
self.X_test = None
self.y_test = None
def get_backup_path(self, folder, filename):
if not self.is_serializable:
raise NotImplementedError
if filename is None:
filename = '%s.pickle' % self.get_shortname()
return os.path.join(folder, filename)
# def has_backup(self, filename=None):
# if filename is None:
# filename = self.get_backup_filename()
# return os.path.isfile(self.get_backup_path(filename))
@property
def is_serializable(self):
return False
def save(self, folder, filename=None):
self.backup_path = self.get_backup_path(folder, filename)
with open(self.backup_path, 'wb') as f:
pickle.dump(self.__dict__, f, pickle.HIGHEST_PROTOCOL)
self.size = os.path.getsize(self.backup_path) # In bytes
def load(self, folder, filename=None):
"""
This function raises FileNotFoundException if no backup exists.
"""
self.backup_path = self.get_backup_path(folder, filename)
with open(self.backup_path, 'rb') as f:
backup = pickle.load(f)
self.__dict__.update(backup)
def delete_snapshot(self):
os.remove(self.backup_path)
def recommend(self, user_ids, item_ids=None, k=None, method='mean'):
"""
Recommend :math:`k` items to a group of users.
:param user_ids: the users
:param item_ids: a subset of items. If is it None, then it is all items.
:param k: the number of items to recommend, if None then it is all items.
:param method: a way to combine the predictions. By default it is mean.
:returns: a numpy array with two columns, `item_id` and recommendation score
:complexity: :math:`O(N + K \log K)`
"""
if item_ids is None:
item_ids = np.arange(self.nb_works)
n = len(item_ids)
if k is None:
k = n
X = np.array(list(product(user_ids, item_ids)))
pred = self.predict(X).reshape(len(user_ids), -1)
if method == 'mean':
combined_pred = pred.mean(axis=0)
indices = np.argpartition(combined_pred, n - k)[-k:]
results = np.empty(k, dtype=[('item_id', int), ('score', combined_pred.dtype)])
results['item_id'] = indices
results['score'] = combined_pred
results.sort(order='score')
return results[::-1]
else:
raise NotImplementedError
def load_tags(self, T=None, perform_scaling=True, with_mean=False):
side = SideInformation(T, perform_scaling, with_mean)
self.nb_tags = side.nb_tags
self.T = side.T
def set_parameters(self, nb_users, nb_works):
self.nb_users = nb_users
self.nb_works = nb_works
def get_shortname(self):
return 'algo'
@staticmethod
def compute_rmse(y_pred, y_true):
return np.power(y_true - y_pred, 2).mean() ** 0.5
@staticmethod
def compute_mae(y_pred, y_true):
return np.abs(y_true - y_pred).mean()
def get_ranked_gains(self, y_pred, y_true):
return y_true[np.argsort(y_pred)[::-1]]
def compute_dcg(self, y_pred, y_true):
'''
Computes the discounted cumulative gain as stated in:
https://gist.github.com/bwhite/3726239
'''
ranked_gains = self.get_ranked_gains(y_pred, y_true)
return self.dcg_at_k(ranked_gains, 100)
def compute_ndcg(self, y_pred, y_true):
ranked_gains = self.get_ranked_gains(y_pred, y_true)
return self.ndcg_at_k(ranked_gains, 100)
def dcg_at_k(self, r, k):
r = np.asfarray(r)[:k]
if r.size:
return np.sum(np.subtract(np.power(2, r), 1) /
np.log2(np.arange(2, r.size + 2)))
return 0.
def ndcg_at_k(self, r, k):
idcg = self.dcg_at_k(sorted(r, reverse=True), k)
if not idcg:
return 0.
return self.dcg_at_k(r, k) / idcg
def compute_metrics(self):
if self.X_train is not None:
y_train_pred = self.predict(self.X_train)
train_rmse = self.compute_rmse(self.y_train, y_train_pred)
self.metrics['train']['rmse'].append(train_rmse)
logging.warning('Train RMSE=%f', train_rmse)
if self.X_test is not None:
y_test_pred = self.predict(self.X_test)
test_rmse = self.compute_rmse(self.y_test, y_test_pred)
self.metrics['test']['rmse'].append(test_rmse)
logging.warning('Test RMSE=%f', test_rmse)
@staticmethod
def available_evaluation_metrics():
return ['rmse', 'mae', 'dcg', 'ndcg']
@classmethod
def register_algorithm(cls, name, klass, default_kwargs=None):
cls.factory.register(name, klass, default_kwargs)
@classmethod
def list_available_algorithms(cls):
return list(cls.factory.algorithm_registry.keys())
@classmethod
def instantiate_algorithm(cls, name):
klass = cls.factory.algorithm_registry.get(name)
default_kwargs = cls.factory.algorithm_factory.get(name) or {}
if not klass:
raise KeyError('No algorithm named "{}" in the registry! Did you '
'forget a @register_algorithm? A typo?'
.format(name))
return klass(**default_kwargs)
def __str__(self):
return '[%s]' % self.get_shortname().upper()
def register_algorithm(algorithm_name, default_kwargs=None):
if default_kwargs is None:
default_kwargs = {}
def decorator(cls):
RecommendationAlgorithm.register_algorithm(algorithm_name, cls,
default_kwargs)
return cls
return decorator
| 35.916279 | 91 | 0.615644 |
ae4aea0f2b66c03f8fc9b59889443427e5fe285c | 150,103 | py | Python | venv/Lib/site-packages/pyo/lib/_wxwidgets.py | mintzer/pupillometry-rf-back | cfa86fa984a49dce0123798f8de5b838c02e10d5 | [
"CC-BY-4.0"
] | null | null | null | venv/Lib/site-packages/pyo/lib/_wxwidgets.py | mintzer/pupillometry-rf-back | cfa86fa984a49dce0123798f8de5b838c02e10d5 | [
"CC-BY-4.0"
] | null | null | null | venv/Lib/site-packages/pyo/lib/_wxwidgets.py | mintzer/pupillometry-rf-back | cfa86fa984a49dce0123798f8de5b838c02e10d5 | [
"CC-BY-4.0"
] | null | null | null | from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
"""
Copyright 2009-2015 Olivier Belanger
This file is part of pyo, a python module to help digital signal
processing script creation.
pyo is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
pyo is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with pyo. If not, see <http://www.gnu.org/licenses/>.
"""
import wx, os, sys, math, time, unicodedata
import wx.stc as stc
from ._core import rescale
# Compatibility shims: under wxPython Phoenix, re-create the classic
# wxPython aliases this module uses so the rest of the file runs unchanged.
if "phoenix" in wx.version():
    wx.GraphicsContext_Create = wx.GraphicsContext.Create
    wx.EmptyBitmap = wx.Bitmap
    wx.EmptyImage = wx.Image
    wx.BitmapFromImage = wx.Bitmap
    wx.Image_HSVValue = wx.Image.HSVValue
    wx.Image_HSVtoRGB = wx.Image.HSVtoRGB
# Python 2/3 unified name for the text type.
if sys.version_info[0] < 3:
    unicode_t = unicode
else:
    unicode_t = str
# Default widget background colour (light grey).
BACKGROUND_COLOUR = "#EBEBEB"
def interpFloat(t, v1, v2):
    """Linearly interpolate between v1 and v2 for a normalized position t in [0, 1]."""
    span = v2 - v1
    return v1 + span * t
def tFromValue(value, v1, v2):
    """Return the normalized position (0-1) of `value` within the range [v1, v2].

    A degenerate range (v1 == v2) maps every value to 1.0.
    """
    span = v2 - v1
    if span == 0:
        return 1.0
    return float(value - v1) / span
def clamp(v, minv, maxv):
    """Clamp `v` into the closed interval [minv, maxv].

    Idiomatic max/min form replaces the original two-branch rewrite of `v`;
    behavior is identical for minv <= maxv (the only way callers use it).
    """
    return max(minv, min(v, maxv))
def toLog(t, v1, v2):
    """Map a value t from the range [v1, v2] to a 0-1 position on a log10 scale.

    Requires t, v1 and v2 to be positive (log10 of their ratios is taken).
    """
    numerator = math.log10(t / v1)
    denominator = math.log10(v2 / v1)
    return numerator / denominator
def toExp(t, v1, v2):
    """Inverse of toLog: map a 0-1 position t exponentially into [v1, v2]."""
    lo = math.log10(v1)
    hi = math.log10(v2)
    return math.pow(10, lo + t * (hi - lo))
# Map powers of two (2 .. 65536) to their exponents, e.g. 1024 -> 10.
# Generated instead of hand-written: same 16 entries, no transcription risk.
POWOFTWO = {1 << i: i for i in range(1, 17)}
def powOfTwo(x):
    """Return 2 raised to the power of x."""
    result = 2 ** x
    return result
def powOfTwoToInt(x):
    """Return the exponent n such that 2**n == x.

    x must be one of the powers of two listed in POWOFTWO (2 .. 65536);
    any other value raises KeyError.
    """
    exponent = POWOFTWO[x]
    return exponent
def GetRoundBitmap(w, h, r):
    """Return a w x h bitmap whose mask is a rounded rectangle of radius r.

    The mask colour fills the whole bitmap, then a rounded rectangle is
    drawn in a slightly different colour and the fill colour is declared as
    the mask, so only the rounded area is shown when the bitmap is blitted.

    Bug fix: the original used ``wx.Color``, a classic-wxPython alias that
    does not exist in Phoenix (wxPython 4.x) and raised AttributeError
    there; ``wx.Colour`` is the canonical name in both generations.
    """
    maskColor = wx.Colour(0, 0, 0)
    shownColor = wx.Colour(5, 5, 5)
    b = wx.EmptyBitmap(w, h)
    dc = wx.MemoryDC(b)
    dc.SetBrush(wx.Brush(maskColor))
    dc.DrawRectangle(0, 0, w, h)
    dc.SetBrush(wx.Brush(shownColor))
    dc.SetPen(wx.Pen(shownColor))
    dc.DrawRoundedRectangle(0, 0, w, h, r)
    dc.SelectObject(wx.NullBitmap)
    b.SetMaskColour(maskColor)
    return b
class ControlSlider(wx.Panel):
    """Horizontal or vertical slider with the current value drawn on the knob.

    Supports linear or logarithmic mapping, integer or float resolution,
    power-of-two stepping, an optional MIDI controller number display and
    direct keyboard entry of a value after double-clicking (or
    shift-clicking) the knob.  `outFunction`, if given, is called with the
    mapped value on every repaint.
    """

    def __init__(
        self,
        parent,
        minvalue,
        maxvalue,
        init=None,
        pos=(0, 0),
        size=(200, 16),
        log=False,
        outFunction=None,
        integer=False,
        powoftwo=False,
        backColour=None,
        orient=wx.HORIZONTAL,
        ctrllabel="",
    ):
        # Default size is tailored to horizontal use; swap to a tall default
        # when the caller asked for a vertical slider without a custom size.
        if size == (200, 16) and orient == wx.VERTICAL:
            size = (40, 200)
        wx.Panel.__init__(
            self, parent=parent, id=wx.ID_ANY, pos=pos, size=size, style=wx.NO_BORDER | wx.WANTS_CHARS | wx.EXPAND
        )
        self.parent = parent
        if backColour:
            self.backgroundColour = backColour
        else:
            self.backgroundColour = BACKGROUND_COLOUR
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetBackgroundColour(self.backgroundColour)
        self.orient = orient
        # self.SetMinSize(self.GetSize())
        # Knob geometry: travel range along the main axis is reduced by half
        # a knob at each end so the knob never leaves the panel.
        if self.orient == wx.VERTICAL:
            self.knobSize = 17
            self.knobHalfSize = 8
            self.sliderWidth = size[0] - 29
        else:
            self.knobSize = 40
            self.knobHalfSize = 20
            self.sliderHeight = size[1] - 5
        self.outFunction = outFunction
        self.integer = integer
        self.log = log
        self.powoftwo = powoftwo
        # Power-of-two mode implies integer exponents on a linear scale.
        if self.powoftwo:
            self.integer = True
            self.log = False
        self.ctrllabel = ctrllabel
        self.SetRange(minvalue, maxvalue)
        self.borderWidth = 1
        self.selected = False
        self._enable = True
        self.propagate = True
        self.midictl = None
        self.new = ""  # buffer for keyboard-typed value entry
        if init is not None:
            self.SetValue(init)
            self.init = init
        else:
            self.SetValue(minvalue)
            self.init = minvalue
        self.clampPos()
        self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
        self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
        self.Bind(wx.EVT_LEFT_DCLICK, self.DoubleClick)
        self.Bind(wx.EVT_MOTION, self.MouseMotion)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnResize)
        self.Bind(wx.EVT_CHAR, self.onChar)
        self.Bind(wx.EVT_KILL_FOCUS, self.LooseFocus)
        # Platform-specific paint DC and font metrics.
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
            self.font = wx.Font(7, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)
        else:
            self.dcref = wx.PaintDC
            self.font = wx.Font(10, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)

    def getCtrlLabel(self):
        """Return the label identifying which parameter this slider controls."""
        return self.ctrllabel

    def setMidiCtl(self, x, propagate=True):
        """Set the MIDI controller number shown at the slider ends (None hides it)."""
        self.propagate = propagate
        self.midictl = x
        self.Refresh()

    def getMidiCtl(self):
        return self.midictl

    def getMinValue(self):
        return self.minvalue

    def getMaxValue(self):
        return self.maxvalue

    def Enable(self):
        self._enable = True
        wx.CallAfter(self.Refresh)

    def Disable(self):
        self._enable = False
        wx.CallAfter(self.Refresh)

    def setSliderHeight(self, height):
        self.sliderHeight = height
        self.Refresh()

    def setSliderWidth(self, width):
        self.sliderWidth = width

    def getInit(self):
        return self.init

    def SetRange(self, minvalue, maxvalue):
        self.minvalue = minvalue
        self.maxvalue = maxvalue

    def getRange(self):
        return [self.minvalue, self.maxvalue]

    def scale(self):
        """Map the current knob pixel position to a value in [minvalue, maxvalue]."""
        if self.orient == wx.VERTICAL:
            # Vertical sliders grow upward: invert the pixel coordinate.
            h = self.GetSize()[1]
            inter = tFromValue(h - self.pos, self.knobHalfSize, self.GetSize()[1] - self.knobHalfSize)
        else:
            inter = tFromValue(self.pos, self.knobHalfSize, self.GetSize()[0] - self.knobHalfSize)
        if not self.integer:
            return interpFloat(inter, self.minvalue, self.maxvalue)
        elif self.powoftwo:
            return powOfTwo(int(interpFloat(inter, self.minvalue, self.maxvalue)))
        else:
            return int(interpFloat(inter, self.minvalue, self.maxvalue))

    def SetValue(self, value, propagate=True):
        """Set the slider value programmatically (clamped to the slider range).

        In powoftwo mode `value` is the actual power of two (e.g. 1024) and
        is converted to its exponent internally.
        """
        self.propagate = propagate
        if self.HasCapture():
            self.ReleaseMouse()
        if self.powoftwo:
            value = powOfTwoToInt(value)
        value = clamp(value, self.minvalue, self.maxvalue)
        if self.log:
            t = toLog(value, self.minvalue, self.maxvalue)
            self.value = interpFloat(t, self.minvalue, self.maxvalue)
        else:
            t = tFromValue(value, self.minvalue, self.maxvalue)
            self.value = interpFloat(t, self.minvalue, self.maxvalue)
        if self.integer:
            self.value = int(self.value)
        if self.powoftwo:
            self.value = powOfTwo(self.value)
        self.clampPos()
        self.selected = False
        wx.CallAfter(self.Refresh)

    def GetValue(self):
        """Return the current value, de-warped from log scale if necessary."""
        if self.log:
            t = tFromValue(self.value, self.minvalue, self.maxvalue)
            val = toExp(t, self.minvalue, self.maxvalue)
        else:
            val = self.value
        if self.integer:
            val = int(val)
        return val

    def LooseFocus(self, event):
        # Cancel keyboard-entry mode when focus is lost.
        self.selected = False
        self.Refresh()

    def onChar(self, event):
        """Handle typed value entry while the knob is selected (double-clicked).

        Accepts digits, '.', '-' and backspace; Return/Enter commits the
        typed string as the new value.
        """
        if self.selected:
            char = ""
            if event.GetKeyCode() in range(wx.WXK_NUMPAD0, wx.WXK_NUMPAD9 + 1):
                char = str(event.GetKeyCode() - wx.WXK_NUMPAD0)
            elif event.GetKeyCode() in [wx.WXK_SUBTRACT, wx.WXK_NUMPAD_SUBTRACT]:
                char = "-"
            elif event.GetKeyCode() in [wx.WXK_DECIMAL, wx.WXK_NUMPAD_DECIMAL]:
                char = "."
            elif event.GetKeyCode() == wx.WXK_BACK:
                if self.new != "":
                    self.new = self.new[0:-1]
            elif event.GetKeyCode() < 256:
                char = chr(event.GetKeyCode())
            if char in ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", ".", "-"]:
                self.new += char
            elif event.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER]:
                self.SetValue(eval(self.new))
                self.new = ""
                self.selected = False
            self.Refresh()
        event.Skip()

    def MouseDown(self, evt):
        # Shift-click behaves like a double-click (enter keyboard-entry mode).
        if evt.ShiftDown():
            self.DoubleClick(evt)
            return
        if self._enable:
            size = self.GetSize()
            if self.orient == wx.VERTICAL:
                self.pos = clamp(evt.GetPosition()[1], self.knobHalfSize, size[1] - self.knobHalfSize)
            else:
                self.pos = clamp(evt.GetPosition()[0], self.knobHalfSize, size[0] - self.knobHalfSize)
            self.value = self.scale()
            self.CaptureMouse()
            self.selected = False
            self.Refresh()
        evt.Skip()

    def MouseUp(self, evt):
        if self.HasCapture():
            self.ReleaseMouse()

    def DoubleClick(self, event):
        # Select the knob for keyboard entry if the click landed on it.
        if self._enable:
            w, h = self.GetSize()
            pos = event.GetPosition()
            if self.orient == wx.VERTICAL:
                if wx.Rect(0, self.pos - self.knobHalfSize, w, self.knobSize).Contains(pos):
                    self.selected = True
            else:
                if wx.Rect(self.pos - self.knobHalfSize, 0, self.knobSize, h).Contains(pos):
                    self.selected = True
            self.Refresh()
        event.Skip()

    def MouseMotion(self, evt):
        # Drag the knob while the mouse is captured.
        if self._enable:
            size = self.GetSize()
            if self.HasCapture():
                if self.orient == wx.VERTICAL:
                    self.pos = clamp(evt.GetPosition()[1], self.knobHalfSize, size[1] - self.knobHalfSize)
                else:
                    self.pos = clamp(evt.GetPosition()[0], self.knobHalfSize, size[0] - self.knobHalfSize)
                self.value = self.scale()
                self.selected = False
                self.Refresh()

    def OnResize(self, evt):
        self.clampPos()
        self.Refresh()

    def clampPos(self):
        """Recompute the knob pixel position from the current value."""
        size = self.GetSize()
        if self.powoftwo:
            val = powOfTwoToInt(self.value)
        else:
            val = self.value
        if self.orient == wx.VERTICAL:
            self.pos = tFromValue(val, self.minvalue, self.maxvalue) * (size[1] - self.knobSize) + self.knobHalfSize
            self.pos = clamp(size[1] - self.pos, self.knobHalfSize, size[1] - self.knobHalfSize)
        else:
            self.pos = tFromValue(val, self.minvalue, self.maxvalue) * (size[0] - self.knobSize) + self.knobHalfSize
            self.pos = clamp(self.pos, self.knobHalfSize, size[0] - self.knobHalfSize)

    def setBackgroundColour(self, colour):
        self.backgroundColour = colour
        self.SetBackgroundColour(self.backgroundColour)
        self.Refresh()

    def OnPaint(self, evt):
        """Draw track, optional MIDI labels, knob and value text; also pushes
        the current value to `outFunction` (the widget's output path)."""
        w, h = self.GetSize()
        if w <= 0 or h <= 0:
            evt.Skip()
            return
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)

        dc.SetBrush(wx.Brush(self.backgroundColour, wx.SOLID))
        dc.Clear()

        # Draw background
        dc.SetPen(wx.Pen(self.backgroundColour, width=self.borderWidth, style=wx.SOLID))
        dc.DrawRectangle(0, 0, w, h)

        # Draw inner part
        if self._enable:
            sliderColour = "#99A7CC"
        else:
            sliderColour = "#BBBBBB"
        if self.orient == wx.VERTICAL:
            w2 = (w - self.sliderWidth) // 2
            rec = wx.Rect(w2, 0, self.sliderWidth, h)
            brush = gc.CreateLinearGradientBrush(w2, 0, w2 + self.sliderWidth, 0, "#646986", sliderColour)
        else:
            h2 = self.sliderHeight // 4
            rec = wx.Rect(0, h2, w, self.sliderHeight)
            brush = gc.CreateLinearGradientBrush(0, h2, 0, h2 + self.sliderHeight, "#646986", sliderColour)
        gc.SetBrush(brush)
        gc.DrawRoundedRectangle(rec[0], rec[1], rec[2], rec[3], 2)

        # MIDI controller number, shown at both ends of the track.
        if self.midictl is not None:
            if sys.platform == "win32" or sys.platform.startswith("linux"):
                dc.SetFont(wx.Font(6, wx.FONTFAMILY_ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
            else:
                dc.SetFont(wx.Font(9, wx.FONTFAMILY_ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
            dc.SetTextForeground("#FFFFFF")
            if self.orient == wx.VERTICAL:
                dc.DrawLabel(str(self.midictl), wx.Rect(w2, 2, self.sliderWidth, 12), wx.ALIGN_CENTER)
                dc.DrawLabel(str(self.midictl), wx.Rect(w2, h - 12, self.sliderWidth, 12), wx.ALIGN_CENTER)
            else:
                dc.DrawLabel(str(self.midictl), wx.Rect(2, 0, h, h), wx.ALIGN_CENTER)
                dc.DrawLabel(str(self.midictl), wx.Rect(w - h, 0, h, h), wx.ALIGN_CENTER)

        # Draw knob
        if self._enable:
            knobColour = "#888888"
        else:
            knobColour = "#DDDDDD"
        if self.orient == wx.VERTICAL:
            rec = wx.Rect(0, self.pos - self.knobHalfSize, w, self.knobSize - 1)
            if self.selected:
                brush = wx.Brush("#333333", wx.SOLID)
            else:
                brush = gc.CreateLinearGradientBrush(0, 0, w, 0, "#323854", knobColour)
            gc.SetBrush(brush)
            gc.DrawRoundedRectangle(rec[0], rec[1], rec[2], rec[3], 3)
        else:
            rec = wx.Rect(int(self.pos) - self.knobHalfSize, 0, self.knobSize - 1, h)
            if self.selected:
                brush = wx.Brush("#333333", wx.SOLID)
            else:
                brush = gc.CreateLinearGradientBrush(
                    self.pos - self.knobHalfSize, 0, self.pos + self.knobHalfSize, 0, "#323854", knobColour
                )
            gc.SetBrush(brush)
            gc.DrawRoundedRectangle(rec[0], rec[1], rec[2], rec[3], 3)

        dc.SetFont(self.font)

        # Draw text (the typed buffer while editing, otherwise the value with
        # precision scaled to its magnitude).
        if self.selected and self.new:
            val = self.new
        else:
            if self.integer:
                val = "%d" % self.GetValue()
            elif abs(self.GetValue()) >= 1000:
                val = "%.0f" % self.GetValue()
            elif abs(self.GetValue()) >= 100:
                val = "%.1f" % self.GetValue()
            elif abs(self.GetValue()) >= 10:
                val = "%.2f" % self.GetValue()
            elif abs(self.GetValue()) < 10:
                val = "%.3f" % self.GetValue()
        if sys.platform.startswith("linux"):
            width = len(val) * (dc.GetCharWidth() - 3)
        else:
            width = len(val) * dc.GetCharWidth()
        dc.SetTextForeground("#FFFFFF")
        dc.DrawLabel(val, rec, wx.ALIGN_CENTER)

        # Send value
        if self.outFunction and self.propagate:
            self.outFunction(self.GetValue())
        self.propagate = True

        evt.Skip()
# TODO: key, command and slmap should be removed from the multislider widget.
# It should work in the same way as the ControlSlider widget.
class MultiSlider(wx.Panel):
    """Stack of horizontal bar sliders, one row per channel.

    `init` is a list of initial values (one per channel); `slmap` is the
    SLMap giving the value range/resolution.  On every change, `command`
    is called as ``command(key, values)`` with the full list of values.
    """

    def __init__(self, parent, init, key, command, slmap, ctrllabel=""):
        wx.Panel.__init__(self, parent, size=(250, 250))
        self.backgroundColour = BACKGROUND_COLOUR
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetBackgroundColour(self.backgroundColour)
        self.Bind(wx.EVT_SIZE, self.OnResize)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
        self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
        self.Bind(wx.EVT_MOTION, self.MouseMotion)
        self._slmap = slmap
        self.ctrllabel = ctrllabel
        # _values are normalized (0-1); _labels hold the mapped user values.
        self._values = [slmap.set(x) for x in init]
        self._nchnls = len(init)
        self._labels = init
        self._key = key
        self._command = command
        self._height = 16  # pixel height of each slider row
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self._font = wx.Font(7, wx.FONTFAMILY_ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)
        else:
            self._font = wx.Font(10, wx.FONTFAMILY_ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)
        self.SetSize((250, self._nchnls * 16))
        self.SetMinSize((250, self._nchnls * 16))

    def getCtrlLabel(self):
        """Return the label identifying which parameter this widget controls."""
        return self.ctrllabel

    def OnResize(self, event):
        self.Layout()
        self.Refresh()

    def OnPaint(self, event):
        # One filled bar per channel, with the mapped value centered on it.
        w, h = self.GetSize()
        dc = wx.AutoBufferedPaintDC(self)
        dc.SetBrush(wx.Brush(self.backgroundColour))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        dc.SetBrush(wx.Brush("#000000"))
        dc.SetFont(self._font)
        dc.SetTextForeground("#999999")
        for i in range(self._nchnls):
            x = int(self._values[i] * w)
            y = self._height * i
            dc.DrawRectangle(0, y + 1, x, self._height - 2)
            rec = wx.Rect(w // 2 - 15, y, 30, self._height)
            dc.DrawLabel("%s" % self._labels[i], rec, wx.ALIGN_CENTER)

    def MouseDown(self, evt):
        # Which row was clicked decides which channel is edited.
        w, h = self.GetSize()
        pos = evt.GetPosition()
        slide = pos[1] // self._height
        if slide >= 0 and slide < self._nchnls:
            self._values[slide] = pos[0] / float(w)
            if self._slmap._res == "int":
                self._labels = [int(self._slmap.get(x)) for x in self._values]
            else:
                self._labels = [self._slmap.get(x) for x in self._values]
            self._command(self._key, self._labels)
            self.CaptureMouse()
        self.Refresh()
        evt.Skip()

    def MouseUp(self, evt):
        if self.HasCapture():
            self.ReleaseMouse()

    def MouseMotion(self, evt):
        # Same as MouseDown but while dragging with the left button held.
        w, h = self.GetSize()
        pos = evt.GetPosition()
        if evt.Dragging() and evt.LeftIsDown():
            slide = pos[1] // self._height
            if slide >= 0 and slide < self._nchnls:
                self._values[slide] = pos[0] / float(w)
                if self._slmap._res == "int":
                    self._labels = [int(self._slmap.get(x)) for x in self._values]
                else:
                    self._labels = [self._slmap.get(x) for x in self._values]
                self._command(self._key, self._labels)
            self.Refresh()

    def GetValue(self):
        """Return the current list of mapped (user-range) values."""
        return self._labels
class VuMeter(wx.Panel):
    """Segmented LED-style level meter for one or more audio channels.

    Draws `numSliders` lanes (rows for wx.HORIZONTAL, columns for
    wx.VERTICAL) of green/yellow/red segments; the lit length of each lane
    follows the amplitude supplied through setRms().
    """

    def __init__(self, parent, size=(200, 11), numSliders=2, orient=wx.HORIZONTAL, pos=wx.DefaultPosition, style=0):
        # One 5-pixel lane per channel plus a 1-pixel border.
        if orient == wx.HORIZONTAL:
            size = (size[0], numSliders * 5 + 1)
        else:
            size = (numSliders * 5 + 1, size[1])
        wx.Panel.__init__(self, parent, -1, pos=pos, size=size, style=style)
        self.parent = parent
        self.orient = orient
        self.SetBackgroundColour("#000000")
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.old_nchnls = numSliders
        self.numSliders = numSliders
        self.amplitude = [0] * self.numSliders
        self.createBitmaps()
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        self.Bind(wx.EVT_CLOSE, self.OnClose)

    def OnSize(self, evt):
        self.createBitmaps()
        wx.CallAfter(self.Refresh)

    def createBitmaps(self):
        """Pre-render the dim (back) and lit (front) segment strips.

        OnPaint blits the dim strip, then clips the lit strip to the
        amplitude length, so no per-segment drawing happens at paint time.
        """
        w, h = self.GetSize()
        b = wx.EmptyBitmap(w, h)
        f = wx.EmptyBitmap(w, h)
        dcb = wx.MemoryDC(b)
        dcf = wx.MemoryDC(f)
        dcb.SetPen(wx.Pen("#000000", width=1))
        dcf.SetPen(wx.Pen("#000000", width=1))
        if self.orient == wx.HORIZONTAL:
            height = 6
            steps = int(w / 10.0 + 0.5)
        else:
            width = 6
            steps = int(h / 10.0 + 0.5)
        # Top segment is red, the next sixth orange-red, the following sixth
        # yellow, the rest green (dim variants on the back bitmap).
        bounds = int(steps / 6.0)
        for i in range(steps):
            if i == (steps - 1):
                dcb.SetBrush(wx.Brush("#770000"))
                dcf.SetBrush(wx.Brush("#FF0000"))
            elif i >= (steps - bounds):
                dcb.SetBrush(wx.Brush("#440000"))
                dcf.SetBrush(wx.Brush("#CC0000"))
            elif i >= (steps - (bounds * 2)):
                dcb.SetBrush(wx.Brush("#444400"))
                dcf.SetBrush(wx.Brush("#CCCC00"))
            else:
                dcb.SetBrush(wx.Brush("#004400"))
                dcf.SetBrush(wx.Brush("#00CC00"))
            if self.orient == wx.HORIZONTAL:
                dcb.DrawRectangle(i * 10, 0, 11, height)
                dcf.DrawRectangle(i * 10, 0, 11, height)
            else:
                ii = steps - 1 - i
                dcb.DrawRectangle(0, ii * 10, width, 11)
                dcf.DrawRectangle(0, ii * 10, width, 11)
        if self.orient == wx.HORIZONTAL:
            dcb.DrawLine(w - 1, 0, w - 1, height)
            dcf.DrawLine(w - 1, 0, w - 1, height)
        else:
            dcb.DrawLine(0, 0, width, 0)
            dcf.DrawLine(0, 0, width, 0)
        dcb.SelectObject(wx.NullBitmap)
        dcf.SelectObject(wx.NullBitmap)
        self.backBitmap = b
        self.bitmap = f

    def setNumSliders(self, numSliders):
        """Change the channel count, resizing self and the parent window."""
        w, h = self.GetSize()
        oldChnls = self.old_nchnls
        self.numSliders = numSliders
        self.amplitude = [0] * self.numSliders
        gap = (self.numSliders - oldChnls) * 5
        parentSize = self.parent.GetSize()
        if self.orient == wx.HORIZONTAL:
            self.SetSize((w, self.numSliders * 5 + 1))
            self.SetMinSize((w, 5 * self.numSliders + 1))
            self.parent.SetSize((parentSize[0], parentSize[1] + gap))
            self.parent.SetMinSize((parentSize[0], parentSize[1] + gap))
        else:
            self.SetSize((self.numSliders * 5 + 1, h))
            self.SetMinSize((5 * self.numSliders + 1, h))
            self.parent.SetSize((parentSize[0] + gap, parentSize[1]))
            self.parent.SetMinSize((parentSize[0] + gap, parentSize[1]))
        wx.CallAfter(self.Refresh)
        wx.CallAfter(self.parent.Layout)
        wx.CallAfter(self.parent.Refresh)

    def setRms(self, *args):
        """Update per-channel amplitudes; call with no arguments to clear.

        A negative first value is treated as "no new data" and ignored.
        Bug fix: the original tested ``args[0] < 0`` before the emptiness
        check, so calling ``setRms()`` with no arguments (the documented
        reset path) raised IndexError.
        """
        if not args:
            self.amplitude = [0 for i in range(self.numSliders)]
        elif args[0] < 0:
            return
        else:
            self.amplitude = args
        wx.CallAfter(self.Refresh)

    def OnPaint(self, event):
        # Map amplitude to a dB-like 0-1 fraction, then clip-blit the lit strip.
        w, h = self.GetSize()
        dc = wx.AutoBufferedPaintDC(self)
        dc.SetBrush(wx.Brush("#000000"))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        if self.orient == wx.HORIZONTAL:
            height = 6
            for i in range(self.numSliders):
                y = i * (height - 1)
                if i < len(self.amplitude):
                    db = math.log10(self.amplitude[i] + 0.00001) * 0.2 + 1.0
                    width = int(db * w)
                else:
                    width = 0
                dc.DrawBitmap(self.backBitmap, 0, y)
                if width > 0:
                    dc.SetClippingRegion(0, y, width, height)
                    dc.DrawBitmap(self.bitmap, 0, y)
                    dc.DestroyClippingRegion()
        else:
            width = 6
            for i in range(self.numSliders):
                y = i * (width - 1)
                if i < len(self.amplitude):
                    db = math.log10(self.amplitude[i] + 0.00001) * 0.2 + 1.0
                    height = int(db * h)
                else:
                    height = 0
                dc.DrawBitmap(self.backBitmap, y, 0)
                if height > 0:
                    dc.SetClippingRegion(y, h - height, width, height)
                    dc.DrawBitmap(self.bitmap, y, 0)
                    dc.DestroyClippingRegion()
        event.Skip()

    def OnClose(self, evt):
        self.Destroy()
# TODO: BACKGROUND_COLOUR hard-coded all over the place in this class.
class RangeSlider(wx.Panel):
    """Base class for a two-handle range slider.

    Maintains two handles (`self.handles` in value units, `self.handlePos`
    in pixels).  Left-drag moves a handle or, when grabbed in the middle,
    the whole range; right-drag always moves the whole range.  Subclasses
    (e.g. HRangeSlider) supply painting and the public Set/Get interface.
    """

    def __init__(
        self,
        parent,
        minvalue,
        maxvalue,
        init=None,
        pos=(0, 0),
        size=(200, 15),
        valtype="int",
        log=False,
        function=None,
        backColour=None,
    ):
        wx.Panel.__init__(self, parent=parent, id=wx.ID_ANY, pos=pos, size=size, style=wx.NO_BORDER)
        if backColour:
            self.backgroundColour = backColour
        else:
            self.backgroundColour = BACKGROUND_COLOUR
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetBackgroundColour(self.backgroundColour)
        self.SetMinSize(self.GetSize())
        self.sliderHeight = 15
        self.borderWidth = 1
        self.action = None  # current drag mode: None, "drag", "left" or "right"
        self.fillcolor = "#AAAAAA"  # SLIDER_BACK_COLOUR
        self.knobcolor = "#333333"  # SLIDER_KNOB_COLOUR
        # NOTE(review): knobcolor is a "#RRGGBB" string; int() here parses the
        # hex digit pairs as *decimal*, not base 16 — presumably unintended,
        # but left as-is (setFillColour() replaces this colour anyway).
        self.handlecolor = wx.Colour(
            int(self.knobcolor[1:3]) - 10, int(self.knobcolor[3:5]) - 10, int(self.knobcolor[5:7]) - 10
        )
        self.outFunction = function
        if valtype.startswith("i"):
            self.myType = int
        else:
            self.myType = float
        self.log = log
        self.SetRange(minvalue, maxvalue)
        self.handles = [minvalue, maxvalue]
        if init is not None:
            if type(init) in [list, tuple]:
                if len(init) == 1:
                    self.SetValue([init[0], init[0]])
                else:
                    self.SetValue([init[0], init[1]])
            else:
                self.SetValue([minvalue, maxvalue])
        else:
            self.SetValue([minvalue, maxvalue])
        self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
        self.Bind(wx.EVT_RIGHT_DOWN, self.MouseRightDown)
        self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
        self.Bind(wx.EVT_RIGHT_UP, self.MouseUp)
        self.Bind(wx.EVT_MOTION, self.MouseMotion)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnResize)

    def createSliderBitmap(self):
        """Build the mask bitmap giving the track its rounded-rect shape."""
        w, h = self.GetSize()
        b = wx.EmptyBitmap(w, h)
        dc = wx.MemoryDC(b)
        dc.SetPen(wx.Pen(self.backgroundColour, width=1))
        dc.SetBrush(wx.Brush(self.backgroundColour))
        dc.DrawRectangle(0, 0, w, h)
        dc.SetBrush(wx.Brush("#777777"))
        dc.SetPen(wx.Pen("#FFFFFF", width=1))
        h2 = self.sliderHeight // 4
        dc.DrawRoundedRectangle(0, h2, w, self.sliderHeight, 4)
        dc.SelectObject(wx.NullBitmap)
        b.SetMaskColour("#777777")
        self.sliderMask = b

    def setFillColour(self, col1, col2):
        """Set the track fill and knob colours (expects wx.Colour-like values)."""
        self.fillcolor = col1
        self.knobcolor = col2
        self.handlecolor = wx.Colour(self.knobcolor[0] * 0.35, self.knobcolor[1] * 0.35, self.knobcolor[2] * 0.35)
        self.createSliderBitmap()

    def SetRange(self, minvalue, maxvalue):
        self.minvalue = minvalue
        self.maxvalue = maxvalue

    def scale(self, pos):
        """Map a list of pixel positions to values in [minvalue, maxvalue]."""
        tmp = []
        for p in pos:
            inter = tFromValue(p, 1, self.GetSize()[0] - 1)
            inter2 = interpFloat(inter, self.minvalue, self.maxvalue)
            tmp.append(inter2)
        return tmp

    def MouseRightDown(self, evt):
        # Right-drag anywhere inside the range moves the whole range.
        size = self.GetSize()
        xpos = evt.GetPosition()[0]
        if xpos > (self.handlePos[0] - 5) and xpos < (self.handlePos[1] + 5):
            self.lastpos = xpos
            self.length = self.handlePos[1] - self.handlePos[0]
            self.action = "drag"
            self.handles = self.scale(self.handlePos)
            self.CaptureMouse()
            self.Refresh()

    def MouseDown(self, evt):
        # Middle grip drags the whole range; clicks on either side move the
        # nearer handle.
        size = self.GetSize()
        xpos = evt.GetPosition()[0]
        self.middle = (self.handlePos[1] - self.handlePos[0]) // 2 + self.handlePos[0]
        midrec = wx.Rect(self.middle - 7, 4, 15, size[1] - 9)
        if midrec.Contains(evt.GetPosition()):
            self.lastpos = xpos
            self.length = self.handlePos[1] - self.handlePos[0]
            self.action = "drag"
        elif xpos < self.middle:
            self.handlePos[0] = clamp(xpos, 1, self.handlePos[1])
            self.action = "left"
        elif xpos > self.middle:
            self.handlePos[1] = clamp(xpos, self.handlePos[0], size[0] - 1)
            self.action = "right"
        self.handles = self.scale(self.handlePos)
        self.CaptureMouse()
        self.Refresh()

    def MouseMotion(self, evt):
        # Continue the drag started in MouseDown/MouseRightDown.
        size = self.GetSize()
        if evt.Dragging() and self.HasCapture() and evt.LeftIsDown() or evt.RightIsDown():
            xpos = evt.GetPosition()[0]
            if self.action == "drag":
                off = xpos - self.lastpos
                self.lastpos = xpos
                self.handlePos[0] = clamp(self.handlePos[0] + off, 1, size[0] - self.length)
                self.handlePos[1] = clamp(self.handlePos[1] + off, self.length, size[0] - 1)
            if self.action == "left":
                self.handlePos[0] = clamp(xpos, 1, self.handlePos[1] - 20)
            elif self.action == "right":
                self.handlePos[1] = clamp(xpos, self.handlePos[0] + 20, size[0] - 1)
            self.handles = self.scale(self.handlePos)
            self.Refresh()

    def MouseUp(self, evt):
        while self.HasCapture():
            self.ReleaseMouse()

    def OnResize(self, evt):
        self.createSliderBitmap()
        self.createBackgroundBitmap()
        self.clampHandlePos()
        self.Refresh()

    def clampHandlePos(self):
        """Recompute pixel handle positions from the current handle values."""
        size = self.GetSize()
        tmp = []
        for handle in [min(self.handles), max(self.handles)]:
            pos = tFromValue(handle, self.minvalue, self.maxvalue) * size[0]
            pos = clamp(pos, 1, size[0] - 1)
            tmp.append(pos)
        self.handlePos = tmp
class HRangeSlider(RangeSlider):
    """Horizontal two-handle range slider.

    `function`, if given, is called with the current [min, max] value pair
    on every repaint.  Values honour the `log` and `valtype` options of
    RangeSlider.
    """

    def __init__(
        self,
        parent,
        minvalue,
        maxvalue,
        init=None,
        pos=(0, 0),
        size=(200, 15),
        valtype="int",
        log=False,
        function=None,
        backColour=None,
    ):
        RangeSlider.__init__(self, parent, minvalue, maxvalue, init, pos, size, valtype, log, function, backColour)
        self.SetMinSize((50, 15))
        self.createSliderBitmap()
        # self.createBackgroundBitmap()
        self.clampHandlePos()

    def setSliderHeight(self, height):
        self.sliderHeight = height
        self.createSliderBitmap()
        # self.createBackgroundBitmap()
        self.Refresh()

    def createBackgroundBitmap(self):
        """Pre-render the track background with a vertical gradient fill."""
        w, h = self.GetSize()
        self.backgroundBitmap = wx.EmptyBitmap(w, h)
        dc = wx.MemoryDC(self.backgroundBitmap)

        dc.SetBrush(wx.Brush(self.backgroundColour, wx.SOLID))
        dc.Clear()

        # Draw background
        dc.SetPen(wx.Pen(self.backgroundColour, width=self.borderWidth, style=wx.SOLID))
        dc.DrawRectangle(0, 0, w, h)

        # Draw inner part
        h2 = self.sliderHeight // 4
        rec = wx.Rect(0, h2, w, self.sliderHeight)
        dc.GradientFillLinear(rec, "#666666", self.fillcolor, wx.BOTTOM)
        dc.DrawBitmap(self.sliderMask, 0, 0, True)
        dc.SelectObject(wx.NullBitmap)

    def SetOneValue(self, value, which):
        """Set a single handle (`which` is 0 for min, 1 for max)."""
        self.lasthandles = self.handles
        value = clamp(value, self.minvalue, self.maxvalue)
        if self.log:
            t = toLog(value, self.minvalue, self.maxvalue)
            value = interpFloat(t, self.minvalue, self.maxvalue)
        else:
            t = tFromValue(value, self.minvalue, self.maxvalue)
            value = interpFloat(t, self.minvalue, self.maxvalue)
        if self.myType == int:
            value = int(value)
        self.handles[which] = value
        self.OnResize(None)

    def SetValue(self, values):
        """Set both handles from a [min, max] pair of values."""
        self.lasthandles = self.handles
        tmp = []
        for val in values:
            value = clamp(val, self.minvalue, self.maxvalue)
            if self.log:
                t = toLog(value, self.minvalue, self.maxvalue)
                value = interpFloat(t, self.minvalue, self.maxvalue)
            else:
                t = tFromValue(value, self.minvalue, self.maxvalue)
                value = interpFloat(t, self.minvalue, self.maxvalue)
            if self.myType == int:
                value = int(value)
            tmp.append(value)
        self.handles = tmp
        self.OnResize(None)

    def GetValue(self):
        """Return the current [min, max] pair, de-warped from log scale."""
        tmp = []
        for value in self.handles:
            if self.log:
                t = tFromValue(value, self.minvalue, self.maxvalue)
                val = toExp(t, self.minvalue, self.maxvalue)
            else:
                val = value
            if self.myType == int:
                val = int(val)
            tmp.append(val)
        tmp = [min(tmp), max(tmp)]
        return tmp

    def OnPaint(self, evt):
        """Draw the background, the selected range and the middle grip; also
        pushes the current [min, max] pair to `outFunction`."""
        w, h = self.GetSize()
        dc = wx.AutoBufferedPaintDC(self)

        # Draw background
        dc.SetBrush(wx.Brush(self.backgroundColour))
        dc.Clear()
        dc.SetPen(wx.Pen(self.backgroundColour))
        dc.DrawRectangle(0, 0, w, h)
        # dc.DrawBitmap(self.backgroundBitmap, 0, 0)

        # Draw handles
        dc.SetPen(wx.Pen(self.handlecolor, width=1, style=wx.SOLID))
        dc.SetBrush(wx.Brush(self.handlecolor))

        rec = (self.handlePos[0], 3, self.handlePos[1] - self.handlePos[0], h - 7)
        dc.DrawRoundedRectangle(rec[0], rec[1], rec[2], rec[3], 4)

        dc.SetPen(wx.Pen(self.fillcolor, width=1, style=wx.SOLID))
        dc.SetBrush(wx.Brush(self.fillcolor))
        mid = (self.handlePos[1] - self.handlePos[0]) // 2 + self.handlePos[0]
        rec = (mid - 4, 4, 8, h - 9)
        dc.DrawRoundedRectangle(rec[0], rec[1], rec[2], rec[3], 3)

        # Send value
        if self.outFunction:
            self.outFunction(self.GetValue())
######################################################################
### Control window for PyoObject
######################################################################
class Command:
    """Adapt a two-argument callback into a one-argument callable.

    The wrapped ``func`` is always invoked as ``func(key, value)``, with
    ``key`` fixed at construction time.  Used to bind a parameter name to
    a slider's output callback.
    """

    def __init__(self, func, key):
        self.func = func
        self.key = key

    def __call__(self, value):
        callback = self.func
        callback(self.key, value)
class PyoObjectControl(wx.Frame):
    """Auto-generated control window for a PyoObject's parameters.

    For each SLMap in `map_list`, builds a ControlSlider (scalar init) or
    MultiSlider (list init).  Non-data parameters are driven through a
    SigTo per parameter so slider moves are smoothed at audio rate;
    data-only parameters (marked with " *") set the attribute directly.
    """

    def __init__(self, parent=None, obj=None, map_list=None):
        wx.Frame.__init__(self, parent)
        from .controls import SigTo

        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        self.fileMenu.Append(9999, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.fileMenu.Bind(wx.EVT_MENU, self._destroy, id=9999)
        self.fileMenu.AppendSeparator()
        self.fileMenu.Append(
            10000, "Copy all parameters to the clipboard (4 digits of precision)\tCtrl+C", kind=wx.ITEM_NORMAL
        )
        self.Bind(wx.EVT_MENU, self.copy, id=10000)
        self.fileMenu.Append(
            10001, "Copy all parameters to the clipboard (full precision)\tShift+Ctrl+C", kind=wx.ITEM_NORMAL
        )
        self.Bind(wx.EVT_MENU, self.copy, id=10001)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)
        self.Bind(wx.EVT_CLOSE, self._destroy)
        self._obj = obj
        self._map_list = map_list
        self._sliders = []
        self._excluded = []  # parameter names skipped (PyoObject inits)
        self._values = {}
        self._displays = {}
        self._maps = {}
        self._sigs = {}  # per-parameter SigTo smoothers

        panel = wx.Panel(self)
        panel.SetBackgroundColour(BACKGROUND_COLOUR)
        mainBox = wx.BoxSizer(wx.VERTICAL)
        self.box = wx.FlexGridSizer(10, 2, 5, 5)

        for i, m in enumerate(self._map_list):
            key, init, mini, maxi, scl, res, dataOnly = m.name, m.init, m.min, m.max, m.scale, m.res, m.dataOnly
            # filters PyoObjects
            if type(init) not in [list, float, int]:
                self._excluded.append(key)
            else:
                self._maps[key] = m
                # label (param name)
                if dataOnly:
                    label = wx.StaticText(panel, -1, key + " *")
                else:
                    label = wx.StaticText(panel, -1, key)
                # create and pack slider
                if type(init) != list:
                    if scl == "log":
                        scl = True
                    else:
                        scl = False
                    if res == "int":
                        res = True
                    else:
                        res = False
                    self._sliders.append(
                        ControlSlider(
                            panel,
                            mini,
                            maxi,
                            init,
                            log=scl,
                            size=(300, 16),
                            outFunction=Command(self.setval, key),
                            integer=res,
                            ctrllabel=key,
                        )
                    )
                    self.box.AddMany([(label, 0, wx.LEFT, 5), (self._sliders[-1], 1, wx.EXPAND | wx.LEFT, 5)])
                else:
                    self._sliders.append(MultiSlider(panel, init, key, self.setval, m, ctrllabel=key))
                    self.box.AddMany([(label, 0, wx.LEFT, 5), (self._sliders[-1], 1, wx.EXPAND | wx.LEFT, 5)])
                # set obj attribute to PyoObject SigTo
                if not dataOnly:
                    self._values[key] = init
                    self._sigs[key] = SigTo(init, 0.025, init)
                    # Move the SigTo streams just before the controlled object's
                    # stream so they are computed first in the server loop.
                    refStream = self._obj.getBaseObjects()[0]._getStream()
                    server = self._obj.getBaseObjects()[0].getServer()
                    for k in range(len(self._sigs[key].getBaseObjects())):
                        curStream = self._sigs[key].getBaseObjects()[k]._getStream()
                        server.changeStreamPosition(refStream, curStream)
                    setattr(self._obj, key, self._sigs[key])
        self.box.AddGrowableCol(1, 1)
        mainBox.Add(self.box, 1, wx.EXPAND | wx.TOP | wx.BOTTOM | wx.RIGHT, 10)
        panel.SetSizerAndFit(mainBox)
        self.SetClientSize(panel.GetSize())
        self.SetMinSize(self.GetSize())
        self.SetMaxSize((-1, self.GetSize()[1]))

    def _destroy(self, event):
        # Restore plain values on the object and drop the SigTo smoothers
        # before tearing the window down.
        for m in self._map_list:
            key = m.name
            if key not in self._excluded and key in self._values:
                setattr(self._obj, key, self._values[key])
                del self._sigs[key]
        self.Destroy()

    def setval(self, key, x):
        """Slider callback: route the new value through the SigTo (smoothed
        parameters) or set the attribute directly (data-only parameters)."""
        if key in self._values:
            self._values[key] = x
            setattr(self._sigs[key], "value", x)
        else:
            setattr(self._obj, key, x)

    def copy(self, evt):
        """Copy 'name=value, ...' for all sliders to the clipboard.

        Menu id 10000 formats floats with 4 digits; 10001 uses full precision.
        """
        labels = [slider.getCtrlLabel() for slider in self._sliders]
        values = [slider.GetValue() for slider in self._sliders]
        if evt.GetId() == 10000:
            pstr = ""
            for i in range(len(labels)):
                pstr += "%s=" % labels[i]
                if type(values[i]) == list:
                    pstr += "["
                    pstr += ", ".join(["%.4f" % val for val in values[i]])
                    pstr += "]"
                else:
                    pstr += "%.4f" % values[i]
                if i < (len(labels) - 1):
                    pstr += ", "
        else:
            pstr = ""
            for i in range(len(labels)):
                pstr += "%s=" % labels[i]
                if type(values[i]) == list:
                    pstr += "["
                    pstr += ", ".join([str(val) for val in values[i]])
                    pstr += "]"
                else:
                    pstr += str(values[i])
                if i < (len(labels) - 1):
                    pstr += ", "
        data = wx.TextDataObject(pstr)
        if wx.TheClipboard.Open():
            wx.TheClipboard.Clear()
            wx.TheClipboard.SetData(data)
            wx.TheClipboard.Close()
######################################################################
### View window for PyoTableObject
######################################################################
class ViewTable(wx.Frame):
    """Top-level window displaying the waveform of a PyoTableObject.

    `samples` is the pre-scaled point list to draw; `object` is the table,
    used to clear its view-frame reference on close.
    """

    def __init__(self, parent, samples=None, tableclass=None, object=None):
        wx.Frame.__init__(self, parent, size=(500, 200))
        self.SetMinSize((300, 150))
        menubar = wx.MenuBar()
        fileMenu = wx.Menu()
        closeItem = fileMenu.Append(-1, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self._destroy, closeItem)
        menubar.Append(fileMenu, "&File")
        self.SetMenuBar(menubar)
        self.tableclass = tableclass
        self.object = object
        self.Bind(wx.EVT_CLOSE, self._destroy)
        self.panel = wx.Panel(self)
        self.panel.SetBackgroundColour(BACKGROUND_COLOUR)
        self.box = wx.BoxSizer(wx.VERTICAL)
        self.wavePanel = ViewTablePanel(self.panel, object)
        self.box.Add(self.wavePanel, 1, wx.EXPAND | wx.ALL, 5)
        self.panel.SetSizerAndFit(self.box)
        self.update(samples)

    def update(self, samples):
        """Redraw the waveform with a new list of sample points."""
        self.wavePanel.draw(samples)

    def _destroy(self, evt):
        # Unregister from the table before destroying the window.
        self.object._setViewFrame(None)
        self.Destroy()
class ViewTablePanel(wx.Panel):
    """Panel drawing the waveform of a table as a connected line.

    `obj` is the PyoTableObject; on resize the table is asked to refresh
    its view so the point list matches the new panel size.
    """

    def __init__(self, parent, obj):
        wx.Panel.__init__(self, parent)
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.obj = obj
        self.samples = []
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        # Platform-specific paint DC (buffered on win/linux to avoid flicker).
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
        else:
            self.dcref = wx.PaintDC

    def draw(self, samples):
        """Store a new point list and schedule a repaint."""
        self.samples = samples
        wx.CallAfter(self.Refresh)

    def OnPaint(self, evt):
        # White background, waveform polyline, and a mid-height zero line.
        w, h = self.GetSize()
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        dc.SetBrush(wx.Brush("#FFFFFF"))
        dc.SetPen(wx.Pen("#BBBBBB", width=1, style=wx.SOLID))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        gc.SetPen(wx.Pen("#000000", width=1, style=wx.SOLID))
        gc.SetBrush(wx.Brush("#FFFFFF"))
        if len(self.samples) > 1:
            gc.DrawLines(self.samples)
        dc.DrawLine(0, h // 2 + 1, w, h // 2 + 1)

    def OnSize(self, evt):
        wx.CallAfter(self.obj.refreshView)
class SndViewTable(wx.Frame):
    """Top-level window displaying a SndTable waveform with horizontal zoom.

    The zoom range slider below the waveform selects the visible time span
    (a fraction of the table duration).
    """

    def __init__(self, parent, obj=None, tableclass=None, mouse_callback=None):
        wx.Frame.__init__(self, parent, size=(500, 250))
        self.SetMinSize((300, 150))
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        closeItem = self.fileMenu.Append(-1, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self._destroy, closeItem)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)
        self.Bind(wx.EVT_CLOSE, self._destroy)
        self.obj = obj
        self.chnls = len(self.obj)
        self.dur = self.obj.getDur(False)
        self.panel = wx.Panel(self)
        self.panel.SetBackgroundColour(BACKGROUND_COLOUR)
        self.box = wx.BoxSizer(wx.VERTICAL)
        self.wavePanel = SndViewTablePanel(self.panel, obj, mouse_callback)
        self.box.Add(self.wavePanel, 1, wx.EXPAND | wx.ALL, 5)
        self.zoomH = HRangeSlider(
            self.panel,
            minvalue=0,
            maxvalue=1,
            init=None,
            pos=(0, 0),
            size=(200, 15),
            valtype="float",
            log=False,
            function=self.setZoomH,
        )
        self.box.Add(self.zoomH, 0, wx.EXPAND | wx.LEFT | wx.RIGHT, 5)
        self.panel.SetSizer(self.box)

    def setZoomH(self, values):
        """Zoom-slider callback: map the normalized [min, max] pair to seconds."""
        self.wavePanel.setBegin(self.dur * values[0])
        self.wavePanel.setEnd(self.dur * values[1])
        self.update()

    def update(self):
        """Ask the waveform panel to rebuild its image for the current view."""
        self.wavePanel.setImage()

    def _destroy(self, evt):
        # Unregister from the table before destroying the window.
        self.obj._setViewFrame(None)
        self.Destroy()
class SndViewTablePanel(wx.Panel):
    """Waveform panel for a sound table, with time grid and selection.

    Left-click (and drag) reports a normalized (x, y) position through
    ``mouse_callback``.  Right-click drag creates a selection, shift +
    right-drag moves it, cmd/ctrl + right-click clears it; the selection is
    reported through ``select_callback`` as a (start, stop) fraction pair.
    The grid and waveform are cached in ``background_bitmap`` so that
    selection updates do not redraw everything.
    """

    def __init__(self, parent, obj=None, mouse_callback=None, select_callback=None):
        wx.Panel.__init__(self, parent)
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_LEFT_DOWN, self.OnMouseDown)
        self.Bind(wx.EVT_LEFT_UP, self.OnMouseUp)
        self.Bind(wx.EVT_RIGHT_DOWN, self.OnRightDown)
        self.Bind(wx.EVT_RIGHT_UP, self.OnMouseUp)
        self.Bind(wx.EVT_MOTION, self.OnMotion)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        # When True, the next paint reuses the cached background bitmap.
        self.refresh_from_selection = False
        self.background_bitmap = None
        self.obj = obj
        # Selection bounds and drag origin, as fractions of the table.
        self.selstart = self.selend = self.movepos = None
        self.moveSelection = False
        self.createSelection = False
        self.begin = 0
        if self.obj is not None:
            self.chnls = len(self.obj)
            self.end = self.obj.getDur(False)
        else:
            self.chnls = 1
            self.end = 1.0
        self.img = [[]]
        self.mouse_callback = mouse_callback
        self.select_callback = select_callback
        # Double-buffered painting avoids flicker on win32/linux.
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
        else:
            self.dcref = wx.PaintDC
        self.setImage()

    def getDur(self):
        """Return the table duration in seconds (1.0 when no object is set)."""
        if self.obj is not None:
            return self.obj.getDur(False)
        else:
            return 1.0

    def resetSelection(self):
        """Clear the selection and notify the callback with the full range."""
        self.selstart = self.selend = None
        if self.background_bitmap is not None:
            self.refresh_from_selection = True
            self.Refresh()
        if self.select_callback is not None:
            self.select_callback((0.0, 1.0))

    def setSelection(self, start, stop):
        """Set the selection (fractions of the table) and notify the callback."""
        self.selstart = start
        self.selend = stop
        if self.background_bitmap is not None:
            self.refresh_from_selection = True
            self.Refresh()
        if self.select_callback is not None:
            self.select_callback((self.selstart, self.selend))

    def setBegin(self, x):
        """Set the left edge of the visible window, in seconds."""
        self.begin = x

    def setEnd(self, x):
        """Set the right edge of the visible window, in seconds."""
        self.end = x

    def setImage(self):
        """Recompute the waveform polylines for the current size and window."""
        if self.obj is not None:
            self.img = self.obj.getViewTable(self.GetSize(), self.begin, self.end)
            wx.CallAfter(self.Refresh)

    def clipPos(self, pos):
        """Clamp a normalized position to [0, 1] and map x into the zoom window."""
        if pos[0] < 0.0:
            x = 0.0
        elif pos[0] > 1.0:
            x = 1.0
        else:
            x = pos[0]
        if pos[1] < 0.0:
            y = 0.0
        elif pos[1] > 1.0:
            y = 1.0
        else:
            y = pos[1]
        if self.obj is not None:
            # Rescale panel-relative x to a fraction of the whole table.
            x = x * ((self.end - self.begin) / self.obj.getDur(False)) + (self.begin / self.obj.getDur(False))
        return (x, y)

    def OnMouseDown(self, evt):
        """Left press: report the normalized position and grab the mouse."""
        size = self.GetSize()
        pos = evt.GetPosition()
        if pos[1] <= 0:
            pos = (float(pos[0]) / size[0], 1.0)
        else:
            pos = (float(pos[0]) / size[0], 1.0 - (float(pos[1]) / size[1]))
        pos = self.clipPos(pos)
        if self.mouse_callback is not None:
            self.mouse_callback(pos)
        self.CaptureMouse()

    def OnRightDown(self, evt):
        """Right press: start, move (shift) or clear (cmd/ctrl) the selection."""
        size = self.GetSize()
        pos = evt.GetPosition()
        if pos[1] <= 0:
            pos = (float(pos[0]) / size[0], 1.0)
        else:
            pos = (float(pos[0]) / size[0], 1.0 - (float(pos[1]) / size[1]))
        pos = self.clipPos(pos)
        if evt.ShiftDown():
            if self.selstart is not None and self.selend is not None:
                self.moveSelection = True
                self.movepos = pos[0]
        elif evt.CmdDown():
            self.selstart = self.selend = None
            self.refresh_from_selection = True
            self.Refresh()
            if self.select_callback is not None:
                self.select_callback((0.0, 1.0))
        else:
            self.createSelection = True
            self.selstart = pos[0]
        self.CaptureMouse()

    def OnMotion(self, evt):
        """Drag: forward position (left) or grow/move the selection (right)."""
        if self.HasCapture():
            size = self.GetSize()
            pos = evt.GetPosition()
            if pos[1] <= 0:
                pos = (float(pos[0]) / size[0], 1.0)
            else:
                pos = (float(pos[0]) / size[0], 1.0 - (float(pos[1]) / size[1]))
            pos = self.clipPos(pos)
            if evt.LeftIsDown():
                if self.mouse_callback is not None:
                    self.mouse_callback(pos)
            elif evt.RightIsDown():
                refresh = False
                if self.createSelection:
                    self.selend = pos[0]
                    refresh = True
                elif self.moveSelection:
                    diff = pos[0] - self.movepos
                    self.movepos = pos[0]
                    self.selstart += diff
                    self.selend += diff
                    refresh = True
                if refresh:
                    self.refresh_from_selection = True
                    self.Refresh()
                    if self.select_callback is not None:
                        self.select_callback((self.selstart, self.selend))

    def OnMouseUp(self, evt):
        """Release the mouse grab and end any selection gesture."""
        if self.HasCapture():
            self.ReleaseMouse()
        self.createSelection = self.moveSelection = False

    def create_background(self):
        """Render waveform, zero lines and time grid into the cached bitmap."""
        w, h = self.GetSize()
        self.background_bitmap = wx.EmptyBitmap(w, h)
        dc = wx.MemoryDC(self.background_bitmap)
        gc = wx.GraphicsContext_Create(dc)
        dc.SetBrush(wx.Brush("#FFFFFF"))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        off = h // self.chnls // 2
        gc.SetPen(wx.Pen("#000000", width=1, style=wx.SOLID))
        gc.SetBrush(wx.Brush("#FFFFFF", style=wx.TRANSPARENT))
        dc.SetTextForeground("#444444")
        # BUGFIX: was `sys.platform in "darwin"`, a substring-membership test
        # that only worked by accident; use an equality test like the rest
        # of the file.
        if sys.platform == "darwin":
            font, ptsize = dc.GetFont(), dc.GetFont().GetPointSize()
            font.SetPointSize(ptsize - 3)
            dc.SetFont(font)
        else:
            font = dc.GetFont()
            font.SetPointSize(8)
            dc.SetFont(font)
        # More decimals on the time labels as the ticks get wider apart.
        tickstep = w // 10
        if tickstep < 40:
            timelabel = "%.1f"
        elif tickstep < 80:
            timelabel = "%.2f"
        elif tickstep < 120:
            timelabel = "%.3f"
        else:
            timelabel = "%.4f"
        timestep = (self.end - self.begin) * 0.1
        for i, samples in enumerate(self.img):
            y = h // self.chnls * i
            if len(samples):
                gc.DrawLines(samples)
            dc.SetPen(wx.Pen("#888888", width=1, style=wx.DOT))
            dc.DrawLine(0, y + off, w, y + off)
            for j in range(10):
                dc.SetPen(wx.Pen("#888888", width=1, style=wx.DOT))
                dc.DrawLine(j * tickstep, 0, j * tickstep, h)
                dc.DrawText(timelabel % (self.begin + j * timestep), j * tickstep + 2, h - y - 12)
            dc.SetPen(wx.Pen("#000000", width=1))
            dc.DrawLine(0, h - y, w, h - y)
        dc.SelectObject(wx.NullBitmap)

    def OnPaint(self, evt):
        """Blit the cached background, then overlay the selection rectangle."""
        w, h = self.GetSize()
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        dc.SetBrush(wx.Brush("#FFFFFF"))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        if not self.refresh_from_selection:
            self.create_background()
        dc.DrawBitmap(self.background_bitmap, 0, 0)
        if self.selstart is not None and self.selend is not None:
            gc.SetPen(wx.Pen(wx.Colour(0, 0, 0, 64)))
            gc.SetBrush(wx.Brush(wx.Colour(0, 0, 0, 64)))
            if self.obj is not None:
                dur = self.obj.getDur(False)
            else:
                dur = 1.0
            # Selection bounds in seconds, independent of drag direction.
            selstartabs = min(self.selstart, self.selend) * dur
            selendabs = max(self.selstart, self.selend) * dur
            if selstartabs < self.begin:
                startpix = 0
            else:
                startpix = ((selstartabs - self.begin) / (self.end - self.begin)) * w
            if selendabs > self.end:
                endpix = w
            else:
                endpix = ((selendabs - self.begin) / (self.end - self.begin)) * w
            gc.DrawRectangle(startpix, 0, endpix - startpix, h)
        self.refresh_from_selection = False

    def OnSize(self, evt):
        """Recompute the waveform polylines for the new panel size."""
        wx.CallAfter(self.setImage)
######################################################################
## View window for PyoMatrixObject
#####################################################################
class ViewMatrixBase(wx.Frame):
    """Base frame for displaying a PyoMatrixObject as a fixed-size image."""

    def __init__(self, parent, size=None, object=None):
        wx.Frame.__init__(self, parent)
        self.object = object
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        closeItem = self.fileMenu.Append(-1, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self._destroy, closeItem)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)
        self.Bind(wx.EVT_CLOSE, self._destroy)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        # The window is locked to the matrix dimensions: min == max == size.
        self.SetClientSize(size)
        self.SetMinSize(self.GetSize())
        self.SetMaxSize(self.GetSize())

    def update(self, samples):
        """Rebuild the displayed image from raw sample data (see subclass)."""
        self.setImage(samples)

    def _destroy(self, evt):
        """Detach this frame from the matrix object and destroy it."""
        self.object._setViewFrame(None)
        self.Destroy()
class ViewMatrix(ViewMatrixBase):
    """Concrete matrix viewer rendering raw RGB bytes into a bitmap."""

    def __init__(self, parent, samples=None, size=None, object=None):
        ViewMatrixBase.__init__(self, parent, size, object)
        self.size = size
        self.setImage(samples)

    def setImage(self, samples):
        """Build a bitmap from `samples` (raw image data) and repaint.

        NOTE(review): wx.EmptyImage / wx.BitmapFromImage are wx "classic"
        APIs, consistent with the rest of this file.
        """
        image = wx.EmptyImage(self.size[0], self.size[1])
        image.SetData(samples)
        self.img = wx.BitmapFromImage(image)
        wx.CallAfter(self.Refresh)

    def OnPaint(self, evt):
        """Blit the matrix bitmap at the window origin."""
        dc = wx.PaintDC(self)
        dc.DrawBitmap(self.img, 0, 0)
######################################################################
## Spectrum Display
######################################################################
class SpectrumDisplay(wx.Frame):
    """Frame displaying the spectrum analysis of an audio object.

    Hosts a SpectrumPanel plus toolbar controls (start/stop, frequency and
    magnitude scaling, FFT window type and size), a horizontal zoom slider
    and a vertical gain slider.  `obj` is the analyzer object providing the
    spectrum data.
    """

    def __init__(self, parent, obj=None):
        wx.Frame.__init__(self, parent, size=(600, 350))
        self.SetMinSize((400, 240))
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        closeItem = self.fileMenu.Append(-1, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self._destroy, closeItem)
        self.menubar.Append(self.fileMenu, "&File")
        pollMenu = wx.Menu()
        # Radio menu of polling intervals; ids start at 20000 so that
        # setPollTime can recover the index with (event id - 20000).
        pollID = 20000
        self.availableSpeeds = [0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1]
        for speed in self.availableSpeeds:
            pollMenu.Append(pollID, "%.3f" % speed, kind=wx.ITEM_RADIO)
            if speed == 0.05:
                pollMenu.Check(pollID, True)
            self.Bind(wx.EVT_MENU, self.setPollTime, id=pollID)
            pollID += 1
        self.menubar.Append(pollMenu, "&Polling Speed")
        self.SetMenuBar(self.menubar)
        self.Bind(wx.EVT_CLOSE, self._destroy)
        self.obj = obj
        self.panel = wx.Panel(self)
        self.panel.SetBackgroundColour(BACKGROUND_COLOUR)
        self.mainBox = wx.BoxSizer(wx.VERTICAL)
        self.toolBox = wx.BoxSizer(wx.HORIZONTAL)
        # Buttons need extra horizontal padding on mac.
        if sys.platform == "darwin":
            X_OFF = 24
        else:
            X_OFF = 16
        if self.obj is None:
            initgain = 0.0
            self.channelNamesVisible = True
            self.channelNames = []
        else:
            initgain = self.obj.gain
            self.channelNamesVisible = self.obj.channelNamesVisible
            self.channelNames = self.obj.channelNames
        tw, th = self.GetTextExtent("Start")
        self.activeTog = wx.ToggleButton(self.panel, -1, label="Start", size=(tw + X_OFF, th + 10))
        self.activeTog.SetValue(1)
        self.activeTog.Bind(wx.EVT_TOGGLEBUTTON, self.activate)
        self.toolBox.Add(self.activeTog, 0, wx.TOP | wx.LEFT, 5)
        tw, th = self.GetTextExtent("Freq Log")
        self.freqTog = wx.ToggleButton(self.panel, -1, label="Freq Log", size=(tw + X_OFF, th + 10))
        self.freqTog.SetValue(0)
        self.freqTog.Bind(wx.EVT_TOGGLEBUTTON, self.setFreqScale)
        self.toolBox.Add(self.freqTog, 0, wx.TOP | wx.LEFT, 5)
        tw, th = self.GetTextExtent("Mag Log")
        self.magTog = wx.ToggleButton(self.panel, -1, label="Mag Log", size=(tw + X_OFF, th + 10))
        self.magTog.SetValue(1)
        self.magTog.Bind(wx.EVT_TOGGLEBUTTON, self.setMagScale)
        self.toolBox.Add(self.magTog, 0, wx.TOP | wx.LEFT, 5)
        # FFT analysis window types, in the order expected by obj.wintype.
        tw, th = self.GetTextExtent("Blackman 3-term")
        self.winPopup = wx.Choice(
            self.panel,
            -1,
            choices=[
                "Rectangular",
                "Hamming",
                "Hanning",
                "Bartlett",
                "Blackman 3",
                "Blackman-H 4",
                "Blackman-H 7",
                "Tuckey",
                "Half-sine",
            ],
            size=(tw + X_OFF, th + 10),
        )
        self.winPopup.SetSelection(2)
        self.winPopup.Bind(wx.EVT_CHOICE, self.setWinType)
        self.toolBox.Add(self.winPopup, 0, wx.TOP | wx.LEFT, 5)
        # FFT sizes are powers of two starting at 64 (see setSize).
        tw, th = self.GetTextExtent("16384")
        self.sizePopup = wx.Choice(
            self.panel,
            -1,
            choices=["64", "128", "256", "512", "1024", "2048", "4096", "8192", "16384"],
            size=(-1, th + 10),
        )
        self.sizePopup.SetSelection(4)
        self.sizePopup.Bind(wx.EVT_CHOICE, self.setSize)
        self.toolBox.Add(self.sizePopup, 0, wx.TOP | wx.LEFT, 5)
        self.mainBox.Add(self.toolBox, 0, wx.EXPAND)
        self.dispBox = wx.BoxSizer(wx.HORIZONTAL)
        self.box = wx.BoxSizer(wx.VERTICAL)
        self.spectrumPanel = SpectrumPanel(
            self.panel,
            len(self.obj),
            self.obj.getLowfreq(),
            self.obj.getHighfreq(),
            self.obj.getFscaling(),
            self.obj.getMscaling(),
        )
        self.box.Add(self.spectrumPanel, 1, wx.EXPAND | wx.LEFT | wx.RIGHT | wx.TOP, 5)
        # Horizontal zoom on the frequency axis, as fractions of Nyquist.
        self.zoomH = HRangeSlider(
            self.panel,
            minvalue=0,
            maxvalue=0.5,
            init=None,
            pos=(0, 0),
            size=(200, 15),
            valtype="float",
            log=False,
            function=self.setZoomH,
        )
        self.box.Add(self.zoomH, 0, wx.EXPAND | wx.LEFT | wx.RIGHT, 5)
        self.dispBox.Add(self.box, 1, wx.EXPAND, 0)
        # Gain slider in decibels (-24 .. +24).
        self.gainSlider = ControlSlider(self.panel, -24, 24, initgain, outFunction=self.setGain, orient=wx.VERTICAL)
        self.dispBox.Add(self.gainSlider, 0, wx.EXPAND | wx.TOP, 5)
        self.dispBox.AddSpacer(5)
        self.mainBox.Add(self.dispBox, 1, wx.EXPAND)
        self.panel.SetSizer(self.mainBox)

    def activate(self, evt):
        """Start/stop toggle: enable or disable analyzer polling."""
        if evt.GetInt() == 1:
            self.obj.poll(1)
        else:
            self.obj.poll(0)

    def setPollTime(self, evt):
        """Menu callback: set the polling interval from the radio item id."""
        value = self.availableSpeeds[evt.GetId() - 20000]
        self.obj.polltime(value)

    def setFreqScale(self, evt):
        """Toggle logarithmic (1) vs linear (0) frequency scaling."""
        if evt.GetInt() == 1:
            self.obj.setFscaling(1)
        else:
            self.obj.setFscaling(0)

    def setMagScale(self, evt):
        """Toggle logarithmic (1) vs linear (0) magnitude scaling."""
        if evt.GetInt() == 1:
            self.obj.setMscaling(1)
        else:
            self.obj.setMscaling(0)

    def setWinType(self, evt):
        """Set the FFT analysis window type from the popup index."""
        self.obj.wintype = evt.GetInt()

    def setSize(self, evt):
        """Set the FFT size; popup index 0 corresponds to 64 (= 1 << 6)."""
        size = 1 << (evt.GetInt() + 6)
        self.obj.size = size

    def setGain(self, gain):
        """Convert the slider's dB value to linear amplitude and apply it."""
        self.obj.setGain(pow(10.0, gain * 0.05))

    def setZoomH(self, values):
        """Zoom slider callback: update the analyzer bounds and the display."""
        self.spectrumPanel.setLowFreq(self.obj.setLowbound(values[0]))
        self.spectrumPanel.setHighFreq(self.obj.setHighbound(values[1]))
        wx.CallAfter(self.spectrumPanel.Refresh)

    def setDisplaySize(self, size):
        """Tell the analyzer the pixel size it should compute points for."""
        self.obj.setWidth(size[0])
        self.obj.setHeight(size[1])

    def update(self, points):
        """Receive fresh spectrum polylines from the analyzer."""
        self.spectrumPanel.setImage(points)

    def setFscaling(self, x):
        """Propagate a frequency-scaling change to the panel."""
        self.spectrumPanel.setFscaling(x)
        wx.CallAfter(self.spectrumPanel.Refresh)

    def setMscaling(self, x):
        """Propagate a magnitude-scaling change to the panel."""
        self.spectrumPanel.setMscaling(x)
        wx.CallAfter(self.spectrumPanel.Refresh)

    def showChannelNames(self, visible):
        """Show or hide the per-channel legend."""
        self.spectrumPanel.showChannelNames(visible)
        self.channelNamesVisible = visible

    def setChannelNames(self, names):
        """Set custom channel names for the legend."""
        self.channelNames = names
        self.spectrumPanel.setChannelNames(names)

    def _destroy(self, evt):
        """Detach this frame from the analyzer object and destroy it."""
        self.obj._setViewFrame(None)
        self.Destroy()
# TODO: Adjust the font size according to the size of the panel.
class SpectrumPanel(wx.Panel):
    """Panel drawing spectrum curves with frequency/magnitude grids.

    `self.img` holds one polyline (list of points) per channel; pens and
    brushes are generated per channel from the HSV colour wheel.
    """

    def __init__(
        self, parent, chnls, lowfreq, highfreq, fscaling, mscaling, pos=wx.DefaultPosition, size=wx.DefaultSize, style=0
    ):
        wx.Panel.__init__(self, parent, pos=pos, size=size, style=style)
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetMinSize((300, 100))
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        # A mono input still gets 64 pens so colours stay stable if the
        # channel count changes later (see setChnls).
        if chnls == 1:
            self.chnls = 64
        else:
            self.chnls = chnls
        # Legend settings are read from the owning SpectrumDisplay frame
        # when available; defaults are used when the panel is standalone.
        try:
            self.channelNamesVisible = self.GetParent().GetParent().channelNamesVisible
        except:
            self.channelNamesVisible = True
        try:
            self.channelNames = self.GetParent().GetParent().channelNames
        except:
            self.channelNames = []
        self.img = None
        self.obj = None
        self.lowfreq = lowfreq
        self.highfreq = highfreq
        self.fscaling = fscaling
        self.mscaling = mscaling
        self.setPens()
        # Double-buffered painting avoids flicker on win32/linux.
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
        else:
            self.dcref = wx.PaintDC

    def OnSize(self, evt):
        """Forward the new pixel size to the frame and/or analyzer object."""
        try:
            self.GetParent().GetParent().setDisplaySize(self.GetSize())
        except:
            pass
        try:
            size = self.GetSize()
            self.obj.setWidth(size[0])
            self.obj.setHeight(size[1])
        except:
            pass
        self.Refresh()

    def setImage(self, points):
        """Store the per-channel polylines and schedule a repaint."""
        self.img = [points[i] for i in range(len(points))]
        wx.CallAfter(self.Refresh)

    def setPens(self):
        """Build one pen/brush per channel along the HSV hue circle.

        NOTE(review): wx.Image_HSVValue / wx.Image_HSVtoRGB are wx "classic"
        names; ScopePanel below uses the Phoenix wx.Image.HSVValue spelling —
        confirm which wx version this file targets.
        """
        self.pens = []
        self.brushes = []
        for x in range(self.chnls):
            hue = rescale(x, xmin=0, xmax=self.chnls - 1, ymin=0, ymax=2.0 / 3)
            hsv = wx.Image_HSVValue(hue, 1.0, 0.6)
            rgb = wx.Image_HSVtoRGB(hsv)
            self.pens.append(wx.Pen(wx.Colour(rgb.red, rgb.green, rgb.blue), 1))
            self.brushes.append(wx.Brush(wx.Colour(rgb.red, rgb.green, rgb.blue, 128)))

    def setChnls(self, x):
        """Set the channel count (mono mapped to 64) and rebuild the pens."""
        if x == 1:
            self.chnls = 64
        else:
            self.chnls = x
        self.setPens()

    def setFscaling(self, x):
        """Set frequency scaling: truthy = logarithmic, falsy = linear."""
        self.fscaling = x

    def setMscaling(self, x):
        """Set magnitude scaling: truthy = logarithmic (dB), falsy = linear."""
        self.mscaling = x

    def setLowFreq(self, x):
        """Set the lower bound (Hz) of the displayed frequency range."""
        self.lowfreq = x

    def setHighFreq(self, x):
        """Set the upper bound (Hz) of the displayed frequency range."""
        self.highfreq = x

    def showChannelNames(self, visible):
        """Show or hide the per-channel legend."""
        self.channelNamesVisible = visible

    def setChannelNames(self, names):
        """Set custom channel names for the legend."""
        self.channelNames = names

    def OnPaint(self, evt):
        """Draw the grids, the legend and the per-channel spectrum curves."""
        w, h = self.GetSize()
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        tw, th = dc.GetTextExtent("0")
        # background
        background = gc.CreatePath()
        background.AddRectangle(0, 0, w - 1, h - 1)
        gc.SetPen(wx.BLACK_PEN)
        gc.SetBrush(wx.WHITE_BRUSH)
        gc.DrawPath(background)
        dc.SetTextForeground("#555555")
        dc.SetPen(wx.Pen("#555555", style=wx.DOT))
        # frequency linear grid
        if not self.fscaling:
            text = str(int(self.lowfreq))
            tw, th = dc.GetTextExtent(text)
            step = (self.highfreq - self.lowfreq) / 8
            dc.DrawText(text, 2, 2)
            w8 = w // 8
            for i in range(1, 8):
                pos = w8 * i
                dc.DrawLine(pos, th + 4, pos, h - 2)
                text = str(int(self.lowfreq + step * i))
                tw, th = dc.GetTextExtent(text)
                dc.DrawText(text, pos - tw // 2, 2)
        # frequency logarithmic grid
        else:
            # Clamp the low bound to 20 Hz so log10 stays well-defined and
            # the grid starts in the audible range.
            if self.lowfreq < 20:
                lf = math.log10(20)
            else:
                lf = math.log10(self.lowfreq)
            hf = math.log10(self.highfreq)
            lrange = hf - lf
            mag = pow(10.0, math.floor(lf))
            # Wide ranges advance tick-to-tick multiplicatively; narrow
            # ranges advance by the decade of the current tick.
            if lrange > 6:
                t = pow(10.0, math.ceil(lf))
                base = pow(10.0, math.floor(lrange / 6))

                def inc(t, floor_t):
                    return t * base - t

            else:
                t = math.ceil(pow(10.0, lf) / mag) * mag

                def inc(t, floor_t):
                    return pow(10.0, floor_t)

            majortick = int(math.log10(mag))
            while t <= pow(10, hf):
                floor_t = int(math.floor(math.log10(t) + 1e-16))
                if majortick != floor_t:
                    # Entering a new decade: label it as a power of ten.
                    majortick = floor_t
                    ticklabel = "1e%d" % majortick
                    ticklabel = str(int(float(ticklabel)))
                    tw, th = dc.GetTextExtent(ticklabel)
                else:
                    if hf - lf < 2:
                        # Narrow range: label even minor ticks within the decade.
                        minortick = int(t / pow(10.0, majortick) + 0.5)
                        ticklabel = "%de%d" % (minortick, majortick)
                        ticklabel = str(int(float(ticklabel)))
                        tw, th = dc.GetTextExtent(ticklabel)
                        if not minortick % 2 == 0:
                            ticklabel = ""
                    else:
                        ticklabel = ""
                pos = int((math.log10(t) - lf) / lrange * w)
                if pos < (w - 25):
                    dc.DrawLine(pos, th + 4, pos, h - 2)
                    dc.DrawText(ticklabel, pos - tw // 2, 2)
                t += inc(t, floor_t)
        # magnitude linear grid
        if not self.mscaling:
            h4 = h * 0.75
            step = h4 * 0.1
            for i in range(1, 11):
                pos = int(h - i * step)
                text = "%.1f" % (i * 0.1)
                tw, th = dc.GetTextExtent(text)
                dc.DrawText(text, w - tw - 2, pos - th // 2)
                dc.DrawLine(0, pos, w - tw - 4, pos)
            # Solid line at full scale (1.0), then continue dotted above it.
            dc.SetPen(wx.Pen("#555555", style=wx.SOLID))
            dc.DrawLine(0, pos, w - tw - 6, pos)
            dc.SetPen(wx.Pen("#555555", style=wx.DOT))
            i += 1
            while i * step < (h - th - 5):
                pos = int(h - i * step)
                text = "%.1f" % (i * 0.1)
                tw, th = dc.GetTextExtent(text)
                dc.DrawText(text, w - tw - 2, pos - th // 2)
                dc.DrawLine(0, pos, w - tw - 6, pos)
                i += 1
        # magnitude logarithmic grid
        else:
            # 6 dB per division, 0 dB at the 75%-height line.
            mw, mh = dc.GetTextExtent("-54")
            h4 = h * 0.75
            step = h4 * 0.1
            for i in range(1, 11):
                pos = int(h - i * step)
                mval = int((10 - i) * -6.0)
                if mval == -0:
                    mval = 0
                text = "%d" % mval
                tw, th = dc.GetTextExtent(text)
                dc.DrawText(text, w - tw - 2, pos - th // 2)
                dc.DrawLine(0, pos, w - mw - 6, pos)
            # Solid line at 0 dB, then continue dotted above it.
            dc.SetPen(wx.Pen("#555555", style=wx.SOLID))
            dc.DrawLine(0, pos, w - mw - 4, pos)
            dc.SetPen(wx.Pen("#555555", style=wx.DOT))
            i += 1
            while i * step < (h - th - 5):
                pos = int(h - i * step)
                text = "%d" % int((10 - i) * -6.0)
                tw, th = dc.GetTextExtent(text)
                dc.DrawText(text, w - tw - 2, pos - th // 2)
                dc.DrawLine(0, pos, w - mw - 6, pos)
                i += 1
        # spectrum
        if self.img is not None:
            last_tw = tw
            # legend
            if len(self.img) > 1 and self.channelNamesVisible:
                if not self.channelNames:
                    tw, th = dc.GetTextExtent("chan 8")
                    for i in range(len(self.img)):
                        dc.SetTextForeground(self.pens[i % self.chnls].GetColour())
                        dc.DrawText("chan %d" % (i + 1), w - tw - 20 - last_tw, i * th + th + 7)
                else:
                    numChars = max([len(x) for x in self.channelNames])
                    tw, th = dc.GetTextExtent("0" * numChars)
                    for i in range(len(self.img)):
                        dc.SetTextForeground(self.pens[i % self.chnls].GetColour())
                        if i < len(self.channelNames):
                            dc.DrawText(self.channelNames[i], w - tw - 20 - last_tw, i * th + th + 7)
                        else:
                            dc.DrawText("chan %d" % (i + 1), w - tw - 20 - last_tw, i * th + th + 7)
            # channel spectrums
            for i, samples in enumerate(self.img):
                gc.SetPen(self.pens[i % self.chnls])
                gc.SetBrush(self.brushes[i % self.chnls])
                gc.DrawLines(samples)
######################################################################
## Scope Display
######################################################################
class ScopeDisplay(wx.Frame):
    """Frame displaying an oscilloscope view of an audio object.

    Hosts a ScopePanel plus a start/stop toggle, a window-length slider
    (ms, logarithmic) and a vertical gain slider (dB).
    """

    def __init__(self, parent, obj=None):
        wx.Frame.__init__(self, parent, size=(600, 350))
        self.SetMinSize((400, 240))
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        closeItem = self.fileMenu.Append(-1, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self._destroy, closeItem)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)
        self.Bind(wx.EVT_CLOSE, self._destroy)
        self.obj = obj
        gain = self.obj.gain
        length = self.obj.length
        self.panel = wx.Panel(self)
        self.panel.SetBackgroundColour(BACKGROUND_COLOUR)
        self.mainBox = wx.BoxSizer(wx.VERTICAL)
        self.toolBox = wx.BoxSizer(wx.HORIZONTAL)
        # Buttons need extra horizontal padding on mac.
        if sys.platform == "darwin":
            X_OFF = 24
        else:
            X_OFF = 16
        tw, th = self.GetTextExtent("Start")
        self.activeTog = wx.ToggleButton(self.panel, -1, label="Start", size=(tw + X_OFF, th + 10))
        self.activeTog.SetValue(1)
        self.activeTog.Bind(wx.EVT_TOGGLEBUTTON, self.activate)
        self.toolBox.Add(self.activeTog, 0, wx.TOP | wx.LEFT | wx.RIGHT, 5)
        self.toolBox.AddSpacer(10)
        self.toolBox.Add(wx.StaticText(self.panel, -1, label="Window length (ms):"), 0, wx.TOP, 11)
        # Slider works in milliseconds; the object stores seconds.
        self.lenSlider = ControlSlider(self.panel, 10, 1000, length * 1000, log=True, outFunction=self.setLength)
        self.toolBox.Add(self.lenSlider, 1, wx.TOP | wx.LEFT | wx.RIGHT, 11)
        self.toolBox.AddSpacer(40)
        self.mainBox.Add(self.toolBox, 0, wx.EXPAND)
        self.dispBox = wx.BoxSizer(wx.HORIZONTAL)
        self.box = wx.BoxSizer(wx.VERTICAL)
        self.scopePanel = ScopePanel(self.panel, self.obj)
        self.box.Add(self.scopePanel, 1, wx.EXPAND | wx.LEFT | wx.RIGHT, 5)
        self.dispBox.Add(self.box, 1, wx.EXPAND | wx.BOTTOM, 5)
        # Gain slider in dB; the object's linear gain is converted for display.
        self.gainSlider = ControlSlider(
            self.panel, -24, 24, 20.0 * math.log10(gain), outFunction=self.setGain, orient=wx.VERTICAL
        )
        self.dispBox.Add(self.gainSlider, 0, wx.EXPAND | wx.BOTTOM, 5)
        self.dispBox.AddSpacer(5)
        self.mainBox.Add(self.dispBox, 1, wx.EXPAND)
        self.panel.SetSizer(self.mainBox)

    def activate(self, evt):
        """Start/stop toggle: enable or disable scope polling."""
        self.obj.poll(evt.GetInt())

    def setLength(self, length):
        """Slider callback: convert ms to seconds and apply to object + panel."""
        length *= 0.001
        self.obj.setLength(length)
        self.scopePanel.setLength(length)

    def setGain(self, gain):
        """Slider callback: convert dB to linear gain and apply it."""
        gain = pow(10.0, gain * 0.05)
        self.scopePanel.setGain(gain)
        self.obj.setGain(gain)

    def update(self, points):
        """Receive fresh waveform polylines from the scope object."""
        self.scopePanel.setImage(points)

    def showChannelNames(self, visible):
        """Show or hide the per-channel legend."""
        self.scopePanel.showChannelNames(visible)

    def setChannelNames(self, names):
        """Set custom channel names for the legend."""
        self.scopePanel.setChannelNames(names)

    def _destroy(self, evt):
        """Detach this frame from the scope object and destroy it."""
        self.obj._setViewFrame(None)
        self.Destroy()
class ScopePanel(wx.Panel):
    """Panel drawing oscilloscope waveforms with amplitude/time grids.

    `self.img` holds one polyline (list of points) per channel; pens are
    generated per channel from the HSV colour wheel.
    """

    def __init__(self, parent, obj=None, pos=wx.DefaultPosition, size=wx.DefaultSize, style=0):
        wx.Panel.__init__(self, parent, pos=pos, size=size, style=style)
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetMinSize((300, 100))
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        self.img = [[]]
        self.obj = obj
        if self.obj is not None:
            self.gain = self.obj.gain
            self.length = self.obj.length
            self.chnls = len(self.obj)
            self.channelNamesVisible = self.obj.channelNamesVisible
            self.channelNames = self.obj.channelNames
        else:
            self.gain = 1
            self.length = 0.05
            self.chnls = 64
            self.channelNamesVisible = True
            # BUGFIX: this line previously overwrote channelNamesVisible with
            # an empty list and left channelNames undefined, which would raise
            # AttributeError in OnPaint's legend when obj is None.
            self.channelNames = []
        self.setPens()
        # Double-buffered painting avoids flicker on win32/linux.
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
        else:
            self.dcref = wx.PaintDC

    def OnSize(self, evt):
        """Forward the new pixel size to the scope object, if any."""
        try:
            size = self.GetSize()
            self.obj.setWidth(size[0])
            self.obj.setHeight(size[1])
        except:
            pass
        wx.CallAfter(self.Refresh)

    def setChnls(self, x):
        """Set the channel count (mono mapped to 64) and rebuild the pens."""
        if x == 1:
            self.chnls = 64
        else:
            self.chnls = x
        self.setPens()

    def setPens(self):
        """Build one pen per channel along the HSV hue circle."""
        self.pens = []
        if self.chnls < 2:
            hsv = wx.Image.HSVValue(0.0, 1.0, 0.6)
            rgb = wx.Image.HSVtoRGB(hsv)
            self.pens.append(wx.Pen(wx.Colour(rgb.red, rgb.green, rgb.blue), 1))
        else:
            for x in range(self.chnls):
                hue = rescale(x, xmin=0, xmax=self.chnls - 1, ymin=0, ymax=2.0 / 3)
                hsv = wx.Image.HSVValue(hue, 0.99, 0.6)
                rgb = wx.Image.HSVtoRGB(hsv)
                self.pens.append(wx.Pen(wx.Colour(rgb.red, rgb.green, rgb.blue), 1))

    def setGain(self, gain):
        """Set the linear display gain used to label the amplitude grid."""
        self.gain = gain

    def setLength(self, length):
        """Set the window length (seconds) used to label the time grid."""
        self.length = length

    def setImage(self, points):
        """Store the per-channel polylines and schedule a repaint."""
        self.img = points
        wx.CallAfter(self.Refresh)

    def showChannelNames(self, visible=True):
        """Show or hide the per-channel legend."""
        self.channelNamesVisible = visible

    def setChannelNames(self, names):
        """Set custom channel names for the legend."""
        self.channelNames = names

    def OnPaint(self, evt):
        """Draw the grids, the per-channel waveforms and the legend."""
        w, h = self.GetSize()
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        tw, th = dc.GetTextExtent("0")
        dc.SetBrush(wx.Brush("#FFFFFF"))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        gc.SetPen(wx.Pen("#000000", width=1, style=wx.SOLID))
        gc.SetBrush(wx.Brush("#FFFFFF", style=wx.TRANSPARENT))
        dc.SetTextForeground("#444444")
        # Per-platform font tweaks so grid labels stay small and readable.
        if sys.platform == "darwin":
            font, ptsize = dc.GetFont(), dc.GetFont().GetPointSize()
            font.SetPointSize(ptsize - 3)
            dc.SetFont(font)
        elif sys.platform.startswith("linux"):
            font, ptsize = dc.GetFont(), dc.GetFont().GetPointSize()
            font.SetPointSize(ptsize - 1)
            dc.SetFont(font)
        elif sys.platform == "win32":
            font = dc.GetFont()
            font.SetPointSize(8)
            dc.SetFont(font)
        dc.SetPen(wx.Pen("#888888", width=1, style=wx.DOT))
        # horizontal grid: amplitude lines, centered on zero, scaled by gain
        step = h // 6
        ampstep = 1.0 / 3.0 / self.gain
        for i in range(1, 6):
            pos = int(h - i * step)
            npos = i - 3
            text = "%.2f" % (ampstep * npos)
            tw, th = dc.GetTextExtent(text)
            dc.DrawText(text, w - tw - 2, pos - th // 2)
            dc.DrawLine(0, pos, w - tw - 10, pos)
        # vertical grid: time lines, one per quarter of the window length
        tickstep = w // 4
        timestep = self.length * 0.25
        for j in range(4):
            dc.SetPen(wx.Pen("#888888", width=1, style=wx.DOT))
            dc.DrawLine(j * tickstep, 0, j * tickstep, h)
            dc.DrawText("%.3f" % (j * timestep), j * tickstep + 2, h - 15)
        # draw waveforms
        for i, samples in enumerate(self.img):
            # CONSISTENCY FIX: index the pen list modulo the actual number of
            # pens (was a hard-coded `i % 8`), matching the legend below.
            gc.SetPen(self.pens[i % self.chnls])
            if len(samples) > 1:
                gc.DrawLines(samples)
        # legend
        last_tw = tw
        if len(self.img) > 1 and self.channelNamesVisible:
            if not self.channelNames:
                tw, th = dc.GetTextExtent("chan 8")
                for i in range(len(self.img)):
                    dc.SetTextForeground(self.pens[i % self.chnls].GetColour())
                    dc.DrawText("chan %d" % (i + 1), w - tw - 20 - last_tw, i * th + th + 7)
            else:
                numChars = max([len(x) for x in self.channelNames])
                tw, th = dc.GetTextExtent("0" * numChars)
                for i in range(len(self.img)):
                    dc.SetTextForeground(self.pens[i % self.chnls].GetColour())
                    if i < len(self.channelNames):
                        dc.DrawText(self.channelNames[i], w - tw - 20 - last_tw, i * th + th + 7)
                    else:
                        dc.DrawText("chan %d" % (i + 1), w - tw - 20 - last_tw, i * th + th + 7)
######################################################################
## Grapher window for PyoTableObject control
######################################################################
# Geometry constants for the Grapher panel below.
OFF = 10  # margin between the panel edge and the drawable area
OFF2 = OFF * 2
RAD = 3  # radius of a breakpoint handle
RAD2 = RAD * 2
AREA = RAD + 2  # half-size of the square used for breakpoint hit-testing
AREA2 = AREA * 2
class Grapher(wx.Panel):
    def __init__(
        self,
        parent,
        xlen=8192,
        yrange=(0.0, 1.0),
        init=[(0.0, 0.0), (1.0, 1.0)],
        mode=0,
        exp=10.0,
        inverse=True,
        tension=0.0,
        bias=0.0,
        outFunction=None,
        pos=(0, 0),
        size=(300, 200),
        style=0,
    ):
        """Breakpoint-function editor panel.

        xlen: length of the x axis in table samples; init: initial list of
        normalized (x, y) breakpoints; mode: interpolation mode (4 = log,
        which forbids y <= 0); exp/inverse: exponential-curve parameters;
        tension/bias: Hermite-spline parameters; outFunction: callback
        receiving the list of (x, y) values on every change.
        """
        wx.Panel.__init__(self, parent, pos=pos, size=size, style=style)
        self.backgroundColour = BACKGROUND_COLOUR
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetBackgroundColour(self.backgroundColour)
        self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeave)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
        self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
        self.Bind(wx.EVT_MOTION, self.MouseMotion)
        self.Bind(wx.EVT_KEY_DOWN, self.OnKeyDown)
        self.Bind(wx.EVT_SIZE, self.OnResize)
        self.mode = mode
        self.exp = exp
        self.inverse = inverse
        self.tension = tension
        self.bias = bias
        self.pos = (OFF + RAD, OFF + RAD)
        self.selected = None
        self.xlen = xlen
        self.yrange = yrange
        # Copy the (shared, mutable-default) init list so edits to
        # self.points never leak back into the caller's list.
        self.init = [tup for tup in init]
        self.points = [tup for tup in init]
        self.outFunction = outFunction
        # Double-buffered painting avoids flicker on win32/linux.
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
        else:
            self.dcref = wx.PaintDC
        self.SetFocus()
        wx.CallAfter(self.sendValues)
def setInitPoints(self, pts):
self.init = [(p[0], p[1]) for p in pts]
self.points = [(p[0], p[1]) for p in pts]
self.selected = None
self.sendValues()
self.Refresh()
def pointToPixels(self, pt):
w, h = self.GetSize()
w, h = w - OFF2 - RAD2, h - OFF2 - RAD2
x = int(round(pt[0] * w)) + OFF + RAD
y = int(round(pt[1] * h)) + OFF + RAD
return x, y
def pixelsToPoint(self, pos):
w, h = self.GetSize()
w, h = w - OFF2 - RAD2, h - OFF2 - RAD2
x = (pos[0] - OFF - RAD) / float(w)
y = (pos[1] - OFF - RAD) / float(h)
return x, y
def pointToValues(self, pt):
x = pt[0] * self.xlen
if type(self.xlen) == int:
x = int(x)
y = pt[1] * (self.yrange[1] - self.yrange[0]) + self.yrange[0]
return x, y
def valuesToPoint(self, val):
x = val[0] / float(self.xlen)
y = (val[1] - self.yrange[0]) / float(self.yrange[1] - self.yrange[0])
return x, y
def borderClip(self, pos):
w, h = self.GetSize()
if pos[0] < (OFF + RAD):
pos[0] = OFF + RAD
elif pos[0] > (w - OFF - RAD):
pos[0] = w - OFF - RAD
if pos[1] < (OFF + RAD):
pos[1] = OFF + RAD
elif pos[1] > (h - OFF - RAD):
pos[1] = h - OFF - RAD
return pos
def pointClip(self, pos):
w, h = self.GetSize()
if self.selected == 0:
leftclip = OFF + RAD
else:
x, y = self.pointToPixels(self.points[self.selected - 1])
leftclip = x
if self.selected == (len(self.points) - 1):
rightclip = w - OFF - RAD
else:
x, y = self.pointToPixels(self.points[self.selected + 1])
rightclip = x
if pos[0] < leftclip:
pos[0] = leftclip
elif pos[0] > rightclip:
pos[0] = rightclip
if pos[1] < (OFF + RAD):
pos[1] = OFF + RAD
elif pos[1] > (h - OFF - RAD):
pos[1] = h - OFF - RAD
return pos
def reset(self):
self.points = [tup for tup in self.init]
self.Refresh()
def getPoints(self):
return [tup for tup in self.points]
def getValues(self):
values = []
for pt in self.points:
x, y = self.pointToValues(pt)
values.append((x, y))
return values
def sendValues(self):
if self.outFunction is not None:
values = self.getValues()
self.outFunction(values)
def OnResize(self, evt):
self.Refresh()
evt.Skip()
def OnLeave(self, evt):
self.pos = (OFF + RAD, OFF + RAD)
self.Refresh()
def OnKeyDown(self, evt):
if self.selected is not None and evt.GetKeyCode() in [wx.WXK_BACK, wx.WXK_DELETE, wx.WXK_NUMPAD_DELETE]:
del self.points[self.selected]
self.sendValues()
self.selected = None
self.Refresh()
elif evt.GetKeyCode() in [wx.WXK_UP, wx.WXK_NUMPAD_UP]:
self.points = [(pt[0], pt[1] + 0.002) for pt in self.points]
self.sendValues()
self.Refresh()
elif evt.GetKeyCode() in [wx.WXK_DOWN, wx.WXK_NUMPAD_DOWN]:
self.points = [(pt[0], pt[1] - 0.002) for pt in self.points]
self.sendValues()
self.Refresh()
evt.Skip()
    def MouseDown(self, evt):
        """Grab an existing breakpoint under the cursor, or insert a new one."""
        self.CaptureMouse()
        w, h = self.GetSize()
        # y axis is flipped: pixel origin is top-left, graph origin bottom-left.
        self.pos = self.borderClip(evt.GetPosition())
        self.pos[1] = h - self.pos[1]
        for i, p in enumerate(self.points):
            x, y = self.pointToPixels(p)
            if wx.Rect(x - AREA, y - AREA, AREA2, AREA2).Contains(self.pos):
                # Grab a point
                self.selected = i
                self.Refresh()
                return
        # Add a point
        pt = self.pixelsToPoint(self.pos)
        for i, p in enumerate(self.points):
            # Tuple comparison: inserts before the first point at or past pt.
            # NOTE(review): if pt compares greater than every existing point
            # (possible when the last point was deleted), no insertion happens
            # and points.index(pt) below would raise ValueError — confirm
            # whether borderClip guarantees this cannot occur.
            if p >= pt:
                self.points.insert(i, pt)
                break
        self.selected = self.points.index(pt)
        self.Refresh()
def MouseUp(self, evt):
if self.HasCapture():
self.ReleaseMouse()
self.sendValues()
def MouseMotion(self, evt):
w, h = self.GetSize()
self.pos = self.borderClip(evt.GetPosition())
self.pos[1] = h - self.pos[1]
if self.HasCapture():
if self.selected is not None:
self.pos = self.pointClip(self.pos)
x, y = self.pixelsToPoint(self.pos)
if self.mode == 4 and y <= 0:
y = 0.000001
self.points[self.selected] = (x, y)
self.Refresh()
def getLogPoints(self, pt1, pt2):
tmp = []
if pt1[1] <= 0.0:
pt1 = (pt1[0], 0.000001)
if pt2[1] <= 0.0:
pt2 = (pt2[0], 0.000001)
if pt1[1] > pt2[1]:
low = pt2[1]
high = pt1[1]
else:
low = pt1[1]
high = pt2[1]
steps = pt2[0] - pt1[0]
if steps > 0:
lrange = high - low
logrange = math.log10(high) - math.log10(low)
logmin = math.log10(low)
diff = (float(pt2[1]) - pt1[1]) / steps
if lrange == 0:
for i in range(steps):
tmp.append((pt1[0] + i, pt1[1]))
else:
for i in range(steps):
ratio = ((pt1[1] + diff * i) - low) / lrange
tmp.append((pt1[0] + i, pow(10, ratio * logrange + logmin)))
return tmp
def getCosLogPoints(self, pt1, pt2):
tmp = []
if pt1[1] <= 0.0:
pt1 = (pt1[0], 0.000001)
if pt2[1] <= 0.0:
pt2 = (pt2[0], 0.000001)
if pt1[1] > pt2[1]:
low = pt2[1]
high = pt1[1]
else:
low = pt1[1]
high = pt2[1]
steps = pt2[0] - pt1[0]
if steps > 0:
lrange = high - low
logrange = math.log10(high) - math.log10(low)
logmin = math.log10(low)
diff = (float(pt2[1]) - pt1[1]) / steps
if lrange == 0:
for i in range(steps):
tmp.append((pt1[0] + i, pt1[1]))
else:
for i in range(steps):
mu = float(i) / steps
mu = (1.0 - math.cos(mu * math.pi)) * 0.5
mu = pt1[1] * (1.0 - mu) + pt2[1] * mu
ratio = (mu - low) / lrange
tmp.append((pt1[0] + i, pow(10, ratio * logrange + logmin)))
return tmp
def getCosPoints(self, pt1, pt2):
tmp = []
steps = pt2[0] - pt1[0]
for i in range(steps):
mu = float(i) / steps
mu2 = (1.0 - math.cos(mu * math.pi)) * 0.5
tmp.append((pt1[0] + i, pt1[1] * (1.0 - mu2) + pt2[1] * mu2))
return tmp
def getExpPoints(self, pt1, pt2):
tmp = []
ambitus = pt2[1] - pt1[1]
steps = pt2[0] - pt1[0]
if steps == 0:
inc = 1.0 / 0.0001
else:
inc = 1.0 / steps
pointer = 0.0
if self.inverse:
if ambitus >= 0:
for i in range(steps):
scl = 1.0 - pow(1.0 - pointer, self.exp)
tmp.append((pt1[0] + i, scl * ambitus + pt1[1]))
pointer += inc
else:
for i in range(steps):
scl = pow(pointer, self.exp)
tmp.append((pt1[0] + i, scl * ambitus + pt1[1]))
pointer += inc
else:
for i in range(steps):
scl = pow(pointer, self.exp)
tmp.append((pt1[0] + i, scl * ambitus + pt1[1]))
pointer += inc
return tmp
def addImaginaryPoints(self, tmp):
lst = []
x = tmp[1][0] - tmp[0][0]
if tmp[0][1] < tmp[1][1]:
y = tmp[0][1] - tmp[1][1]
else:
y = tmp[0][1] + tmp[1][1]
lst.append((x, y))
lst.extend(tmp)
x = tmp[-2][0] - tmp[-1][0]
if tmp[-2][1] < tmp[-1][1]:
y = tmp[-1][1] + tmp[-2][1]
else:
y = tmp[-1][1] - tmp[-2][1]
lst.append((x, y))
return lst
def getCurvePoints(self, pt0, pt1, pt2, pt3):
tmp = []
y0, y1, y2, y3 = pt0[1], pt1[1], pt2[1], pt3[1]
steps = pt2[0] - pt1[0]
for i in range(steps):
mu = float(i) / steps
mu2 = mu * mu
mu3 = mu2 * mu
m0 = (y1 - y0) * (1.0 + self.bias) * (1.0 - self.tension) * 0.5
m0 += (y2 - y1) * (1.0 - self.bias) * (1.0 - self.tension) * 0.5
m1 = (y2 - y1) * (1.0 + self.bias) * (1.0 - self.tension) * 0.5
m1 += (y3 - y2) * (1.0 - self.bias) * (1.0 - self.tension) * 0.5
a0 = 2.0 * mu3 - 3.0 * mu2 + 1.0
a1 = mu3 - 2.0 * mu2 + mu
a2 = mu3 - mu2
a3 = -2.0 * mu3 + 3.0 * mu2
tmp.append((pt1[0] + i, a0 * y1 + a1 * m0 + a2 * m1 + a3 * y2))
return tmp
    def OnPaint(self, evt):
        """Redraws the grapher: grid and axis labels, the curve segments
        between points, the point handles, and the cursor position readout.

        The curve drawn between consecutive points depends on self.mode:
        0=linear, 1=cosine, 2=exponential, 3=curve (tension/bias),
        4=logarithmic, 5=cosine-logarithmic.
        """
        w, h = self.GetSize()
        corners = [(OFF, OFF), (w - OFF, OFF), (w - OFF, h - OFF), (OFF, h - OFF)]
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        gc.SetBrush(wx.Brush("#000000"))
        gc.SetPen(wx.Pen("#000000"))
        if sys.platform == "darwin":
            font, ptsize = dc.GetFont(), dc.GetFont().GetPointSize()
        else:
            font, ptsize = dc.GetFont(), 10
        font.SetPointSize(ptsize - 4)
        dc.SetFont(font)
        dc.SetTextForeground("#888888")
        dc.Clear()
        # Draw grid
        dc.SetPen(wx.Pen("#CCCCCC", 1))
        xstep = int(round((w - OFF2) / 10.0))
        ystep = int(round((h - OFF2) / 10.0))
        for i in range(10):
            xpos = i * xstep + OFF
            dc.DrawLine(xpos, OFF, xpos, h - OFF)
            ypos = i * ystep + OFF
            dc.DrawLine(OFF, ypos, w - OFF, ypos)
            if i > 0:
                # X axis labels: integer or float formatting follows xlen's type.
                if type(self.xlen) == int:
                    t = "%d" % int(self.xlen * i * 0.1)
                else:
                    t = "%.2f" % (self.xlen * i * 0.1)
                dc.DrawText(t, xpos + 2, h - OFF - 10)
            if i < 9:
                # Y axis labels mapped into self.yrange (top to bottom).
                t = "%.2f" % ((9 - i) * 0.1 * (self.yrange[1] - self.yrange[0]) + self.yrange[0])
                dc.DrawText(t, OFF + 2, ypos + ystep - 10)
            else:
                t = "%.2f" % ((9 - i) * 0.1 * (self.yrange[1] - self.yrange[0]) + self.yrange[0])
                dc.DrawText(t, OFF + 2, h - OFF - 10)
        dc.SetPen(wx.Pen("#000000", 1))
        dc.SetBrush(wx.Brush("#000000"))
        # Draw bounding box
        for i in range(4):
            dc.DrawLine(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1])
        # Convert points in pixels
        # NOTE: from here on w/h hold the shrunken drawing-area size.
        w, h = w - OFF2 - RAD2, h - OFF2 - RAD2
        tmp = []
        # Keep each point's normalized y: log modes (4/5) interpolate on the
        # raw values instead of pixel coordinates.
        back_y_for_log = []
        for p in self.points:
            x = int(round(p[0] * w)) + OFF + RAD
            y = int(round((1.0 - p[1]) * h)) + OFF + RAD
            tmp.append((x, y))
            back_y_for_log.append(p[1])
        # Draw lines
        dc.SetPen(wx.Pen("#000000", 1))
        last_p = None
        if len(tmp) > 1:
            if self.mode == 0:
                # Linear: straight segment between consecutive points.
                for i in range(len(tmp) - 1):
                    gc.DrawLines([tmp[i], tmp[i + 1]])
            elif self.mode == 1:
                # Cosine interpolation; fall back to a straight line when a
                # segment is too short to interpolate.
                for i in range(len(tmp) - 1):
                    tmp2 = self.getCosPoints(tmp[i], tmp[i + 1])
                    if i == 0 and len(tmp2) < 2:
                        gc.DrawLines([tmp[i], tmp[i + 1]])
                    if last_p is not None:
                        gc.DrawLines([last_p, tmp[i]])
                    for j in range(len(tmp2) - 1):
                        gc.DrawLines([tmp2[j], tmp2[j + 1]])
                        last_p = tmp2[j + 1]
                if last_p is not None:
                    gc.DrawLines([last_p, tmp[-1]])
            elif self.mode == 2:
                # Exponential interpolation (self.exp / self.inverse).
                for i in range(len(tmp) - 1):
                    tmp2 = self.getExpPoints(tmp[i], tmp[i + 1])
                    if i == 0 and len(tmp2) < 2:
                        gc.DrawLines([tmp[i], tmp[i + 1]])
                    if last_p is not None:
                        gc.DrawLines([last_p, tmp[i]])
                    for j in range(len(tmp2) - 1):
                        gc.DrawLines([tmp2[j], tmp2[j + 1]])
                        last_p = tmp2[j + 1]
                if last_p is not None:
                    gc.DrawLines([last_p, tmp[-1]])
            elif self.mode == 3:
                # Curve mode needs a neighbour on each side of every segment.
                curvetmp = self.addImaginaryPoints(tmp)
                for i in range(1, len(curvetmp) - 2):
                    tmp2 = self.getCurvePoints(curvetmp[i - 1], curvetmp[i], curvetmp[i + 1], curvetmp[i + 2])
                    if i == 1 and len(tmp2) < 2:
                        gc.DrawLines([curvetmp[i], curvetmp[i + 1]])
                    if last_p is not None:
                        gc.DrawLines([last_p, curvetmp[i]])
                    for j in range(len(tmp2) - 1):
                        gc.DrawLines([tmp2[j], tmp2[j + 1]])
                        last_p = tmp2[j + 1]
                if last_p is not None:
                    gc.DrawLines([last_p, tmp[-1]])
            elif self.mode == 4:
                # Log mode: swap pixel y's for the normalized values, then
                # convert each interpolated value back to pixels.
                back_tmp = [p for p in tmp]
                for i in range(len(tmp)):
                    tmp[i] = (tmp[i][0], back_y_for_log[i])
                for i in range(len(tmp) - 1):
                    tmp2 = self.getLogPoints(tmp[i], tmp[i + 1])
                    for j in range(len(tmp2)):
                        tmp2[j] = (tmp2[j][0], int(round((1.0 - tmp2[j][1]) * h)) + OFF + RAD)
                    if i == 0 and len(tmp2) < 2:
                        gc.DrawLines([back_tmp[i], back_tmp[i + 1]])
                    if last_p is not None:
                        gc.DrawLines([last_p, back_tmp[i]])
                    for j in range(len(tmp2) - 1):
                        gc.DrawLines([tmp2[j], tmp2[j + 1]])
                        last_p = tmp2[j + 1]
                if last_p is not None:
                    gc.DrawLines([last_p, back_tmp[-1]])
                tmp = [p for p in back_tmp]
            elif self.mode == 5:
                # Cosine-log mode: same value/pixel swap as mode 4.
                back_tmp = [p for p in tmp]
                for i in range(len(tmp)):
                    tmp[i] = (tmp[i][0], back_y_for_log[i])
                for i in range(len(tmp) - 1):
                    tmp2 = self.getCosLogPoints(tmp[i], tmp[i + 1])
                    for j in range(len(tmp2)):
                        tmp2[j] = (tmp2[j][0], int(round((1.0 - tmp2[j][1]) * h)) + OFF + RAD)
                    if i == 0 and len(tmp2) < 2:
                        gc.DrawLines([back_tmp[i], back_tmp[i + 1]])
                    if last_p is not None:
                        gc.DrawLines([last_p, back_tmp[i]])
                    for j in range(len(tmp2) - 1):
                        gc.DrawLines([tmp2[j], tmp2[j + 1]])
                        last_p = tmp2[j + 1]
                if last_p is not None:
                    gc.DrawLines([last_p, back_tmp[-1]])
                tmp = [p for p in back_tmp]
        # Draw points
        for i, p in enumerate(tmp):
            if i == self.selected:
                # The selected handle is drawn filled white.
                gc.SetBrush(wx.Brush("#FFFFFF"))
                dc.SetBrush(wx.Brush("#FFFFFF"))
            else:
                gc.SetBrush(wx.Brush("#000000"))
                dc.SetBrush(wx.Brush("#000000"))
            gc.DrawEllipse(p[0] - RAD, p[1] - RAD, RAD2, RAD2)
        # Draw position values
        font.SetPointSize(ptsize - 3)
        dc.SetFont(font)
        dc.SetTextForeground("#222222")
        posptx, pospty = self.pixelsToPoint(self.pos)
        xval, yval = self.pointToValues((posptx, pospty))
        # NOTE(review): w was reduced above, so the readout anchor is
        # relative to the shrunken width — presumably intentional.
        if type(self.xlen) == int:
            dc.DrawText("%d, %.3f" % (xval, yval), w - 75, OFF)
        else:
            dc.DrawText("%.3f, %.3f" % (xval, yval), w - 75, OFF)
class TableGrapher(wx.Frame):
    """Top-level frame embedding a Grapher view bound to a table object.

    Points read from ``obj`` are normalized to the 0-1 ranges expected by
    Grapher, and edits are written back through ``obj.replace``.
    """

    def __init__(self, parent=None, obj=None, mode=0, xlen=8192, yrange=(0.0, 1.0)):
        wx.Frame.__init__(self, parent, size=(500, 250))
        pts = obj.getPoints()
        self.yrange = yrange
        # Normalize table points into the unit square used by Grapher.
        for i in range(len(pts)):
            norm_x = pts[i][0] / float(xlen)
            norm_y = (pts[i][1] - float(yrange[0])) / (yrange[1] - yrange[0])
            pts[i] = (norm_x, norm_y)
        # Mode-specific extra keyword arguments for the Grapher constructor.
        extra = {}
        if mode == 2:
            extra = {"exp": obj.exp, "inverse": obj.inverse}
        elif mode == 3:
            extra = {"tension": obj.tension, "bias": obj.bias}
        self.graph = Grapher(
            self, xlen=xlen, yrange=yrange, init=pts, mode=mode, outFunction=obj.replace, **extra
        )
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        self.fileMenu.Append(9999, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self.close, id=9999)
        self.fileMenu.AppendSeparator()
        for item_id, label in (
            (10000, "Copy all points to the clipboard (4 digits of precision)\tCtrl+C"),
            (10001, "Copy all points to the clipboard (full precision)\tShift+Ctrl+C"),
        ):
            self.fileMenu.Append(item_id, label, kind=wx.ITEM_NORMAL)
            self.Bind(wx.EVT_MENU, self.copy, id=item_id)
        self.fileMenu.AppendSeparator()
        self.fileMenu.Append(10002, "Reset\tCtrl+R", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self.reset, id=10002)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)

    def close(self, evt):
        self.Destroy()

    def copy(self, evt):
        """Puts the graph's points on the clipboard; menu id 10000 rounds
        floats to 4 digits, 10001 keeps full precision (repr of the list)."""
        pts = self.graph.getValues()
        if evt.GetId() == 10000:
            rendered = []
            for pt in pts:
                if type(pt[0]) == int:
                    rendered.append("%d,%.4f" % (pt[0], pt[1]))
                else:
                    rendered.append("%.4f,%.4f" % (pt[0], pt[1]))
            pstr = "[" + ",".join("(%s)" % r for r in rendered) + "]"
        else:
            pstr = str(pts)
        data = wx.TextDataObject(pstr)
        if wx.TheClipboard.Open():
            wx.TheClipboard.Clear()
            wx.TheClipboard.SetData(data)
            wx.TheClipboard.Close()

    def reset(self, evt):
        self.graph.reset()
class DataMultiSlider(wx.Panel):
    """Editable contour display: one value per horizontal slot.

    Values from ``init`` are drawn as a filled outline; clicking or
    dragging rewrites them.  After a change-triggered repaint, the whole
    value list is passed to ``outFunction`` (if any).
    """

    def __init__(self, parent, init, yrange=(0, 1), outFunction=None, pos=(0, 0), size=(300, 200), style=0):
        wx.Panel.__init__(self, parent, pos=pos, size=size, style=style)
        self.backgroundColour = BACKGROUND_COLOUR
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetBackgroundColour(self.backgroundColour)
        self.Bind(wx.EVT_SIZE, self.OnResize)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
        self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
        self.Bind(wx.EVT_MOTION, self.MouseMotion)
        # When True, the next OnPaint forwards self.values to outFunction.
        self.changed = True
        self.values = [v for v in init]
        self.len = len(self.values)
        self.yrange = (float(yrange[0]), float(yrange[1]))
        self.outFunction = outFunction
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
        else:
            self.dcref = wx.PaintDC

    def OnResize(self, event):
        self.Layout()
        wx.CallAfter(self.Refresh)

    def update(self, points):
        """Replaces all values and schedules a repaint (notifies outFunction)."""
        self.values = points
        self.changed = True
        wx.CallAfter(self.Refresh)

    def getValues(self):
        """Returns the current list of values."""
        return self.values

    def _clampIndex(self, xpixel, barwidth):
        """Converts an x pixel coordinate into a valid index of self.values.

        Bug fix: a click exactly on the right edge used to yield
        int(w / barwidth) == self.len and raise IndexError.
        """
        index = int(xpixel / barwidth)
        if index < 0:
            index = 0
        elif index >= self.len:
            index = self.len - 1
        return index

    def OnPaint(self, event):
        w, h = self.GetSize()
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        dc.SetBrush(wx.Brush("#FFFFFF"))
        dc.SetPen(wx.Pen("#FFFFFF"))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        gc.SetBrush(wx.Brush("#000000"))
        gc.SetPen(wx.Pen("#000000"))
        scl = self.yrange[1] - self.yrange[0]
        mini = self.yrange[0]
        bw = float(w) / self.len
        points = [(0, h)]
        x = 0
        if bw >= 1:
            # One bar (two corner points) per value.
            for i in range(self.len):
                y = h - ((self.values[i] - mini) / scl * h)
                points.append((x, y))
                x = (i + 1) * bw
                points.append((x, y))
        else:
            # More values than pixels: draw the max of each pixel's slice.
            vals_per_px = 1 / bw  # renamed from `slice` (shadowed a builtin)
            p1 = 0
            for i in range(w):
                p2 = int((i + 1) * vals_per_px)
                y = h - ((max(self.values[p1:p2]) - mini) / scl * h)
                points.append((i, y))
                p1 = p2
            points.append((w, y))
        points.append((w, h))
        gc.DrawLines(points)
        if self.outFunction is not None and self.changed:
            self.changed = False
            self.outFunction(self.values)

    def MouseDown(self, evt):
        w, h = self.GetSize()
        self.lastpos = pos = evt.GetPosition()
        self.CaptureMouse()
        scl = self.yrange[1] - self.yrange[0]
        mini = self.yrange[0]
        bw = float(w) / self.len
        x = self._clampIndex(pos[0], bw)
        y = (h - pos[1]) / float(h) * scl + mini
        self.values[x] = y
        self.changed = True
        wx.CallAfter(self.Refresh)
        evt.Skip()

    def MouseUp(self, evt):
        if self.HasCapture():
            self.ReleaseMouse()

    def MouseMotion(self, evt):
        w, h = self.GetSize()
        pos = evt.GetPosition()
        # Clip the cursor to the panel bounds.
        if pos[0] < 0:
            pos[0] = 0
        elif pos[0] > w:
            pos[0] = w
        if pos[1] < 0:
            pos[1] = 0
        elif pos[1] > h:
            pos[1] = h
        if self.HasCapture() and evt.Dragging() and evt.LeftIsDown():
            scl = self.yrange[1] - self.yrange[0]
            mini = self.yrange[0]
            bw = float(w) / self.len
            # Interpolate linearly between the previous and current cursor
            # positions so fast drags fill every slot in between.
            x1 = self._clampIndex(self.lastpos[0], bw)
            y1 = (h - self.lastpos[1]) / float(h) * scl + mini
            x2 = self._clampIndex(pos[0], bw)
            y2 = (h - pos[1]) / float(h) * scl + mini
            step = abs(x2 - x1)
            if step > 1:
                inc = (y2 - y1) / step
                if x2 > x1:
                    for i in range(0, step):
                        self.values[x1 + i] = y1 + inc * i
                else:
                    for i in range(1, step):
                        self.values[x1 - i] = y1 + inc * i
            if x2 >= 0 and x2 < self.len:
                self.values[x2] = y2
            self.lastpos = pos
            self.changed = True
            wx.CallAfter(self.Refresh)
class DataTableGrapher(wx.Frame):
    """Frame wrapping a DataMultiSlider bound to a data-table object."""

    def __init__(self, parent=None, obj=None, yrange=(0.0, 1.0)):
        wx.Frame.__init__(self, parent, size=(500, 250))
        self.obj = obj
        self.length = len(self.obj._get_current_data())
        self.multi = DataMultiSlider(self, self.obj._get_current_data(), yrange, outFunction=self.obj.replace)
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        self.fileMenu.Append(9999, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self.close, id=9999)
        self.fileMenu.AppendSeparator()
        for item_id, label in (
            (10000, "Copy all points to the clipboard (4 digits of precision)\tCtrl+C"),
            (10001, "Copy all points to the clipboard (full precision)\tShift+Ctrl+C"),
        ):
            self.fileMenu.Append(item_id, label, kind=wx.ITEM_NORMAL)
            self.Bind(wx.EVT_MENU, self.copy, id=item_id)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)

    def getLength(self):
        return self.length

    def close(self, evt):
        self.Destroy()

    def update(self, samples):
        self.multi.update(samples)

    def copy(self, evt):
        """Puts the slider values on the clipboard; menu id 10000 rounds
        to 4 digits, 10001 copies the full-precision repr."""
        values = self.multi.getValues()
        if evt.GetId() == 10000:
            pstr = "[%s]" % ", ".join("%.4f" % v for v in values)
        else:
            pstr = str(values)
        clip = wx.TextDataObject(pstr)
        if wx.TheClipboard.Open():
            wx.TheClipboard.Clear()
            wx.TheClipboard.SetData(clip)
            wx.TheClipboard.Close()
class ExprLexer(object):
    """Defines simple interface for custom lexer objects."""

    # Style ids produced by StyleText; ExprEditor.setStyle maps each one
    # to a color/font spec.
    (
        STC_EXPR_DEFAULT,
        STC_EXPR_KEYWORD,
        STC_EXPR_KEYWORD2,
        STC_EXPR_COMMENT,
        STC_EXPR_VARIABLE,
        STC_EXPR_LETVARIABLE,
    ) = list(range(6))

    def __init__(self):
        super(ExprLexer, self).__init__()
        self.alpha = "abcdefghijklmnopqrstuvwxyz"
        self.digits = "0123456789"
        # Function/constant names highlighted with the primary keyword style.
        self.keywords = [
            "sin",
            "cos",
            "tan",
            "tanh",
            "atan",
            "atan2",
            "sqrt",
            "log",
            "sr",
            "log2",
            "log10",
            "pow",
            "abs",
            "floor",
            "ceil",
            "exp",
            "round",
            "min",
            "max",
            "randf",
            "randi",
            "sah",
            "const",
            "pi",
            "twopi",
            "e",
            "if",
            "rpole",
            "rzero",
            "neg",
            "and",
            "or",
            "wrap",
            "delay",
            "complex",
            "real",
            "imag",
            "cpole",
            "czero",
            "out",
        ]
        # Declaration keywords highlighted with the secondary style.
        self.keywords2 = ["define", "load", "var", "let"]

    def StyleText(self, evt):
        """Handle the EVT_STC_STYLENEEDED event."""
        stc = evt.GetEventObject()
        # Restart styling at the beginning of the first unstyled line.
        last_styled_pos = stc.GetEndStyled()
        line = stc.LineFromPosition(last_styled_pos)
        start_pos = stc.PositionFromLine(line)
        end_pos = evt.GetPosition()
        # `$...` spans get the variable style, `#...` the let-variable
        # style; both end at whitespace or a parenthesis.
        var = letvar = False
        while start_pos < end_pos:
            stc.StartStyling(start_pos)
            curchar = chr(stc.GetCharAt(start_pos))
            if curchar == "$":
                var = True
            elif var and curchar in " \t\n()":
                var = False
            if curchar == "#":
                letvar = True
            elif letvar and curchar in " \t\n()":
                letvar = False
            if var:
                style = self.STC_EXPR_VARIABLE
                stc.SetStyling(1, style)
                start_pos += 1
            elif letvar:
                style = self.STC_EXPR_LETVARIABLE
                stc.SetStyling(1, style)
                start_pos += 1
            elif curchar in self.alpha:
                # Style a whole word at once: keyword, keyword2 or default.
                start = stc.WordStartPosition(start_pos, True)
                end = stc.WordEndPosition(start, True)
                word = stc.GetTextRange(start, end)
                if word in self.keywords:
                    style = self.STC_EXPR_KEYWORD
                    stc.SetStyling(len(word), style)
                elif word in self.keywords2:
                    style = self.STC_EXPR_KEYWORD2
                    stc.SetStyling(len(word), style)
                else:
                    style = self.STC_EXPR_DEFAULT
                    stc.SetStyling(len(word), style)
                start_pos += len(word)
            elif curchar == "/" and chr(stc.GetCharAt(start_pos + 1)) == "/":
                # A `//` comment runs to the end of the line.
                eol = stc.GetLineEndPosition(stc.LineFromPosition(start_pos))
                style = self.STC_EXPR_COMMENT
                stc.SetStyling(eol - start_pos, style)
                start_pos = eol
            else:
                style = self.STC_EXPR_DEFAULT
                stc.SetStyling(1, style)
                start_pos += 1
class ExprEditor(stc.StyledTextCtrl):
    """Styled text editor for Expr source code.

    Uses ExprLexer for syntax highlighting and pushes the editor text to
    ``obj.expr`` on Ctrl/Cmd+Return (see onExecute).
    """

    def __init__(self, parent, id=-1, obj=None):
        stc.StyledTextCtrl.__init__(self, parent, id)
        self.obj = obj
        # Platform-specific accelerator modifier and monospaced font.
        if sys.platform == "darwin":
            accel_ctrl = wx.ACCEL_CMD
            self.faces = {"mono": "Monaco", "size": 12}
        else:
            accel_ctrl = wx.ACCEL_CTRL
            self.faces = {"mono": "Monospace", "size": 10}
        # Ctrl+Return executes; Ctrl+Z / Shift+Ctrl+Z undo/redo.
        atable = wx.AcceleratorTable(
            [
                (accel_ctrl, wx.WXK_RETURN, 10000),
                (accel_ctrl, ord("z"), wx.ID_UNDO),
                (accel_ctrl | wx.ACCEL_SHIFT, ord("z"), wx.ID_REDO),
            ]
        )
        self.SetAcceleratorTable(atable)
        self.Bind(wx.EVT_MENU, self.onExecute, id=10000)
        self.Bind(wx.EVT_MENU, self.undo, id=wx.ID_UNDO)
        self.Bind(wx.EVT_MENU, self.redo, id=wx.ID_REDO)
        self.Bind(stc.EVT_STC_UPDATEUI, self.OnUpdateUI)
        self.lexer = ExprLexer()
        self.currentfile = ""
        self.modified = False
        self.setup()
        self.setCmdKeys()
        self.setStyle()
        self.SetText(self.obj.expr)

    def undo(self, evt):
        self.Undo()

    def redo(self, evt):
        self.Redo()

    def setup(self):
        """Basic editor preferences: 2-space soft tabs, thin margins."""
        self.SetIndent(2)
        self.SetBackSpaceUnIndents(True)
        self.SetTabIndents(True)
        self.SetTabWidth(2)
        self.SetUseTabs(False)
        self.SetMargins(2, 2)
        self.SetMarginWidth(1, 1)

    def setCmdKeys(self):
        """Binds Ctrl+= / Ctrl+- to zoom in/out."""
        self.CmdKeyAssign(ord("="), stc.STC_SCMOD_CTRL, stc.STC_CMD_ZOOMIN)
        self.CmdKeyAssign(ord("-"), stc.STC_SCMOD_CTRL, stc.STC_CMD_ZOOMOUT)

    def setStyle(self):
        """Installs the container lexer and the color specs for each of
        ExprLexer's style ids."""
        self.SetLexer(wx.stc.STC_LEX_CONTAINER)
        self.SetStyleBits(5)
        self.Bind(wx.stc.EVT_STC_STYLENEEDED, self.OnStyling)
        self.SetCaretForeground("#000000")
        self.SetCaretWidth(2)
        # Global default styles for all languages
        self.StyleSetSpec(stc.STC_STYLE_DEFAULT, "face:%(mono)s,size:%(size)d" % self.faces)
        self.StyleClearAll()
        self.StyleSetSpec(stc.STC_STYLE_DEFAULT, "face:%(mono)s,size:%(size)d" % self.faces)
        self.StyleSetSpec(stc.STC_STYLE_CONTROLCHAR, "face:%(mono)s" % self.faces)
        self.StyleSetSpec(stc.STC_STYLE_BRACELIGHT, "fore:#FFFFFF,back:#0000FF,bold")
        self.StyleSetSpec(stc.STC_STYLE_BRACEBAD, "fore:#000000,back:#FF0000,bold")
        # Expr specific styles
        self.StyleSetSpec(self.lexer.STC_EXPR_DEFAULT, "fore:#000000,face:%(mono)s,size:%(size)d" % self.faces)
        self.StyleSetSpec(self.lexer.STC_EXPR_KEYWORD, "fore:#3300DD,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_EXPR_KEYWORD2, "fore:#0033FF,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_EXPR_VARIABLE, "fore:#006600,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_EXPR_LETVARIABLE, "fore:#555500,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_EXPR_COMMENT, "fore:#444444,face:%(mono)s,size:%(size)d,italic" % self.faces)
        self.SetSelBackground(1, "#CCCCDD")

    def OnStyling(self, evt):
        self.lexer.StyleText(evt)

    def loadfile(self, filename):
        """Loads `filename` into the editor and shows it in the title bar."""
        self.LoadFile(filename)
        self.currentfile = filename
        self.GetParent().SetTitle(self.currentfile)

    def savefile(self, filename):
        """Saves the editor content to `filename` and refreshes the UI."""
        self.currentfile = filename
        self.GetParent().SetTitle(self.currentfile)
        self.SaveFile(filename)
        self.OnUpdateUI(None)

    def OnUpdateUI(self, evt):
        # check for matching braces
        braceAtCaret = -1
        braceOpposite = -1
        charBefore = None
        caretPos = self.GetCurrentPos()
        if caretPos > 0:
            charBefore = self.GetCharAt(caretPos - 1)
            styleBefore = self.GetStyleAt(caretPos - 1)
        # check before
        if charBefore and chr(charBefore) in "[]{}()":
            braceAtCaret = caretPos - 1
        # check after
        if braceAtCaret < 0:
            charAfter = self.GetCharAt(caretPos)
            styleAfter = self.GetStyleAt(caretPos)
            if charAfter and chr(charAfter) in "[]{}()":
                braceAtCaret = caretPos
        if braceAtCaret >= 0:
            braceOpposite = self.BraceMatch(braceAtCaret)
        if braceAtCaret != -1 and braceOpposite == -1:
            # Unmatched brace at the caret: highlight it as bad.
            self.BraceBadLight(braceAtCaret)
        else:
            self.BraceHighlight(braceAtCaret, braceOpposite)
        # Check if horizontal scrollbar is needed
        self.checkScrollbar()

    def checkScrollbar(self):
        """Shows the horizontal scrollbar only when some line is wider than
        the visible area (estimated from char width and zoom level)."""
        lineslength = [self.LineLength(i) + 1 for i in range(self.GetLineCount())]
        maxlength = max(lineslength)
        width = self.GetCharWidth() + (self.GetZoom() * 0.5)
        if (self.GetSize()[0]) < (maxlength * width):
            self.SetUseHorizontalScrollBar(True)
        else:
            self.SetUseHorizontalScrollBar(False)

    def onExecute(self, evt):
        """Pushes the current text to the Expr object, preserving the caret."""
        pos = self.GetCurrentPos()
        self.obj.expr = self.GetText()
        self.SetCurrentPos(pos)
        self.SetSelection(pos, pos)
class ExprEditorFrame(wx.Frame):
    """Top-level window hosting an ExprEditor bound to an Expr object.

    Registers itself on the object as ``_editor`` so the object can push
    updates back, and unregisters on close.
    """

    def __init__(self, parent=None, obj=None):
        wx.Frame.__init__(self, parent, size=(650, 450))
        self.obj = obj
        self.obj._editor = self
        self.editor = ExprEditor(self, -1, self.obj)
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        for item_id, label, handler in (
            (wx.ID_OPEN, "Open\tCtrl+O", self.open),
            (wx.ID_CLOSE, "Close\tCtrl+W", self.close),
        ):
            self.fileMenu.Append(item_id, label)
            self.Bind(wx.EVT_MENU, handler, id=item_id)
        self.fileMenu.AppendSeparator()
        for item_id, label, handler in (
            (wx.ID_SAVE, "Save\tCtrl+S", self.save),
            (wx.ID_SAVEAS, "Save As...\tShift+Ctrl+S", self.saveas),
        ):
            self.fileMenu.Append(item_id, label)
            self.Bind(wx.EVT_MENU, handler, id=item_id)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)

    def open(self, evt):
        dlg = wx.FileDialog(
            self, message="Choose a file", defaultDir=os.path.expanduser("~"), defaultFile="", style=wx.FD_OPEN
        )
        if dlg.ShowModal() == wx.ID_OK:
            self.editor.loadfile(ensureNFD(dlg.GetPath()))
        dlg.Destroy()

    def close(self, evt):
        self.obj._editor = None
        self.Destroy()

    def save(self, evt):
        current = self.editor.currentfile
        if current:
            self.editor.savefile(current)
        else:
            # No file yet: fall back to Save As.
            self.saveas(None)

    def saveas(self, evt):
        default_name = os.path.split(self.editor.currentfile)[1]
        dlg = wx.FileDialog(
            self,
            message="Save file as ...",
            defaultDir=os.path.expanduser("~"),
            defaultFile=default_name,
            style=wx.FD_SAVE,
        )
        dlg.SetFilterIndex(0)
        if dlg.ShowModal() == wx.ID_OK:
            self.editor.savefile(ensureNFD(dlg.GetPath()))
        dlg.Destroy()

    def update(self, text):
        self.editor.SetText(text)
class MMLLexer(object):
    """Defines simple interface for custom lexer objects."""

    # Style ids produced by StyleText; MMLEditor.setStyle maps each one to
    # a color/font spec.
    STC_MML_DEFAULT, STC_MML_KEYWORD, STC_MML_KEYWORD2, STC_MML_COMMENT, STC_MML_VARIABLE, STC_MML_VOICE_TOKEN = list(
        range(6)
    )

    def __init__(self):
        super(MMLLexer, self).__init__()
        self.alpha = "abcdefghijklmnopqrstuvwxyz"
        self.digits = "0123456789"
        # Note names a-g plus r, optionally suffixed with one digit (0-9).
        notes = ["a", "b", "c", "d", "e", "f", "g", "r"]
        self.keywords = notes + ["%s%d" % (n, i) for n in notes for i in range(10)]
        # t/o/v statements with their numeric suffix ranges.
        stmts = ["t", "o", "v"]
        self.keywords2 = (
            stmts + ["t%d" % i for i in range(256)] + ["o%d" % i for i in range(16)] + ["v%d" % i for i in range(101)]
        )

    def StyleText(self, evt):
        """Handle the EVT_STC_STYLENEEDED event."""
        stc = evt.GetEventObject()
        # Restart styling at the beginning of the first unstyled line.
        last_styled_pos = stc.GetEndStyled()
        line = stc.LineFromPosition(last_styled_pos)
        start_pos = stc.PositionFromLine(line)
        end_pos = evt.GetPosition()
        # `x`/`y`/`z` spans get the variable style; `#...` the voice-token
        # style; both end at whitespace.
        userXYZ = voiceToken = False
        while start_pos < end_pos:
            stc.StartStyling(start_pos)
            curchar = chr(stc.GetCharAt(start_pos))
            if curchar in "xyz":
                userXYZ = True
            elif userXYZ and curchar in " \t\n":
                userXYZ = False
            if curchar == "#":
                voiceToken = True
            elif voiceToken and curchar in " \t\n":
                voiceToken = False
            if userXYZ:
                style = self.STC_MML_VARIABLE
                stc.SetStyling(1, style)
                start_pos += 1
            elif voiceToken:
                style = self.STC_MML_VOICE_TOKEN
                stc.SetStyling(1, style)
                start_pos += 1
            elif curchar in self.alpha:
                # Style a whole word at once: keyword, keyword2 or default.
                start = stc.WordStartPosition(start_pos, True)
                end = stc.WordEndPosition(start, True)
                word = stc.GetTextRange(start, end)
                if word in self.keywords:
                    style = self.STC_MML_KEYWORD
                    stc.SetStyling(len(word), style)
                elif word in self.keywords2:
                    style = self.STC_MML_KEYWORD2
                    stc.SetStyling(len(word), style)
                else:
                    style = self.STC_MML_DEFAULT
                    stc.SetStyling(len(word), style)
                start_pos += len(word)
            elif curchar == ";":
                # A `;` comment runs to the end of the line.
                eol = stc.GetLineEndPosition(stc.LineFromPosition(start_pos))
                style = self.STC_MML_COMMENT
                stc.SetStyling(eol - start_pos, style)
                start_pos = eol
            else:
                style = self.STC_MML_DEFAULT
                stc.SetStyling(1, style)
                start_pos += 1
class MMLEditor(stc.StyledTextCtrl):
    """Styled text editor for MML (music macro language) source.

    Uses MMLLexer for syntax highlighting and pushes the editor text to
    ``obj.music`` on Ctrl/Cmd+Return (see onExecute).
    """

    def __init__(self, parent, id=-1, obj=None):
        stc.StyledTextCtrl.__init__(self, parent, id)
        self.obj = obj
        # Platform-specific accelerator modifier and monospaced font.
        if sys.platform == "darwin":
            accel_ctrl = wx.ACCEL_CMD
            self.faces = {"mono": "Monaco", "size": 12}
        else:
            accel_ctrl = wx.ACCEL_CTRL
            self.faces = {"mono": "Monospace", "size": 10}
        # Ctrl+Return executes; Ctrl+Z / Shift+Ctrl+Z undo/redo.
        atable = wx.AcceleratorTable(
            [
                (accel_ctrl, wx.WXK_RETURN, 10000),
                (accel_ctrl, ord("z"), wx.ID_UNDO),
                (accel_ctrl | wx.ACCEL_SHIFT, ord("z"), wx.ID_REDO),
            ]
        )
        self.SetAcceleratorTable(atable)
        self.Bind(wx.EVT_MENU, self.onExecute, id=10000)
        self.Bind(wx.EVT_MENU, self.undo, id=wx.ID_UNDO)
        self.Bind(wx.EVT_MENU, self.redo, id=wx.ID_REDO)
        self.Bind(stc.EVT_STC_UPDATEUI, self.OnUpdateUI)
        self.lexer = MMLLexer()
        self.currentfile = ""
        self.modified = False
        self.setup()
        self.setCmdKeys()
        self.setStyle()
        # `obj.music` may be either a path to an MML file or the MML string
        # itself.
        if os.path.isfile(self.obj.music):
            with open(self.obj.music, "r") as f:
                music = f.read()
        else:
            music = self.obj.music
        self.SetText(music)

    def undo(self, evt):
        self.Undo()

    def redo(self, evt):
        self.Redo()

    def setup(self):
        """Basic editor preferences: 2-space soft tabs, thin margins."""
        self.SetIndent(2)
        self.SetBackSpaceUnIndents(True)
        self.SetTabIndents(True)
        self.SetTabWidth(2)
        self.SetUseTabs(False)
        self.SetMargins(2, 2)
        self.SetMarginWidth(1, 1)

    def setCmdKeys(self):
        """Binds Ctrl+= / Ctrl+- to zoom in/out."""
        self.CmdKeyAssign(ord("="), stc.STC_SCMOD_CTRL, stc.STC_CMD_ZOOMIN)
        self.CmdKeyAssign(ord("-"), stc.STC_SCMOD_CTRL, stc.STC_CMD_ZOOMOUT)

    def setStyle(self):
        """Installs the container lexer and the color specs for each of
        MMLLexer's style ids."""
        self.SetLexer(wx.stc.STC_LEX_CONTAINER)
        self.SetStyleBits(5)
        self.Bind(wx.stc.EVT_STC_STYLENEEDED, self.OnStyling)
        self.SetCaretForeground("#000000")
        self.SetCaretWidth(2)
        # Global default styles for all languages
        self.StyleSetSpec(stc.STC_STYLE_DEFAULT, "face:%(mono)s,size:%(size)d" % self.faces)
        self.StyleClearAll()
        self.StyleSetSpec(stc.STC_STYLE_DEFAULT, "face:%(mono)s,size:%(size)d" % self.faces)
        self.StyleSetSpec(stc.STC_STYLE_CONTROLCHAR, "face:%(mono)s" % self.faces)
        self.StyleSetSpec(stc.STC_STYLE_BRACELIGHT, "fore:#FFFFFF,back:#0000FF,bold")
        self.StyleSetSpec(stc.STC_STYLE_BRACEBAD, "fore:#000000,back:#FF0000,bold")
        # MML specific styles
        self.StyleSetSpec(self.lexer.STC_MML_DEFAULT, "fore:#000000,face:%(mono)s,size:%(size)d" % self.faces)
        self.StyleSetSpec(self.lexer.STC_MML_KEYWORD, "fore:#3300DD,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_MML_KEYWORD2, "fore:#0033FF,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_MML_VARIABLE, "fore:#006600,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_MML_VOICE_TOKEN, "fore:#555500,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_MML_COMMENT, "fore:#444444,face:%(mono)s,size:%(size)d,italic" % self.faces)
        self.SetSelBackground(1, "#CCCCDD")

    def OnStyling(self, evt):
        self.lexer.StyleText(evt)

    def loadfile(self, filename):
        """Loads `filename` into the editor and shows it in the title bar."""
        self.LoadFile(filename)
        self.currentfile = filename
        self.GetParent().SetTitle(self.currentfile)

    def savefile(self, filename):
        """Saves the editor content to `filename` and refreshes the UI."""
        self.currentfile = filename
        self.GetParent().SetTitle(self.currentfile)
        self.SaveFile(filename)
        self.OnUpdateUI(None)

    def OnUpdateUI(self, evt):
        # check for matching braces
        braceAtCaret = -1
        braceOpposite = -1
        charBefore = None
        caretPos = self.GetCurrentPos()
        if caretPos > 0:
            charBefore = self.GetCharAt(caretPos - 1)
            styleBefore = self.GetStyleAt(caretPos - 1)
        # check before
        if charBefore and chr(charBefore) in "[]{}()":
            braceAtCaret = caretPos - 1
        # check after
        if braceAtCaret < 0:
            charAfter = self.GetCharAt(caretPos)
            styleAfter = self.GetStyleAt(caretPos)
            if charAfter and chr(charAfter) in "[]{}()":
                braceAtCaret = caretPos
        if braceAtCaret >= 0:
            braceOpposite = self.BraceMatch(braceAtCaret)
        if braceAtCaret != -1 and braceOpposite == -1:
            # Unmatched brace at the caret: highlight it as bad.
            self.BraceBadLight(braceAtCaret)
        else:
            self.BraceHighlight(braceAtCaret, braceOpposite)
        # Check if horizontal scrollbar is needed
        self.checkScrollbar()

    def checkScrollbar(self):
        """Shows the horizontal scrollbar only when some line is wider than
        the visible area (estimated from char width and zoom level)."""
        lineslength = [self.LineLength(i) + 1 for i in range(self.GetLineCount())]
        maxlength = max(lineslength)
        width = self.GetCharWidth() + (self.GetZoom() * 0.5)
        if (self.GetSize()[0]) < (maxlength * width):
            self.SetUseHorizontalScrollBar(True)
        else:
            self.SetUseHorizontalScrollBar(False)

    def onExecute(self, evt):
        """Pushes the current text to the MML object, preserving the caret."""
        pos = self.GetCurrentPos()
        self.obj.music = self.GetText()
        self.SetCurrentPos(pos)
        self.SetSelection(pos, pos)
class MMLEditorFrame(wx.Frame):
    """Top-level window hosting an MMLEditor bound to an MML object.

    Registers itself on the object as ``_editor`` so the object can push
    updates back, and unregisters on close.
    """

    def __init__(self, parent=None, obj=None):
        wx.Frame.__init__(self, parent, size=(650, 450))
        self.obj = obj
        self.obj._editor = self
        self.editor = MMLEditor(self, -1, self.obj)
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        for item_id, label, handler in (
            (wx.ID_OPEN, "Open\tCtrl+O", self.open),
            (wx.ID_CLOSE, "Close\tCtrl+W", self.close),
        ):
            self.fileMenu.Append(item_id, label)
            self.Bind(wx.EVT_MENU, handler, id=item_id)
        self.fileMenu.AppendSeparator()
        for item_id, label, handler in (
            (wx.ID_SAVE, "Save\tCtrl+S", self.save),
            (wx.ID_SAVEAS, "Save As...\tShift+Ctrl+S", self.saveas),
        ):
            self.fileMenu.Append(item_id, label)
            self.Bind(wx.EVT_MENU, handler, id=item_id)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)

    def open(self, evt):
        dlg = wx.FileDialog(
            self, message="Choose a file", defaultDir=os.path.expanduser("~"), defaultFile="", style=wx.FD_OPEN
        )
        if dlg.ShowModal() == wx.ID_OK:
            self.editor.loadfile(ensureNFD(dlg.GetPath()))
        dlg.Destroy()

    def close(self, evt):
        self.obj._editor = None
        self.Destroy()

    def save(self, evt):
        current = self.editor.currentfile
        if current:
            self.editor.savefile(current)
        else:
            # No file yet: fall back to Save As.
            self.saveas(None)

    def saveas(self, evt):
        default_name = os.path.split(self.editor.currentfile)[1]
        dlg = wx.FileDialog(
            self,
            message="Save file as ...",
            defaultDir=os.path.expanduser("~"),
            defaultFile=default_name,
            style=wx.FD_SAVE,
        )
        dlg.SetFilterIndex(0)
        if dlg.ShowModal() == wx.ID_OK:
            self.editor.savefile(ensureNFD(dlg.GetPath()))
        dlg.Destroy()

    def update(self, text):
        self.editor.SetText(text)
class Keyboard(wx.Panel):
    def __init__(
        self,
        parent,
        id=wx.ID_ANY,
        pos=wx.DefaultPosition,
        size=wx.DefaultSize,
        poly=64,
        outFunction=None,
        style=wx.TAB_TRAVERSAL,
    ):
        """Piano-style keyboard panel.

        `poly` is the maximum number of simultaneously held notes;
        `outFunction`, if given, receives (midi-pitch, velocity) tuples.
        """
        wx.Panel.__init__(self, parent, id, pos, size, style)
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetBackgroundColour(BACKGROUND_COLOUR)
        self.parent = parent
        self.outFunction = outFunction
        self.poly = poly
        self.gap = 0
        # Transposition added to every computed pitch.
        self.offset = 12
        # White-key width in pixels; black keys are about half as wide.
        self.w1 = 15
        self.w2 = int(self.w1 / 2) + 1
        # Non-zero: keys toggle on/off; zero: notes last while held.
        self.hold = 1
        self.keyPressed = None
        self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
        self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        self.Bind(wx.EVT_KEY_DOWN, self.OnKeyDown)
        self.Bind(wx.EVT_KEY_UP, self.OnKeyUp)
        # Semitone degrees of the white and black keys within one octave.
        self.white = (0, 2, 4, 5, 7, 9, 11)
        self.black = (1, 3, 6, 8, 10)
        # Currently held key indices and their stored velocities.
        self.whiteSelected = []
        self.blackSelected = []
        self.whiteVelocities = {}
        self.blackVelocities = {}
        # Key rectangles; rebuilt from the real size in _setRects.
        self.whiteKeys = []
        self.blackKeys = []
        self.offRec = wx.Rect(900 - 55, 0, 28, 150)
        self.holdRec = wx.Rect(900 - 27, 0, 27, 150)
        # Key codes currently pressed (to suppress auto-repeat).
        self.keydown = []
        # Computer-keyboard key code -> MIDI pitch (two manual rows).
        self.keymap = {
            90: 36,
            83: 37,
            88: 38,
            68: 39,
            67: 40,
            86: 41,
            71: 42,
            66: 43,
            72: 44,
            78: 45,
            74: 46,
            77: 47,
            44: 48,
            76: 49,
            46: 50,
            59: 51,
            47: 52,
            81: 60,
            50: 61,
            87: 62,
            51: 63,
            69: 64,
            82: 65,
            53: 66,
            84: 67,
            54: 68,
            89: 69,
            55: 70,
            85: 71,
            73: 72,
            57: 73,
            79: 74,
            48: 75,
            80: 76,
        }
        wx.CallAfter(self._setRects)
def getCurrentNotes(self):
"Returns a list of the current notes."
notes = []
for key in self.whiteSelected:
notes.append((self.white[key % 7] + int(key / 7) * 12 + self.offset, 127 - self.whiteVelocities[key]))
for key in self.blackSelected:
notes.append((self.black[key % 5] + int(key / 5) * 12 + self.offset, 127 - self.blackVelocities[key]))
notes.sort()
return notes
    def reset(self):
        "Resets the keyboard state."
        # Send a note-off (velocity 0) for every held key, black then white.
        for key in self.blackSelected:
            pit = self.black[key % 5] + int(key / 5) * 12 + self.offset
            note = (pit, 0)
            if self.outFunction:
                self.outFunction(note)
        for key in self.whiteSelected:
            pit = self.white[key % 7] + int(key / 7) * 12 + self.offset
            note = (pit, 0)
            if self.outFunction:
                self.outFunction(note)
        # Clear the selection/velocity bookkeeping and repaint.
        self.whiteSelected = []
        self.blackSelected = []
        self.whiteVelocities = {}
        self.blackVelocities = {}
        wx.CallAfter(self.Refresh)
    def setPoly(self, poly):
        "Sets the maximum number of notes that can be held at the same time."
        # Takes effect for subsequent key presses only; notes already held
        # beyond the new limit are not released.
        self.poly = poly
    def _setRects(self):
        """Recomputes key and control rectangles from the current size."""
        w, h = self.GetSize()
        # Rectangles for the off/hold controls pinned to the right edge.
        self.offRec = wx.Rect(w - 55, 0, 28, h)
        self.holdRec = wx.Rect(w - 27, 0, 27, h)
        # As many full-width white keys as fit; `gap` is the leftover width.
        num = int(w / self.w1)
        self.gap = w - num * self.w1
        self.whiteKeys = [wx.Rect(i * self.w1, 0, self.w1 - 1, h - 1) for i in range(num)]
        self.blackKeys = []
        height2 = int(h * 4 / 7)
        # Five black keys per octave, laid out in the 2+3 piano pattern.
        for i in range(int(num / 7) + 1):
            space2 = self.w1 * 7 * i
            off = int(self.w1 / 2) + space2 + 3
            self.blackKeys.append(wx.Rect(off, 0, self.w2, height2))
            off += self.w1
            self.blackKeys.append(wx.Rect(off, 0, self.w2, height2))
            off += self.w1 * 2
            self.blackKeys.append(wx.Rect(off, 0, self.w2, height2))
            off += self.w1
            self.blackKeys.append(wx.Rect(off, 0, self.w2, height2))
            off += self.w1
            self.blackKeys.append(wx.Rect(off, 0, self.w2, height2))
        wx.CallAfter(self.Refresh)
    def OnSize(self, evt):
        """Rebuilds the key rectangles whenever the panel is resized."""
        self._setRects()
        wx.CallAfter(self.Refresh)
        evt.Skip()
def OnKeyDown(self, evt):
    # Computer-keyboard note input.  Modified keystrokes (ctrl/alt/...)
    # are left for other handlers.
    if evt.HasAnyModifiers():
        evt.Skip()
        return
    # Only react to mapped keys, and ignore auto-repeat while held down.
    if evt.GetKeyCode() in self.keymap and evt.GetKeyCode() not in self.keydown:
        self.keydown.append(evt.GetKeyCode())
        pit = self.keymap[evt.GetKeyCode()]  # midi pitch from the key map
        deg = pit % 12  # degree within the octave
        total = len(self.blackSelected) + len(self.whiteSelected)
        note = None
        if self.hold:
            # Hold mode: a keystroke toggles the note on/off.
            if deg in self.black:
                which = self.black.index(deg) + int((pit - self.offset) / 12) * 5
                if which in self.blackSelected:
                    self.blackSelected.remove(which)
                    del self.blackVelocities[which]
                    note = (pit, 0)
                else:
                    if total < self.poly:  # respect polyphony limit
                        self.blackSelected.append(which)
                        self.blackVelocities[which] = 100
                        note = (pit, 100)
            elif deg in self.white:
                which = self.white.index(deg) + int((pit - self.offset) / 12) * 7
                if which in self.whiteSelected:
                    self.whiteSelected.remove(which)
                    del self.whiteVelocities[which]
                    note = (pit, 0)
                else:
                    if total < self.poly:
                        self.whiteSelected.append(which)
                        self.whiteVelocities[which] = 100
                        note = (pit, 100)
        else:
            # Momentary mode: only the note-on is handled here;
            # OnKeyUp sends the matching note-off.
            if deg in self.black:
                which = self.black.index(deg) + int((pit - self.offset) / 12) * 5
                if which not in self.blackSelected and total < self.poly:
                    self.blackSelected.append(which)
                    self.blackVelocities[which] = 100
                    note = (pit, 100)
            elif deg in self.white:
                which = self.white.index(deg) + int((pit - self.offset) / 12) * 7
                if which not in self.whiteSelected and total < self.poly:
                    self.whiteSelected.append(which)
                    self.whiteVelocities[which] = 100
                    note = (pit, 100)
        if note and self.outFunction and total < self.poly:
            self.outFunction(note)
        wx.CallAfter(self.Refresh)
    evt.Skip()
def OnKeyUp(self, evt):
    # Key release: in momentary mode, send the matching note-off.
    # In hold mode the toggle already happened in OnKeyDown.
    if evt.HasAnyModifiers():
        evt.Skip()
        return
    if evt.GetKeyCode() in self.keydown:
        # Forget the key so a future press is not treated as auto-repeat.
        del self.keydown[self.keydown.index(evt.GetKeyCode())]
    if not self.hold and evt.GetKeyCode() in self.keymap:
        pit = self.keymap[evt.GetKeyCode()]
        deg = pit % 12
        note = None
        if deg in self.black:
            which = self.black.index(deg) + int((pit - self.offset) / 12) * 5
            if which in self.blackSelected:
                self.blackSelected.remove(which)
                del self.blackVelocities[which]
                note = (pit, 0)
        elif deg in self.white:
            which = self.white.index(deg) + int((pit - self.offset) / 12) * 7
            if which in self.whiteSelected:
                self.whiteSelected.remove(which)
                del self.whiteVelocities[which]
                note = (pit, 0)
        if note and self.outFunction:
            self.outFunction(note)
        wx.CallAfter(self.Refresh)
    evt.Skip()
def MouseUp(self, evt):
    # In momentary mode, releasing the mouse button ends the note that
    # MouseDown started (tracked in self.keyPressed).
    if not self.hold and self.keyPressed is not None:
        key = self.keyPressed[0]  # key index
        pit = self.keyPressed[1]  # midi pitch
        if key in self.blackSelected:
            self.blackSelected.remove(key)
            del self.blackVelocities[key]
        if key in self.whiteSelected:
            self.whiteSelected.remove(key)
            del self.whiteVelocities[key]
        note = (pit, 0)  # note-off
        if self.outFunction:
            self.outFunction(note)
        self.keyPressed = None
    wx.CallAfter(self.Refresh)
    evt.Skip()
def MouseDown(self, evt):
    w, h = self.GetSize()
    pos = evt.GetPosition()
    # Hold button toggles between momentary and hold (toggle) modes;
    # leaving hold mode releases every held note.
    if self.holdRec.Contains(pos):
        if self.hold:
            self.hold = 0
            self.reset()
        else:
            self.hold = 1
        wx.CallAfter(self.Refresh)
        return
    # Octave up/down arrows move the pitch offset by one octave,
    # clamped to [0, 60].
    # NOTE(review): offUpRec/offDownRec are first assigned in OnPaint —
    # confirm a paint always precedes the first click.
    if self.offUpRec.Contains(pos):
        self.offset += 12
        if self.offset > 60:
            self.offset = 60
        wx.CallAfter(self.Refresh)
        return
    if self.offDownRec.Contains(pos):
        self.offset -= 12
        if self.offset < 0:
            self.offset = 0
        wx.CallAfter(self.Refresh)
        return
    total = len(self.blackSelected) + len(self.whiteSelected)
    scanWhite = True  # black keys sit on top; only scan white if no black hit
    note = None
    if self.hold:
        # Hold mode: clicking a key toggles it.  Velocity comes from the
        # vertical click position (closer to the top = louder).
        for i, rec in enumerate(self.blackKeys):
            if rec.Contains(pos):
                pit = self.black[i % 5] + int(i / 5) * 12 + self.offset
                if i in self.blackSelected:
                    self.blackSelected.remove(i)
                    del self.blackVelocities[i]
                    vel = 0
                else:
                    hb = int(h * 4 / 7)  # black key height
                    vel = int((hb - pos[1]) * 127 / hb)
                    if total < self.poly:
                        self.blackSelected.append(i)
                        self.blackVelocities[i] = int(127 - vel)
                note = (pit, vel)
                scanWhite = False
                break
        if scanWhite:
            for i, rec in enumerate(self.whiteKeys):
                if rec.Contains(pos):
                    pit = self.white[i % 7] + int(i / 7) * 12 + self.offset
                    if i in self.whiteSelected:
                        self.whiteSelected.remove(i)
                        del self.whiteVelocities[i]
                        vel = 0
                    else:
                        vel = int((h - pos[1]) * 127 / h)
                        if total < self.poly:
                            self.whiteSelected.append(i)
                            self.whiteVelocities[i] = int(127 - vel)
                    note = (pit, vel)
                    break
        if note and self.outFunction and total < self.poly:
            self.outFunction(note)
    else:
        # Momentary mode: start the note here; MouseUp stops it.
        self.keyPressed = None
        for i, rec in enumerate(self.blackKeys):
            if rec.Contains(pos):
                pit = self.black[i % 5] + int(i / 5) * 12 + self.offset
                if i not in self.blackSelected:
                    hb = int(h * 4 / 7)
                    vel = int((hb - pos[1]) * 127 / hb)
                    if total < self.poly:
                        self.blackSelected.append(i)
                        self.blackVelocities[i] = int(127 - vel)
                        note = (pit, vel)
                        self.keyPressed = (i, pit)
                scanWhite = False
                break
        if scanWhite:
            for i, rec in enumerate(self.whiteKeys):
                if rec.Contains(pos):
                    pit = self.white[i % 7] + int(i / 7) * 12 + self.offset
                    if i not in self.whiteSelected:
                        vel = int((h - pos[1]) * 127 / h)
                        if total < self.poly:
                            self.whiteSelected.append(i)
                            self.whiteVelocities[i] = int(127 - vel)
                            note = (pit, vel)
                            self.keyPressed = (i, pit)
                    break
        if note and self.outFunction and total < self.poly:
            self.outFunction(note)
    wx.CallAfter(self.Refresh)
    evt.Skip()
def OnPaint(self, evt):
    # Full repaint of the keyboard: white keys, black keys, then the
    # octave and hold controls on the right-hand strip.
    w, h = self.GetSize()
    dc = wx.AutoBufferedPaintDC(self)
    dc.SetBrush(wx.Brush("#000000", wx.SOLID))
    dc.Clear()
    dc.SetPen(wx.Pen("#000000", width=1, style=wx.SOLID))
    dc.DrawRectangle(0, 0, w, h)
    # Platform-dependent font size so labels occupy a similar pixel area.
    if sys.platform == "darwin":
        dc.SetFont(wx.Font(12, wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD))
    else:
        dc.SetFont(wx.Font(8, wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD))
    # White keys: a selected key is drawn as a vertical gradient whose
    # darkness encodes its stored velocity; unselected keys are plain white.
    for i, rec in enumerate(self.whiteKeys):
        if i in self.whiteSelected:
            amp = int(self.whiteVelocities[i] * 1.5)
            dc.GradientFillLinear(rec, (250, 250, 250), (amp, amp, amp), wx.SOUTH)
            dc.SetBrush(wx.Brush("#CCCCCC", wx.SOLID))
            dc.SetPen(wx.Pen("#CCCCCC", width=1, style=wx.SOLID))
        else:
            dc.SetBrush(wx.Brush("#FFFFFF", wx.SOLID))
            dc.SetPen(wx.Pen("#CCCCCC", width=1, style=wx.SOLID))
            dc.DrawRectangle(rec)
        # Label middle C; its key index depends on the octave offset.
        if i == (35 - (7 * int(self.offset / 12))):
            if i in self.whiteSelected:
                dc.SetTextForeground("#FFFFFF")
            else:
                dc.SetTextForeground("#000000")
            dc.DrawText("C", rec[0] + 3, rec[3] - 15)
    dc.SetPen(wx.Pen("#000000", width=1, style=wx.SOLID))
    # Black keys: selected keys get a gradient plus an outline; unselected
    # keys are solid black.
    for i, rec in enumerate(self.blackKeys):
        if i in self.blackSelected:
            amp = int(self.blackVelocities[i] * 1.5)
            dc.GradientFillLinear(rec, (250, 250, 250), (amp, amp, amp), wx.SOUTH)
            dc.DrawLine(rec[0], 0, rec[0], rec[3])
            dc.DrawLine(rec[0] + rec[2], 0, rec[0] + rec[2], rec[3])
            dc.DrawLine(rec[0], rec[3], rec[0] + rec[2], rec[3])
            dc.SetBrush(wx.Brush("#DDDDDD", wx.SOLID))
        else:
            dc.SetBrush(wx.Brush("#000000", wx.SOLID))
            dc.SetPen(wx.Pen("#000000", width=1, style=wx.SOLID))
            dc.DrawRectangle(rec)
    # Right-hand control strip: octave selector and HOLD button.
    dc.SetBrush(wx.Brush(BACKGROUND_COLOUR, wx.SOLID))
    dc.SetPen(wx.Pen("#AAAAAA", width=1, style=wx.SOLID))
    dc.DrawRectangle(self.offRec)
    dc.DrawRectangle(self.holdRec)
    dc.SetTextForeground("#000000")
    dc.DrawText("oct", self.offRec[0] + 3, 15)
    x1, y1 = self.offRec[0], self.offRec[1]
    dc.SetBrush(wx.Brush("#000000", wx.SOLID))
    # Up/down triangles; their hit rectangles are stored for MouseDown.
    if sys.platform == "darwin":
        dc.DrawPolygon([wx.Point(x1 + 3, 36), wx.Point(x1 + 10, 29), wx.Point(x1 + 17, 36)])
        self.offUpRec = wx.Rect(x1, 28, x1 + 20, 10)
        dc.DrawPolygon([wx.Point(x1 + 3, 55), wx.Point(x1 + 10, 62), wx.Point(x1 + 17, 55)])
        self.offDownRec = wx.Rect(x1, 54, x1 + 20, 10)
    else:
        dc.DrawPolygon([wx.Point(x1 + 5, 38), wx.Point(x1 + 12, 31), wx.Point(x1 + 19, 38)])
        self.offUpRec = wx.Rect(x1, 30, x1 + 20, 10)
        dc.DrawPolygon([wx.Point(x1 + 5, 57), wx.Point(x1 + 12, 64), wx.Point(x1 + 19, 57)])
        self.offDownRec = wx.Rect(x1, 56, x1 + 20, 10)
    dc.DrawText("%d" % int(self.offset / 12), x1 + 9, 41)
    # HOLD label, drawn vertically; blue while hold mode is active.
    if self.hold:
        dc.SetTextForeground("#0000CC")
    else:
        dc.SetTextForeground("#000000")
    for i, c in enumerate("HOLD"):
        dc.DrawText(c, self.holdRec[0] + 8, int(self.holdRec[3] / 6) * i + 15)
    evt.Skip()
class NoteinKeyboardFrame(wx.Frame):
    """Top-level window hosting a virtual Keyboard widget.

    Keyboard note events are forwarded to ``obj._newNote`` (presumably a
    Notein-like audio object — confirm against callers).
    """

    def __init__(self, parent=None, obj=None):
        wx.Frame.__init__(self, parent, size=(900, 150))
        self.obj = obj
        self.keyboard = Keyboard(self, -1, outFunction=self.obj._newNote)
        # Minimal File menu with a Close item (Ctrl+W).
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        self.fileMenu.Append(wx.ID_CLOSE, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self.close, id=wx.ID_CLOSE)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)

    def close(self, evt):
        self.Destroy()
class ServerGUI(wx.Frame):
    """Control window for an audio server.

    Provides Start/Stop and Rec buttons, a master amplitude slider (dB),
    an optional VU meter, an elapsed-time display and an optional
    one-line Python interpreter.  All server interaction goes through
    the callables passed to ``__init__`` (startf, stopf, recstartf,
    recstopf, ampf, shutdown, getIsBooted, getIsStarted).
    """

    def __init__(
        self,
        parent=None,
        nchnls=2,
        startf=None,
        stopf=None,
        recstartf=None,
        recstopf=None,
        ampf=None,
        started=0,
        locals=None,
        shutdown=None,
        meter=True,
        timer=True,
        amp=1.0,
        exit=True,
        getIsBooted=None,
        getIsStarted=None,
    ):
        # Fixed-size frame (resize border removed from the default style).
        wx.Frame.__init__(self, parent, style=wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)
        self.menubar = wx.MenuBar()
        self.menu = wx.Menu()
        self.menu.Append(22999, "Start/Stop\tCtrl+R", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self.start, id=22999)
        quit_item = self.menu.Append(wx.ID_EXIT, "Quit\tCtrl+Q")
        self.Bind(wx.EVT_MENU, self.on_quit, id=wx.ID_EXIT)
        self.menubar.Append(self.menu, "&File")
        self.SetMenuBar(self.menubar)
        # Server callbacks and options.
        self.shutdown = shutdown
        self.locals = locals
        self.nchnls = nchnls
        self.startf = startf
        self.stopf = stopf
        self.recstartf = recstartf
        self.recstopf = recstopf
        self.ampf = ampf
        self.exit = exit
        self.getIsBooted = getIsBooted
        self.getIsStarted = getIsStarted
        self._started = False
        self._recstarted = False
        self._history = []  # interpreter command history
        self._histo_count = 0  # current position in the history
        panel = wx.Panel(self)
        panel.SetBackgroundColour(BACKGROUND_COLOUR)
        box = wx.BoxSizer(wx.VERTICAL)
        buttonBox = wx.BoxSizer(wx.HORIZONTAL)
        self.startButton = wx.Button(panel, -1, "Start")
        self.startButton.Bind(wx.EVT_BUTTON, self.start)
        buttonBox.Add(self.startButton, 0, wx.LEFT | wx.RIGHT, 5)
        self.recButton = wx.Button(panel, -1, "Rec Start")
        self.recButton.Bind(wx.EVT_BUTTON, self.record)
        buttonBox.Add(self.recButton, 0, wx.RIGHT, 5)
        self.quitButton = wx.Button(panel, -1, "Quit")
        self.quitButton.Bind(wx.EVT_BUTTON, self.on_quit)
        buttonBox.Add(self.quitButton, 0, wx.RIGHT, 5)
        box.Add(buttonBox, 0, wx.TOP, 10)
        box.AddSpacer(10)
        # Master amplitude slider, expressed in decibels.
        box.Add(wx.StaticText(panel, -1, "Amplitude (dB)"), 0, wx.LEFT, 5)
        self.ampScale = ControlSlider(panel, -60, 18, 20.0 * math.log10(amp), size=(202, 16), outFunction=self.setAmp)
        box.Add(self.ampScale, 0, wx.LEFT | wx.RIGHT | wx.EXPAND, 5)
        if meter:
            box.AddSpacer(10)
            self.meter = VuMeter(panel, size=(200, 5 * self.nchnls + 1), numSliders=self.nchnls)
            box.Add(self.meter, 0, wx.LEFT | wx.RIGHT | wx.EXPAND, 5)
            box.AddSpacer(5)
        if timer:
            box.AddSpacer(10)
            tt = wx.StaticText(panel, -1, "Elapsed time (hh:mm:ss:ms)")
            box.Add(tt, 0, wx.LEFT, 5)
            box.AddSpacer(3)
            self.timetext = wx.StaticText(panel, -1, "00 : 00 : 00 : 000")
            box.Add(self.timetext, 0, wx.LEFT, 5)
        if self.locals is not None:
            # One-line Python interpreter with up/down arrow history.
            box.AddSpacer(10)
            t = wx.StaticText(panel, -1, "Interpreter")
            box.Add(t, 0, wx.LEFT, 5)
            tw, th = self.GetTextExtent("|")
            self.text = wx.TextCtrl(panel, -1, "", size=(202, th + 8), style=wx.TE_PROCESS_ENTER)
            self.text.Bind(wx.EVT_TEXT_ENTER, self.getText)
            self.text.Bind(wx.EVT_KEY_DOWN, self.onChar)
            box.Add(self.text, 0, wx.LEFT | wx.RIGHT | wx.EXPAND, 5)
        box.AddSpacer(10)
        panel.SetSizerAndFit(box)
        self.SetClientSize(panel.GetSize())
        self.Bind(wx.EVT_CLOSE, self.on_quit)
        if started == 1:
            # Server already running: sync the GUI without calling startf.
            self.start(None, True)

    def setTime(self, *args):
        # args = (hours, minutes, seconds, milliseconds).
        wx.CallAfter(self.timetext.SetLabel, "%02d : %02d : %02d : %03d" % (args[0], args[1], args[2], args[3]))

    def start(self, evt=None, justSet=False):
        # Toggle the server.  With justSet=True only the GUI state is
        # updated (used when the server was started from code).
        if self._started == False:
            self._started = True
            wx.CallAfter(self.startButton.SetLabel, "Stop")
            if self.exit:
                wx.CallAfter(self.quitButton.Disable)
            if not justSet:
                self.startf()
        else:
            self._started = False
            wx.CallAfter(self.startButton.SetLabel, "Start")
            if self.exit:
                wx.CallAfter(self.quitButton.Enable)
            # TODO: Need a common method for every OSes.
            # wx.CallLater(100, self.stopf)
            # wx.CallAfter(self.stopf)
            if self.getIsStarted():
                self.stopf()

    def record(self, evt):
        # Toggle disk recording via the rec start/stop callbacks.
        if self._recstarted == False:
            self.recstartf()
            self._recstarted = True
            wx.CallAfter(self.recButton.SetLabel, "Rec Stop")
        else:
            self.recstopf()
            self._recstarted = False
            wx.CallAfter(self.recButton.SetLabel, "Rec Start")

    def quit_from_code(self):
        # Thread-safe programmatic quit.
        wx.CallAfter(self.on_quit, None)

    def on_quit(self, evt):
        if self.exit and self.getIsBooted():
            self.shutdown()
            time.sleep(0.25)  # give the audio backend time to shut down
        self.Destroy()
        if self.exit:
            sys.exit()

    def getPrev(self):
        # Recall the previous interpreter history entry (up arrow).
        # NOTE(review): with an empty history this indexes _history[0]
        # and would raise IndexError — confirm callers guard against it.
        self.text.Clear()
        self._histo_count -= 1
        if self._histo_count < 0:
            self._histo_count = 0
        self.text.SetValue(self._history[self._histo_count])
        wx.CallAfter(self.text.SetInsertionPointEnd)

    def getNext(self):
        # Recall the next history entry (down arrow); past the end the
        # field is simply cleared.
        self.text.Clear()
        self._histo_count += 1
        if self._histo_count >= len(self._history):
            self._histo_count = len(self._history)
        else:
            self.text.SetValue(self._history[self._histo_count])
        wx.CallAfter(self.text.SetInsertionPointEnd)

    def getText(self, evt):
        # Execute the typed line in the caller-supplied namespace.
        source = self.text.GetValue()
        self.text.Clear()
        self._history.append(source)
        self._histo_count = len(self._history)
        exec(source, self.locals)

    def onChar(self, evt):
        key = evt.GetKeyCode()
        if key == 315:  # up arrow
            self.getPrev()
            evt.StopPropagation()
        elif key == 317:  # down arrow
            self.getNext()
            evt.StopPropagation()
        else:
            evt.Skip()

    def setAmp(self, value):
        # Slider value is in dB; convert to linear amplitude (10^(dB/20)).
        self.ampf(math.pow(10.0, float(value) * 0.05))

    def setRms(self, *args):
        self.meter.setRms(*args)

    def setStartButtonState(self, state):
        # Sync the Start/Stop button with the actual server state.
        if state:
            self._started = True
            wx.CallAfter(self.startButton.SetLabel, "Stop")
            if self.exit:
                wx.CallAfter(self.quitButton.Disable)
        else:
            self._started = False
            wx.CallAfter(self.startButton.SetLabel, "Start")
            if self.exit:
                wx.CallAfter(self.quitButton.Enable)
def ensureNFD(unistr):
    """Return *unistr* as a normalized unicode string.

    Byte strings are decoded by trying a platform-dependent list of
    candidate encodings; when no candidate applies, the original value
    is returned untouched.
    """
    if sys.platform == "win32" or sys.platform.startswith("linux"):
        candidates = [sys.getdefaultencoding(), sys.getfilesystemencoding(), "cp1252", "iso-8859-1", "utf-16"]
        form = "NFC"
    else:
        candidates = [sys.getdefaultencoding(), sys.getfilesystemencoding(), "macroman", "iso-8859-1", "utf-16"]
        form = "NFC"
    decoded = unistr
    if type(decoded) != unicode_t:
        for enc in candidates:
            try:
                decoded = decoded.decode(enc)
            except UnicodeDecodeError:
                continue
            except:
                decoded = "UnableToDecodeString"
                print("Unicode encoding not in a recognized format...")
                break
            else:
                break
    if decoded == "UnableToDecodeString":
        return unistr
    return unicodedata.normalize(form, decoded)
| 35.987293 | 120 | 0.525166 |
ae4bd329b0a39f201a2f41d92b1c573029070350 | 5,382 | py | Python | napalm_yang/utils.py | ckishimo/napalm-yang | 8f2bd907bd3afcde3c2f8e985192de74748baf6c | [
"Apache-2.0"
] | 64 | 2016-10-20T15:47:18.000Z | 2021-11-11T11:57:32.000Z | napalm_yang/utils.py | ckishimo/napalm-yang | 8f2bd907bd3afcde3c2f8e985192de74748baf6c | [
"Apache-2.0"
] | 126 | 2016-10-05T10:36:14.000Z | 2019-05-15T08:43:23.000Z | napalm_yang/utils.py | ckishimo/napalm-yang | 8f2bd907bd3afcde3c2f8e985192de74748baf6c | [
"Apache-2.0"
] | 63 | 2016-11-07T15:23:08.000Z | 2021-09-22T14:41:16.000Z | from napalm_yang import base
def model_to_dict(model, mode="", show_defaults=False):
"""
Given a model, return a representation of the model in a dict.
This is mostly useful to have a quick visual represenation of the model.
Args:
model (PybindBase): Model to transform.
mode (string): Whether to print config, state or all elements ("" for all)
Returns:
dict: A dictionary representing the model.
Examples:
>>> config = napalm_yang.base.Root()
>>>
>>> # Adding models to the object
>>> config.add_model(napalm_yang.models.openconfig_interfaces())
>>> config.add_model(napalm_yang.models.openconfig_vlan())
>>> # Printing the model in a human readable format
>>> pretty_print(napalm_yang.utils.model_to_dict(config))
>>> {
>>> "openconfig-interfaces:interfaces [rw]": {
>>> "interface [rw]": {
>>> "config [rw]": {
>>> "description [rw]": "string",
>>> "enabled [rw]": "boolean",
>>> "mtu [rw]": "uint16",
>>> "name [rw]": "string",
>>> "type [rw]": "identityref"
>>> },
>>> "hold_time [rw]": {
>>> "config [rw]": {
>>> "down [rw]": "uint32",
>>> "up [rw]": "uint32"
(trimmed for clarity)
"""
def is_mode(obj, mode):
if mode == "":
return True
elif mode == "config":
return obj._yang_name == "config" or obj._is_config
elif mode == "state":
return obj._yang_name == "state" or not obj._is_config
else:
raise ValueError(
"mode can only be config, state or ''. Passed: {}".format(mode)
)
def get_key(key, model, parent_defining_module, show_defaults):
if not show_defaults:
# No need to display rw/ro when showing the defaults.
key = "{} {}".format(key, "[rw]" if model._is_config else "[ro]")
if parent_defining_module != model._defining_module:
key = "{}:{}".format(model._defining_module, key)
return key
if model._yang_type in ("container", "list"):
cls = model if model._yang_type in ("container",) else model._contained_class()
result = {}
for k, v in cls:
r = model_to_dict(v, mode=mode, show_defaults=show_defaults)
if r:
result[get_key(k, v, model._defining_module, show_defaults)] = r
return result
else:
if show_defaults:
if model._default is False:
if model._yang_type != "boolean":
# Unless the datatype is bool, when the _default attribute
# is False, it means there is not default value defined in
# the YANG model.
return None
return model._default
return model._yang_type if is_mode(model, mode) else None
def _diff_root(f, s):
    """Diff two root/container models attribute by attribute."""
    changed = {}
    for name in f.elements():
        delta = diff(getattr(f, name), getattr(s, name))
        if delta:
            changed[name] = delta
    return changed
def _diff_list(f, s):
    """Diff two keyed lists.

    Shared keys are diffed element by element under "both"; keys present
    in only one list are reported under "first_only"/"second_only".
    """
    outcome = {}
    keys_f = set(f.keys())
    keys_s = set(s.keys())
    shared = {}
    for key in keys_f & keys_s:
        delta = diff(f[key], s[key])
        if delta:
            shared[key] = delta
    if shared:
        outcome["both"] = shared
    only_f = keys_f - keys_s
    only_s = keys_s - keys_f
    if only_f or only_s:
        outcome["first_only"] = list(only_f)
        outcome["second_only"] = list(only_s)
    return outcome
def diff(f, s):
    """
    Given two models, return the difference between them.
    Args:
        f (Pybindbase): First element.
        s (Pybindbase): Second element.
    Returns:
        dict: A dictionary highlighting the differences.
    Examples:
        >>> diff = napalm_yang.utils.diff(candidate, running)
        >>> pretty_print(diff)
        >>> {
        >>>     "interfaces": {
        >>>         "interface": {
        >>>             "both": {
        >>>                 "Port-Channel1": {
        >>>                     "config": {
        >>>                         "mtu": {
        >>>                             "first": "0",
        >>>                             "second": "9000"
        >>>                         }
        >>>                     }
        >>>                 }
        >>>             },
        >>>             "first_only": [
        >>>                 "Loopback0"
        >>>             ],
        >>>             "second_only": [
        >>>                 "Loopback1"
        >>>             ]
        >>>         }
        >>>     }
        >>> }
    """
    # Dispatch on the YANG node type of the first element.
    if isinstance(f, base.Root) or f._yang_type in ("container", None):
        result = _diff_root(f, s)
    elif f._yang_type in ("list",):
        result = _diff_list(f, s)
    else:
        # Leaf node: compare the string representations of both values.
        result = {}
        first = "{}".format(f)
        second = "{}".format(s)
        if first != second:
            result = {"first": first, "second": second}
    return result
| 30.40678 | 87 | 0.473987 |
ae4dfb5b9ba2ae94cfbe34ece6b1afd93884dd8b | 2,430 | py | Python | config.py | kenykau/reinforcement-forex | cac8c59ae7f5593bb7d9bb47e85f4ba2435a7a33 | [
"MIT"
] | null | null | null | config.py | kenykau/reinforcement-forex | cac8c59ae7f5593bb7d9bb47e85f4ba2435a7a33 | [
"MIT"
] | null | null | null | config.py | kenykau/reinforcement-forex | cac8c59ae7f5593bb7d9bb47e85f4ba2435a7a33 | [
"MIT"
] | null | null | null | from enum import IntEnum
from typing import List, Dict
class AssetType(IntEnum):
FOREX = 0
CFD = 1
class SpreadMode(IntEnum):
BIDASK = 0
RANDOM = 1
IGNORE = 2
FIXED = 3
SESSIONAL = 4
class Op(IntEnum):
    # Trading actions available to the agent.
    LONG = 0
    SHORT = 1
    HOLD = 2
    CLOSEALL = 3
class Config:
    """Static application configuration: price-data source, tradable
    symbols, simulated account and environment settings."""

    # Historical price data file (hourly bars, per the filename suffix -60).
    datafile: str = './2021617-60.csv'
    # Mapping from canonical field names to the CSV column names.
    fields: Dict = {
        "symbol" : "symbol",
        "dt" : "dt",
        "tf" : "tf",
        "open" : "open",
        "high" : "high",
        "low" : "low",
        "close" : "close",
        "vol" : "volume",
        "bid" : "bid",
        "ask" : "ask"}
    # Per-symbol contract specifications (spreads in points, swaps per lot).
    symbols: List[Dict] = [{
        "name" : "USDJPY",
        "asset_type": AssetType.FOREX,
        "leverage": 100,
        "quote" : "JPY",
        "base" : "USD",
        "digits" : 3,
        "commission" : 7,
        "min_lot" : 0.01,
        "max_lot" : 1,
        "lot_step" : 0.01,
        "lot_size" : 100000,
        "swap_long" : 2.30,
        "swap_short" : 2.75,
        "swap_day" : 2,
        "min_spread" : 1,
        "max_spread" : 10,
        "fixed_spread": 3,
        "spread_mode" : SpreadMode.RANDOM,
        "fixed_pt_value" : 1
    },
    {
        "name" : "EURUSD",
        "asset_type": AssetType.FOREX,
        "leverage": 100,
        "quote" : "USD",
        "base" : "EUR",
        "digits" : 5,
        "commission" : 0,
        "min_lot" : 0.01,
        "max_lot" : 1,
        "lot_step" : 0.01,
        "lot_size" : 100000,
        "swap_long" : 0,
        "swap_short" : 0,
        "swap_day" : 2,
        "min_spread" : 1,
        "max_spread" : 10,
        "fixed_spread": 3,
        "spread_mode" : SpreadMode.IGNORE,
        "fixed_pt_value" : 1
    }]
    # Simulated trading account; stop_out is the margin-call equity ratio.
    account: Dict = {
        "balance": 10000.00,
        "stop_out": 0.5,
        "currency": "USD",
        "fields": ["balance", "equity", "last_pnl", "total_orders", "margin_hold", "margin_free", "max_fl", "max_fp", "max_dd", "win_counts", "loss_count", "break_even"]
    }
    # RL environment settings: observation window and feature selection.
    env: Dict = {
        "window_size": 12,
        "allow_multi_orders": False,
        "obs_price_features": [],
        "obs_price_exclude": ["tf", "symbol", "bid", "ask"],
        #"obs_account_features": ["balance", "equity", "total_orders", "margin_hold", "margin_free", "max_fl", "max_fp", "win_counts", "loss_count", "break_even"]
        "obs_account_features": ["balance", "equity", "win_counts", "loss_count", "break_even"]
    }
| 25.851064 | 169 | 0.488477 |
ae4e5f7fe6b5f5c3253e178b1b6eeb60c312745d | 3,020 | py | Python | metaci/release/models.py | giveclarity/MetaCI | f51bd50acf2e7d5e111f993f4816e5f0a5c5a441 | [
"BSD-3-Clause"
] | null | null | null | metaci/release/models.py | giveclarity/MetaCI | f51bd50acf2e7d5e111f993f4816e5f0a5c5a441 | [
"BSD-3-Clause"
] | null | null | null | metaci/release/models.py | giveclarity/MetaCI | f51bd50acf2e7d5e111f993f4816e5f0a5c5a441 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
from model_utils import Choices
from model_utils.fields import AutoCreatedField, AutoLastModifiedField
from model_utils.models import StatusModel
from metaci.release.utils import update_release_from_github
class ChangeCaseTemplate(models.Model):
    """Named reference to an external change-case template record."""

    name = models.CharField(_("name"), max_length=255)
    # 18-character external record id (Salesforce-style length).
    case_template_id = models.CharField(_("case template id"), max_length=18)

    def __str__(self):
        return self.name
class Release(StatusModel):
    """A package release of a repository, tracked from creation through
    sandbox and production push dates.

    StatusModel adds a ``status`` field driven by ``STATUS`` below.
    """

    def get_sandbox_date():
        # Field-default callable (no self/cls): sandbox push defaults to today.
        return datetime.date.today()

    def get_production_date():
        # Field-default callable: production push defaults to six days out.
        return datetime.date.today() + datetime.timedelta(days=6)

    STATUS = Choices("draft", "published", "hidden")
    created = AutoCreatedField(_("created"))
    modified = AutoLastModifiedField(_("modified"))
    repo = models.ForeignKey(
        "repository.Repository", on_delete=models.CASCADE, related_name="releases"
    )
    version_name = models.CharField(
        _("version name"), max_length=255, null=True, blank=True
    )
    version_number = models.CharField(
        _("version number"), max_length=255, null=True, blank=True
    )
    # 18-character external package version id.
    package_version_id = models.CharField(
        _("package version id"), max_length=18, null=True, blank=True
    )
    git_tag = models.CharField(_("git tag"), max_length=1024, null=True)
    github_release = models.URLField(
        _("github release"), max_length=1024, null=True, blank=True
    )
    trialforce_id = models.CharField(
        _("trialforce template id"), max_length=18, null=True, blank=True
    )
    release_creation_date = models.DateField(
        _("release creation date"),
        null=True,
        blank=True,
        default=get_sandbox_date,
    )
    sandbox_push_date = models.DateField(
        _("sandbox push date"),
        null=True,
        blank=True,
        default=get_sandbox_date,
    )
    production_push_date = models.DateField(
        _("production push date"),
        null=True,
        blank=True,
        default=get_production_date,
    )
    created_from_commit = models.CharField(
        _("created from commit"), max_length=1024, null=True, blank=True
    )
    work_item_link = models.URLField(
        _("work item link"), max_length=1024, null=True, blank=True
    )
    change_case_template = models.ForeignKey(
        "release.ChangeCaseTemplate", on_delete=models.SET_NULL, null=True
    )
    change_case_link = models.URLField(
        _("change case link"), max_length=1024, null=True, blank=True
    )

    class Meta:
        get_latest_by = "created"
        ordering = ["-created"]
        verbose_name = _("release")
        verbose_name_plural = _("releases")
        # A tag identifies at most one release per repository.
        unique_together = ("repo", "git_tag")

    def __str__(self):
        return f"{self.repo}: {self.version_name}"

    def update_from_github(self):
        # Refresh this release's fields from its GitHub release data.
        update_release_from_github(self)
| 30.816327 | 82 | 0.67351 |
ae4f4fa26dd1ca5d802353abe3e69eef53ebe442 | 218 | py | Python | src/models/bert_crf.py | Zrealshadow/NLP_HW | 1fcc874b53cdf9465ab188c12082587d48644601 | [
"MIT"
] | 1 | 2020-08-19T03:27:18.000Z | 2020-08-19T03:27:18.000Z | src/models/bert_crf.py | OpenNLPhub/ChineseNER | 1fcc874b53cdf9465ab188c12082587d48644601 | [
"MIT"
] | null | null | null | src/models/bert_crf.py | OpenNLPhub/ChineseNER | 1fcc874b53cdf9465ab188c12082587d48644601 | [
"MIT"
] | null | null | null | import torch
import os
from torch import nn
from transformers import BertForTokenClassification,BertTokenizer,BertConfig;
# Working directory captured at import time.
cwd=os.getcwd()


class BERT_CRF(nn.Module):
    """BERT-based token classifier with a CRF layer.

    Currently an unimplemented skeleton: ``__init__`` is a stub.
    """

    def __init__(self,vocab_size):
        # TODO: build the BERT encoder and CRF layer.
        pass
| 16.769231 | 77 | 0.770642 |
ae5037c12585478c8a1c85ed99c80d350dbc79af | 415 | py | Python | mathMB/__init__.py | mburger-stsci/mathMB | 107c11a6e65429c5f7d2facb4ce4e199538a39d8 | [
"BSD-3-Clause"
] | null | null | null | mathMB/__init__.py | mburger-stsci/mathMB | 107c11a6e65429c5f7d2facb4ce4e199538a39d8 | [
"BSD-3-Clause"
] | null | null | null | mathMB/__init__.py | mburger-stsci/mathMB | 107c11a6e65429c5f7d2facb4ce4e199538a39d8 | [
"BSD-3-Clause"
] | null | null | null | from .interpu import interpu
from .minmaxmean import minmaxmean
from .randomdeviates import random_deviates_1d, random_deviates_2d
from .rotation_matrix import rotation_matrix
from .smooth import smooth, smooth_sphere
from .fit_model import fit_model
from .histogram import HistogramSphere, Histogram, Histogram2d
# Package metadata.
name = 'mathMB'
__author__ = 'Matthew Burger'
__email__ = 'mburger@stsci.edu'
__version__ = '1.10'
| 29.642857 | 66 | 0.824096 |
ae52b0c373a33d43af43b8a92c2a1b20dd0c87e2 | 3,841 | py | Python | dgraphpandas/strategies/horizontal.py | rohith-bs/dgraphpandas | 29e91e2e7bb1d5d991ab94709a2d7e27f7dd7316 | [
"MIT"
] | 1 | 2022-02-28T17:34:11.000Z | 2022-02-28T17:34:11.000Z | dgraphpandas/strategies/horizontal.py | rohith-bs/dgraphpandas | 29e91e2e7bb1d5d991ab94709a2d7e27f7dd7316 | [
"MIT"
] | null | null | null | dgraphpandas/strategies/horizontal.py | rohith-bs/dgraphpandas | 29e91e2e7bb1d5d991ab94709a2d7e27f7dd7316 | [
"MIT"
] | 1 | 2021-04-10T19:57:05.000Z | 2021-04-10T19:57:05.000Z | import logging
from typing import Any, Dict, List, Callable, Union
import pandas as pd
from dgraphpandas.config import get_from_config
from dgraphpandas.strategies.vertical import vertical_transform
# Module-level logger for this transform module.
logger = logging.getLogger(__name__)
def horizontal_transform(
        frame: Union[str, pd.DataFrame],
        config: Dict[str, Any],
        config_file_key: str,
        **kwargs):
    '''
    Horizontally Transform a Pandas DataFrame into Intrinsic and Edge DataFrames.

    Args:
        frame: a DataFrame, or a path to a CSV file to read.
        config: full configuration; config['files'][config_file_key] holds
            this file's options (subject_fields, type_overrides, date_fields,
            read_csv_options).
        config_file_key: key selecting the file configuration.
        **kwargs: per-option overrides forwarded to get_from_config.

    Raises:
        ValueError: when frame/config/config_file_key/subject_fields are
            missing, or when the frame has no data columns beyond the
            subject fields.
    '''
    if frame is None:
        raise ValueError('frame')
    if not config:
        raise ValueError('config')
    if not config_file_key:
        raise ValueError('config_file_key')

    file_config: Dict[str, Any] = config['files'][config_file_key]
    type_overrides: Dict[str, str] = get_from_config('type_overrides', file_config, {}, **(kwargs))
    subject_fields: Union[List[str], Callable[..., List[str]]] = get_from_config('subject_fields', file_config, **(kwargs))
    date_fields: Dict[str, str] = get_from_config('date_fields', file_config, {}, **(kwargs))

    if not subject_fields:
        raise ValueError('subject_fields')

    if isinstance(frame, str):
        # A path was passed instead of a DataFrame: load it.
        logger.debug(f'Reading file {frame}')
        read_csv_options: Dict[str, Any] = get_from_config('read_csv_options', file_config, {}, **(kwargs))
        frame = pd.read_csv(frame, **(read_csv_options))

    if frame.shape[1] <= len(subject_fields):
        raise ValueError(f'''
            It looks like there are no data fields.
            The subject_fields are {subject_fields}
            The frame columns are {frame.columns}
            ''')

    '''
    Date Fields get special treatment as they can be represented in many different ways
    from different sources. Therefore if the column has been defined in date_fields
    then apply those options to that column.
    '''
    for col, date_format in date_fields.items():
        date_format = date_fields[col]
        logger.debug(f'Converting {col} to datetime: {date_format}')
        frame[col] = pd.to_datetime(frame[col], **(date_format))
        if col not in type_overrides:
            # Make sure melted date values keep their datetime rdf type.
            logger.debug(f'Ensuring {col} has datetime64 type')
            type_overrides[col] = 'datetime64'

    '''
    Ensure that object values have the correct type according to type_overrides.
    For example, when pandas reads a csv and detects a numerical value it may decide to
    represent them as a float e.g 10.0 so when it's melted into a string it will show as such
    But we really want the value to be just 10 so it matches the corresponding rdf type.
    Therefore before we melt the frame, we enforce these columns have the correct form.
    '''
    logger.debug('Applying Type Overrides %s', type_overrides)
    for col, current_type in type_overrides.items():
        try:
            logger.debug(f'Converting {col} to {current_type}')
            frame[col] = frame[col].astype(current_type)
        except ValueError:
            # NOTE(review): terminating the process from a library function
            # is surprising — consider re-raising instead.
            logger.exception(
                f'''
                Could not convert {col} to {current_type}.
                Please confirm that the values in the {col} series are convertable to {current_type}.
                A common scenario here is when we have NA values but the target type does not support them.
                ''')
            exit()

    '''
    Pivot the Horizontal DataFrame based on the given key (subject).
    Change the frame to be 3 columns with triples: subject, predicate, object
    This changes the horizontal frame into a vertical frame as this more closely
    resembles rdf triples.
    '''
    logger.debug(f'Melting frame with subject: {subject_fields}')
    frame = frame.melt(
        id_vars=subject_fields,
        var_name='predicate',
        value_name='object')

    return vertical_transform(frame, config, config_file_key, **(kwargs))
| 40.431579 | 123 | 0.667534 |
ae533f8aecb8c3af4f9e6c1898e9747d30e5e6e5 | 2,675 | py | Python | classifier-start/lib/utils.py | sharifkaiser/codelabs-edgetpu-image-classifier-detector | da01229abec824994776507949adad1939fa45f0 | [
"Apache-2.0"
] | 4 | 2019-05-13T15:18:36.000Z | 2021-10-08T22:16:49.000Z | classifier-start/lib/utils.py | sharifkaiser/codelabs-edgetpu-image-classifier-detector | da01229abec824994776507949adad1939fa45f0 | [
"Apache-2.0"
] | 1 | 2019-06-30T14:43:31.000Z | 2019-10-25T17:49:52.000Z | classifier-start/lib/utils.py | sharifkaiser/codelabs-edgetpu-image-classifier-detector | da01229abec824994776507949adad1939fa45f0 | [
"Apache-2.0"
] | 3 | 2019-07-22T15:16:02.000Z | 2022-03-04T11:51:11.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from .svg import *
# Shared SVG stylesheet: '.back' is the black backing rectangle drawn
# behind overlay text for readability.
CSS_STYLES = str(CssStyle({'.back': Style(fill='black',
                                          stroke='black',
                                          stroke_width='0.5em')}))
def size_em(length):
    """Return a CSS em-size string approximating *length* characters wide."""
    width = 0.6 * length  # monospace glyphs are roughly 0.6em wide
    return "{}em".format(width)
def overlay(title, results, inference_time, layout):
    # Build an SVG overlay showing classification results, a title and
    # the inference time, sized to layout.window = (x, y, width, height).
    x0, y0, width, height = layout.window
    font_size = 0.03 * height
    defs = Defs()
    defs += CSS_STYLES
    doc = Svg(width=width, height=height,
              viewBox='%s %s %s %s' % layout.window,
              font_size=font_size, font_family='monospace', font_weight=500)
    doc += defs
    # Inner margins: 20px from each edge (plus one line height at the top).
    ox1, ox2 = x0 + 20, x0 + width - 20
    oy1, oy2 = y0 + 20 + font_size, y0 + height - 20
    # Classes
    # One "<label> (<score>)" line per result, stacked upward from the
    # bottom-right corner, each on a black backing rectangle.
    lines = ['%s (%.2f)' % pair for pair in results]
    for i, line in enumerate(lines):
        y = oy2 - i * 1.7 * font_size
        doc += Rect(x=0, y=0, width=size_em(len(line)), height='1em',
                    transform='translate(%s, %s) scale(-1,-1)' % (ox2, y),
                    _class='back')
        doc += Text(line, text_anchor='end', x=ox2, y=y, fill='white')
    # Title
    if title:
        doc += Rect(x=0, y=0, width=size_em(len(title)), height='1em',
                    transform='translate(%s, %s) scale(1,-1)' % (ox1, oy1), _class='back')
        doc += Text(title, x=ox1, y=oy1, fill='white')
    # Info
    lines = [
        'Inference time: %.2f ms (%.2f fps)' % (inference_time, 1000.0 / inference_time)
    ]
    for i, line in enumerate(reversed(lines)):
        y = oy2 - i * 1.7 * font_size
        doc += Rect(x=0, y=0, width=size_em(len(line)), height='1em',
                    transform='translate(%s, %s) scale(1,-1)' % (ox1, y), _class='back')
        doc += Text(line, x=ox1, y=y, fill='white')
    return str(doc)
LABEL_PATTERN = re.compile(r'\s*(\d+)(.+)')
def load_labels(path):
    """Load a label file mapping numeric class ids to label strings.

    Each line must look like "<id> <label>"; blank or otherwise
    non-matching lines are skipped (the previous version raised
    AttributeError on them because ``re.match`` returned ``None``).

    Returns a dict {int id: stripped label text}.
    """
    label_pattern = re.compile(r'\s*(\d+)(.+)')
    with open(path, 'r', encoding='utf-8') as f:
        # iterate the file lazily instead of materializing readlines()
        matches = (label_pattern.match(line) for line in f)
        return {int(m.group(1)): m.group(2).strip() for m in matches if m}
| 34.74026 | 94 | 0.575327 |
ae537a846977c181886563c30f2c68e1118f6d27 | 74,410 | py | Python | remit_admin/views.py | naamara/blink | 326c035b2f0ef0feae4cd7aa2d4e73fa4a40171a | [
"Unlicense",
"MIT"
] | null | null | null | remit_admin/views.py | naamara/blink | 326c035b2f0ef0feae4cd7aa2d4e73fa4a40171a | [
"Unlicense",
"MIT"
] | 10 | 2019-12-26T17:31:31.000Z | 2022-03-21T22:17:33.000Z | remit_admin/views.py | naamara/blink | 326c035b2f0ef0feae4cd7aa2d4e73fa4a40171a | [
"Unlicense",
"MIT"
] | null | null | null | # Create your views here.
from django.template import Template, context, RequestContext
from django.shortcuts import render_to_response, render, get_object_or_404, redirect, HttpResponseRedirect, HttpResponse
from django.contrib.auth.decorators import login_required
from remit_admin.forms import RateUpdateForm, ProfileUpdateForm, ProfileAddForm, PhonebookAddForm, TransactionAddForm, CreateAdminUserForm, TransactionUpdateForm, ContactUserForm, EditAdminUserForm, transactionPhonenumberSearchForm, ChargesLimitsForm,CreateHealthUserForm,AddInfoForm,AddHealthInfoForm,AddLawInfoForm,AddPubInfoForm,AddEducInfoForm
import remit.settings as settings
#from remit.utils import generate_sha1, mailer, sendsms, error_message, success_message
from remit.utils import error_message, success_message, admin_mail, sendsms, mailer
import payments.payment as p
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from remit.models import Transaction, Phonebook, Rate, Country, Charge
from remit.utils import COUNTRY_CHOICES, NETWORK_CHOICES
from accounts.models import Profile, AdminProfile, UserActions,Create_staff_User
from remit_admin.decorators import admin_required, superuser_required, permission_required, customer_care_required
from django.db.models import Q
from datetime import datetime, timedelta
import payments.payment as payments
from django.db.models import Sum, Max
from django.contrib import messages
from django.db import IntegrityError
import remit_admin.utils as admin_utils
import urllib2
from django.core.files.base import ContentFile
from StringIO import StringIO
from PIL import Image
from remit.utils import debug, log_unauthorized_access, render_to_pdf
#from dateutil.relativedelta import relativedelta
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from remit_admin.models import EmailSupport, add_health_info,HealthInfo,LawhInfo,JounalisthInfo,EducationInfo
import pytz
from django.contrib.auth.models import User
from remit_admin.utils import log_action, store_login_info
from pesapot.pesapot import PesaPot
def dashboard_stats(request):
    """Collect the summary numbers shown on every admin page.

    For active staff users this returns user counts (total, verified,
    blocked, pending verification), pending/completed transaction totals
    (overall and per destination country/currency) and bill-payment
    counters.  For non-staff requests only the default keys are returned.
    """
    data = {'boss_man': False}
    countries = Country.objects.all()
    if request.user.is_active and request.user.is_staff:
        '''get data only when user is logged in'''
        # total non-staff customer accounts
        profile = User.objects.filter(
            is_superuser=False, is_staff=False).count()
        data['user_count'] = profile
        data['verified_user_count'] = admin_utils.verified_users(
            count=True)
        data['blocked_user_count'] = admin_utils.blocked_users(count=True)
        data['pending_user_count'] = admin_utils.users_pending_verification(
            count=True)
        # money already charged on cards but not yet paid out (all countries)
        transaction = Transaction.objects.filter(
            visa_success=True, is_processed=False, amount_sent__isnull=False).aggregate(Sum('amount_sent'))
        data['amount_pending'] = transaction['amount_sent__sum']
        for country in countries:
            currency = country.currency.lower()
            # amount pending
            transaction = Transaction.objects.filter(
                visa_success=True, is_processed=False, to_country=country.pk, amount_sent__isnull=False).aggregate(Sum('amount_received'))
            data['amount_pending_%s' % currency] = transaction[
                'amount_received__sum']
            # pending transactions
            transaction = Transaction.objects.filter(
                visa_success=True, is_processed=False, amount_sent__isnull=False, to_country=country.pk).count()
            data['pending_transactions_%s' % currency] = transaction
        data['pending_transactions'] = len(Transaction.momo.pending())
        transaction = Transaction.objects.filter(
            visa_success=False, is_processed=False, amount_sent__isnull=False).count()
        data['failed_transactions'] = transaction
        # NOTE(review): the next two identical aggregates both assign
        # 'total_amount_transfered'; one of them is redundant.
        transaction = Transaction.objects.filter(
            visa_success=True, is_processed=True, amount_sent__isnull=False).aggregate(Sum('amount_sent'))
        data['total_amount_transfered'] = transaction['amount_sent__sum']
        transaction = Transaction.objects.filter(
            visa_success=True, is_processed=True, amount_sent__isnull=False).aggregate(Sum('amount_sent'))
        data['total_amount_transfered'] = transaction['amount_sent__sum']
        transaction = Transaction.objects.filter(
            visa_success=True, is_processed=True, amount_sent__isnull=False).aggregate(Sum('amount_received'))
        data['total_amount_transfered_ugx'] = transaction[
            'amount_received__sum']
        data['user_with_transaction'] = Transaction.objects.filter(
            visa_success=True, is_processed=True, amount_sent__isnull=False).values('user').distinct().count()
        data['complete_transactions'] = Transaction.objects.filter(
            visa_success=True, is_processed=True, amount_sent__isnull=False).count()
        data['pending_bills'] = Transaction.objects.filter(
            visa_success=True,
            is_processed=False,
            amount_sent__isnull=False,
            utility=True
        ).count()
        # NOTE(review): 'cancelled_bills' uses the exact same filter as
        # 'pending_bills' (likely copy/paste; probably should filter
        # is_canceled=True) -- confirm against the dashboard templates.
        data['cancelled_bills'] = Transaction.objects.filter(
            visa_success=True,
            is_processed=False,
            amount_sent__isnull=False,
            utility=True
        ).count()
        data['failed_bills'] = Transaction.objects.filter(
            visa_success=False,
            is_processed=False,
            amount_sent__isnull=False,
            utility=True
        ).count()
    return data
@admin_required
def render_view(request, template, data):
    """Render *template* with the shared admin context.

    Adds to *data*: the staff user's AdminProfile and a {codename: True}
    permission map, the current query string minus the ``page`` parameter
    (so pagination links can be rebuilt), and the dashboard statistics.
    Also caches login info in the session on first use.

    @request: HttpRequest
    @template: template path (string)
    @data: dict of extra template context (mutated in place)
    """
    # store login info
    if not 'login_info' in request.session:
        store_login_info(request)
    # debug(request.session['login_info'])
    # user permissions
    if request.user.is_authenticated():
        permissions = get_user_permissions(request.user)
        profile = {}
        try:
            profile = AdminProfile.objects.get(user=request.user)
        except Exception, e:
            # superusers may predate AdminProfile rows; create one lazily
            if request.user.is_superuser:
                create_superuser(request.user)
        data.update({'profile': profile, 'permissions': permissions})
    # for pagnation
    #debug(permissions, 'permissions')
    queries_without_page = request.GET.copy()
    if queries_without_page.has_key('page'):
        del queries_without_page['page']
    # update the request context
    data.update(
        {'queries': queries_without_page})
    data.update({'admin_data': dashboard_stats(request)})
    return render_to_response(
        template, data,
        context_instance=RequestContext(request)
    )
def create_superuser(user):
    """Create a missing AdminProfile for *user* (lazy bootstrap).

    Called from render_view when a superuser logs in without an
    AdminProfile row.  The previous version bound the created object to
    an unused local; the result is not needed.
    """
    AdminProfile.objects.create(user=user)
def get_user_permissions(user):
    """Map each permission codename granted to *user* to True."""
    granted = Permission.objects.filter(user=user)
    return {perm.codename: True for perm in granted}
def get_country_access(user):
    """Return the ((code, name), ...) country choices this user may manage.

    Superusers get every country.  Staff with AdminProfile.country set to
    the string 'False' also get all countries; otherwise only the single
    matching (code, name) pair from COUNTRY_CHOICES.

    NOTE(review): if profile.country is set but matches no entry in
    COUNTRY_CHOICES, an empty tuple is returned -- confirm callers handle
    that.
    """
    countries = ()
    if user.is_superuser:
        countries = COUNTRY_CHOICES
    else:
        profile = AdminProfile.objects.get(user=user)
        # 'False' (string) is the sentinel for "no country restriction"
        if not profile.country == 'False':
            for keyword, value in COUNTRY_CHOICES:
                if profile.country == keyword:
                    countries = ((keyword, value),)
        else:
            countries = COUNTRY_CHOICES
    return countries
def get_network_access(user):
    """Return the mobile-network choices this user may manage.

    Mirrors get_country_access: superusers and staff whose
    AdminProfile.mobile_network is the string 'False' get all of
    NETWORK_CHOICES; a restricted staff user gets the single matching
    ((code, name),) pair.

    NOTE(review): the initial value is a dict but the normal returns are
    tuples, and when mobile_network is set but matches no NETWORK_CHOICES
    entry the raw string is returned -- callers iterating the result as
    (value, keyword) pairs would break in that case; confirm.
    """
    networks = {}
    if user.is_superuser:
        networks = NETWORK_CHOICES
    else:
        profile = AdminProfile.objects.get(user=user)
        if not profile.mobile_network == 'False':
            networks = profile.mobile_network
            for keyword, value in NETWORK_CHOICES:
                if profile.mobile_network == keyword:
                    networks = ((keyword, value),)
        else:
            networks = NETWORK_CHOICES
    return networks
def check_user_permission(user, codename):
    """Truthy when *user* may perform the action named *codename*.

    Superusers always pass.  For everyone else the matching Permission
    queryset is returned; an empty queryset is falsy, so callers can use
    the result directly in boolean context.
    """
    if user.is_superuser:
        # Admin is all powerfull
        return True
    return Permission.objects.filter(user=user, codename=codename)
@admin_required
def home(request):
    """Admin landing page: pick the dashboard template by staff role.

    Superusers get the full dashboard with the country list; other staff
    are routed to a role-specific template based on AdminProfile flags.
    NOTE(review): the bare print statements are leftover debug output.
    """
    print "Everythin is fine"
    ad = AdminProfile.objects.get(user=request.user)
    if request.user.is_superuser:
        countries = Country.objects.all()
        return render_view(request, 'admin/index.html', {'countries': countries})
    elif ad.is_lawyer == True:
        print "Everythin is fine"
        return render_view(request, 'admin/index_staff_lawyer.html', {})
    elif ad.is_educ == True:
        print "Everythin is fine"
        return render_view(request, 'admin/index_staff_educ.html', {})
    elif ad.is_doctor == True:
        print "Everythin is fine"
        return render_view(request, 'admin/index_staff_doctor.html', {})
    elif ad.is_jounalist == True:
        print "Everythin is fine"
        return render_view(request, 'admin/index_staff_jounalist.html', {})
    else:
        return render_view(request, 'admin/index_staff.html', {})
@permission_required('edit_user')
def unblock_user(request):
    """Unblock a blocked customer account.

    POST field 'unblock_user' carries the Profile id XOR-obfuscated with
    0xABCDEFAB.  404s unless the profile exists and is currently blocked.
    Redirects to the verified-users list afterwards.
    """
    if request.POST:
        if not 'unblock_user' in request.POST:
            return HttpResponseRedirect(reverse('custom_404'))
        # print request.POST
        else:
            # de-obfuscate the posted profile id
            id = int(request.POST['unblock_user']) ^ 0xABCDEFAB
            '''check if the user is waiting verification'''
            profile = get_object_or_404(Profile.objects.filter(
                id=id, account_blocked=True), id=id, account_blocked=True)
            ''' Block user '''
            # record who unblocked the account and when
            profile.unblocked_by = request.user
            profile.status_updated_on = datetime.now()
            profile.account_blocked = False
            try:
                profile.save()
                success_message(
                    request, 'admin_user_unblocked', {'profile': profile})
                # account verified email and sms
                # template = settings.EMAIL_TEMPLATE_DIR+'general.html'
                # c ={'admin_user_unverified': True, 'data':profile}
                # mailer(request, 'VERIFIED: Your identity has been verified', template, c, profile.user.email)
                # send sms
                # template = settings.SMS_TEMPLATE_DIR+'general.html';
                # sendsms(profile.get_phonenumber(),template,{'code':'admin_user_verified','profile':profile})
            except Exception, e:
                error_message(
                    request, 'admin_user_unblocked', {'profile': profile})
                admin_mail(request, 'server_error', {
                    'error_message': 'errror unverifying user'}, e)
    # return HttpResponseRedirect(settings.BASE_URL + 'admin/users/verified/')
    return HttpResponseRedirect(reverse('admin:admin_users', args=['verified']))
@superuser_required
def block_user(request):
    """Block a customer account (superuser only).

    POST field 'block_user' carries the Profile id XOR-obfuscated with
    0xABCDEFAB.  Records who blocked the account, logs the action, and
    redirects to the verified-users list.
    """
    if request.POST:
        if not 'block_user' in request.POST:
            return HttpResponseRedirect(reverse('custom_404'))
        # print request.POST
        else:
            id = int(request.POST['block_user']) ^ 0xABCDEFAB
            '''check if the user is waiting verification'''
            profile = get_object_or_404(Profile.objects.filter(id=id), id=id)
            ''' Block user '''
            profile.blocked_by = request.user
            profile.status_updated_on = datetime.now()
            profile.account_blocked = True
            try:
                profile.save()
                log_action(request, model_object=profile,
                           action_flag=15, change_message='blocked user')
                success_message(
                    request, 'admin_user_blocked', {'profile': profile})
                # account verified email and sms
                # template = settings.EMAIL_TEMPLATE_DIR+'general.html'
                # c ={'admin_user_unverified': True, 'data':profile}
                # mailer(request, 'VERIFIED: Your identity has been verified', template, c, profile.user.email)
                # send sms
                # template = settings.SMS_TEMPLATE_DIR+'general.html';
                # sendsms(profile.get_phonenumber(),template,{'code':'admin_user_verified','profile':profile})
            except Exception, e:
                error_message(
                    request, 'admin_user_blocked', {'profile': profile})
                admin_mail(request, 'server_error', {
                    'error_message': 'errror unverifying user'}, e)
    # return HttpResponseRedirect(settings.BASE_URL + 'admin/users/verified/')
    return HttpResponseRedirect(reverse('admin:admin_users', args=['verified']))
@superuser_required
def unverify_user(request):
    """Revoke a user's account/ID verification (superuser only).

    POST field 'unverifyuser' carries the Profile id XOR-obfuscated with
    0xABCDEFAB.  404s unless the profile is currently fully verified.
    Redirects to the verified-users list afterwards.
    """
    if request.POST:
        if not 'unverifyuser' in request.POST:
            return HttpResponseRedirect(reverse('custom_404'))
        # print request.POST
        else:
            id = int(request.POST['unverifyuser']) ^ 0xABCDEFAB
            '''check if the user is waiting verification'''
            profile = get_object_or_404(Profile.objects.filter(
                id=id, account_verified=True, id_verified=True, user__isnull=False), id=id, account_verified=True, id_verified=True, user__isnull=False)
            ''' verify user '''
            # record who revoked verification and when
            profile.unverified_by = request.user
            profile.status_updated_on = datetime.now()
            profile.account_verified = False
            profile.id_verified = False
            try:
                profile.save()
                success_message(
                    request, 'admin_user_unverified', {'profile': profile})
                # account verified email and sms
                # template = settings.EMAIL_TEMPLATE_DIR+'general.html'
                # c ={'admin_user_unverified': True, 'data':profile}
                # mailer(request, 'VERIFIED: Your identity has been verified', template, c, profile.user.email)
                # send sms
                # template = settings.SMS_TEMPLATE_DIR+'general.html';
                # sendsms(profile.get_phonenumber(),template,{'code':'admin_user_verified','profile':profile})
            except Exception, e:
                error_message(
                    request, 'admin_user_unverified', {'profile': profile})
                admin_mail(request, 'server_error', {
                    'error_message': 'errror unverifying user'}, e)
    # return HttpResponseRedirect(settings.BASE_URL + 'admin/users/verified/')
    return HttpResponseRedirect(reverse('admin:admin_users', args=['verified']))
@superuser_required
def verify_user(request):
    """Mark a user's account and ID as verified (superuser only).

    POST field 'verifyuser' carries the Profile id XOR-obfuscated with
    0xABCDEFAB.  404s unless the profile is unverified, unblocked and has
    an uploaded ID picture.  On success a confirmation email is sent and,
    if the phone is verified, an SMS too.  Always redirects back to the
    pending-verification list.
    """
    if request.POST:
        if not 'verifyuser' in request.POST:
            return HttpResponseRedirect(reverse('custom_404'))
        else:
            id = int(request.POST['verifyuser']) ^ 0xABCDEFAB
            '''check if the user is waiting verification'''
            profile = get_object_or_404(
                Profile.objects.filter(
                    id=id, account_verified=False, id_pic__isnull=False, id_verified=False,
                    account_blocked=False), id=id, account_verified=False, id_pic__isnull=False, id_verified=False, account_blocked=False)
            ''' verify user '''
            profile.verified_by = request.user
            profile.status_updated_on = datetime.now()
            profile.account_verified = True
            profile.id_verified = True
            try:
                profile.save()
                log_action(request, model_object=profile,
                           action_flag=9, change_message='verified user')
                success_message(
                    request, 'admin_user_verified', {'profile': profile})
                # account verified email and sms
                template = settings.EMAIL_TEMPLATE_DIR + 'general.html'
                user_email = profile.user.email
                user_names = profile.get_names()
                c = {'admin_user_verified': True, 'user_names': user_names}
                mailer(request, 'VERIFIED: Your account on %s has been verified' % settings.APP_NAME,
                       template, c, user_email)
                # send sms
                if profile.phone_verified:
                    template = settings.SMS_TEMPLATE_DIR + 'general.html'
                    sendsms(profile.get_phonenumber(), template, {
                            'code': 'admin_user_verified', 'user_names': user_names})
            except Exception, e:
                debug(e, 'error sending verification emails ')
                admin_mail(request, 'server_error', {
                    'error_message': 'errror verifying user : %s' % e}, e)
    return HttpResponseRedirect(reverse('admin:admin_users', args=['pending_verification']))
@permission_required('view_transaction')
def view_transaction(request, name):
    """Show the detail page for one transaction.

    *name* is the transaction pk XOR-obfuscated with 0xABCDEFAB; the view
    is logged before rendering.
    """
    transaction_pk = int(name) ^ 0xABCDEFAB
    transaction = get_object_or_404(Transaction.objects.filter(pk=transaction_pk))
    log_action(request, model_object=transaction, action_flag=6,
               change_message='Viewed Transaction')
    context = {'transaction': transaction}
    return render_view(request, 'admin/transaction.html', context)
@superuser_required
def resend_transaction_email(request, name):
    """Resend a transaction notification email (superuser only).

    *name* is the transaction pk XOR-obfuscated with 0xABCDEFAB.  POST
    field 'action' selects the email: '2' = card-charged receipt,
    '1' = delivery confirmation.  Redirects back to the transaction page.
    """
    pk = int(name) ^ 0xABCDEFAB
    transaction = get_object_or_404(Transaction.objects.filter(pk=pk))
    if request.POST:
        # imported here to avoid a circular import at module load time
        from payments.payment import card_charged_email, transaction_delivered_email
        email = transaction.user.email
        action = request.POST.get('action', None)
        if action == '2':
            action = "Card Charged Email"
            card_charged_email(request, transaction)
        if action == '1':
            action = "Delivery Email"
            transaction_delivered_email(request, transaction)
        log_action(request, model_object=transaction, action_flag=6,
                   change_message='Resend Transaction Email')
        messages.success(
            request, "The %s email Was Successfully resent to %s" % (action, email))
    return HttpResponseRedirect(reverse('admin:admin_transaction', args=(name,)))
def transaction_receipt(request, name):
    """Render the PDF receipt for the transaction whose obfuscated pk is *name*."""
    transaction_pk = int(name) ^ 0xABCDEFAB
    transaction = get_object_or_404(Transaction.objects.filter(pk=transaction_pk))
    template = settings.EMAIL_TEMPLATE_DIR + 'credit_card_charged_pdf.html'
    #log_action(request,model_object=transaction, action_flag=6, change_message='Downloaded Receipt Transaction')
    context = {
        'data': transaction,
        'BASE_URL': settings.BASE_URL
    }
    return render_to_pdf(template, context)
@superuser_required
def edit_transaction(request, name):
    """Edit a transaction via TransactionUpdateForm (superuser only).

    *name* is the transaction pk XOR-obfuscated with 0xABCDEFAB.  On a
    valid POST the form is saved, the editor is recorded, the admins are
    mailed and the change is logged; the edit page is re-rendered either
    way.
    """
    name = int(name) ^ 0xABCDEFAB
    transaction = get_object_or_404(Transaction.objects.filter(pk=name))
    form = TransactionUpdateForm()
    if request.POST:
        form = TransactionUpdateForm(request.POST, instance=transaction)
        if form.is_valid():
            form.save()
            # record who last touched the transaction
            transaction.updated_by = request.user
            transaction.save()
            success_message(request, 'admin_edit_transaction', {})
            admin_mail(request, 'transaction_updated',
                       {'transaction': transaction})
            log_action(request, model_object=transaction,
                       action_flag=9, change_message='edited Transaction')
    return render_view(request, 'admin/edit_transaction.html',
                       {'transaction': transaction, 'form': form})
def stuff_transaction_list(user, status=1):
    '''
    Return the Transaction queryset for the given status code, restricted
    to the countries/networks the staff *user* may access.  Returns False
    for an unknown status.

    status
    (1)-successful,
    (2)-pending,
    (3)-Failed,
    (4)-All,
    (5)-Canceled (non-bill)
    (6)-successful bills
    (7)-All bills
    (8)-All non bill transactions
    (9)-All pending bills
    (10)-All failed bills
    (11)-All cancelled bills
    '''
    transaction_list = False
    if status == 1:
        transaction_list = Transaction.objects.filter(
            visa_success=True, is_processed=True, amount_sent__isnull=False, utility=False)
    elif status == 2:
        transaction_list = Transaction.objects.filter(
            visa_success=True, is_processed=False, amount_sent__isnull=False, utility=False)
    elif status == 3:
        transaction_list = Transaction.objects.filter(
            visa_success=False, utility=False)
    elif status == 4:
        #transaction_list = Transaction.objects.all()
        transaction_list = Transaction.objects.filter(utility=False)
    elif status == 5:
        transaction_list = Transaction.objects.filter(
            is_canceled=True, visa_success=True, is_processed=True, amount_sent__isnull=False, utility=False
        )
    elif status == 6:
        transaction_list = Transaction.objects.filter(
            visa_success=True, is_processed=True, amount_sent__isnull=False, utility=True
        )
    elif status == 7:
        transaction_list = Transaction.objects.filter(
            utility=True
        )
    elif status == 8:
        transaction_list = Transaction.objects.filter(
            utility=False
        )
    elif status == 9:
        transaction_list = Transaction.objects.filter(
            visa_success=True, is_processed=False, amount_sent__isnull=False, utility=True)
    elif status == 10:
        #
        transaction_list = Transaction.objects.filter(
            visa_success=False, utility=True)
    elif status == 11:
        transaction_list = Transaction.objects.filter(
            is_canceled=True, visa_success=True, is_processed=True, amount_sent__isnull=False, utility=True
        )
    # else:
    # if len(transaction_list) > 0:
    # transaction_list = transaction_list.filter(utility=False)
    '''get the transaction list our stuff users are allowed access to'''
    # NOTE(review): `if transaction_list` evaluates the queryset, so an
    # EMPTY queryset skips the country/network restriction below (harmless
    # since it is already empty, but it does execute the query here).
    if transaction_list and not user.is_superuser:
        country_filter = network_filter = Q()
        # OR together every country / network the staff member may see
        for value, keyword in get_country_access(user):
            country_filter |= Q(to_country__code=value)
        for value, keyword in get_network_access(user):
            network_filter |= Q(mobile_network_code=value)
        #transaction_list = Transaction.objects.filter(country_filter & network_filter)
        transaction_list = transaction_list.filter(
            country_filter & network_filter)
    # if successful:
    # transaction_list = transaction_list.filter(
    # visa_success=True, is_processed=True, amount_sent__isnull=False)
    return transaction_list
@permission_required('view_transaction')
def transactions(request, name=False, user_id=False):
'''
Transactions
'''
pretitle = 'Pending Transactions'
page_title = 'Pending Transactions'
#debug(get_country_access(request.user), 'country')
transaction_list = False
status = 4
if not name and request.user.is_superuser:
page_title = pretitle = 'Transactions'
elif name == 'pending':
status = 2
# transaction_list = transaction_list.filter(
# visa_success=True, is_processed=False, amount_sent__isnull=False)
elif name == 'successful':
status = 1
page_title = pretitle = 'Successful Transactions'
# transaction_list = transaction_list.filter(
# visa_success=True, is_processed=True, amount_sent__isnull=False)
elif name == 'failed':
status = 3
page_title = pretitle = 'Failed Transactions'
elif name == 'canceled':
status = 5
page_title = pretitle = 'Canceled Transactions'
#transaction_list = transaction_list.filter(visa_success=False)
elif name == 'search':
page_title = pretitle = 'Search Transactions'
elif name == 'billpayments':
status = 6
page_title = pretitle = 'Search Billpayments'
else:
return HttpResponseRedirect(reverse('admin:admin_dashboard'))
# search query
if 'q' in request.GET:
try:
id = int(request.GET['q']) ^ 0xABCDEFAB
transaction_list = transaction_list.filter(id=id)
except Exception, e:
messages.error(request, "The Transaction was not found")
if not transaction_list:
try:
num = str(request.GET['q'])
ctry_code = num[:3]
debug(ctry_code)
phone_num = num[3:]
debug(phone_num)
transaction_list.filter(receiver_number=phone_num)
except Exception, e:
debug(e)
# if request.user.is_superuser:
# transaction_list = Transaction.objects.all()
transaction_list = stuff_transaction_list(request.user, status)
# we are dealing with a specific user
if user_id and transaction_list:
user_id = int(user_id) ^ 0xABCDEFAB
profile = get_object_or_404(Profile.objects.filter(id=user_id))
transaction_list = transaction_list.filter(user=profile.user)
if transaction_list:
transaction_list = transaction_list.order_by('-id')
paginator = Paginator(transaction_list, settings.PAGNATION_LIMIT)
page = request.GET.get('page')
try:
transactions = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
transactions = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
transactions = paginator.page(paginator.num_pages)
log_action(request, model_object=transaction_list,
action_flag=6, change_message='view Transaction')
return render_view(request, 'admin/transactions.html', {'transactions': transactions, 'pretitle': pretitle, 'page_title': page_title, 'type': name})
def tradelance(request):
    """Render the Tradelance operations page with an empty result set.

    The form posts to tradelance_response().  Removed two unused local
    title variables that were never passed to the template.
    """
    response_data = {}
    return render_view(request, 'admin/tradelance.html',
                       {'result': response_data
                        })
def tradelance_response(request):
    """Dispatch a Tradelance operation chosen on the admin form.

    POST fields: 'tlance_amount', 'tlance_number', 'tlance_status' (a
    transaction id for status lookups) and 'selected_tmethod' naming the
    operation (deposit / request / balance / status).  The provider
    response is rendered on the response page; a non-POST request renders
    an empty result.  Removed dead pre-initialisations of locals that
    were only ever read after being assigned inside the POST branch.
    """
    response_data = {}
    pesapot = PesaPot()
    if request.POST:
        data = request.POST.copy()
        amount = data.get('tlance_amount', '')
        number = data.get('tlance_number', '')
        tlance_id = data.get('tlance_status', '')
        tlance_method = data.get('selected_tmethod', '')
        if tlance_method == 'tlance_deposit':
            response_data = pesapot.TradelanceDeposit(number, amount)
        elif tlance_method == 'tlance_request':
            response_data = pesapot.TradelanceRequest(number, amount)
        elif tlance_method == 'tlance_balance':
            response_data = pesapot.TradelanceBalance()
        elif tlance_method == 'tlance_status':
            response_data = pesapot.TradelanceStatus(tlance_id)
    return render_view(request, 'admin/tradelance_response.html',
                       {'result': response_data})
@permission_required('view_transaction')
def bill_transactions(request, name=False, user_id=False):
'''
Transactions
'''
pretitle = 'Pending Transactions'
page_title = 'Pending Transactions'
#debug(get_country_access(request.user), 'country')
transaction_list = False
status = 7
if not name and request.user.is_superuser:
page_title = pretitle = 'Bill Transactions'
elif name == 'pending':
status = 9
# transaction_list = transaction_list.filter(
# visa_success=True, is_processed=False, amount_sent__isnull=False)
elif name == 'successful':
status = 6
page_title = pretitle = 'Successful Bill Transactions'
# transaction_list = transaction_list.filter(
# visa_success=True, is_processed=True, amount_sent__isnull=False)
elif name == 'failed':
status = 10
page_title = pretitle = 'Failed Bill Transactions'
elif name == 'canceled':
status = 11
page_title = pretitle = 'Canceled Bill Transactions'
#transaction_list = transaction_list.filter(visa_success=False)
elif name == 'search':
page_title = pretitle = 'Search Transactions'
else:
return HttpResponseRedirect(reverse('admin:admin_dashboard'))
# search query
if 'q' in request.GET:
try:
id = int(request.GET['q']) ^ 0xABCDEFAB
transaction_list = transaction_list.filter(id=id)
except Exception, e:
messages.error(request, "The Transaction was not found")
if not transaction_list:
try:
num = str(request.GET['q'])
ctry_code = num[:3]
debug(ctry_code)
phone_num = num[3:]
debug(phone_num)
transaction_list.filter(receiver_number=phone_num)
except Exception, e:
debug(e)
# if request.user.is_superuser:
# transaction_list = Transaction.objects.all()
transaction_list = stuff_transaction_list(request.user, status)
# we are dealing with a specific user
if user_id and transaction_list:
user_id = int(user_id) ^ 0xABCDEFAB
profile = get_object_or_404(Profile.objects.filter(id=user_id))
transaction_list = transaction_list.filter(user=profile.user)
if transaction_list:
transaction_list = transaction_list.order_by('-id')
paginator = Paginator(transaction_list, settings.PAGNATION_LIMIT)
page = request.GET.get('page')
try:
transactions = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
transactions = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
transactions = paginator.page(paginator.num_pages)
log_action(request, model_object=transaction_list,
action_flag=6, change_message='view Transaction')
return render_view(request, 'admin/bill_transactions.html', {'transactions': transactions, 'pretitle': pretitle, 'page_title': page_title, 'type': name})
@permission_required('edit_transaction')
def resend_transaction(request):
    """Retry mobile-money delivery for a charged-but-unprocessed transaction.

    POST field 'resend_transaction' carries the transaction id
    XOR-obfuscated with 0xABCDEFAB.  The transaction is re-fetched after
    the attempt to see whether it is now processed, and a success or
    error message is queued accordingly.  Redirects to the transaction
    detail page (GET requests go to the 404 page).
    """
    if request.POST:
        if not 'resend_transaction' in request.POST:
            return HttpResponseRedirect(reverse('admin:admin_dashboard'))
        else:
            name = int(request.POST['resend_transaction'])
            id = name ^ 0xABCDEFAB
            transaction = get_object_or_404(Transaction.objects.filter(
                id=id, visa_success=True, is_processed=False, amount_sent__isnull=False), id=id, visa_success=True, is_processed=False, amount_sent__isnull=False)
            response = {}
            response = payments.process_mobilemoney(
                transaction, response, request, processed_by=request.user)
            #debug(response, 'Resend Response')
            # if not response['error'] and 'delivered_to_mobile' in response :
            # reget the transaction
            transaction = get_object_or_404(Transaction.objects.filter(id=id))
            if transaction.is_processed:
                success_message(request, 'admin_resend_transaction', {
                    'response': response})
            # else:
            # error_message(request, 'admin_resend_transaction', {'response': response})
            else:
                error_message(request, 'admin_process_transaction', {
                    'response': response})
    else:
        return HttpResponseRedirect(reverse('custom_404'))
    return HttpResponseRedirect(reverse('admin:admin_transaction', args=(name,)))
@permission_required('edit_transaction')
def process_transaction(request):
    """Mark a pending transaction as processed, or cancel it, without resending.

    POST carries either 'process_transaction' or 'cancel_transaction',
    each holding the transaction id XOR-obfuscated with 0xABCDEFAB.
    Processing calls process_mobilemoney with mark_as_processed=True;
    cancelling flags the transaction as processed+canceled and records
    who cancelled it.
    """
    if request.POST:
        cancel_transaction = request.POST.get('cancel_transaction', None)
        process_transaction = request.POST.get('process_transaction', None)
        if cancel_transaction:
            name = cancel_transaction
            id = int(name) ^ 0xABCDEFAB
            transaction = get_object_or_404(Transaction.objects.filter(
                id=id, visa_success=True, is_processed=False, amount_sent__isnull=False), id=id, visa_success=True, is_processed=False, amount_sent__isnull=False)
        elif process_transaction:
            name = process_transaction
            id = int(name) ^ 0xABCDEFAB
            transaction = get_object_or_404(Transaction.objects.filter(
                id=id, visa_success=True, is_processed=False, amount_sent__isnull=False), id=id, visa_success=True, is_processed=False, amount_sent__isnull=False)
        else:
            return HttpResponseRedirect(reverse('custom_404'))
        if process_transaction:
            response = {'status_code': payments.RESPONSE_CODES['SUCCESS']}
            payments.process_mobilemoney(
                transaction, response, request, processed_by=request.user, mark_as_processed=True)
            _process_error = response.get('error', None)
            if not _process_error:
                delivered_to_mobile = False
                if 'delivered_to_mobile' in response:
                    delivered_to_mobile = response['delivered_to_mobile']
                success_message(request, 'admin_process_transaction', {
                    'status_code': response['status_code'], 'delivered_to_mobile': delivered_to_mobile})
                return HttpResponseRedirect(reverse('admin:admin_transaction', args=(name,)))
            else:
                error_message(request, 'admin_process_transaction', {
                    'status_code': response['status_code']})
        if cancel_transaction:
            # flag as processed so it drops off the pending queue, and
            # record who cancelled it and when
            transaction.is_processed = True
            transaction.is_canceled = True
            transaction.canceled_by = request.user
            transaction.cancled_on = datetime.now()
            transaction.save()
            return HttpResponseRedirect(reverse('admin:admin_transactions', args=('canceled',)))
    # return HttpResponseRedirect(settings.BASE_URL +
    # 'admin/transactions/successful/')
    return HttpResponseRedirect(reverse('admin:admin_transactions', args=('pending',)))
@admin_required
def users(request, name):
    """Paginated customer list filtered by account status.

    *name* selects the tab: 'verified', 'unverified',
    'pending_verification', 'blocked', 'top' or 'search'; anything else
    404s.  An optional ?q= searches by first/last name.

    NOTE(review): the 'unverified' branch sets page_title to
    'verified users', and 'top' reuses the 'Blocked Users' titles while
    listing UNblocked profiles -- confirm intended wording.
    """
    # user_list = Profile.objects.filter(account_verified=True,user__isnull=False)
    # print name
    pretitle = 'verified users'
    page_title = 'verified users'
    if name == 'verified':
        user_list = admin_utils.verified_users()
    elif name == 'unverified':
        user_list = Profile.objects.filter(
            Q(id_pic=''),
            account_verified=False,
            user__isnull=False, account_blocked=False)
        pretitle = 'Unverified Users'
        page_title = 'verified users'
    elif name == 'pending_verification':
        pretitle = 'Users waiting to be verified'
        page_title = 'users pending verification'
        user_list = admin_utils.users_pending_verification()
    elif name == 'blocked':
        pretitle = 'Blocked Users'
        page_title = 'Blocked Users'
        user_list = admin_utils.blocked_users()
    elif name == 'top':
        pretitle = 'Blocked Users'
        page_title = 'Blocked Users'
        user_list = Profile.objects.filter(account_blocked=False)
    elif name == 'search':
        pretitle = 'User Search'
        page_title = 'User Search'
        user_list = Profile.objects.filter(user__isnull=False)
    else:
        return HttpResponseRedirect(reverse('custom_404'))
    user_list = user_list.filter().order_by('-id')
    # search query
    if 'q' in request.GET:
        pretitle += ' | %s' % request.GET['q']
        page_title += ' | %s' % request.GET['q']
        user_list = user_list.filter(
            Q(firstname__icontains='' + request.GET['q'] + '') | Q(lastname__icontains='' + request.GET['q'] + ''))
    paginator = Paginator(user_list, settings.PAGNATION_LIMIT)
    page = request.GET.get('page')
    try:
        users = paginator.page(page)
    except PageNotAnInteger:
        # If page is not an integer, deliver first page.
        users = paginator.page(1)
    except EmptyPage:
        # If page is out of range (e.g. 9999), deliver last page of results.
        users = paginator.page(paginator.num_pages)
    log_action(request, model_object=user_list,
               action_flag=13, change_message='searched user')
    return render_view(request, 'admin/users.html', {'users': users, 'pretitle': pretitle, 'page_title': page_title})
@superuser_required
def stuff_users(request, name=False):
    """Paginated list of all staff (AdminProfile) users, superuser only.

    Supports an optional ?q= username search.

    Fix: ``pretitle``/``page_title`` were never initialised, so any
    request with a ?q= parameter raised NameError on ``pretitle +=``.
    """
    pretitle = page_title = 'Staff Users'
    user_list = AdminProfile.objects.all()  # (is_staff=True)
    debug(user_list, 'stuff')
    user_list = user_list.filter().order_by('-id')
    # search query
    if 'q' in request.GET:
        pretitle += ' | %s' % request.GET['q']
        page_title += ' | %s' % request.GET['q']
        # NOTE(review): confirm AdminProfile actually has a 'username'
        # field; this may need to be user__username__icontains
        user_list = user_list.filter(
            Q(username__icontains='' + request.GET['q'] + ''))
    paginator = Paginator(user_list, settings.PAGNATION_LIMIT)
    page = request.GET.get('page')
    try:
        users = paginator.page(page)
    except PageNotAnInteger:
        # If page is not an integer, deliver first page.
        users = paginator.page(1)
    except EmptyPage:
        # If page is out of range (e.g. 9999), deliver last page of results.
        users = paginator.page(paginator.num_pages)
    return render_view(request, 'admin/stuff_users.html', {'users': users})
@superuser_required
def health_users(request, name=False):
    """Paginated list of health-staff (AdminProfile) users, superuser only.

    Supports an optional ?q= username search.

    Fix: ``pretitle``/``page_title`` were never initialised, so any
    request with a ?q= parameter raised NameError on ``pretitle +=``.
    """
    pretitle = page_title = 'Health Users'
    user_list = AdminProfile.objects.all()  # (is_staff=True)
    debug(user_list, 'stuff')
    user_list = user_list.filter().order_by('-id')
    # search query
    if 'q' in request.GET:
        pretitle += ' | %s' % request.GET['q']
        page_title += ' | %s' % request.GET['q']
        # NOTE(review): confirm AdminProfile actually has a 'username'
        # field; this may need to be user__username__icontains
        user_list = user_list.filter(
            Q(username__icontains='' + request.GET['q'] + ''))
    paginator = Paginator(user_list, settings.PAGNATION_LIMIT)
    page = request.GET.get('page')
    try:
        users = paginator.page(page)
    except PageNotAnInteger:
        # If page is not an integer, deliver first page.
        users = paginator.page(1)
    except EmptyPage:
        # If page is out of range (e.g. 9999), deliver last page of results.
        users = paginator.page(paginator.num_pages)
    return render_view(request, 'admin/health_users.html', {'users': users})
@admin_required
def user(request, name):
    '''Admin detail page for a customer Profile.

    `name` is the obfuscated profile id (real id XOR 0xABCDEFAB).  Staff
    without the 'edit_profile' permission get a read-only template; others
    may POST profile updates and see aggregate transaction statistics.
    '''
    pretitle = 'User'
    page_title = 'User'
    # de-obfuscate the id used in admin URLs
    id = int(name) ^ 0xABCDEFAB
    profile = get_object_or_404(Profile.objects.filter(id=id))
    pretitle = page_title = profile.get_names()
    userdata = {}
    if not check_user_permission(request.user, 'edit_profile'):
        # print "NOOOOO"
        return render_view(request, 'admin/user_readonly.html', {'name': name,
                                                                 'user_profile': profile,
                                                                 'pretitle': pretitle,
                                                                 'page_title': page_title,
                                                                 'userdata': userdata})
    else:
        if request.POST:
            if 'update_account' in request.POST:
                post_values = request.POST.copy()
                # recombine the three dob selects into one datetime field
                post_values['dob'] = datetime.strptime(
                    post_values['dob_month'] + '-' + post_values['dob_day'] + '-' + post_values['dob_year'], '%m-%d-%Y')
                form = ProfileUpdateForm(post_values, instance=profile)
                # debug(request.FILES)
                if 'passport' in request.FILES:
                    form.id_pic = request.FILES['passport']
                if form.is_valid():
                    # persist the uploaded id picture separately from the form
                    if 'passport' in request.FILES:
                        profile.id_pic = request.FILES['passport']
                        profile.save()
                    #profile = Profile.objects.filter(id=profile.pk)
                    form.save()
                    success_message(request, 'admin_update_profile', {})
                else:
                    error_message(request, 'admin_update_profile', {})
        # aggregate stats over successful, processed transactions only
        transaction_stats = Transaction.objects.filter(
            user=profile.user, visa_success=True, is_processed=True,
            amount_sent__isnull=False)
        userdata['successful_transactions'] = transaction_stats.count()
        if userdata['successful_transactions'] > 0:
            userdata['amount_sent'] = transaction_stats.aggregate(
                Sum('amount_sent'))
            if 'amount_sent__sum' in userdata['amount_sent']:
                userdata['amount_sent'] = userdata[
                    'amount_sent']['amount_sent__sum']
            top_amount_sent = transaction_stats.aggregate(Max('amount_sent'))
            if 'amount_sent__max' in top_amount_sent:
                userdata['top_amount_sent'] = top_amount_sent[
                    'amount_sent__max']
        return render_view(request, 'admin/user.html', {'name': name,
                                                        'user_profile': profile,
                                                        'pretitle': pretitle,
                                                        'page_title': page_title,
                                                        'userdata': userdata})
@superuser_required
def charges_limits(request, code):
    '''Show and update the Charge (charges & limits) record for country `code`.'''
    active_country = get_object_or_404(Country.objects.filter(code=code))
    rate = Charge.objects.get(country=active_country)
    countries = Country.objects.all()
    form = ChargesLimitsForm()
    if request.POST:
        form = ChargesLimitsForm(request.POST, instance=rate)
        if form.is_valid():
            form.save()
            messages.success(
                request, "The Charges & Limits Was Successfully Edited")
        else:
            # validation errors only go to the console (Python 2 print),
            # the bound form is re-rendered below with its errors
            print form.errors
    return render_view(request, 'admin/charges_limits.html',
                       {'rate': rate, 'pretitle': 'charges & limits',
                        'page_title': "charges & Limits",
                        'countries': countries,
                        'country_code': code,
                        'form': form}
                       )
#@admin_required
@permission_required('view_rate')
def rates(request, code):
    '''edit and check our rates

    Shows the Charge row for country `code` and applies a RateUpdateForm
    on POST.  404s when the country code is unknown.
    '''
    active_country = get_object_or_404(Country.objects.filter(code=code))
    # Charge.objects.all().delete()
    rate = Charge.objects.get(country=active_country)
    countries = Country.objects.all()
    form = RateUpdateForm()
    if request.POST:
        form = RateUpdateForm(request.POST, instance=rate)
        if form.is_valid():
            form.save()
            messages.success(request, "The Rates Were Successfully Edited")
        else:
            # errors only echoed to the console (Python 2 print)
            print form.errors
    return render_view(request, 'admin/rates.html',
                       {'rate': rate, 'form': form, 'countries': countries}
                       )
@admin_required
def logs(request):
    """Render the admin activity-log page; all content comes from the template."""
    context = {}
    return render_view(request, 'admin/logs.html', context)
def save_transaction(cur, user, pending=False):
    '''Import legacy transactions for `user` from a raw database cursor.

    For every row already fetched on `cur`, the matching transaction_log
    record is loaded and saved as a Transaction through TransactionAddForm.
    When `pending` is True the record is stored as unprocessed.
    '''
    for row in cur.fetchall():
        debug(row, 'row data')
        # row[0] is the integer log_id from the previous query; the %d
        # interpolation is numeric-only, but a parameterized query would
        # still be safer.
        cur.execute(
            "SELECT invoice_id,phon_num,phon_ext,amount_received,amount,added,exchange_rate from transaction_log where log_id = %d " %
            row[0])
        datarow = cur.fetchone()
        if datarow:
            data = {
                'user': user.pk,
                'receiver_number': datarow[1],
                'receiver_country_code': datarow[2],
                'amount_sent': datarow[4],
                'processed_by': 1,
                'rate': datarow[6],
                'visa_success': True,
            }
            # the `added` column holds a unix timestamp
            processed_on = datetime.fromtimestamp(int(datarow[5]))
            if not pending:
                data['processed_on'] = processed_on
                data['is_processed'] = True
            else:
                debug(data, 'Pending Transaction')
                data['is_processed'] = False
            # received amount derived from sent amount * exchange rate
            data['amount_received'] = float(datarow[4]) * float(datarow[6])
            data['started_on'] = processed_on
            transaction = TransactionAddForm(data)
            if transaction.is_valid():
                try:
                    transaction.save()
                except IntegrityError as e:
                    print e
            else:
                print transaction.errors
@permission_required('view_audit_trail')
def audits_trails(request):
'''system user actions'''
from django.contrib.admin.models import LogEntry
audit_logs_list = UserActions.objects.all()
# unique actions
unique_actions = []
# if settings.IS_SQLITE:
log_entrys = LogEntry.objects.all()
for log_entry in log_entrys:
if log_entry.action_flag not in unique_actions:
unique_actions.append(log_entry.action_flag)
# unique users
unique_users = []
# if settings.IS_SQLITE:
for log_entry in audit_logs_list:
if log_entry.user not in unique_users:
unique_users.append(log_entry.user)
# else:
# unique_users = audit_logs_list.distinct(
# 'user')
# debug(unique_actions,'unique_actions')
if 'start_date' in request.GET:
start_date = '%s' % request.GET['start_date']
start_date = datetime.strptime(start_date, '%d-%m-%Y')
else:
first_log_entry = LogEntry.objects.values_list(
'action_time', flat=True).order_by('id')[:1]
start_date = first_log_entry[0]
if 'end_date' in request.GET:
end_date = '%s' % request.GET['end_date']
end_date = datetime.strptime(end_date, '%d-%m-%Y')
else:
end_date = datetime.now()
try:
end_date = pytz.utc.localize(end_date)
start_date = pytz.utc.localize(start_date)
except Exception, e:
pass
if start_date == end_date:
audit_logs_list = audit_logs_list.filter(
log_entry__action_time__contains=start_date.date())
else:
audit_logs_list = audit_logs_list.filter(
log_entry__action_time__range=(start_date, end_date))
action_type = request.GET.get('action_type', None)
if action_type and not action_type == 'All':
audit_logs_list.filter(log_entry__action_flag=action_type)
start_date = '%s-%s-%s' % (
start_date.day, start_date.month, start_date.year)
end_date = '%s-%s-%s' % (end_date.day, end_date.month, end_date.year)
audit_logs_list = audit_logs_list.order_by('-id')
paginator = Paginator(audit_logs_list, settings.PAGNATION_LIMIT)
page = request.GET.get('page')
try:
audit_logs = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
audit_logs = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
audit_logs = paginator.page(paginator.num_pages)
# debug(audit_logs_list,'Logs')
return render_view(request, 'audits.html',
{'audit_logs': audit_logs,
'unique_actions': unique_actions,
'start_date': start_date,
'end_date': end_date,
'unique_users': unique_users,
}
)
@permission_required('view_reports')
def reports(request):
# we only pick successful transactions
profile = AdminProfile.objects.get(user=request.user)
# = COUNTRY_CHOICES[profile.country]
if 'end_date' in request.GET:
end_date = '%s' % request.GET['end_date']
end_date = datetime.strptime(end_date, '%d-%m-%Y')
else:
end_date = datetime.now()
if 'start_date' in request.GET:
start_date = '%s' % request.GET['start_date']
start_date = datetime.strptime(start_date, '%d-%m-%Y')
else:
#start_date = end_date - relativedelta(years=1)
first_successful_transaction = Transaction.objects.filter(
visa_success=True,
is_processed=True,
amount_sent__isnull=False
).values_list(
'processed_on', flat=True).order_by('id')[:1]
# start_date = "%d-%m-%Y".format(rstart_date[0])
# #datetime.strptime(rstart_date[0], '%d-%m-%Y')
if len(first_successful_transaction) > 0:
start_date = first_successful_transaction[0]
else:
start_date = end_date
# make dates datezone aware
try:
end_date = pytz.utc.localize(end_date)
start_date = pytz.utc.localize(start_date)
except Exception, e:
debug(e, 'localize time error', 'admin')
pass
status = request.GET.get('status', 1)
# if request.GET['status']:
transaction_list = stuff_transaction_list(request.user, int(status))
countries_list = transaction_list.values_list(
'sender_country', flat=True).distinct()
if start_date == end_date:
transaction_list = transaction_list.filter(
Q(started_on__startswith=start_date.date()) | Q(
started_on__startswith=end_date.date()),
)
# print transaction_list
else:
transaction_list = transaction_list.filter(
Q(started_on__range=(start_date, end_date)) | Q(
started_on__startswith=start_date.date()) | Q(started_on__startswith=end_date.date())
)
number_of_trasactions = amount_transfered = number_of_unique_senders = average_transaction_amount = 0
if transaction_list:
transaction_list = transaction_list.order_by('processed_on')
# get filters need to come before sums
# filter the network
if 'network' in request.GET and not request.GET['network'] == 'All':
transaction_list = transaction_list.filter(
mobile_network_code=request.GET['network'])
if 'ctry' in request.GET and not request.GET['ctry'] == 'All':
transaction_list = transaction_list.filter(
to_country__code=request.GET['ctry'])
if 'sender_ctry' in request.GET and not request.GET['sender_ctry'] == 'All':
transaction_list = transaction_list.filter(
sender_country=request.GET['sender_ctry'])
# if 'sender_ctry':
# transaction_list = transaction_list.filter(album__artist__id=123)
if transaction_list:
number_of_trasactions = transaction_list.count()
amount_transfered = transaction_list.aggregate(
Sum('amount_received'))
if 'amount_received__sum' in amount_transfered:
amount_transfered = amount_transfered['amount_received__sum']
if settings.IS_SQLITE:
number_of_unique_senders = []
for t_user in transaction_list:
if t_user.user not in number_of_unique_senders:
number_of_unique_senders.append(t_user.user)
number_of_unique_senders = len(number_of_unique_senders)
else:
# number_of_unique_senders = transaction_list.distinct(
# 'user').count()
number_of_unique_senders = transaction_list.values_list(
'user', flat=True).distinct()
# if len(l) > 0 else float('nan')
average_transaction_amount = amount_transfered / \
number_of_trasactions
start_date = '%s-%s-%s' % (
start_date.day, start_date.month, start_date.year)
end_date = '%s-%s-%s' % (end_date.day, end_date.month, end_date.year)
# restrict a user to thier
countries = get_country_access(request.user)
networks = get_network_access(request.user)
if request.POST:
if 'generate_report' in request.POST:
return generate_csv_report(transaction_list, request.user)
paginator = Paginator(transaction_list, settings.PAGNATION_LIMIT)
page = request.GET.get('page')
try:
transactions = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
transactions = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
transactions = paginator.page(paginator.num_pages)
return render_view(request, 'reports.html', {'transactions': transactions,
'start_date': start_date,
'end_date': end_date,
'number_of_trasactions': number_of_trasactions,
'amount_transfered': amount_transfered,
'number_of_unique_senders': number_of_unique_senders,
'average_transaction_amount': average_transaction_amount,
'countries': countries,
'networks': networks,
'countries_list': countries_list,
})
@superuser_required
def edit_stuff_user(request, name):
    '''Edit a staff member (AdminProfile).

    `name` is the obfuscated id (real id XOR 0xABCDEFAB).  On a valid POST
    the password (when supplied) and the permission set are updated; form
    errors are not reported, the page simply re-renders with the bound form.
    '''
    id = int(name) ^ 0xABCDEFAB
    user = get_object_or_404(AdminProfile.objects.filter(id=id))
    form = EditAdminUserForm()
    if request.POST:
        form = EditAdminUserForm(request.POST, instance=user.user)
        if form.is_valid():
            # update user password
            if form.cleaned_data['password']:
                #user.user.password = form.cleaned_data['password']
                user.user.set_password(form.cleaned_data['password'])
            # update user permissions
            assign_permissions(user.user, form, update=True)
            user.user.save()
            messages.success(request, "The Stuff User Was Successfully Edited")
    country_access = get_country_access(user.user)
    network_access = get_network_access(user.user)
    edit_permissions = get_user_permissions(user.user)
    return render_view(request, 'admin/edit_stuff_user.html',
                       {'stuff_profile': user, 'NETWORK_CHOICES': NETWORK_CHOICES,
                        'COUNTRY_CHOICES': COUNTRY_CHOICES, 'form': form,
                        'country_access': country_access, 'network_access': network_access,
                        'edit_permissions': edit_permissions}
                       )
@superuser_required
def create_customer_care_user(request):
    '''create a customer care user'''
    # Thin wrapper: identical flow to create_stuff_user with the
    # customer-care flag pre-set.
    return create_stuff_user(request, is_customer_care=True)
@superuser_required
def create_stuff_user(request, is_customer_care=False):
    '''create an admin user

    Creates the auth User plus its AdminProfile.  The `role` choice maps to
    boolean flags on the profile; `is_customer_care` marks support staff.
    '''
    form = CreateAdminUserForm()
    if request.POST:
        form = CreateAdminUserForm(request.POST)
        if form.is_valid():
            user = User.objects.create_user(
                form.cleaned_data['username'], form.cleaned_data['email'], form.cleaned_data['password'])
            user.save()
            user.is_staff = True
            # user.save()
            # assign user permissions
            update = False
            # second save persists the is_staff flag set above
            user.save()
            # save profile options
            profile = AdminProfile(user=user,category=form.cleaned_data['category'],cat_name=form.cleaned_data['cat_name'],doct_name=form.cleaned_data['doct_name'],phone=form.cleaned_data['phone'],region=form.cleaned_data['region'],districts=form.cleaned_data['districts'],info=form.cleaned_data['info'])
            profile.is_customer_care = is_customer_care
            # if form.cleaned_data['reports'] == '2':
            #     profile.is_customer_care = True
            # map the selected role onto the profile's boolean flags
            if form.cleaned_data['role'] == 'lawyer':
                profile.is_lawyer = True
            if form.cleaned_data['role'] == 'doctor':
                profile.is_doctor = True
            if form.cleaned_data['role'] == 'jounalist':
                profile.is_jounalist = True
            if form.cleaned_data['role'] == 'education':
                profile.is_educ = True
            profile.save()
            # user = form.save()
            # user.is_staff = True
            # user.save()
            # debug(user)
            messages.success(request, "The User Was Successfully Created")
    return render_view(request, 'create_stuff_user.html', {'form': form, 'NETWORK_CHOICES': NETWORK_CHOICES, 'COUNTRY_CHOICES': COUNTRY_CHOICES, 'is_customer_care': is_customer_care}
                       )
def add_health_info(request, is_customer_care=False):
    '''Create a ``HealthInfo`` entry (subject + message) from a posted form.

    Fix: the form was previously bound to ``request.POST`` even on GET,
    rendering a bound-but-empty form full of validation errors; it is now
    only bound when data was posted.  Debug ``print`` statements and the
    unused ``update`` local were removed.
    '''
    form = AddHealthInfoForm()
    if request.POST:
        form = AddHealthInfoForm(request.POST)
        if form.is_valid():
            # NOTE(review): values are read from the raw POST (as before)
            # rather than form.cleaned_data -- confirm the field names match.
            health_info = HealthInfo(
                msg=request.POST.get('message', ''),
                sub=request.POST.get('title_health', ''))
            health_info.save()
            messages.success(request, "The Info Was Successfully Created")
    return render_view(request, 'add_health_info.html', {'form': form})
def add_law_info(request, is_customer_care=False):
    '''Create a ``LawhInfo`` entry (subject + message) from a posted form.

    Fix: the form was previously bound to ``request.POST`` even on GET,
    rendering a bound-but-empty form full of validation errors; it is now
    only bound when data was posted.  Debug ``print`` statements and the
    unused ``update`` local were removed.
    '''
    form = AddLawInfoForm()
    if request.POST:
        form = AddLawInfoForm(request.POST)
        if form.is_valid():
            # NOTE(review): values are read from the raw POST (as before)
            # rather than form.cleaned_data -- confirm the field names match.
            law_info = LawhInfo(
                msg=request.POST.get('msg', ''),
                sub=request.POST.get('sub', ''))
            law_info.save()
            messages.success(request, "The Info Was Successfully Created")
    return render_view(request, 'add_law_info.html', {'form': form})
def add_pub_info(request, is_customer_care=False):
    '''Create a ``JounalisthInfo`` (journalist) entry from a posted form.

    Fix: the form was previously bound to ``request.POST`` even on GET,
    rendering a bound-but-empty form full of validation errors; it is now
    only bound when data was posted.  Debug ``print`` statements and the
    unused ``update`` local were removed.

    NOTE(review): this view renders 'add_law_info.html', which looks like a
    copy/paste of add_law_info -- left unchanged pending confirmation that
    a dedicated 'add_pub_info.html' template exists.
    '''
    form = AddPubInfoForm()
    if request.POST:
        form = AddPubInfoForm(request.POST)
        if form.is_valid():
            # NOTE(review): values are read from the raw POST (as before)
            # rather than form.cleaned_data -- confirm the field names match.
            pub_info = JounalisthInfo(
                msg=request.POST.get('msg', ''),
                sub=request.POST.get('sub', ''))
            pub_info.save()
            messages.success(request, "The Info Was Successfully Created")
    return render_view(request, 'add_law_info.html', {'form': form})
def add_educ_info(request, is_customer_care=False):
    '''Create an ``EducationInfo`` entry (subject + message) from a posted form.

    Fix: the form was previously bound to ``request.POST`` even on GET,
    rendering a bound-but-empty form full of validation errors; it is now
    only bound when data was posted.  Debug ``print`` statements and the
    unused ``update`` local were removed.
    '''
    form = AddEducInfoForm()
    if request.POST:
        form = AddEducInfoForm(request.POST)
        if form.is_valid():
            # NOTE(review): values are read from the raw POST (as before)
            # rather than form.cleaned_data -- confirm the field names match.
            educ_info = EducationInfo(
                msg=request.POST.get('msg', ''),
                sub=request.POST.get('sub', ''))
            educ_info.save()
            messages.success(request, "The Info Was Successfully Created")
    return render_view(request, 'add_educ_info.html', {'form': form})
@superuser_required
def create_educ_user(request, is_customer_care=False):
    '''create an admin user

    Builds an education/health staff account through Create_Health_User
    from the validated form fields.
    NOTE(review): `is_customer_care` is accepted but never used here, and
    form errors are silently ignored (the bound form re-renders).
    '''
    form = CreateEducUserForm()
    if request.POST:
        form = CreateEducUserForm(request.POST)
        if form.is_valid():
            user = Create_Health_User(
                username=form.cleaned_data['username'],email=form.cleaned_data['email'],category=form.cleaned_data['category'],cat_name=form.cleaned_data['cat_name'],doct_name=form.cleaned_data['doct_name'],speciality=form.cleaned_data['speciality'],password=form.cleaned_data['password'],phone=form.cleaned_data['phone'],region=form.cleaned_data['region'],districts=form.cleaned_data['districts'], info=form.cleaned_data['info'])
            user.save()
            # user.save()
            # assign user permissions
            update = False
            messages.success(request, "The User Was Successfully Created")
    return render_view(request, 'create_health_user.html', {'form': form}
                       )
def assign_permissions(user, form, update=False, is_customer_care=False):
    '''assign staff members permissions

    The per-area level values coming from the form are strings:
      '2' = view only (edit removed), '3' = view + edit,
      '1' = none -- existing permissions are removed only when update=True.
    Customer-care accounts just get view-only transaction access and skip
    the per-area levels entirely.
    '''
    if user:
        if is_customer_care:
            # customer care options
            content_type = ContentType.objects.get_for_model(Transaction)
            view_transaction = Permission.objects.get(
                content_type=content_type, codename="view_transaction")
            edit_transactions = Permission.objects.get(
                content_type=content_type, codename="edit_transaction")
            user.user_permissions.add(view_transaction)
            user.user_permissions.remove(edit_transactions)
        else:
            # profile (customer account) permissions
            content_type = ContentType.objects.get_for_model(Profile)
            view_profile = Permission.objects.get(
                content_type=content_type, codename="view_profile")
            edit_profile = Permission.objects.get(
                content_type=content_type, codename="edit_profile")
            if form.cleaned_data['users'] == '2':
                user.user_permissions.add(view_profile)
                user.user_permissions.remove(edit_profile)
            elif form.cleaned_data['users'] == '3':
                user.user_permissions.add(edit_profile, view_profile)
            if update and form.cleaned_data['users'] == '1':
                user.user_permissions.remove(edit_profile, view_profile)
            # rates edit permissions
            content_type = ContentType.objects.get_for_model(Rate)
            view_rate = Permission.objects.get(
                content_type=content_type, codename="view_rate")
            edit_rate = Permission.objects.get(
                content_type=content_type, codename="edit_rate")
            if form.cleaned_data['rates'] == '2':
                user.user_permissions.add(view_rate)
                user.user_permissions.remove(edit_rate)
            elif form.cleaned_data['rates'] == '3':
                user.user_permissions.add(view_rate, edit_rate)
            if update and form.cleaned_data['rates'] == '1':
                user.user_permissions.remove(edit_rate, view_rate)
            # transaction edit permissions
            content_type = ContentType.objects.get_for_model(Transaction)
            view_transaction = Permission.objects.get(
                content_type=content_type, codename="view_transaction")
            edit_transactions = Permission.objects.get(
                content_type=content_type, codename="edit_transaction")
            if form.cleaned_data['transactions'] == '2':
                user.user_permissions.add(view_transaction)
                user.user_permissions.remove(edit_transactions)
            elif form.cleaned_data['transactions'] == '3':
                user.user_permissions.add(view_transaction, edit_transactions)
            if update and form.cleaned_data['transactions'] == '1':
                user.user_permissions.remove(
                    edit_transactions, view_transaction)
            # reports (view-only permission, no edit counterpart)
            content_type = ContentType.objects.get_for_model(Transaction)
            view_reports = Permission.objects.get(
                content_type=content_type,
                codename="view_reports"
            )
            if form.cleaned_data['reports'] == '2':
                user.user_permissions.add(view_reports)
            if update and form.cleaned_data['reports'] == '1':
                user.user_permissions.remove(view_reports)
            # audit trails (field may be absent from some forms, hence try)
            content_type = ContentType.objects.get_for_model(AdminProfile)
            view_audit_trail = Permission.objects.get(
                content_type=content_type, codename="view_audit_trail")
            try:
                if form.cleaned_data['audit_trail'] == '2':
                    user.user_permissions.add(view_audit_trail)
                if update and form.cleaned_data['audit_trail'] == '1':
                    user.user_permissions.remove(view_audit_trail)
            except Exception, e:
                print e
        user.save()
def download_image(name, image, url):
    """Fetch *url*, convert it to an RGB JPEG and attach it to the *image*
    field under *name* (the model instance itself is not saved)."""
    raw = StringIO(urllib2.urlopen(url).read())
    picture = Image.open(raw)
    # JPEG cannot store palette/alpha modes, so normalise to RGB first
    if picture.mode != "RGB":
        picture = picture.convert("RGB")
    jpeg_buffer = StringIO()
    picture.save(jpeg_buffer, "JPEG")
    image.save(name, ContentFile(jpeg_buffer.getvalue()), save=False)
@permission_required('edit_user')
def contact_user(request, name):
    '''Send a support e-mail to a customer and record it as an EmailSupport row.

    `name` is the obfuscated profile id (real id XOR 0xABCDEFAB).
    '''
    id = int(name) ^ 0xABCDEFAB
    profile = get_object_or_404(Profile.objects.filter(id=id))
    reasons = EmailSupport.EMAIL_REASON
    form = ContactUserForm()
    if request.POST:
        data = request.POST.copy()
        data['user'] = profile.user.pk
        #user = User.objects.get(user=request.user)
        data['support_staff'] = request.user.id
        if not 'subject' in data:
            # derive the subject from the selected reason code by scanning
            # the (code, label) choice pairs for a matching code
            reason = request.POST['reason']
            reason = [(age, person_id)
                      for (age, person_id) in reasons if age == reason]
            data['subject'] = reason[0][1]
        #debug(data,'Contact Form Database')
        form = ContactUserForm(data)
        if form.is_valid():
            form.save()
            template = settings.EMAIL_TEMPLATE_DIR + 'support.html'
            # best-effort: include the staff member's display name if their
            # Profile exists
            try:
                staff = Profile.objects.get(user=request.user)
                data['support_staff_names'] = staff.get_names()
            except Exception, e:
                pass
            data['user_names'] = profile.get_names()
            mailer(request, data['subject'],
                   template, data, profile.user.email)
            messages.success(request, 'The Message Was Successfully sent')
    support_emails = EmailSupport.objects.filter(
        user=profile.user).order_by('-id')
    return render_view(request, 'contact_user.html', {'user_profile': profile, 'form': form, 'reasons': reasons, 'support_emails': support_emails})
@superuser_required
def delete_user(request):
    '''Delete the requesting user's account with its transactions and phonebook.

    Fixes: the original referenced the undefined name ``Transactions`` and
    queried ``User.objects.get(user=...)`` (``User`` has no ``user`` field),
    so this view could never complete.  ``filter().delete()`` is used so a
    user with zero or many related rows does not raise.
    '''
    # only honour an explicit POSTed delete request
    if not request.POST or not 'delete_user' in request.POST:
        log_unauthorized_access(request)
        return HttpResponseRedirect(reverse('custom_404'))
    # NOTE(review): as in the original code the target is request.user's own
    # data -- confirm the intended target is not meant to come from the POST.
    Transaction.objects.filter(user=request.user).delete()
    Phonebook.objects.filter(user=request.user).delete()
    request.user.delete()
    return HttpResponseRedirect(reverse('custom_404'))
def admin_503(request):
    """Render the admin service-unavailable page."""
    context = {}
    return render_view(request, 'admin_503.html', context)
def generate_csv_report(transaction, user=False, _file=False):
    '''generate a csv report

    Writes one CSV row per transaction.  With `_file` True the CSV goes to
    an in-memory StringIO buffer instead of an HTTP attachment response.
    Superusers additionally get the mobile-network and USD-amount columns.
    '''
    import csv
    from django.utils.encoding import smart_str
    date = datetime.today().strftime("%B-%d-%Y")
    response = HttpResponse(content_type='text/csv')
    if _file:
        '''if we want a'''
        response = StringIO()
    else:
        response[
            'Content-Disposition'] = 'attachment; filename="report_%s.csv"' % date
    writer = csv.writer(response)
    header = [
        smart_str(u"Transaction ID"),
        smart_str(u"MOM Transaction ID"),
        smart_str(u"Date"),
        smart_str(u"Sender names"),
        smart_str(u"Sender number"),
        smart_str(u"Sender country"),
        smart_str(u"Currency"),
        smart_str(u"Recipient name"),
        smart_str(u"Recipient number"),
        smart_str(u"Amount"),
        smart_str(u"Status"),
        smart_str(u"Revenue Share"),
    ]
    if user:
        if user.is_superuser:
            # superuser-only columns
            header.append(smart_str(u"Mobile network"))
            header.append(smart_str(u"USD Amount Sent"))
            #header.append(smart_str(u"Exchange Rate"))
    writer.writerow(header)
    for t in transaction:
        # prefer the delivery date; fall back to the initiation date
        if t.actual_delivery_date:
            t_date = t.actual_delivery_date
        else:
            t_date = t.actual_initiation_date
        content = [
            smart_str(t.get_invoice()),
            smart_str(t.get_network_transactionid()),
            smart_str(t_date),
            smart_str(t.get_sender_profile().get_names()),
            smart_str(t.get_sender_profile().get_phonenumber()),
            smart_str(t.sender_country),
            smart_str(t.currency_sent),
            smart_str(t.recipient_names()),
            smart_str(t.recipient_number()),
            smart_str(t.amount_received),
            smart_str(t.actual_status),
            smart_str(t.revenue_share()),
        ]
        if user:
            if user.is_superuser:
                content.append(smart_str(t.get_mobile_network()))
                content.append(smart_str(t.amount_sent))
                # content.append(smart_str(t.exchange_rate))
        writer.writerow(content)
    return response
@permission_required('view_transaction')
def phonenumber_transaction_search(request):
    '''phonenumber transaction search

    Tries, in order: obfuscated invoice id (input XOR 0xABCDEFAB), operator
    response code, then receiver phone number.  Hits are then limited to
    the staff member's allowed countries/networks and paginated.
    '''
    form = transactionPhonenumberSearchForm()
    transaction_list = []
    transactions = {}
    if request.GET:
        form = transactionPhonenumberSearchForm(request.GET)
        if form.is_valid():
            phon_num = '%s' % request.GET.get('phonenumber', '')
            try:
                '''search by Transaction id'''
                invoice_id = int(phon_num) ^ 0xABCDEFAB
                transaction_list = Transaction.objects.filter(pk=invoice_id)
            except Exception, e:
                # non-numeric input simply falls through to the next strategy
                print e
                pass
            if len(transaction_list) < 1:
                '''search by operator id'''
                transaction_list = Transaction.objects.filter(
                    mobile_response_code=phon_num
                )
            if len(transaction_list) < 1:
                '''search by Transaction phonenumber'''
                transaction_list = Transaction.objects.filter(
                    receiver_number=phon_num
                )
            user = request.user
            if len(transaction_list) > 0:
                # OR together the allowed countries and networks, then AND
                # the two groups to restrict what this staff member may see
                country_filter = network_filter = Q()
                for value, keyword in get_country_access(user):
                    print "Country access Value: %s , keyword: %s" % (value, keyword)
                    country_filter |= Q(to_country__code=value)
                for value, keyword in get_network_access(user):
                    network_filter |= Q(mobile_network_code=value)
                #transaction_list = Transaction.objects.filter(country_filter & network_filter)
                transaction_list = transaction_list.filter(
                    country_filter & network_filter)
                paginator = Paginator(
                    transaction_list, settings.PAGNATION_LIMIT)
                page = request.GET.get('page')
                try:
                    transactions = paginator.page(page)
                except PageNotAnInteger:
                    # If page is not an integer, deliver first page.
                    transactions = paginator.page(1)
                except EmptyPage:
                    # If page is out of range (e.g. 9999), deliver last page of
                    # results.
                    transactions = paginator.page(paginator.num_pages)
    return render_view(request, 'phonenumber_transaction_search.html',
                       {'form': form, 'transactions': transactions})
@superuser_required
def export_data(request):
    '''generate a csv report

    Streams a CSV of user records; the POSTed `data_type` selects which
    admin_utils query is exported ('1' verified, '2' pending verification,
    '3' blocked, '4' unverified).  GET just renders the selection page.
    '''
    if request.POST:
        import csv
        from django.utils.encoding import smart_str
        date = datetime.today().strftime("%B-%d-%Y")
        response = HttpResponse(content_type='text/csv')
        filename = 'user_data'
        user_type = request.POST.get('data_type', None)
        user_list = {}
        if user_type == '1':
            user_list = admin_utils.verified_users()
            filename = 'verified_user_data'
        elif user_type == '2':
            user_list = admin_utils.users_pending_verification()
            filename = 'verification_pending_user_data'
        elif user_type == '3':
            user_list = admin_utils.blocked_users()
            filename = 'blocked_user_data'
        elif user_type == '4':
            user_list = admin_utils.unverified_users()
            filename = 'unverified_user_data'
        response[
            'Content-Disposition'] = 'attachment; filename="export_%s_%s.csv"' % (filename, date)
        # NOTE(review): duplicate csv/smart_str imports below are harmless
        import csv
        from django.utils.encoding import smart_str
        #csvfile = StringIO.StringIO()
        writer = csv.writer(response)
        header = [
            smart_str(u"Email"),
            smart_str(u"Phone number"),
            smart_str(u"Firstname"),
            smart_str(u"Lastname"),
            smart_str(u"Country"),
        ]
        writer.writerow(header)
        for t in user_list:
            content = [
                smart_str(t.user.email),
                smart_str(t.get_phonenumber()),
                smart_str(t.firstname),
                smart_str(t.lastname),
                smart_str(t.country),
            ]
            writer.writerow(content)
        return response
    return render_view(request, 'export_data.html', {})
| 39.432962 | 430 | 0.619339 |
ae5506b0817f6bd6ba613b44f0005304d3cd1c5d | 932 | py | Python | example.py | rivasd/activiewBCI | b2278ebacc733e328f28d308146108a52d3deb78 | [
"MIT"
] | 1 | 2020-09-10T08:04:06.000Z | 2020-09-10T08:04:06.000Z | example.py | rivasd/activiewBCI | b2278ebacc733e328f28d308146108a52d3deb78 | [
"MIT"
] | null | null | null | example.py | rivasd/activiewBCI | b2278ebacc733e328f28d308146108a52d3deb78 | [
"MIT"
] | null | null | null | from ActiView import ActiveTwo
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui
import numpy as np
app = QtGui.QApplication([])
win = pg.GraphicsWindow()
win.setWindowTitle("Mimicking ActiView's EEG monitoring screen")
monitor = win.addPlot()
#we have so many curves that we will store them in an array
curves = [monitor.plot() for x in range(64)]
#this is the data that will be continuously updated and plotted
rawdata = np.empty((64,0))
#initialize connection with ActiView
actiview = ActiveTwo()
def update():
global rawdata
data = actiview.read()
rawdata = np.concatenate((rawdata, data), axis=1)
for i in range(64):
curves[i].setData(rawdata[i])
timer = pg.QtCore.QTimer()
timer.timeout.connect(update)
timer.start(0)
if __name__ == '__main__':
import sys
if sys.flags.interactive != 1 or not hasattr(pg.QtCore, 'PYQT_VERSION'):
pg.QtGui.QApplication.exec_() | 23.897436 | 76 | 0.714592 |
ae56d2f7d0da35ed27472371e04da9a9312adf59 | 608 | py | Python | encoder_decoder_model/DNS_NetworkParameters_Jiao_Wan.py | mrjiao2018/LearningGroupStructure | 7426837b8c96f771724cfd663a57ed32f9d16560 | [
"MIT"
] | 1 | 2018-12-07T14:36:13.000Z | 2018-12-07T14:36:13.000Z | encoder_decoder_model/DNS_NetworkParameters_Jiao_Wan.py | mrjiao2018/LearningGroupStructure | 7426837b8c96f771724cfd663a57ed32f9d16560 | [
"MIT"
] | null | null | null | encoder_decoder_model/DNS_NetworkParameters_Jiao_Wan.py | mrjiao2018/LearningGroupStructure | 7426837b8c96f771724cfd663a57ed32f9d16560 | [
"MIT"
] | 1 | 2018-12-05T11:03:07.000Z | 2018-12-05T11:03:07.000Z | import os
class NetworkParameters:
    """Filesystem layout for model checkpoints rooted at *modelDirectory*."""

    def __init__(self, modelDirectory):
        """Create the checkpoint directory tree and derive file-name templates."""
        self.modelDirectory = modelDirectory
        self.checkpointedModelDir = os.path.join(modelDirectory, 'savedModels')
        # create root first, then the nested savedModels directory
        for directory in (self.modelDirectory, self.checkpointedModelDir):
            if not os.path.exists(directory):
                os.mkdir(directory)
        # per-epoch checkpoint template and the single best-model file
        self.modelSaveName = os.path.join(self.checkpointedModelDir, 'model_{epoch:02d}.hdf5')
        self.bestModelSaveName = os.path.join(self.checkpointedModelDir, 'best_model.hdf5')
ae581ce55f5c70d548f9c9c2e1e5bda4e73bac54 | 11,401 | py | Python | segmentron/utils/visualize.py | GhadeerElmkaiel/Trans2Seg | 6717db602205cbed494ae1913ac7cbbca8e83463 | [
"Apache-2.0"
] | null | null | null | segmentron/utils/visualize.py | GhadeerElmkaiel/Trans2Seg | 6717db602205cbed494ae1913ac7cbbca8e83463 | [
"Apache-2.0"
] | null | null | null | segmentron/utils/visualize.py | GhadeerElmkaiel/Trans2Seg | 6717db602205cbed494ae1913ac7cbbca8e83463 | [
"Apache-2.0"
] | null | null | null | import os
import logging
import numpy as np
import torch
from PIL import Image
#from torchsummary import summary
from thop import profile
__all__ = ['get_color_pallete', 'print_iou', 'set_img_color',
'show_prediction', 'show_colorful_images', 'save_colorful_images']
def print_iou(iu, mean_pixel_acc, class_names=None, show_no_back=False):
    """Print mean IoU (optionally also excluding the background class) and pixel accuracy.

    Fix: the original per-class loop did no visible work (its append was
    commented out) yet raised IndexError whenever ``class_names`` was
    shorter than ``iu``; it has been removed.

    Args:
        iu: per-class IoU array; NaN entries are ignored by the means.
        mean_pixel_acc: overall pixel accuracy in [0, 1].
        class_names: unused (kept for interface compatibility).
        show_no_back: also report the mean over classes 1..N (no background).
    """
    mean_IU = np.nanmean(iu)
    mean_IU_no_back = np.nanmean(iu[1:])
    lines = []
    if show_no_back:
        lines.append('mean_IU: %.3f%% || mean_IU_no_back: %.3f%% || mean_pixel_acc: %.3f%%' % (
            mean_IU * 100, mean_IU_no_back * 100, mean_pixel_acc * 100))
    else:
        lines.append('mean_IU: %.3f%% || mean_pixel_acc: %.3f%%' % (mean_IU * 100, mean_pixel_acc * 100))
    lines.append('=================================================')
    line = "\n".join(lines)
    print(line)
@torch.no_grad()
def show_flops_params(model, device, input_shape=[1, 3, 512, 512]):
    # Log approximate FLOPs (in G) and parameter count (in M) for `model`
    # using a dummy random batch of `input_shape` on `device`; the actual
    # counting is done by thop.profile.
    # NOTE(review): mutable default `input_shape` is safe only because it is
    # never mutated here.
    #summary(model, tuple(input_shape[1:]), device=device)
    input = torch.randn(*input_shape).to(torch.device(device))
    flops, params = profile(model, inputs=(input,), verbose=False)
    logging.info('{} flops: {:.3f}G input shape is {}, params: {:.3f}M'.format(
        model.__class__.__name__, flops / 1000000000, input_shape[1:], params / 1000000))
def set_img_color(img, label, colors, background=0, show255=False):
    """Paint ``img`` in place: pixels whose ``label`` equals a class index
    receive that class's color; the ``background`` class is left untouched.

    When ``show255`` is set, pixels labeled 255 are painted white.
    Returns the same array that was passed in.
    """
    for class_idx, color in enumerate(colors):
        if class_idx == background:
            continue
        img[np.where(label == class_idx)] = color
    if show255:
        img[np.where(label == 255)] = 255
    return img
def show_prediction(img, pred, colors, background=0):
    """Return a uint8 copy of ``img`` with ``pred``'s classes painted on top."""
    overlay = np.array(img, np.uint8)
    set_img_color(overlay, pred, colors, background)
    return np.array(overlay)
def show_colorful_images(prediction, palettes):
    """Open the colorized prediction map in the default image viewer."""
    colored = palettes[prediction.astype('uint8').squeeze()]
    Image.fromarray(colored).show()
def save_colorful_images(prediction, filename, output_dir, palettes):
    '''
    Colorize a prediction map with the given palette and save it to disk.

    :param prediction: [B, H, W, C]
    :param filename: output file name, may contain sub-directories
    :param output_dir: root directory the file name is joined onto
    :param palettes: lookup array mapping class ids to RGB colors
    '''
    im = Image.fromarray(palettes[prediction.astype('uint8').squeeze()])
    fn = os.path.join(output_dir, filename)
    out_dir = os.path.split(fn)[0]
    # makedirs creates every missing intermediate directory and, with
    # exist_ok, avoids the check-then-mkdir race of the previous code
    # (os.mkdir also failed when more than one path level was missing).
    os.makedirs(out_dir, exist_ok=True)
    im.save(fn)
def get_color_pallete(npimg, dataset='cityscape'):
    """Visualize image.

    Parameters
    ----------
    npimg : numpy.ndarray
        Single channel label image with shape `H, W, 1`.
    dataset : str, default: 'cityscape'
        The dataset the model was pretrained on
        ('pascal_voc', 'ade20k', 'cityscape', 'trans10kv2', ...).
        (The previous docstring wrongly said the default was 'pascal_voc'.)

    Returns
    -------
    out_img : PIL.Image
        Image with color pallete applied, or None for an unknown dataset.
    """
    # recovery boundary: map the -1 "ignore" label to 255 for VOC-style data.
    # NOTE: this mutates the caller's array in place, as before.
    if dataset in ('pascal_voc', 'pascal_aug'):
        npimg[npimg == -1] = 255
    if dataset == 'ade20k':
        # ADE20K ids are shifted by one before palettization
        npimg = npimg + 1
    # dataset name -> module-level palette; 'transparent11' shares the
    # Trans10K-v2 palette, exactly as the old if/elif chain did.
    palette_by_dataset = {
        'ade20k': adepallete,
        'cityscape': cityscapepallete,
        'trans10kv2': trans10kv2pallete,
        'transparent11': trans10kv2pallete,
        'pascal_voc': vocpallete,
        'sber_dataset': sberpallete,
        'sber_dataset_all': sberallpallete,
        'sber_dataset_all_no_fu': sberallNoFUpallete,
    }
    if dataset not in palette_by_dataset:
        # unknown dataset names fell through and returned None before, too
        return None
    out_img = Image.fromarray(npimg.astype('uint8'))
    out_img.putpalette(palette_by_dataset[dataset])
    return out_img
def _getvocpallete(num_cls):
n = num_cls
pallete = [0] * (n * 3)
for j in range(0, n):
lab = j
pallete[j * 3 + 0] = 0
pallete[j * 3 + 1] = 0
pallete[j * 3 + 2] = 0
i = 0
while (lab > 0):
pallete[j * 3 + 0] |= (((lab >> 0) & 1) << (7 - i))
pallete[j * 3 + 1] |= (((lab >> 1) & 1) << (7 - i))
pallete[j * 3 + 2] |= (((lab >> 2) & 1) << (7 - i))
i = i + 1
lab >>= 3
return pallete
# 256-entry flat [R, G, B, ...] palette for PASCAL VOC, one triplet per class id.
vocpallete = _getvocpallete(256)
adepallete = [
0, 0, 0, 120, 120, 120, 180, 120, 120, 6, 230, 230, 80, 50, 50, 4, 200, 3, 120, 120, 80, 140, 140, 140, 204,
5, 255, 230, 230, 230, 4, 250, 7, 224, 5, 255, 235, 255, 7, 150, 5, 61, 120, 120, 70, 8, 255, 51, 255, 6, 82,
143, 255, 140, 204, 255, 4, 255, 51, 7, 204, 70, 3, 0, 102, 200, 61, 230, 250, 255, 6, 51, 11, 102, 255, 255,
7, 71, 255, 9, 224, 9, 7, 230, 220, 220, 220, 255, 9, 92, 112, 9, 255, 8, 255, 214, 7, 255, 224, 255, 184, 6,
10, 255, 71, 255, 41, 10, 7, 255, 255, 224, 255, 8, 102, 8, 255, 255, 61, 6, 255, 194, 7, 255, 122, 8, 0, 255,
20, 255, 8, 41, 255, 5, 153, 6, 51, 255, 235, 12, 255, 160, 150, 20, 0, 163, 255, 140, 140, 140, 250, 10, 15,
20, 255, 0, 31, 255, 0, 255, 31, 0, 255, 224, 0, 153, 255, 0, 0, 0, 255, 255, 71, 0, 0, 235, 255, 0, 173, 255,
31, 0, 255, 11, 200, 200, 255, 82, 0, 0, 255, 245, 0, 61, 255, 0, 255, 112, 0, 255, 133, 255, 0, 0, 255, 163,
0, 255, 102, 0, 194, 255, 0, 0, 143, 255, 51, 255, 0, 0, 82, 255, 0, 255, 41, 0, 255, 173, 10, 0, 255, 173, 255,
0, 0, 255, 153, 255, 92, 0, 255, 0, 255, 255, 0, 245, 255, 0, 102, 255, 173, 0, 255, 0, 20, 255, 184, 184, 0,
31, 255, 0, 255, 61, 0, 71, 255, 255, 0, 204, 0, 255, 194, 0, 255, 82, 0, 10, 255, 0, 112, 255, 51, 0, 255, 0,
194, 255, 0, 122, 255, 0, 255, 163, 255, 153, 0, 0, 255, 10, 255, 112, 0, 143, 255, 0, 82, 0, 255, 163, 255,
0, 255, 235, 0, 8, 184, 170, 133, 0, 255, 0, 255, 92, 184, 0, 255, 255, 0, 31, 0, 184, 255, 0, 214, 255, 255,
0, 112, 92, 255, 0, 0, 224, 255, 112, 224, 255, 70, 184, 160, 163, 0, 255, 153, 0, 255, 71, 255, 0, 255, 0,
163, 255, 204, 0, 255, 0, 143, 0, 255, 235, 133, 255, 0, 255, 0, 235, 245, 0, 255, 255, 0, 122, 255, 245, 0,
10, 190, 212, 214, 255, 0, 0, 204, 255, 20, 0, 255, 255, 255, 0, 0, 153, 255, 0, 41, 255, 0, 255, 204, 41, 0,
255, 41, 255, 0, 173, 0, 255, 0, 245, 255, 71, 0, 255, 122, 0, 255, 0, 255, 184, 0, 92, 255, 184, 255, 0, 0,
133, 255, 255, 214, 0, 25, 194, 194, 102, 255, 0, 92, 0, 255]
cityscapepallete = [
128, 64, 128,
244, 35, 232,
70, 70, 70,
102, 102, 156,
190, 153, 153,
153, 153, 153,
250, 170, 30,
220, 220, 0,
107, 142, 35,
152, 251, 152,
0, 130, 180,
220, 20, 60,
255, 0, 0,
0, 0, 142,
0, 0, 70,
0, 60, 100,
0, 80, 100,
0, 0, 230,
119, 11, 32,
]
# Flat [R, G, B, R, G, B, ...] palette for Trans10K-v2 / Transparent11:
# entry i is the display color of class id i (12 classes, class 0 black).
trans10kv2pallete = [
    0, 0, 0,
    120, 120, 70,
    235, 255, 7,
    6, 230, 230,
    204, 255, 4,
    120, 120, 120,
    140, 140, 140,
    255, 51, 7,
    224, 5, 255,
    204, 5, 255,
    150, 5, 61,
    4, 250, 7]
# 3-entry palette for the Sber dataset: white, red, black.
sberpallete = [
    255, 255, 255,
    255, 0, 0,
    0, 0, 0,
]
# sberallpallete = [
#     102, 255, 102, # Mirror
#     51, 221, 255, # Glass
#     245, 147, 49, # FU
#     184, 61, 245, # Other Optical Surface
#     250, 50, 83, # Floor
#     0, 0, 0,
# ]
# Variant without the "FU" class.  Note that, relative to sberallpallete,
# Floor and "Other Optical Surface" swap positions here.
sberallNoFUpallete = [
    102, 255, 102, # Mirror
    51, 221, 255, # Glass
    # 245, 147, 49, # FU
    250, 50, 83, # Floor
    184, 61, 245, # Other Optical Surface
    0, 0, 0,
]
sberallpallete = [
102, 255, 102, # Mirror
51, 221, 255, # Glass
245, 147, 49, # FU
184, 61, 245, # Other Optical Surface
250, 50, 83, # Floor
0, 0, 0,
6, 6, 6, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10, 10, 10, 11, 11, 11, 12, 12, 12, 13, 13, 13, 14, 14, 14, 15, 15, 15, 16, 16, 16, 17, 17, 17, 18, 18, 18, 19, 19, 19, 20, 20, 20, 21, 21, 21, 22, 22, 22, 23, 23, 23, 24, 24, 24, 25, 25, 25, 26, 26, 26, 27, 27, 27, 28, 28, 28, 29, 29, 29, 30, 30, 30, 31, 31, 31, 32, 32, 32, 33, 33, 33, 34, 34, 34, 35, 35, 35, 36, 36, 36, 37, 37, 37, 38, 38, 38, 39, 39, 39, 40, 40, 40, 41, 41, 41, 42, 42, 42, 43, 43, 43, 44, 44, 44, 45, 45, 45, 46, 46, 46, 47, 47, 47, 48, 48, 48, 49, 49, 49, 50, 50, 50, 51, 51, 51, 52, 52, 52, 53, 53, 53, 54, 54, 54, 55, 55, 55, 56, 56, 56, 57, 57, 57, 58, 58, 58, 59, 59, 59, 60, 60, 60, 61, 61, 61, 62, 62, 62, 63, 63, 63, 64, 64, 64, 65, 65, 65, 66, 66, 66, 67, 67, 67, 68, 68, 68, 69, 69, 69, 70, 70, 70, 71, 71, 71, 72, 72, 72, 73, 73, 73, 74, 74, 74, 75, 75, 75, 76, 76, 76, 77, 77, 77, 78, 78, 78, 79, 79, 79, 80, 80, 80, 81, 81, 81, 82, 82, 82, 83, 83, 83, 84, 84, 84, 85, 85, 85, 86, 86, 86, 87, 87, 87, 88, 88, 88, 89, 89, 89, 90, 90, 90, 91, 91, 91, 92, 92, 92, 93, 93, 93, 94, 94, 94, 95, 95, 95, 96, 96, 96, 97, 97, 97, 98, 98, 98, 99, 99, 99, 100, 100, 100, 101, 101, 101, 102, 102, 102, 103, 103, 103, 104, 104, 104, 105, 105, 105, 106, 106, 106, 107, 107, 107, 108, 108, 108, 109, 109, 109, 110, 110, 110, 111, 111, 111, 112, 112, 112, 113, 113, 113, 114, 114, 114, 115, 115, 115, 116, 116, 116, 117, 117, 117, 118, 118, 118, 119, 119, 119, 120, 120, 120, 121, 121, 121, 122, 122, 122, 123, 123, 123, 124, 124, 124, 125, 125, 125, 126, 126, 126, 127, 127, 127, 128, 128, 128, 129, 129, 129, 130, 130, 130, 131, 131, 131, 132, 132, 132, 133, 133, 133, 134, 134, 134, 135, 135, 135, 136, 136, 136, 137, 137, 137, 138, 138, 138, 139, 139, 139, 140, 140, 140, 141, 141, 141, 142, 142, 142, 143, 143, 143, 144, 144, 144, 145, 145, 145, 146, 146, 146, 147, 147, 147, 148, 148, 148, 149, 149, 149, 150, 150, 150, 151, 151, 151, 152, 152, 152, 153, 153, 153, 154, 154, 154, 155, 155, 155, 156, 156, 156, 157, 157, 157, 158, 158, 
158, 159, 159, 159, 160, 160, 160, 161, 161, 161, 162, 162, 162, 163, 163, 163, 164, 164, 164, 165, 165, 165, 166, 166, 166, 167, 167, 167, 168, 168, 168, 169, 169, 169, 170, 170, 170, 171, 171, 171, 172, 172, 172, 173, 173, 173, 174, 174, 174, 175, 175, 175, 176, 176, 176, 177, 177, 177, 178, 178, 178, 179, 179, 179, 180, 180, 180, 181, 181, 181, 182, 182, 182, 183, 183, 183, 184, 184, 184, 185, 185, 185, 186, 186, 186, 187, 187, 187, 188, 188, 188, 189, 189, 189, 190, 190, 190, 191, 191, 191, 192, 192, 192, 193, 193, 193, 194, 194, 194, 195, 195, 195, 196, 196, 196, 197, 197, 197, 198, 198, 198, 199, 199, 199, 200, 200, 200, 201, 201, 201, 202, 202, 202, 203, 203, 203, 204, 204, 204, 205, 205, 205, 206, 206, 206, 207, 207, 207, 208, 208, 208, 209, 209, 209, 210, 210, 210, 211, 211, 211, 212, 212, 212, 213, 213, 213, 214, 214, 214, 215, 215, 215, 216, 216, 216, 217, 217, 217, 218, 218, 218, 219, 219, 219, 220, 220, 220, 221, 221, 221, 222, 222, 222, 223, 223, 223, 224, 224, 224, 225, 225, 225, 226, 226, 226, 227, 227, 227, 228, 228, 228, 229, 229, 229, 230, 230, 230, 231, 231, 231, 232, 232, 232, 233, 233, 233, 234, 234, 234, 235, 235, 235, 236, 236, 236, 237, 237, 237, 238, 238, 238, 239, 239, 239, 240, 240, 240, 241, 241, 241, 242, 242, 242, 243, 243, 243, 244, 244, 244, 245, 245, 245, 246, 246, 246, 247, 247, 247, 248, 248, 248, 249, 249, 249, 250, 250, 250, 251, 251, 251, 252, 252, 252, 253, 253, 253, 254, 254, 254, 255, 255, 255] | 46.534694 | 3,459 | 0.556179 |
ae585b1ebb6ddaac2ce15869614a545c6f947635 | 342 | py | Python | pangolin/core/context_processors.py | skylifewww/pangolinreact | 8d8a45fd15c442618f2ed1ecab15e2e2ab4b7a3a | [
"MIT"
] | null | null | null | pangolin/core/context_processors.py | skylifewww/pangolinreact | 8d8a45fd15c442618f2ed1ecab15e2e2ab4b7a3a | [
"MIT"
] | null | null | null | pangolin/core/context_processors.py | skylifewww/pangolinreact | 8d8a45fd15c442618f2ed1ecab15e2e2ab4b7a3a | [
"MIT"
] | null | null | null | from django.conf import settings
from django.utils.timezone import now
from .utils import intspace, set_param
def extra(request):
    """Template context processor: expose a few builtins, Django settings
    and the project's formatting helpers to every template."""
    return {
        'dir': dir, 'list': list, 'len': len, 'enumerate': enumerate, 'range': range,
        'settings': settings,
        'now': now, 'intspace': intspace, 'set_param': set_param,
    }
| 26.307692 | 85 | 0.652047 |
ae58d33794d0e8f0c6295bcf0262290cc4e159e7 | 1,163 | py | Python | Python/Advanced OOP/Attributes/Hotel/01. Hotel.py | teodoramilcheva/softuni-software-engineering | 98dc9faa66f42570f6538fd7ef186d2bd1d39bff | [
"MIT"
] | null | null | null | Python/Advanced OOP/Attributes/Hotel/01. Hotel.py | teodoramilcheva/softuni-software-engineering | 98dc9faa66f42570f6538fd7ef186d2bd1d39bff | [
"MIT"
] | null | null | null | Python/Advanced OOP/Attributes/Hotel/01. Hotel.py | teodoramilcheva/softuni-software-engineering | 98dc9faa66f42570f6538fd7ef186d2bd1d39bff | [
"MIT"
] | null | null | null | class Hotel:
def __init__(self, name: str):
self.name = name
self.rooms = []
self.guests = 0
@classmethod
def from_stars(cls, stars_count):
name = f'{stars_count} stars Hotel'
return cls(name)
def add_room(self, room):
self.rooms.append(room)
def take_room(self, room_number, people):
search_room = [r for r in self.rooms if r.number == room_number]
res = search_room[0].take_room(people)
if res is None:
self.guests += people
else:
return res
def free_room(self, room_number):
search_room = [r for r in self.rooms if r.number == room_number]
res = search_room[0].free_room()
if res is None:
self.guests -= search_room[0].guests
else:
return res
def print_status(self):
print(f'Hotel {self.name} has {self.guests} total guests')
print(f'Free rooms: {", ".join([str(r.number) for r in self.rooms if not r.is_taken])}')
print(f'Taken rooms: {", ".join([str(r.number) for r in self.rooms if r.is_taken])}')
| 29.820513 | 97 | 0.563199 |
ae5b1b9181972edef32d0c181d78511358cde1b1 | 2,671 | py | Python | 8_random_walker_segmentation_scikit-image.py | Data-Laboratory/WorkExamples | 27e58207e664da7813673e6792c0c30c0a5bf74c | [
"MIT"
] | 1 | 2021-12-15T22:27:27.000Z | 2021-12-15T22:27:27.000Z | 8_random_walker_segmentation_scikit-image.py | Data-Laboratory/WorkExamples | 27e58207e664da7813673e6792c0c30c0a5bf74c | [
"MIT"
] | null | null | null | 8_random_walker_segmentation_scikit-image.py | Data-Laboratory/WorkExamples | 27e58207e664da7813673e6792c0c30c0a5bf74c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
__author__ = "Sreenivas Bhattiprolu"
__license__ = "Feel free to copy, I appreciate if you acknowledge Python for Microscopists"
# https://www.youtube.com/watch?v=6P8YhJa2V6o
"""
Using Random walker to generate lables and then segment and finally cleanup using closing operation.
"""
import matplotlib.pyplot as plt
from skimage import io, img_as_float
import numpy as np
img = img_as_float(io.imread("images/Alloy_noisy.jpg"))
#plt.hist(img.flat, bins=100, range=(0, 1))
# Very noisy image so histogram looks horrible. Let us denoise and see if it helps.
from skimage.restoration import denoise_nl_means, estimate_sigma
sigma_est = np.mean(estimate_sigma(img, multichannel=True))
denoise_img = denoise_nl_means(img, h=1.15 * sigma_est, fast_mode=True,
patch_size=5, patch_distance=3, multichannel=True)
#plt.hist(denoise_img.flat, bins=100, range=(0, 1))
# Much better histogram and now we can see two separate peaks.
#Still close enough so cannot use histogram based segmentation.
#Let us see if we can get any better by some preprocessing.
#Let's try histogram equalization
from skimage import exposure #Contains functions for hist. equalization
#eq_img = exposure.equalize_hist(denoise_img)
eq_img = exposure.equalize_adapthist(denoise_img)
#plt.imshow(eq_img, cmap='gray')
#plt.hist(denoise_img.flat, bins=100, range=(0., 1))
#Not any better. Let us stretch the hoistogram between 0.7 and 0.95
# The range of the binary image spans over (0, 1).
# For markers, let us include all between each peak.
markers = np.zeros(img.shape, dtype=np.uint)
markers[(eq_img < 0.8) & (eq_img > 0.7)] = 1
markers[(eq_img > 0.85) & (eq_img < 0.99)] = 2
from skimage.segmentation import random_walker
# Run random walker algorithm
# https://scikit-image.org/docs/dev/api/skimage.segmentation.html#skimage.segmentation.random_walker
labels = random_walker(eq_img, markers, beta=10, mode='bf')
plt.imsave("images/markers.jpg", markers)
segm1 = (labels == 1)
segm2 = (labels == 2)
all_segments = np.zeros((eq_img.shape[0], eq_img.shape[1], 3)) #nothing but denoise img size but blank
all_segments[segm1] = (1,0,0)
all_segments[segm2] = (0,1,0)
#plt.imshow(all_segments)
from scipy import ndimage as nd
segm1_closed = nd.binary_closing(segm1, np.ones((3,3)))
segm2_closed = nd.binary_closing(segm2, np.ones((3,3)))
all_segments_cleaned = np.zeros((eq_img.shape[0], eq_img.shape[1], 3))
all_segments_cleaned[segm1_closed] = (1,0,0)
all_segments_cleaned[segm2_closed] = (0,1,0)
plt.imshow(all_segments_cleaned)
plt.imsave("images/random_walker.jpg", all_segments_cleaned)
| 31.797619 | 102 | 0.7383 |
ae5c773b88cd0f9d9fbee09e572ef2fc27d6c119 | 40 | py | Python | smileyjoe_io/constant.py | SmileyJoe/smileyjoe_io | 29e3b55e33f17f799f59801158499809fcce0af4 | [
"MIT"
] | null | null | null | smileyjoe_io/constant.py | SmileyJoe/smileyjoe_io | 29e3b55e33f17f799f59801158499809fcce0af4 | [
"MIT"
] | 2 | 2020-02-11T23:34:28.000Z | 2020-06-05T17:33:09.000Z | smileyjoe_io/constant.py | SmileyJoe/smileyjoe_io | 29e3b55e33f17f799f59801158499809fcce0af4 | [
"MIT"
] | null | null | null | SUB_SECRET = 'secret'
SUB_MAIN = 'main'
| 13.333333 | 21 | 0.7 |
ae5dba7efd27593d74b0d517709967bd1f8e2e4a | 3,090 | py | Python | dockend/dockend.py | ChrisVidal10/dockend | 8904e1d017fcc1767d8593190df537a750a50b4c | [
"MIT"
] | null | null | null | dockend/dockend.py | ChrisVidal10/dockend | 8904e1d017fcc1767d8593190df537a750a50b4c | [
"MIT"
] | 1 | 2018-06-25T23:38:09.000Z | 2018-06-25T23:38:09.000Z | dockend/dockend.py | ChrisVidal10/dockend | 8904e1d017fcc1767d8593190df537a750a50b4c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from termcolor import cprint
import argparse
import docker
DOCKER_CLIENT = docker.from_env()
def main():
    """Swap which backend's containers are running, based on the CLI argument.

    Choosing 'dla' stops the 'byr' containers and starts the 'dev' ones;
    any other choice does the reverse.
    """
    try:
        stop_missing = False
        start_missing = False
        args = parser_arguments()
        key_to_stop = 'byr' if args.service == 'dla' else 'dev'
        key_to_start = 'dev' if args.service == 'dla' else 'byr'
        to_stop = docker_containers_list(key_to_stop)
        to_start = docker_containers_list(key_to_start)
        if to_stop:
            stop_containers(to_stop, key_to_stop)
        else:
            cprint("WARNING! Active containers for stop not found", 'yellow')
            stop_missing = True
        if to_start:
            start_containers(to_start, key_to_start)
        else:
            cprint("WARNING! Active containers for start not found", 'yellow')
            start_missing = True
        if not start_missing:
            cprint("DONE! Happy Coding", "white", "on_green")
        if start_missing and stop_missing:
            cprint(
                "STOP! Maybe you have problems with the containers. e.g. Containers not build", "white", "on_red")
    except Exception:
        cprint("ERROR! Docker is off or not installed", "white", "on_red")
        exit(1)
def start_containers(container_lists, k_name):
    """Start every container in the list, reporting progress via cprint.

    Exits the process with status 1 if any container fails to start.
    """
    try:
        cprint("Start containers {}...".format(k_name), 'yellow')
        for container in container_lists:
            container.start()
        cprint("OK containers {} up!".format(k_name), 'green')
    except Exception as exc:
        cprint("Error when starting the process (container starting process): {}".format(
            exc), 'white', 'on_red')
        exit(1)
def stop_containers(container_lists, k_name):
    """Stop every container in the list, reporting progress via cprint.

    Returns True on success; exits the process with status 1 on failure.
    """
    try:
        cprint("Stop containers {}...".format(k_name), 'yellow')
        for container in container_lists:
            container.stop()
        cprint("OK containers {} down!".format(k_name), 'green')
        return True
    except Exception as exc:
        cprint("Error when starting the process (container stopping process): {}".format(
            exc), 'white', 'on_red')
        exit(1)
def docker_containers_list(key_name):
    """Return all containers (running or stopped) whose name matches key_name."""
    try:
        return DOCKER_CLIENT.containers.list(all=True, filters={'name': key_name})
    except Exception as exc:
        cprint("Error getting the list: {}".format(exc), 'red')
        raise exc
def parser_arguments():
    """Define and parse the command-line interface.

    Returns the parsed namespace; ``service`` is either 'byr' or 'dla'.
    """
    parser = argparse.ArgumentParser(
        description='Tool for change backend services and process in docker environment (BYR-Microservicios/API-Integrada)')
    parser.add_argument('-V', '--version',
                        action='version',
                        version='%(prog)s 0.2.1')
    parser.add_argument('service',
                        type=str,
                        choices=['byr', 'dla'],
                        help='backend type')
    return parser.parse_args()
if __name__ == '__main__':
main()
| 35.517241 | 124 | 0.612621 |
ae5e90efc8111cc99c18543d5afea38d02da46b8 | 125 | py | Python | crawlerhttp/http_header_generator.py | mcmin001/NBCrawler | ec6a348e32889fb9252651b203a725a39b6836ec | [
"Apache-2.0"
] | null | null | null | crawlerhttp/http_header_generator.py | mcmin001/NBCrawler | ec6a348e32889fb9252651b203a725a39b6836ec | [
"Apache-2.0"
] | null | null | null | crawlerhttp/http_header_generator.py | mcmin001/NBCrawler | ec6a348e32889fb9252651b203a725a39b6836ec | [
"Apache-2.0"
] | null | null | null | def get_http_header(user_agent):
# 字典数据类型 dict
headers = {
'user-agent': user_agent
}
return headers
| 17.857143 | 32 | 0.616 |
ae616a523c7cfa0788d9038fa4b59abb5b2c597c | 625 | py | Python | exp_figure/figure_3(grey).py | qqxx6661/LDSM | b2be6fdfdac00fc4a469a72b3a10686fa0f4bd80 | [
"MIT"
] | 4 | 2019-06-04T06:19:01.000Z | 2021-04-16T15:50:30.000Z | exp_figure/figure_3(grey).py | qqxx6661/LDSM | b2be6fdfdac00fc4a469a72b3a10686fa0f4bd80 | [
"MIT"
] | 1 | 2019-09-10T10:33:18.000Z | 2021-02-08T14:51:39.000Z | exp_figure/figure_3(grey).py | qqxx6661/LDSM | b2be6fdfdac00fc4a469a72b3a10686fa0f4bd80 | [
"MIT"
] | 2 | 2019-06-04T06:19:08.000Z | 2021-09-06T07:30:44.000Z | import random
import matplotlib.pyplot as plt
import numpy as np
# 在一个图形中创建两条线
fig = plt.figure(figsize=(10, 6))
ax1 = fig.add_subplot(1, 1, 1)
ax1.set_xlabel('Frame', fontsize=18)
ax1.set_ylabel('Overall Time Cost (s)', fontsize=18)
x = range(180)
y1 = []
y2 = []
for i in range(180):
y1.append(random.uniform(0.30, 0.32))
y2.append(random.uniform(0.36, 0.38))
print(y1)
print(y2)
ax1.plot(x, y1,linestyle=':',marker='o', label="1-cam scenario")
ax1.plot(x, y2,marker='>', label="8-cam scenario")
plt.xticks((0, 30, 60, 90, 120, 150, 180), fontsize=16)
plt.yticks(fontsize=18)
plt.legend(fontsize=12)
plt.show()
| 23.148148 | 64 | 0.68 |
ae6311f1dc6cb97bf176a5a088a3d0aac371ae07 | 176 | py | Python | src/radixlib/constants.py | 0xOmarA/RadixLib | 85d75a47d4c4df4c1a319b74857ae2c513933623 | [
"MIT"
] | 32 | 2022-01-12T16:52:28.000Z | 2022-03-24T18:05:47.000Z | src/radixlib/constants.py | 0xOmarA/RadixLib | 85d75a47d4c4df4c1a319b74857ae2c513933623 | [
"MIT"
] | 3 | 2022-01-12T17:01:55.000Z | 2022-02-12T15:14:16.000Z | src/radixlib/constants.py | 0xOmarA/RadixLib | 85d75a47d4c4df4c1a319b74857ae2c513933623 | [
"MIT"
] | 1 | 2022-01-21T04:28:07.000Z | 2022-01-21T04:28:07.000Z | from typing import Dict
# Identifier of the XRD token on each known Radix network.
# NOTE(review): the "localnet" entry is intentionally empty here —
# presumably unused or filled in elsewhere; confirm against callers.
XRD_RRI: Dict[str, str] = {
    "mainnet": "xrd_rr1qy5wfsfh",
    "stokenet": "xrd_tr1qyf0x76s",
    "betanet": "xrd_br1qy73gwac",
    "localnet": ""
} | 22 | 34 | 0.636364 |
ae63be6d85b78ced6ae0f350b22b8798f6f015df | 1,349 | py | Python | tests/examples/coreutils/ls/requirements/requirements.py | testflows/TestFlows-Core | 0aa17247dffd2f7199465031ab16cc4f12c9cfb0 | [
"Apache-2.0"
] | 3 | 2020-06-25T19:23:19.000Z | 2021-10-20T19:29:56.000Z | tests/examples/coreutils/ls/requirements/requirements.py | testflows/TestFlows-Core | 0aa17247dffd2f7199465031ab16cc4f12c9cfb0 | [
"Apache-2.0"
] | null | null | null | tests/examples/coreutils/ls/requirements/requirements.py | testflows/TestFlows-Core | 0aa17247dffd2f7199465031ab16cc4f12c9cfb0 | [
"Apache-2.0"
] | 1 | 2020-02-24T12:31:45.000Z | 2020-02-24T12:31:45.000Z | # These requirements were auto generated
# from software requirements specification (SRS)
# document by TestFlows v1.6.200716.1214830.
# Do not edit by hand but re-generate instead
# using 'tfs requirements generate' command.
from testflows.core import Requirement
RQ_SRS001_CU_LS = Requirement(
name='RQ.SRS001-CU.LS',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'The [ls] utility SHALL list the contents of a directory.\n'
),
link=None
)
RQ_SRS001_CU_LS_Synopsis = Requirement(
name='RQ.SRS001-CU.LS.Synopsis',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'The [ls] utility SHALL support the following synopsis.\n'
'\n'
'```bash\n'
'SYNOPSIS\n'
' ls [OPTION]... [FILE]...\n'
'```\n'
),
link=None
)
RQ_SRS001_CU_LS_Default_Directory = Requirement(
name='RQ.SRS001-CU.LS.Default.Directory',
version='1.0',
priority=None,
group=None,
type=None,
uid=None,
description=(
'The [ls] utility SHALL by default list information about the contents of the current directory.\n'
),
link=None
)
| 26.45098 | 107 | 0.578206 |
ae6410131f36b3418762c4c860f8c13f5bed9bd8 | 1,067 | py | Python | build/lib/flaskr/__init__.py | LayneWei/NLP-medical-information-extraction | 1657d956afd3a2c476da28e3e8a4f1c4ce4bdc4b | [
"MIT"
] | null | null | null | build/lib/flaskr/__init__.py | LayneWei/NLP-medical-information-extraction | 1657d956afd3a2c476da28e3e8a4f1c4ce4bdc4b | [
"MIT"
] | null | null | null | build/lib/flaskr/__init__.py | LayneWei/NLP-medical-information-extraction | 1657d956afd3a2c476da28e3e8a4f1c4ce4bdc4b | [
"MIT"
] | null | null | null | import os
from flask import Flask
#import SQLAlchemy
from flaskr import db
def clear_data(session):
    """Delete every row from all tables known to db.metadata, then commit.

    Tables are walked in reverse of sorted_tables — presumably so dependent
    tables are emptied before the ones they reference; confirm against the
    schema's foreign keys.
    """
    for table in reversed(db.metadata.sorted_tables):
        print('Clear table %s' % table)
        session.execute(table.delete())
    session.commit()
def create_app(test_config=None):
    """Application factory: build, configure and return the Flask app."""
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_mapping(
        SECRET_KEY='dev',
        DATABASE=os.path.join(app.instance_path, 'flaskr.sqlite'),
    )
    if test_config is not None:
        # load the test config if passed in
        app.config.from_mapping(test_config)
    else:
        # load the instance config, if it exists, when not testing
        app.config.from_pyfile('config.py', silent=True)
    # ensure the instance folder exists (best effort: errors are ignored)
    try:
        os.makedirs(app.instance_path)
    except OSError:
        pass
    # register the database hooks and the note blueprint
    from . import db
    db.init_app(app)
    from . import note
    app.register_blueprint(note.bp)
    app.add_url_rule('/', endpoint='index')
    return app
ae6515732de013312213bbfb2e08738b394327ad | 139 | py | Python | nlpatl/sampling/clustering/__init__.py | dumpmemory/nlpatl | 59209242d1ac26714b11b86261070ac50cc90432 | [
"MIT"
] | 18 | 2021-11-29T06:43:46.000Z | 2022-03-29T09:58:32.000Z | nlpatl/sampling/clustering/__init__.py | dumpmemory/nlpatl | 59209242d1ac26714b11b86261070ac50cc90432 | [
"MIT"
] | null | null | null | nlpatl/sampling/clustering/__init__.py | dumpmemory/nlpatl | 59209242d1ac26714b11b86261070ac50cc90432 | [
"MIT"
] | 1 | 2021-11-29T06:43:47.000Z | 2021-11-29T06:43:47.000Z | from nlpatl.sampling.clustering.nearest_mean import NearestMeanSampling
from nlpatl.sampling.clustering.farthest import FarthestSampling
| 46.333333 | 72 | 0.884892 |
ae689f1c1175daa6fc473f2cb48f19de2559deff | 830 | py | Python | EvaMap/Metrics/sameAs.py | benjimor/EvaMap | 42e616abe9f15925b885797d30496e30615989a0 | [
"MIT"
] | 1 | 2021-01-29T18:53:26.000Z | 2021-01-29T18:53:26.000Z | EvaMap/Metrics/sameAs.py | benjimor/EvaMap | 42e616abe9f15925b885797d30496e30615989a0 | [
"MIT"
] | 1 | 2021-06-06T17:56:00.000Z | 2021-06-06T17:56:00.000Z | EvaMap/Metrics/sameAs.py | benjimor/EvaMap | 42e616abe9f15925b885797d30496e30615989a0 | [
"MIT"
] | null | null | null | import rdflib
import requests
from EvaMap.Metrics.metric import metric
def sameAs(g_onto, liste_map, g_map, raw_data, g_link):
    """Score how many URI subjects of the mapping graph carry an owl:sameAs
    triple (ratio of sameAs links over distinct URI subjects).

    Only ``g_map`` is inspected; the other parameters are part of the common
    metric interface and are not used here.
    """
    result = metric()
    result['name'] = "Use of sameAs properties"
    # Every distinct URI that appears as a triple subject is a candidate.
    subjects = set()
    for subj, _, _ in g_map.triples((None, None, None)):
        if isinstance(subj, rdflib.term.URIRef):
            subjects.add(subj)
    nbPossible = len(subjects)
    same_as_predicate = rdflib.term.URIRef('http://www.w3.org/2002/07/owl#sameAs')
    points = 0
    for subj in subjects:
        for _ in g_map.triples((subj, same_as_predicate, None)):
            points = points + 1
    if points < 1:
        result['score'] = 0
        result['feedbacks'].append("No sameAs defined")
    else:
        result['score'] = 0
        if nbPossible != 0:
            result['score'] = points / nbPossible
    return result
ae69157e9e2838981548e0247bcf82bb4114ecc3 | 1,501 | py | Python | zeeko/handlers/setup_package.py | alexrudy/Zeeko | fb4992724620ed548dd32c3201f79f5b7bebfe32 | [
"BSD-3-Clause"
] | 2 | 2017-07-23T22:06:05.000Z | 2020-02-28T07:54:15.000Z | zeeko/handlers/setup_package.py | alexrudy/Zeeko | fb4992724620ed548dd32c3201f79f5b7bebfe32 | [
"BSD-3-Clause"
] | 1 | 2020-10-29T19:54:06.000Z | 2020-10-29T19:54:06.000Z | zeeko/handlers/setup_package.py | alexrudy/Zeeko | fb4992724620ed548dd32c3201f79f5b7bebfe32 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
from zeeko._build_helpers import get_utils_extension_args, get_zmq_extension_args, _generate_cython_extensions, pxd, get_package_data
from astropy_helpers import setup_helpers
# .pxd declaration files for the shared utility helpers.
utilities = [pxd("..utils.rc"),
             pxd("..utils.msg"),
             pxd("..utils.pthread"),
             pxd("..utils.lock"),
             pxd("..utils.condition"),
             pxd("..utils.clock")]
# Declarations shared by the loop/handler base machinery in this package.
base = [ pxd("..cyloop.throttle"), pxd("..cyloop.statemachine"), pxd(".snail"), pxd(".base")]
# Extra .pxd depends per extension module, keyed by the module's short
# name; consumed by get_extensions() below.
dependencies = {
    'base' : utilities + [ pxd("..cyloop.throttle") ],
    'snail' : utilities + [ pxd("..cyloop.throttle"), pxd("..cyloop.statemachine") ],
    'client' : utilities + base + [ pxd("..messages.receiver") ],
    'server' : utilities + base + [ pxd("..messages.publisher") ],
}
def get_extensions(**kwargs):
    """Build the Cython extensions for this sub-package.

    Merges the utility and ZMQ build arguments, generates the extensions for
    this directory, and attaches the per-module .pxd depends declared in the
    ``dependencies`` mapping above.
    """
    extension_args = setup_helpers.DistutilsExtensionArgs()
    extension_args.update(get_utils_extension_args())
    extension_args.update(get_zmq_extension_args())
    extension_args['include_dirs'].append('numpy')
    package_name = __name__.split(".")[:-1]
    extensions = list(_generate_cython_extensions(extension_args, os.path.dirname(__file__), package_name))
    for extension in extensions:
        short_name = extension.name.split(".")[-1]
        extension.depends.extend(dependencies.get(short_name, []))
    return extensions
ae69c8b77055ca55392fe8a19a30b6175954dde3 | 5,716 | py | Python | networkapi/api_route_map/v4/serializers.py | vinicius-marinho/GloboNetworkAPI | 94651d3b4dd180769bc40ec966814f3427ccfb5b | [
"Apache-2.0"
] | 73 | 2015-04-13T17:56:11.000Z | 2022-03-24T06:13:07.000Z | networkapi/api_route_map/v4/serializers.py | leopoldomauricio/GloboNetworkAPI | 3b5b2e336d9eb53b2c113977bfe466b23a50aa29 | [
"Apache-2.0"
] | 99 | 2015-04-03T01:04:46.000Z | 2021-10-03T23:24:48.000Z | networkapi/api_route_map/v4/serializers.py | shildenbrand/GloboNetworkAPI | 515d5e961456cee657c08c275faa1b69b7452719 | [
"Apache-2.0"
] | 64 | 2015-08-05T21:26:29.000Z | 2022-03-22T01:06:28.000Z | # -*- coding: utf-8 -*-
import logging
from django.db.models import get_model
from rest_framework import serializers
from networkapi.util.geral import get_app
from networkapi.util.serializers import DynamicFieldsModelSerializer
log = logging.getLogger(__name__)
class RouteMapV4Serializer(DynamicFieldsModelSerializer):
route_map_entries = serializers. \
SerializerMethodField('get_route_map_entries')
peer_groups = serializers. \
SerializerMethodField('get_peer_groups')
class Meta:
RouteMap = get_model('api_route_map', 'RouteMap')
model = RouteMap
fields = (
'id',
'name',
'route_map_entries',
'peer_groups'
)
basic_fields = (
'id',
'name',
)
default_fields = fields
details_fields = fields
def get_route_map_entries(self, obj):
return self.extends_serializer(obj, 'route_map_entries')
def get_peer_groups(self, obj):
return self.extends_serializer(obj, 'peer_groups')
def get_serializers(self):
routemap_slzs = get_app('api_route_map',
module_label='v4.serializers')
peergroup_slzs = get_app('api_peer_group',
module_label='v4.serializers')
if not self.mapping:
self.mapping = {
'route_map_entries': {
'obj': 'route_map_entries_id',
},
'route_map_entries__basic': {
'serializer': routemap_slzs.RouteMapEntryV4Serializer,
'kwargs': {
'kind': 'basic',
'many': True
},
'obj': 'route_map_entries'
},
'route_map_entries__details': {
'serializer': routemap_slzs.RouteMapEntryV4Serializer,
'kwargs': {
'kind': 'details',
'many': True
},
'obj': 'route_map_entries'
},
'peer_groups': {
'obj': 'peer_groups_id',
},
'peer_groups__basic': {
'serializer': peergroup_slzs.PeerGroupV4Serializer,
'kwargs': {
'kind': 'basic',
'many': True
},
'obj': 'peer_groups'
},
'peer_groups__details': {
'serializer': peergroup_slzs.PeerGroupV4Serializer,
'kwargs': {
'kind': 'details',
'many': True
},
'obj': 'peer_groups'
}
}
class RouteMapEntryV4Serializer(DynamicFieldsModelSerializer):
list_config_bgp = serializers.SerializerMethodField('get_list_config_bgp')
route_map = serializers.SerializerMethodField('get_route_map')
class Meta:
RouteMapEntry = get_model('api_route_map', 'RouteMapEntry')
model = RouteMapEntry
fields = (
'id',
'action',
'action_reconfig',
'order',
'list_config_bgp',
'route_map'
)
basic_fields = (
'id',
'action',
'action_reconfig',
'order'
)
default_fields = fields
details_fields = fields
def get_list_config_bgp(self, obj):
return self.extends_serializer(obj, 'list_config_bgp')
def get_route_map(self, obj):
return self.extends_serializer(obj, 'route_map')
def get_serializers(self):
lcb_slzs = get_app('api_list_config_bgp',
module_label='v4.serializers')
if not self.mapping:
self.mapping = {
'list_config_bgp': {
'obj': 'list_config_bgp_id',
},
'list_config_bgp__basic': {
'serializer': lcb_slzs.ListConfigBGPV4Serializer,
'kwargs': {
'kind': 'basic',
'prohibited': (
'route_map_entries__basic',
)
},
'obj': 'list_config_bgp'
},
'list_config_bgp__details': {
'serializer': lcb_slzs.ListConfigBGPV4Serializer,
'kwargs': {
'kind': 'details',
'prohibited': (
'route_map_entries__details',
)
},
'obj': 'list_config_bgp'
},
'route_map': {
'obj': 'route_map_id',
},
'route_map__basic': {
'serializer': RouteMapV4Serializer,
'kwargs': {
'kind': 'basic',
'prohibited': (
'route_map_entries__basic',
)
},
'obj': 'route_map'
},
'route_map__details': {
'serializer': RouteMapV4Serializer,
'kwargs': {
'kind': 'details',
'prohibited': (
'route_map_entries__details',
)
},
'obj': 'route_map'
}
}
| 31.065217 | 78 | 0.448216 |
ae6b38b5ef961be1df343f27398c8d975b548233 | 274 | py | Python | src/admin_panel/models.py | sahilsehgal1995/lenme-api | 65826619b039c5c4035b6e0c133c32014977489e | [
"MIT"
] | null | null | null | src/admin_panel/models.py | sahilsehgal1995/lenme-api | 65826619b039c5c4035b6e0c133c32014977489e | [
"MIT"
] | null | null | null | src/admin_panel/models.py | sahilsehgal1995/lenme-api | 65826619b039c5c4035b6e0c133c32014977489e | [
"MIT"
] | null | null | null | from src import db, BaseMixin
class Log(db.Model, BaseMixin):
    """Audit-log row recording an action performed by a user."""
    # Short action label, required.
    action = db.Column(db.String(55), nullable=False)
    # User who performed the action.
    owner_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    # Serialized payload of the change and the model it applied to.
    updated_data = db.Column(db.String(1024))
    updated_model = db.Column(db.String(125))
| 27.4 | 62 | 0.70073 |
ae6dc1d38a589fb6dfb638a55ee82b80c824df9d | 10,329 | py | Python | actions/line_mmc.py | fmariv/udt-qgis-plugin | 20cbf8889f2a2448d982c7057a4cfbe37d90d78b | [
"MIT"
] | null | null | null | actions/line_mmc.py | fmariv/udt-qgis-plugin | 20cbf8889f2a2448d982c7057a4cfbe37d90d78b | [
"MIT"
] | 2 | 2021-09-02T07:22:24.000Z | 2021-09-22T05:31:45.000Z | actions/line_mmc.py | fmariv/udt-qgis-plugin | 20cbf8889f2a2448d982c7057a4cfbe37d90d78b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
/***************************************************************************
UDTPlugin
In this file is where the LineMMC class is defined. The main function
of this class is to run the automation process that exports the geometries
and generates the metadata of a municipal line.
***************************************************************************/
"""
import os
import numpy as np
from PyQt5.QtCore import QVariant
from qgis.core import (QgsVectorLayer,
QgsCoordinateReferenceSystem,
QgsVectorFileWriter,
QgsMessageLog,
QgsField,
QgsProject)
from ..config import *
from .adt_postgis_connection import PgADTConnection
from ..utils import *
# TODO in progress...
class LineMMC(object):
    """Municipal line MMC generation.

    Copies the point (fites) and line (trams) geometries of a municipal
    line from the ADT PostGIS database into the work directory and runs
    the per-layer generation steps.
    """

    def __init__(self, line_id):
        """
        :param line_id: identifier of the municipal line to process
        """
        self.line_id = line_id
        self.crs = QgsCoordinateReferenceSystem("EPSG:25831")
        # ADT PostGIS connection
        self.pg_adt = PgADTConnection(HOST, DBNAME, USER, PWD, SCHEMA)
        self.pg_adt.connect()
        # Working layers, filled once the source data is copied locally.
        self.work_points_layer, self.work_lines_layer = None, None

    def _layer_has_line(self, layer_name, pk_field):
        """Return True if the given ADT layer has features for this line id.

        Shared by the two public existence checks below, which previously
        duplicated the selection logic and the verbose if/else boolean return.
        """
        layer = self.pg_adt.get_layer(layer_name, pk_field)
        layer.selectByExpression(f'"id_linia"=\'{int(self.line_id)}\'',
                                 QgsVectorLayer.SetSelection)
        return layer.selectedFeatureCount() > 0

    def check_line_exists(self):
        """Return (points_exist, lines_exist) flags for this line id."""
        return (self.check_line_exists_points_layer(),
                self.check_line_exists_lines_layer())

    def check_line_exists_points_layer(self):
        """Return True if the line has point (fita) features."""
        return self._layer_has_line('v_fita_mem', 'id_fita')

    def check_line_exists_lines_layer(self):
        """Return True if the line has line (tram) features."""
        return self._layer_has_line('v_tram_linia_mem', 'id_tram_linia')

    def generate_line_data(self):
        """Run the full generation pipeline for this line."""
        # ########################
        # SET DATA
        # Copy data to work directory
        self.copy_data_to_work()
        # Set the layers paths
        self.work_points_layer, self.work_lines_layer = self.set_layers_paths()
        # ########################
        # GENERATION PROCESS
        line_mmc_points = LineMMCPoints(self.line_id, self.work_points_layer)
        line_mmc_points.generate_points_layer()
        line_mmc_lines = LineMMCLines(self.line_id, self.work_lines_layer)
        line_mmc_lines.generate_lines_layer()
        # TODO metadata
        ##########################
        # DATA EXPORTING
        # Make the output directories if they don't exist
        # TODO export, saber nombre de los archivos de salida

    def copy_data_to_work(self):
        """Select this line's features in ADT and export them as shapefiles."""
        # Points layer
        fita_mem_layer = self.pg_adt.get_layer('v_fita_mem', 'id_fita')
        fita_mem_layer.selectByExpression(f'"id_linia"=\'{self.line_id}\'',
                                          QgsVectorLayer.SetSelection)
        # Lines layer
        line_mem_layer = self.pg_adt.get_layer('v_tram_linia_mem', 'id_tram_linia')
        line_mem_layer.selectByExpression(f'"id_linia"=\'{self.line_id}\'',
                                          QgsVectorLayer.SetSelection)
        # Export only the selected features (last positional arg True).
        QgsVectorFileWriter.writeAsVectorFormat(
            fita_mem_layer,
            os.path.join(LINIA_WORK_DIR, f'fites_{self.line_id}.shp'),
            'utf-8', self.crs, 'ESRI Shapefile', True)
        QgsVectorFileWriter.writeAsVectorFormat(
            line_mem_layer,
            os.path.join(LINIA_WORK_DIR, f'tram_linia_{self.line_id}.shp'),
            'utf-8', self.crs, 'ESRI Shapefile', True)
        # TODO: sin proyección

    def set_layers_paths(self):
        """Return (points_layer, lines_layer) opened from the work directory."""
        work_points_layer = QgsVectorLayer(
            os.path.join(LINIA_WORK_DIR, f'fites_{self.line_id}.shp'))
        work_lines_layer = QgsVectorLayer(
            os.path.join(LINIA_WORK_DIR, f'tram_linia_{self.line_id}.shp'))
        return work_points_layer, work_lines_layer
class LineMMCPoints(LineMMC):
    """Generates the points (fites) layer of a municipal line MMC."""

    def __init__(self, line_id, points_layer):
        LineMMC.__init__(self, line_id)
        self.work_points_layer = points_layer

    def generate_points_layer(self):
        """Add the MMC fields, fill them, then drop the source fields."""
        self.add_fields()
        self.fill_fields()
        self.delete_fields()

    def add_fields(self):
        """Append the MMC output schema to the working points layer."""
        # Set new fields
        id_u_fita_field = QgsField(name='IdUfita', type=QVariant.String, typeName='text', len=10)
        id_fita_field = QgsField(name='IdFita', type=QVariant.String, typeName='text', len=18)
        id_sector_field = QgsField(name='IdSector', type=QVariant.String, typeName='text', len=1)
        id_fita_r_field = QgsField(name='IdFitaR', type=QVariant.String, typeName='text', len=3)
        num_termes_field = QgsField(name='NumTermes', type=QVariant.String, typeName='text', len=3)
        monument_field = QgsField(name='Monument', type=QVariant.String, typeName='text', len=1)
        id_linia_field, valid_de_field, valid_a_field, data_alta_field, data_baixa_field = get_common_fields()
        new_fields_list = [id_u_fita_field, id_fita_field, id_sector_field, id_fita_r_field, num_termes_field,
                           monument_field, id_linia_field]
        self.work_points_layer.dataProvider().addAttributes(new_fields_list)
        self.work_points_layer.updateFields()

    def fill_fields(self):
        """Populate the new MMC fields from the source attributes."""
        self.work_points_layer.startEditing()
        for point in self.work_points_layer.getFeatures():
            point_id_fita = coordinates_to_id_fita(point['point_x'], point['point_y'])
            point_r_fita = point_num_to_text(point['num_fita'])
            # NOTE(review): field was declared as 'IdUfita' above but written
            # as 'IdUFita' here — relies on case-insensitive field lookup; verify.
            point['IdUFita'] = point['id_u_fita'][:-2]
            point['IdFita'] = point_id_fita
            point['IdFitaR'] = point_r_fita
            point['IdSector'] = point['num_sector']
            point['NumTermes'] = point['num_termes']
            point['IdLinia'] = int(point['id_linia'])
            # TODO tiene Valid de o Data alta? Preguntar Cesc
            # 'trobada' == 1 marks a monumented (physically found) point.
            if point['trobada'] == 1:
                point['Monument'] = 'S'
            else:
                point['Monument'] = 'N'
            self.work_points_layer.updateFeature(point)
        self.work_points_layer.commitChanges()

    def delete_fields(self):
        """Drop the original source attributes (first 31 columns) from the layer."""
        delete_fields_list = list([*range(0, 31)])
        self.work_points_layer.dataProvider().deleteAttributes(delete_fields_list)
        self.work_points_layer.updateFields()
class LineMMCLines(LineMMC):
    """Generates the line (trams) layer of a municipal line MMC."""

    def __init__(self, line_id, lines_layer):
        LineMMC.__init__(self, line_id)
        self.work_lines_layer = lines_layer
        # Dictionary of per-line attributes loaded from the DIC_LINES CSV.
        self.arr_lines_data = np.genfromtxt(DIC_LINES, dtype=None, encoding=None, delimiter=';', names=True)

    def generate_lines_layer(self):
        """Add the MMC fields, fill them, then drop the source fields."""
        self.add_fields()
        self.fill_fields()
        self.delete_fields()

    def add_fields(self):
        """Append the MMC output schema to the working lines layer."""
        name_municipality_1_field = QgsField(name='NomTerme1', type=QVariant.String, typeName='text', len=100)
        name_municipality_2_field = QgsField(name='NomTerme2', type=QVariant.String, typeName='text', len=100)
        tipus_ua_field = QgsField(name='TipusUA', type=QVariant.String, typeName='text', len=17)
        limit_prov_field = QgsField(name='LimitProvi', type=QVariant.String, typeName='text', len=1)
        limit_vegue_field = QgsField(name='LimitVegue', type=QVariant.String, typeName='text', len=1)
        tipus_linia_field = QgsField(name='TipusLinia', type=QVariant.String, typeName='text', len=8)
        # TODO tiene Valid de o Data alta? Preguntar Cesc
        id_linia_field, valid_de_field, valid_a_field, data_alta_field, data_baixa_field = get_common_fields()
        new_fields_list = [id_linia_field, name_municipality_1_field, name_municipality_2_field, tipus_ua_field,
                           limit_prov_field, limit_vegue_field, tipus_linia_field,]
        self.work_lines_layer.dataProvider().addAttributes(new_fields_list)
        self.work_lines_layer.updateFields()

    def fill_fields(self):
        """Populate the new MMC fields from the DIC_LINES lookup table."""
        # TODO casi identica a la de Generador MMC...
        self.work_lines_layer.startEditing()
        for line in self.work_lines_layer.getFeatures():
            line_id = line['id_linia']
            # Row(s) from the CSV matching this line id.
            line_data = self.arr_lines_data[np.where(self.arr_lines_data['IDLINIA'] == line_id)]
            # Get the Tipus UA type
            tipus_ua = line_data['TIPUSUA'][0]
            if tipus_ua == 'M':
                line['TipusUA'] = 'Municipi'
            elif tipus_ua == 'C':
                line['TipusUA'] = 'Comarca'
            elif tipus_ua == 'A':
                line['TipusUA'] = 'Comunitat Autònoma'
            elif tipus_ua == 'E':
                line['TipusUA'] = 'Estat'
            elif tipus_ua == 'I':
                line['TipusUA'] = 'Inframunicipal'
            # Get the Limit Vegue type
            limit_vegue = line_data['LIMVEGUE'][0]
            if limit_vegue == 'verdadero':
                line['LimitVegue'] = 'S'
            else:
                line['LimitVegue'] = 'N'
            # Get the tipus Linia type
            # NOTE(review): unlike the other lookups, no [0] index here — this
            # compares the whole (size-1) array to the string; confirm intended.
            tipus_linia = line_data['TIPUSREG']
            if tipus_linia == 'internes':
                line['TipusLinia'] = 'MMC'
            else:
                line['TipusLinia'] = 'Exterior'
            # Non dependant fields
            line['IdLinia'] = line_id
            line['NomTerme1'] = str(line_data['NOMMUNI1'][0])
            line['NomTerme2'] = str(line_data['NOMMUNI2'][0])
            line['LimitProvi'] = str(line_data['LIMPROV'][0])
            self.work_lines_layer.updateFeature(line)
        self.work_lines_layer.commitChanges()

    def delete_fields(self):
        """Drop the original source attributes (first 12 columns) from the layer."""
        delete_fields_list = list([*range(0, 12)])
        self.work_lines_layer.dataProvider().deleteAttributes(delete_fields_list)
        self.work_lines_layer.updateFields()
| 41.817814 | 127 | 0.618162 |
ae6e3ce852e6d0276690375427d2e2f3c5953dfb | 5,369 | py | Python | SentimentAnalysis.py | hoossainalik/instalyzer | 9ad7c59fba3f617801d3ec0c3ae216029ee0aece | [
"MIT"
] | null | null | null | SentimentAnalysis.py | hoossainalik/instalyzer | 9ad7c59fba3f617801d3ec0c3ae216029ee0aece | [
"MIT"
] | null | null | null | SentimentAnalysis.py | hoossainalik/instalyzer | 9ad7c59fba3f617801d3ec0c3ae216029ee0aece | [
"MIT"
] | null | null | null | """
Module: Sentiment Analysis
Author: Hussain Ali Khan
Version: 1.0.0
Last Modified: 29/11/2018 (Thursday)
"""
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
import pandas as pd
import re
import os
from emoji import UNICODE_EMOJI
import matplotlib.pyplot as plt
import seaborn as sns
class ResultData:
    """Container pairing cleaned text items with their sentiment scores."""

    def __init__(self, data=None, scores=None):
        """
        :param data: list of cleaned text strings (defaults to a new empty list)
        :param scores: list of VADER polarity-score dicts (defaults to a new empty list)
        """
        # BUGFIX: the defaults were mutable ([]) — a single shared list would
        # leak state between every ResultData instance created without args.
        self.data = [] if data is None else data
        self.scores = [] if scores is None else scores

    def get_data(self):
        """Return the stored text items."""
        return self.data

    def get_scores(self):
        """Return the stored sentiment scores."""
        return self.scores
class SentimentAnalyzer:
    """Interactive VADER sentiment analysis over a scraped-posts CSV dataset."""

    def __init__(self):
        self.analyzer = SentimentIntensityAnalyzer()
        self.dataset = None          # pandas DataFrame once load_dataset runs
        self.opened_dataset = None   # filename of the currently opened CSV

    def load_dataset(self, dir_name):
        """Prompt the user to pick a CSV from dir_name and load it."""
        files_list = os.listdir(dir_name)
        print("Please Select The DataSet That You Want To Open: ")
        for i in range(len(files_list)):
            print(i+1, ". ", files_list[i])
        choice = int(input("Choice: "))
        self.opened_dataset = files_list[choice-1]
        self.dataset = pd.read_csv(dir_name + "/" + self.opened_dataset)

    def sentiment_analyzer_scores(self, data):
        """Print the VADER polarity scores of a single text."""
        score = self.analyzer.polarity_scores(data)
        print("{:-<40} {}".format(data, str(score)))

    def process_descriptions(self):
        """Clean and score every post description; return a ResultData."""
        descriptions = self.dataset["description"]
        scores = []
        c_descriptions = []
        for desc in descriptions:
            desc = str(desc)
            # Strip the surrounding quote/bracket characters kept by the scraper.
            c_descriptions.append(desc[1:-1])
        cleaned_descriptions = clean_list(c_descriptions)
        for c_d in cleaned_descriptions:
            scores.append(self.analyzer.polarity_scores(c_d))
        rd = ResultData(cleaned_descriptions, scores)
        return rd

    def print_sentiment_scores(self, text):
        """Print the VADER polarity scores of a single text."""
        txt = self.analyzer.polarity_scores(text)
        print("{:-<40} {}".format(text, str(txt)))

    def process_comments(self):
        """Clean and score every comment of every post; return a ResultData."""
        # BUGFIX: this previously read `sa.dataset`, a module-level global from
        # main(), so the method broke for any instance not named `sa`.
        comments_lists = self.dataset["comments"]
        scores = []
        all_comments = []
        for c in comments_lists:
            # Comments are stored as a stringified Python list; unwrap it.
            c = str(c).replace('[', '')
            c = str(c).replace(']', '')
            c = c.split(', ')
            c = [comment.replace("'", "") for comment in c]
            # Entries alternate author/comment; keep only the comment texts.
            c = c[1::2]
            for each_c in c:
                all_comments.append(each_c)
        cleaned_comments = clean_list(all_comments)
        for c_c in cleaned_comments:
            scores.append(self.analyzer.polarity_scores(c_c))
        rd = ResultData(cleaned_comments, scores)
        return rd
def save_results_as_csv(results, fn, c_name):
    """Label, plot and persist sentiment results.

    :param results: ResultData holding texts and VADER score dicts
    :param fn: output file stem (written under SentimentAnalysisResults/)
    :param c_name: column name for the text ('descriptions' or 'comments')

    Side effects: prints the joined DataFrame, shows a pie plot and a seaborn
    pair plot (blocks on plt.show()), and writes the CSV.
    """
    results_df = pd.DataFrame(results.get_scores())
    # Dominant sentiment = the largest of the pos/neg/neu scores.
    results_df['class'] = results_df[['pos', 'neg', 'neu']].idxmax(axis=1)
    results_df['class'] = results_df['class'].map({'pos': 'Positive', 'neg': 'Negative', 'neu': 'Neutral'})
    text_df = pd.DataFrame(results.get_data(), columns=[c_name])
    final_df = text_df.join(results_df)
    print(final_df)
    print(final_df.describe())
    pie_plot_title = "Pie Plot For Sentiments Of " + c_name + " In dataset <" + fn + ">"
    final_df["class"].value_counts().plot(kind="pie", autopct='%.1f%%', figsize=(8, 8), title=pie_plot_title)
    pp = sns.pairplot(final_df, hue="class", height=3)
    pp.fig.suptitle("Pair Plot For Sentiments Of "+c_name+" In dataset <"+fn+">")
    plt.show()
    final_df.to_csv("SentimentAnalysisResults/" + fn + ".csv")
# search your emoji
def is_emoji(s):
    """Return True if the single character *s* is a known emoji."""
    return s in UNICODE_EMOJI
# add space near your emoji
def add_space(text):
    """Insert a space before every emoji character, then strip the result."""
    pieces = []
    for ch in text:
        if is_emoji(ch):
            pieces.append(' ' + ch)
        else:
            pieces.append(ch)
    return ''.join(pieces).strip()
def clean_text(text):
    """Normalise raw caption/comment text before sentiment scoring."""
    text = filter_mentions(text)
    # Character-level substitutions, applied in this exact order.
    for old, new in (('#', ''), ('/', ' '), ('_', ' '),
                     ('❤', ' Love '), ('-', ' ')):
        text = text.replace(old, new)
    text = re.sub(' +', ' ', text).strip()               # collapse space runs
    text = re.sub(r'https?:/\/\S+', ' ', text).strip()   # drop URLs
    text = re.sub('[^A-Za-z0-9]+', ' ', text).strip()    # keep alphanumerics
    return add_space(text)
def filter_mentions(text):
    """Drop whitespace-separated tokens that start with '@' (user mentions)."""
    kept = [token for token in text.split() if token[0] != '@']
    return " ".join(kept)
def clean_list(_list):
    """Clean every item with clean_text and keep only non-empty results."""
    results = []
    for item in _list:
        text = clean_text(item)
        if text:
            results.append(text)
    return results
def main():
    """Interactive driver: load a dataset, score descriptions and comments,
    and persist/plot both result sets."""
    sa = SentimentAnalyzer()
    sa.load_dataset("Posts")
    print("<---Sentiment Analysis Results On Post Descriptions--->")
    description_results = sa.process_descriptions()
    save_results_as_csv(description_results, sa.opened_dataset + "_descriptions_sa_results", "descriptions")
    print("<----------------------------------------------------->")
    print("<---Sentiment Analysis Results On All Post Comments--->")
    comments_results = sa.process_comments()
    save_results_as_csv(comments_results, sa.opened_dataset + "_comments_sa_results", "comments")
    print("<----------------------------------------------------->")


if __name__ == "__main__":
    main()
ae6e8bcab2c7710339f988ae2adebe63a8a6d860 | 11,100 | py | Python | deep_sort_/track.py | brjathu/PHALP | 0502c0aa515292bc70e358fe3b3ec65e63215327 | [
"MIT"
] | 45 | 2022-02-23T04:32:22.000Z | 2022-03-31T15:02:39.000Z | deep_sort_/track.py | brjathu/PHALP | 0502c0aa515292bc70e358fe3b3ec65e63215327 | [
"MIT"
] | 5 | 2022-02-23T15:08:29.000Z | 2022-03-24T19:54:55.000Z | deep_sort_/track.py | brjathu/PHALP | 0502c0aa515292bc70e358fe3b3ec65e63215327 | [
"MIT"
] | 2 | 2022-02-26T13:01:19.000Z | 2022-03-24T04:53:29.000Z | """
Modified code from https://github.com/nwojke/deep_sort
"""
import numpy as np
import copy
import torch
import torch.nn as nn
import torch.nn.functional as F
import scipy.signal as signal
from scipy.ndimage.filters import gaussian_filter1d
class TrackState:
    """
    Enumeration type for the single target track state. Newly created tracks are
    classified as `tentative` until enough evidence has been collected. Then,
    the track state is changed to `confirmed`. Tracks that are no longer alive
    are classified as `deleted` to mark them for removal from the set of active
    tracks.
    """
    # Plain int constants (not an Enum) — callers compare with == on these.
    Tentative = 1
    Confirmed = 2
    Deleted = 3
class Track:
    """
    A single target track with state space `(x, y, a, h)` and associated
    velocities, where `(x, y)` is the center of the bounding box, `a` is the
    aspect ratio and `h` is the height.
    Parameters
    ----------
    track_id : int
        A unique track identifier.
    n_init : int
        Number of consecutive detections before the track is confirmed. The
        track state is set to `Deleted` if a miss occurs within the first
        `n_init` frames.
    max_age : int
        The maximum number of consecutive misses before the track state is
        set to `Deleted`.
    feature : Optional[ndarray]
        Concatenated appearance/pose/location feature vector of the detection
        this track originates from; split according to `dims`.
    Attributes
    ----------
    hits : int
        Total number of measurement updates.
    age : int
        Total number of frames since first occurance.
    time_since_update : int
        Total number of frames since last measurement update.
    state : TrackState
        The current track state.
    """

    def __init__(self, opt, track_id, n_init, max_age, feature=None, uv_map=None, bbox=None, detection_data=None, confidence=None, detection_id=None, dims=None, time=None):
        self.opt = opt
        self.track_id = track_id
        self.hits = 1
        self.age = 1
        self.time_since_update = 0
        self.state = TrackState.Tentative
        # dims = (appearance, pose, location) feature lengths used to slice
        # the concatenated feature vector below.
        if(dims is not None):
            self.A_dim = dims[0]
            self.P_dim = dims[1]
            self.L_dim = dims[2]
        self.phalp_uv_map = uv_map
        self.phalp_uv_map_ = [uv_map]
        self.phalp_uv_predicted = copy.deepcopy(self.phalp_uv_map)
        self.phalp_uv_predicted_ = [copy.deepcopy(self.phalp_uv_map)]
        # Fixed-length histories (size opt.track_history), seeded by repeating
        # the first detection so early frames have a full window.
        self.phalp_appe_features = []
        self.phalp_pose_features = []
        self.phalp_loca_features = []
        self.phalp_time_features = []
        self.phalp_bbox = []
        self.phalp_detection_id = []
        self.detection_data = []
        self.confidence_c = []
        if feature is not None:
            for i_ in range(self.opt.track_history):
                self.phalp_appe_features.append(feature[:self.A_dim])
                self.phalp_pose_features.append(feature[self.A_dim:self.A_dim+self.P_dim])
                self.phalp_loca_features.append(feature[self.A_dim+self.P_dim:])
                self.phalp_time_features.append(time)
                self.phalp_bbox.append(bbox)
                self.phalp_detection_id.append(detection_id)
                self.detection_data.append(detection_data)
                self.confidence_c.append(confidence[0])
        self._n_init = n_init
        self._max_age = max_age
        self.track_data = {
            "xy" : self.detection_data[-1]['xy'],
            "bbox" : np.asarray(self.detection_data[-1]['bbox'], dtype=np.float),
        }
        # Rolling buffers of predicted features (bounded to n_init+1 entries).
        self.phalp_pose_predicted_ = []
        self.phalp_loca_predicted_ = []
        self.phalp_features_ = []

    def predict(self, phalp_tracker, increase_age=True):
        """Propagate the state distribution to the current time step using a
        Kalman filter prediction step.
        Parameters
        ----------
        kf : kalman_filter.KalmanFilter
            The Kalman filter.
        """
        if(increase_age):
            self.age += 1
            self.time_since_update += 1

    def add_predicted(self, appe=None, pose=None, loca=None, uv=None):
        """Store predicted features, falling back to the last observed values
        for any component not supplied (tensors are converted via .numpy())."""
        self.phalp_appe_predicted = copy.deepcopy(appe.numpy()) if(appe is not None) else copy.deepcopy(self.phalp_appe_features[-1])
        self.phalp_pose_predicted = copy.deepcopy(pose.numpy()) if(pose is not None) else copy.deepcopy(self.phalp_pose_features[-1])
        self.phalp_loca_predicted = copy.deepcopy(loca.numpy()) if(loca is not None) else copy.deepcopy(self.phalp_loca_features[-1])
        self.phalp_features = np.concatenate((self.phalp_appe_predicted, self.phalp_pose_predicted, self.phalp_loca_predicted), axis=0)
        # Keep the prediction buffers bounded to the last n_init+1 entries.
        self.phalp_pose_predicted_.append(self.phalp_pose_predicted)
        if(len(self.phalp_pose_predicted_)>self.opt.n_init+1): self.phalp_pose_predicted_ = self.phalp_pose_predicted_[1:]
        self.phalp_loca_predicted_.append(self.phalp_loca_predicted)
        if(len(self.phalp_loca_predicted_)>self.opt.n_init+1): self.phalp_loca_predicted_ = self.phalp_loca_predicted_[1:]
        self.phalp_features_.append(self.phalp_features)
        if(len(self.phalp_features_)>self.opt.n_init+1): self.phalp_features_ = self.phalp_features_[1:]

    def update(self, detection, detection_id, shot):
        """Perform Kalman filter measurement update step and update the feature
        cache.
        Parameters
        ----------
        kf : kalman_filter.KalmanFilter
            The Kalman filter.
        detection : Detection
            The associated detection.
        """
        # NOTE(review): h and w are computed but never used below.
        h = detection.tlwh[3]
        w = detection.tlwh[2]
        # Slide each fixed-length history window: append the new value and
        # drop the oldest entry.
        self.phalp_appe_features.append(detection.feature[:self.A_dim])
        self.phalp_appe_features = copy.deepcopy(self.phalp_appe_features[1:])
        self.phalp_pose_features.append(detection.feature[self.A_dim:self.A_dim+self.P_dim])
        self.phalp_pose_features = copy.deepcopy(self.phalp_pose_features[1:])
        self.phalp_loca_features.append(detection.feature[self.A_dim+self.P_dim:])
        self.phalp_loca_features = copy.deepcopy(self.phalp_loca_features[1:])
        # On a shot change, reset the whole location history to the new value.
        if(shot==1): self.phalp_loca_features = [detection.feature[self.A_dim+self.P_dim:] for i in range(self.opt.track_history)]
        self.phalp_time_features.append(detection.time)
        self.phalp_time_features = copy.deepcopy(self.phalp_time_features[1:])
        self.phalp_bbox.append(detection.tlwh)
        self.phalp_bbox = self.phalp_bbox[1:]
        self.confidence_c.append(detection.confidence_c)
        self.confidence_c = self.confidence_c[1:]
        self.detection_data.append(detection.detection_data)
        self.detection_data = self.detection_data[1:]
        self.phalp_detection_id.append(detection_id)
        self.phalp_uv_map = copy.deepcopy(detection.uv_map)
        self.phalp_uv_map_.append(copy.deepcopy(detection.uv_map))
        if(self.opt.render or "T" in self.opt.predict):
            # Blend the new UV texture into the running prediction; blending
            # weight grows with the detection confidence.
            mixing_alpha_      = self.opt.alpha*(detection.confidence_c**2)
            # Channel 3 is the validity mask (==1 where texels are observed).
            ones_old           = self.phalp_uv_predicted[3:, :, :]==1
            ones_new           = self.phalp_uv_map[3:, :, :]==1
            ones_old           = np.repeat(ones_old, 3, 0)
            ones_new           = np.repeat(ones_new, 3, 0)
            ones_intersect     = np.logical_and(ones_old, ones_new)
            ones_union         = np.logical_or(ones_old, ones_new)
            good_old_ones      = np.logical_and(np.logical_not(ones_intersect), ones_old)
            good_new_ones      = np.logical_and(np.logical_not(ones_intersect), ones_new)
            new_rgb_map        = np.zeros((3, 256, 256))
            new_mask_map       = np.zeros((1, 256, 256))-1
            new_mask_map[ones_union[:1, :, :]] = 1.0
            # Visible in both: alpha-blend; visible in only one: copy as-is.
            new_rgb_map[ones_intersect] = (1-mixing_alpha_)*self.phalp_uv_predicted[:3, :, :][ones_intersect] + mixing_alpha_*self.phalp_uv_map[:3, :, :][ones_intersect]
            new_rgb_map[good_old_ones] = self.phalp_uv_predicted[:3, :, :][good_old_ones]
            new_rgb_map[good_new_ones] = self.phalp_uv_map[:3, :, :][good_new_ones]
            self.phalp_uv_predicted = np.concatenate((new_rgb_map, new_mask_map), 0)
            self.phalp_uv_predicted_.append(self.phalp_uv_predicted)
            if(len(self.phalp_uv_predicted_)>self.opt.n_init+1): self.phalp_uv_predicted_ = self.phalp_uv_predicted_[1:]
        else:
            self.phalp_uv_predicted = self.phalp_uv_map
        self.track_data = {
            "xy" : detection.detection_data['xy'],
            "bbox" : np.asarray(detection.detection_data['bbox'], dtype=np.float64)
        }
        self.hits += 1
        self.time_since_update = 0
        if self.state == TrackState.Tentative and self.hits >= self._n_init:
            self.state = TrackState.Confirmed

    def mark_missed(self):
        """Mark this track as missed (no association at the current time step).
        """
        if self.state == TrackState.Tentative:
            self.state = TrackState.Deleted
        elif self.time_since_update > self._max_age:
            self.state = TrackState.Deleted

    def is_tentative(self):
        """Returns True if this track is tentative (unconfirmed).
        """
        return self.state == TrackState.Tentative

    def is_confirmed(self):
        """Returns True if this track is confirmed."""
        return self.state == TrackState.Confirmed

    def is_deleted(self):
        """Returns True if this track is dead and should be deleted."""
        return self.state == TrackState.Deleted

    def smooth_bbox(self, bbox):
        """Median-filter then Gaussian-smooth each bbox parameter trajectory."""
        kernel_size = 5
        sigma = 3
        bbox = np.array(bbox)
        smoothed = np.array([signal.medfilt(param, kernel_size) for param in bbox.T]).T
        out = np.array([gaussian_filter1d(traj, sigma) for traj in smoothed.T]).T
        return list(out)
ae722ba487151806c74bc4bb207a931bee8b9346 | 818 | py | Python | meiduo_mall/apps/users/utils.py | 150619/meiduo_mall_project | c0441ad744c6dd0e2962d5e734c842e237b9ad0b | [
"MIT"
] | null | null | null | meiduo_mall/apps/users/utils.py | 150619/meiduo_mall_project | c0441ad744c6dd0e2962d5e734c842e237b9ad0b | [
"MIT"
] | null | null | null | meiduo_mall/apps/users/utils.py | 150619/meiduo_mall_project | c0441ad744c6dd0e2962d5e734c842e237b9ad0b | [
"MIT"
] | null | null | null | import re
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import JsonResponse
from apps.users.models import User
class AuthMobile(ModelBackend):
    """Django auth backend that accepts either a username or a mobile number."""

    def authenticate(self, request, username=None, password=None, **kwargs):
        try:
            # An 11-digit number starting 13-19 is treated as a mobile number.
            if re.match(r'^1[3-9]\d{9}$', username):
                user = User.objects.get(mobile=username)
            else:
                user = User.objects.get(username=username)
        except User.DoesNotExist:
            # Unknown account: return None so other backends may try.
            return
        if user and user.check_password(password):
            return user
        else:
            return
class LoginRequiredJsonMixin(LoginRequiredMixin):
    """LoginRequiredMixin variant that answers JSON instead of redirecting."""

    def handle_no_permission(self):
        # Message text ("user not logged in") is part of the API contract.
        return JsonResponse({'code': 400, 'errmsg': '用户未登录'})
| 29.214286 | 76 | 0.656479 |
ae738c094d5028bff43bc7f3386c802a9cf32a46 | 95 | py | Python | ginger/scripts/templates/app_templates/app_name/urls.py | vivsh/django-ginger | d293109becc72845a23f2aeb732ed808a7a67d69 | [
"MIT"
] | null | null | null | ginger/scripts/templates/app_templates/app_name/urls.py | vivsh/django-ginger | d293109becc72845a23f2aeb732ed808a7a67d69 | [
"MIT"
] | null | null | null | ginger/scripts/templates/app_templates/app_name/urls.py | vivsh/django-ginger | d293109becc72845a23f2aeb732ed808a7a67d69 | [
"MIT"
] | null | null | null |
# NOTE(review): `patterns` was removed in modern Django (1.10+); this module
# targets an older release — confirm before upgrading.
from django.conf.urls import url, patterns
from . import views

# Currently no routes are registered for this app.
urlpatterns = patterns("",
)
| 11.875 | 42 | 0.726316 |
ae73d85a5f4dcb6adc5dcd4a48bf418ba8cec3c0 | 1,184 | py | Python | ppln/gatkRealignerTargCreator.py | asalomatov/nextgen-pipeline | 4ac358050075dc40d32a1c09160e86a41f093f98 | [
"MIT"
] | 4 | 2017-08-11T21:02:35.000Z | 2020-10-29T19:49:41.000Z | ppln/gatkRealignerTargCreator.py | asalomatov/nextgen-pipeline | 4ac358050075dc40d32a1c09160e86a41f093f98 | [
"MIT"
] | null | null | null | ppln/gatkRealignerTargCreator.py | asalomatov/nextgen-pipeline | 4ac358050075dc40d32a1c09160e86a41f093f98 | [
"MIT"
] | 2 | 2017-08-18T19:40:10.000Z | 2017-08-19T03:43:07.000Z | '''
'''
# Python 2 script (uses print statements): runs GATK RealignerTargetCreator
# for one BAM, logging start/finish/failure via logProc.
import sys, subprocess
sys.path.insert(0, '/nethome/asalomatov/projects/ppln')
import logProc

# GATK options: 10 data threads, 200bp interval padding, skip bad CIGARs.
ntFlag = '-nt 10'
#interval_padding = '--interval_padding 0' # bed files padded with 100bp
interval_padding = '--interval_padding 200'
read_filter = '--read_filter BadCigar'
print '\nsys.args :', sys.argv[1:]
# Positional CLI arguments (exactly 8 expected).
inbam, outfile, refGenome, knownindels, tmpdir, gatk, gaps, outdir = sys.argv[1:]
cmd = 'java -Xms750m -Xmx10g -XX:+UseSerialGC -Djava.io.tmpdir=%(tmpdir)s -jar %(gatk)s -T RealignerTargetCreator -I %(inbam)s --known %(knownindels)s -o %(outfile)s -R %(refGenome)s %(ntFlag)s %(read_filter)s'
#cmd = 'java -Xms750m -Xmx2500m -XX:+UseSerialGC -Djava.io.tmpdir=%(tmpdir)s -jar %(gatk)s -T RealignerTargetCreator -I %(inbam)s -o %(outfile)s -R %(refGenome)s %(ntFlag)s %(read_filter)s -L %(inbed)s -XL %(gaps)s'
# Fill the command template from the local variable names above.
cmd = cmd % locals()
print cmd
logProc.logProc(outfile, outdir, cmd, 'started')
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode == 0:
    logProc.logProc(outfile, outdir, cmd, 'finished')
else:
    # Record stderr in the log and propagate the failure to the pipeline.
    logProc.logProc(outfile, outdir, cmd, 'failed', stderr)
    sys.exit(1)
ae74ea38559f52ac217bf0d17616d5da35736211 | 14,773 | py | Python | functions_baseline_opencv.py | Shiro-LK/Super-Resolution-ProbaV | e6b9d9d62caa50b84cd5bdca906af53aa1a5de8b | [
"MIT"
] | null | null | null | functions_baseline_opencv.py | Shiro-LK/Super-Resolution-ProbaV | e6b9d9d62caa50b84cd5bdca906af53aa1a5de8b | [
"MIT"
] | null | null | null | functions_baseline_opencv.py | Shiro-LK/Super-Resolution-ProbaV | e6b9d9d62caa50b84cd5bdca906af53aa1a5de8b | [
"MIT"
] | 1 | 2020-04-15T10:36:31.000Z | 2020-04-15T10:36:31.000Z | # -*- coding: utf-8 -*-
import cv2
import numpy as np
import os
import pandas as pd
import math
from skimage import io
from skimage.transform import rescale
import skimage
import numba
from numba import prange
import time
from pathlib import Path
# MAX 35 IMG
## Create TXT FILE for loading
def import_norm_data(filename="data/norm.csv"):
    """Read the per-scene normalisation table into a {name: value} dict."""
    table = pd.read_csv(filename, sep=" ", header=None, names=["name", "value"])
    return dict(zip(table["name"], table["value"]))
def seperate_NIR_RED(filename):
    """Split a '<path> <norm>' listing into *_NIR.txt and *_RED.txt files.

    Lines whose path contains "NIR" go to the NIR file; everything else goes
    to the RED file. Backslashes in paths are normalised to forward slashes.

    :param filename: path of the combined listing (must end in ".txt")
    """
    with open(filename, "r") as f:
        entries = [line.replace("\\", "/").split() for line in f]
    # Context managers guarantee the outputs are closed even if a write fails
    # (the originals were opened with bare open() and leaked on error).
    with open(filename.replace(".txt", "_NIR.txt"), "w") as f_nir, \
         open(filename.replace(".txt", "_RED.txt"), "w") as f_red:
        for entry in entries:
            if not entry:
                continue  # skip blank lines instead of crashing on entry[0]
            target = f_nir if entry[0].find("NIR") != -1 else f_red
            target.write(entry[0] + " " + entry[1] + "\n")
def create_data(path, normalize_data):
    """Walk the dataset tree and write train.txt / test.txt listings.

    Expected layout: path/{train,test}/{NIR,RED}/<imgset>/...
    Each listing line is '<imgset path> <normalisation value>'.

    :param path: dataset root containing 'train' and 'test' folders
    :param normalize_data: dict mapping imgset folder name -> norm value
    """
    # Tracks the largest number of files seen in one imgset (printed at end).
    max_ = 0
    f_train = open(path+"train.txt", "w")
    f_test = open(path+"test.txt", "w")
    folders1 = os.listdir(path)
    for fold1 in folders1:
        p1 = os.path.join(path, fold1)
        if os.path.isdir(p1): # test/train fold
            folders2 = os.listdir(p1)
            for fold2 in folders2:
                p2 = os.path.join(p1, fold2)
                if os.path.isdir(p2): # NIR RED fold
                    folders3 = os.listdir(p2)
                    for fold3 in folders3:
                        p3 = os.path.join(p2, fold3)
                        if os.path.isdir(p3): #name imgset folders
                            if fold1 == "train":
                                f_train.write(p3 + " " + str(normalize_data[fold3]) + "\n")
                            elif fold1 == "test":
                                f_test.write(p3 + " " + str(normalize_data[fold3]) + "\n")
                            max_ = max(max_, len(os.listdir(p3)))
    print(max_)
    f_train.close()
    f_test.close()
## Load all data
def load_data(filename, istrain=True):
    """Load every scene listed in a '<path> <norm>' text file.

    Returns [LR, QM, norm, SM, HR] entries for training data, otherwise
    [LR, QM, norm] entries.
    """
    with open(filename, "r") as f:
        entries = [line.replace("\\", "/").split() for line in f]
    data = []
    for path, value in entries:
        norm = float(value)
        scene = get_scene(path, istrain)
        if istrain:
            LR, QM, SM, HR = scene
            data.append([LR, QM, norm, SM, HR])
        else:
            LR, QM, SM = scene
            data.append([LR, QM, norm])
    return data
## load one scene data
def get_scene(path, istrain=True):
    """Collect the file paths of one scene directory.

    Low-resolution frames ("LR000.png".."LR034.png") and their quality maps
    ("QM000.png".."QM034.png") are collected in index order, stopping at the
    first missing file (frames are numbered consecutively). The hard-coded
    70-entry filename list of the original is generated programmatically.

    :param path: scene directory, or None (returns None)
    :param istrain: True for train scenes (ground truth HR.png available)
    :return: [LR, QM, SM, HR] when istrain, else [LR, QM, SM];
             None when path is None
    """
    if path is None:
        return None
    LR = []
    QM = []
    # SM.png (status map) exists for both train and test scenes.
    # BUGFIX: SM used to be bound only under `if istrain:`, so the
    # istrain=False branch raised NameError on `return [LR, QM, SM]`.
    SM = os.path.join(path, 'SM.png')
    for i in range(35):
        lr_path = os.path.join(path, 'LR%03d.png' % i)
        if not os.path.isfile(lr_path):
            break
        LR.append(lr_path)
    for i in range(35):
        qm_path = os.path.join(path, 'QM%03d.png' % i)
        if not os.path.isfile(qm_path):
            break
        QM.append(qm_path)
    if istrain:
        HR = os.path.join(path, 'HR.png')
        return [LR, QM, SM, HR]
    return [LR, QM, SM]
## METRIC FUNCTION FOR ONE SCENE
@numba.autojit
def score_scene(sr, hr, clearhr, norm, num_crop=6):
    """
    score for one scene

    Computes the ProbaV cPSNR-based score: the SR image (cropped by
    num_crop/2 on each side) is compared against every (x,y) shift of the
    HR image within a num_crop-pixel window; the best (minimum) norm/cPSNR
    over all shifts is returned. Only pixels marked clear in `clearhr`
    contribute (others become NaN and are skipped via nanmean).

    NOTE(review): np.place writes NaN into `clearhr` in place, so the
    caller's array is modified by this call.
    """
    zSR = []
    max_x, max_y = np.array(hr.shape) - num_crop
    sr_ = sr[num_crop//2:-num_crop//2, num_crop//2:-num_crop//2]
    np.place(clearhr, clearhr==0, np.nan)
    zSR = np.zeros((num_crop + 1, num_crop + 1), np.float64)
    for x_off in prange(0, num_crop+1):
        for y_off in prange(0, num_crop+1):
            clearHR_ = clearhr[x_off : x_off + max_x, y_off : y_off + max_y]
            hr_ = hr[x_off:x_off + max_x, y_off:y_off + max_y]
            diff = (hr_- sr_)* clearHR_
            # b = mean bias between HR and SR over clear pixels.
            b = np.nanmean(diff)
            ## compute cMSE (bias-corrected MSE over clear pixels)
            cMSE = np.nanmean( (diff-b)**2)
            cPSNR = -10.0*np.log10(cMSE)
            zSR[x_off, y_off] = norm/cPSNR
    return zSR.min()
@numba.autojit
def baseline_predict_scene(LR, QM, before=True, interpolation=cv2.INTER_CUBIC):
    """
    baseline version 1 :
    average images with the maximum number of clearance pixel
    if before is true, average the image then apply the resize and return the resize image
    else resize the images and return the average

    :param LR: list of low-resolution image paths (128x128, 16-bit PNG)
    :param QM: list of matching quality/clearance map paths
    :param before: average at 128x128 then upscale, vs upscale then average
    :param interpolation: OpenCV interpolation flag for the 3x resize
    :return: float64 array of shape (384, 384)
    """
    # load clearance map
    n = len(QM)
    clearance = np.zeros( (n,) )
    #for cl in QM:
    for i in prange(n):
        cl = QM[i]
        # Read unchanged (-1), normalise to [0,1], then binarise.
        img_cl = skimage.img_as_float64( cv2.imread(cl , -1) ).astype(np.bool)
        if img_cl is None:
            print("error")
        if len(np.unique(img_cl)) > 2:
            print(np.unique(img_cl))
            raise("Error during loading clearance map !!!! ")
        #img_cl = img_cl/255 # normalize value 0-1
        # Clearance score = number of clear pixels in this frame.
        clearance[i] = np.sum(img_cl)
    maxcl = clearance.max()
    maxclears = [i for i in prange(len(clearance)) if clearance[i] == maxcl] # save index of image with max clearance
    if before:
        # Average the best frames at LR resolution, then upscale 3x.
        img_predict = np.zeros( (128, 128), dtype=np.float64)
        #for ids in maxclears:
        for i in prange(len(maxclears)):
            ids = maxclears[i]
            im = skimage.img_as_float64( cv2.imread(LR[ids], -1) )
            img_predict += im
        img_predict = img_predict/len(maxclears)
        im_rescale = cv2.resize(img_predict, (384, 384), interpolation = interpolation)# rescale(im, scale=3, order=3, mode='edge', anti_aliasing=False, multichannel=False)#
        return im_rescale
    else:
        # upscale each best frame first, then average at HR resolution.
        img_predict = np.zeros( (384, 384), dtype=np.float64)
        #for ids in maxclears:
        for i in prange(len(maxclears)):
            ids = maxclears[i]
            im = skimage.img_as_float64( cv2.imread(LR[ids], -1) )
            im_rescale = cv2.resize(im, (384, 384), interpolation = interpolation)# rescale(im, scale=3, order=3, mode='edge', anti_aliasing=False, multichannel=False)#
            img_predict += im_rescale
        img_predict = img_predict/len(maxclears)
        return img_predict
@numba.autojit
def baseline_predict_scenev2(LR, QM, interpolation=cv2.INTER_CUBIC):
    """
    baseline version 2 :
    clearance-weighted average of the images that share the maximum number of
    clear pixels; pixels that are never clear in that subset fall back to the
    plain (unweighted) average.

    Args:
        LR: list of paths to the low-resolution images (128x128).
        QM: list of paths to the matching clearance/quality maps.
        interpolation: OpenCV interpolation flag used for the 3x upscale.

    Returns:
        A 384x384 float64 prediction.

    Raises:
        ValueError: if a clearance map cannot be read or is not binary.
    """
    # Count the clear pixels of every clearance map.
    n = len(QM)
    clearance = np.zeros( (n,) )
    for i in prange(n):
        cl = QM[i]
        raw = cv2.imread(cl , -1)
        if raw is None:
            # cv2.imread signals failure by returning None; the old code
            # converted first (which would already have crashed) and then
            # only printed "error".
            raise ValueError("could not read clearance map: " + cl)
        img_f = skimage.img_as_float64(raw)
        if len(np.unique(img_f)) > 2:
            # The old binarity check ran after .astype(bool), where unique()
            # can never exceed two values, and then raised a bare string
            # (a TypeError in Python 3).
            print(np.unique(img_f))
            raise ValueError("Error during loading clearance map !!!! ")
        img_cl = img_f.astype(bool)  # np.bool was removed in numpy >= 1.24
        clearance[i] = np.sum(img_cl)
    maxcl = clearance.max()
    maxclears = [i for i in prange(len(clearance)) if clearance[i] == maxcl] # save index of image with max clearance
    dim = len(maxclears)
    clearance_map = np.zeros( (dim, 128, 128), dtype=np.float64 )
    im = np.zeros( (dim, 128, 128), dtype=np.float64)
    for i in prange(dim):
        ids = maxclears[i]
        cl = QM[ids]
        clearance_map[i] = skimage.img_as_float64( cv2.imread(cl , -1) )
        im[i] = skimage.img_as_float64( cv2.imread(LR[ids], -1) )
    img = im * clearance_map # pixel with no clearance equal 0
    clear = clearance_map.sum(axis=0)
    np.place(clear, clear==0, np.nan)  # mark never-clear pixels; avoids divide by zero
    img_predict = np.sum(img, axis=0)/clear
    # Fill never-clear (NaN) pixels with the unweighted average.
    img_average = img.mean(axis=0)
    img_predict[ np.isnan(img_predict) ] = img_average[np.isnan(img_predict)]
    # upscale to the 384x384 target resolution
    img_resize= cv2.resize(img_predict, (384, 384), interpolation = interpolation)
    return img_resize
@numba.autojit
def baseline_predict_scenev3(LR, QM, interpolation=cv2.INTER_CUBIC):
    """
    baseline version 3 :
    clearance-weighted average of the images with the maximum number of clear
    pixels; pixels never clear in that subset are filled from the remaining
    images in decreasing clearance order, and finally from the plain average.
    (The docstring previously said "version 2" by copy/paste.)

    Args:
        LR: list of paths to the low-resolution images (128x128).
        QM: list of paths to the matching clearance/quality maps.
        interpolation: OpenCV interpolation flag used for the 3x upscale.

    Returns:
        A 384x384 float64 prediction.

    Raises:
        ValueError: if a clearance map cannot be read or is not binary.
    """
    # Count the clear pixels of every clearance map.
    n = len(QM)
    clearance = np.zeros( (n,) )
    for i in prange(n):
        cl = QM[i]
        raw = cv2.imread(cl , -1)
        if raw is None:
            # cv2.imread signals failure by returning None; the old code
            # converted first (which would already have crashed) and then
            # only printed "error".
            raise ValueError("could not read clearance map: " + cl)
        img_f = skimage.img_as_float64(raw)
        if len(np.unique(img_f)) > 2:
            # The old binarity check ran after .astype(bool), where unique()
            # can never exceed two values, and then raised a bare string
            # (a TypeError in Python 3).
            print(np.unique(img_f))
            raise ValueError("Error during loading clearance map !!!! ")
        img_cl = img_f.astype(bool)  # np.bool was removed in numpy >= 1.24
        clearance[i] = np.sum(img_cl)
    maxcl = clearance.max()
    max_clearance_value = clearance.argsort()[::-1]  # all indices, most clear first
    maxclears = [i for i in prange(len(clearance)) if clearance[i] == maxcl] # save index of image with max clearance
    dim = len(maxclears)
    clearance_map = np.zeros( (dim, 128, 128), dtype=np.float64 )
    im = np.zeros( (dim, 128, 128), dtype=np.float64)
    for i in prange(dim):
        ids = maxclears[i]
        cl = QM[ids]
        clearance_map[i] = skimage.img_as_float64( cv2.imread(cl , -1) )
        im[i] = skimage.img_as_float64( cv2.imread(LR[ids], -1) )
    img = im * clearance_map # pixel with no clearance equal 0
    clear = clearance_map.sum(axis=0)
    np.place(clear, clear==0, np.nan)  # mark never-clear pixels; avoids divide by zero
    img_predict = np.sum(img, axis=0)/clear
    # Replace NaN pixels with values from images where clearance is available,
    # walking the remaining images from most to least clear.  nan_map is 1.0
    # where a pixel is still missing, 0.0 otherwise.
    nan_map = clear.copy()
    nan_map[~np.isnan(nan_map)] = 0.0
    nan_map[np.isnan(nan_map)] = 1.0
    for ids in max_clearance_value:
        if clearance[ids] == maxcl:
            pass  # already part of the weighted average above
        else:
            cl = QM[ids]
            img_temp = skimage.img_as_float64( cv2.imread(LR[ids], -1) )
            clear_temp = skimage.img_as_float64( cv2.imread(cl , -1) )
            temp = clear_temp*nan_map  # clear in this image AND still missing
            np.place(temp, temp==0, np.nan)
            temp = temp*img_temp
            img_predict[np.isnan(img_predict)] = temp[np.isnan(img_predict)]
            nan_map[:, :] = nan_map[:,:] - (nan_map*clear_temp)  # mark filled pixels
    # Any pixel still NaN falls back to the unweighted average.
    img_average = img.mean(axis=0)
    img_predict[ np.isnan(img_predict) ] = img_average[np.isnan(img_predict)]
    # upscale to the 384x384 target resolution
    img_resize= cv2.resize(img_predict, (384, 384), interpolation =interpolation)
    return img_resize
@numba.autojit
def baseline_predict(data, istrain=True, evaluate=True, version=1, interpolation=cv2.INTER_CUBIC):
    """Run one of the baseline predictors over a whole dataset.

    Args:
        data: sequence of scenes; each entry unpacks to (LR, QM, norm, SM, HR).
        istrain: only training data is processed; with istrain=False the
            returned array stays all zeros (behaviour kept from the original).
        evaluate: also score each prediction against its HR ground truth.
        version: which baseline variant to run (1, 2 or 3).
        interpolation: OpenCV interpolation flag forwarded to the predictors.

    Returns:
        Array of shape (len(data), 384, 384) with the predicted scenes.

    Raises:
        ValueError: for an unknown `version`.
    """
    num = len(data)
    predicted = np.zeros( (num, 384, 384) ) # one 384x384 prediction per scene
    zsub = np.zeros((num,))  # per-scene scores (only filled when evaluate=True)
    if istrain:
        for i in prange( num ):
            LR, QM, norm, SM, HR = data[i]
            if version == 1:
                img_predict = baseline_predict_scene(LR, QM, interpolation=interpolation)
            elif version == 2:
                img_predict = baseline_predict_scenev2(LR, QM, interpolation=interpolation)
            elif version == 3:
                img_predict = baseline_predict_scenev3(LR, QM, interpolation=interpolation)
            else:
                # `raise("...")` raised a TypeError (strings are not
                # exceptions); raise a real exception instead.
                raise ValueError("method not implemented !")
            # save img
            predicted[i] = img_predict
            # evaluate against the HR ground truth and its clearance map
            if evaluate:
                num_crop = 6
                clearHR = skimage.img_as_float64( cv2.imread(SM, -1 ) )
                hr = skimage.img_as_float64( cv2.imread(HR, -1) )
                zSR = score_scene(img_predict, hr, clearHR, norm, num_crop=num_crop)
                zsub[i] = zSR
    if evaluate:
        print("evaluation \n number of elements : {0} \n Z = {1}".format(len(zsub), zsub.mean()))
    return predicted
def baseline_predict_test(data, dirs = "results_baseline", interpolation=cv2.INTER_CUBIC):
    """Predict every test scene with baseline v1 and save each result as
    <scene>.png inside `dirs` (the scene name is the LR image's folder name)."""
    for entry in data:
        lr_paths, qm_paths, _norm = entry
        scene_name = Path(lr_paths[0]).parts[-2]  # parent folder identifies the scene
        prediction = baseline_predict_scene(lr_paths, qm_paths, interpolation=interpolation)
        save_prediction(prediction, scene_name, directory=dirs)
def load_image2D(path, expand=False):
    """Read a single-channel image as float64; optionally append a channel axis
    so the result has shape (H, W, 1) instead of (H, W)."""
    image = skimage.img_as_float64(cv2.imread(path, -1))
    return np.expand_dims(image, axis=2) if expand else image
def save_prediction(pred, names, directory):
    """Write a float prediction as an uncompressed 16-bit PNG.

    Args:
        pred: 2D float image in [0, 1].
        names: file stem; the file is written as <names>.png.
        directory: output directory, created if missing.
    """
    # os.makedirs replaces the old os.stat / bare-except / os.mkdir dance:
    # it is race-free and no longer swallows unrelated OSErrors.
    os.makedirs(directory, exist_ok=True)
    p = os.path.join(directory, names + '.png')
    im = skimage.img_as_uint(pred)
    cv2.imwrite(p, im, [cv2.IMWRITE_PNG_COMPRESSION, 0])
#norm = import_norm_data()
#print(norm)
#
#create_data(path="data\\", normalize_data=norm)
#data_test = load_data(os.path.join("data","test.txt"), istrain=False)
#datas = load_data(os.path.join("data","train.txt"), istrain=True)
#begin = time.time()
#predict = baseline_predict(datas, istrain=True, evaluate=True, version=1)
#print(time.time()-begin)
#begin = time.time()
#baseline_predict_test(data_test)
#print(time.time()-begin)
| 34.678404 | 175 | 0.56434 |
ae75ff7994410f7e88a0e941f01acf2c32ca349b | 4,676 | py | Python | csvtoqbo.py | Airbitz/airbitz-ofx | 8dc9a851fc585e373611d6d8e27ae0e8540ea35b | [
"MIT"
] | 2 | 2016-01-08T20:14:21.000Z | 2018-06-15T17:58:09.000Z | csvtoqbo.py | EdgeApp/airbitz-ofx | 8dc9a851fc585e373611d6d8e27ae0e8540ea35b | [
"MIT"
] | null | null | null | csvtoqbo.py | EdgeApp/airbitz-ofx | 8dc9a851fc585e373611d6d8e27ae0e8540ea35b | [
"MIT"
] | 2 | 2016-01-08T20:14:22.000Z | 2016-03-30T19:59:48.000Z | #####################################################################
# #
# File: csvtoqbo.py #
# Developer: Paul Puey #
# Original Code by: Justin Leto #
# Forked from https://github.com/jleto/csvtoqbo #
# #
# main utility script file Python script to convert CSV files #
# of transactions exported from various platforms to QBO for #
# import into Quickbooks Online. #
# #
# Usage: python csvtoqbo.py <options> <csvfiles> #
# #
#####################################################################
import sys, traceback
import os
import logging
import csv
import qbo
import airbitzwallets
# Abort early when the script is called without any arguments.
if len(sys.argv) <= 1:
    sys.exit("Usage: python %s <options> <csvfiles>\n"
             "Where possible options include:\n"
             "  -btc        Output bitcoin in full BTC denomination\n"
             "  -mbtc    Output bitcoin in mBTC denomination\n"
             "  -bits      Output bitcoin in bits (uBTC) denomination" % sys.argv[0]
             )
# If help is requested
elif (sys.argv[1] == '--help'):
    sys.exit("Help for %s not yet implemented." % sys.argv[0])
# Map the denomination option to the divisor applied to the amounts.
if sys.argv[1] == '-mbtc':
    denom = 1000
elif sys.argv[1] == '-btc':
    denom = 1
elif sys.argv[1] == '-bits':
    denom = 1000000
else:
    # Previously an unknown option left `denom` undefined, and the script
    # crashed later with a NameError; fail fast with a clear message instead.
    sys.exit("Unknown option '%s'. Valid options are -btc, -mbtc and -bits." % sys.argv[1])
myProvider = airbitzwallets.airbitzwallets()
# Convert every CSV file listed after the option argument.
# (The old `for arg in sys.argv: if sys.argv.index(arg) > 1` skipped a file
# when the same path appeared twice, because index() returns the first
# occurrence; slicing the argument list is equivalent and duplicate-safe.)
for arg in sys.argv[2:]:
    # Start from a fresh log file for this CSV.
    log_path = arg[:len(arg)-3] + 'log'
    if os.path.isfile(log_path):
        os.remove(log_path)
    # NOTE(review): logging.basicConfig only configures the root logger on
    # its first call, so with several CSV files every message lands in the
    # first file's log.
    logging.basicConfig(filename=log_path, level=logging.INFO)
    logging.info("Opening '%s' CSV File" % myProvider.getName())
    try:
        with open(arg, 'r') as csvfile:
            # Open CSV for reading
            reader = csv.DictReader(csvfile, delimiter=',', quotechar='"')
            # Instantiate the qbo document that collects the transactions.
            myQbo = qbo.qbo()
            txnCount = 0
            for row in reader:
                txnCount = txnCount+1
                sdata = str(row)
                # Read in the values from this row of the csv file.
                date_posted = myProvider.getDatePosted(myProvider,row)
                txn_memo = myProvider.getTxnMemo(myProvider,row)
                txn_amount = myProvider.getTxnAmount(myProvider,row)
                txn_curamt = myProvider.getTxnCurAmt(myProvider,row)
                txn_category = myProvider.getTxnCategory(myProvider,row)
                txn_id = myProvider.getTxnId(myProvider,row)
                name = myProvider.getTxnName(myProvider,row)
                try:
                    # Add the transaction to the qbo document.
                    if myQbo.addTransaction(denom, date_posted, txn_memo, txn_id, txn_amount, txn_curamt, txn_category, name):
                        print('Transaction [' + str(txnCount) + '] added successfully!')
                        logging.info('Transaction [' + str(txnCount) + '] added successfully!')
                except Exception:
                    # Log the bad row and keep converting; one malformed row
                    # must not abort the whole file.  (The old bare `except:`
                    # also swallowed SystemExit/KeyboardInterrupt.)
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
                    print(''.join('!! ' + line for line in lines))
                    logging.info("Transaction [" + str(txnCount) + "] excluded!")
                    logging.info('>> Data: ' + str(sdata))
    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        print(''.join('!! ' + line for line in lines))
        logging.info("Trouble reading CSV file!")
    # After the transactions have been read, write the full QBO document.
    try:
        filename = arg[:len(arg)-3] + 'qbo'
        if myQbo.Write('./'+ filename):
            print("QBO file written successfully!")
            # log successful write
            logging.info("QBO file %s written successfully!" % filename)
    except Exception:
        # IO error (or a missing document when reading the CSV failed).
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        print(''.join('!! ' + line for line in lines))
        logging.info(''.join('!! ' + line for line in lines))
ae76f9115d5aeaf3a667d8ad57a43721c93b13f6 | 14,613 | py | Python | api/app/models/test_geofile.py | enermaps/enermaps | bac472e60e44724be605068103d01da0320483e6 | [
"Apache-2.0"
] | 5 | 2021-09-09T13:17:09.000Z | 2022-03-31T11:57:06.000Z | api/app/models/test_geofile.py | enermaps/enermaps | bac472e60e44724be605068103d01da0320483e6 | [
"Apache-2.0"
] | 154 | 2020-10-16T09:14:00.000Z | 2022-03-31T13:31:17.000Z | api/app/models/test_geofile.py | enermaps/enermaps | bac472e60e44724be605068103d01da0320483e6 | [
"Apache-2.0"
] | 9 | 2021-05-20T15:12:54.000Z | 2022-03-15T15:39:42.000Z | import copy
import json
import os
from app.common import path
from app.common.projection import epsg_string_to_proj4
from app.common.test import BaseApiTest
from . import geofile, storage
class TestLoad(BaseApiTest):
    """Checks that geofile.load resolves each layer-name prefix to the
    matching storage backend."""
    def _check_load(self, layer_name, storage_cls):
        """Load `layer_name` and verify its name and storage type."""
        with self.flask_app.app_context():
            loaded = geofile.load(layer_name)
            self.assertTrue(loaded is not None)
            self.assertEqual(loaded.name, layer_name)
            self.assertTrue(isinstance(loaded.storage, storage_cls))
    def testArea(self):
        self._check_load("area/NUTS42", storage.AreaStorage)
    def testRaster(self):
        self._check_load("raster/42", storage.RasterStorage)
    def testVector(self):
        self._check_load("vector/42", storage.VectorStorage)
    def testCMOutput(self):
        self._check_load("cm/blah", storage.CMStorage)
class TestSaveVectorGeoJSON(BaseApiTest):
    """Tests for geofile.save_vector_geojson.

    The fixture holds two point features: a valid one and one with an empty
    coordinates list.  The assertions below show that the empty-geometry
    feature is dropped from the stored data.geojson and that "var3" (None in
    every feature) is excluded from the list of valid variables.
    """
    # NOTE: the second feature differs only by its empty "coordinates" list.
    GEOJSON = {
        "type": "FeatureCollection",
        "features": [
            {
                "id": "FEATURE_ID",
                "type": "Feature",
                "geometry": {
                    "type": "Point",
                    "coordinates": [7.4, 46.0],
                },
                "properties": {
                    "units": {"var1": "MW", "var2": "kWh", "var3": "kWh"},
                    "fields": {
                        "field1": "value1",
                    },
                    "legend": {"symbology": []},
                    "start_at": None,
                    "variables": {
                        "var1": 1000,
                        "var2": 2000,
                        "var3": None,
                    },
                },
            },
            {
                "id": "FEATURE_ID",
                "type": "Feature",
                "geometry": {
                    "type": "Point",
                    # empty coordinates: this feature is expected to be dropped
                    "coordinates": [],
                },
                "properties": {
                    "units": {"var1": "MW", "var2": "kWh", "var3": "kWh"},
                    "fields": {
                        "field1": "value1",
                    },
                    "legend": {"symbology": []},
                    "start_at": None,
                    "variables": {
                        "var1": 1000,
                        "var2": 2000,
                        "var3": None,
                    },
                },
            },
        ],
    }
    def testFilesCreation(self):
        """Saving writes data.geojson, projection.txt and variables.json, and
        returns only the variables that are non-None in some feature."""
        with self.flask_app.app_context():
            layer_name = "vector/42"
            valid_variables = geofile.save_vector_geojson(
                layer_name, copy.deepcopy(TestSaveVectorGeoJSON.GEOJSON)
            )
            self.assertTrue(valid_variables is not None)
            self.assertTrue(isinstance(valid_variables, list))
            self.assertTrue("var1" in valid_variables)
            self.assertTrue("var2" in valid_variables)
            self.assertTrue("var3" not in valid_variables)
            self.assertTrue(
                os.path.exists(f"{self.wms_cache_dir}/vectors/42/data.geojson")
            )
            self.assertTrue(
                os.path.exists(f"{self.wms_cache_dir}/vectors/42/projection.txt")
            )
            self.assertTrue(
                os.path.exists(f"{self.wms_cache_dir}/vectors/42/variables.json")
            )
    def testGeoJSONFile(self):
        """The stored GeoJSON keeps only the valid feature, strips the legend
        and flattens variables into "__variable__<name>" properties."""
        with self.flask_app.app_context():
            layer_name = "vector/42"
            geofile.save_vector_geojson(
                layer_name, copy.deepcopy(TestSaveVectorGeoJSON.GEOJSON)
            )
            with open(f"{self.wms_cache_dir}/vectors/42/data.geojson", "r") as f:
                geojson = json.load(f)
            self.assertEqual(len(geojson["features"]), 1)
            self.assertTrue("legend" not in geojson["features"][0]["properties"])
            self.assertTrue("__variable__var1" in geojson["features"][0]["properties"])
            self.assertTrue("__variable__var2" in geojson["features"][0]["properties"])
            self.assertTrue("__variable__var3" in geojson["features"][0]["properties"])
    def testVariablesFile(self):
        """variables.json mirrors the valid-variables list (no all-None vars)."""
        with self.flask_app.app_context():
            layer_name = "vector/42"
            geofile.save_vector_geojson(
                layer_name, copy.deepcopy(TestSaveVectorGeoJSON.GEOJSON)
            )
            with open(f"{self.wms_cache_dir}/vectors/42/variables.json", "r") as f:
                variables = json.load(f)
            self.assertTrue(isinstance(variables, list))
            self.assertTrue("var1" in variables)
            self.assertTrue("var2" in variables)
            self.assertTrue("var3" not in variables)
class TestSaveRasterProjection(BaseApiTest):
    """Tests for geofile.save_raster_projection."""
    def _projection_file(self):
        """Path where the projection of raster 42 is expected on disk."""
        return f"{self.wms_cache_dir}/rasters/42/projection.txt"
    def testFileCreation(self):
        """Saving a projection creates projection.txt under the raster cache."""
        with self.flask_app.app_context():
            geofile.save_raster_projection(
                "raster/42", epsg_string_to_proj4("EPSG:3035")
            )
            self.assertTrue(os.path.exists(self._projection_file()))
    def testFileCreationWithFullLayerName(self):
        """The file lands in the same place when the layer name also carries a
        time period and a variable."""
        with self.flask_app.app_context():
            full_name = path.make_unique_layer_name(
                path.RASTER, 42, time_period="2015", variable="variable"
            )
            geofile.save_raster_projection(
                full_name, epsg_string_to_proj4("EPSG:3035")
            )
            self.assertTrue(os.path.exists(self._projection_file()))
    def testFileContent(self):
        """The stored file contains exactly the proj4 string."""
        with self.flask_app.app_context():
            proj4 = epsg_string_to_proj4("EPSG:3035")
            geofile.save_raster_projection("raster/42", proj4)
            with open(self._projection_file(), "r") as handle:
                self.assertEqual(handle.read(), proj4)
class TestSaveRasterGeometries(BaseApiTest):
    """Tests for geofile.save_raster_geometries.

    The fixture maps one raster file id ("FID1.tif") to a single closed
    polygon ring.  The tests show that: the ring is persisted verbatim in
    geometries.json; an empty feature list writes no file at all; a missing
    or non-Polygon geometry is stored as null for that file id.
    """
    # One feature whose id is the raster file name and whose geometry is a
    # closed 5-point polygon ring (first point repeated last).
    GEOJSON = {
        "type": "FeatureCollection",
        "features": [
            {
                "id": "FID1.tif",
                "type": "Feature",
                "geometry": {
                    "type": "Polygon",
                    "coordinates": [
                        [
                            [10, 30],
                            [20, 30],
                            [20, 40],
                            [10, 40],
                            [10, 30],
                        ],
                    ],
                },
                "properties": {
                    "units": {"var1": "MW", "var2": "kWh", "var3": "kWh"},
                    "fields": {
                        "field1": "value1",
                    },
                    "legend": {"symbology": []},
                    "start_at": None,
                    "variables": {
                        "var1": None,
                        "var2": None,
                        "var3": None,
                    },
                },
            }
        ],
    }
    def testSuccess(self):
        """geometries.json maps the file id to the polygon's outer ring."""
        with self.flask_app.app_context():
            layer_name = path.make_unique_layer_name(
                path.RASTER, 42, time_period="2015", variable="variable"
            )
            geofile.save_raster_geometries(
                layer_name, copy.deepcopy(TestSaveRasterGeometries.GEOJSON)
            )
            folder = path.to_folder_path(layer_name)
            filename = f"{self.wms_cache_dir}/rasters/{folder}/geometries.json"
            self.assertTrue(os.path.exists(filename))
            with open(filename, "r") as f:
                geometries = json.load(f)
            self.assertEqual(len(geometries), 1)
            self.assertTrue("FID1.tif" in geometries)
            polygon = geometries["FID1.tif"]
            self.assertTrue(isinstance(polygon, list))
            self.assertEqual(len(polygon), 5)
            # Each stored point must equal the corresponding fixture point.
            for i in range(len(polygon)):
                self.assertEqual(
                    polygon[i],
                    TestSaveRasterGeometries.GEOJSON["features"][0]["geometry"][
                        "coordinates"
                    ][0][i],
                )
    def testFailureNoFeatures(self):
        """With an empty feature list, no geometries.json is written."""
        with self.flask_app.app_context():
            layer_name = path.make_unique_layer_name(
                path.RASTER, 42, time_period="2015", variable="variable"
            )
            geojson = copy.deepcopy(TestSaveRasterGeometries.GEOJSON)
            geojson["features"] = []
            geofile.save_raster_geometries(layer_name, geojson)
            folder = path.to_folder_path(layer_name)
            self.assertFalse(
                os.path.exists(f"{self.wms_cache_dir}/rasters/{folder}/geometries.json")
            )
    def testSuccessNoGeometry(self):
        """A feature without geometry is stored as null for its file id."""
        with self.flask_app.app_context():
            layer_name = path.make_unique_layer_name(
                path.RASTER, 42, time_period="2015", variable="variable"
            )
            geojson = copy.deepcopy(TestSaveRasterGeometries.GEOJSON)
            geojson["features"][0]["geometry"] = None
            geofile.save_raster_geometries(layer_name, geojson)
            folder = path.to_folder_path(layer_name)
            filename = f"{self.wms_cache_dir}/rasters/{folder}/geometries.json"
            self.assertTrue(os.path.exists(filename))
            with open(filename, "r") as f:
                geometries = json.load(f)
            self.assertEqual(len(geometries), 1)
            self.assertTrue("FID1.tif" in geometries)
            self.assertTrue(geometries["FID1.tif"] is None)
    def testSuccessNotPolygon(self):
        """A non-Polygon geometry type is also stored as null."""
        with self.flask_app.app_context():
            layer_name = path.make_unique_layer_name(
                path.RASTER, 42, time_period="2015", variable="variable"
            )
            geojson = copy.deepcopy(TestSaveRasterGeometries.GEOJSON)
            geojson["features"][0]["geometry"]["type"] = "Point"
            geofile.save_raster_geometries(layer_name, geojson)
            folder = path.to_folder_path(layer_name)
            filename = f"{self.wms_cache_dir}/rasters/{folder}/geometries.json"
            self.assertTrue(os.path.exists(filename))
            with open(filename, "r") as f:
                geometries = json.load(f)
            self.assertEqual(len(geometries), 1)
            self.assertTrue("FID1.tif" in geometries)
            self.assertTrue(geometries["FID1.tif"] is None)
class TestSaveRasterFile(BaseApiTest):
    """Tests for geofile.save_raster_file."""
    def _save_and_check(self, filename):
        """Save the sample GeoTIFF under `filename` and assert it exists in
        the raster/42 storage."""
        with self.flask_app.app_context():
            sample = self.get_testdata_path("hotmaps-cdd_curr_adapted.tif")
            with open(sample, "rb") as handle:
                payload = handle.read()
            self.assertTrue(geofile.save_raster_file("raster/42", filename, payload))
            backend = storage.create("raster/42")
            self.assertTrue(
                os.path.exists(backend.get_file_path("raster/42", filename))
            )
    def testSimple(self):
        self._save_and_check("file.tif")
    def testWithSubFolders(self):
        self._save_and_check("subfolder/file.tif")
class TestSaveCMFile(BaseApiTest):
    """Tests for geofile.save_cm_file."""
    def testSave(self):
        """Saving a CM raster writes both the data file and its projection."""
        with self.flask_app.app_context():
            cm_layer = "cm/some_name/01234567-0000-0000-0000-000000000000"
            sample = self.get_testdata_path("hotmaps-cdd_curr_adapted.tif")
            with open(sample, "rb") as handle:
                payload = handle.read()
            self.assertTrue(geofile.save_cm_file(cm_layer, "file.tif", payload))
            backend = storage.create(cm_layer)
            self.assertTrue(
                os.path.exists(backend.get_file_path(cm_layer, "file.tif"))
            )
            self.assertTrue(
                os.path.exists(backend.get_projection_file(cm_layer, "file.tif"))
            )
class TestSaveCMResult(BaseApiTest):
    """Tests for geofile.save_cm_result and geofile.get_cm_legend."""
    RESULT = {
        "legend": {
            "symbology": [],
        }
    }
    def testSaveResult(self):
        """A saved CM result lands in result.json and its legend reads back."""
        with self.flask_app.app_context():
            cm_layer = "cm/some_name/01234567-0000-0000-0000-000000000000"
            self.assertTrue(geofile.save_cm_result(cm_layer, TestSaveCMResult.RESULT))
            backend = storage.create(cm_layer)
            self.assertTrue(
                os.path.exists(backend.get_file_path(cm_layer, "result.json"))
            )
            legend = geofile.get_cm_legend(cm_layer)
            self.assertTrue(legend is not None)
            self.assertEqual(legend, TestSaveCMResult.RESULT["legend"])
class TestSaveCMParameters(BaseApiTest):
    """Tests for geofile.save_cm_parameters."""
    PARAMETERS = {
        "selection": {},
        "layer": "raster/42/file.tif",
        "parameters": {},
    }
    def testSaveParameters(self):
        """Saved CM parameters are persisted verbatim to parameters.json."""
        with self.flask_app.app_context():
            cm_layer = "cm/some_name/01234567-0000-0000-0000-000000000000"
            self.assertTrue(
                geofile.save_cm_parameters(cm_layer, TestSaveCMParameters.PARAMETERS)
            )
            backend = storage.create(cm_layer)
            params_file = backend.get_file_path(cm_layer, "parameters.json")
            self.assertTrue(os.path.exists(params_file))
            with open(params_file, "r") as handle:
                self.assertEqual(json.load(handle), TestSaveCMParameters.PARAMETERS)
| 34.222482 | 88 | 0.534729 |
ae776a59c5b3c2f0543df97b416e9a2ebb997432 | 488 | py | Python | mypylib/utils.py | WillJBrown/displayotron | 8fa143acc7a70c47d4a288bd81afb7241dd10b9f | [
"MIT"
] | null | null | null | mypylib/utils.py | WillJBrown/displayotron | 8fa143acc7a70c47d4a288bd81afb7241dd10b9f | [
"MIT"
] | null | null | null | mypylib/utils.py | WillJBrown/displayotron | 8fa143acc7a70c47d4a288bd81afb7241dd10b9f | [
"MIT"
] | null | null | null | import os
import sys
def get_script_path():
    """Absolute, symlink-resolved path of the invoked script (sys.argv[0])."""
    return os.path.realpath(sys.argv[0])
def get_script_dir():
    """Directory containing the invoked script (symlinks resolved)."""
    invoked = os.path.realpath(sys.argv[0])
    return os.path.dirname(invoked)
def get_executing_file_path(): #e.g. the path to the module file
    """Absolute, symlink-resolved path of this module file.

    Renamed: this used to be a duplicate `get_executing_file_dir` definition
    that the directory variant below silently shadowed, making it dead code.
    """
    return os.path.realpath(__file__)
def get_executing_file_dir(): #e.g. the path to the folder the module is in
    """Directory containing this module file (symlinks resolved)."""
    return os.path.dirname(os.path.realpath(__file__))
| 32.533333 | 75 | 0.75 |
ae777c31a89605e1d2a2a1e2bfa85ac8840eebee | 6,676 | py | Python | src/modules/base/Configuration.py | andreaswatch/piTomation | 140bff77ad0b84ad17898106c7be7dc48a2d0783 | [
"MIT"
] | null | null | null | src/modules/base/Configuration.py | andreaswatch/piTomation | 140bff77ad0b84ad17898106c7be7dc48a2d0783 | [
"MIT"
] | null | null | null | src/modules/base/Configuration.py | andreaswatch/piTomation | 140bff77ad0b84ad17898106c7be7dc48a2d0783 | [
"MIT"
] | null | null | null | '''
Basic piTomation configuration options.
'''
from pydantic import BaseModel
from typing import Any, Optional, Union
from pydantic.class_validators import validator
# Setting names to None in __pdoc__ hides these module-internal helpers from
# pdoc-generated documentation.
__pdoc__ = {
    "WithPlugins": None,
    "configuration": None
}
__registry: dict[type, list[type]] = {}
'''Contains all @configuration class types, key is the base type'''
def configuration(cls):
    '''Class decorator for configuration classes.

    All configurations in the configuration file must be tagged with
    @configuration so that `__registry` knows the classes: the decorated
    class is appended to `__registry` under each of its direct base classes
    (which `WithPlugins` later turns into a Union).  The class itself is
    returned unchanged.
    '''
    def _register(klass):
        # Record klass under every direct base so plugin unions can be built.
        for base in klass.__bases__:
            if base not in __registry:
                __registry[base] = []
            __registry[base].append(klass)
        # The old implementation carried an extra `if not hasBase: if base is
        # not None:` branch referencing the undefined attribute `self.Type`;
        # it was unreachable (base is still None exactly when the loop never
        # ran) and has been removed.
    _register(cls)
    return cls
def WithPlugins(t: type):
    '''Build a Union of all plugin classes registered for base type `t`.'''
    if t not in __registry.keys():
        # Plugins register themselves via @configuration at import time, so a
        # missing key means the plugin modules were imported too late.
        raise Exception("AppConfiguration must get imported after all plugins")
    plugin_classes = list(__registry[t])
    # With a single entry, Union[(C,)] simply collapses to C.
    return Union[tuple(plugin_classes)] # type: ignore
#@configuration
class Configuration(BaseModel):
    '''Base class for all configuration classes'''
    def __init__(__pydantic_self__, **data: Any) -> None: #type: ignore
        '''YAML configuration'''
        # NOTE(review): this __init__ only forwards to pydantic's BaseModel
        # and could be removed — presumably kept so generated docs show a
        # constructor entry; confirm before deleting.
        super().__init__(**data)
    debug: Optional[bool] = False
    '''(Optional, bool): Enable additional debugging output for this instance'''
    comment: Optional[str]
    '''(Optional, string): Additional text information about this node. Not used anywhere.'''
#@configuration
class IdConfiguration(Configuration):
    '''Base class for all configuration classes that provide an Id.'''
    # `id` shadows the builtin of the same name inside this class body; the
    # field name is part of the public YAML schema and cannot be renamed.
    id: str
    '''(Required, string): This is the name of the node. It should always be unique in your piTomation network.'''
#@configuration
class VariablesConfiguration(Configuration):
    '''Adds user-defined variables to a node, accessed as id(myId).variables.myVariable.
    <details>
    Example:
    ```
    platform: mqtt
    variables:
        - myVariableA: "ValueA"
        - myVariableB: "On"
    ```
    </details>
    '''
    variables: Optional[dict]
    '''(Optional, dictionary of variables): Variables, exposed as id(xy).variables.name'''
#@configuration
class ConditionConfiguration(Configuration):
    '''Configuration settings for a Condition: `actual` is compared against
    `expected` using the named `comperator` function.'''
    actual: Union[str, dict]
    '''(Required, string or dictionary): The actual value to compare, e.g. "{{payload}}".
    The value can contain either a simple string or a dictionary of values (e.g. a json payload from a mqtt message).
    '''
    # NOTE: "comperator" (sic) is the field name users write in their YAML
    # configuration; renaming it would break every existing config file.
    comperator: str
    '''(Required, string): Function name used to compare the values, currently available: [contains, equals, startWith, endsWith].'''
    inverted: Optional[bool] = False
    '''(Optional, bool): Invert result.'''
    expected: Union[str, dict]
    '''(Required, string or dictionary): Expected value.'''
#@configuration
class ActionTriggerConfiguration(Configuration):
    '''Configuration settings for an ActionTrigger: names an action to run
    and the values passed to it.
    <details>
    # Example 1:
    Print the last received payload to the console:
    ```
    actions:
      - action: print
        values:
          payload: "{{payload}}"
    ```
    # Example 2:
    Print the last received topic and payload to the console:
    ```
    actions:
      - action: print
        values:
          payload: "Got a message on topic '{{topic}}' with payload: {{payload}}"
    ```
    </details>
    '''
    action: str
    '''(Required, string): Id of the Node/Action to execute.'''
    values: Optional[dict] = {}
    '''(Optional, dictionary): Values to pass to the action.'''
class AutomationConfiguration(Configuration):
    '''An Automation consists of optional conditions and a list of actions to
    execute; the actions run only when every condition holds.
    <details>
    # Example:
    If you get a payload like this:
    ```
    {"something": "value", "type": "REGISTER_OK"}
    ```
    and want to check if the type == "REGISTER_OK", the configuration could look like this:
    ```
    on_...:
      - conditions:
        - actual: "{{#payload}}{{type}}{{/payload}}"
          comperator: equals
          expected: REGISTER_OK
        actions:
        - action: print
          values:
            payload: Register is OK
    ```
    If you also want to check for other values, you could add this configuration below the upper one:
    ```
      - conditions:
        - actual: "{{#payload}}{{type}}{{/payload}}"
          comperator: equals
          expected: REGISTER_FAIL
        actions:
        - action: print
          values:
            payload: Register is FAIL
    ```
    </details>
    '''
    conditions: Optional[list[ConditionConfiguration]] = []
    '''(Optional, list of conditions): piTomation evaluates these conditions before actions get executed, see `ConditionConfiguration`.'''
    actions: list[ActionTriggerConfiguration] = []
    '''(Required, list of actions): Actions to execute, see `ActionTriggerConfiguration`.'''
class StackableConfiguration(IdConfiguration, VariablesConfiguration):
    '''Provides default Automations that are executed by all Platforms, Actions and Sensors.'''
    on_init: Optional[list[AutomationConfiguration]] = []
    '''(Optional, List of Automations): Automations to execute after init is done, see `AutomationConfiguration`.'''
    on_dispose: Optional[list[AutomationConfiguration]] = []
    '''(Optional, List of Automations): Automations to execute before this platform is disposed, see `AutomationConfiguration`.'''
class PlatformConfiguration(StackableConfiguration):
    '''Base class for all platform configuration classes.'''
    platform: str
    '''(Required, string): Plugin name of the platform.'''
class ScriptConfiguration(StackableConfiguration):
    '''Base class for all script configuration classes.'''
    platform: str
    '''(Required, string): The platform of this script.'''
    # NOTE: `type` shadows the builtin inside this class body; the field name
    # is part of the public YAML schema and cannot be renamed.
    type: Optional[str]
    '''(Optional, string): The class type of this script.'''
    on_state_changed: Optional[list[AutomationConfiguration]] = []
    '''(Optional, List of Automations): Automations to execute after the Sensor's state has changed, see `AutomationConfiguration`.'''
class ActionConfiguration(ScriptConfiguration):
    '''Base class for all action configuration classes.'''
class SensorConfiguration(ScriptConfiguration):
    '''Base class for all sensor configuration classes.'''
class DeviceConfiguration(VariablesConfiguration):
    '''Top-level configuration of one piTomation device.'''
    name: str
    '''(Required, string): Name of the device.'''
    version: str
    '''(Required, string): Version of the configuration.'''
    # Unlike StackableConfiguration.on_init (Automations), this on_init is a
    # plain list of action triggers.
    on_init: Optional[list[ActionTriggerConfiguration]] = []
    '''(Optional, List of Actions): Actions to execute after init is done, see `ActionTriggerConfiguration`.'''
| 29.539823 | 142 | 0.687088 |
ae78cbed15f9ad137cc2fb68f470a749694330cd | 72 | py | Python | danceschool/payments/paypal/__init__.py | django-danceschool/django-danceschool | 65ae09ffdcb0821e82df0e1f634fe13c0384a525 | [
"BSD-3-Clause"
] | 32 | 2017-09-12T04:25:25.000Z | 2022-03-21T10:48:07.000Z | danceschool/payments/paypal/__init__.py | django-danceschool/django-danceschool | 65ae09ffdcb0821e82df0e1f634fe13c0384a525 | [
"BSD-3-Clause"
] | 97 | 2017-09-01T02:43:08.000Z | 2022-01-03T18:20:34.000Z | danceschool/payments/paypal/__init__.py | django-danceschool/django-danceschool | 65ae09ffdcb0821e82df0e1f634fe13c0384a525 | [
"BSD-3-Clause"
] | 19 | 2017-09-26T13:34:46.000Z | 2022-03-21T10:48:10.000Z | default_app_config = 'danceschool.payments.paypal.apps.PaypalAppConfig'
| 36 | 71 | 0.861111 |
ae7a6bf6cf0a8187540066ce63f57293b91d1b01 | 25 | py | Python | datamaps/__init__.py | fossabot/datamaps-1 | c66c3f20e43bd41ec0874f40f39bd0eff89fd476 | [
"MIT"
] | null | null | null | datamaps/__init__.py | fossabot/datamaps-1 | c66c3f20e43bd41ec0874f40f39bd0eff89fd476 | [
"MIT"
] | null | null | null | datamaps/__init__.py | fossabot/datamaps-1 | c66c3f20e43bd41ec0874f40f39bd0eff89fd476 | [
"MIT"
] | null | null | null | __version__ = "1.0.0b13"
| 12.5 | 24 | 0.68 |
ae7c6c2bff216ece676b2bf277a3dea69a26d9e6 | 2,414 | py | Python | python/opentrons_ot3_firmware/scripts/generate_header.py | Opentrons/ot3-firmware | 3047fbf54ed2bf9350a9fe02c0c0fb246ac0285a | [
"Apache-2.0"
] | 3 | 2021-09-21T13:20:27.000Z | 2021-12-02T13:12:32.000Z | python/opentrons_ot3_firmware/scripts/generate_header.py | Opentrons/ot3-firmware | 3047fbf54ed2bf9350a9fe02c0c0fb246ac0285a | [
"Apache-2.0"
] | 36 | 2021-08-10T15:18:09.000Z | 2022-03-30T19:08:13.000Z | python/opentrons_ot3_firmware/scripts/generate_header.py | Opentrons/ot3-firmware | 3047fbf54ed2bf9350a9fe02c0c0fb246ac0285a | [
"Apache-2.0"
] | null | null | null | """Script to generate c++ header file of canbus constants."""
from __future__ import annotations
import argparse
import io
from enum import Enum
from typing import Type, Any
import sys
from opentrons_ot3_firmware.constants import (
MessageId,
FunctionCode,
NodeId,
)
class block:
"""C block generator."""
def __init__(self, output: io.StringIO, start: str, terminate: str) -> None:
"""Construct a code block context manager.
Args:
output: the buffer in which to write
start: the text that begins the block
terminate: the text that ends the block
"""
self._output = output
self._start = start
self._terminate = terminate
def __enter__(self) -> block:
"""Enter the context manager."""
self._output.write(self._start)
return self
def __exit__(self, *exc: Any) -> None:
"""Exit the context manager."""
self._output.write(self._terminate)
def generate(output: io.StringIO) -> None:
    """Write the complete generated C++ header into ``output``.

    Emits a "do not modify" banner, ``#pragma once``, and one ``enum class``
    per CAN enumeration, all wrapped in the ``can_ids`` namespace.
    """
    output.write(
        "/********************************************\n"
        "* This is a generated file. Do not modify. *\n"
        "********************************************/\n"
        "#pragma once\n\n"
    )
    namespace = block(
        output=output,
        start="namespace can_ids {\n\n",
        terminate="} // namespace can_ids\n\n",
    )
    with namespace:
        for enum_type in (FunctionCode, MessageId, NodeId):
            write_enum(enum_type, output)
def write_enum(e: Type[Enum], output: io.StringIO) -> None:
    """Write a C++ ``enum class`` mirroring the Python enumeration ``e``.

    The Python enum's docstring becomes a ``/** ... */`` comment, and each
    member is emitted as ``NAME = 0x<value>,`` with the value in lowercase hex.
    """
    output.write(f"/** {e.__doc__} */\n")
    with block(
        output=output, start=f"enum class {e.__name__} {{\n", terminate="};\n\n"
    ):
        for i in e:
            output.write(f"    {i.name} = 0x{i.value:x},\n")
def main() -> None:
    """Parse command-line arguments and write the generated header."""
    arg_parser = argparse.ArgumentParser(
        description="Generate a C++ header file defining CANBUS constants."
    )
    # TARGET is optional; argparse.FileType opens it for writing, and the
    # default sends the output to stdout.
    arg_parser.add_argument(
        "target",
        metavar="TARGET",
        nargs="?",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="path of header file to generate; use - or do not specify for stdout",
    )
    generate(arg_parser.parse_args().target)
if __name__ == "__main__":
main()
| 27.431818 | 83 | 0.584921 |
ae81bb11bf5eb162ed9c0bef3b103ae5f25903e5 | 772 | py | Python | icekit/response_pages/migrations/0001_initial.py | ic-labs/django-icekit | c507ea5b1864303732c53ad7c5800571fca5fa94 | [
"MIT"
] | 52 | 2016-09-13T03:50:58.000Z | 2022-02-23T16:25:08.000Z | icekit/response_pages/migrations/0001_initial.py | ic-labs/django-icekit | c507ea5b1864303732c53ad7c5800571fca5fa94 | [
"MIT"
] | 304 | 2016-08-11T14:17:30.000Z | 2020-07-22T13:35:18.000Z | icekit/response_pages/migrations/0001_initial.py | ic-labs/django-icekit | c507ea5b1864303732c53ad7c5800571fca5fa94 | [
"MIT"
] | 12 | 2016-09-21T18:46:35.000Z | 2021-02-15T19:37:50.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Initial migration: creates the ResponsePage model. No dependencies
    # because this is the app's first migration.

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='ResponsePage',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=255)),
                # 'type' selects which HTTP error this page serves;
                # unique=True allows at most one page per error type.
                ('type', models.CharField(unique=True, max_length=5, choices=[(b'404', 'Page Not Found'), (b'500', 'Internal Server Error')])),
                ('is_active', models.BooleanField(default=False)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
ae81fe56f7386088702aa7258803c69277db7d71 | 5,495 | py | Python | tests/cancer.py | old-rob/cptac | 9b33893dd11c9320628a751c8840783a6ce81957 | [
"Apache-2.0"
] | null | null | null | tests/cancer.py | old-rob/cptac | 9b33893dd11c9320628a751c8840783a6ce81957 | [
"Apache-2.0"
] | null | null | null | tests/cancer.py | old-rob/cptac | 9b33893dd11c9320628a751c8840783a6ce81957 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Samuel Payne sam_payne@byu.edu
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# The purpose of this class is to organize a cancer object's datasets by
# type. dataset.py in the cptac package defines a lot of methods and members
# but there is no built-in way to call them in batches by type for testing.
import pytest
class Cancer:
    """Organizes a ``cptac.[Cancer]`` object's datasets by type.

    ``dataset.py`` in the cptac package defines many methods and members but
    no built-in way to call them in batches by type for testing; this wrapper
    sorts datasets and getters into categories the tests can iterate over.
    """

    # Datasets that describe samples (metadata) rather than measurements.
    metadata_types = [
        'clinical',
        'derived_molecular',
        'experimental_design',
        # See dataset.py for why these aren't included:
        # 'medical_history',
        # 'treatment',
        # 'followup'
    ]

    # Dataframe names that count as omics data.
    valid_omics_dfs = [
        'acetylproteomics',
        'circular_RNA',
        'CNV',
        'lincRNA',
        'lipidomics',
        'metabolomics',
        'miRNA',
        'phosphoproteomics',
        'phosphoproteomics_gene',
        'proteomics',
        'somatic_mutation_binary',
        'transcriptomics',
        'CNV_log2ratio',
        'CNV_gistic'
    ]

    # Frequently-mutated genes the mutation tests focus on.
    important_mutation_genes = ["TP53", "KRAS", "ARID1A", "PTEN", "EGFR"]

    # Dataframe types that participate in multi-table joins.
    multi_join_types = [
        "acetylproteomics",
        "CNV",
        "CNV_gistic",
        "CNV_log2ratio",
        "phosphoproteomics",
        "phosphoproteomics_gene",
        "proteomics",
        "somatic_mutation_binary",
        "somatic_mutation",
        "transcriptomics",
        "clinical",
        "derived_molecular",
        "experimental_design"
    ]

    def __init__(self, cancer_type, cancer_object):
        """
        Initialize a Cancer object.

        Cancer class is used as a wrapper for cptac.[Cancer] objects that
        will be tested.

        Parameters:
            cancer_type (string): name of the cancer
            cancer_object (cptac.[Cancer]): instance of the cptac.[Cancer] class
        """
        self.cancer_type = cancer_type
        self.cancer_object = cancer_object
        self.metadata = list()
        self.omics = list()
        self.valid_getters = dict()
        self.invalid_getters = dict()
        self.multi_joinables = dict()

        self._sort_datasets()
        self._sort_getters()
        self._gather_mutation_genes()

    def _sort_datasets(self):
        """Split the wrapped object's datasets into metadata vs. omics lists."""
        datasets = self.cancer_object.get_data_list().items()
        for (dataset, dimensions) in datasets:
            if dataset in Cancer.metadata_types:
                self.metadata.append(dataset)
            elif dataset in Cancer.valid_omics_dfs:
                self.omics.append(dataset)
            # These datasets take part in multi-join tests regardless of the
            # category chosen above.
            if dataset in ["clinical", "transcriptomics", "proteomics"]:
                self.multi_joinables[dataset] = list()

    def _sort_getters(self):
        """Collect all get_* attributes and sift them into valid/invalid."""
        all_getters = set()
        for attribute in dir(self.cancer_object):
            if attribute.startswith("get_"):
                all_getters.add(attribute)

        # Valid getters: one per dataset reported by get_data_list().
        datasets = self.cancer_object.get_data_list().keys()
        for d in datasets:
            try:
                if d.startswith("CNV") and self.cancer_type == "Ucecconf":
                    # Ucecconf exposes both CNV variants through a single
                    # get_CNV getter that takes the variant as an argument.
                    getter_name = "get_CNV"
                else:
                    getter_name = "get_" + d
                self.valid_getters[getter_name] = getattr(
                    self.cancer_object, getter_name)
            except Exception:
                pytest.fail(f"unable to add get {d} attribute")

        # Invalid getters: every remaining get_* attribute.
        # Bug fix: this loop previously checked the stale `getter_name` left
        # over from the loop above instead of each candidate `getter`, so
        # invalid_getters was never populated.
        for getter in all_getters:
            if getter not in self.valid_getters:
                self.invalid_getters[getter] = getattr(self.cancer_object, getter)

    def _gather_mutation_genes(self):
        """Record which important mutation genes appear in this cancer's
        somatic_mutation table (empty list when the table is absent)."""
        self.mutation_genes = list()
        if "somatic_mutation" in self.cancer_object.get_data_list():
            recorded_genes = self.cancer_object.get_somatic_mutation()["Gene"].tolist()
            for g in self.important_mutation_genes:
                if g in recorded_genes:
                    self.mutation_genes.append(g)

    def get_dataset(self, dataset, CNV_type="log2ratio"):
        '''
        Args:
            dataset: the desired dataset
            CNV_type: if the desired dataset is CNV and the cancer type is
                Ucecconf, which version of the dataset to return
                ("log2ratio" or "gistic")

        Returns:
            a dataframe for the desired dataset
        '''
        if dataset == "CNV" and self.cancer_type == "Ucecconf":
            return self.valid_getters["get_CNV"](CNV_type)
        return self.valid_getters["get_" + dataset]()

    def get_omics(self):
        """Return the list of omics dataset names."""
        return self.omics

    def get_metadata(self):
        """Return the list of metadata dataset names."""
        return self.metadata

    def get_mutation_genes(self):
        """Return the important mutation genes present in this cancer."""
        return self.mutation_genes
ae836b13e8e1a6b59a6fe8580af78a2b08d03bc1 | 1,958 | py | Python | mobula/solvers/LRUpdater.py | wkcn/mobula | 4eec938d6477776f5f2d68bcf41de83fb8da5195 | [
"MIT"
] | 47 | 2017-07-15T02:13:18.000Z | 2022-01-01T09:37:59.000Z | mobula/solvers/LRUpdater.py | wkcn/mobula | 4eec938d6477776f5f2d68bcf41de83fb8da5195 | [
"MIT"
] | 3 | 2018-06-22T13:55:12.000Z | 2020-01-29T01:41:13.000Z | mobula/solvers/LRUpdater.py | wkcn/mobula | 4eec938d6477776f5f2d68bcf41de83fb8da5195 | [
"MIT"
] | 8 | 2017-09-03T12:42:54.000Z | 2020-09-27T03:38:59.000Z | #coding=utf-8
import numpy as np
# TODO: MULTISTEP
# Number of learning-rate policies declared in LR_POLICY below; also the
# length of the LRUpdater.METHODS dispatch table.
LR_POLICY_NUM = 7


class LR_POLICY:
    # Enum-like namespace: each policy name is bound to an integer index
    # (0..LR_POLICY_NUM-1) used to select its update rule in LRUpdater.METHODS.
    FIXED, STEP, EXP, INV, MULTISTEP, POLY, SIGMOID = range(LR_POLICY_NUM)
class LRUpdater:
    """Maps an iteration number to a learning rate under a decay policy.

    The policy index (see LR_POLICY) selects an update rule from the
    LRUpdater.METHODS dispatch table, which is populated at module level
    after the class definition.
    """

    def __init__(self, *args, **kwargs):
        # "base_lr" takes precedence over the "lr" alias; default is 1.0.
        self.base_lr = kwargs["base_lr"] if "base_lr" in kwargs else kwargs.get("lr", 1.0)
        # Optional decay hyper-parameters; ones a policy doesn't use stay None.
        for hyper in ("gamma", "stepsize", "power", "max_iter"):
            setattr(self, hyper, kwargs.get(hyper, None))
        self.method = None
        self.lr_policy = kwargs.get("lr_policy", LR_POLICY.FIXED)
        self.set_policy(self.lr_policy)

    def set_policy(self, p):
        """Select the update rule for policy index ``p``."""
        self.lr_policy = p
        self.method = LRUpdater.METHODS[p]

    def get_lr(self, iter_num):
        """Return the learning rate for iteration ``iter_num``."""
        return self.method(self, iter_num)

    def fixed(self, iter_num):
        # Constant learning rate.
        return self.base_lr

    def step(self, iter_num):
        # gamma, stepsize: multiply by gamma every `stepsize` iterations.
        completed_steps = iter_num // self.stepsize
        return self.base_lr * np.power(self.gamma, completed_steps)

    def exp(self, iter_num):
        # gamma: exponential decay per iteration.
        return self.base_lr * np.power(self.gamma, iter_num)

    def inv(self, iter_num):
        # gamma, power.
        return self.base_lr * np.power(1.0 + self.gamma * iter_num, -self.power)

    def poly(self, iter_num):
        # power, max_iter: polynomial decay reaching zero at max_iter.
        progress = iter_num * 1.0 / self.max_iter
        return self.base_lr * np.power(1 - progress, self.power)

    def sigmoid(self, iter_num):
        # gamma, stepsize: sigmoid-shaped decay centred at `stepsize`.
        midpoint_offset = iter_num - self.stepsize
        return self.base_lr * (1.0 / (1.0 + np.exp(-self.gamma * midpoint_offset)))
# Dispatch table: index with an LR_POLICY constant to get the matching
# (unbound) LRUpdater update rule; LRUpdater.set_policy reads this list.
LRUpdater.METHODS = [None] * LR_POLICY_NUM
LRUpdater.METHODS[LR_POLICY.FIXED] = LRUpdater.fixed
LRUpdater.METHODS[LR_POLICY.STEP] = LRUpdater.step
LRUpdater.METHODS[LR_POLICY.EXP] = LRUpdater.exp
LRUpdater.METHODS[LR_POLICY.INV] = LRUpdater.inv
LRUpdater.METHODS[LR_POLICY.POLY] = LRUpdater.poly
LRUpdater.METHODS[LR_POLICY.SIGMOID] = LRUpdater.sigmoid
# NOTE: LR_POLICY.MULTISTEP has no entry (see the TODO above), so its slot
# stays None and selecting that policy would fail when get_lr is called.
| 37.653846 | 94 | 0.650664 |
ae83889dc0e1e2a10d944afb86e01b0c15293029 | 5,098 | py | Python | code/mlflow.py | michaelhball/ml_tidbits | 55b77fded5f31cd280f043c8aa792a07ca572170 | [
"MIT"
] | 1 | 2021-04-15T19:42:51.000Z | 2021-04-15T19:42:51.000Z | code/mlflow.py | michaelhball/ml_toolshed | 55b77fded5f31cd280f043c8aa792a07ca572170 | [
"MIT"
] | null | null | null | code/mlflow.py | michaelhball/ml_toolshed | 55b77fded5f31cd280f043c8aa792a07ca572170 | [
"MIT"
] | null | null | null | import git
from mlflow.tracking import MlflowClient
from .utils import scp_files
class MyMLFlowClient:
    """Thin wrapper around ``mlflow.tracking.MlflowClient`` that centralises
    all MLFlow interactions. A single instance can serve many training runs.
    """

    def __init__(self, tracking_uri):
        """Create the underlying tracking client.

        :param tracking_uri: (str) MLFlow tracking URI for the tracking API
        """
        self.client = MlflowClient(tracking_uri=tracking_uri)
        self.run = None

    def create_new_run(self, experiment_name, user_name, set_tags=True, run_name=None):
        """Create a new Run in the MLFlow tracking server.

        The experiment is created on demand (tagged with its creator) if it
        does not exist yet.

        :param experiment_name: (str) name of experiment to create run within
        :param user_name: (str) user name of person creating run
        :param set_tags: (bool) whether to assign the default tag set to the run
        :param run_name: (str) optional run name (run ID is used if omitted)
        :return: run ID on success, False on any failure
        """
        try:
            existing = self.client.get_experiment_by_name(experiment_name)
            if existing is not None:
                experiment_id = existing.experiment_id
            else:
                experiment_id = self.client.create_experiment(experiment_name)
                self.client.set_experiment_tag(experiment_id, "created_by", user_name)
            run_id = self.client.create_run(experiment_id).info.run_id
            if set_tags and not self._set_run_tags(user_name, run_id, run_name):
                return False
            return run_id
        except Exception as exc:
            print('Exception initialising MLFlow run', exc)
            return False

    def _set_run_tags(self, user_name, run_id, run_name):
        """Attach the default system tags (identity, user, git state) to a run.

        :return: True on success, False on any failure
        """
        try:
            repo = git.Repo(search_parent_directories=True)
            self.client.set_tag(run_id, "run_id", run_id)
            self.client.set_tag(run_id, "mlflow.runName", run_id if run_name is None else run_name)
            self.client.set_tag(run_id, "mlflow.user", user_name)
            self.client.set_tag(run_id, "mlflow.source.git.repoURL", repo.remotes.origin.url)
            self.client.set_tag(run_id, "mlflow.source.git.branch", repo.active_branch.name)
            self.client.set_tag(run_id, "mlflow.source.git.commit", repo.head.object.hexsha)
            return True
        except Exception as exc:
            print('Exception setting MLFlow run system tags: \n', exc)
            return False

    def log_param(self, run_id, param_dict):
        """Log a dictionary of params to the MLFlow tracking server.

        :param run_id: (str) run ID
        :param param_dict: (dict) mapping of param_name -> param_value
        :return: True on success, False on any failure
        """
        try:
            for name, value in param_dict.items():
                self.client.log_param(run_id, name, value)
            return True
        except Exception as exc:
            print(f'Exception logging params run {run_id}', exc)
            return False

    def log_metrics(self, run_id, metric_dict, step=None, timestamp=None):
        """Log a dictionary of metrics, optionally at a step or timestamp.

        :param run_id: (str) run ID
        :param metric_dict: (dict) mapping of metric_name -> metric_value
        :param step: (int) step to associate metrics with (e.g. epoch)
        :param timestamp: timestamp to associate metrics with
        :return: True on success, False on any failure
        """
        try:
            for name, value in metric_dict.items():
                self.client.log_metric(run_id, name, value, step=step, timestamp=timestamp)
            return True
        except Exception as exc:
            print(f'Exception logging metrics to run {run_id}', exc)
            return False

    def download_artifact(self, run_id, remote_dir, local_dir, ssh_params=None):
        """Download an artifact from the MLFlow server (locally or over SSH).

        :param run_id: (str) run ID
        :param remote_dir: (path) relative path inside the run's artifact store
        :param local_dir: (path) local directory in which to save the artifact
        :param ssh_params: (dict) must contain host, username, and password
            when provided; triggers the SCP path instead of the MLFlow API
        :return: True if successful, False otherwise
        """
        try:
            if ssh_params is None:
                self.client.download_artifacts(run_id, remote_dir, local_dir)
            else:
                run = self.client.get_run(run_id)
                artifact_uri = f"{run.info.artifact_uri}/{remote_dir}"
                success = scp_files(**ssh_params, remote_dir=artifact_uri, local_dir=local_dir, direction='from')
                if isinstance(success, bool) and not success:
                    return False
            return True
        except Exception as exc:
            print(f'Exception downloading artifact from run {run_id}', exc)
            return False
ae84bc9755e4432da8e4dc0549c028ec150a10c7 | 4,215 | py | Python | infinite_nature/autocruise.py | DionysisChristopoulos/google-research | 7f59ef421beef32ca16c2a7215be74f7eba01a0f | [
"Apache-2.0"
] | 23,901 | 2018-10-04T19:48:53.000Z | 2022-03-31T21:27:42.000Z | infinite_nature/autocruise.py | davidfitzek/google-research | eb2b142f26e39aac1dcbb768417465ae9d4e5af6 | [
"Apache-2.0"
] | 891 | 2018-11-10T06:16:13.000Z | 2022-03-31T10:42:34.000Z | infinite_nature/autocruise.py | davidfitzek/google-research | eb2b142f26e39aac1dcbb768417465ae9d4e5af6 | [
"Apache-2.0"
] | 6,047 | 2018-10-12T06:31:02.000Z | 2022-03-31T13:59:28.000Z | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Uses a heuristic to automatically navigate generated scenes.
fly_camera.fly_dynamic will generate poses using disparity maps that avoid
crashing into nearby terrain.
"""
import pickle
import time
import config
import fly_camera
import imageio
import infinite_nature_lib
import numpy as np
import tensorflow as tf
tf.compat.v1.flags.DEFINE_string(
"output_folder", "autocruise_output",
"Folder to save autocruise results")
tf.compat.v1.flags.DEFINE_integer(
"num_steps", 500,
"Number of steps to fly.")
FLAGS = tf.compat.v1.flags.FLAGS
def generate_autocruise(np_input_rgbd, checkpoint,
                        save_directory, num_steps, np_input_intrinsics=None):
  """Saves num_steps frames of infinite nature using an autocruise algorithm.

  Args:
    np_input_rgbd: [H, W, 4] numpy image and disparity to start
      Infinite Nature with values ranging in [0, 1]
    checkpoint: (str) path to the pre-trained checkpoint
    save_directory: (str) the directory to save RGB images to
    num_steps: (int) the number of steps to generate
    np_input_intrinsics: [4] estimated intrinsics. If not provided,
      makes assumptions on the FOV.
  """
  render_refine, style_encoding = infinite_nature_lib.load_model(checkpoint)
  if np_input_intrinsics is None:
    # 0.8 focal_x corresponds to a FOV of ~64 degrees. This can be
    # manually changed if more assumptions about the input image is given.
    h, w, unused_channel = np_input_rgbd.shape
    ratio = w / float(h)
    np_input_intrinsics = np.array([0.8, 0.8 * ratio, .5, .5], dtype=np.float32)

  # Resize to a fixed 160x256 resolution before encoding and rendering.
  np_input_rgbd = tf.image.resize(np_input_rgbd, [160, 256])
  # The style encoding is computed once from the input and reused for every
  # generated frame.
  style_noise = style_encoding(np_input_rgbd)

  # Flight parameters; meander magnitudes of 0.0 disable lateral wandering.
  meander_x_period = 100
  meander_y_period = 100
  meander_x_magnitude = 0.0
  meander_y_magnitude = 0.0
  fly_speed = 0.2
  horizon = 0.3
  near_fraction = 0.2
  # 3x4 [I | 0] pose: identity rotation, zero translation.
  starting_pose = np.array(
      [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0]],
      dtype=np.float32)

  # autocruise heuristic function: proposes the next pose from the current
  # RGBD frame so the camera avoids crashing into nearby terrain.
  fly_next_pose_function = fly_camera.fly_dynamic(
      np_input_intrinsics, starting_pose,
      speed=fly_speed,
      meander_x_period=meander_x_period,
      meander_x_magnitude=meander_x_magnitude,
      meander_y_period=meander_y_period,
      meander_y_magnitude=meander_y_magnitude,
      horizon=horizon,
      near_fraction=near_fraction)

  if not tf.io.gfile.exists(save_directory):
    tf.io.gfile.makedirs(save_directory)

  curr_pose = starting_pose
  curr_rgbd = np_input_rgbd
  t0 = time.time()
  # Each iteration: pick the next pose from the current frame, re-render the
  # scene at that pose, and save the RGB channels as a zero-padded PNG.
  for i in range(num_steps - 1):
    next_pose = fly_next_pose_function(curr_rgbd)
    curr_rgbd = render_refine(
        curr_rgbd, style_noise, curr_pose, np_input_intrinsics,
        next_pose, np_input_intrinsics)
    # Update pose information for view.
    curr_pose = next_pose
    imageio.imsave("%s/%04d.png" % (save_directory, i),
                   (255 * curr_rgbd[:, :, :3]).astype(np.uint8))
    if i % 100 == 0:
      print("%d / %d frames generated" % (i, num_steps))
      print("time / step: %04f" % ((time.time() - t0) / (i + 1)))
      print()
if len(unused_arg) > 1:
raise tf.app.UsageError(
"Too many command-line arguments.")
config.set_training(False)
model_path = "ckpt/model.ckpt-6935893"
input_pkl = pickle.load(open("autocruise_input1.pkl", "rb"))
generate_autocruise(input_pkl["input_rgbd"],
model_path,
FLAGS.output_folder,
FLAGS.num_steps)
if __name__ == "__main__":
tf.compat.v1.enable_eager_execution()
tf.compat.v1.app.run(main)
| 33.452381 | 80 | 0.704152 |
ae84e004def4ae5d171603fde9ae436d07658e06 | 1,675 | py | Python | tilequeue/queue/file.py | ducdk90/tilequeue | c664b5c89a9f0e6743405ab266aa9ca80b57806e | [
"MIT"
] | 29 | 2016-11-03T18:39:21.000Z | 2022-02-27T17:42:37.000Z | tilequeue/queue/file.py | ducdk90/tilequeue | c664b5c89a9f0e6743405ab266aa9ca80b57806e | [
"MIT"
] | 146 | 2016-07-07T16:41:07.000Z | 2021-12-11T00:27:20.000Z | tilequeue/queue/file.py | ducdk90/tilequeue | c664b5c89a9f0e6743405ab266aa9ca80b57806e | [
"MIT"
] | 28 | 2016-08-19T16:08:52.000Z | 2021-07-26T10:16:29.000Z | from tilequeue.queue import MessageHandle
import threading
class OutputFileQueue(object):
    '''
    A local, file-based queue for storing the coordinates of tiles to render.
    Can be used as a drop-in replacement for `tilequeue.queue.sqs.SqsQueue`.

    Note that it doesn't support reading/writing from multiple `tilequeue`
    instances; you *can* `seed` and `process` at the same time, but you
    *can't* run more than one `seed` or `write` instance at the same time.
    This is primarily meant for development/debugging, so adding
    multi-process locking probably isn't worth the complexity.
    '''

    def __init__(self, fp, read_size=10):
        # fp is any file-like object; the lock serialises access to it.
        self.read_size = read_size
        self.fp = fp
        self.lock = threading.RLock()

    def enqueue(self, payload):
        """Append one payload as its own line."""
        with self.lock:
            self.fp.write(payload + '\n')

    def enqueue_batch(self, payloads):
        """Append each payload in turn; returns (enqueued_count, 0)."""
        count = 0
        for count, payload in enumerate(payloads, 1):
            self.enqueue(payload)
        return count, 0

    def read(self):
        """Read up to read_size lines, wrapping non-empty ones as handles."""
        with self.lock:
            raw_payloads = (
                self.fp.readline().strip() for _ in range(self.read_size)
            )
            return [MessageHandle(None, p) for p in raw_payloads if p]

    def job_done(self, msg_handle):
        pass

    def job_progress(self, handle):
        pass

    def clear(self):
        """Empty the backing file; returns -1 (count unknown)."""
        with self.lock:
            self.fp.seek(0)
            self.fp.truncate()
            return -1

    def close(self):
        """Clear the queue and close the backing file."""
        with self.lock:
            self.clear()
            self.fp.close()
ae858354ab4f1914f4dfc11dd1d64a5507769f1b | 563 | py | Python | app/gui/repeater.py | TomVollerthun1337/logsmith | f2ecab4dea295d5493a9a3e77a2837b13fa139e5 | [
"Apache-2.0"
] | 19 | 2020-01-18T00:25:43.000Z | 2022-03-14T07:39:08.000Z | app/gui/repeater.py | TomVollerthun1337/logsmith | f2ecab4dea295d5493a9a3e77a2837b13fa139e5 | [
"Apache-2.0"
] | 85 | 2020-01-21T12:13:56.000Z | 2022-03-31T04:01:03.000Z | app/gui/repeater.py | TomVollerthun1337/logsmith | f2ecab4dea295d5493a9a3e77a2837b13fa139e5 | [
"Apache-2.0"
] | 2 | 2020-06-25T06:15:19.000Z | 2021-02-15T18:17:38.000Z | import logging
from PyQt5.QtCore import QTimer
logger = logging.getLogger('logsmith')
class Repeater:
    """Schedules a callable to run once after a delay via a single-shot QTimer.

    Starting a new task stops any timer that is still pending.
    """

    def __init__(self):
        self._current_task = None

    def start(self, task, delay_seconds):
        """Cancel any pending timer and fire `task` once after `delay_seconds`."""
        delay_ms = delay_seconds * 1000
        self.stop()
        logger.info('start timer')
        one_shot = QTimer()
        one_shot.setSingleShot(True)
        one_shot.timeout.connect(task)
        one_shot.start(delay_ms)
        # Keep a reference so the timer isn't garbage collected while pending.
        self._current_task = one_shot

    def stop(self):
        """Stop the pending timer, if any."""
        if self._current_task:
            self._current_task.stop()
ae867f0e402cb89db3cccc626cd6f645b33f32f2 | 40 | py | Python | condensate/core/__init__.py | Zwierlein/condensate | 34908b7e99785e9a4a9c5c743fe1a8e6f4cbf4ad | [
"MIT"
] | 4 | 2021-07-24T10:57:06.000Z | 2021-12-11T01:24:54.000Z | condensate/core/__init__.py | Zwierlein/condensate | 34908b7e99785e9a4a9c5c743fe1a8e6f4cbf4ad | [
"MIT"
] | 9 | 2021-07-15T04:13:23.000Z | 2021-08-02T21:57:00.000Z | condensate/core/__init__.py | Zwierlein/condensate | 34908b7e99785e9a4a9c5c743fe1a8e6f4cbf4ad | [
"MIT"
] | 2 | 2021-07-21T10:39:30.000Z | 2021-08-01T03:05:14.000Z | from condensate.core.build import gpcore | 40 | 40 | 0.875 |
ae874a5f5cca2dcc55151c5b0e06fba1846032d7 | 250 | py | Python | urdubiometer/scanner/__init__.py | urdubiometer/urdubiometer | 034c1efc0403352caa9c5c944cf9450b8833bb24 | [
"BSD-3-Clause"
] | null | null | null | urdubiometer/scanner/__init__.py | urdubiometer/urdubiometer | 034c1efc0403352caa9c5c944cf9450b8833bb24 | [
"BSD-3-Clause"
] | 220 | 2019-07-30T19:20:59.000Z | 2022-03-28T10:33:19.000Z | urdubiometer/scanner/__init__.py | urdubiometer/urdubiometer | 034c1efc0403352caa9c5c944cf9450b8833bb24 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""Sub-level package for Scanner, a metrical scanner in Urdu BioMeter."""
__author__ = """A. Sean Pue"""
__email__ = "a@seanpue.com"
from .scanner import * # noqa
from .ghazal import * # noqa
from .types import * # noqa
| 22.727273 | 73 | 0.652 |
ae87c58fe103e3173ad0eb9f9da060726e492203 | 346 | py | Python | search01.py | kekelele/canlib-for-kvaser | 5f7f55319a33956de0bb9a1376ee7fbf897b8c4b | [
"Apache-2.0"
] | null | null | null | search01.py | kekelele/canlib-for-kvaser | 5f7f55319a33956de0bb9a1376ee7fbf897b8c4b | [
"Apache-2.0"
] | null | null | null | search01.py | kekelele/canlib-for-kvaser | 5f7f55319a33956de0bb9a1376ee7fbf897b8c4b | [
"Apache-2.0"
] | 1 | 2019-08-16T04:25:44.000Z | 2019-08-16T04:25:44.000Z | from canlib import canlib
# Enumerate the CAN channels known to the Kvaser CANlib driver and print a
# one-line description (device name, UPC, serial number) for each.
num_channels = canlib.getNumberOfChannels()
print("Found %d channels" % num_channels)
# Idiom fix: range(0, n) simplified to range(n).
for ch in range(num_channels):
    chdata = canlib.ChannelData(ch)
    print("%d. %s (%s / %s)" % (ch, chdata.device_name,
                                chdata.card_upc_no,
                                chdata.card_serial_no))
ae882f531080b30ee1443e5c07ad6e5e57ec1e85 | 2,208 | py | Python | src/assisted_test_infra/test_infra/helper_classes/config/base_config.py | nirarg/assisted-test-infra | e07c43501c1d9bfaa1aee3aea49f1ef359faee07 | [
"Apache-2.0"
] | null | null | null | src/assisted_test_infra/test_infra/helper_classes/config/base_config.py | nirarg/assisted-test-infra | e07c43501c1d9bfaa1aee3aea49f1ef359faee07 | [
"Apache-2.0"
] | null | null | null | src/assisted_test_infra/test_infra/helper_classes/config/base_config.py | nirarg/assisted-test-infra | e07c43501c1d9bfaa1aee3aea49f1ef359faee07 | [
"Apache-2.0"
] | null | null | null | from abc import ABC, abstractmethod
from dataclasses import asdict, dataclass
from typing import Any
from triggers.env_trigger import DataPool, Triggerable
@dataclass
class BaseConfig(Triggerable, ABC):
    """Base class for dataclass-style configs whose unset (None) fields are
    filled from a DataPool of defaults after initialisation.

    NOTE(review): because __init__ is defined here, @dataclass does not
    install a generated __init__ on this class itself; __post_init__ is
    presumably invoked by the dataclass-generated __init__ of concrete
    subclasses — confirm against the subclasses.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def __post_init__(self):
        """
        Set all variables to their default values.

        Any field still None after construction is replaced by the value of
        the same name from the data pool (see _get_data_pool).
        """
        for k, v in self.get_all().items():
            try:
                if v is None:
                    setattr(self, k, self.get_default(k))
            except AttributeError:
                # No default available for this field — fall back to None.
                setattr(self, k, None)

    @abstractmethod
    def _get_data_pool(self) -> DataPool:
        """Return the DataPool supplying default field values."""
        pass

    @classmethod
    def get_annotations(cls):
        """Get attributes with annotations - same as obj.__annotations__ but recursive"""
        annotations = {}
        # Walk the MRO; on duplicate keys, classes visited later (the bases)
        # overwrite entries from more derived classes.
        for c in cls.mro():
            try:
                annotations.update(**c.__annotations__)
            except AttributeError:
                # object, at least, has no __annotations__ attribute.
                pass
        return annotations

    def get_default(self, key, default=None) -> Any:
        """Look up `key` on the data pool, returning `default` when absent."""
        global_variables = self._get_data_pool()
        return getattr(global_variables, key, default)

    def get_copy(self):
        """Return a new instance of the same class with the current values."""
        return self.__class__(**self.get_all())

    def get_all(self) -> dict:
        """Return all dataclass fields as a dict (recursively converted)."""
        return asdict(self)

    def _set(self, key: str, value: Any):
        """Set `key` only when the attribute already exists; otherwise ignore."""
        if hasattr(self, key):
            self.__setattr__(key, value)

    def set_value(self, attr: str, new_val):
        """Assign `new_val` to field `attr` after coercing it to the field's type."""
        setattr(self, attr, self._get_correct_value(attr, new_val))

    def _get_correct_value(self, attr: str, new_val):
        """Get value in its correct (annotated) type.

        Raises AttributeError when `attr` is not a known attribute.
        """
        annotations = self.get_annotations()
        if not hasattr(self, attr):
            raise AttributeError(f"Can't find {attr} among {annotations}")

        _type = annotations[attr]
        if hasattr(_type, "__origin__"):
            # Parameterised generic (e.g. List[str]): wrap the value in the
            # generic's origin container, e.g. list([new_val]).
            return _type.__origin__([new_val])

        # str, int, float, bool, Path, and more
        return new_val if isinstance(new_val, _type) else _type(new_val)
ae8a201243a94cc44dd5cdf663f89bc62c36cf5a | 917 | py | Python | licensecheck/types.py | matthewdeanmartin/LicenseCheck | 54063d10d2033adc77fe12ddac6c0ced1a5e6502 | [
"MIT"
] | null | null | null | licensecheck/types.py | matthewdeanmartin/LicenseCheck | 54063d10d2033adc77fe12ddac6c0ced1a5e6502 | [
"MIT"
] | null | null | null | licensecheck/types.py | matthewdeanmartin/LicenseCheck | 54063d10d2033adc77fe12ddac6c0ced1a5e6502 | [
"MIT"
] | null | null | null | """PackageCompat type.
"""
from __future__ import annotations
import typing
from enum import Enum
class PackageInfo(typing.TypedDict):
    """PackageInfo type.

    Metadata describing a single installed package.
    """

    name: str  # distribution name
    version: str  # version string
    namever: str  # combined name/version identifier — TODO confirm exact format
    size: int  # package size — presumably bytes; confirm against the producer
    home_page: str  # project home page URL
    author: str  # author as reported by the package metadata
    license: str  # license name/classifier reported by the package
class PackageCompat(PackageInfo):
    """PackageCompat type.

    PackageInfo extended with the result of a license-compatibility check.
    """

    # Whether this package's license is considered compatible — set by the
    # compatibility checker (TODO confirm which caller populates it).
    license_compat: bool
class License(Enum):
    """License Enum to hold a set of potential licenses.

    Values are grouped in bands of ten by license category (public domain,
    permissive, LGPL, GPL, ...) — whether callers rely on these numeric
    ranges for comparisons is not visible here; confirm before reordering.
    """

    # Public domain
    PUBLIC = 0
    UNLICENSE = 1
    # Permissive GPL compatible
    MIT = 10
    BOOST = 11
    BSD = 12
    ISC = 13
    NCSA = 14
    PSFL = 15
    # Other permissive
    APACHE = 20
    ECLIPSE = 21
    ACADEMIC_FREE = 22
    # LGPL
    LGPL_X = 30
    LGPL_2 = 31
    LGPL_3 = 32
    LGPL_2_PLUS = 33
    LGPL_3_PLUS = 34
    # GPL
    GPL_X = 40
    GPL_2 = 41
    GPL_3 = 42
    GPL_2_PLUS = 43
    GPL_3_PLUS = 44
    # AGPL
    AGPL_3_PLUS = 50
    # Other copyleft
    MPL = 60
    EU = 61
    # PROPRIETARY
    PROPRIETARY = 190
    # No License
    NO_LICENSE = 200
| 14.107692 | 56 | 0.687023 |