text string | size int64 | token_count int64 |
|---|---|---|
#!/usr/bin/env python3
def calc(x):
    """Return term x of a linear recurrence modulo 2**32, memoized in the
    module-global `results` dict (seeded elsewhere with terms 0..4)."""
    if x not in results:
        results[x] = (calc(x - 1) - calc(x - 2) + calc(x - 3)
                      - calc(x - 4) + calc(x - 5) * 4660) % 2 ** 32
    return results[x]
# Seed the first five terms, then fill the table bottom-up so that calc()
# only ever recurses into already-cached values (depth 1, no stack blowup).
# (A trailing extraction artifact on the final print line was removed.)
results = {}
for i in range(5):
    results[i] = i ** 2 + 9029
for i in range(5, 100022):
    results[i] = calc(i)
print(results[100021])
#!/usr/bin/python3
# https://practice.geeksforgeeks.org/problems/top-k-numbers/0
def sol(arr, n, k):
    """Print the k most frequent elements of arr, most frequent first
    (ties broken by smaller value).

    arr -- input sequence of n elements
    n   -- length of arr (kept for the judge's signature; not read directly)
    k   -- how many top elements to print

    Maintains a ranking list `rl` of k+1 slots; the extra slot receives each
    new element so an existing result/subresult is never overwritten.
    (A trailing extraction artifact that broke the final line was removed;
    the algorithm is unchanged.)
    """
    f = {0: 0}          # frequency table; 0 pre-seeded so empty slots are lookup-safe
    rl = [0] * (k + 1)  # ranking list, most frequent first; slot k is scratch
    for x in arr:
        f[x] = f[x] + 1 if x in f else 1
        # Store the newest element last, i.e. at the least-frequent position.
        rl[k] = x
        # Find the position where x first occurs so only the elements
        # preceding it need adjusting; everything after stays unchanged.
        i = rl.index(x)
        i -= 1
        while i >= 0:
            if f[rl[i]] < f[rl[i + 1]]:
                # Left neighbour is less frequent: bubble x leftwards.
                rl[i], rl[i + 1] = rl[i + 1], rl[i]
            elif f[rl[i]] == f[rl[i + 1]] and rl[i] > rl[i + 1]:
                # Same frequency but larger value: smaller value ranks first.
                rl[i], rl[i + 1] = rl[i + 1], rl[i]
            else:
                # No point going further to the left.
                break
            i -= 1
        # (continue with next element)
    for r in rl[:k]:
        if not r:
            # Skip unused (still-zero) slots.
            continue
        print(r, end=" ")
    # Final newline, as the question's output format asks.
    print()
import os
import re
import argparse
import zipfile
import PyPDF2
from lxml import etree as ET
class PyMetaExtractor():
    """Walk a directory tree and print embedded metadata from Office
    (docx/xlsx/pptx) and PDF files."""

    # Extensions we know how to extract metadata from.
    ext = ['docx', 'xlsx', 'pptx', 'pdf']
    rexp = re.compile(r'.+\.({})$'.format('|'.join(ext)))

    def __init__(self, directory):
        self.directory = os.path.abspath(directory)
        print("[*] Starting to search from: [{}]".format(self.directory))

    def run(self):
        """Recursively scan self.directory and print metadata per match."""
        for cwd, lod, lof in os.walk(self.directory):
            for f in lof:
                m = self.rexp.match(f)
                if not m:
                    continue
                fullpath = os.path.join(cwd, f)
                try:
                    print('[*] {}'.format(fullpath))
                    if m.group(1) == 'pdf':
                        self.pdf(fullpath)
                    else:
                        self.openxml(fullpath)
                    print('')
                except Exception as e:
                    # Best effort: one unreadable/corrupt file must not stop
                    # the scan, but report it instead of failing silently
                    # (previously a bare `except:` swallowed everything,
                    # including KeyboardInterrupt).
                    print(' [!] skipped ({}): {}'.format(type(e).__name__, e))
                    continue

    def openxml(self, pathname):
        """Print core-properties metadata from an OOXML archive."""
        # Context manager closes the archive deterministically (the original
        # leaked the ZipFile handle).
        with zipfile.ZipFile(pathname, 'r') as zf:
            docprops = ET.fromstring(zf.read('docProps/core.xml'))
        for meta in docprops.findall('*'):
            # Strip the XML namespace prefix '{uri}tag' when present.
            if meta.tag[0] == '{':
                tag = meta.tag.split('}')[1].title()
            else:
                tag = meta.tag.title()
            value = meta.text
            print(' [+] {:15s} => {}'.format(tag, value))

    def pdf(self, pathname):
        """Print the document-info dictionary of a PDF."""
        # NOTE(review): PdfFileReader/getDocumentInfo are the legacy PyPDF2
        # (<3.0) API; migrating to PdfReader.metadata needs a dependency bump.
        reader = PyPDF2.PdfFileReader(pathname)
        meta = reader.getDocumentInfo()
        for key in meta:
            tag = key.lstrip('/')
            value = meta[key]
            print(' [+] {:15s} => {}'.format(tag, value))
if __name__ == '__main__':
    # Banner, then CLI parsing; the single positional argument is the
    # directory to scan. (A trailing extraction artifact on the final line
    # was removed; the banner's exact spacing was lost in the dump and is
    # reconstructed here.)
    print('''
 _______________________________________
   PyMeta version 1.0
   Author: Joff Thyer (c) 2020
   Black Hills Information Security
 _______________________________________
''')
    parser = argparse.ArgumentParser()
    parser.add_argument('directory', help='starting directory')
    args = parser.parse_args()
    pm = PyMetaExtractor(args.directory)
    pm.run()
#!/usr/bin/env python
# Copyright (c) 2015. Mark E. Madsen <mark@madsenlab.org>
#
# This work is licensed under the terms of the Apache Software License, Version 2.0. See the file LICENSE for details.
"""
Description here
"""
import logging as log
import unittest
import pytransmission.popgen.wright_fisher as wf
import os
import tempfile
log.basicConfig(level=log.DEBUG, format='%(asctime)s %(levelname)s: %(message)s')
class WrightFisherTest(unittest.TestCase):
    """Smoke test for the Wright-Fisher popgen helpers."""

    def test_mutation_from_theta(self):
        """Exercise wf_mutation_rate_from_theta over several theta values."""
        population_size = 100
        for theta_value in [0.25, 0.5, 1.0, 2.0, 3.0, 10.0]:
            wf.wf_mutation_rate_from_theta(population_size, theta_value)
        self.assertTrue(True, "Not a full test, always passes")
# Allow running this test module directly. (A trailing extraction artifact
# on the final line was removed.)
if __name__ == "__main__":
    unittest.main()
# Generated by Django 2.0.2 on 2018-02-17 05:48
from django.db import migrations
class Migration(migrations.Migration):
    """Alter Device model meta options: default ordering by MAC address plus
    three custom permissions."""

    dependencies = [
        ('T2API', '0001_initial'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='device',
            # NOTE(review): 'procduct' looks like a typo for 'product', but this
            # string is part of an applied migration -- correct it in a new
            # migration, never by editing this one.
            options={'ordering': ('mac',), 'permissions': (('show_status', 'Can show status vars'), ('register_device', 'Can register a device'), ('change_product', 'Can change associated procduct'))},
        ),
    ]
| 490 | 157 |
import pytest
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql as pg
from jessiql import QueryObjectDict
from jessiql.sainfo.version import SA_14
from jessiql.testing.insert import insert
from jessiql.testing.recreate_tables import created_tables
from jessiql.util import sacompat
from .util.models import IdManyFieldsMixin, id_manyfields
from .util.test_queries import typical_test_sql_query_text, typical_test_query_results, typical_test_query_text_and_results
# Each case: a JessiQL query-object dict -> the WHERE clause it must compile to.
@pytest.mark.parametrize(('query_object', 'expected_query_lines',), [
    # Empty
    (dict(filter=None), []),
    (dict(filter={}), []),
    # Shortcut equality
    (dict(filter={'a': 1}), ["WHERE a.a = 1"]),
    # Scalar Operators
    (dict(filter={'a': {'$eq': 1}}), ["WHERE a.a = 1"]),
    (dict(filter={'a': {'$ne': 1}}), ["WHERE a.a IS DISTINCT FROM 1"]),
    (dict(filter={'a': {'$lt': 1}}), ["WHERE a.a < 1"]),
    (dict(filter={'a': {'$lte': 1}}), ["WHERE a.a <= 1"]),
    (dict(filter={'a': {'$gte': 1}}), ["WHERE a.a >= 1"]),
    (dict(filter={'a': {'$gt': 1}}), ["WHERE a.a > 1"]),
    (dict(filter={'a': {'$prefix': 'ex-'}}), ["WHERE (a.a LIKE ex- || '%')"]),
    # SQLAlchemy 1.4 renders IN() with a post-compile placeholder; 1.3 inlines values.
    (dict(filter={'a': {'$in': (1, 2, 3)}}), ["WHERE a.a IN ([POSTCOMPILE_a_1])" if SA_14 else
                                              "WHERE a.a IN (1, 2, 3)"]),
    (dict(filter={'a': {'$nin': (1, 2, 3)}}), ["WHERE (a.a NOT IN ([POSTCOMPILE_a_1]))" if SA_14 else
                                               "WHERE a.a NOT IN (1, 2, 3)"]),
    (dict(filter={'a': {'$exists': 0}}), ["WHERE a.a IS NULL"]),
    (dict(filter={'a': {'$exists': 1}}), ["WHERE a.a IS NOT NULL"]),
    # Multiple scalar comparisons
    (dict(filter={'a': 1, 'b': 2}), ["WHERE a.a = 1 AND a.b = 2"]),
    (dict(filter={'a': {'$gt': 1, '$ne': 10}}), ["WHERE a.a > 1 AND a.a IS DISTINCT FROM 10"]),
    # Array operators, scalar operand
    (dict(filter={'tags': {'$eq': 'a'}}), ["WHERE a = ANY (a.tags)"]),
    (dict(filter={'tags': {'$ne': 'a'}}), ["WHERE a != ALL (a.tags)"]),
    (dict(filter={'tags': {'$exists': 1}}), ["WHERE a.tags IS NOT NULL"]),
    (dict(filter={'tags': {'$size': 0}}), ["WHERE array_length(a.tags, 1) IS NULL"]),
    (dict(filter={'tags': {'$size': 1}}), ["WHERE array_length(a.tags, 1) = 1"]),
    # Array operators, array operand
    (dict(filter={'tags': {'$eq': ['a', 'b', 'c']}}), ["WHERE a.tags = CAST(ARRAY[a, b, c] AS VARCHAR[])"]),
    (dict(filter={'tags': {'$ne': ['a', 'b', 'c']}}), ["WHERE a.tags != CAST(ARRAY[a, b, c] AS VARCHAR[])"]),
    (dict(filter={'tags': {'$in': ['a', 'b', 'c']}}), ["WHERE a.tags && CAST(ARRAY[a, b, c] AS VARCHAR[])"]),
    (dict(filter={'tags': {'$nin': ['a', 'b', 'c']}}), ["WHERE NOT a.tags && CAST(ARRAY[a, b, c] AS VARCHAR[])"]),
    (dict(filter={'tags': {'$all': ['a', 'b', 'c']}}), ["WHERE a.tags @> CAST(ARRAY[a, b, c] AS VARCHAR[])"]),
    # Comparison with a JSON value
    # It is important to cast it to a correct value
    (dict(filter={'j.user.name': 'kolypto'}), ["WHERE CAST((a.j #>> ('user', 'name')) AS TEXT) = kolypto"]),
    (dict(filter={'j.user.name': 10}), ["WHERE CAST((a.j #>> ('user', 'name')) AS INTEGER) = 10"]),
    (dict(filter={'j.user.name': True}), ["WHERE CAST((a.j #>> ('user', 'name')) AS BOOLEAN) = true"]),
    (dict(filter={'j.user.name': None}), ["WHERE CAST((a.j #>> ('user', 'name')) AS TEXT) IS NULL"]),
])
def test_filter_sql(connection: sa.engine.Connection, query_object: QueryObjectDict, expected_query_lines: list[str]):
    """ Typical test: what SQL is generated """
    # Models
    Base = sacompat.declarative_base()

    class Model(IdManyFieldsMixin, Base):
        __tablename__ = 'a'
        # This Postgres-specific implementation has .contains() and .overlaps() implementations
        tags = sa.Column(pg.ARRAY(sa.String))

    # Test
    typical_test_sql_query_text(query_object, Model, expected_query_lines)
# Each case: a JessiQL query-object dict -> the rows it must return from real data.
@pytest.mark.parametrize(('query_object', 'expected_results'), [
    # Empty input
    (dict(), [{'id': n} for n in (1, 2, 3)]),
    # Filter by column
    (dict(filter={'a': 'not-found'}), []),
    (dict(filter={'a': 'm-1-a'}), [{'id': 1}]),
    # Filter by JSON value
    (dict(filter={'j.m': '1-j'}), [{'id': 1}]),
])
def test_filter_results(connection: sa.engine.Connection, query_object: QueryObjectDict, expected_results: list[dict]):
    """ Typical test: real data, real query, real results """
    # Models
    Base = sacompat.declarative_base()

    class Model(IdManyFieldsMixin, Base):
        __tablename__ = 'a'

    # Data
    with created_tables(connection, Base):
        # Insert some rows
        insert(connection, Model, [
            id_manyfields('m', 1),
            id_manyfields('m', 2),
            id_manyfields('m', 3),
        ])

        # Test
        typical_test_query_results(connection, query_object, Model, expected_results)
# JOIN case: filter applied to a nested relation; checks both SQL and results.
@pytest.mark.parametrize(('query_object', 'expected_query_lines', 'expected_results'), [
    # Simple filter: column equality
    (dict(select=[{'articles': dict(filter={'id': 3})}]), [
        'FROM u',
        'FROM a',
        # joined query includes: filter condition AND join condition
        'WHERE a.user_id IN ([POSTCOMPILE_primary_keys]) AND a.id = 3' if SA_14 else
        'WHERE a.user_id IN ([EXPANDING_primary_keys]) AND a.id = 3'
    ], [
        {'id': 1, 'articles': [
            {'id': 3, 'user_id': 1},
            # no more rows
        ]}
    ]),
])
def test_joined_filter(connection: sa.engine.Connection, query_object: QueryObjectDict, expected_query_lines: list[str], expected_results: list[dict]):
    """ Typical test: JOINs, SQL and results """
    # Models
    Base = sacompat.declarative_base()

    class User(IdManyFieldsMixin, Base):
        __tablename__ = 'u'
        articles = sa.orm.relationship('Article', back_populates='author')

    class Article(IdManyFieldsMixin, Base):
        __tablename__ = 'a'
        user_id = sa.Column(sa.ForeignKey(User.id))
        author = sa.orm.relationship(User, back_populates='articles')

    # Data
    with created_tables(connection, Base):
        # Insert some rows
        insert(connection, User, [
            id_manyfields('u', 1),
        ])
        insert(connection, Article, [
            id_manyfields('a', 1, user_id=1),
            id_manyfields('a', 2, user_id=1),
            id_manyfields('a', 3, user_id=1),
        ])

        # Test
        typical_test_query_text_and_results(connection, query_object, User, expected_query_lines, expected_results)
| 6,497 | 2,384 |
#!/usr/bin/env python
# encoding: utf-8
from numpy.distutils.core import setup, Extension
# Fortran/C extension: the Gaussian wake model plus its AD-generated forward
# (_dv) and reverse (_bv) derivative sources and the AD runtime support files.
# (A trailing extraction artifact after the closing paren of setup() was
# removed; the configuration itself is unchanged.)
module1 = Extension('_porteagel_fortran', sources=['src/gaussianwake/gaussianwake.f90',
                                                   'src/gaussianwake/gaussianwake_bv.f90',
                                                   'src/gaussianwake/gaussianwake_dv.f90',
                                                   'src/gaussianwake/adStack.c',
                                                   'src/gaussianwake/adBuffer.f'],
                    # 'src/gaussianwake/lib_array.f90'],
                    extra_compile_args=['-O2', '-c'])

setup(
    name='GaussianWake',
    version='0.0.1',
    description='Gaussian wake model published by Bastankhah and Porte Agel 2016',
    install_requires=['openmdao>=1.7.3'],
    package_dir={'': 'src'},
    ext_modules=[module1],
    dependency_links=['http://github.com/OpenMDAO/OpenMDAO.git@master'],
    packages=['gaussianwake'],
    license='Apache License, Version 2.0',
)
import uuid
import threading
from datetime import datetime
from .execution import RunExecution
class RunBackend(object):
    """Create, terminate, delete and query runs stored in the workspace DB."""

    def __init__(self, workspace):
        self.workspace = workspace
        self.db = workspace.db
        self.storage = workspace.storage

    def create_run(self, specification):
        """Persist a new run record and start executing it on a background thread.

        Returns the run dict (status 'created') immediately; the RunExecution
        thread updates the record as it progresses.
        """
        run_id = str(uuid.uuid4())
        run = {
            "run_id": run_id,
            "status": "created",
            "created": datetime.utcnow(),
            "specification": specification,
        }
        self.db.create_run(run)

        run_execution = RunExecution(self.workspace, run_id)
        run_execution_thread = threading.Thread(
            target=run_execution.run, name=f"RunExecution {run_id}"
        )
        run_execution_thread.start()
        return run

    def terminate_run(self, run_id):
        """Mark a run terminated, unless it already finished or was terminated."""
        run = self.db.get_run(run_id)
        if run["status"] in ("terminated", "run finished"):
            return
        run["status"] = "terminated"
        # BUGFIX: use utcnow() so 'terminated' shares the same (UTC) basis as
        # the 'created' timestamp; previously this was naive local now().
        run["terminated"] = datetime.utcnow()
        self.db.update_run(run)

    def delete_run(self, run_id):
        """Terminate a run, then remove its DB record, logs and code."""
        self.terminate_run(run_id)
        self.db.delete_run(run_id)
        self.storage.delete_logs(run_id)
        self.storage.delete_code(run_id)

    def get_run(self, run_id):
        return self.db.get_run(run_id)

    def get_run_ids(self):
        return self.db.get_run_ids()

    def get_all_runs(self):
        return self.db.get_all_runs()
| 1,473 | 469 |
def addAll(n1, n2):
    """Return the sum of all integers from n1 to n2, inclusive.

    Uses the built-in sum over a range instead of a manual accumulator loop.
    Returns 0 when n1 > n2 (empty range), matching the original behavior.
    """
    return sum(range(n1, n2 + 1))
# Demo: prints 55, the sum of 1..10 inclusive.
print(addAll(1,10))
| 122 | 54 |
from __future__ import unicode_literals
import erpnext.education.utils as utils
import frappe
no_cache = 1
def get_context(context):
    """Populate the LMS portal page context; redirect to '/' when LMS is disabled."""
    context.education_settings = frappe.get_single("Education Settings")
    if not context.education_settings.enable_lms:
        # LMS feature is off: send the visitor to the site root instead.
        frappe.local.flags.redirect_location = '/'
        raise frappe.Redirect
    context.featured_programs = get_featured_programs()
def get_featured_programs():
    """Return the featured portal programs for the LMS landing page.

    (A trailing extraction artifact that broke the return line was removed.)
    """
    return utils.get_portal_programs()
"""Predict labels with lightgbm models"""
import os
import sys
import json
import pandas as pd
import lightgbm as lgb
from pathlib import Path
import competition as cc
from common import stop_watch, predict_chunk
# For osx
os.environ['KMP_DUPLICATE_LIB_OK'] = "True"
@stop_watch
def predict_with_lightgbm():
    """Average per-fold LightGBM predictions over test.csv and write a submission.

    sys.argv[1] names both the model directory (one "*.model" file per CV
    fold) and the config-file prefix; fold predictions are averaged equally.
    """
    model_directory_path = cc.MODEL_PATH / sys.argv[1]
    model_path_list = sorted(list(model_directory_path.glob("*.model")))
    config_file = list(cc.CONFIG_PATH.glob(sys.argv[1] + "*.json"))[0]
    with config_file.open() as f:
        params = json.load(f)
        params = params["Predict"]
        if params["Version"] != cc.PREF:
            # NOTE(review): `assert False` is stripped under python -O; an
            # explicit raise would be more robust -- left unchanged here.
            assert False
    preds = None
    predict_df = None
    test_csv_path = Path(cc.VALIDATION_PATH / sys.argv[1] / "test.csv")
    test_X = pd.read_csv(test_csv_path)
    test_X.reset_index(inplace=True)
    for fold, model_path in enumerate(model_path_list):
        print("=== [Predict] fold{} starts!! ===".format(fold))
        model = lgb.Booster(model_file=str(model_path))
        if predict_df is None:
            # First fold: remember row indices, then index test data by them.
            predict_df = test_X["index"]
            test_X = test_X.set_index("index")
        if preds is None:
            preds = predict_chunk(model, test_X) / len(model_path_list)
        else:
            preds += predict_chunk(model, test_X) / len(model_path_list)
    # Assemble the submission: seg_id from the sample file, averaged predictions.
    sample_df = pd.read_csv(cc.SAMPLE_SUBMISSION_CSV_PATH)
    predict_df = pd.DataFrame(predict_df)
    predict_df["seg_id"] = sample_df["seg_id"]
    predict_df["time_to_failure"] = preds
    del predict_df["index"]
    Path.mkdir(cc.SUBMIT_PATH, exist_ok=True, parents=True)
    predict_df.to_csv(cc.SUBMIT_PATH / "{}.csv".format(sys.argv[1]), index=False)
# Script entry point.
if __name__ == "__main__":
    predict_with_lightgbm()
| 1,748 | 620 |
from nose.tools import *
import tests
from tests import files
from catstuff.core_plugins.tasks.checksum.main import *
# checksum() must reject unknown/unsupported method identifiers.
@raises(NotImplementedError)
def test_unknown_method_123():
    checksum(files.empty_file, method=123)

@raises(NotImplementedError)
def test_unknown_method_None():
    checksum(files.empty_file, method=None)

def test_filled_sha1():
    # Known SHA-1 digest of the "filled" fixture file.
    actual = 'e904e143809b8ee161abdc55455bd5ff7773b5d7'.lower()
    result = checksum(files.filled_file, method='sha1').lower()
    assert_equal(result, actual)

def test_empty_sha1():
    # SHA-1 of empty input.
    actual = 'da39a3ee5e6b4b0d3255bfef95601890afd80709'.lower()
    result = checksum(files.empty_file, method='sha1').lower()
    assert_equal(result, actual)
class Library:
    """Base for per-library checksum test mixins.

    Expected digests are attached as instance attributes from the `actuals`
    mapping; subclass test methods call test_method() with a method name.
    """
    skip_methods = set()  # method names to skip silently

    def __init__(self, file, actuals={}):
        # NOTE(review): mutable default argument -- harmless here since it is
        # only read, but a None-sentinel would be safer.
        self.file = file
        for actual in actuals:
            setattr(self, actual, actuals[actual])

    @nottest
    def test_method(self, method):
        # Compare checksum(file, method) against the stored expected digest;
        # fail if no expected value was provided for this method.
        actual = getattr(self, method, None)
        if method in self.skip_methods:
            return
        if actual is None:
            ok_(False, 'Checksum could not be verified')
        result = checksum(self.file, method=method)
        assert_equal(result.lower(), actual.lower())
class Zlib(Library):
    """Checksum tests for the zlib-backed rolling checksums."""

    def test_crc32(self):
        self.test_method('crc32')

    def test_adler32(self):
        self.test_method('adler32')
class Hashlib(Library):
    """Checksum tests for every hashlib-backed digest method."""
    skip_methods = {'blake2b', 'blake2s'}  # cannot verify these right now

    def test_sha1(self):
        self.test_method('sha1')

    def test_sha256(self):
        self.test_method('sha256')

    def test_sha224(self):
        self.test_method('sha224')

    def test_sha384(self):
        self.test_method('sha384')

    def test_sha512(self):
        self.test_method('sha512')

    def test_blake2b(self):
        self.test_method('blake2b')

    def test_blake2s(self):
        self.test_method('blake2s')

    def test_md5(self):
        self.test_method('md5')

    def test_sha3_224(self):
        self.test_method('sha3_224')

    def test_sha3_256(self):
        self.test_method('sha3_256')

    def test_sha3_384(self):
        self.test_method('sha3_384')

    def test_sha3_512(self):
        self.test_method('sha3_512')

    def test_shake_128(self):
        self.test_method('shake_128')

    def test_shake_256(self):
        self.test_method('shake_256')
class TestFilled(Hashlib, Zlib):
    """Verify every supported digest of the 'filled' fixture against known values."""
    actuals = {
        'sha1' : 'e904e143809b8ee161abdc55455bd5ff7773b5d7',
        'sha224' : '0e06806c50baf975474d31dbd74eb394aee928c034c191d91dfc65dc',
        'sha256' : '0191f9186550a8e8e0b3c4112757da13d1434d20f1ca7c3a1b2d1e4119301951',
        'sha384' : 'c2e19786394a1924fa9086339581a22e7946d0b7834ec55750b32fe59b49c6748f64ecab20561722ff6ca2ac7a60ef39',
        'sha512' :
            '52936535a745013a6f1f2721f511f81530255569bf0682d50f30b9ce67c3ec5745b1bfe72142540f5f2d629984f1638ed8d6c0e8f86da57975dc202fa320d528',
        # 'blake2b': '123',
        # 'blake2s': '0168617AC4EBBD41BD461991C4F28871457F3ECBFBSDllF4363E4AA81EFC43AF',
        'md5' : 'a5ed84a3e65b4cdb086fc9d4aa0c9a45',
        'sha3_224' : 'a942c6d8d4103b136ff0b606a8095fad16b3b6bce76e78227c3df14f',
        'sha3_256' : '38920b162fd995c3325b0b5b96e5b0068c05727de6983941b8fe89de9e195d28',
        'sha3_384' : '49a2ea29ad16820a0c2845904f10a89d9dcade25786d97df4b245b58f7963b1cb5bb88df77d7095405455d9452b95564',
        'sha3_512' :
            'c34069059313d61a3030479ad9bff8cfc4308322840e8376dc9c6117bb3e39e47217bec789e1cfd8ebac61f059174352722c870b24b6b2800ed635f43e1ef285',
        'shake_128': '0cf15a6a82525b3abd3cf74a99d8302fd44adb70333ff4db932080643591f4eea92ec63edcda1fb319f504bbcdf53014e5e7abfa6feb59060332b0a484775efa',
        'shake_256': '4f35797528ece0c72d552b53eccc3e8c6a0ea3c1751008404b276978c572bd4e6fb1d2a06fd65c25291cf06855699213d4adce7d8702ed4f1b20a7c48b3fbcf1',
        'crc32': 'f11b4ef9',
        'adler32': '2bb91434'
    }

    def __init__(self):
        Library.__init__(self, files.filled_file, actuals=self.actuals)
class TestPlugin(tests.classes.CSDBBaseTest):
    """Plugin-level tests for the Checksum task."""

    def setup(self):
        self.obj = Checksum()

    def test_main(self):
        pass  # TODO: test this
# Copyright 2021
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.urls import path
#
from archive.views import (
delete_collection,
delete_resource,
download,
edit_collection,
edit_resource,
home,
new_collection,
new_resource,
preview,
search,
view_collection,
view_resource,
)
# URL namespace for this Django app.
app_name = "archive"

urlpatterns = [
    path("", home, name="home"),
    path("search", search, name="search"),
    # NOTE(review): these routes join '<path:...>' directly onto the prefix
    # with no separator, which suggests the captured path is expected to begin
    # with '/' -- confirm against the views.
    path("resource<path:path>", view_resource, name="resource_view"),
    path("resource", view_resource, name="resource_view"),
    path("new/collection<path:parent>", new_collection, name="new_collection"),
    path("edit/collection<path:path>", edit_collection, name="edit_collection"),
    path("delete/collection<path:path>", delete_collection, name="delete_collection"),
    path("new/resource<path:parent>", new_resource, name="new_resource"),
    path("edit/resource<path:path>", edit_resource, name="edit_resource"),
    path("delete/resource<path:path>", delete_resource, name="delete_resource"),
    path("view<path:path>", view_collection, name="view"),
    path("view", view_collection, name="view"),
    path("download<path:path>", download, name="download"),
    path("preview<path:path>", preview, name="preview"),
]
| 1,782 | 522 |
#!/usr/bin/env python3
"""
Recover the tree relationship from a howde tree hierarchy file.
"""
from sys import argv,stdin,stdout,stderr,exit
from howde_tree_parse import read_howde_tree_file
def usage(s=None):
    """Exit the program, printing the usage message (prefixed by error s, if given)."""
    message = """
usage: cat howde_tree_file | recover_howde_tree [options]
  --show=preorder       list the tree in pre-order
                        (this is the default)
  --show=postorder      list the tree in post-order
  --show=leafgroups     list all leaf groups
  --show=height         for each node, list max distance to a leaf, and number
                        of descendants
  --show:subtree=<node> create a listing file for a node and its descendants
  --filespec=<spec>     spec describing how node names are to be output; for
                        example /usr/nwu253/howdesbt/compressed/{name}.rrr.bf"""
    if (s == None): exit (message)
    else:           exit ("%s\n%s" % (s,message))
def main():
    """Read a howde-tree forest from stdin and emit the requested listing."""
    # parse the command line
    showWhat = "pre order"
    fileSpec = None

    for arg in argv[1:]:
        if ("=" in arg):
            argVal = arg.split("=",1)[1]
        if (arg in ["--show=preorder","--show=pre"]):
            showWhat = "pre order"
        elif (arg in ["--show=postorder","--show=post"]):
            showWhat = "post order"
        elif (arg == "--show=leafgroups"):
            showWhat = "leaf groups"
        elif (arg == "--show=height"):
            showWhat = "height etc"
        elif (arg.startswith("--show:subtree=")) or (arg.startswith("--subtree=")):
            showWhat = "subtree"
            # nodeName is only consumed on the "subtree" branch below.
            nodeName = argVal
        elif (arg.startswith("--filespec=")):
            if ("{name}" not in argVal):
                usage("filespec MUST contain {name}\n(in \"%s\"" % arg)
            fileSpec = argVal
        elif (arg.startswith("--")):
            usage("unrecognized option: %s" % arg)
        else:
            usage("unrecognized option: %s" % arg)

    # process the tree
    forest = read_howde_tree_file(stdin)
    assert (len(forest) != 0), "input has no tree"

    for tree in forest:
        if (showWhat == "pre order"):
            tree.list_pre_order()
        elif (showWhat == "post order"):
            tree.list_post_order()
        elif (showWhat == "leaf groups"):
            tree.list_leaf_groups()
        elif (showWhat == "height etc"):
            tree.compute_height_etc()
            tree.list_height_etc()
        elif (showWhat == "subtree"):
            # Look the node up by name and list only its subtree.
            nameToNode = tree.build_dict()
            assert (nodeName in nameToNode), \
                "unknown node: \"%s\"" % nodeName
            subtree = nameToNode[nodeName]
            subtree.list_pre_order(fileSpec=fileSpec)
        else:
            assert (False), \
                "internal error: unknown operation \"%s\"" % showWhat
# Script entry point.
if __name__ == "__main__": main()
| 2,528 | 934 |
import json
import sys
import tkinter
from config import *
from imagefy import *
from twitter import *
# One-time setup: replace the Twitter stream's filter rules, build the GUI,
# then fall through to the streaming loop below.

# Get old rules
old_rules = get_rules()
print('Old rules received.')

# Delete old rules
delete_response = delete_rules(old_rules)
print('Old rules deleted.')

# Generate new rules
query_rules = generate_rules()
print(str(len(query_rules)) + ' new rules generated.')

# Set new rules
set_response = set_rules(query_rules)
created_rules = str(set_response['meta']['summary']['created'])
print(created_rules + ' new rules set.')

# Initialize the GUI
master, frame, canvas = init_gui()
print('GUI Initialized')

# Start the stream
print()
print('Stream starting...')
print('Pause-Resume with CTRL+C, Exit with ESC')
print()

# Initialize tweet counter
cnt = 0

# Lists to hold image and canvas objects
images = []
canvas_images = []
# Streaming loop: fetch the filtered stream, render each tweet onto the
# canvas; CTRL+C pauses, a second CTRL+C resumes (outer while reconnects).
while(True):
    response = requests.get(url_stream, auth=bearer_oauth, params=query_params, stream=True)
    if response.status_code != 200:
        raise Exception("Cannot get stream (HTTP {}): {}".format(response.status_code, response.text))
    try:
        for response_line in response.iter_lines():
            if response_line:
                # Grab tweet
                tweet = json.loads(response_line)
                # Grab resulting image & priority flag
                image = draw_tweet(tweet)
                if image == -1: exit()
                # Increment & print counters
                cnt += 1
                print('Tweet received, {} total tweets'.format(cnt))
                # Add resulting image object to image list
                images.append(image)
                # If canvas fits more tweets, resize it
                if (int(canvas.cget('height')) < min_tweet_height * max_onscreen_tweets):
                    frame.config(width=int(canvas.cget('width')), height=int(canvas.cget('height')) + image.height() + omargins[2])
                    canvas.config(width=int(canvas.cget('width')), height=int(canvas.cget('height')) + image.height() + omargins[2])
                # Move every existing tweet down to make room for the new one
                for canvas_image in canvas_images:
                    canvas.move(canvas_image, 0, image.height() + omargins[2])
                # If onscreen tweet limit exceeded, delete oldest tweet
                # NOTE(review): this checks max_tweets while the resize above
                # uses max_onscreen_tweets -- confirm both are intended.
                if len(canvas_images) > max_tweets:
                    canvas.delete(canvas_images[0])
                    canvas_images.pop(0)
                    images.pop(0)
                # Paste new tweet at the top
                canvas_images.append(canvas.create_image(omargins[3], omargins[1], anchor=tkinter.NW, image=image))
                canvas.update_idletasks()
                canvas.update()
    # Catch CTRL-C interrupt: pause, then resume on the next CTRL-C
    except KeyboardInterrupt:
        print('CTRL+C detected, stream stopped.')   # fixed typo: was 'TRL+C'
        print('Press CTRL+C again to resume.')
        while(True):
            try:
                canvas.update()
            except KeyboardInterrupt:
                print('CTRL+C detected, resuming stream..')
                print()
                break   # BUGFIX: without this break the pause loop never exits
| 2,641 | 963 |
import pickle
import sys
import unittest2
import mitogen.core
import testlib
import plain_old_module
class ConstructorTest(testlib.TestCase):
    """Tests for the different ways mitogen.core.CallError can be constructed."""
    klass = mitogen.core.CallError

    def test_string_noargs(self):
        # A bare format string with no args is stored verbatim, as text.
        e = self.klass('%s%s')
        self.assertEquals(e.args[0], '%s%s')
        self.assertTrue(isinstance(e.args[0], mitogen.core.UnicodeType))

    def test_string_args(self):
        # With args, the format string is %-interpolated.
        e = self.klass('%s%s', 1, 1)
        self.assertEquals(e.args[0], '11')
        self.assertTrue(isinstance(e.args[0], mitogen.core.UnicodeType))

    def test_from_exc(self):
        # Wrapping an exception records its qualified name and message.
        ve = plain_old_module.MyError('eek')
        e = self.klass(ve)
        self.assertEquals(e.args[0], 'plain_old_module.MyError: eek')
        self.assertTrue(isinstance(e.args[0], mitogen.core.UnicodeType))

    def test_form_base_exc(self):
        # Same for exceptions deriving from BaseException (not Exception).
        ve = SystemExit('eek')
        e = self.klass(ve)
        cls = ve.__class__
        self.assertEquals(e.args[0],
                          # varies across 2/3.
                          '%s.%s: eek' % (cls.__module__, cls.__name__))
        self.assertTrue(isinstance(e.args[0], mitogen.core.UnicodeType))

    def test_from_exc_tb(self):
        # Wrapping a live exception also embeds its traceback text.
        try:
            raise plain_old_module.MyError('eek')
        except plain_old_module.MyError:
            ve = sys.exc_info()[1]
        e = self.klass(ve)
        self.assertTrue(e.args[0].startswith('plain_old_module.MyError: eek'))
        self.assertTrue(isinstance(e.args[0], mitogen.core.UnicodeType))
        self.assertTrue('test_from_exc_tb' in e.args[0])

    def test_bytestring_conversion(self):
        # Byte strings are decoded to text.
        e = self.klass(mitogen.core.b('bytes'))
        self.assertEquals(u'bytes', e.args[0])
        self.assertTrue(isinstance(e.args[0], mitogen.core.UnicodeType))

    def test_reduce(self):
        # Pickling round-trips through _unpickle_call_error.
        e = self.klass('eek')
        func, (arg,) = e.__reduce__()
        self.assertTrue(func is mitogen.core._unpickle_call_error)
        self.assertEquals(arg, e.args[0])
class UnpickleCallErrorTest(testlib.TestCase):
    """Tests for the safety checks in mitogen.core._unpickle_call_error()."""
    func = staticmethod(mitogen.core._unpickle_call_error)

    def test_not_unicode(self):
        # Non-text payloads are rejected.
        self.assertRaises(TypeError,
            lambda: self.func(mitogen.core.b('bad')))

    def test_oversized(self):
        # A 10001-byte payload is rejected (presumably a 10000-char limit).
        self.assertRaises(TypeError,
            lambda: self.func(mitogen.core.b('b'*10001)))

    def test_reify(self):
        # A valid text message reconstitutes a one-arg CallError.
        e = self.func(u'some error')
        self.assertEquals(mitogen.core.CallError, e.__class__)
        self.assertEquals(1, len(e.args))
        self.assertEquals(mitogen.core.UnicodeType, type(e.args[0]))
        self.assertEquals(u'some error', e.args[0])
class PickleTest(testlib.TestCase):
    """CallError instances must survive a pickle round-trip intact."""
    klass = mitogen.core.CallError

    def test_string_noargs(self):
        e = self.klass('%s%s')
        e2 = pickle.loads(pickle.dumps(e))
        self.assertEquals(e2.args[0], '%s%s')

    def test_string_args(self):
        e = self.klass('%s%s', 1, 1)
        e2 = pickle.loads(pickle.dumps(e))
        self.assertEquals(e2.args[0], '11')

    def test_from_exc(self):
        ve = plain_old_module.MyError('eek')
        e = self.klass(ve)
        e2 = pickle.loads(pickle.dumps(e))
        self.assertEquals(e2.args[0], 'plain_old_module.MyError: eek')

    def test_from_exc_tb(self):
        # The embedded traceback text must survive the round-trip too.
        try:
            raise plain_old_module.MyError('eek')
        except plain_old_module.MyError:
            ve = sys.exc_info()[1]
        e = self.klass(ve)
        e2 = pickle.loads(pickle.dumps(e))
        self.assertTrue(e2.args[0].startswith('plain_old_module.MyError: eek'))
        self.assertTrue('test_from_exc_tb' in e2.args[0])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest2.main()
| 3,632 | 1,342 |
from mpsp import FLIGHT, GROUNDTEST
from mpsp.mpsp import MPSP
import pyb
# Boot-time mode selection on the pyboard: LEDs 1 and 3 light for a
# 4-second window; the user switch state then picks the run mode.
switch = pyb.Switch()
pyb.LED(3).on()
pyb.LED(1).on()
pyb.delay(4000)
pyb.LED(3).off()
pyb.LED(1).off()
# Switch truthy -> FLIGHT, otherwise GROUNDTEST.
if switch():
    mode = FLIGHT
else:
    mode = GROUNDTEST
pyb.delay(1000)
# Start the MPSP main loop in the chosen mode.
m = MPSP(mode)
m.init()
m.run()
| 288 | 146 |
import os
from pathlib import Path
from continual_rl.experiments.experiment import Experiment
from continual_rl.experiments.tasks.image_task import ImageTask
from continual_rl.policies.discrete_random.discrete_random_policy_config import DiscreteRandomPolicyConfig
from continual_rl.policies.discrete_random.discrete_random_policy import DiscreteRandomPolicy
class TestDiscreteRandomPolicy(object):
    """End-to-end (not unit) sanity runs of DiscreteRandomPolicy + ImageTask.

    The batch and sync variants only differ in num_parallel_envs and their
    output subfolder, so the shared flow lives in _run_end_to_end().
    """

    def _run_end_to_end(self, request, task_id, subdir, num_parallel_envs):
        """Run one very short experiment and assert its log file was created.

        num_parallel_envs -- int for a batched run, None for a synchronous run.
        """
        # Arrange
        experiment = Experiment(tasks=[
            ImageTask(task_id=task_id, action_space_id=0,
                      env_spec='BreakoutDeterministic-v4',
                      num_timesteps=10, time_batch_size=4, eval_mode=False,
                      image_size=[84, 84], grayscale=True)
        ])
        config = DiscreteRandomPolicyConfig()
        config.num_parallel_envs = num_parallel_envs

        # Make a subfolder of the output directory that only this experiment
        # is using, to avoid conflict.
        output_dir = Path(request.node.experiment_output_dir, subdir)
        os.makedirs(output_dir)
        experiment.set_output_dir(output_dir)
        config.set_output_dir(output_dir)
        policy = DiscreteRandomPolicy(config, experiment.observation_space, experiment.action_spaces)

        # Act
        experiment.try_run(policy, summary_writer=None)

        # Assert
        assert Path(policy._config.output_dir, "core_process.log").is_file(), "Log file not created"

    def test_end_to_end_batch(self, set_tmp_directory, cleanup_experiment, request):
        """Batched variant: num_parallel_envs=2."""
        self._run_end_to_end(request, task_id="some_id",
                             subdir="discrete_random_batch", num_parallel_envs=2)

    def test_end_to_end_sync(self, set_tmp_directory, cleanup_experiment, request):
        """Synchronous variant: num_parallel_envs=None."""
        self._run_end_to_end(request, task_id="end_to_end_sync",
                             subdir="discrete_random_sync", num_parallel_envs=None)
| 3,027 | 874 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '../GUI Files/ui_main.ui'
#
# Created by: PyQt5 UI code generator 5.15.1
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
import resources
class Ui_MainWindow(object):
    def setupUi(self, MainWindow):
        """Build the complete widget tree for the main window.

        Auto-generated by pyuic5 from '../GUI Files/ui_main.ui' — do not
        hand-edit behavior here; regenerate from the .ui file instead.
        Creates: a custom top bar (toggle area, title, minimize/close
        buttons), a fixed-width left icon menu, and a QStackedWidget with
        four pages (live view, reports table, sign-in form, profile).
        """
        # --- Window identity, size bounds, and dark base palette ---
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(1507, 849)
        MainWindow.setMinimumSize(QtCore.QSize(1280, 720))
        MainWindow.setMaximumSize(QtCore.QSize(1920, 1080))
        # Dark-grey (45,45,45) Button/Base/Window with white ButtonText,
        # repeated for Active/Inactive/Disabled color groups (Disabled
        # ButtonText is a dimmer 120,120,120).
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(45, 45, 45))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(45, 45, 45))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(45, 45, 45))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(45, 45, 45))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(45, 45, 45))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(45, 45, 45))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(45, 45, 45))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(45, 45, 45))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(45, 45, 45))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
        MainWindow.setPalette(palette)
        MainWindow.setStyleSheet("background-color: rgb(45, 45, 45);")
        # --- Central widget with a zero-margin vertical layout:
        #     Top_Bar on top, Content below ---
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setSpacing(0)
        self.verticalLayout.setObjectName("verticalLayout")
        # --- Top bar (max height 40): toggle frame, title label, taskbar
        #     (minimize) button, quit button ---
        self.Top_Bar = QtWidgets.QFrame(self.centralwidget)
        self.Top_Bar.setMaximumSize(QtCore.QSize(16777215, 40))
        self.Top_Bar.setStyleSheet("background-color: rgb(35, 35, 35);")
        self.Top_Bar.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.Top_Bar.setFrameShadow(QtWidgets.QFrame.Raised)
        self.Top_Bar.setObjectName("Top_Bar")
        self.horizontalLayout = QtWidgets.QHBoxLayout(self.Top_Bar)
        self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout.setSpacing(0)
        self.horizontalLayout.setObjectName("horizontalLayout")
        # Accent-colored 70x40 frame at the top-left (sits above the left menu).
        self.frame_toggle = QtWidgets.QFrame(self.Top_Bar)
        self.frame_toggle.setMaximumSize(QtCore.QSize(70, 40))
        self.frame_toggle.setStyleSheet("background-color:#7289DA;")
        self.frame_toggle.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_toggle.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_toggle.setObjectName("frame_toggle")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.frame_toggle)
        self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_2.setSpacing(0)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.horizontalLayout.addWidget(self.frame_toggle)
        self.frame_top = QtWidgets.QFrame(self.Top_Bar)
        self.frame_top.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_top.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_top.setObjectName("frame_top")
        self.horizontalLayout.addWidget(self.frame_top)
        # Expanding spacers on both sides keep the title label centered.
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        self.label_3 = QtWidgets.QLabel(self.Top_Bar)
        self.label_3.setStyleSheet("color: #FFFFFF;\n"
        "font-weight: bold")
        self.label_3.setObjectName("label_3")
        self.horizontalLayout.addWidget(self.label_3)
        spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem1)
        # 35x35 "minimize to taskbar" button (text set in retranslateUi).
        self.to_taskbar_button = QtWidgets.QPushButton(self.Top_Bar)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.to_taskbar_button.sizePolicy().hasHeightForWidth())
        self.to_taskbar_button.setSizePolicy(sizePolicy)
        self.to_taskbar_button.setMinimumSize(QtCore.QSize(35, 35))
        self.to_taskbar_button.setMaximumSize(QtCore.QSize(35, 35))
        self.to_taskbar_button.setStyleSheet("QPushButton {\n"
        "color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "background-color:#232323 ;\n"
        "padding:0.5em;\n"
        "font-weight:900;\n"
        "font-size:12px;\n"
        "}\n"
        "QPushButton:hover {\n"
        "    background-color:#5b6dae ;\n"
        "\n"
        "}")
        self.to_taskbar_button.setObjectName("to_taskbar_button")
        self.horizontalLayout.addWidget(self.to_taskbar_button)
        # 35x35 quit button; hover turns red to signal a destructive action.
        self.quit_button = QtWidgets.QPushButton(self.Top_Bar)
        self.quit_button.setMinimumSize(QtCore.QSize(35, 35))
        self.quit_button.setMaximumSize(QtCore.QSize(35, 35))
        self.quit_button.setStyleSheet("QPushButton {\n"
        "color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "background-color: #232323;\n"
        "padding:0.5em;\n"
        "font-weight:bold;\n"
        "font-size:12px;\n"
        "\n"
        "}\n"
        "QPushButton:hover {\n"
        "    background-color: #c42b2b;\n"
        "}")
        self.quit_button.setObjectName("quit_button")
        self.horizontalLayout.addWidget(self.quit_button)
        self.verticalLayout.addWidget(self.Top_Bar)
        # --- Content area: fixed-width left icon menu + page stack ---
        self.Content = QtWidgets.QFrame(self.centralwidget)
        self.Content.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.Content.setFrameShadow(QtWidgets.QFrame.Raised)
        self.Content.setObjectName("Content")
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.Content)
        self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_2.setSpacing(0)
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        # Left menu is pinned to exactly 70px wide.
        self.frame_left_menu = QtWidgets.QFrame(self.Content)
        self.frame_left_menu.setMinimumSize(QtCore.QSize(70, 0))
        self.frame_left_menu.setMaximumSize(QtCore.QSize(70, 16777215))
        self.frame_left_menu.setStyleSheet("background-color: rgb(35, 35, 35);")
        self.frame_left_menu.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_left_menu.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_left_menu.setObjectName("frame_left_menu")
        self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.frame_left_menu)
        self.horizontalLayout_3.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_3.setObjectName("horizontalLayout_3")
        self.frame_top_menus = QtWidgets.QFrame(self.frame_left_menu)
        self.frame_top_menus.setMaximumSize(QtCore.QSize(60, 16777215))
        self.frame_top_menus.setFrameShape(QtWidgets.QFrame.VLine)
        self.frame_top_menus.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.frame_top_menus.setLineWidth(1)
        self.frame_top_menus.setMidLineWidth(0)
        self.frame_top_menus.setObjectName("frame_top_menus")
        self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.frame_top_menus)
        self.verticalLayout_4.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_4.setSpacing(0)
        self.verticalLayout_4.setObjectName("verticalLayout_4")
        # Menu button 1: camera icon (page_1, live view). Icons come from the
        # compiled Qt resource module imported at the top of the file.
        self.btn_page_1 = QtWidgets.QPushButton(self.frame_top_menus)
        self.btn_page_1.setMinimumSize(QtCore.QSize(60, 50))
        self.btn_page_1.setStyleSheet("QPushButton {\n"
        "    color: rgb(255, 255, 255);\n"
        "    background-color: rgb(35, 35, 35);\n"
        "    border: 0px solid;\n"
        "    font-weight:700;\n"
        "    font-size:12px;\n"
        "}\n"
        "QPushButton:hover {\n"
        "    background-color: #7289DA;\n"
        "}")
        self.btn_page_1.setText("")
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/icons/Resources/icons/32px/video-cam.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btn_page_1.setIcon(icon)
        self.btn_page_1.setIconSize(QtCore.QSize(48, 48))
        self.btn_page_1.setObjectName("btn_page_1")
        self.verticalLayout_4.addWidget(self.btn_page_1)
        # Menu button 2: stats icon (page_2, reports).
        self.btn_page_2 = QtWidgets.QPushButton(self.frame_top_menus)
        self.btn_page_2.setMinimumSize(QtCore.QSize(60, 50))
        self.btn_page_2.setStyleSheet("QPushButton {\n"
        "    color: rgb(255, 255, 255);\n"
        "    background-color: rgb(35, 35, 35);\n"
        "    border: 0px solid;\n"
        "font-weight:700;\n"
        "    font-size:12px;\n"
        "}\n"
        "QPushButton:hover {\n"
        "    background-color: #7289DA;\n"
        "}")
        self.btn_page_2.setText("")
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(":/icons/Resources/icons/32px/stat_light_gray.png"), QtGui.QIcon.Normal,
                        QtGui.QIcon.Off)
        self.btn_page_2.setIcon(icon1)
        self.btn_page_2.setIconSize(QtCore.QSize(32, 32))
        self.btn_page_2.setObjectName("btn_page_2")
        self.verticalLayout_4.addWidget(self.btn_page_2)
        # Menu button 4: user icon (sign-in/profile pages); rounded style.
        self.btn_page_4 = QtWidgets.QPushButton(self.frame_top_menus)
        self.btn_page_4.setEnabled(True)
        self.btn_page_4.setMinimumSize(QtCore.QSize(60, 50))
        self.btn_page_4.setStyleSheet("QPushButton {\n"
        "    color: rgb(255, 255, 255);\n"
        "    background-color: rgb(35, 35, 35);\n"
        "    border: 0px solid;\n"
        "    font-weight:700;\n"
        "    font-size:12px;\n"
        "    border-radius: 25px;\n"
        "    border-style: outset;\n"
        "}\n"
        "QPushButton:hover {\n"
        "    background-color: #1f1f1f;\n"
        "}")
        self.btn_page_4.setText("")
        icon2 = QtGui.QIcon()
        icon2.addPixmap(QtGui.QPixmap(":/icons/Resources/icons/32px/user_alternative.png"), QtGui.QIcon.Normal,
                        QtGui.QIcon.Off)
        self.btn_page_4.setIcon(icon2)
        self.btn_page_4.setIconSize(QtCore.QSize(32, 32))
        self.btn_page_4.setObjectName("btn_page_4")
        self.verticalLayout_4.addWidget(self.btn_page_4)
        self.horizontalLayout_3.addWidget(self.frame_top_menus)
        self.horizontalLayout_2.addWidget(self.frame_left_menu)
        # --- Page stack host ---
        self.frame_pages = QtWidgets.QFrame(self.Content)
        self.frame_pages.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.frame_pages.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.frame_pages.setObjectName("frame_pages")
        self.verticalLayout_5 = QtWidgets.QVBoxLayout(self.frame_pages)
        self.verticalLayout_5.setObjectName("verticalLayout_5")
        self.stackedWidget = QtWidgets.QStackedWidget(self.frame_pages)
        self.stackedWidget.setObjectName("stackedWidget")
        # --- Page 1: live video view (image_frame) + source combo +
        #     start/stop button + info labels ---
        self.page_1 = QtWidgets.QWidget()
        self.page_1.setMaximumSize(QtCore.QSize(16777207, 16777215))
        self.page_1.setObjectName("page_1")
        self.verticalLayout_7 = QtWidgets.QVBoxLayout(self.page_1)
        self.verticalLayout_7.setObjectName("verticalLayout_7")
        self.gridLayout_5 = QtWidgets.QGridLayout()
        self.gridLayout_5.setContentsMargins(5, -1, 5, 10)
        self.gridLayout_5.setObjectName("gridLayout_5")
        # Video frames are displayed by setting this label's pixmap; starts
        # with a placeholder image from the resource file.
        self.image_frame = QtWidgets.QLabel(self.page_1)
        self.image_frame.setFrameShape(QtWidgets.QFrame.Panel)
        self.image_frame.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.image_frame.setLineWidth(3)
        self.image_frame.setText("")
        self.image_frame.setPixmap(QtGui.QPixmap(":/placeholders/Resources/placeholder2.png"))
        self.image_frame.setScaledContents(True)
        self.image_frame.setObjectName("image_frame")
        self.gridLayout_5.addWidget(self.image_frame, 0, 0, 1, 4)
        self.start_stop_button = QtWidgets.QPushButton(self.page_1)
        self.start_stop_button.setMinimumSize(QtCore.QSize(0, 50))
        self.start_stop_button.setMaximumSize(QtCore.QSize(480, 50))
        # Accent (114,137,218) button palette across all three color groups;
        # PlaceholderText uses NoBrush here.
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(114, 137, 218))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(114, 137, 218))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(114, 137, 218))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
        brush.setStyle(QtCore.Qt.NoBrush)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(114, 137, 218))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(114, 137, 218))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(114, 137, 218))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
        brush.setStyle(QtCore.Qt.NoBrush)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(114, 137, 218))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(114, 137, 218))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(114, 137, 218))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
        brush.setStyle(QtCore.Qt.NoBrush)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
        self.start_stop_button.setPalette(palette)
        self.start_stop_button.setStyleSheet("QPushButton {\n"
        "color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "background-color: #7289DA;\n"
        "padding:0.5em;\n"
        "font-weight:bold;\n"
        "font-size:12px;\n"
        "}\n"
        "QPushButton:hover {\n"
        "    background-color: #667bc4;\n"
        "}")
        self.start_stop_button.setObjectName("start_stop_button")
        self.gridLayout_5.addWidget(self.start_stop_button, 2, 0, 1, 1)
        # Combo box for selecting the media/video source (populated at runtime;
        # starts with an empty current text).
        self.media_source_combobox = QtWidgets.QComboBox(self.page_1)
        self.media_source_combobox.setMinimumSize(QtCore.QSize(0, 25))
        self.media_source_combobox.setMaximumSize(QtCore.QSize(960, 16777215))
        self.media_source_combobox.setAutoFillBackground(False)
        self.media_source_combobox.setStyleSheet("QComboBox{\n"
        "background-color: #2C2F33;\n"
        "font-size:12px;\n"
        "color:#FFFFFF;\n"
        "font-weight:700;\n"
        "\n"
        "}\n"
        "QComboBox:hover\n"
        "{\n"
        "    border: 1px solid white;\n"
        "    background-color:#1a1c1e;\n"
        "}\n"
        "QListView\n"
        "{\n"
        "background-color: #2C2F33;\n"
        "font-size:12px;\n"
        "color:#FFFFFF;\n"
        "font-weight:700;\n"
        "}\n"
        "")
        self.media_source_combobox.setCurrentText("")
        self.media_source_combobox.setObjectName("media_source_combobox")
        self.gridLayout_5.addWidget(self.media_source_combobox, 1, 0, 1, 1)
        # Static caption column next to the runtime counters (out_info_box).
        self.info_title_box = QtWidgets.QLabel(self.page_1)
        self.info_title_box.setMaximumSize(QtCore.QSize(100, 16777215))
        self.info_title_box.setStyleSheet("color: #FFFFFF;\n"
        "font-weight: bold;\n"
        "font-size:12px;\n"
        "margin-left:1em;\n"
        "")
        self.info_title_box.setObjectName("info_title_box")
        self.gridLayout_5.addWidget(self.info_title_box, 1, 1, 2, 1)
        self.out_info_box = QtWidgets.QLabel(self.page_1)
        self.out_info_box.setStyleSheet("color: #FFFFFF;\n"
        "font-size:12px;")
        self.out_info_box.setText("")
        self.out_info_box.setObjectName("out_info_box")
        self.gridLayout_5.addWidget(self.out_info_box, 1, 2, 2, 1)
        self.verticalLayout_7.addLayout(self.gridLayout_5)
        self.stackedWidget.addWidget(self.page_1)
        # --- Page 2: reports table + reload button + status label ---
        self.page_2 = QtWidgets.QWidget()
        self.page_2.setObjectName("page_2")
        self.verticalLayout_6 = QtWidgets.QVBoxLayout(self.page_2)
        self.verticalLayout_6.setObjectName("verticalLayout_6")
        self.gridLayout_3 = QtWidgets.QGridLayout()
        self.gridLayout_3.setObjectName("gridLayout_3")
        spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.gridLayout_3.addItem(spacerItem2, 3, 0, 1, 1)
        self.reload_table_button = QtWidgets.QPushButton(self.page_2)
        self.reload_table_button.setMinimumSize(QtCore.QSize(400, 60))
        self.reload_table_button.setMaximumSize(QtCore.QSize(500, 16777215))
        self.reload_table_button.setStyleSheet("QPushButton {\n"
        "color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "background-color: #7289DA;\n"
        "padding:0.5em;\n"
        "font-weight:700;\n"
        "font-size:14px;\n"
        "}\n"
        "QPushButton:hover {\n"
        "    background-color: #667bc4;\n"
        "}")
        self.reload_table_button.setObjectName("reload_table_button")
        self.gridLayout_3.addWidget(self.reload_table_button, 3, 1, 1, 1)
        spacerItem3 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.gridLayout_3.addItem(spacerItem3, 3, 2, 1, 1)
        # 10x4 read-only table (headers set in retranslateUi); alternating
        # row colors, no grid pen, hidden vertical header.
        self.report_table = QtWidgets.QTableWidget(self.page_2)
        self.report_table.setMinimumSize(QtCore.QSize(800, 0))
        self.report_table.setStyleSheet("QTableView {\n"
        "    selection-background-color: #232323;\n"
        "    background-color: #2d2d2d;\n"
        "    alternate-background-color: #2b2b2b;\n"
        "    color: #FFFFFF;\n"
        "    font-size: 14px;\n"
        "\n"
        "}\n"
        "\n"
        "QTableView QTableCornerButton::section {\n"
        "    background: #434343;\n"
        "    border: 0px solid;\n"
        "}\n"
        "QTableWidget {\n"
        "    gridline-color: #1e1e1e;\n"
        "}\n"
        "\n"
        "QHeaderView::section {\n"
        "    padding-top:4px;\n"
        "    padding-bottom:4px;\n"
        "    padding-left:-0.5em;\n"
        "    background-color:#2a2a2a;\n"
        "    color:#FFFFFF;\n"
        "    border: 0px solid #fffff8;\n"
        "    font-weight: 800;\n"
        "    font-size:16px;\n"
        "}")
        self.report_table.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.report_table.setFrameShadow(QtWidgets.QFrame.Raised)
        self.report_table.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.report_table.setAlternatingRowColors(True)
        self.report_table.setShowGrid(True)
        self.report_table.setGridStyle(QtCore.Qt.NoPen)
        self.report_table.setRowCount(10)
        self.report_table.setColumnCount(4)
        self.report_table.setObjectName("report_table")
        item = QtWidgets.QTableWidgetItem()
        self.report_table.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.report_table.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.report_table.setHorizontalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.report_table.setHorizontalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.report_table.setItem(0, 0, item)
        item = QtWidgets.QTableWidgetItem()
        self.report_table.setItem(1, 1, item)
        self.report_table.horizontalHeader().setDefaultSectionSize(200)
        self.report_table.horizontalHeader().setMinimumSectionSize(50)
        self.report_table.verticalHeader().setVisible(False)
        self.report_table.verticalHeader().setDefaultSectionSize(50)
        self.report_table.verticalHeader().setMinimumSectionSize(28)
        self.gridLayout_3.addWidget(self.report_table, 0, 0, 1, 3, QtCore.Qt.AlignHCenter)
        self.report_info_box = QtWidgets.QLabel(self.page_2)
        self.report_info_box.setStyleSheet("color: #ff5048;\n"
        "font-weight: 700;\n"
        "font-size: 14px;\n"
        "margin:2em;\n"
        "")
        self.report_info_box.setObjectName("report_info_box")
        self.gridLayout_3.addWidget(self.report_info_box, 1, 0, 1, 3, QtCore.Qt.AlignHCenter)
        self.verticalLayout_6.addLayout(self.gridLayout_3)
        self.stackedWidget.addWidget(self.page_2)
        # --- Sign-in page: email + password line edits, sign-in button,
        #     red error label (info_box) ---
        self.sign_in_page = QtWidgets.QWidget()
        self.sign_in_page.setObjectName("sign_in_page")
        self.gridLayout = QtWidgets.QGridLayout(self.sign_in_page)
        self.gridLayout.setObjectName("gridLayout")
        self.gridLayout_4 = QtWidgets.QGridLayout()
        self.gridLayout_4.setObjectName("gridLayout_4")
        self.info_box = QtWidgets.QLabel(self.sign_in_page)
        self.info_box.setMaximumSize(QtCore.QSize(16777215, 100))
        self.info_box.setStyleSheet("color: #ff5048;\n"
        "font-weight: 700;\n"
        "")
        self.info_box.setText("")
        self.info_box.setObjectName("info_box")
        self.gridLayout_4.addWidget(self.info_box, 3, 0, 1, 3, QtCore.Qt.AlignHCenter)
        self.email_lbl_2 = QtWidgets.QLabel(self.sign_in_page)
        self.email_lbl_2.setStyleSheet("color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "font-weight:700;\n"
        "font-size:12px;\n"
        "margin-right:1em;\n"
        "margin-bottom:2em;")
        self.email_lbl_2.setObjectName("email_lbl_2")
        self.gridLayout_4.addWidget(self.email_lbl_2, 0, 0, 1, 1, QtCore.Qt.AlignRight)
        self.sign_in_button = QtWidgets.QPushButton(self.sign_in_page)
        self.sign_in_button.setMinimumSize(QtCore.QSize(150, 35))
        self.sign_in_button.setStyleSheet("QPushButton {\n"
        "color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "background-color: #7289DA;\n"
        "padding:0.1em;\n"
        "font-weight:bold;\n"
        "font-size:12px;\n"
        "}\n"
        "QPushButton:hover {\n"
        "    background-color: #667bc4;\n"
        "}")
        self.sign_in_button.setObjectName("sign_in_button")
        self.gridLayout_4.addWidget(self.sign_in_button, 2, 0, 1, 3, QtCore.Qt.AlignHCenter)
        self.password_lbl_2 = QtWidgets.QLabel(self.sign_in_page)
        self.password_lbl_2.setStyleSheet("color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "font-weight:700;\n"
        "font-size:12px;\n"
        "margin-right:1em;\n"
        "margin-bottom:3em;\n"
        "")
        self.password_lbl_2.setObjectName("password_lbl_2")
        self.gridLayout_4.addWidget(self.password_lbl_2, 1, 0, 1, 1, QtCore.Qt.AlignRight)
        self.email = QtWidgets.QLineEdit(self.sign_in_page)
        self.email.setMinimumSize(QtCore.QSize(400, 0))
        self.email.setMaximumSize(QtCore.QSize(450, 16777215))
        # Dark (35,35,35) field palette for the email input; unlike the
        # button palettes above, PlaceholderText is SolidPattern here.
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(35, 35, 35))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(35, 35, 35))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(35, 35, 35))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(35, 35, 35))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(35, 35, 35))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(35, 35, 35))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(35, 35, 35))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(35, 35, 35))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(35, 35, 35))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
        self.email.setPalette(palette)
        self.email.setStyleSheet("background-color: rgb(35,35,35);\n"
        "color: #FFFFFF;\n"
        "border: 0px solid;\n"
        "padding:0.5em;\n"
        "margin-bottom:2em;\n"
        "\n"
        "")
        self.email.setMaxLength(64)
        self.email.setClearButtonEnabled(False)
        self.email.setObjectName("email")
        self.gridLayout_4.addWidget(self.email, 0, 1, 1, 1)
        # Password field masks input (EchoMode.Password), max 39 chars.
        self.password = QtWidgets.QLineEdit(self.sign_in_page)
        self.password.setMinimumSize(QtCore.QSize(400, 0))
        self.password.setMaximumSize(QtCore.QSize(450, 16777215))
        self.password.setStyleSheet("background-color: rgb(35,35,35);\n"
        "color: #FFFFFF;\n"
        "border: 0px solid;\n"
        "padding:0.5em;\n"
        "margin-bottom:3em;\n"
        "")
        self.password.setMaxLength(39)
        self.password.setEchoMode(QtWidgets.QLineEdit.Password)
        self.password.setClearButtonEnabled(False)
        self.password.setObjectName("password")
        self.gridLayout_4.addWidget(self.password, 1, 1, 1, 1)
        self.gridLayout.addLayout(self.gridLayout_4, 0, 0, 1, 1)
        self.stackedWidget.addWidget(self.sign_in_page)
        # --- Profile page: current user details, PC-id combo, sign-out
        #     button, green status label ---
        self.profile_page = QtWidgets.QWidget()
        self.profile_page.setObjectName("profile_page")
        self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.profile_page)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.gridLayout_2 = QtWidgets.QGridLayout()
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.label_5 = QtWidgets.QLabel(self.profile_page)
        self.label_5.setMaximumSize(QtCore.QSize(800, 50))
        self.label_5.setStyleSheet("color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "font-weight:800;\n"
        "font-size:13px;\n"
        "margin-right:1em;")
        self.label_5.setObjectName("label_5")
        self.gridLayout_2.addWidget(self.label_5, 5, 0, 1, 1, QtCore.Qt.AlignRight)
        self.label = QtWidgets.QLabel(self.profile_page)
        self.label.setMaximumSize(QtCore.QSize(100, 50))
        self.label.setStyleSheet("color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "font-weight:800;\n"
        "font-size:13px;\n"
        "margin-right:1em;")
        self.label.setObjectName("label")
        self.gridLayout_2.addWidget(self.label, 3, 0, 1, 1, QtCore.Qt.AlignRight)
        self.label_4 = QtWidgets.QLabel(self.profile_page)
        self.label_4.setMaximumSize(QtCore.QSize(16777215, 50))
        self.label_4.setStyleSheet("color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "font-weight:800;\n"
        "font-size:14px;\n"
        "margin-bottom: 1.5em;\n"
        "")
        self.label_4.setObjectName("label_4")
        self.gridLayout_2.addWidget(self.label_4, 2, 0, 1, 2, QtCore.Qt.AlignHCenter)
        # Green status label (e.g. success feedback), filled at runtime.
        self.profile_info_box = QtWidgets.QLabel(self.profile_page)
        self.profile_info_box.setMaximumSize(QtCore.QSize(16777215, 200))
        self.profile_info_box.setStyleSheet("color: #2dbd2d;\n"
        "font-weight: 700;\n"
        "margin-top:3em;\n"
        "font-size:13px;\n"
        "")
        self.profile_info_box.setText("")
        self.profile_info_box.setObjectName("profile_info_box")
        self.gridLayout_2.addWidget(self.profile_info_box, 7, 0, 1, 2, QtCore.Qt.AlignHCenter)
        self.label_2 = QtWidgets.QLabel(self.profile_page)
        self.label_2.setMaximumSize(QtCore.QSize(800, 50))
        self.label_2.setStyleSheet("color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "font-weight:800;\n"
        "font-size:13px;\n"
        "margin-right:1em;")
        self.label_2.setObjectName("label_2")
        self.gridLayout_2.addWidget(self.label_2, 4, 0, 1, 1, QtCore.Qt.AlignRight)
        self.current_name_surname = QtWidgets.QLabel(self.profile_page)
        self.current_name_surname.setMaximumSize(QtCore.QSize(16777215, 50))
        self.current_name_surname.setStyleSheet("color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "font-weight:400;\n"
        "font-size:12px;\n"
        "")
        self.current_name_surname.setObjectName("current_name_surname")
        self.gridLayout_2.addWidget(self.current_name_surname, 3, 1, 1, 1)
        self.sign_out_button = QtWidgets.QPushButton(self.profile_page)
        self.sign_out_button.setMinimumSize(QtCore.QSize(100, 30))
        self.sign_out_button.setMaximumSize(QtCore.QSize(1000, 16777215))
        self.sign_out_button.setStyleSheet("QPushButton {\n"
        "color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "background-color: #FF392E;\n"
        "margin-top:2.5em;\n"
        "padding:0.5em;\n"
        "padding-left:5em;\n"
        "padding-right:5em;\n"
        "font-weight:bold;\n"
        "font-size:12px;\n"
        "}\n"
        "QPushButton:hover {\n"
        "    background-color: #FF2115;\n"
        "}")
        self.sign_out_button.setObjectName("sign_out_button")
        self.gridLayout_2.addWidget(self.sign_out_button, 6, 0, 1, 2, QtCore.Qt.AlignHCenter)
        self.current_email = QtWidgets.QLabel(self.profile_page)
        self.current_email.setMaximumSize(QtCore.QSize(16777215, 50))
        self.current_email.setStyleSheet("color: rgb(255, 255, 255);\n"
        "border: 0px solid;\n"
        "font-weight:400;\n"
        "font-size:12px;")
        self.current_email.setObjectName("current_email")
        self.gridLayout_2.addWidget(self.current_email, 4, 1, 1, 1)
        self.pc_id_combo = QtWidgets.QComboBox(self.profile_page)
        self.pc_id_combo.setMaximumSize(QtCore.QSize(40, 16777215))
        self.pc_id_combo.setStyleSheet("QComboBox{\n"
        "background-color: #2C2F33;\n"
        "font-size:12px;\n"
        "color:#FFFFFF;\n"
        "font-weight:500;\n"
        "\n"
        "}\n"
        "QComboBox:hover\n"
        "{\n"
        "    border: 1px solid white;\n"
        "    background-color:#1a1c1e;\n"
        "}\n"
        "QListView\n"
        "{\n"
        "background-color: #2C2F33;\n"
        "font-size:12px;\n"
        "color:#FFFFFF;\n"
        "font-weight:400;\n"
        "}\n"
        "")
        self.pc_id_combo.setObjectName("pc_id_combo")
        self.pc_id_combo.addItem("")
        self.gridLayout_2.addWidget(self.pc_id_combo, 5, 1, 1, 1, QtCore.Qt.AlignLeft)
        self.verticalLayout_3.addLayout(self.gridLayout_2)
        self.stackedWidget.addWidget(self.profile_page)
        # --- Final assembly: wire the stack into the window, set the user
        #     texts, show page index 0, and auto-connect slots by name ---
        self.verticalLayout_5.addWidget(self.stackedWidget)
        self.horizontalLayout_2.addWidget(self.frame_pages)
        self.verticalLayout.addWidget(self.Content)
        MainWindow.setCentralWidget(self.centralwidget)
        self.retranslateUi(MainWindow)
        self.stackedWidget.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Install all user-visible strings (auto-generated by pyuic5).

        The UI text is Turkish; do not edit by hand — regenerate from the
        .ui file instead, as changes here will be lost.
        """
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        # Title bar and window controls
        self.label_3.setText(_translate("MainWindow", "  Object Detective Alpha version 0.95"))
        self.to_taskbar_button.setText(_translate("MainWindow", "_"))
        self.quit_button.setText(_translate("MainWindow", "X"))
        # Sidebar page buttons (tooltips: main screen / reports / account)
        self.btn_page_1.setToolTip(_translate("MainWindow", "Ana Ekran"))
        self.btn_page_2.setToolTip(_translate("MainWindow", "Raporlar"))
        self.btn_page_4.setToolTip(_translate("MainWindow", "Hesabınız"))
        self.start_stop_button.setText(_translate("MainWindow", "Başlat"))
        self.info_title_box.setText(_translate("MainWindow", "  Sol Toplam\n"
"Sağ Toplam\n"
"  Durum"))
        # Reports page: table headers (date / elapsed / exited / entered)
        self.reload_table_button.setText(_translate("MainWindow", "Yenile"))
        self.report_table.setSortingEnabled(False)
        item = self.report_table.horizontalHeaderItem(0)
        item.setText(_translate("MainWindow", "Tarih"))
        item = self.report_table.horizontalHeaderItem(1)
        item.setText(_translate("MainWindow", "Geçen Süre"))
        item = self.report_table.horizontalHeaderItem(2)
        item.setText(_translate("MainWindow", "Dışarı Çıkan"))
        item = self.report_table.horizontalHeaderItem(3)
        item.setText(_translate("MainWindow", "İçeri Giren"))
        __sortingEnabled = self.report_table.isSortingEnabled()
        self.report_table.setSortingEnabled(False)
        self.report_table.setSortingEnabled(__sortingEnabled)
        self.report_info_box.setText(_translate("MainWindow", "Test"))
        # Sign-in form
        self.email_lbl_2.setText(_translate("MainWindow", "E-posta:"))
        self.sign_in_button.setText(_translate("MainWindow", "Giriş Yap"))
        self.password_lbl_2.setText(_translate("MainWindow", "Parola:"))
        self.email.setPlaceholderText(_translate("MainWindow", "Örn. provactus@domain.com"))
        self.password.setPlaceholderText(_translate("MainWindow", "******"))
        # Profile page (placeholder values shown until real account data loads)
        self.label_5.setText(_translate("MainWindow", "PC ID:"))
        self.label.setText(_translate("MainWindow", "Ad Soyad: "))
        self.label_4.setText(_translate("MainWindow", "Kullanıcı Bilgileri"))
        self.label_2.setText(_translate("MainWindow", " E-posta: "))
        self.current_name_surname.setText(_translate("MainWindow", "Testson"))
        self.sign_out_button.setText(_translate("MainWindow", "Çıkış Yap"))
        self.current_email.setText(_translate("MainWindow", "test@domain.com"))
        self.pc_id_combo.setItemText(0, _translate("MainWindow", "1"))
# -*- coding: utf-8 -*-
"""autograd_fin.ipynb
Automatically generated by Colaboratory.
"""
class Value:
    """Stores a scalar or numpy array and its gradient.

    Minimal reverse-mode autodiff node: every operation returns a new
    ``Value`` whose ``_backward`` closure knows how to push the incoming
    gradient to its inputs, and ``backward()`` replays those closures in
    reverse topological order.
    """

    def __init__(self, data, _children=(), _op=''):
        self.data = data
        self.grad = 0
        # internal variables used for autograd graph construction
        self._backward = lambda: None
        self._prev = set(_children)
        self._op = _op  # the op that produced this node, for graphviz / debugging / etc

    def __add__(self, other):
        """Element-wise add; `other` may be a raw number/array."""
        other = other if isinstance(other, Value) else Value(other)
        out = Value(self.data + other.data, (self, other), '+')

        def _backward():
            # NOTE(review): grads are passed through unchanged, so a broadcast
            # add (e.g. (batch, n) + (n,)) leaves the smaller operand with a
            # batch-shaped grad instead of sum-reducing it — callers appear to
            # compensate for this; confirm before changing.
            self.grad += out.grad
            other.grad += out.grad
        out._backward = _backward
        return out

    def __mul__(self, other):
        """Element-wise multiply; `other` may be a raw number/array."""
        other = other if isinstance(other, Value) else Value(other)
        out = Value(self.data * other.data, (self, other), '*')

        def _backward():
            self.grad += other.data * out.grad
            other.grad += self.data * out.grad
        out._backward = _backward
        return out

    def __pow__(self, other):
        """Raise to a constant int/float power."""
        assert isinstance(other, (int, float)), "only supporting int/float powers for now"
        out = Value(self.data**other, (self,), f'**{other}')

        def _backward():
            self.grad += (other * self.data**(other-1)) * out.grad
        out._backward = _backward
        return out

    def relu(self):
        """Rectified linear unit, applied element-wise."""
        out = Value(np.where(self.data < 0, 0.0, self.data), (self,), 'ReLU')

        def _backward():
            # Gradient flows only where the activation is strictly positive.
            self.grad += np.where(out.data > 0, 1, 0.0) * out.grad
        out._backward = _backward
        return out

    def matmul(self, other):
        """Matrix product self @ other (2-D operands)."""
        other = other if isinstance(other, Value) else Value(other)
        out = Value(np.matmul(self.data, other.data), (self, other), 'matmul')

        def _backward():
            self.grad += np.dot(out.grad, other.data.T)
            other.grad += np.dot(self.data.T, out.grad)
        out._backward = _backward
        return out

    def softmax(self):
        """Softmax over the last axis (indexing assumes 2-D data)."""
        # A single global max is subtracted for numerical stability; the
        # subtracted constant cancels in the normalization.
        b = np.reshape(np.max(self.data), (-1, 1))
        out = Value(np.exp(self.data - b) / np.sum(np.exp(self.data - b), axis=-1)[:, None], (self,), 'softmax')

        def _backward():
            # Jacobian-vector product of softmax: (g - sum(g * s, -1)) * s
            self.grad += (out.grad - np.reshape(
                np.sum(out.grad * out.data, -1),
                [-1, 1]
            )) * out.data
        out._backward = _backward
        return out

    def sigmoid(self):
        """Logistic sigmoid, applied element-wise."""
        out = Value(1 / (1 + np.exp(-self.data)), (self,), 'sigmoid')

        def _backward():
            self.grad += out.grad * out.data * (1 - out.data)
        out._backward = _backward
        return out

    def log(self):
        """Natural logarithm (caller must ensure strictly positive data)."""
        out = Value(np.log(self.data), (self,), 'log')

        def _backward():
            self.grad += out.grad / self.data
        out._backward = _backward
        return out

    def reshape(self, shape):
        """Reshape the stored array; backward restores the original shape."""
        out = Value(np.reshape(self.data, shape), (self,), 'reshape')

        def _backward():
            self.grad += np.reshape(out.grad, np.shape(self.data))
        out._backward = _backward
        return out

    def concat(self, other, axis=-1):
        """Concatenate with `other`.

        The backward pass currently supports 3-D inputs concatenated along
        the last axis only (it slices with [:, :, ...]).
        """
        assert isinstance(other, Value), "please use Value cast to other"
        # Bug fix: the original did not register (self, other) as children,
        # so backward() never visited the operands and their grads stayed 0.
        out = Value(np.concatenate((self.data, other.data), axis=axis), (self, other), 'concat')

        def _backward():
            split = self.data.shape[-1]
            self.grad += out.grad[:, :, 0:split]
            other.grad += out.grad[:, :, split:out.data.shape[-1]]
        out._backward = _backward
        return out

    def reduce_sum(self, axis=None):
        """Sum over `axis`, or over everything when axis is None."""
        out = Value(np.sum(self.data, axis=axis), (self,), 'REDUCE_SUM')

        def _backward():
            # Broadcast the incoming gradient back to self's shape by
            # re-inserting the reduced axis (all axes when axis is None)
            # and tiling along it.
            output_shape = np.array(self.data.shape)
            output_shape[axis] = 1
            tile_scaling = self.data.shape // output_shape
            grad = np.reshape(np.asarray(out.grad), output_shape)
            self.grad += np.tile(grad, tile_scaling)
        out._backward = _backward
        return out

    def backward(self):
        """Run reverse-mode autodiff from this node (seeds grad with 1)."""
        # topological order all of the children in the graph
        topo = []
        visited = set()

        def build_topo(v):
            if v not in visited:
                visited.add(v)
                for child in v._prev:
                    build_topo(child)
                topo.append(v)
        build_topo(self)
        # go one variable at a time and apply the chain rule to get its gradient
        self.grad = 1
        for v in reversed(topo):
            v._backward()

    def __neg__(self):  # -self
        return self * -1

    def __radd__(self, other):  # other + self
        return self + other

    def __sub__(self, other):  # self - other
        return self + (-other)

    def __rsub__(self, other):  # other - self
        return other + (-self)

    def __rmul__(self, other):  # other * self
        return self * other

    def __truediv__(self, other):  # self / other
        return self * other**-1

    def __rtruediv__(self, other):  # other / self
        return other * self**-1

    def __repr__(self):
        return f"Value(data={self.data}, grad={self.grad})"
class Dense():
    """Fully-connected layer whose parameters are autograd `Value` nodes."""

    def __init__(self, nin, nout, nonlin):
        # Weights ~ N(0, 1); bias starts at zero. The second randn draw is
        # kept (inside zeros_like) so the RNG stream matches the original.
        self.W = Value(np.random.randn(nin, nout))
        self.b = Value(np.zeros_like(np.random.randn(nout,)))
        self.nonlin = nonlin

    def __call__(self, X, training=True):
        if training:
            pre = X.matmul(self.W) + self.b
        else:
            # NOTE(review): only b.data[0] is added in inference mode; this
            # looks like a workaround for the bias gradient picking up a
            # broadcast batch axis during training — confirm before changing.
            pre = Value(np.matmul(X, self.W.data) + self.b.data[0])
        if self.nonlin == 'softmax':
            act = pre.softmax()
        elif self.nonlin == 'relu':
            act = pre.relu()
        else:
            act = pre.sigmoid()
        return act if training else act.data

    def train(self, lr=0.01):
        '''
        only supporting sgd for now
        '''
        # Plain SGD step, then reset the accumulated gradients.
        self.W.data = self.W.data - lr * self.W.grad
        self.b.data = self.b.data - lr * self.b.grad
        self.W.grad, self.b.grad = 0, 0
from keras.datasets import mnist
import keras
import numpy as np

(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Scale pixel intensities into [0, 1].
train_images = np.asarray(x_train, dtype=np.float64) / 255.0
test_images = np.asarray(x_test, dtype=np.float64) / 255.0
y_train = np.asarray(keras.utils.to_categorical(y_train))

# Two-layer MLP: 784 -> 256 (ReLU) -> 10 (softmax).
dense1 = Dense(784, 256, nonlin='relu')
dense2 = Dense(256, 10, nonlin='softmax')
steps = 20000
batch_size = 32
for step in range(steps):
    # Draw a random mini-batch of row indices.
    ri = np.random.permutation(train_images.shape[0])[:batch_size]
    Xb, yb = Value(train_images[ri]), Value(y_train[ri])
    Xb = Xb.reshape((32, 784))
    y_pred = dense1(Xb)
    # Round-trip reshape (a net no-op), kept from the original notebook run.
    y_pred = y_pred.reshape((32, 16, 16))
    y_pred = y_pred.reshape((32, 256))
    y_pred = dense2(y_pred).log()
    zb = yb * y_pred
    finb = -(1 / batch_size) * zb.reduce_sum()  # cross entropy loss
    finb.backward()
    dense1.train()
    dense2.train()
    if step % 1000 == 0:
        print(finb)

# Evaluate on the held-out test split (inference mode skips the graph).
from sklearn.metrics import accuracy_score
accuracy_score(np.argmax(dense2(dense1(np.reshape(test_images, (10000, 784)), training=False), training=False), axis=1), y_test)
16 * 16 * 32

# Scratch cells below: sanity-check concat gradients against TensorFlow.
a = np.random.randn(10, 6, 3)
b = np.random.randn(10, 6, 4)
np.shape(np.concatenate((a, b), axis=-1))
import tensorflow as tf
x, y = tf.random.normal((8, 9, 5), dtype=tf.float32), tf.random.normal((8, 9, 4), dtype=tf.float32)
with tf.GradientTape(persistent=True) as g:
    g.watch(x)
    g.watch(y)
    a = tf.keras.layers.concatenate(inputs=[x, y], axis=-1)
    fin = tf.reduce_sum(a)
fin
g.gradient(fin, x)
g.gradient(fin, y)
x, y = Value(np.asarray(x)), Value(np.asarray(y))
a = x.concat(y)
fin = a.reduce_sum()
fin
fin.backward()
x.grad
y.grad
# NOTE(review): `p` is never defined — notebook residue; this line raises
# NameError if executed as a script.
np.concatenate((p[:, :, 0:5], p[:, :, 5:10]), axis=-1) == p
np.shape(p[:, :, 5:10])
np.max(x.data, axis=-1)
from checker.remote import call_bad_event, call_good_event, lambda_handler
from tests.markers import unit
# Mark every test in this module as a unit test.
pytestmark = unit
def test_lambda_handler() -> None:
    """A harmless code snippet yields an empty (falsy) response body."""
    response = lambda_handler(event={'code': 'print("hello world")'})
    assert not response['body']
def test_call_bad_event() -> None:
    """The canned bad event produces a non-empty (truthy) body."""
    outcome = call_bad_event()
    assert outcome['body']
def test_call_good_event() -> None:
    """The canned good event produces an empty (falsy) body."""
    outcome = call_good_event()
    assert not outcome['body']
| 392 | 130 |
from pathlib import Path


def is_website_folder(path: Path):
    """Checks if path likely points at the hsf.github.io repository"""
    # The repository is recognized by these marker subdirectories.
    for marker in (".git", "_profiles", "_data"):
        if (path / marker).is_dir():
            continue
        # Diagnostic: report which marker was missing and where we looked.
        print(path, marker)
        return False
    return True
| 331 | 100 |
class MyClass:
    """Demo of class-level vs instance-level attributes."""

    # Class attribute shared by every instance (until shadowed).
    static = "Alfredo"

    def __init__(self, state) -> None:
        # Per-instance attribute, independent between instances.
        self.state = state

    def print_static(self):
        """Print the class attribute as seen through this instance."""
        print(self.static)

    @classmethod
    def change_static(cls):
        """Rebind the shared class attribute.

        Fixed to use ``cls`` instead of hard-coding ``MyClass`` so the
        classmethod behaves correctly for subclasses too; for MyClass
        itself the behavior is unchanged.
        """
        cls.static = "Programador"
first = MyClass(True)
second = MyClass(False)
# Changing the class attribute through one instance is visible through
# every instance, because the attribute lives on the class object itself.
first.change_static()
first.print_static()
second.print_static()
| 355 | 123 |
#
# Chris Lumens <clumens@redhat.com>
#
# Copyright 2007 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
from pykickstart.base import *
from pykickstart.errors import *
from pykickstart.options import *
import gettext
_ = lambda x: gettext.ldgettext("pykickstart", x)
class FC6_Services(KickstartCommand):
    """Kickstart ``services`` command (FC6): enable/disable system services.

    NOTE: this is Python 2 code (``raise E, arg`` syntax in parse()).
    """
    removedKeywords = KickstartCommand.removedKeywords
    removedAttrs = KickstartCommand.removedAttrs
    def __init__(self, writePriority=0, *args, **kwargs):
        KickstartCommand.__init__(self, writePriority, *args, **kwargs)
        self.op = self._getParser()
        # Lists of service names collected from --disabled / --enabled.
        self.disabled = kwargs.get("disabled", [])
        self.enabled = kwargs.get("enabled", [])
    def __str__(self):
        # Re-emit the command in kickstart syntax; nothing is written when
        # neither list has entries.
        retval = KickstartCommand.__str__(self)
        args = ""
        if len(self.disabled) > 0:
            args += " --disabled=\"%s\"" % ",".join(self.disabled)
        if len(self.enabled) > 0:
            args += " --enabled=\"%s\"" % ",".join(self.enabled)
        if args != "":
            retval += "# System services\nservices%s\n" % args
        return retval
    def _getParser(self):
        # Callback splits the comma-separated service list and strips
        # whitespace around each name before appending to the option's list.
        def services_cb (option, opt_str, value, parser):
            for d in value.split(','):
                parser.values.ensure_value(option.dest, []).append(d.strip())
        op = KSOptionParser()
        op.add_option("--disabled", dest="disabled", action="callback",
                      callback=services_cb, nargs=1, type="string")
        op.add_option("--enabled", dest="enabled", action="callback",
                      callback=services_cb, nargs=1, type="string")
        return op
    def parse(self, args):
        # At least one of --disabled/--enabled must appear on the line.
        (opts, extra) = self.op.parse_args(args=args, lineno=self.lineno)
        self._setToSelf(self.op, opts)
        if len(self.disabled) == 0 and len(self.enabled) == 0:
            raise KickstartParseError, formatErrorMsg(self.lineno, msg=_("One of --disabled or --enabled must be provided."))
        return self
| 2,809 | 854 |
import pytest
# Opt the shared test-utility module into pytest's assertion rewriting so
# its helper asserts produce detailed failure messages.
pytest.register_assert_rewrite('gen.tests.utils')
| 65 | 22 |
#!flask/bin/python
import os
import uuid
from flask import Flask, jsonify
from flask import request, jsonify
from flask_restplus import Api, Resource, fields
from flask_restplus import reqparse
import pandas as pd
import numpy as np
from joblib import load
import pickle
import json
import requests
#
# Model registering
#
# In-memory registry of the predictive models shipped with this service.
modelDictionary = dict({
    'models': [
        {
            'path': "models/miniloandefault-rfc.joblib",
        },
        {
            'path': "models/miniloandefault-svm.joblib",
        },
        {
            'path': "models/miniloandefault-xgb-c.joblib",
        },
        {
            'path': "models/iris-svc.joblib",
        }
    ]
})
# todo
# Propagate the joblib metadata into the model management dictionary
#
# Flask
#
app = Flask(__name__)
api = Api(app)
# Administration namespace: health check, model listing, schema introspection.
ns = api.namespace('automation/api/v1.0/prediction/admin', description='administration')
@ns.route('/is-alive')  # URL route for the liveness probe
class HeartBeat(Resource):
    """RESTful resource answering health-check requests."""

    def get(self):
        """Returns an heart beat."""
        return dict(answer='ok')
@ns.route("/models")
class Model(Resource):
    """RESTful resource exposing the registered models."""

    def get(self):
        """Returns the list of ML models."""
        # Serve the in-memory registry as-is.
        registry = modelDictionary
        return registry
# Swagger model: key identifying one registered predictive model.
model_key_descriptor = api.model('ModelKeyDescriptor', {
    'name': fields.String(required=True, description="Name of the model", help="Name cannot be blank.",
                          default='iris-svc'),
    'version': fields.String(required=True, description="Version of the model", help="Name cannot be blank.",
                             default='1.0'),
    'format': fields.String(required=True, description="Format of the model", help="Name cannot be blank.",
                            default='joblib'),
})
# Swagger model: descriptive metadata stored alongside a serialized model.
model_metadata = api.model('ModelMetadata', {
    'name': fields.String(required=True, description="Name of the model", help="Name cannot be blank."),
    'version': fields.String(required=True, description="Version of the model", help="Name cannot be blank."),
    'format': fields.String(required=True, description="Format of the model", help="Name cannot be blank."),
    'author': fields.String(required=True, description="Author of the model", help="Name cannot be blank."),
    'metrics': fields.Wildcard(fields.String),
    'customProperties': fields.Wildcard(fields.String)
})
# Swagger model: one input/output parameter of a model signature.
# NOTE(review): the 'order'/'type' descriptions read "Version of the model" —
# looks like copy-paste residue in the API docs.
model_signature_parameter = api.model('ModelSignatureParameter', {
    'name': fields.String(required=True, description="Name of the model", help="Name cannot be blank."),
    'order': fields.String(required=True, description="Version of the model", help="Name cannot be blank."),
    'type': fields.String(required=True, description="Version of the model", help="Name cannot be blank.")
})
# Swagger model: full input/output signature of a model.
model_signature = api.model('ModelSignature', {
    'input': fields.List(fields.Raw(required=True, description="Inputs", help="Name cannot be blank.")),
    'output': fields.List(fields.Raw(required=True, description="Outputs", help="Name cannot be blank."))
})
# Swagger model: metadata + signature bundle returned by /model-schema.
model_schema = api.model('ModelSchema', {
    'metadata': fields.Nested(model_metadata),
    'signature': fields.Nested(model_signature),
    'customProperties': fields.Nested(model_metadata),
})
@ns.route('/model-schema')
class ModelSchema(Resource):
    """Introspects a serialized model bundle and returns its schema."""

    @api.expect(model_key_descriptor)
    @api.response(202, 'ML Schema retrieved.', model_schema)
    def post(self):
        """Returns the schema of a model."""
        descriptor = request.json
        print(descriptor)
        # Unpack the model key descriptor.
        requested_name = descriptor["name"]
        requested_version = descriptor["version"]  # looked up but not used further
        requested_format = descriptor["format"]
        # Path of the serialized bundle on local disk.
        bundle_path = 'models/' + requested_name + '.' + requested_format
        bundle = load(bundle_path)
        # Shallow-copy, then drop the entries that cannot be serialized to JSON.
        serializable = bundle.copy()
        del serializable["model"]
        del serializable["metadata"]["creationDate"]
        return serializable
# Invocation endpoints live in their own namespace.
ns = api.namespace('automation/api/v1.0/prediction/invocation', description='run ML models')
# Swagger model: model key used when requesting a prediction.
request_model_descriptor = api.model('ModelDescriptor', {
    'name': fields.String(required=True, description="Local path of the model", help="Name cannot be blank."),
    'version': fields.String(required=True, description="Version of the model", help="Name cannot be blank."),
    'format': fields.String(required=True, description="Format of the model", help="Name cannot be blank.")
})
# Swagger model: prediction request = model key + named feature values.
prediction_request = api.model('PredictionRequest', {
    'model': fields.Nested(request_model_descriptor),
    'features': fields.Wildcard(fields.String)
})
# Swagger model: prediction response payload.
prediction_response = api.model('PredictionResponse', {
    'path': fields.String(required=True, description="Local path of the invoked predictive model",
                          help="Name cannot be blank."),
    'id': fields.String(required=True, description="Uuid of the prediction", help="Name cannot be blank."),
    'prediction': fields.String(required=False, description="The prediction", help="Name cannot be blank."),
    'probabilities': fields.Wildcard(fields.String)
})
@ns.route('/')
class PredictionService(Resource):
    """Runs a registered model against a feature payload."""

    @api.expect(prediction_request)
    @api.response(201, 'Category successfully created.', prediction_response)
    def post(self):
        """Computes a new prediction.

        Expects a JSON body with a ``model`` descriptor (name/version/format)
        and a ``features`` mapping keyed by parameter name. The joblib bundle
        is loaded from ``models/<name>.joblib`` and invoked per its
        ``metadata["invocation"]`` mode; for ``predict_proba`` models the
        per-class probabilities are returned too. Returns "KO" on any error.
        """
        try:
            json_dictionary = request.json
            print(json_dictionary)
            # Model descriptor
            json_model_dictionary = json_dictionary["model"]
            model_name = json_model_dictionary["name"]
            model_version = json_model_dictionary["version"]  # informational only for now
            model_format = json_model_dictionary["format"]
            # Feature values, keyed by signature parameter name
            json_payload_dictionary = json_dictionary["features"]
            # Compose the model path
            model_path = 'models/' + model_name + '.' + 'joblib'  # Picking joblib file by default
            # Local read of the serialized bundle {model, metadata, signature}
            dictionary = load(model_path)
            metadata_dictionary = dictionary["metadata"]
            # Order the incoming features according to the declared signature
            signature_dictionary = dictionary["signature"]
            signature_parameters = signature_dictionary["input"]
            parameter_values = []
            for parameter in signature_parameters:
                print(parameter)
                name = parameter["name"]
                parameter_values.append(float(json_payload_dictionary[name]))
            loaded_model = dictionary['model']
            # Invocation mode is declared in the bundle metadata
            invocation_method = metadata_dictionary["invocation"]
            response_dictionary = {
                "path": model_path,
                "id": str(uuid.uuid4())
            }
            if invocation_method == 'predict':
                predicted_class = loaded_model.predict([parameter_values])
                # Assume an array of a single element to be cast in int
                found_class = predicted_class[0]
                response_dictionary['prediction'] = found_class.item()  # cast into int
            if invocation_method == 'predict_proba':
                prediction_wrapper = loaded_model.predict_proba([parameter_values])
                probabilities = prediction_wrapper[0]
                # Needs to be generalized beyond binary classification
                probability_dictionnary = {
                    "0": probabilities[0],
                    "1": probabilities[1]
                }
                response_dictionary["probabilities"] = probability_dictionnary
                # Predicted class = index of the highest probability
                predicted_class = np.where(probabilities == np.amax(probabilities))
                response_dictionary['prediction'] = str(predicted_class[0][0])
            print(response_dictionary)
            return response_dictionary
        except Exception as error:
            # Bug fix: the original bare `except:` also swallowed SystemExit /
            # KeyboardInterrupt and hid the failure cause entirely; at least
            # surface the error before returning the sentinel.
            print(error)
            return "KO"
if __name__ == '__main__':
    # Start a development server
    # NOTE(review): binds to 0.0.0.0 (all interfaces) — fine for containers,
    # not for exposing the Flask dev server directly to untrusted networks.
    app.run(port=5000, host='0.0.0.0')
| 8,529 | 2,297 |
# Copyright (c) Microsoft. All rights reserved.
import time
import logging
import torch
from rnndata import repackage_hidden, clone_hidden, get_batch
from utils import get_lr_mom, AverageMeter
def compute_accuracy(output, target, topk=(1,)):
    """Computes the precision@k for the specified values of k.

    Args:
        output: logits of shape (batch, classes), or a tuple whose first
            element is the logits (inception v3 returns (logits, aux)).
        target: class indices of shape (batch,).
        topk: tuple of k values; one percentage tensor is returned per k.

    Returns:
        List of 1-element tensors, precision@k in percent, ordered as topk.
    """
    with torch.no_grad():
        if type(output) is not torch.Tensor:
            # inception v3 model returns (logits, aux_logits); score the main head
            output = output[0]
        maxk = max(topk)
        batch_size = target.size(0)
        _, pred = output.topk(maxk, 1, True, True)
        pred = pred.t()
        correct = pred.eq(target.view(1, -1).expand_as(pred))
        res = []
        for k in topk:
            # Bug fix: `correct[:k]` is a slice of a transposed (non-contiguous)
            # tensor, and .view(-1) raises on it in torch >= 1.7; reshape(-1)
            # handles non-contiguous inputs and is otherwise equivalent.
            correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True)
            res.append(correct_k.mul_(100.0 / batch_size))
        return res
def mean_accuracy_multi_binary_label_with_logits(output, target, topk=(40, 13)):
    """Mean per-attribute accuracy for multi-label binary classification.

    An attribute is counted correct when the logit's sign agrees with the
    binary label; accuracies are averaged over the batch, then over the
    first k attributes for each k in `topk`. Returns percentages.
    """
    with torch.no_grad():
        if type(output) is not torch.Tensor:
            # inception v3 model returns a tuple; take the main logits
            output = output[0]
        labels = target.type(torch.int)
        # Per-attribute accuracy: positive logit vs. positive label, batch-averaged
        agreement = ((output > 0.0) == (labels > 0.5)).type(torch.float)
        per_attribute = agreement.mean(dim=0)
        results = []
        for k in topk:
            mean_k = per_attribute[:k].mean(dim=0, keepdim=True)
            results.append(mean_k.mul_(100.0))
        return results
def seq_train(train_data, model, criterion, optimizer, epoch, ntokens,
              batch_size, cfg, checkpointer, extend_stats, train_writer):
    """One training epoch for the RNN language model.

    Iterates BPTT-sized windows over `train_data` (optionally in shuffled
    order per cfg.MODEL.RNN.SHUFFLE / cfg.DATALOADER.RE), steps the
    optimizer according to the cfg.OPTIM.OPT family, and logs the running
    loss every cfg.LOG_FREQ batches to stdout, `train_writer` and
    `checkpointer`.
    """
    total_loss = 0.
    start_time = time.time()
    hidden = model.module.init_hidden(batch_size)
    # Starting offsets of the BPTT windows over the corpus.
    data_batches = range(0, train_data.size(0) - 1, cfg.MODEL.RNN.BPTT)
    if cfg.MODEL.RNN.SHUFFLE:
        if cfg.DATALOADER.RE == 'yes':
            # Sample window indices with replacement.
            data_sampler = torch.randint(high=len(data_batches),
                                         size=(len(data_batches),),
                                         dtype=torch.int64).tolist()
        elif cfg.DATALOADER.RE == 'no':
            # Random permutation: each window exactly once.
            data_sampler = torch.randperm(len(data_batches)).tolist()
        else:
            raise ValueError(
                "Invalid cfg.DATALOADER.RE input {}".format(cfg.DATALOADER.RE))
    else:
        data_sampler = range(0, len(data_batches))
    for batch, data_i in enumerate(data_sampler):
        i = data_batches[data_i]
        # Turn on training mode which enables dropout.
        model.train()
        # get data
        data, targets = get_batch(train_data, i, cfg.MODEL.RNN.BPTT)
        # Starting each batch, we detach the hidden state from how it was previously produced.
        # If we didn't, the model would try backpropagating all the way to start of the dataset.
        # When cfg.MODEL.RNN.SHUFFLE is true, not initializing with 0 does not
        # make sense. However, we just keep it here.
        hidden = repackage_hidden(hidden, cfg.MODEL.RNN.INIT0)
        if cfg.OPTIM.OPT in ['sgd_sls', 'salsa', 'ssls', 'salsa_new']:
            # Line-search optimizers re-evaluate the loss from the same state.
            hidden_clone = clone_hidden(hidden)
        model.zero_grad()
        output, hidden = model(data, hidden)
        loss = criterion(output.view(-1, ntokens), targets)
        loss.backward()
        # `clip_grad_norm` helps prevent the exploding gradient problem in RNNs / LSTMs.
        torch.nn.utils.clip_grad_norm_(model.parameters(), cfg.MODEL.RNN.CLIP)
        # closure function defined for line search used in SGD_SLS
        def eval_loss():
            #if cfg.ls_eval:
            if cfg.OPTIM.LS.EVAL:
                model.eval()
            with torch.no_grad():
                output, _ = model(data, hidden_clone)
                loss = criterion(output.view(-1, ntokens), targets)
            return loss
        # Dispatch on optimizer family: statistics-based, line-search, or plain.
        if cfg.OPTIM.OPT in ['yaida_diag', 'yaida_seq', 'pflug_bat', 'pflug_seq',
                             'sasa_xd_seq', 'sasa_xd']:
            optimizer.step(closure=extend_stats)
        elif cfg.OPTIM.OPT in ['sgd_sls', 'salsa', 'ssls', 'salsa_new']:
            optimizer.step(loss, closure=eval_loss)
        else:
            optimizer.step(closure=None)
        total_loss += loss.item()
        if batch % cfg.LOG_FREQ == 0 and batch > 0:
            # Average loss over the last logging window.
            cur_loss = total_loss / cfg.LOG_FREQ
            elapsed = time.time() - start_time
            lr, mom = get_lr_mom(optimizer, cfg)
            # NOTE(review): the 'ppl' field prints cur_loss, not exp(cur_loss).
            print(
                '| epoch {:3d} | {:5d}/{:5d} batches | lr {:02.2f} | ms/batch {:5.2f} | '
                'loss {:5.2f} | ppl {:8.2f}'.format(
                    epoch, batch, len(train_data) // cfg.MODEL.RNN.BPTT, lr,
                    elapsed * 1000 / cfg.LOG_FREQ, cur_loss,
                    cur_loss))
            total_loss = 0
            start_time = time.time()
            # The same window loss is recorded under both metric names.
            train_writer.add_scalar("metrics/top1", cur_loss)
            train_writer.add_scalar("metrics/loss", cur_loss)
            lr, mom = get_lr_mom(optimizer, cfg)
            train_writer.add_scalar("params/lr", lr)
            train_writer.add_scalar("params/mom", mom)
            checkpointer.trainacc.append(cur_loss)
            checkpointer.trainloss.append(cur_loss)
            checkpointer.lrs.append(lr)
            checkpointer.moms.append(mom)
# Training
def train(train_loader, model, criterion, optimizer, epoch,
          cfg, extend_stats, train_writer, checkpointer, device):
    """One training epoch over a (vision-style) DataLoader.

    Tracks batch/data timing, loss and precision@1/@5 via AverageMeters,
    steps the optimizer according to the cfg.OPTIM.OPT family, and logs
    every cfg.LOG_FREQ batches to `logging`, `train_writer` and
    `checkpointer`.
    """
    print('\nEpoch: %d' % epoch)
    batch_time = AverageMeter()
    data_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()
    end = time.time()
    for i, (input, target) in enumerate(train_loader):
        # switch to train mode
        model.train()
        # measure data loading time
        data_time.update(time.time() - end)
        # compute output and record loss
        input, target = input.to(device), target.to(device)
        output = model(input)
        if cfg.LOSS == "bce":
            # BCE criterion expects float targets.
            target = target.type(torch.float32)
        if cfg.MODEL.ARCH == 'inception_v3':
            # Average the main and auxiliary head losses.
            loss = 0.5 * (criterion(output[0], target) + criterion(output[1], target))
        else:
            loss = criterion(output, target)
        losses.update(loss.item(), input.size(0))
        # measure and record accuracy
        if cfg.LOSS == "xentropy":
            prec1, prec5 = compute_accuracy(output, target, topk=(1, 5))
            top1.update(prec1[0].item(), input.size(0))
            top5.update(prec5[0].item(), input.size(0))
        elif cfg.LOSS == "bce":
            prec1, prec5 = mean_accuracy_multi_binary_label_with_logits(output, target, topk=(40, 13))
            top1.update(prec1[0].item(), input.size(0))
            top5.update(prec5[0].item(), input.size(0))
        else:
            # Unknown loss type: no accuracy metric is defined.
            top1.update(0.0, input.size(0))
            top5.update(0.0, input.size(0))
        # compute gradient and do SGD step
        optimizer.zero_grad()
        loss.backward()
        # closure function defined for line search used in SGD_SLS
        def eval_loss():
            #if cfg.ls_eval:
            if cfg.OPTIM.LS.EVAL:
                model.eval()
            with torch.no_grad():
                output = model(input)
                loss = criterion(output, target)
            return loss
        # Dispatch on optimizer family: statistics-based, line-search, or plain.
        if cfg.OPTIM.OPT in ['yaida_diag', 'yaida_seq', 'pflug_bat', 'pflug_seq',
                             'sasa_xd_seq', 'sasa_xd']:
            optimizer.step(closure=extend_stats)
        elif cfg.OPTIM.OPT in ['sgd_sls', 'salsa', 'ssls', 'salsa_new']:
            optimizer.step(loss, closure=eval_loss)
        else:
            optimizer.step(closure=None)
        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        # only log once per cfg.LOG_FREQ param updates. adjust factor because pflug uses
        # 3 batches to make 1 param update.
        if i % cfg.LOG_FREQ == 0:
            logging.info('Epoch: [{0}][{1}/{2}]\t'
                         'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                         'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
                         'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                         'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                         'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
                             epoch, i, len(train_loader), batch_time=batch_time,
                             data_time=data_time, loss=losses, top1=top1, top5=top5))
            train_writer.add_scalar("metrics/top1", top1.val)
            train_writer.add_scalar("metrics/top5", top5.val)
            train_writer.add_scalar("metrics/loss", losses.val)
            lr, mom = get_lr_mom(optimizer, cfg)
            train_writer.add_scalar("params/lr", lr)
            train_writer.add_scalar("params/mom", mom)
            checkpointer.trainacc.append(top1.val)
            checkpointer.trainloss.append(losses.val)
            checkpointer.lrs.append(lr)
            checkpointer.moms.append(mom)
def seq_evaluate(data_source, model, criterion, ntokens, eval_batch_size,
                 epoch, cfg, test_writer, checkpointer):
    """Evaluate the RNN language model on `data_source`.

    Returns the length-weighted average validation loss; also logs it to
    stdout, `test_writer` and `checkpointer` (the same value is recorded
    under both the loss and accuracy slots).
    """
    # Turn on evaluation mode which disables dropout.
    eval_start_time = time.time()
    model.eval()
    total_loss = 0.
    hidden = model.module.init_hidden(eval_batch_size)
    with torch.no_grad():
        for i in range(0, data_source.size(0) - 1, cfg.MODEL.RNN.BPTT):
            data, targets = get_batch(data_source, i, cfg.MODEL.RNN.BPTT)
            output, hidden = model(data, hidden)
            output_flat = output.view(-1, ntokens)
            # Weight each window's loss by its length (last window may be short).
            total_loss += len(data) * criterion(output_flat, targets).item()
            hidden = repackage_hidden(hidden, 0)
    val_loss = total_loss / (len(data_source) - 1)
    print('-' * 89)
    # NOTE(review): the 'ppl' field prints val_loss, not exp(val_loss).
    print('| end of epoch {:3d} | time: {:5.2f}s | valid loss {:5.2f} | '
          'valid ppl {:8.2f}'.format(epoch, (time.time() - eval_start_time),
                                     val_loss, val_loss))
    test_writer.add_scalar("metrics/top1", val_loss)
    test_writer.add_scalar("metrics/loss", val_loss)
    checkpointer.testloss.append(val_loss)
    checkpointer.testacc.append(val_loss)
    return val_loss
def validate(val_loader, model, criterion,
             cfg, test_writer, checkpointer, device):
    """Evaluate the model on `val_loader` without gradient tracking.

    Records loss and precision@1/@5 (accuracy metric chosen by cfg.LOSS),
    logs every cfg.LOG_FREQ batches, appends epoch averages to
    `checkpointer`/`test_writer`, and returns the average top-1 precision.
    """
    batch_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()
    # switch to evaluate mode
    model.eval()
    with torch.no_grad():
        end = time.time()
        for i, (input, target) in enumerate(val_loader):
            input, target = input.to(device), target.to(device)
            # compute output and record loss
            output = model(input)
            if cfg.LOSS == "bce":
                # BCE criterion expects float targets.
                target = target.type(torch.float32)
            loss = criterion(output, target)
            losses.update(loss.item(), input.size(0))
            # measure and record accuracy
            if cfg.LOSS == "xentropy":
                prec1, prec5 = compute_accuracy(output, target, topk=(1, 5))
                top1.update(prec1[0].item(), input.size(0))
                top5.update(prec5[0].item(), input.size(0))
            elif cfg.LOSS == "bce":
                prec1, prec5 = mean_accuracy_multi_binary_label_with_logits(output, target, topk=(40, 13))
                top1.update(prec1[0].item(), input.size(0))
                top5.update(prec5[0].item(), input.size(0))
            else:
                # Unknown loss type: no accuracy metric is defined.
                top1.update(0.0, input.size(0))
                top5.update(0.0, input.size(0))
            # measure elapsed time
            batch_time.update(time.time() - end)
            end = time.time()
            if i % cfg.LOG_FREQ == 0:
                logging.info('Test: [{0}/{1}]\t'
                             'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                             'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                             'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                             'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
                                 i, len(val_loader), batch_time=batch_time, loss=losses,
                                 top1=top1, top5=top5))
    print(' * Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f}'
          .format(top1=top1, top5=top5))
    test_writer.add_scalar("metrics/top1", top1.avg)
    test_writer.add_scalar("metrics/top5", top5.avg)
    test_writer.add_scalar("metrics/loss", losses.avg)
    checkpointer.testloss.append(losses.avg)
    checkpointer.testacc.append(top1.avg)
    return top1.avg
# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Thu Sep 5 07:07:53 2019
# by: The Resource Compiler for PySide2 (Qt v5.13.1)
#
# WARNING! All changes made in this file will be lost!
from PySide2 import QtCore
qt_resource_data = b"\
\x00\x00\x015\
<\
contact>\x0d\x0a <g\
ivenName>John</g\
ivenName>\x0d\x0a <\
familyName>Doe</\
familyName>\x0d\x0a \
<birthdate>1977\
-12-25</birthdat\
e>\x0d\x0a <homeAdd\
ress>\x0d\x0a <\
street>Sandakerv\
eien 116</street\
>\x0d\x0a <zipC\
ode>N-0550</zipC\
ode>\x0d\x0a <c\
ity>Oslo</city>\x0d\
\x0a <countr\
y>Norway</countr\
y>\x0d\x0a </homeAd\
dress>\x0d\x0a</contac\
t>\x0d\x0a\
\x00\x00\x01\xc8\
<\
order>\x0d\x0a <cus\
tomerId>194223</\
customerId>\x0d\x0a \
<article>\x0d\x0a \
<articleId>2\
2242</articleId>\
\x0d\x0a <count\
>5</count>\x0d\x0a \
</article>\x0d\x0a \
<article>\x0d\x0a \
<articleId>32\
372</articleId>\x0d\
\x0a <count>\
12</count>\x0d\x0a \
<comment>wit\
hout stripes</co\
mment>\x0d\x0a </ar\
ticle>\x0d\x0a <art\
icle>\x0d\x0a <\
articleId>23649<\
/articleId>\x0d\x0a \
<count>2</c\
ount>\x0d\x0a </art\
icle>\x0d\x0a <deli\
veryDate>2009-01\
-23</deliveryDat\
e>\x0d\x0a <payed>t\
rue</payed>\x0d\x0a</o\
rder>\x0d\x0a\
\x00\x00\x06-\
<\
?xml version=\x221.\
0\x22?>\x0d\x0a<xsd:schem\
a xmlns:xsd=\x22htt\
p://www.w3.org/2\
001/XMLSchema\x22>\x0d\
\x0a\x0d\x0a <xsd:elem\
ent name=\x22recipe\
\x22>\x0d\x0a <xsd\
:complexType>\x0d\x0a \
<xsd:\
sequence>\x0d\x0a \
<xsd:\
element name=\x22ti\
tle\x22 type=\x22xsd:s\
tring\x22/>\x0d\x0a \
<xsd:e\
lement name=\x22ing\
redient\x22 type=\x22i\
ngredientType\x22 m\
axOccurs=\x22unboun\
ded\x22/>\x0d\x0a \
<xsd:ele\
ment name=\x22time\x22\
type=\x22timeType\x22\
/>\x0d\x0a \
<xsd:element\
name=\x22method\x22>\x0d\
\x0a \
<xsd:comple\
xType>\x0d\x0a \
\
<xsd:sequence>\x0d\x0a\
\
<xsd\
:element name=\x22s\
tep\x22 type=\x22xsd:s\
tring\x22 maxOccurs\
=\x22unbounded\x22/>\x0d\x0a\
\
</xsd:se\
quence>\x0d\x0a \
</x\
sd:complexType>\x0d\
\x0a \
</xsd:element>\x0d\
\x0a </x\
sd:sequence>\x0d\x0a \
</xsd:comp\
lexType>\x0d\x0a </\
xsd:element>\x0d\x0a\x0d\x0a\
<xsd:complex\
Type name=\x22ingre\
dientType\x22>\x0d\x0a \
<xsd:attrib\
ute name=\x22name\x22 \
type=\x22xsd:string\
\x22/>\x0d\x0a <xs\
d:attribute name\
=\x22quantity\x22 type\
=\x22xsd:positiveIn\
teger\x22/>\x0d\x0a \
<xsd:attribute\
name=\x22unit\x22 typ\
e=\x22xsd:string\x22/>\
\x0d\x0a </xsd:comp\
lexType>\x0d\x0a\x0d\x0a \
<xsd:complexType\
name=\x22timeType\x22\
>\x0d\x0a <xsd:\
attribute name=\x22\
quantity\x22 type=\x22\
xsd:positiveInte\
ger\x22/>\x0d\x0a \
<xsd:attribute n\
ame=\x22unit\x22>\x0d\x0a \
<xsd:si\
mpleType>\x0d\x0a \
<xsd:\
restriction base\
=\x22xsd:string\x22>\x0d\x0a\
\
<xsd:enumera\
tion value=\x22seco\
nds\x22/>\x0d\x0a \
<xsd\
:enumeration val\
ue=\x22minutes\x22/>\x0d\x0a\
\
<xsd:enumera\
tion value=\x22hour\
s\x22/>\x0d\x0a \
</xsd:rest\
riction>\x0d\x0a \
</xsd:simp\
leType>\x0d\x0a \
</xsd:attribute\
>\x0d\x0a </xsd:com\
plexType>\x0d\x0a\x0d\x0a</x\
sd:schema>\x0d\x0a\
\x00\x00\x02c\
<\
recipe>\x0d\x0a <ti\
tle>Cheese on To\
ast</title>\x0d\x0a \
<ingredient nam\
e=\x22Bread\x22 quanti\
ty=\x222\x22 unit=\x22sli\
ces\x22/>\x0d\x0a <ing\
redient name=\x22Ch\
eese\x22 quantity=\x22\
2\x22 unit=\x22slices\x22\
/>\x0d\x0a <time qu\
antity=\x223\x22 unit=\
\x22days\x22/>\x0d\x0a <m\
ethod>\x0d\x0a \
<step>1. Slice t\
he bread and che\
ese.</step>\x0d\x0a \
<step>2. Gr\
ill one side of \
each slice of br\
ead.</step>\x0d\x0a \
<step>3. Tu\
rn over the brea\
d and place a sl\
ice of cheese on\
each piece.</st\
ep>\x0d\x0a <st\
ep>4. Grill unti\
l the cheese has\
started to melt\
.</step>\x0d\x0a \
<step>5. Serve\
and enjoy!</ste\
p>\x0d\x0a </method\
>\x0d\x0a <comment>\
Tell your friend\
s about it!</com\
ment>\x0d\x0a</recipe>\
\x0d\x0a\
\x00\x00\x03\xd4\
<\
?xml version=\x221.\
0\x22?>\x0d\x0a<xsd:schem\
a xmlns:xsd=\x22htt\
p://www.w3.org/2\
001/XMLSchema\x22>\x0d\
\x0a\x0d\x0a <xsd:elem\
ent name=\x22contac\
t\x22>\x0d\x0a <xs\
d:complexType>\x0d\x0a\
<xsd\
:sequence>\x0d\x0a \
<xsd\
:element name=\x22g\
ivenName\x22 type=\x22\
xsd:string\x22/>\x0d\x0a \
<\
xsd:element name\
=\x22familyName\x22 ty\
pe=\x22xsd:string\x22/\
>\x0d\x0a \
<xsd:element \
name=\x22birthdate\x22\
type=\x22xsd:date\x22\
minOccurs=\x220\x22/>\
\x0d\x0a \
<xsd:element n\
ame=\x22homeAddress\
\x22 type=\x22address\x22\
/>\x0d\x0a \
<xsd:element\
name=\x22workAddre\
ss\x22 type=\x22addres\
s\x22 minOccurs=\x220\x22\
/>\x0d\x0a \
</xsd:sequence>\x0d\
\x0a </xsd:c\
omplexType>\x0d\x0a \
</xsd:element>\x0d\
\x0a\x0d\x0a <xsd:comp\
lexType name=\x22ad\
dress\x22>\x0d\x0a \
<xsd:sequence>\x0d\
\x0a <xs\
d:element name=\x22\
street\x22 type=\x22xs\
d:string\x22/>\x0d\x0a \
<xsd:el\
ement name=\x22zipC\
ode\x22 type=\x22xsd:s\
tring\x22/>\x0d\x0a \
<xsd:eleme\
nt name=\x22city\x22 t\
ype=\x22xsd:string\x22\
/>\x0d\x0a \
<xsd:element nam\
e=\x22country\x22 type\
=\x22xsd:string\x22/>\x0d\
\x0a </xsd:s\
equence>\x0d\x0a </\
xsd:complexType>\
\x0d\x0a\x0d\x0a</xsd:schema\
>\x0d\x0a\
\x00\x00\x022\
<\
recipe>\x0d\x0a <ti\
tle>Cheese on To\
ast</title>\x0d\x0a \
<ingredient nam\
e=\x22Bread\x22 quanti\
ty=\x222\x22 unit=\x22sli\
ces\x22/>\x0d\x0a <ing\
redient name=\x22Ch\
eese\x22 quantity=\x22\
2\x22 unit=\x22slices\x22\
/>\x0d\x0a <time qu\
antity=\x223\x22 unit=\
\x22minutes\x22/>\x0d\x0a \
<method>\x0d\x0a \
<step>1. Slic\
e the bread and \
cheese.</step>\x0d\x0a\
<step>2.\
Grill one side \
of each slice of\
bread.</step>\x0d\x0a\
<step>3.\
Turn over the b\
read and place a\
slice of cheese\
on each piece.<\
/step>\x0d\x0a \
<step>4. Grill u\
ntil the cheese \
has started to m\
elt.</step>\x0d\x0a \
<step>5. Se\
rve and enjoy!</\
step>\x0d\x0a </met\
hod>\x0d\x0a</recipe>\x0d\
\x0a\
\x00\x00\x01(\
<\
contact>\x0d\x0a <g\
ivenName>John</g\
ivenName>\x0d\x0a <\
familyName>Doe</\
familyName>\x0d\x0a \
<title>Prof.</t\
itle>\x0d\x0a <work\
Address>\x0d\x0a \
<street>Sandak\
erveien 116</str\
eet>\x0d\x0a <z\
ipCode>N-0550</z\
ipCode>\x0d\x0a \
<city>Oslo</cit\
y>\x0d\x0a <cou\
ntry>Norway</cou\
ntry>\x0d\x0a </wor\
kAddress>\x0d\x0a</con\
tact>\x0d\x0a\
\x00\x00\x01;\
<\
order>\x0d\x0a <cus\
tomerId>234219</\
customerId>\x0d\x0a \
<article>\x0d\x0a \
<articleId>2\
1692</articleId>\
\x0d\x0a <count\
>3</count>\x0d\x0a \
</article>\x0d\x0a \
<article>\x0d\x0a \
<articleId>24\
749</articleId>\x0d\
\x0a <count>\
9</count>\x0d\x0a <\
/article>\x0d\x0a <\
deliveryDate>200\
9-01-23</deliver\
yDate>\x0d\x0a <pay\
ed>yes</payed>\x0d\x0a\
</order>\x0d\x0a\
\x00\x00\x03~\
<\
?xml version=\x221.\
0\x22?>\x0d\x0a<xsd:schem\
a xmlns:xsd=\x22htt\
p://www.w3.org/2\
001/XMLSchema\x22>\x0d\
\x0a\x0d\x0a <xsd:elem\
ent name=\x22order\x22\
>\x0d\x0a <xsd:\
complexType>\x0d\x0a \
<xsd:s\
equence>\x0d\x0a \
<xsd:e\
lement name=\x22cus\
tomerId\x22 type=\x22x\
sd:positiveInteg\
er\x22/>\x0d\x0a \
<xsd:elem\
ent name=\x22articl\
e\x22 type=\x22article\
Type\x22 maxOccurs=\
\x22unbounded\x22/>\x0d\x0a \
<\
xsd:element name\
=\x22deliveryDate\x22 \
type=\x22xsd:date\x22/\
>\x0d\x0a \
<xsd:element \
name=\x22payed\x22 typ\
e=\x22xsd:boolean\x22/\
>\x0d\x0a <\
/xsd:sequence>\x0d\x0a\
</xsd:co\
mplexType>\x0d\x0a \
</xsd:element>\x0d\x0a\
\x0d\x0a <xsd:compl\
exType name=\x22art\
icleType\x22>\x0d\x0a \
<xsd:sequenc\
e>\x0d\x0a \
<xsd:element nam\
e=\x22articleId\x22 ty\
pe=\x22xsd:positive\
Integer\x22/>\x0d\x0a \
<xsd:ele\
ment name=\x22count\
\x22 type=\x22xsd:posi\
tiveInteger\x22/>\x0d\x0a\
<xsd\
:element name=\x22c\
omment\x22 type=\x22xs\
d:string\x22 minOcc\
urs=\x220\x22/>\x0d\x0a \
</xsd:sequenc\
e>\x0d\x0a </xsd:co\
mplexType>\x0d\x0a\x0d\x0a</\
xsd:schema>\x0d\x0a\
"
qt_resource_name = b"\
\x00\x0e\
\x00vJ\x1c\
\x00i\
\x00n\x00s\x00t\x00a\x00n\x00c\x00e\x00_\x000\x00.\x00x\x00m\x00l\
\x00\x0e\
\x00rJ\x1c\
\x00i\
\x00n\x00s\x00t\x00a\x00n\x00c\x00e\x00_\x004\x00.\x00x\x00m\x00l\
\x00\x0c\
\x08\x13\x87\xf4\
\x00s\
\x00c\x00h\x00e\x00m\x00a\x00_\x001\x00.\x00x\x00s\x00d\
\x00\x0e\
\x00sJ\x1c\
\x00i\
\x00n\x00s\x00t\x00a\x00n\x00c\x00e\x00_\x003\x00.\x00x\x00m\x00l\
\x00\x0c\
\x08\x10\x87\xf4\
\x00s\
\x00c\x00h\x00e\x00m\x00a\x00_\x000\x00.\x00x\x00s\x00d\
\x00\x0e\
\x00pJ\x1c\
\x00i\
\x00n\x00s\x00t\x00a\x00n\x00c\x00e\x00_\x002\x00.\x00x\x00m\x00l\
\x00\x0e\
\x00yJ\x1c\
\x00i\
\x00n\x00s\x00t\x00a\x00n\x00c\x00e\x00_\x001\x00.\x00x\x00m\x00l\
\x00\x0e\
\x00uJ\x1c\
\x00i\
\x00n\x00s\x00t\x00a\x00n\x00c\x00e\x00_\x005\x00.\x00x\x00m\x00l\
\x00\x0c\
\x08\x16\x87\xf4\
\x00s\
\x00c\x00h\x00e\x00m\x00a\x00_\x002\x00.\x00x\x00s\x00d\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x09\x00\x00\x00\x01\
\x00\x00\x00\xa2\x00\x00\x00\x00\x00\x01\x00\x00\x0fu\
\x00\x00\x00\x22\x00\x00\x00\x00\x00\x01\x00\x00\x019\
\x00\x00\x00b\x00\x00\x00\x00\x00\x01\x00\x00\x096\
\x00\x00\x00\xe6\x00\x00\x00\x00\x00\x01\x00\x00\x12\xd7\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\xc4\x00\x00\x00\x00\x00\x01\x00\x00\x11\xab\
\x00\x00\x00\x84\x00\x00\x00\x00\x00\x01\x00\x00\x0b\x9d\
\x00\x00\x00D\x00\x00\x00\x00\x00\x01\x00\x00\x03\x05\
\x00\x00\x01\x08\x00\x00\x00\x00\x00\x01\x00\x00\x14\x16\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 10,449 | 6,256 |
from .fcd import FCD
from .fcd import calculate_frechet_distance
__all__ = ['FCD', 'calculate_frechet_distance']
| 114 | 42 |
import gym
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam
from agents.dqn import DQN
def create_model(states, actions):
model = Sequential()
model.add(Dense(24, input_dim=states, activation='relu'))
model.add(Dense(24, activation='relu'))
model.add(Dense(actions, activation='linear'))
model.compile(loss='mse', optimizer=Adam(lr=1e-4))
return model
def play(gym_id, episodes=1, agent=None):
env = gym.make(gym_id)
for e in range(episodes):
state = env.reset()
total_reward = 0.
for t in range(500):
if agent is None:
action = env.action_space.sample() # take a random action
else:
action = agent.act(np.reshape(state, [1, agent.state_size]))
state, reward, done, _ = env.step(action)
total_reward += reward
if done:
print('Episode {}/{} done in {} steps, total reward {}: '.format(e+1, episodes, t+1, total_reward))
break
env.close()
def learn(gym_id, episodes=1000, batch_size=32, model_path="models/model.h5"):
env = gym.make(gym_id)
num_states = env.observation_space.shape[0]
num_actions = env.action_space.n
agent = DQN(create_model(num_states, num_actions))
for e in range(episodes):
state = env.reset()
state = np.reshape(state, [1, num_states])
total_reward = 0.
for steps in range(500):
action = agent.act(state)
next_state, reward, done, _ = env.step(action)
next_state = np.reshape(next_state, [1, agent.state_size])
agent.remember(state, action, reward, next_state, done)
total_reward += reward
state = next_state
if done:
print('Episode {}/{} done in {} steps, total reward {}: '.format(e+1, episodes, steps+1, total_reward))
if total_reward >= 200:
agent.save(model_path)
return agent
break
if agent.memory_size > batch_size:
agent.train(batch_size) # train the agent with the experience of the episode
env.close()
return None
if __name__ == '__main__':
agent = learn('CartPole-v0', episodes=1000, batch_size=24, model_path="./models/cartpole-full.h5")
play('CartPole-v0', episodes=5, agent=agent) | 2,441 | 781 |
"""
Date: 28/10/2021
Neo4J generator for ImmunoPoli project
"""
import neo4j as nj
import App.PlotDBStructure as ps
from random import randint, random
from enum import IntEnum
import datetime
# --- Data-generation size parameters ---
MAX_CIVIC_NUMBER = 100  # upper bound for randomly generated civic numbers
PHONE_NUMBER_LENGTH = 10  # digits in a generated phone number
MAX_NUMBER_OF_FAMILY_MEMBER = 5
NUMBER_OF_FAMILY = 150
MAX_NUMBER_OF_CONTACT = 2000  # For new contact relationships
MAX_NUMBER_OF_VISIT = 5000  # For new visit relationships
MAX_NUMBER_OF_VACCINE = 750  # For new get vaccinated relationships
MAX_NUMBER_OF_TEST = 4000  # For new make test relationships
# --- Probabilities used when randomizing people/tests ---
PROBABILITY_TO_HAVE_APP = 0.5
PROBABILITY_TO_BE_POSITIVE = 0.5
PROBABILITY_TO_BE_TESTED_AFTER_INFECTED = 0.8
MAX_NUMBER_OF_ATTEMPTS_FOR_VALID_DATE = 15  # retries before giving up on a random date
# --- How far back in time (days) each relationship kind may be generated ---
CONTACT_DAYS_BACKS = 10
VISITS_DAYS_BACKS = 150
VACCINES_DAYS_BACKS = 150
TESTS_DAYS_BACKS = 150
# Local-instance settings kept for reference:
# BOLT = "bolt://localhost:7687"
# PASSWORD = "991437"
# NOTE(review): credentials are hardcoded in source - move USER/PASSWORD/URI
# to environment variables or a secrets store before sharing this file.
USER = "neo4j"
PASSWORD = "cJhfqi7RhIHR4I8ocQtc5pFPSEhIHDVJBCps3ULNzbA"
URI = "neo4j+s://057f4a80.databases.neo4j.io"
class PersonAttribute(IntEnum):
    """
    Enum of the attribute positions of a Person node.

    The integer value of each member is the index of that attribute inside
    the per-person list built by createFamilies().
    """
    NAME = 0
    SURNAME = 1
    AGE = 2
    MAIL = 3
    NUMBER = 4
    APP = 5
    # And so on...

    @classmethod
    def numberOfAttribute(cls):
        """Return how many attributes are declared (number of enum members)."""
        # len() on an Enum class counts its members; no manual loop needed.
        return len(cls)
class LocationAttribute(IntEnum):
    """
    Enum of the attribute positions of a Location node.

    The integer value of each member is the index of that attribute inside
    a row read by readLocations().
    """
    TYPE = 0
    NAME = 1
    ADDRESS = 2
    CIVIC_NUMBER = 3
    CAP = 4
    CITY = 5
    PROVINCE = 6
    # and so on ...

    @classmethod
    def numberOfAttribute(cls):
        """Return how many attributes are declared (number of enum members)."""
        # len() on an Enum class counts its members; no manual loop needed.
        return len(cls)
class HouseAttribute(IntEnum):
    """
    Enum of the attribute positions of a House address row
    (as read by readHouseAddresses()).
    """
    ADDRESS = 0
    CAP = 1
    CITY = 2
    PROVINCE = 3

    @classmethod
    def numberOfAttribute(cls):
        """Return how many attributes are declared (number of enum members)."""
        # len() on an Enum class counts its members; no manual loop needed.
        return len(cls)
class VaccineAttribute(IntEnum):
    """
    Enum of the attribute positions of a Vaccine row
    (the original docstring wrongly said "Location").
    """
    NAME = 0
    PRODUCER = 1
    # and so on ...

    @classmethod
    def numberOfAttribute(cls):
        """Return how many attributes are declared (number of enum members)."""
        # len() on an Enum class counts its members; no manual loop needed.
        return len(cls)
def openConnection():
    """
    Open a driver connection to the Neo4j instance configured by the
    module-level URI/USER/PASSWORD constants.
    :return: the driver for the connection
    """
    credentials = nj.basic_auth(USER, PASSWORD)
    return nj.GraphDatabase.driver(uri=URI, auth=credentials)
def closeConnection(connection):
    """
    Close a previously opened driver connection.
    :param connection: is the connection to terminate
    """
    connection.close()
def readNames():
    """
    Read the pool of first names from Files/Names.txt.

    Blank lines are skipped; each remaining line is stripped of
    surrounding whitespace.
    :return: a list containing the names
    """
    # The with-statement closes the file; the original also called
    # f.close() redundantly inside the block.
    with open("Files/Names.txt", 'r', encoding='utf8') as f:
        # strip() is equivalent to the original rstrip('\n').rstrip().lstrip()
        return [line.strip() for line in f if line != "\n"]
def readSurnames():
    """
    Read the pool of surnames from Files/Surnames.txt.

    Blank lines are skipped; each remaining line is stripped of
    surrounding whitespace.
    :return: a list containing the surnames
    """
    # The with-statement closes the file; the original also called
    # f.close() redundantly inside the block.
    with open("Files/Surnames.txt", 'r', encoding='utf8') as f:
        # strip() is equivalent to the original rstrip('\n').rstrip().lstrip()
        return [line.strip() for line in f if line != "\n"]
def readLocations():
    """
    Read the public places from Files/PublicPlaces.txt.

    Each non-blank line is a comma-separated record; every field is
    stripped of surrounding whitespace.
    :return: a list of field lists, one per location
    """
    # The with-statement closes the file; the original also called
    # f.close() redundantly inside the block.
    with open("Files/PublicPlaces.txt", 'r', encoding='utf8') as f:
        # strip() is equivalent to the original rstrip('\n').rstrip().lstrip()
        return [[detail.strip() for detail in line.split(",")]
                for line in f if line != "\n"]
def readHouseAddresses():
    """
    Read the house addresses from Files/HouseAddresses.txt.

    Each non-blank line is a comma-separated record; every field is
    stripped of surrounding whitespace.
    :return: a list of field lists, one per address
    """
    # The with-statement closes the file; the original also called
    # f.close() redundantly inside the block.
    with open("Files/HouseAddresses.txt", 'r', encoding='utf8') as f:
        # strip() is equivalent to the original rstrip('\n').rstrip().lstrip()
        return [[detail.strip() for detail in line.split(",")]
                for line in f if line != "\n"]
def readVaccines():
    """
    Read the vaccines from Files/Vaccines.txt.

    Each non-blank line is a comma-separated record (name, producer, ...);
    every field is stripped of surrounding whitespace.  Blank lines are
    now skipped, for consistency with the other read* helpers (the
    original turned a blank line into the bogus record ['']).
    :return: a list of field lists, one per vaccine
    """
    # The with-statement closes the file; the original also called
    # close() redundantly inside the block.
    with open("Files/Vaccines.txt", 'r', encoding='utf8') as vaccine_file:
        # strip() is equivalent to the original lstrip().rstrip().rstrip('\n')
        return [[detail.strip() for detail in line.split(",")]
                for line in vaccine_file if line != "\n"]
def readTests():
    """
    Read the possible test types from Files/Tests.txt.

    Blank lines are skipped; each remaining line is stripped of
    surrounding whitespace.
    :return: a list containing the test type names
    """
    # The with-statement closes the file; the original also called
    # f.close() redundantly inside the block.
    with open("Files/Tests.txt", 'r', encoding='utf8') as f:
        # strip() is equivalent to the original rstrip('\n').rstrip().lstrip()
        return [line.strip() for line in f if line != "\n"]
def deleteAll(tx):
    """
    Delete APP_CONTACT relationships older than 10 days.

    (Despite the function name, only stale APP_CONTACT edges are removed;
    no nodes are deleted.)
    :param tx: is the transaction
    :return: nothing
    """
    # Fix: the original concatenated the fragments without separating
    # spaces, producing '...(p2:Person)WHERE...' and '...time())DELETE a',
    # which is not valid Cypher.  Also use the canonical lowercase
    # duration key 'days'.
    query = (
        "MATCH(p1:Person)-[a:APP_CONTACT]->(p2:Person) "
        "WHERE a.date < date() - duration({days: 10}) OR (a.date = date() - duration({days: 10}) AND a.hour < time()) "
        "DELETE a"
    )
    tx.run(query)
def countAll(tx):
    """
    Count the nodes in the database.
    :param tx: is the transaction
    :return: a single-element list holding the node count
    """
    records = tx.run(
        "MATCH (n) "
        "RETURN COUNT(n) AS count "
        "LIMIT $limit",
        limit=10,
    )
    return [record["count"] for record in records]
def findAll(tx):
    """
    Fetch the whole graph as (node1, relationship, node2) triples.
    :param tx: is the transaction
    :return: a list of 3-tuples covering every relationship
    """
    records = tx.run(
        "MATCH (n1)-[r]->(n2) "
        "RETURN n1 AS node1 , r AS relationship , n2 AS node2 "
    )
    return [(rec["node1"], rec["relationship"], rec["node2"]) for rec in records]
def findAllPerson(tx):
    """
    Fetch every Person node together with its internal id.
    :param tx: is the transaction
    :return: a list of result records
    """
    return tx.run("MATCH (p:Person) "
                  "RETURN p , ID(p);").data()
def findAllHome(tx):
    """
    Fetch every House node together with its internal id.
    :param tx: is the transaction
    :return: a list of result records
    """
    return tx.run("MATCH (h:House) "
                  "RETURN h , ID(h);").data()
def findAllLocation(tx):
    """
    Fetch every Location node together with its internal id.
    :param tx: is the transaction
    :return: a list of result records
    """
    return tx.run("MATCH (l:Location) "
                  "RETURN l , ID(l);").data()
def findAllVaccine(tx):
    """
    Fetch every Vaccine node together with its internal id.
    :param tx: is the transaction
    :return: a list of result records
    """
    return tx.run("MATCH (v:Vaccine) "
                  "RETURN v , ID(v);").data()
def findAllTest(tx):
    """
    Fetch every Test node together with its internal id.
    :param tx: is the transaction
    :return: a list of result records
    """
    return tx.run("MATCH (t:Test) "
                  "RETURN t , ID(t);").data()
def findAllLiveRelationships(tx):
    """
    Fetch every LIVE relationship (Person -> House) with endpoint ids.
    :param tx: is the transaction
    :return: a list of result records
    """
    return tx.run("MATCH (n1:Person)-[r:LIVE]->(n2:House) "
                  "RETURN ID(n1) , r , ID(n2);").data()
def findAllAppContactRelationships(tx):
    """
    Fetch every APP_CONTACT relationship (Person -> Person) with its
    date/hour attributes and endpoint ids.
    :param tx: is the transaction
    :return: a list of result records
    """
    return tx.run("MATCH (n1:Person)-[r:APP_CONTACT]->(n2:Person) "
                  "RETURN ID(n1) , r , r.date , r.hour, ID(n2);").data()
def findAllVisitRelationships(tx):
    """
    Fetch every VISIT relationship (Person -> Location) with its
    date/start/end attributes and endpoint ids.
    :param tx: is the transaction
    :return: a list of result records
    """
    return tx.run("MATCH (n1:Person)-[r:VISIT]->(n2:Location) "
                  "RETURN ID(n1) , r , r.date , r.start_hour , r.end_hour , ID(n2);").data()
def findAllGetVaccineRelationships(tx):
    """
    Fetch every GET_VACCINE relationship (Person -> Vaccine) with its
    date/country/expiration attributes and endpoint ids.
    :param tx: is the transaction
    :return: a list of result records
    """
    return tx.run("MATCH (n1:Person)-[r:GET_VACCINE]->(n2:Vaccine) "
                  "RETURN ID(n1) , r , r.date , r.country , r.expirationDate , ID(n2);").data()
def findAllMakeTestRelationships(tx):
    """
    Fetch every MAKE_TEST relationship (Person -> Test) with its
    date/hour/result attributes and endpoint ids.
    :param tx: is the transaction
    :return: a list of result records
    """
    return tx.run("MATCH (n1:Person)-[r:MAKE_TEST]->(n2:Test) "
                  "RETURN ID(n1) , r , r.date , r.hour , r.result , ID(n2);").data()
def findAllInfectedRelationships(tx):
    """
    Fetch every COVID_EXPOSURE relationship (Person -> Person) with its
    date/name attributes and endpoint ids.
    :param tx: is the transaction
    :return: a list of result records
    """
    return tx.run("MATCH (n1:Person)-[r:COVID_EXPOSURE]->(n2:Person) "
                  "RETURN ID(n1) , r , r.date , r.name , ID(n2);").data()
def createFamilies(namesList, surnamesList):
    """
    Build a random list of families, each family being a list of members
    and each member a list indexed by PersonAttribute.
    :param namesList: pool of first names to draw from
    :param surnamesList: pool of surnames to draw from
    :return: a list of families (list of member-attribute lists)
    """
    familiesList = []
    surnameIndex = 0
    for _ in range(0, NUMBER_OF_FAMILY):
        # Choose a size for the family
        numberOfMembers = randint(1, MAX_NUMBER_OF_FAMILY_MEMBER)
        familyEl = [None] * numberOfMembers
        casualFamily = False
        for j in range(0, len(familyEl)):
            familyEl[j] = [None] * PersonAttribute.numberOfAttribute()
            # Random first name.  Fix: the original indexed with
            # len(names) / surnames (module globals) instead of the
            # namesList / surnamesList parameters.
            name = str(namesList[randint(0, len(namesList) - 1)])
            familyEl[j][int(PersonAttribute.NAME)] = name
            # Take the next surname from the rotating pool
            surname = str(surnamesList[surnameIndex])
            familyEl[j][int(PersonAttribute.SURNAME)] = surname
            # First member is an adult; the others can be any age
            if j == 0:
                age = randint(18, 99)
            else:
                age = randint(1, 99)
            familyEl[j][int(PersonAttribute.AGE)] = age
            # Synthesize a mail address from name/surname/age
            mail = name.lower() + "." + surname.lower() + str(age) + "@immunoPoli.it"
            familyEl[j][int(PersonAttribute.MAIL)] = mail
            # Random PHONE_NUMBER_LENGTH-digit phone number
            number = 0
            for i in range(0, PHONE_NUMBER_LENGTH):
                number += randint(0, 9) * 10 ** i
            familyEl[j][int(PersonAttribute.NUMBER)] = number
            # Does this person use the contact-tracing app?
            if random() < PROBABILITY_TO_HAVE_APP:
                app = "True"
            else:
                app = "False"
            familyEl[j][int(PersonAttribute.APP)] = app
            # In every family there will be at least 2 surnames.
            # With 30% probability the household is a group of friends,
            # so more than 2 surnames can appear.
            if j == 0 and randint(0, 100) < 30:  # Family of not familiar
                casualFamily = True
            if j == 0 or (numberOfMembers > 2 and casualFamily):
                surnameIndex += 1
                if surnameIndex >= len(surnamesList):
                    surnameIndex = 0
        familiesList.append(familyEl)
        surnameIndex += 1
        if surnameIndex >= len(surnamesList):
            surnameIndex = 0
    return familiesList
def createNodesFamily(familiesList, houseAddressesList):
    """
    Build the Cypher queries that create the Person and House nodes and
    the LIVE relationships for every family.
    :param houseAddressesList: is the list containing addresses for houses
    :param familiesList: is the list of families
    :return: (creationQuery, relationshipsQuery) - two lists of query strings
    """
    # NOTE(review): values are spliced into the Cypher text by string
    # concatenation.  Acceptable for locally generated data, but unsafe
    # (query injection) if any field ever came from untrusted input.
    creationQuery = []  # Will contain all the node-creation queries
    relationshipsQuery = []  # Will contain all the relationship-creation queries
    for familyEl in familiesList:
        # One CREATE per family member
        for memberEl in familyEl:
            currentQuery = (
                "CREATE (p:Person {name: \"" + str(memberEl[int(PersonAttribute.NAME)]) + "\" , surname: \"" +
                str(memberEl[int(PersonAttribute.SURNAME)]) + "\" , age: \"" + str(
                    memberEl[int(PersonAttribute.AGE)]) +
                "\" , mail: \"" + str(memberEl[int(PersonAttribute.MAIL)]) + "\" , number: \"" +
                str(memberEl[int(PersonAttribute.NUMBER)]) + "\" , app: \"" +
                str(memberEl[int(PersonAttribute.APP)]) + "\"}); "
            )
            creationQuery.append(currentQuery)
        # The house is named after the first member of the family
        memberFamily = familyEl[0]
        familyName = memberFamily[PersonAttribute.NAME] + " " + memberFamily[PersonAttribute.SURNAME] + " house"
        addressIndex = randint(0, len(houseAddressesList) - 1)
        address = houseAddressesList[addressIndex]
        civicNumber = randint(0, MAX_CIVIC_NUMBER)
        currentQuery = (
            "CREATE (h:House {name: \"" + str(familyName) + "\" , address: \"" + str(
                address[HouseAttribute.ADDRESS]) +
            "\", civic_number: \"" + str(civicNumber) + "\" , CAP: \"" + str(address[HouseAttribute.CAP]) +
            "\", city: \"" + str(address[HouseAttribute.CITY]) + "\" , province: \""
            + str(address[HouseAttribute.PROVINCE]) + "\"}); "
        )
        creationQuery.append(currentQuery)
        # Create the LIVE relationships: the MATCH re-identifies both the
        # person and the house by all their attributes
        for memberEl in familyEl:
            currentQuery = (
                "MATCH (p:Person) , (h:House) "
                "WHERE p.name = \"" + str(memberEl[int(PersonAttribute.NAME)]) +
                "\" AND p.surname = \"" + str(memberEl[int(PersonAttribute.SURNAME)]) + "\" AND p.age= \"" +
                str(memberEl[int(PersonAttribute.AGE)]) + "\" AND h.name = \"" + str(familyName) +
                "\" AND h.address = \"" + str(address[HouseAttribute.ADDRESS]) + "\" AND h.civic_number = \"" +
                str(civicNumber) + "\" AND h.CAP = \"" + str(address[HouseAttribute.CAP]) +
                "\" AND h.city = \"" + str(address[HouseAttribute.CITY]) + "\" AND h.province = \"" +
                str(address[HouseAttribute.PROVINCE]) + "\" "
                "CREATE (p)-[:LIVE]->(h);"
            )
            relationshipsQuery.append(currentQuery)
    return creationQuery, relationshipsQuery
def createNodeLocations(locationsList):
    """
    Build the CREATE queries for the public-place (Location) nodes.
    :param locationsList: is a list containing all the locations
    :return: a list of CREATE query strings, one per location
    """
    locationsQuery = []
    for locationEl in locationsList:
        # Pull the fields out by their enum positions first, for readability
        name = str(locationEl[int(LocationAttribute.NAME)])
        kind = str(locationEl[int(LocationAttribute.TYPE)])
        address = str(locationEl[int(LocationAttribute.ADDRESS)])
        civic = str(locationEl[int(LocationAttribute.CIVIC_NUMBER)])
        cap = str(locationEl[int(LocationAttribute.CAP)])
        city = str(locationEl[int(LocationAttribute.CITY)])
        province = str(locationEl[int(LocationAttribute.PROVINCE)])
        locationsQuery.append(
            'CREATE (l:Location {name: "' + name + '" , type: "' + kind +
            '" , address: "' + address + '" , civic_number: "' + civic +
            '", CAP: "' + cap + '" , city: "' + city +
            '" , province: "' + province + '"}); '
        )
    return locationsQuery
def createNodeVaccines(vaccinesList):
    """
    Build the CREATE queries for the Vaccine nodes.
    :param vaccinesList: is a list containing all the vaccines
    :return: a list of CREATE query strings, one per vaccine
    """
    vaccinesQuery = []
    for vaccineEl in vaccinesList:
        name = str(vaccineEl[int(VaccineAttribute.NAME)])
        producer = str(vaccineEl[int(VaccineAttribute.PRODUCER)])
        vaccinesQuery.append(
            'CREATE (v:Vaccine {name: "' + name + '" , producer: "' + producer + '"}); '
        )
    return vaccinesQuery
def createNodeTests(testsList):
    """
    Build the CREATE queries for the Test nodes.
    :param testsList: is a list containing all the possible type of tests
    :return: a list of CREATE query strings, one per test type
    """
    return ['CREATE (t:Test {name: "' + str(testEl) + '"}); '
            for testEl in testsList]
def createRelationshipsAppContact(d, pIds):
    """
    Create random bidirectional APP_CONTACT relationships between people.
    :param d: is the connection (driver)
    :param pIds: list of Person ids
    :return: nothing
    """
    for _ in range(0, MAX_NUMBER_OF_CONTACT):
        # Choose two distinct random people
        pId1 = pIds[randint(0, len(pIds) - 1)]
        pId2 = pIds[randint(0, len(pIds) - 1)]
        if pId1 == pId2:
            continue
        # Random date within the contact window and random time of day
        date = (datetime.date.today()
                - datetime.timedelta(days=randint(0, CONTACT_DAYS_BACKS))).strftime("%Y-%m-%d")
        hour = f"{randint(0, 23)}:{randint(0, 59):02d}:00"
        # Retry until the date is valid for BOTH people.
        # Fix: the original condition was `not (v1 or not v2)`, which
        # accepted dates invalid for one of the two; it also retried with
        # a hardcoded 20-day window instead of CONTACT_DAYS_BACKS.
        n = 0
        while not (validateDate(d, date, pId1, hour)
                   and validateDate(d, date, pId2, hour)) \
                and n < MAX_NUMBER_OF_ATTEMPTS_FOR_VALID_DATE:
            date = (datetime.date.today()
                    - datetime.timedelta(days=randint(0, CONTACT_DAYS_BACKS))).strftime("%Y-%m-%d")
            hour = f"{randint(0, 23)}:{randint(0, 59):02d}:00"
            n = n + 1
        if n == MAX_NUMBER_OF_ATTEMPTS_FOR_VALID_DATE:
            continue
        # The contact is symmetric, so it is MERGEd in both directions
        query = (
            "MATCH (p1:Person) , (p2:Person) "
            "WHERE ID(p1) = $pId1 AND ID(p2) = $pId2 "
            "MERGE (p1)-[:APP_CONTACT { hour: time($hour) , date: date($date)}]->(p2) "
            "MERGE (p1)<-[:APP_CONTACT { hour: time($hour) , date: date($date)}]-(p2)"
        )
        # Execute the query
        with d.session() as s:
            s.write_transaction(createContact, query, pId1, pId2, hour, date)
def _randomVisitSlot(daysBack):
    """Return (date, start_hour, end_hour) strings for one random visit."""
    date = (datetime.date.today()
            - datetime.timedelta(days=randint(0, daysBack))).strftime("%Y-%m-%d")
    startH = randint(0, 22)
    startHour = f"{startH}:{randint(0, 59):02d}"
    # End hour is never earlier than the start hour
    endHour = f"{randint(startH, 23)}:{randint(0, 59):02d}"
    return date, startHour, endHour


def createRelationshipsVisit(d, pIds, lIds):
    """
    Create random VISIT relationships between people and locations.
    :param d: is the connection (driver)
    :param pIds: is a list of Person ids
    :param lIds: is a list of Location ids
    :return: nothing
    """
    for _ in range(0, MAX_NUMBER_OF_VISIT):
        locationId = lIds[randint(0, len(lIds) - 1)]
        personId = pIds[randint(0, len(pIds) - 1)]
        # Fix: the retry branch hardcoded randint(0, 150) instead of
        # VISITS_DAYS_BACKS; the duplicated date/hour generation is now
        # a single helper.
        date, startHour, endHour = _randomVisitSlot(VISITS_DAYS_BACKS)
        n = 0
        while not validateDate(d, date, personId, endHour) \
                and n < MAX_NUMBER_OF_ATTEMPTS_FOR_VALID_DATE:
            date, startHour, endHour = _randomVisitSlot(VISITS_DAYS_BACKS)
            n = n + 1
        if n == MAX_NUMBER_OF_ATTEMPTS_FOR_VALID_DATE:
            continue
        query = (
            "MATCH (p:Person) , (l:Location) "
            "WHERE ID(p) = $personId AND ID(l) = $locationId "
            "MERGE (p)-[:VISIT {date: date($date) , start_hour: time($startHour) , end_hour: time($endHour)}]->(l); "
        )
        # Execute the query
        with d.session() as s:
            s.write_transaction(createVisit, query, personId, locationId, date, startHour, endHour)
def validateDate(d, date, personId, hour):
    """
    Decide whether a date/hour is usable for a person: it is valid when
    the person's most recent test strictly before that moment is either
    missing or Negative.
    :param d: driver
    :param date: date to check
    :param personId: person to check
    :param hour: hour to check
    :return: True if the date is valid for this person
    """
    query = (
        "MATCH (p:Person)-[r:MAKE_TEST]->(:Test) "
        "WHERE ID(p) = $personId AND (date($date)>r.date OR(date($date)=r.date AND time($hour)>r.hour)) "
        "RETURN r.date as date,r.result as result,r.hour as hour "
        "ORDER BY date DESC "
        "LIMIT 1 ")
    with d.session() as s:
        lastTest = s.read_transaction(checkDate, query, personId, date, hour)
    # No earlier test at all, or a negative one -> the date is valid
    return not lastTest or lastTest[0]["result"] == "Negative"
def createRelationshipsGetVaccine(d, pIds, vIds):
    """
    Create random GET_VACCINE relationships.  A person with no dose gets a
    first one (expiring after 28 days); a person with exactly one dose gets
    the second dose of the same vaccine (expiring after 365 days); a person
    with two doses is skipped.
    :param d: is the connection (driver)
    :param pIds: is a list of Person ids
    :param vIds: is a list of Vaccine ids
    :return: nothing
    """
    numberOfVaccines = MAX_NUMBER_OF_VACCINE
    for _ in range(0, numberOfVaccines):
        vaccineId = vIds[randint(0, len(vIds) - 1)]
        personId = pIds[randint(0, len(pIds) - 1)]
        date = datetime.date.today() - datetime.timedelta(days=randint(0, VACCINES_DAYS_BACKS))
        country = "Italy"
        # For the future: maybe do a random country
        # Ask the neo4j server how many vaccine doses the user already has.
        # Fix: the original concatenated "...'GET_VACCINE'" directly with
        # "RETURN ...", producing invalid Cypher (missing space).
        query = (
            "MATCH (p:Person)-[r]->(v:Vaccine) "
            "WHERE ID(p) = $personId AND type(r)='GET_VACCINE' "
            "RETURN count(p) as count,ID(v) as vaccineID,r.expirationDate as date"
        )
        with d.session() as s:
            datas = s.read_transaction(gettingNumberVaccines, query, personId)
        # if no vaccines do one, else make the second vaccine
        if len(datas) == 0:
            string2 = str(date + datetime.timedelta(days=28)).split("-")
            expDate = datetime.date(int(string2[0]), int(string2[1]), int(string2[2]))
        else:
            if len(datas) == 1:
                # Second dose: dated at the first dose's expiration,
                # same vaccine product, valid for one year
                string1 = str(datas[0]["date"]).split("-")
                date = datetime.date(int(string1[0]), int(string1[1]), int(string1[2]))
                string2 = str(date + datetime.timedelta(days=365)).split("-")
                expDate = datetime.date(int(string2[0]), int(string2[1]), int(string2[2]))
                vaccineId = datas[0]["vaccineID"]
            else:
                continue
        date = date.strftime("%Y-%m-%d")
        expDate = expDate.strftime("%Y-%m-%d")
        query = (
            "MATCH (p:Person) , (v:Vaccine) "
            "WHERE ID(p) = $personId AND ID(v) = $vaccineId "
            "MERGE (p)-[:GET_VACCINE{date:date($date),country:$country,expirationDate:date($expDate)}]->(v); "
        )
        # Execute the query
        with d.session() as s:
            s.write_transaction(createGettingVaccine, query, personId, vaccineId, date, country, expDate)
def createRelationshipsMakeTest(d, pIds, tIds):
    """
    Method that creates MAKE test relationships
    :param d: is the connection (driver)
    :param pIds: is a list of Person ids
    :param tIds: is a list of Test ids
    :return: nothing
    """
    # Choose how many new test relationships
    numberOfTest = MAX_NUMBER_OF_TEST
    for _ in range(0, numberOfTest):
        probability = random()
        # Pick a random test node and a random person
        tIndex = randint(0, len(tIds) - 1)
        testId = tIds[tIndex]
        pIndex = randint(0, len(pIds) - 1)
        personId = pIds[pIndex]
        # Random test date within the configured look-back window
        date = datetime.date.today() - datetime.timedelta(days=randint(0, TESTS_DAYS_BACKS))
        h = randint(0, 23)
        minutes = randint(0, 59)
        string_date = date.strftime("%Y-%m-%d")
        # FIX: zero-pad both hour and minutes. The previous code produced e.g.
        # "9:05", but Cypher's time() expects an ISO-8601 "HH:MM" string.
        hour = "%02d:%02d" % (h, minutes)
        if probability < PROBABILITY_TO_BE_POSITIVE:
            result = "Positive"
        else:
            result = "Negative"
        query = (
            "MATCH (p:Person) , (t:Test) "
            "WHERE ID(p) = $personId AND ID(t) = $testId "
            "MERGE (p)-[:MAKE_TEST{date:date($date) , hour: time($hour) ,result:$result}]->(t); "
        )
        # If negative, all infections have to be neglected
        if probability >= PROBABILITY_TO_BE_POSITIVE:
            # Check whether or not I have been infected by someone
            delete_possible_infection_command = (
                "MATCH ()-[i:COVID_EXPOSURE]->(p:Person)"
                "WHERE ID(p) = $personId AND (date($date) >= i.date + duration({days: 7})) "
                "DELETE i"
            )
            with d.session() as s:
                s.write_transaction(delete_possible_infection, delete_possible_infection_command,
                                    personId, string_date, hour)
        # Execute the query
        with d.session() as s:
            s.write_transaction(createMakingTest, query, personId, testId, string_date, hour, result)
def delete_possible_infection(tx, command, personId, date, hour):
    """
    Run the delete-infection command inside the given transaction.
    :param tx: active transaction
    :param command: delete infection command to be performed
    :param personId: person whose infection is deleted
    :param date: date of the test
    :param hour: hour of the test
    """
    parameters = {"personId": personId, "date": date, "hour": hour}
    tx.run(command, **parameters)
def createVisit(tx, query, personId, locationId, date, startHour, endHour):
    """
    Execute the query that creates a VISIT relationship.
    :param tx: is the transaction
    :param query: is the query to create a visit relationship
    :param personId: is the id of the Person
    :param locationId: is the id of the Location
    :param date: date of the visit
    :param startHour: starting time of the visit
    :param endHour: ending time of the visit
    :return: nothing
    """
    parameters = {
        "personId": personId,
        "locationId": locationId,
        "date": date,
        "startHour": startHour,
        "endHour": endHour,
    }
    tx.run(query, **parameters)
def createGettingVaccine(tx, query, personId, vaccineId, date, country, expDate):
    """
    Execute the query that creates a GET_VACCINE relationship.
    :param tx: is the transaction
    :param query: is the query to create the relationship
    :param personId: is the id of the Person
    :param vaccineId: is the id of the Vaccine
    :param date: date of the vaccine
    :param country: country of administration
    :param expDate: expiration date of the vaccine
    :return: nothing
    """
    parameters = {
        "personId": personId,
        "vaccineId": vaccineId,
        "date": date,
        "country": country,
        "expDate": expDate,
    }
    tx.run(query, **parameters)
def gettingNumberVaccines(tx, query, personId):
    """
    Run the query that lists vaccines already administered to a Person.
    :param tx: is the transaction
    :param query: is the query to execute
    :param personId: is the id of the Person
    :return: a list of the vaccines already administered to the Person
    """
    cursor = tx.run(query, personId=personId)
    return cursor.data()
def createMakingTest(tx, query, personId, testId, date, hour, result):
    """
    Execute the query that creates a MAKE_TEST relationship.
    :param tx: is the transaction
    :param query: is the query to create the relationship
    :param personId: is the id of the Person
    :param testId: is the id of the Test
    :param date: date of the test
    :param hour: hour of the test
    :param result: result of the test
    :return: nothing
    """
    parameters = {
        "personId": personId,
        "testId": testId,
        "date": date,
        "hour": hour,
        "result": result,
    }
    tx.run(query, **parameters)
def findAllPositivePerson():
    """
    Method that finds all the positive person
    A person is considered positive when they have a Positive test that is
    not followed by a later Negative test.
    :return: a list of rows with the positive person id, infection date and hour
    """
    query = (
        """
        MATCH (p:Person)-[t:MAKE_TEST{result: \"Positive\"}]->()
        WHERE NOT EXISTS {
            MATCH (p)-[t2:MAKE_TEST{result: \"Negative\"}]->()
            WHERE t2.date > t.date
        }
        RETURN distinct ID(p) , t.date as infectionDate , t.hour as infectionHour
        """
    )
    # Uses the module-level driver through the generic read helper
    positiveIdsFound = runQueryRead(driver, query)
    return positiveIdsFound
def checkDate(tx, query, personId, date, hour):
    """
    Run the query returning the last test made before the given date/hour.
    :param tx: is the transaction
    :param query: is the query to get the test
    :param personId: id of the Person whose tests are inspected
    :param date: hypothetical date of the visit
    :param hour: hypothetical hour of the visit
    :return: date of the precedent test
    """
    cursor = tx.run(query, personId=personId, date=date, hour=hour)
    return cursor.data()
def createRelationshipsInfect(id, test_date, test_hour, daysBack):
    """
    Method that creates COVID_EXPOSURE relationships from a positive person to
    every contact found in the tracking window (family members, app contacts
    and people who visited the same location at an overlapping time).
    :param id: is the id of the positive person
    :param test_date: date of the positive test
    :param test_hour: hour of the positive test
    :param daysBack: is the number of days to look in the past
    :return: nothing
    """
    # People living in the same house, not already marked as exposed
    familyQuery = (
        "MATCH (pp:Person)-[:LIVE]->(h:House)<-[:LIVE]-(ip:Person) "
        "WHERE ID(pp) = $id AND ip <> pp AND NOT (ip)<-[:COVID_EXPOSURE]-(pp)"
        "RETURN DISTINCT ID(ip);"
    )
    # IMPORTANT: ($date) represents the date from which we check the contacts.
    # It is the date of the positive test minus daysBack; contacts are checked
    # until the date of the positive test.
    appContactQuery = (
        "MATCH (pp:Person)-[r1:APP_CONTACT]->(ip:Person) "
        "WHERE ID(pp) = $id AND (r1.date > date($date) OR (r1.date = date($date) AND r1.hour >= time($hour))) "
        "AND (r1.date < date($date) + duration({days:7}) OR (r1.date = date($date)+duration({days:7}) AND "
        "r1.hour <= time($hour))) "
        "AND NOT "
        "(pp)-[:COVID_EXPOSURE{date: r1.date}]->(ip)"
        "RETURN DISTINCT ID(ip) , r1.date;"
    )
    locationContactQuery = (
        "MATCH (pp:Person)-[r1:VISIT]->(l:Location)<-[r2:VISIT]-(ip:Person) "
        "WHERE ID(pp) = $id AND ip <> pp AND (r1.date > date($date) OR (r1.date = date($date) AND r1.start_hour >= time($hour))) "
        "AND (r1.date < date($date) + duration({days:7}) OR (r1.date = date($date)+duration({days:7}) AND "
        "r1.end_hour <= time($hour))) AND r2.date = r1.date AND "
        "((r1.start_hour < r2.start_hour AND r1.end_hour > r2.start_hour) OR "
        "(r2.start_hour < r1.start_hour AND r2.end_hour > r1.start_hour)) AND NOT "
        "(pp)-[:COVID_EXPOSURE{name: l.name , date: r1.date}]->(ip)"
        "RETURN DISTINCT ID(ip) , r1.date , l.name;"
    )
    # Start of the tracking window: positive test date minus daysBack days
    date = test_date - datetime.timedelta(daysBack)
    with driver.session() as s:
        familyInfected = s.read_transaction(findInfectInFamily, familyQuery, id)
        appInfected = s.read_transaction(findInfect, appContactQuery, id, date, test_hour)
        locationInfected = s.read_transaction(findInfect, locationContactQuery, id, date, test_hour)
        # NOTE: a dead pre-aggregation loop that built and immediately discarded
        # a combined id list was removed here; it had no observable effect.
        # Family exposures are dated at the start of the tracking window.
        for el in familyInfected:
            query = (
                "MATCH (pp:Person) , (ip:Person) "
                "WHERE ID(pp) = $id AND ID(ip) = $ipid "
                "CREATE (pp)-[:COVID_EXPOSURE{date:date($date)}]->(ip);"
            )
            s.write_transaction(createInfectFamily, query, id, el['ID(ip)'], date.strftime("%Y-%m-%d"))
        # App exposures reuse the recorded contact date.
        for el in appInfected:
            query = (
                "MATCH (pp:Person) , (ip:Person) "
                "WHERE ID(pp) = $id AND ID(ip) = $ipid "
                "CREATE (pp)-[:COVID_EXPOSURE{date: date($date)}]->(ip);"
            )
            s.write_transaction(createInfectApp, query, id, el['ID(ip)'], el['r1.date'])
        # Location exposures additionally record the location name.
        for el in locationInfected:
            query = (
                "MATCH (pp:Person) , (ip:Person) "
                "WHERE ID(pp) = $id AND ID(ip) = $ipid "
                "CREATE (pp)-[:COVID_EXPOSURE{date: date($date) , name: $name}]->(ip);"
            )
            s.write_transaction(createInfectLocation, query, id, el['ID(ip)'], el['r1.date'], el['l.name'])
def delete_negative_after_exposure():
    """
    Delete COVID_EXPOSURE edges pointing at people who took a negative test
    at least seven days after the exposure.
    """
    query = (
        "match ()-[c:COVID_EXPOSURE]->(p)-[m:MAKE_TEST{result:\"Negative\"}]->(t) "
        "where m.date >= c.date + duration({days: 7}) "
        "delete c"
    )
    with driver.session() as session:
        session.run(query)
def createInfectFamily(tx, query, id, ipid, date):
    """Create a COVID_EXPOSURE relationship between two family members."""
    parameters = {"id": id, "ipid": ipid, "date": date}
    tx.run(query, **parameters)
def createInfectApp(tx, query, id, ipid, date):
    """Create a COVID_EXPOSURE relationship discovered via an app contact."""
    parameters = {"id": id, "ipid": ipid, "date": date}
    tx.run(query, **parameters)
def createInfectLocation(tx, query, id, ipid, date, name):
    """Create a COVID_EXPOSURE relationship discovered via a shared location visit."""
    parameters = {"id": id, "ipid": ipid, "date": date, "name": name}
    tx.run(query, **parameters)
def findInfectInFamily(tx, query, id):
    """
    Run the query that finds the infected members of a family.
    :param tx: is the transaction
    :param query: is the query to execute
    :param id: is the id of the positive Person
    :return: the query rows as a list of dicts
    """
    return tx.run(query, id=id).data()
def findInfect(tx, query, id, date, hour):
    """
    Run the query that finds Persons infected by the given positive Person.
    :param tx: is the transaction
    :param query: is the query to execute
    :param id: is the id of the positive Person
    :param date: is the date from which the tracking starts
    :param hour: is the hour from which the tracking starts
    :return: the query rows as a list of dicts
    """
    cursor = tx.run(query, id=id, date=date, hour=hour)
    return cursor.data()
def createContact(tx, query, pId1, pId2, hour, date):
    """
    Execute the query that creates a CONTACT_APP relationship.
    :param tx: is the transaction
    :param query: is the query to perform
    :param pId1: is the id of the first Person
    :param pId2: is the id of the second Person
    :param hour: the hour of the contact
    :param date: the date of the contact
    :return: nothing
    """
    parameters = {"pId1": pId1, "pId2": pId2, "hour": hour, "date": date}
    tx.run(query, **parameters)
def getPersonIds(withApp=False):
    """
    Retrieve all the ids of Person nodes.
    :param withApp: if True, retrieve only persons with app = "True"
    :return: a list of integers corresponding to the person ids
    """
    with driver.session() as s:
        rows = s.write_transaction(getPersonId, withApp)
        return [row["ID(p)"] for row in rows]
def getPersonId(tx, withApp):
    """
    Retrieve the ids of Person nodes in the database.
    :param tx: is the transaction
    :param withApp: if True, restrict to persons whose app attribute is "True"
    :return: a list of ids
    """
    if withApp:
        query = (
            "MATCH (p:Person) "
            "WHERE p.app = \"True\" "
            "RETURN ID(p);"
        )
    else:
        query = (
            "MATCH (p:Person) "
            "RETURN ID(p);"
        )
    return tx.run(query).data()
def getLocationsIds():
    """
    Retrieve all the ids of Location nodes.
    :return: a list of integers corresponding to the location ids
    """
    with driver.session() as s:
        rows = s.write_transaction(getLocationsId)
        return [row["ID(l)"] for row in rows]
def getLocationsId(tx):
    """
    Retrieve the ids of all Location nodes.
    :param tx: is the transaction
    :return: a list of ids
    """
    query = "MATCH (l:Location)RETURN ID(l)"
    return tx.run(query).data()
def getVaccinesId(tx):
    """
    Retrieve the ids of all Vaccine nodes.
    :param tx: is the transaction
    :return: a list of ids
    """
    query = "MATCH (v:Vaccine)RETURN ID(v)"
    return tx.run(query).data()
def getVaccinesIds():
    """
    Retrieve all the ids of Vaccine nodes.
    :return: a list of integers corresponding to the vaccine ids
    """
    with driver.session() as s:
        rows = s.write_transaction(getVaccinesId)
        return [row["ID(v)"] for row in rows]
def getTestsIds():
    """
    Retrieve all the ids of Test nodes.
    :return: a list of integers corresponding to the test ids
    """
    with driver.session() as s:
        rows = s.write_transaction(getTestsId)
        return [row["ID(t)"] for row in rows]
def getTestsId(tx):
    """
    Retrieve the ids of all Test nodes.
    :param tx: is the transaction
    :return: a list of ids
    """
    query = "MATCH (t:Test)RETURN ID(t)"
    return tx.run(query).data()
def runQuery(tx, query, isReturn=False):
    """
    Run a generic query in the given transaction.
    :param tx: is the transaction
    :param query: is the query to perform
    :param isReturn: if True return the results, return nothing otherwise
    """
    result = tx.run(query)
    return result.data() if isReturn else None
def runQueryWrite(d, queryList):
    """
    Execute each query of queryList inside its own write transaction.
    :param d: is the connection to the database (driver)
    :param queryList: list of already-completed queries to run
    :return: nothing
    """
    for singleQuery in queryList:
        with d.session() as s:
            s.write_transaction(runQuery, singleQuery)
def runQueryRead(d, query):
    """
    Execute a single read query and return its rows.
    :param d: is the connection to the database
    :param query: already-completed query to run
    :return: the query results
    """
    with d.session() as s:
        return s.read_transaction(runQuery, query, True)
def printDatabase():
    """
    Method use to print the database structure using PlotDBStructure module
    :return: nothing
    """
    with driver.session() as s:
        # Fetch every node type from the graph
        personNodes = s.read_transaction(findAllPerson)
        houseNodes = s.read_transaction(findAllHome)
        locationNodes = s.read_transaction(findAllLocation)
        vaccineNodes = s.read_transaction(findAllVaccine)
        testNodes = s.read_transaction(findAllTest)
        # ...and every relationship type
        liveRelationships = s.read_transaction(findAllLiveRelationships)
        visitRelationships = s.read_transaction(findAllVisitRelationships)
        appContactRelationships = s.read_transaction(findAllAppContactRelationships)
        getRelationships = s.read_transaction(findAllGetVaccineRelationships)
        makeRelationships = s.read_transaction(findAllMakeTestRelationships)
        infectRelationships = s.read_transaction(findAllInfectedRelationships)
        # Initialize the network attribute
        ps.PlotDBStructure.__init__()
        # Add nodes
        ps.PlotDBStructure.addStructure(personNodes)
        ps.PlotDBStructure.addStructure(houseNodes)
        ps.PlotDBStructure.addStructure(locationNodes)
        ps.PlotDBStructure.addStructure(vaccineNodes)
        ps.PlotDBStructure.addStructure(testNodes)
        # Add relationships
        ps.PlotDBStructure.addStructure(liveRelationships)
        ps.PlotDBStructure.addStructure(visitRelationships)
        ps.PlotDBStructure.addStructure(appContactRelationships)
        ps.PlotDBStructure.addStructure(makeRelationships)
        ps.PlotDBStructure.addStructure(getRelationships)
        ps.PlotDBStructure.addStructure(infectRelationships)
        # Show the graph structure
        ps.PlotDBStructure.showGraph()
        return
if __name__ == '__main__':
    # Entry point: (re)build the whole synthetic contact-tracing graph.
    # Open the connection
    driver = openConnection()
    # Only read from the graph
    # printDatabase()
    # Close the connection
    # closeConnection(driver)
    # exit()
    # Read names from the file
    names = readNames()
    # Read surnames from the file
    surnames = readSurnames()
    # Read locations
    locations = readLocations()
    # Read house addresses
    houseAddresses = readHouseAddresses()
    vaccines = readVaccines()
    tests = readTests()
    # Create the family list
    print("Creating families...")
    families = createFamilies(names, surnames)
    # Query is an attribute that will contain the whole query to instantiate the database
    generalQuery = []
    # Generate all the Person Nodes and the family relationships
    cQuery, rQuery = createNodesFamily(families, houseAddresses)
    # Generate the locations node
    lQuery = createNodeLocations(locations)
    # Generate the vaccines nodes
    vQuery = createNodeVaccines(vaccines)
    # Generate the tests nodes
    tQuery = createNodeTests(tests)
    # Adds the creation node queries to the generalQuery
    for subQuery in cQuery:
        generalQuery.append(subQuery)
    for subQuery in lQuery:
        generalQuery.append(subQuery)
    for subQuery in vQuery:
        generalQuery.append(subQuery)
    for subQuery in tQuery:
        generalQuery.append(subQuery)
    # Adds the relationships queries to the generalQuery
    for subQuery in rQuery:
        generalQuery.append(subQuery)
    # Delete the nodes already present
    with driver.session() as session:
        numberOfNodes = session.write_transaction(deleteAll)
    # Generate the structure performing the node and relationship creation
    runQueryWrite(driver, generalQuery)
    # Generate random tests
    # Take tests ids
    print("Creating random tests...")
    testsIds = getTestsIds()
    personIds = getPersonIds()
    # # Generate the relationship
    createRelationshipsMakeTest(driver, personIds, testsIds)
    # Generate random contacts with app tracing
    # Take Person ids of people with app attribute equal to True)
    print("Creating random app contact relationships...")
    personIds = getPersonIds(True)
    # Generate the relationships
    createRelationshipsAppContact(driver, personIds)
    # Generate random visits
    # Take Location ids
    locationIds = getLocationsIds()
    personIds = getPersonIds()
    # Generate the relationship
    print("Creating random visit relationships...")
    createRelationshipsVisit(driver, personIds, locationIds)
    # Generate random vaccines
    # Take vaccines ids
    vaccineIds = getVaccinesIds()
    print("Creating random vaccines...")
    # Generate the relationship
    createRelationshipsGetVaccine(driver, personIds, vaccineIds)
    # Verify the nodes are been created
    # with driver.session() as session:
    #     numberOfNodes = session.read_transaction(countAll)
    #     print("Number of nodes: " + str(numberOfNodes))
    # Find all the positive Person
    data_for_positive = findAllPositivePerson()
    print("Creating covid exposure relationships...")
    for positive in data_for_positive:
        positive_id = positive['ID(p)']
        contagion_date = str(positive['infectionDate'])
        # Instruction needed to comply with Python way to manage dates
        contagion_datetime = datetime.datetime.strptime(contagion_date, "%Y-%m-%d")
        contagion_hour = str(positive['infectionHour'])
        # Look back 7 days from the positive test for possible exposures
        createRelationshipsInfect(positive_id, contagion_datetime, contagion_hour, 7)
    # Search all the infected Person tracked
    delete_negative_after_exposure()
    # Print the whole structure
    printDatabase()
    # Close the connection
    closeConnection(driver)
| 46,505 | 14,650 |
# -*- coding: utf-8 -*-#
'''
# Name: HyperParameters_4_0
# Description:
# Author: super
# Date: 2020/6/2
'''
from MiniFramework.EnumDef_4_0 import *
# this class is for two-layer NN only
class HyperParameters_4_1(object):
    """Bundle of training hyper-parameters for a two-layer network."""
    def __init__(self, eta=0.1, max_epoch=10000, batch_size=5,
                 net_type=NetType.Fitting,
                 init_method=InitialMethod.Xavier,
                 optimizer_name=OptimizerName.SGD,
                 stopper = None):
        # eta: learning rate; max_epoch: maximum number of training epochs
        self.eta = eta
        self.max_epoch = max_epoch
        # if batch_size == -1, it is FullBatch
        # NOTE(review): self.num_example is never assigned anywhere in this
        # class before this read, so batch_size == -1 raises AttributeError —
        # confirm where num_example is supposed to come from.
        if batch_size == -1:
            self.batch_size = self.num_example
        else:
            self.batch_size = batch_size
        # end if
        self.net_type = net_type
        self.init_method = init_method
        self.optimizer_name = optimizer_name
        self.stopper = stopper
    def toString(self):
        """Return a compact summary string of the key hyper-parameters."""
        title = str.format("bz:{0},eta:{1},init:{2},op:{3}", self.batch_size, self.eta, self.init_method.name, self.optimizer_name.name)
        return title
from ...isa.inst import *
import numpy as np
class Vmv_x_s(Inst):
    """Model of the RISC-V vector instruction ``vmv.x.s rd, vs2``.

    Semantics: x[rd] = vs2[0] (vs1 = 0).
    """
    name = 'vmv.x.s'

    def golden(self):
        # The reference result is simply element 0 of the vs2 operand
        source = self['vs2']
        return source[0]
# I. Любители конференций
# ID успешной посылки 66248195
from collections import Counter
def conference_lovers(id_university, k):
    """Print the k most frequent university ids, most frequent first.

    Ties keep the first-seen order, matching Counter.most_common semantics.
    """
    counts = Counter(id_university)
    top_k = counts.most_common(k)
    print(' '.join(str(univ) for univ, _ in top_k))
def read_input():
    """Read the task input from stdin: count line (unused), ids line, k line."""
    int(input())  # number of participants; not needed for the computation
    ids = [int(token) for token in input().split()]
    k = int(input())
    return ids, k
if __name__ == '__main__':
    # Script entry point: read the input and print the k most frequent ids
    id_university, k = read_input()
    conference_lovers(id_university, k)
| 584 | 216 |
from app.helpers.render import render_template, render_error
from app.controllers import user
from app.models.User import User, UserAuthToken
from app.server import server
from flask import g, request, redirect, url_for, abort
from app.session.csrf import csrf_protected
# noinspection PyUnusedLocal
@server.route("/user/data/me", methods=['GET'])
def get_my_profile():
    # Delegate to the user controller for the authenticated user's own profile
    return user.get_my_profile()
@server.route("/users/data/<int:user_id>", methods=['GET'])
def get_profile(user_id):
    # Delegate to the user controller for an arbitrary user's profile data
    return user.get_profile(user_id)
@server.route("/user/followers/<int:user_id>/page/<int:page>", methods=['GET'])
@csrf_protected
def get_followers(user_id, page):
    # Paginated list of accounts following user_id; CSRF-protected endpoint
    return user.get_followers(user_id, page=page)
@server.route("/user/following/<int:user_id>/page/<int:page>", methods=['GET'])
@csrf_protected
def get_following(user_id, page):
    # Paginated list of accounts that user_id follows; CSRF-protected endpoint
    return user.get_following(user_id, page=page)
@server.route("/user/follow/<int:target_user_id>", methods=['POST'])
def follow_user(target_user_id):
    # Guard: only an authenticated user (g.user populated as a User) may follow
    if not isinstance(g.user, User):
        return render_error('Unauthorized'), 401
    return user.follow(g.user.id, target_user_id)
@server.route("/user/unfollow/<int:target_user_id>", methods=['POST'])
def unfollow_user(target_user_id):
    # Guard: only an authenticated user (g.user populated as a User) may unfollow
    if not isinstance(g.user, User):
        return render_error('Unauthorized'), 401
    return user.unfollow(g.user.id, target_user_id)
@server.route("/user/<int:user_id>", defaults={"name": None})
@server.route("/user/<int:user_id>/<name>")
def get_user(user_id, name):
    # Render the profile page for user_id, canonicalising the name in the URL.
    matched_user = User.query.filter_by(id=user_id, deleted=False).first()
    if matched_user is None:
        return abort(404)
    # Redirect if name is incorrect. add 'noredirect=1' flag to avoid infinite redirection in
    # exceptional circumstances
    if name != matched_user.name and request.args.get('noredirect', '0') != '1':
        return redirect(url_for('get_user', user_id=user_id, name=matched_user.name, **request.args, noredirect='1'), code=301)
    # Latest stackexchange auth token, fetched only when the user opted to
    # make the link public
    stackexchange_login = UserAuthToken.\
        query.\
        filter_by(user_id=user_id, issuer='stackexchange.com').\
        order_by(UserAuthToken.id.desc()).\
        first() if matched_user.linked_stackexchange_public else None
    return render_template('user.html', user=matched_user, stackexchange_login=stackexchange_login)
| 2,327 | 791 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import time
from collections import OrderedDict
import torch
import sys
try:
sys.path.append("cider")
from pyciderevalcap.ciderD.ciderD import CiderD
from pyciderevalcap.cider.cider import Cider
sys.path.append("coco-caption")
from pycocoevalcap.bleu.bleu import Bleu
from pyciderevalcap.NKRE_D.nkpe_D import Nkpe_D
except:
print('cider or coco-caption missing')
CiderD_scorer = None
Cider_scorer = None
Bleu_scorer = None
Nkpe_scorer = None
#CiderD_scorer = CiderD(df='corpus')
def init_scorer(cached_tokens):
    """
    Lazily instantiate the module-level scorers (CiderD, Cider, Bleu-4, Nkpe).
    :param cached_tokens: document-frequency cache name passed to the CIDEr
        scorers as ``df``
    """
    global CiderD_scorer
    CiderD_scorer = CiderD_scorer or CiderD(df=cached_tokens)
    global Cider_scorer
    Cider_scorer = Cider_scorer or Cider(df=cached_tokens)
    global Bleu_scorer
    Bleu_scorer = Bleu_scorer or Bleu(4)
    global Nkpe_scorer
    Nkpe_scorer = Nkpe_scorer or Nkpe_D()
def array_to_str(arr):
    """Join token ids with single spaces, truncating after the first 0 (EOS).

    The terminating 0 itself is kept in the output string.
    """
    tokens = []
    for token in arr:
        tokens.append(str(token))
        if token == 0:
            break
    return ' '.join(tokens)
def get_self_critical_reward(greedy_res, data_gts, gen_result, opt):
    """
    SCST reward: weighted metric score of each sampled caption minus the
    greedy-decoded baseline score of the same image.
    :param greedy_res: greedy sequences, one row per image
    :param data_gts: per-image lists of ground-truth token arrays
    :param gen_result: sampled sequences (batch_size * seq_per_img rows)
    :param opt: options carrying cider/nkpe/bleu reward weights
    :return: per-token reward array of shape (gen_result_size, seq_len)
    """
    batch_size = len(data_gts)
    gen_result_size = gen_result.shape[0]
    seq_per_img = gen_result_size // len(data_gts) # gen_result_size = batch_size * seq_per_img
    assert greedy_res.shape[0] == batch_size
    res = OrderedDict()
    gen_result = gen_result.data.cpu().numpy()
    greedy_res = greedy_res.data.cpu().numpy()
    # Candidates: sampled captions first, then the greedy baselines appended
    for i in range(gen_result_size):
        res[i] = [array_to_str(gen_result[i])]
    for i in range(batch_size):
        res[gen_result_size + i] = [array_to_str(greedy_res[i])]
    gts = OrderedDict()
    for i in range(len(data_gts)):
        gts[i] = [array_to_str(data_gts[i][j]) for j in range(len(data_gts[i]))]
    res_ = [{'image_id':i, 'caption': res[i]} for i in range(len(res))]
    res__ = {i: res[i] for i in range(len(res_))}
    # Ground truths aligned first with the samples, then with the baselines
    gts_ = {i: gts[i // seq_per_img] for i in range(gen_result_size)}
    gts_.update({i+gen_result_size: gts[i] for i in range(batch_size)})
    if opt.cider_reward_weight > 0:
        _, cider_scores = CiderD_scorer.compute_score(gts_, res_)
        print('Cider scores:', _)
    else:
        cider_scores = 0
    if opt.nkpe_reward_weight > 0:
        _, nkpe_scores = Nkpe_scorer.compute_score(gts_, res_)
        print('Nkpe scores:', _)
    else:
        nkpe_scores = 0
    if opt.bleu_reward_weight > 0:
        _, bleu_scores = Bleu_scorer.compute_score(gts_, res__)
        bleu_scores = np.array(bleu_scores[3])
        print('Bleu scores:', _[3])
    else:
        bleu_scores = 0
    scores = opt.cider_reward_weight * cider_scores + opt.bleu_reward_weight * bleu_scores + opt.nkpe_reward_weight * nkpe_scores
    # scores = cider_scores * nkpe_scores * 3
    # Subtract each image's greedy baseline score from its sampled scores
    scores = scores[:gen_result_size].reshape(batch_size, seq_per_img) - scores[-batch_size:][:, np.newaxis]
    scores = scores.reshape(gen_result_size)
    # Broadcast the sequence-level reward to every token position
    rewards = np.repeat(scores[:, np.newaxis], gen_result.shape[1], 1)
    return rewards
def get_self_critical_reward_2(data_gts, gen_result, monte_carlo_count):
    """
    Nkpe-only reward over Monte-Carlo rollouts, summed across rollouts.
    :param data_gts: per-image lists of ground-truth token arrays
    :param gen_result: rollout sequences (batch * seq_per_img * monte_carlo_count rows)
    :param monte_carlo_count: number of rollouts per sample
    :return: CUDA tensor of summed rewards, one row per (image, sample) pair
    """
    global Nkpe_scorer
    Nkpe_scorer = Nkpe_scorer or Nkpe_D()
    # reward = np.zeros((gen_result.shape[0], 1))
    gen_result_size = gen_result.shape[0]
    seq_per_img = gen_result_size // len(data_gts) // monte_carlo_count # gen_result_size = batch_size * seq_per_img
    batch_size = gen_result_size // monte_carlo_count
    res = OrderedDict()
    gen_result = gen_result.data.cpu().numpy()
    for i in range(gen_result_size):
        # print(gen_result[i])
        res[i] = [array_to_str(gen_result[i])]
    gts = OrderedDict()
    for i in range(len(data_gts)):
        gts[i] = [array_to_str(data_gts[i][j]) for j in range(len(data_gts[i]))]
    res_ = [{'image_id': i, 'caption': res[i]} for i in range(len(res))]
    # Replicate the ground truths once per Monte-Carlo rollout block
    gts_ = {int(gen_result_size//monte_carlo_count) * i + j: gts[j // seq_per_img] for i in range(monte_carlo_count) for j in range(int(gen_result_size//monte_carlo_count))}
    _, nkpe_scores = Nkpe_scorer.compute_score(gts_, res_)
    # print('Nkpe scores:', _)
    reward = torch.from_numpy(nkpe_scores).cuda()
    # Sum the per-rollout scores for each sample
    reward = reward.view(batch_size, monte_carlo_count, -1).sum(1)
    return reward
def get_self_critical_reward_3(greedy_res, data_gts, gen_result, current_generated, opt, monte_carlo_count=2):
    """
    Per-timestep Nkpe reward computed from Monte-Carlo completions of
    partially generated captions.
    :param greedy_res: greedy sequences, one row per image (baseline shape check)
    :param data_gts: per-image lists of ground-truth token arrays
    :param gen_result: fully sampled sequences
    :param current_generated: stacked partial completions (t blocks of gen_result_size rows)
    :param opt: options object (unused beyond interface parity — TODO confirm)
    :param monte_carlo_count: rollouts averaged per timestep block
    :return: reward matrix of shape (gen_result_size, seq_length)
    """
    batch_size = len(data_gts)
    gen_result_size = gen_result.shape[0]
    seq_length = gen_result.shape[1]
    seq_per_img = gen_result_size // len(data_gts) # gen_result_size = batch_size * seq_per_img
    assert greedy_res.shape[0] == batch_size
    current_generated_size = current_generated.size(0)
    # t = number of stacked completion blocks in current_generated
    t = current_generated_size // gen_result_size
    res = OrderedDict()
    gen_result = gen_result.data.cpu().numpy()
    greedy_res = greedy_res.data.cpu().numpy()
    for i in range(gen_result_size):
        res[i] = [array_to_str(gen_result[i])]
    for i in range(batch_size):
        res[gen_result_size + i] = [array_to_str(greedy_res[i])]
    cur_res = OrderedDict()
    current_generated = current_generated.data.cpu().numpy()
    for i in range(current_generated_size):
        cur_res[i] = [array_to_str(current_generated[i])]
    # gen_result = gen_result.data.cpu().numpy()
    # greedy_res = greedy_res.data.cpu().numpy()
    gts = OrderedDict()
    for i in range(len(data_gts)):
        gts[i] = [array_to_str(data_gts[i][j]) for j in range(len(data_gts[i]))]
    cur_res_ = [{'image_id':i, 'caption': cur_res[i]} for i in range(len(cur_res))]
    gts_ = {i: gts[i // seq_per_img] for i in range(gen_result_size)}
    # Ground truths replicated for each of the t completion blocks
    gts_cur_ = {j*gen_result_size + i: gts_[i] for j in range(t) for i in range(len(gts_)) }
    # start = time.time()
    _, nkpe_scores = Nkpe_scorer.compute_score(gts_cur_, cur_res_)
    # print('scores time {}'.format(time.time() - start))
    print('Nkpe scores:', _)
    nkpe_scores_list = np.split(nkpe_scores, t/monte_carlo_count, axis=0)
    result = np.zeros((gen_result_size, seq_length), dtype=nkpe_scores.dtype)
    # NOTE(review): the loop variable below shadows the earlier block count t;
    # np.split above already consumed the original value, so behavior is
    # unaffected, but the reuse is confusing.
    for t, item in enumerate(nkpe_scores_list):
        item_list = np.split(item, monte_carlo_count, axis=0)
        res_scores = np.zeros((gen_result_size,), dtype=nkpe_scores.dtype)
        for item_i in item_list:
            res_scores += item_i
        # Average over rollouts and spread over a 6-token window per block —
        # the constant 6 appears hard-coded; TODO confirm it matches seq_length/t
        result[:,t*6: t*6+6] = np.repeat((res_scores/monte_carlo_count).reshape(-1,1),6, axis=1)
    # scores = scores[:gen_result_size].reshape(batch_size, seq_per_img) - scores[-batch_size:][:, np.newaxis]
    # scores = scores.reshape(gen_result_size)
    #
    # rewards = np.repeat(scores[:, np.newaxis], gen_result.shape[1], 1)
    rewards = result
    return rewards
def get_scores(data_gts, gen_result, opt):
    """
    Weighted CIDEr-D/BLEU-4 scores for each sampled caption (no baseline
    subtraction, unlike get_self_critical_reward).
    :param data_gts: per-image lists of ground-truth token arrays
    :param gen_result: sampled sequences (sample_size * seq_per_img rows)
    :param opt: options carrying cider/bleu reward weights
    :return: per-sequence score array
    """
    batch_size = gen_result.size(0)# batch_size = sample_size * seq_per_img
    seq_per_img = batch_size // len(data_gts)
    res = OrderedDict()
    gen_result = gen_result.data.cpu().numpy()
    for i in range(batch_size):
        res[i] = [array_to_str(gen_result[i])]
    gts = OrderedDict()
    for i in range(len(data_gts)):
        gts[i] = [array_to_str(data_gts[i][j]) for j in range(len(data_gts[i]))]
    res_ = [{'image_id':i, 'caption': res[i]} for i in range(batch_size)]
    res__ = {i: res[i] for i in range(batch_size)}
    # Each sample is scored against its own image's ground truths
    gts = {i: gts[i // seq_per_img] for i in range(batch_size)}
    if opt.cider_reward_weight > 0:
        _, cider_scores = CiderD_scorer.compute_score(gts, res_)
        print('Cider scores:', _)
    else:
        cider_scores = 0
    if opt.bleu_reward_weight > 0:
        _, bleu_scores = Bleu_scorer.compute_score(gts, res__)
        bleu_scores = np.array(bleu_scores[3])
        print('Bleu scores:', _[3])
    else:
        bleu_scores = 0
    scores = opt.cider_reward_weight * cider_scores + opt.bleu_reward_weight * bleu_scores
    return scores
def get_self_cider_scores(data_gts, gen_result, opt):
    """
    Diversity score per image from the self-CIDEr similarity matrix of its
    sampled captions (higher means more diverse samples).
    :param data_gts: per-image lists of ground-truth token arrays (used only for sizing)
    :param gen_result: sampled sequences (sample_size * seq_per_img rows)
    :param opt: options object (unused here — kept for interface parity)
    :return: numpy array with one diversity score per image
    """
    batch_size = gen_result.size(0)# batch_size = sample_size * seq_per_img
    seq_per_img = batch_size // len(data_gts)
    res = []
    gen_result = gen_result.data.cpu().numpy()
    for i in range(batch_size):
        res.append(array_to_str(gen_result[i]))
    scores = []
    for i in range(len(data_gts)):
        # Self-CIDEr kernel matrix over this image's seq_per_img samples
        tmp = Cider_scorer.my_self_cider([res[i*seq_per_img:(i+1)*seq_per_img]])
        def get_div(eigvals):
            # Spectral diversity: ratio of the largest singular value to the
            # total, mapped through -log and normalised by matrix size
            eigvals = np.clip(eigvals, 0, None)
            return -np.log(np.sqrt(eigvals[-1]) / (np.sqrt(eigvals).sum())) / np.log(len(eigvals))
        scores.append(get_div(np.linalg.eigvalsh(tmp[0]/10)))
    scores = np.array(scores)
    return scores
#from gps_handler import GpsHandler
import math
class GpsFenceHandler:
    """Geo-fence handler backed by a persistent settings store.

    The settings object provides load/save/get_data/set_data; its data dict
    holds the enabled flag, trigger thresholds and the anchor GPS fix.
    """

    def __init__(self, settings):
        self.settings = settings
        self.settings.load()
        # First run: seed the store with a disabled fence and default thresholds
        if self.settings.get_data() is None:
            defaults = {"enabled": False, "thresholds": {"dist": 100, "speed": 10}, "gps": None}
            self.settings.set_data(defaults)
            self.settings.save()

    def enable(self):
        """Turn the fence on and persist the change."""
        self.settings.get_data()["enabled"] = True
        self.settings.save()

    def disable(self):
        """Turn the fence off, drop the anchor fix, and persist."""
        data = self.settings.get_data()
        data["enabled"] = False
        data["gps"] = None
        self.settings.save()

    def get_settings(self):
        """Expose the underlying settings object."""
        return self.settings

    def distance(self, lat1, lon1, lat2, lon2):
        """Great-circle (haversine) distance between two lat/lon points, in metres."""
        earth_radius_m = 6371 * 1000
        dlat = math.radians(lat2 - lat1)
        dlon = math.radians(lon2 - lon1)
        half_chord = math.sin(dlat / 2) * math.sin(dlat / 2) \
            + math.cos(math.radians(lat1)) * math.cos(math.radians(lat2)) \
            * math.sin(dlon / 2) * math.sin(dlon / 2)
        central_angle = 2 * math.atan2(math.sqrt(half_chord), math.sqrt(1 - half_chord))
        return earth_radius_m * central_angle

    def check_triggers(self, gps_data):
        """Compare gps_data against the stored anchor fix and thresholds.

        :return: dict with "dist" and "speed" boolean trigger flags; both
            False when no anchor fix is stored.
        """
        triggered = {"dist": False, "speed": False}
        thresholds = self.settings.get_data()["thresholds"]
        anchor = self.settings.get_data()["gps"]
        if anchor is not None:
            moved = self.distance(anchor['latitude'], anchor['longitude'],
                                  gps_data['latitude'], gps_data['longitude'])
            if moved > thresholds['dist']:
                triggered["dist"] = True
            # Speed is taken from the stored anchor fix, not the new reading
            if anchor["speed"] > thresholds["speed"]:
                triggered["speed"] = True
        return triggered
| 1,984 | 670 |
import RPi.GPIO as GPIO
# to use Raspberry Pi board pin numbers
GPIO.setmode(GPIO.BOARD)
# set up the GPIO channels - one input and one output
GPIO.setup(7, GPIO.OUT)
GPIO.output(7, GPIO.LOW)
# NOTE(review): pin 7 is reconfigured as an input right after being driven
# LOW above — confirm this drive-low-then-read sequence is intentional.
GPIO.setup(7, GPIO.IN)
| 219 | 103 |
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import unittest
import logging
import struct
import netaddr
import array
from nose.tools import *
from nose.plugins.skip import Skip, SkipTest
from ryu.ofproto import ether, inet
from ryu.lib import mac
from ryu.lib.packet import *
LOG = logging.getLogger('test_packet')
class TestPacket(unittest.TestCase):
    """Round-trip tests for ryu.lib.packet.

    Each test builds a packet from protocol objects, serializes it,
    (in the ARP tests) compares the wire bytes against hand-built buffers,
    then re-parses the bytes and checks every header field, verifying
    IP/UDP/TCP checksums where applicable.

    NOTE(review): this is Python-2-era code -- wire payloads are built as
    byte-valued ``str`` literals and assertions use nose's eq_/ok_.
    """
    # Shared fixture addresses: binary MACs, integer IPs plus packed form.
    dst_mac = mac.haddr_to_bin('AA:AA:AA:AA:AA:AA')
    src_mac = mac.haddr_to_bin('BB:BB:BB:BB:BB:BB')
    dst_ip = int(netaddr.IPAddress('192.168.128.10'))
    dst_ip_bin = struct.pack('!I', dst_ip)
    src_ip = int(netaddr.IPAddress('192.168.122.20'))
    src_ip_bin = struct.pack('!I', src_ip)
    # 32-byte opaque payload appended after the L4 header in the UDP/TCP tests.
    payload = '\x06\x06\x47\x50\x00\x00\x00\x00' \
              + '\xcd\xc5\x00\x00\x00\x00\x00\x00' \
              + '\x10\x11\x12\x13\x14\x15\x16\x17' \
              + '\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f'
    def get_protocols(self, pkt):
        """Index the parsed protocols of *pkt* by their protocol_name.

        Anything without a protocol_name attribute (the raw payload) is
        stored under the key 'payload'.  NOTE: a duplicate protocol name
        would overwrite the earlier entry.
        """
        protocols = {}
        for p in pkt:
            if hasattr(p, 'protocol_name'):
                protocols[p.protocol_name] = p
            else:
                protocols['payload'] = p
        return protocols
    def setUp(self):
        # No per-test fixtures beyond the class attributes above.
        pass
    def tearDown(self):
        # Nothing to clean up.
        pass
    def test_arp(self):
        """Ethernet/ARP: serialize, compare wire bytes, re-parse, check fields."""
        # build the packet from protocol objects
        e = ethernet.ethernet(self.dst_mac, self.src_mac,
                              ether.ETH_TYPE_ARP)
        a = arp.arp(1, ether.ETH_TYPE_IP, 6, 4, 2,
                    self.src_mac, self.src_ip, self.dst_mac,
                    self.dst_ip)
        p = packet.Packet()
        p.add_protocol(e)
        p.add_protocol(a)
        p.serialize()
        # expected ethernet header, format !6s6sH (0x0806 = ARP ethertype)
        e_buf = self.dst_mac \
            + self.src_mac \
            + '\x08\x06'
        # expected arp body, format !HHBBH6sI6sI
        a_buf = '\x00\x01' \
            + '\x08\x00' \
            + '\x06' \
            + '\x04' \
            + '\x00\x02' \
            + self.src_mac \
            + self.src_ip_bin \
            + self.dst_mac \
            + self.dst_ip_bin
        buf = e_buf + a_buf
        eq_(buf, p.data)
        # parse the serialized bytes back and index by protocol name
        pkt = packet.Packet(array.array('B', p.data))
        protocols = self.get_protocols(pkt)
        p_eth = protocols['ethernet']
        p_arp = protocols['arp']
        # ethernet fields
        ok_(p_eth)
        eq_(self.dst_mac, p_eth.dst)
        eq_(self.src_mac, p_eth.src)
        eq_(ether.ETH_TYPE_ARP, p_eth.ethertype)
        # arp fields
        ok_(p_arp)
        eq_(1, p_arp.hwtype)
        eq_(ether.ETH_TYPE_IP, p_arp.proto)
        eq_(6, p_arp.hlen)
        eq_(4, p_arp.plen)
        eq_(2, p_arp.opcode)
        eq_(self.src_mac, p_arp.src_mac)
        eq_(self.src_ip, p_arp.src_ip)
        eq_(self.dst_mac, p_arp.dst_mac)
        eq_(self.dst_ip, p_arp.dst_ip)
    def test_vlan_arp(self):
        """Ethernet/802.1Q VLAN/ARP: same round trip with a VLAN tag inserted."""
        # build the packet from protocol objects
        e = ethernet.ethernet(self.dst_mac, self.src_mac,
                              ether.ETH_TYPE_8021Q)
        v = vlan.vlan(0b111, 0b1, 3, ether.ETH_TYPE_ARP)
        a = arp.arp(1, ether.ETH_TYPE_IP, 6, 4, 2,
                    self.src_mac, self.src_ip, self.dst_mac,
                    self.dst_ip)
        p = packet.Packet()
        p.add_protocol(e)
        p.add_protocol(v)
        p.add_protocol(a)
        p.serialize()
        # expected ethernet header, format !6s6sH (0x8100 = 802.1Q TPID)
        e_buf = self.dst_mac \
            + self.src_mac \
            + '\x81\x00'
        # expected vlan tag, format !HH (pcp=7, cfi=1, vid=3 -> 0xF003)
        v_buf = '\xF0\x03' \
            + '\x08\x06'
        # expected arp body, format !HHBBH6sI6sI
        a_buf = '\x00\x01' \
            + '\x08\x00' \
            + '\x06' \
            + '\x04' \
            + '\x00\x02' \
            + self.src_mac \
            + self.src_ip_bin \
            + self.dst_mac \
            + self.dst_ip_bin
        buf = e_buf + v_buf + a_buf
        eq_(buf, p.data)
        # parse the serialized bytes back and index by protocol name
        pkt = packet.Packet(array.array('B', p.data))
        protocols = self.get_protocols(pkt)
        p_eth = protocols['ethernet']
        p_vlan = protocols['vlan']
        p_arp = protocols['arp']
        # ethernet fields
        ok_(p_eth)
        eq_(self.dst_mac, p_eth.dst)
        eq_(self.src_mac, p_eth.src)
        eq_(ether.ETH_TYPE_8021Q, p_eth.ethertype)
        # vlan fields
        ok_(p_vlan)
        eq_(0b111, p_vlan.pcp)
        eq_(0b1, p_vlan.cfi)
        eq_(3, p_vlan.vid)
        eq_(ether.ETH_TYPE_ARP, p_vlan.ethertype)
        # arp fields
        ok_(p_arp)
        eq_(1, p_arp.hwtype)
        eq_(ether.ETH_TYPE_IP, p_arp.proto)
        eq_(6, p_arp.hlen)
        eq_(4, p_arp.plen)
        eq_(2, p_arp.opcode)
        eq_(self.src_mac, p_arp.src_mac)
        eq_(self.src_ip, p_arp.src_ip)
        eq_(self.dst_mac, p_arp.dst_mac)
        eq_(self.dst_ip, p_arp.dst_ip)
    def test_ipv4_udp(self):
        """Ethernet/IPv4/UDP + payload: check parsed fields and both checksums."""
        # build the packet from protocol objects
        e = ethernet.ethernet(self.dst_mac, self.src_mac,
                              ether.ETH_TYPE_IP)
        ip = ipv4.ipv4(4, 5, 1, 0, 3, 1, 4, 64, inet.IPPROTO_UDP, 0,
                       self.src_ip, self.dst_ip)
        u = udp.udp(0x190F, 0x1F90, 0, 0)
        p = packet.Packet()
        p.add_protocol(e)
        p.add_protocol(ip)
        p.add_protocol(u)
        p.add_protocol(self.payload)
        p.serialize()
        # expected ethernet header, format !6s6sH (0x0800 = IPv4)
        e_buf = self.dst_mac \
            + self.src_mac \
            + '\x08\x00'
        # expected ipv4 header, format !BBHHHBBHII (checksum field 0 here;
        # the real checksum is verified separately below)
        ip_buf = '\x45' \
            + '\x01' \
            + '\x00\x3C' \
            + '\x00\x03' \
            + '\x20\x04' \
            + '\x40' \
            + '\x11' \
            + '\x00\x00' \
            + self.src_ip_bin \
            + self.dst_ip_bin
        # expected udp header, format !HHHH
        u_buf = '\x19\x0F' \
            + '\x1F\x90' \
            + '\x00\x28' \
            + '\x00\x00'
        buf = e_buf + ip_buf + u_buf + self.payload
        # NOTE(review): unlike the ARP tests, buf is never compared against
        # p.data here; only the per-header pieces are used below -- confirm
        # whether an eq_(buf, p.data) was intended.
        # parse the serialized bytes back and index by protocol name
        pkt = packet.Packet(array.array('B', p.data))
        protocols = self.get_protocols(pkt)
        p_eth = protocols['ethernet']
        p_ipv4 = protocols['ipv4']
        p_udp = protocols['udp']
        # ethernet fields
        ok_(p_eth)
        eq_(self.dst_mac, p_eth.dst)
        eq_(self.src_mac, p_eth.src)
        eq_(ether.ETH_TYPE_IP, p_eth.ethertype)
        # ipv4 fields
        ok_(p_ipv4)
        eq_(4, p_ipv4.version)
        eq_(5, p_ipv4.header_length)
        eq_(1, p_ipv4.tos)
        l = len(ip_buf) + len(u_buf) + len(self.payload)
        eq_(l, p_ipv4.total_length)
        eq_(3, p_ipv4.identification)
        eq_(1, p_ipv4.flags)
        eq_(64, p_ipv4.ttl)
        eq_(inet.IPPROTO_UDP, p_ipv4.proto)
        eq_(self.src_ip, p_ipv4.src)
        eq_(self.dst_ip, p_ipv4.dst)
        # verify the IPv4 checksum: insert the parsed csum at offset 10;
        # a valid header then checksums to zero
        t = bytearray(ip_buf)
        struct.pack_into('!H', t, 10, p_ipv4.csum)
        eq_(packet_utils.checksum(t), 0)
        # udp fields
        ok_(p_udp)
        eq_(0x190f, p_udp.src_port)
        eq_(0x1F90, p_udp.dst_port)
        eq_(len(u_buf) + len(self.payload), p_udp.total_length)
        eq_(0x77b2, p_udp.csum)
        # verify the UDP checksum over pseudo-header + header + payload
        t = bytearray(u_buf)
        struct.pack_into('!H', t, 6, p_udp.csum)
        ph = struct.pack('!IIBBH', self.src_ip, self.dst_ip, 0,
                         17, len(u_buf) + len(self.payload))
        t = ph + t + self.payload
        eq_(packet_utils.checksum(t), 0)
        # payload survives the round trip unchanged
        ok_('payload' in protocols)
        eq_(self.payload, protocols['payload'].tostring())
    def test_ipv4_tcp(self):
        """Ethernet/IPv4/TCP + payload: check parsed fields and both checksums."""
        # build the packet from protocol objects
        e = ethernet.ethernet(self.dst_mac, self.src_mac,
                              ether.ETH_TYPE_IP)
        ip = ipv4.ipv4(4, 5, 0, 0, 0, 0, 0, 64, inet.IPPROTO_TCP, 0,
                       self.src_ip, self.dst_ip)
        t = tcp.tcp(0x190F, 0x1F90, 0x123, 1, 6, 0b101010, 2048, 0, 0x6f,
                    '\x01\x02')
        p = packet.Packet()
        p.add_protocol(e)
        p.add_protocol(ip)
        p.add_protocol(t)
        p.add_protocol(self.payload)
        p.serialize()
        # expected ethernet header, format !6s6sH (0x0800 = IPv4)
        e_buf = self.dst_mac \
            + self.src_mac \
            + '\x08\x00'
        # expected ipv4 header, format !BBHHHBBHII
        ip_buf = '\x45' \
            + '\x00' \
            + '\x00\x4C' \
            + '\x00\x00' \
            + '\x00\x00' \
            + '\x40' \
            + '\x06' \
            + '\x00\x00' \
            + self.src_ip_bin \
            + self.dst_ip_bin
        # expected tcp header, format !HHIIBBHHH plus padded option bytes
        t_buf = '\x19\x0F' \
            + '\x1F\x90' \
            + '\x00\x00\x01\x23' \
            + '\x00\x00\x00\x01' \
            + '\x60' \
            + '\x2A' \
            + '\x08\x00' \
            + '\x00\x00' \
            + '\x00\x6F' \
            + '\x01\x02\x00\x00'
        buf = e_buf + ip_buf + t_buf + self.payload
        # NOTE(review): buf is never compared against p.data (see udp test).
        # parse the serialized bytes back and index by protocol name
        pkt = packet.Packet(array.array('B', p.data))
        protocols = self.get_protocols(pkt)
        p_eth = protocols['ethernet']
        p_ipv4 = protocols['ipv4']
        p_tcp = protocols['tcp']
        # ethernet fields
        ok_(p_eth)
        eq_(self.dst_mac, p_eth.dst)
        eq_(self.src_mac, p_eth.src)
        eq_(ether.ETH_TYPE_IP, p_eth.ethertype)
        # ipv4 fields
        ok_(p_ipv4)
        eq_(4, p_ipv4.version)
        eq_(5, p_ipv4.header_length)
        eq_(0, p_ipv4.tos)
        l = len(ip_buf) + len(t_buf) + len(self.payload)
        eq_(l, p_ipv4.total_length)
        eq_(0, p_ipv4.identification)
        eq_(0, p_ipv4.flags)
        eq_(64, p_ipv4.ttl)
        eq_(inet.IPPROTO_TCP, p_ipv4.proto)
        eq_(self.src_ip, p_ipv4.src)
        eq_(self.dst_ip, p_ipv4.dst)
        # verify the IPv4 checksum (note: local name t is reused below,
        # shadowing the tcp object built above)
        t = bytearray(ip_buf)
        struct.pack_into('!H', t, 10, p_ipv4.csum)
        eq_(packet_utils.checksum(t), 0)
        # tcp fields
        ok_(p_tcp)
        eq_(0x190f, p_tcp.src_port)
        eq_(0x1F90, p_tcp.dst_port)
        eq_(0x123, p_tcp.seq)
        eq_(1, p_tcp.ack)
        eq_(6, p_tcp.offset)
        eq_(0b101010, p_tcp.bits)
        eq_(2048, p_tcp.window_size)
        eq_(0x6f, p_tcp.urgent)
        eq_(len(t_buf), p_tcp.length)
        # verify the TCP checksum over pseudo-header + header + payload
        t = bytearray(t_buf)
        struct.pack_into('!H', t, 16, p_tcp.csum)
        ph = struct.pack('!IIBBH', self.src_ip, self.dst_ip, 0,
                         6, len(t_buf) + len(self.payload))
        t = ph + t + self.payload
        eq_(packet_utils.checksum(t), 0)
        # payload survives the round trip unchanged
        ok_('payload' in protocols)
        eq_(self.payload, protocols['payload'].tostring())
| 10,894 | 4,562 |
import itertools
import torch
import torchvision
import torchvision.transforms as transforms
import matplotlib.pyplot as plt
# Force CPU execution for this experiment.
device = torch.device("cpu")
# ToTensor scales pixels to [0,1]; Normalize then maps each RGB channel to [-1,1].
transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
trainset = torchvision.datasets.CIFAR10(root='./data', train=True, download=True, transform=transform)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=4, shuffle=True, num_workers=0)
testset = torchvision.datasets.CIFAR10(root='./data', train=False, download=True, transform=transform)
testloader = torch.utils.data.DataLoader(testset, batch_size=4, shuffle=False, num_workers=0)
# CIFAR-10 class names, index-aligned with the dataset's integer labels.
classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck')
# functions to show an image
# import matplotlib.pyplot as plt
# import numpy as np
#
#
# def imshow(img):
# img = img / 2 + 0.5 # unnormalize
# npimg = img.numpy()
# plt.imshow(np.transpose(npimg, (1, 2, 0)))
# plt.show()
#
#
# # get some random training images
# dataiter = iter(trainloader)
# images, labels = dataiter.next()
#
# # show images
# imshow(torchvision.utils.make_grid(images))
# # print labels
# print(' '.join('%5s' % classes[labels[j]] for j in range(4)))
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
class Quant(torch.autograd.Function):
    """Identity custom autograd Function with a straight-through gradient.

    forward(input, quant_param) returns *input* unchanged; backward passes
    grad_output straight through to *input* and returns no gradient for
    the non-differentiable *quant_param*.
    """
    @staticmethod
    def forward(ctx, input, quant_param):
        """Save *input* for backward and return it unchanged.

        BUG FIX: the original called ctx.save_for_backward twice; the
        second call REPLACES (not appends to) the saved tensors, so the
        input was lost and a throwaway ones(1, 1) tensor saved instead.
        """
        ctx.save_for_backward(input)
        return input

    @staticmethod
    def backward(ctx, grad_output):
        """Return gradients for (input, quant_param).

        BUG FIXES vs. the original:
        - ctx.saved_tensors is a tuple; unpack it instead of binding the
          tuple itself to `input`.
        - backward must return one gradient per forward input; the second
          input (quant_param) is non-differentiable, so return None for it.
        """
        input, = ctx.saved_tensors
        grad_input = grad_output.clone()
        # grad_input[input < 0] = 0
        return grad_input, None
class Net(nn.Module):
    """Small LeNet-style CNN for CIFAR-10: 3x32x32 input, 10 output logits."""

    def __init__(self):
        super(Net, self).__init__()
        # Two conv/pool stages followed by three fully-connected layers.
        self.conv1 = nn.Conv2d(3, 6, 5)
        self.pool = nn.MaxPool2d(2, 2)
        self.conv2 = nn.Conv2d(6, 16, 5)
        self.fc1 = nn.Linear(16 * 5 * 5, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        # conv -> relu -> pool, twice
        x = self.pool(F.relu(self.conv1(x)))
        x = self.pool(F.relu(self.conv2(x)))
        # flatten to (batch, 16*5*5) and classify
        x = x.view(-1, 16 * 5 * 5)
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        return self.fc3(x)
# Instantiate the model once at module scope; hooks below attach to net.conv1.
net = Net()
import torch.optim as optim
# def convQuant(convLayer: nn.Module):
#     convLayer()
# y_pred = relu(x.mm(w1)).mm(w2)
#
#     # Compute and print loss
#     loss = (y_pred - y).pow(2).sum()
#     if t % 100 == 99:
#         print(t, loss.item())
#
#     # Use autograd to compute the backward pass.
#     loss.backward()
#
#     # Update weights using gradient descent
#     with torch.no_grad():
#         w1 -= learning_rate * w1.grad
#         w2 -= learning_rate * w2.grad
#
#         # Manually zero the gradients after updating weights
#         w1.grad.zero_()
#         w2.grad.zero_()
# Global scratchpad shared by forward_hook/backward_hook/quantize:
# maps str(module hash) -> {"input": ..., "output": ...} captured on forward.
QuantizationCrunch = {}
def forward_hook(module: nn.Module, input: tuple, output: torch.Tensor) -> None:
    """Cache a Conv2d layer's forward input/output in QuantizationCrunch.

    The backward hook later looks this entry up (by the module's hash) to
    re-run and quantize the layer.  Non-Conv2d modules are ignored.
    """
    if not isinstance(module, nn.Conv2d):
        return
    # Keyed by the module's hash so backward_hook can find its own entry.
    QuantizationCrunch[str(module.__hash__())] = {
        "input": input[0],
        "output": output[0],
    }
def backward_hook(module: nn.Module, grad_input: torch.Tensor, grad_output: torch.Tensor) -> None:
    """After gradients flow through a Conv2d, quantize its weights.

    Uses the forward input cached by forward_hook to compare the layer's
    output before/after quantization.  Non-Conv2d modules are ignored.
    """
    if not isinstance(module, nn.Conv2d):
        return
    cached_input = QuantizationCrunch[str(module.__hash__())]["input"]
    # quantize() installs the truncated weights on the module itself; the
    # returned tensors are not used further here.
    quant_out, quant_weights = quantize(module, module.weight, module.bias, cached_input)
def quantize(mod: torch.nn.Module, weights: torch.Tensor, bias: torch.Tensor,
             inp: torch.Tensor) -> "list[torch.Tensor]":
    """Truncate *weights* to 4 decimal places and install them on *mod*.

    Returns [output computed with the quantized weights, quantized weight
    tensor].  Side effects: replaces mod.weight, plots the elementwise
    output drift via matplotlib, and removes the module's cached entry
    from QuantizationCrunch.

    FIXES vs. the original:
    - the return annotation was the set literal {torch.Tensor, torch.Tensor},
      which is not a valid type; it now names the actual list return.
    - the per-element Python list loop is replaced by one vectorized
      torch.trunc, which matches int()'s truncation toward zero.
    """
    orig_out = mod.forward(inp)
    # detach() yields a fresh grad-free tensor, like the old tolist() rebuild.
    q_w = torch.trunc(weights.detach() * 10_000) / 10_000.
    mod.weight = torch.nn.Parameter(q_w, True)
    quant_out = mod.forward(inp)
    # Scatter-plot the per-element output difference caused by quantization.
    plt.plot(range(len(orig_out.flatten().tolist())), (quant_out - orig_out).flatten().tolist(), ",")
    # This module's forward capture has been consumed; drop it.
    QuantizationCrunch.pop(str(mod.__hash__()))
    return [quant_out, q_w]
# Standard classification loss + Adam optimizer over all model parameters.
criterion = nn.CrossEntropyLoss()
# print(net.parameters())
optimizer = optim.Adam(net.parameters(), lr=0.001)  # , momentum=0.9)
for epoch in range(50):  # loop over the dataset multiple times
    running_loss = 0.0
    # islice limits each epoch to the first 25 mini-batches (quick experiment).
    for i, data in itertools.islice(enumerate(trainloader, 0), 25):
        # get the inputs; data is a list of [inputs, labels]
        inputs, labels = data
        # zero the parameter gradients
        optimizer.zero_grad()
        # forward + backward + optimize
        outputs = net(inputs)
        # state = net.state_dict()
        # print("state dict: {}".format(state))
        # NOTE(review): hooks are registered INSIDE the loop, so a new handle
        # accumulates on conv1 every iteration; they are also attached after
        # this iteration's forward pass, so forward_hook first fires on the
        # next one. Registering once before the loop looks intended -- confirm.
        net.conv1.register_forward_hook(hook=forward_hook)
        net.conv1.register_backward_hook(hook=backward_hook)
        # net.conv2.register_forward_hook(hook=forward_hook)
        # net.conv2.register_backward_hook(hook=backward_hook)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()
        # for layer in net.modules():
        #     # print("\tModules {} ".format(layer))
        #     if isinstance(layer, nn.Conv2d):
        #         print("Layer dict {}".format(layer.state_dict().keys()))
        #         w1 = layer.state_dict().get("weights")
        #         learning_rate = 0.01
        #         with torch.no_grad():
        #             w1 -= learning_rate * w1.grad
        #
        #             # Manually zero the gradients after updating weights
        #             w1.grad.zero_()
        # print statistics
        running_loss += loss.item()
        if i % 5 == 4:  # print the running loss every 5 mini-batches
            print('[%d, %5d] loss: %.3f' % (epoch + 1, i + 1, running_loss / 5))
            running_loss = 0.0
# Show the accumulated quantization-drift scatter plot built up by quantize().
plt.show()
print('Finished Training')
# Evaluate top-1 accuracy on the test set with gradients disabled.
correct = 0
total = 0
with torch.no_grad():
    for data in testloader:
        images, labels = data
        outputs = net(images)
        _, predicted = torch.max(outputs.data, 1)
        total += labels.size(0)
        correct += (predicted == labels).sum().item()
# Sanity check: after training, conv1 weights should all be truncated to
# 4 decimal places by the backward hook; print a warning otherwise.
l_w = net.conv1.weight.flatten().tolist()
for i in range(len(l_w)):
    if l_w[i] * 10_000 % 1 > 0:
        # print("vl {} : {}".format(l_w[i], int(l_w[i] * 1_000) / 1_000.))
        print("not worked")
print("w1 : {}".format(
    net.conv1.weight.tolist()
))
print("w2 : {}".format(
    net.conv2.weight.tolist()
))
print("Accuracy of network on the 10_000 test images: %d %%" % (100 * correct / total))
| 8,739 | 3,058 |
from typing import List
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, Query
from sqlalchemy.orm import Session
from dispatch.enums import Visibility
from dispatch.auth.models import DispatchUser
from dispatch.auth.service import get_current_user
from dispatch.database import get_db, search_filter_sort_paginate
from dispatch.auth.models import UserRoles
from .models import JobCreate, JobPagination, JobRead, JobUpdate
from .service import create, delete, get, update, get_by_code
router = APIRouter()
@router.get("/", response_model=JobPagination, summary="Retrieve a list of all jobs.")
def get_jobs(
    db_session: Session = Depends(get_db),
    page: int = 1,
    items_per_page: int = Query(5, alias="itemsPerPage"),
    query_str: str = Query(None, alias="q"),
    sort_by: List[str] = Query(None, alias="sortBy[]"),
    descending: List[bool] = Query(None, alias="descending[]"),
    fields: List[str] = Query([], alias="fields[]"),
    ops: List[str] = Query([], alias="ops[]"),
    values: List[str] = Query([], alias="values[]"),
    current_user: DispatchUser = Depends(get_current_user),
):
    """Return a filtered, sorted, paginated page of jobs."""
    # Restricted jobs are filtered proactively inside the query itself
    # rather than via fastapi_permissions ACLs on the returned items.
    search_kwargs = dict(
        db_session=db_session,
        model="Job",
        query_str=query_str,
        page=page,
        items_per_page=items_per_page,
        sort_by=sort_by,
        descending=descending,
        fields=fields,
        values=values,
        ops=ops,
        join_attrs=[("tag", "requested_primary_worker")],
    )
    return search_filter_sort_paginate(**search_kwargs)
@router.get("/{job_id}", response_model=JobRead, summary="Retrieve a single job.")
def get_job(
    *,
    db_session: Session = Depends(get_db),
    job_id: str,
    current_user: DispatchUser = Depends(get_current_user),
):
    """Return the job with the given id, or 404 if it does not exist."""
    requested = get(db_session=db_session, job_id=job_id)
    if not requested:
        raise HTTPException(status_code=404, detail="The requested job does not exist.")
    return requested
@router.post("/", response_model=JobRead, summary="Create a new job.")
def create_job(
    *,
    db_session: Session = Depends(get_db),
    job_in: JobCreate,
    current_user: DispatchUser = Depends(get_current_user),
    background_tasks: BackgroundTasks,
):
    """Create a new job; 400 if the code is already taken."""
    existing = get_by_code(db_session=db_session, code=job_in.code)
    if existing:
        raise HTTPException(status_code=400, detail="The job with this code already exists.")
    job = create(db_session=db_session, **job_in.dict())
    # background_tasks.add_task(job_create_flow, job_id=job.id)
    return job
@router.put("/{job_id}", response_model=JobRead, summary="Update an existing job.")
def update_job(
    *,
    db_session: Session = Depends(get_db),
    job_id: str,
    job_in: JobUpdate,
    current_user: DispatchUser = Depends(get_current_user),
    background_tasks: BackgroundTasks,
):
    """
    Update an existing job; 404 if it does not exist.
    """
    job = get(db_session=db_session, job_id=job_id)
    if not job:
        raise HTTPException(status_code=404, detail="The requested job does not exist.")
    previous_job = JobRead.from_orm(job)
    # NOTE: Order matters we have to get the previous state for change detection
    # NOTE(review): previous_job is currently unused -- the change-detection
    # logic it was snapshotted for appears to have been removed; confirm
    # whether this snapshot is still needed.
    job = update(db_session=db_session, job=job, job_in=job_in)
    return job
@router.post("/{job_id}/join", summary="Join an job.")
def join_job(
    *,
    db_session: Session = Depends(get_db),
    job_id: str,
    current_user: DispatchUser = Depends(get_current_user),
    background_tasks: BackgroundTasks,
):
    """
    Add the current user to a job (runs as a background task); 404 if the
    job does not exist.
    """
    job = get(db_session=db_session, job_id=job_id)
    if not job:
        raise HTTPException(status_code=404, detail="The requested job does not exist.")
    # NOTE(review): job_add_or_reactivate_participant_flow is not among this
    # module's visible imports -- as written this line would raise NameError
    # at request time; confirm the missing import.
    background_tasks.add_task(
        job_add_or_reactivate_participant_flow, current_user.code, job_id=job.id
    )
@router.delete("/{job_id}", response_model=JobRead, summary="Delete an job.")
def delete_job(*, db_session: Session = Depends(get_db), job_id: str):
    """Delete the job with the given id; 404 if it does not exist."""
    target = get(db_session=db_session, job_id=job_id)
    if not target:
        raise HTTPException(status_code=404, detail="The requested job does not exist.")
    delete(db_session=db_session, job_id=target.id)
@router.get("/metric/forecast/{job_type}", summary="Get job forecast data.")
def get_job_forecast(*, db_session: Session = Depends(get_db), job_type: str):
    """
    Get job forecast data for the given job type.
    """
    # NOTE(review): make_forecast is not among this module's visible imports --
    # as written this would raise NameError at request time; confirm the import.
    return make_forecast(db_session=db_session, job_type=job_type)
| 4,738 | 1,552 |
import os
import re
import pickle
from flask import Flask, render_template, url_for, flash, redirect, session
from forms import SettingsForm, CredentialForm, EditCredentialForm, EditSettingsForm
from CloudStackApiClient import CloudStackApiClient
from CloudStackConfig import CloudStackConfig, cloudstack_file
from timezone import TIMEZONE, DEFAULT_TIMEZONE
from datetime import datetime, timedelta
app = Flask(__name__)
# NOTE(review): the session-signing secret is hard-coded in source; it should
# come from configuration/environment before any production deployment.
app.config['SECRET_KEY'] = '04f38b5709e0425f716a3e630b01085b'
# Pickled status file written by the autoscaler and read by the dashboard view.
autoscaling_file = "/auto-scaling/autoscaling.status"
@app.route('/')
@app.route("/dashboard")
def dashboard():
    """Render the autoscaling dashboard.

    Redirects to the credential/settings forms if configuration is
    incomplete; otherwise loads the pickled autoscaling status file and
    builds chart labels/datasets (one series per VM) for the template.
    """
    conf = CloudStackConfig()
    # Each missing config section sends the user to the matching form first.
    if not conf.has_cloudstack_section():
        session.pop('_flashes', None)
        flash(f'Please input Cloudstack credential first', 'success')
        return redirect(url_for('editcredential'))
    if not conf.has_tenant_section():
        flash(f'Please complete the settings!', 'success')
        return redirect(url_for('editsettings'))
    if not conf.has_autoscaling_section():
        flash(f'Please complete the settings', 'success')
        return redirect(url_for('editsettings'))
    params = {}
    params["title"] = 'Autoscale Dashboard'
    params["labels"] = None
    params["datasets"] = None
    params["autoscaling_data"] = None
    if not os.path.exists(autoscaling_file):
        params["message"] = 'Autoscaling file does not exist, Please try to reload in minutes'
    else:
        # NOTE(review): pickle.load is only safe because the file is written
        # by the trusted autoscaler process; never point this at user input.
        with open(autoscaling_file, 'rb') as fd:
            autoscaling_data = pickle.load(fd)
        # All VM series share the same timestamps; take labels from any one.
        labels = []
        for uuid, value in autoscaling_data['status'].items():
            labels = [ x[0] for x in value ]
            break
        # Shift the HH:MM:SS labels by the configured timezone offset (seconds).
        if conf.get_timezone() is not None:
            offset = timedelta(seconds=int(conf.get_timezone()))
            for i , utc_str in enumerate(labels):
                utc_datetime = datetime.strptime(utc_str, '%H:%M:%S') + offset
                labels[i] = utc_datetime.strftime('%H:%M:%S')
        datasets = []
        for uuid, value in autoscaling_data['status'].items():
            # Derive a deterministic '#xxxxxx' chart color from the VM UUID
            # (last char of the first four hyphen groups + last two of the fifth).
            color = re.sub('^[^-]*([^-])-[^-]*([^-])-[^-]*([^-])-[^-]*([^-])-[^-]*([^-]{2})$', '#\\1\\2\\3\\4\\5', uuid)
            datasets.append({
                "label": autoscaling_data['vm'][uuid]['name'],
                "borderColor": color,
                "fill": False,
                "data": [ x[1] for x in value ]
            })
        params["labels"] = labels
        params["datasets"] = datasets
        params["autoscaling_data"] = autoscaling_data
    return render_template('dashboard.html', **params)
@app.route("/credential", methods=['GET', 'POST'])
def credential():
    """Display the stored CloudStack credential.

    Redirects to the edit form when no credential exists yet, or when the
    form is submitted (the button on this page leads to editing).
    """
    conf = CloudStackConfig()
    if not conf.has_cloudstack_section():
        flash(f'Please input Cloudstack credential first', 'success')
        return redirect(url_for('editcredential'))
    form = CredentialForm()
    if form.validate_on_submit():
        return redirect(url_for('editcredential'))
    context = {
        "title": 'Credential',
        "form": form,
        "cs_secret": conf.get_secret(),
        "cs_key": conf.get_key(),
        "cs_endpoint": conf.get_endpoint(),
    }
    return render_template('credential.html', **context)
@app.route("/editcredential", methods=['GET', 'POST'])
def editcredential():
    """Edit the CloudStack credential.

    On a valid POST: (re)creates the cloudstack section, drops every
    section that depends on the old credential (tenant, autoscaling, vm),
    stores the new secret/key/endpoint and redirects to the settings form.
    On GET: prefills the page with any existing values.
    """
    form = EditCredentialForm()
    conf = CloudStackConfig()
    if form.validate_on_submit():
        if not conf.has_cloudstack_section():
            conf.add_cloudstack_section()
        # Changing credentials invalidates every dependent section.
        for has_section, remove_section in (
            (conf.has_tenant_section, conf.remove_tenant_section),
            (conf.has_autoscaling_section, conf.remove_autoscaling_section),
            (conf.has_vm_section, conf.remove_vm_section),
        ):
            if has_section():
                remove_section()
        conf.set_secret(form.secret.data)
        conf.set_key(form.key.data)
        conf.set_endpoint(form.endpoint.data)
        conf.update_configfile()
        flash(f'Credential updated for {form.key.data}!, Please update autoscale settings', 'success')
        return redirect(url_for('editsettings'))
    params = {"title": 'Edit Credential', "form": form}
    if conf.get_secret():
        params["cs_secret"] = conf.get_secret()
    if conf.get_key():
        params["cs_key"] = conf.get_key()
    if conf.get_endpoint():
        params["cs_endpoint"] = conf.get_endpoint()
    return render_template('editcredential.html', **params)
@app.route("/settings", methods=['GET', 'POST'])
def settings():
    """Read-only view of the current autoscale settings.

    Redirects to the edit forms when configuration is incomplete, resolves
    every stored UUID to its display name via the CloudStack API client,
    and renders the summary page.  Submitting the form jumps to editing.
    """
    conf = CloudStackConfig()
    if not conf.has_cloudstack_section():
        flash(f'Please input Cloudstack credential first', 'success')
        return redirect(url_for('editcredential'))
    if not conf.has_tenant_section():
        flash(f'Please complete the settings', 'success')
        return redirect(url_for('editsettings'))
    if not conf.has_autoscaling_section():
        flash(f'Please complete the settings', 'success')
        return redirect(url_for('editsettings'))
    form = SettingsForm()
    cs = CloudStackApiClient.get_instance()
    if form.validate_on_submit():
        return redirect(url_for('editsettings'))
    # Stored configuration (UUIDs + thresholds)...
    tenant_lb_rule_uuid = conf.get_lb_rule_uuid()
    tenant_zone_uuid = conf.get_zone_uuid()
    tenant_template_uuid = conf.get_template_uuid()
    tenant_serviceoffering_uuid = conf.get_serviceoffering_uuid()
    autoscaling_autoscaling_vm = conf.get_autoscaling_vm()
    autoscaling_upper_limit = conf.get_upper_limit()
    autoscaling_lower_limit = conf.get_lower_limit()
    # ...resolved to human-readable names through the API client.
    tenant_zone_name = cs.get_zone_name(tenant_zone_uuid)
    tenant_lb_rule_name = cs.get_lb_name(tenant_lb_rule_uuid)
    tenant_template_name = cs.get_tp_name(tenant_template_uuid)
    tenant_serviceoffering_name = cs.get_sv_name(tenant_serviceoffering_uuid)
    networks_name_list = []
    if conf.has_tenant_section():
        for nw_uuid in conf.get_networks():
            nw_name = cs.get_nw_name(nw_uuid)
            networks_name_list.append(nw_name)
    vms_name_list = []
    if conf.has_vm_section():
        for vm in conf.get_vm_list():
            vm_uuid = conf.get_vm_uuid(vm)
            vm_name = cs.get_vm_name(vm_uuid)
            vms_name_list.append(vm_name)
    # Fall back to the default timezone label when none is configured.
    timezone = dict(TIMEZONE).get(DEFAULT_TIMEZONE)
    if conf.get_timezone() is not None:
        timezone = dict(TIMEZONE).get(conf.get_timezone())
    params = {}
    params["title"] = 'Settings'
    params["form"] = form
    params["tenant_zone_name"] = tenant_zone_name
    params["tenant_lb_rule_name"] = tenant_lb_rule_name
    params["tenant_template_name"] = tenant_template_name
    params["networks_name_list"] = networks_name_list
    params["tenant_serviceoffering_name"] = tenant_serviceoffering_name
    params["autoscaling_autoscaling_vm"] = autoscaling_autoscaling_vm
    params["autoscaling_upper_limit"] = autoscaling_upper_limit
    params["autoscaling_lower_limit"] = autoscaling_lower_limit
    params["vms_name_list"] = vms_name_list
    params["timezone"] = timezone
    return render_template('settings.html', **params)
@app.route("/editsettings", methods=['GET', 'POST'])
def editsettings():
    """Edit the autoscale settings.

    GET: populates the form choices live from the CloudStack API and
    prefills defaults from the existing configuration.  Valid POST:
    rewrites the tenant/autoscaling/vm/dashboard config sections from the
    submitted form and redirects to the read-only settings view.
    """
    form = EditSettingsForm()
    cs = CloudStackApiClient.get_instance()
    messages = []
    # Populate select-field choices from live API data (force=True bypasses
    # any client-side cache); missing prerequisites surface as form errors.
    form.template_uuid.choices = cs.listTemplates(force=True)
    if not form.template_uuid.choices:
        form.template_uuid.errors = ['Please create templates first!']
        messages.append({'category':'danger','content':'Please create templates first!'})
    form.nws.choices = cs.listNetworks(force=True)
    form.lb_rule_uuid.choices = cs.listLoadBalancerRules(force=True)
    if not form.lb_rule_uuid.choices:
        form.lb_rule_uuid.errors = ['Please create LB rules first!']
        messages.append({'category':'danger','content':'Please create LB Rules first!'})
    form.serviceoffering_uuid.choices = cs.listServiceOfferings(force=True)
    form.zone_uuid.choices = cs.listZones(force=True)
    form.vms.choices = cs.listVirtualMachines(force=True)
    conf = CloudStackConfig()
    if form.validate_on_submit():
        # Each config section is dropped and rebuilt from the form; the
        # remove/add/set ordering below matters for the config writer.
        if conf.has_tenant_section():
            conf.remove_tenant_section()
        conf.add_tenant_section()
        conf.set_zone_uuid(form.zone_uuid.data)
        conf.set_lb_rule_uuid(form.lb_rule_uuid.data)
        conf.set_template_uuid(form.template_uuid.data)
        conf.set_serviceoffering_uuid(form.serviceoffering_uuid.data)
        for num, uuid in enumerate(form.nws.data, start=1):
            conf.set_nw("network{}_uuid".format(num), uuid)
        if conf.has_autoscaling_section():
            conf.remove_autoscaling_section()
        conf.add_autoscaling_section()
        conf.set_autoscaling_vm(form.autoscaling_vm.data)
        conf.set_upper_limit(form.upper_limit.data)
        conf.set_lower_limit(form.lower_limit.data)
        if conf.has_vm_section():
            conf.remove_vm_section()
        conf.add_vm_section()
        for num, uuid in enumerate(form.vms.data, start=1):
            conf.set_vm("vm{}_uuid".format(num), uuid)
        if conf.has_dashboard_section():
            conf.remove_dashboard_section()
        conf.add_dashboard_section()
        conf.set_timezone(form.timezone.data)
        conf.update_configfile()
        flash(f'Settings has been updated!', 'success')
        return redirect(url_for('settings'))
    params = {}
    # GET (or invalid POST): prefill defaults from the stored configuration.
    if conf.has_tenant_section() and conf.has_autoscaling_section():
        tenant_zone_uuid = conf.get_zone_uuid()
        tenant_lb_rule_uuid = conf.get_lb_rule_uuid()
        tenant_template_uuid = conf.get_template_uuid()
        tenant_serviceoffering_uuid = conf.get_serviceoffering_uuid()
        nws = conf.get_networks()
        autoscaling_autoscaling_vm = conf.get_autoscaling_vm()
        autoscaling_upper_limit = conf.get_upper_limit()
        autoscaling_lower_limit = conf.get_lower_limit()
        vms = []
        if conf.has_vm_section():
            for vm in conf.get_vm_list():
                vms.append(conf.get_vm_uuid(vm))
        form.zone_uuid.default = tenant_zone_uuid
        form.template_uuid.default = tenant_template_uuid
        form.nws.default = nws
        form.serviceoffering_uuid.default = tenant_serviceoffering_uuid
        form.lb_rule_uuid.default = tenant_lb_rule_uuid
        form.vms.default = vms
        if conf.get_timezone() is not None:
            form.timezone.default = conf.get_timezone()
        # process() applies the defaults set above to the rendered form.
        form.process()
        params = {
            "tenant_zone_uuid": tenant_zone_uuid,
            "tenant_lb_rule_uuid": tenant_lb_rule_uuid,
            "tenant_template_uuid": tenant_template_uuid,
            "nws": nws,
            "tenant_serviceoffering_uuid": tenant_serviceoffering_uuid,
            "autoscaling_autoscaling_vm": autoscaling_autoscaling_vm,
            "autoscaling_upper_limit": autoscaling_upper_limit,
            "autoscaling_lower_limit": autoscaling_lower_limit,
            "vms": vms,
        }
    params["title"] = 'Edit Settings'
    params["form"] = form
    params["messages"] = messages
    return render_template('editsettings.html', **params)
if __name__ == '__main__':
    # Development server entry point; debug=True must not ship to production.
    app.run(host="0.0.0.0", port=8080, debug=True)
| 11,451 | 3,682 |
#!/usr/local/bin/python
import fnmatch
import io
import os
import re
import subprocess
import sys
import asfpy.messaging
import yaml
import yaml.constructor
# LDAP to CNAME mappings for some projects
# LDAP to CNAME mappings for some projects (LDAP name -> site/list name).
WSMAP = {
    "whimsy": "whimsical",
    "empire": "empire-db",
    "webservices": "ws",
    "infrastructure": "infra",
    "comdev": "community",
}
# Hack to get around 'on: foo' being translated to 'True: foo' in pyYaml:
# keep the literal string "on" so workflow trigger keys stay addressable.
yaml.constructor.SafeConstructor.bool_values["on"] = "on"
# Debug dict populated by capture_string_location: maps YAML stream name ->
# list of (string value, source location) for every string scalar parsed;
# consumed by contains() to report where an offending string was found.
ALL_STRINGS = {}
# Allowed GH Actions, in glob format
ALLOWED_ACTIONS = [
    "actions/*",  # GitHub Common Actions
    "github/*",  # GitHub's own Action collection
    "apache/*",  # Apache's action collection
    "*/*@" + "[a-f0-9]"*40,  # Any SHA1-pinned action (assuming it's been reviewed)
]
def capture_string_location(self, node):
    """YAML string constructor that records every scalar's source location.

    Appends (value, location) to ALL_STRINGS under the stream's name, then
    delegates to the normal scalar construction.
    """
    ALL_STRINGS.setdefault(self.name, []).append((node.value, str(node.start_mark)))
    return self.construct_scalar(node)
# Re-route all YAML string-scalar construction through our capture function
# so every string's source location is recorded before normal construction.
yaml.constructor.SafeConstructor.add_constructor(u"tag:yaml.org,2002:str", capture_string_location)
def contains(filename, value=None, fnvalue=None):
    """Locate a string inside a previously-parsed YAML file.

    Searches the (value, location) pairs recorded for *filename* and
    returns the stripped location of the first string that contains
    *value* as a substring or matches the glob *fnvalue*; None otherwise.
    Comments and keys are not recorded, so they are never matched.
    """
    for text, location in ALL_STRINGS.get(filename, []):
        if value and value in text:
            return location.strip()
        if fnvalue and fnmatch.fnmatch(text, fnvalue):
            return location.strip()
def get_yaml(filename, refname):
    """Fetch *filename* from git ref *refname*, parsed as YAML.

    Returns the parsed object, or None when the file/branch does not
    exist or the contents are not valid YAML.

    FIX: stderr is silenced with subprocess.DEVNULL; the original opened
    os.devnull by hand and never closed it, leaking a file handle per call.
    """
    try:
        fdata = subprocess.check_output(
            ("/usr/bin/git", "show", "%s:%s" % (refname, filename)),
            stderr=subprocess.DEVNULL,
        )
    except subprocess.CalledProcessError:  # Git show failure, no such file/branch
        fdata = None
    if fdata:
        try:
            stream = io.BytesIO(fdata)
            stream.name = filename  # lets yaml report the filename in errors
            return yaml.safe_load(stream)
        except yaml.YAMLError:
            pass  # If yaml doesn't work, we do not need to scan it :)
    return None
def get_values(yml, tagname):
    """Recursively yield every value stored under key *tagname* in *yml*.

    Descends into nested dicts and into dicts inside lists.

    FIX: the original iterated with dict.iteritems(), which only exists in
    Python 2; .items() behaves identically and works on both.
    """
    for key, value in yml.items():
        if key == tagname:
            yield value
        elif isinstance(value, dict):
            for subvalue in get_values(value, tagname):
                yield subvalue
        elif isinstance(value, list):
            for subitem in value:
                if isinstance(subitem, dict):
                    for subvalue in get_values(subitem, tagname):
                        yield subvalue
def notify_private(cfg, subject, text):
    """Mail a security notification about *cfg*'s repository.

    The project name is inferred from the repo name (incubator- prefix
    stripped, truncated at the first '-' or '.') and mapped through WSMAP
    for projects whose LDAP name differs from their list name.
    """
    match = re.match(r"(?:incubator-)?([^-.]+)", cfg.repo_name)
    project = match.group(1)
    project = WSMAP.get(project, project)
    # Eventually: ["private@%s.apache.org" % project, "private@infra.apache.org"]
    recipients = ["notifications@infra.apache.org"]  # For now, send to projects later.
    asfpy.messaging.mail(
        sender="GitBox Security Scan <gitbox@apache.org>",
        recipients=recipients,
        subject=subject,
        message=text,
    )
def scan_for_problems(yml, filename):
    """ Scan for all potential security policy issues in the yaml.

    Returns a newline-separated string of problem descriptions; an empty
    string means no issues were found.
    """
    problems = ""
    # Rule 1: No pull_request_target triggers if secrets are used in the workflow
    if "on" in yml:
        triggers = yml.get("on", [])
        if isinstance(triggers, (list, dict)) and "pull_request_target" in triggers:
            # No ${{ secrets.GITHUB_TOKEN }} etc in pull_request_target workflows.
            secrets_where = contains(filename, fnvalue="${{ secrets.* }}")
            if secrets_where:
                problems += (
                    "- Workflow can be triggered by forks (pull_request_target) but contains references to secrets %s!\n"
                    % secrets_where
                )
            # No imports via from_secret!
            # BUG FIX: get_values() returns a generator, which is always
            # truthy, so the original `if from_secret:` fired for EVERY
            # pull_request_target workflow. Check for an actual hit instead.
            if any(True for _ in get_values(yml, "from_secret")):
                secrets_where = contains(filename, value="from_secret")
                problems += (
                    "- Workflow can be triggered by forks (pull_request_target) but contains references to secrets %s!\n"
                    % secrets_where
                )
    # Rule 2: All external refs must be pinned or within whitelisted groups
    for use_ref in get_values(yml, "uses"):
        good = False
        for am in ALLOWED_ACTIONS:
            if fnmatch.fnmatch(use_ref, am):
                good = True
                break  # one whitelist hit is enough
        if not good:
            problems += '- "%s" (%s) is not an allowed GitHub Actions reference.\n' % (
                use_ref,
                contains(filename, use_ref),
            )
    return problems
def main():
    """ Scan each pushed commit for GitHub Actions workflow problems. """
    import asfgit.cfg as cfg
    import asfgit.git as git
    for ref in git.stream_refs(sys.stdin):  # each push
        for commit in ref.commits():  # each commit in the push
            for path in commit.files():  # each file touched by the commit
                # Only GitHub Actions workflow definitions are of interest.
                if not path.startswith(".github/workflows/"):
                    continue
                if not path.endswith((".yml", ".yaml")):
                    continue
                yml = get_yaml(path, ref.name)
                problems = scan_for_problems(yml, path)
                if not problems:
                    continue
                notify_private(
                    cfg,
                    "Security policy warning for GitHub Actions defined in %s.git: %s"
                    % (cfg.repo_name, path),
                    "The following issues were detected while scanning %s in the %s repository:\n\n"
                    "%s\n\n"
                    "Please see https://s.apache.org/ghactions for our general policies on GitHub Actions.\n"
                    "With regards,\nASF Infrastructure <users@infra.apache.org>."
                    % (path, cfg.repo_name, problems),
                )
# Test when being called directly
if __name__ == "__main__":
    # Context manager closes the fixture file instead of leaking the handle.
    with open("test.yml") as yml_file:
        my_yaml = yaml.safe_load(yml_file)
    probs = scan_for_problems(my_yaml, "test.yml")
    print(probs)
| 6,679 | 1,936 |
# Generated by Django 3.1.6 on 2021-02-16 05:47
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: redefines the authentications.User.phone
    # column (optional CharField with a placeholder default).

    dependencies = [
        ('authentications', '0010_user_phone'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='phone',
            # Optional phone number; default is an Ethiopian-format placeholder.
            field=models.CharField(blank=True, default='+251911111111', max_length=50, null=True, verbose_name='Phone'),
        ),
    ]
| 453 | 160 |
from rest_framework import serializers
from calories.models import Food
class FoodSerializer(serializers.ModelSerializer):
    """Model-backed serializer exposing a Food's pk, name and calories."""

    class Meta:
        model = Food
        fields = ('pk', 'name', 'calories')
class FoodListSerializer(serializers.Serializer):
    """Wraps a list of foods under a single 'foods' key.

    FoodSerializer(many=True) already serialises a list of foods; the
    original wrapped it in an extra ListField, which produced a
    list-of-lists, so the nested serializer is used directly.
    """
    foods = FoodSerializer(many=True)

    class Meta:
        # ('foods') is just a parenthesised string — a one-element tuple
        # needs the trailing comma.
        fields = ('foods',)
class FoodRequestSerializer(serializers.Serializer):
    '''
    Used for getting data: accepts a free-form string under 'foods'
    in the incoming request payload.
    '''
    foods = serializers.CharField()
| 502 | 138 |
from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
from social_django.models import UserSocialAuth
import tweepy, os
'''
from djangoworks.settings import isDebug
if isDebug() == True:
try:
from djangoworks.configs import twitter
SOCIAL_AUTH_TWITTER_KEY = twitter.SOCIAL_AUTH_TWITTER_KEY
SOCIAL_AUTH_TWITTER_SECRET = twitter.SOCIAL_AUTH_TWITTER_SECRET
except:
pass
else:
'''
# Twitter OAuth app credentials are read from the environment; a missing
# variable fails fast with KeyError at import time.
SOCIAL_AUTH_TWITTER_KEY = os.environ['SOCIAL_AUTH_TWITTER_KEY']
SOCIAL_AUTH_TWITTER_SECRET = os.environ['SOCIAL_AUTH_TWITTER_SECRET']
@login_required
def top(request):
    """Top page: shows the user and, when 'words' is submitted, tweets it.

    On any tweepy/auth failure the page is re-rendered with an error
    message instead of crashing.
    """
    user = UserSocialAuth.objects.get(user_id=request.user.id)
    if 'words' in request.GET:
        try:
            auth = UserSocialAuth.objects.filter(user=request.user).get()
            handler = tweepy.OAuthHandler(SOCIAL_AUTH_TWITTER_KEY, SOCIAL_AUTH_TWITTER_SECRET)
            handler.set_access_token(auth.tokens["oauth_token"], auth.tokens["oauth_token_secret"])
            api = tweepy.API(auth_handler=handler)
            Message = {
                'words': request.GET.get('words'),
            }
            msg = Message['words']
            print(msg)
            api.update_status(msg)
            return render(request, 'top.html', Message)
        except Exception:
            # The original bare `except:` also swallowed SystemExit and
            # KeyboardInterrupt; `Exception` keeps the best-effort
            # behaviour without that.
            # NOTE(review): the message assumes a duplicate-tweet error,
            # but any failure (auth, network) lands here too.
            ErrorMessage = {
                'words': "Couldn't tweet because you said the same thing again.",
            }
            return render(request, 'top.html', ErrorMessage)
    else:
        return render(request, 'top.html', {'user': user})
| 1,606 | 525 |
from flask_sqlalchemy import SQLAlchemy
# Shared SQLAlchemy handle; created unbound — presumably attached to the
# Flask app elsewhere via db.init_app(app) (confirm at the call site).
db = SQLAlchemy()
# Common table-name prefix for this schema version.
prefix = 'v2_'
| 74 | 31 |
## Architecture
# Module-level hyperparameter constants, read directly by the training /
# inversion code (names such as pti_learning_rate suggest PTI-style
# latent inversion — confirm against the consuming module).
lpips_type = 'vgg'      # backbone for the LPIPS perceptual metric
first_inv_type = 'w'    # latent space used for the first inversion
optim_type = 'adam'
## Locality regularization
latent_ball_num_of_samples = 1
locality_regularization_interval = 1
use_locality_regularization = False  # disabled by default
regulizer_l2_lambda = 0.1
regulizer_lpips_lambda = 0.1
regulizer_alpha = 30
## Loss
pt_l1_lambda = 1
pt_l2_lambda = 1
pt_lpips_lambda = 1
pt_lpips_layers = [0, 1, 2, 3, 4]
## Steps
LPIPS_value_threshold = 0.06  # threshold on LPIPS (presumably an early-stop criterion — confirm)
L2_value_threshold = 0.03     # threshold on L2 (presumably an early-stop criterion — confirm)
max_pti_steps = 350
first_inv_steps = 450
max_images_to_invert = 30
## Optimization
n_avg_samples = 10000
pti_learning_rate = 3e-4
first_inv_lr = 5e-3
train_batch_size = 1
use_last_w_pivots = False
| 646 | 313 |
# Example funds-transfer report (two parties, a transaction and report
# metadata) used as sample input; value is identical to the original,
# only the literal's formatting differs.
sample_data = {
    "role": [
        {
            "roleType": "beneficiaryCustomer",
            "party": [
                {
                    "partyType": "individual",
                    "account": [
                        {
                            "network": "swift",
                            "institutionName": "Westpac Banking Corporation",
                            "institutionCode": "WBC",
                            "country": "AU",
                            "branchId": "733-750",
                            "branchName": "Wangaratta",
                            "streetAddress": "98 Murphy Street",
                            "postcode": "3677",
                            "suburb": "Wangaratta",
                            "state": "VIC",
                            "number": "063015189",
                        }
                    ],
                    "name": [{"fullName": "DAISEY TRIMUEL"}],
                    "gender": "MALE",
                    "identification": [
                        {
                            "identificationType": "benefitsCard",
                            "identifier": "22417129951",
                        }
                    ],
                    "jobTitle": "Unemployed",
                    "address": [
                        {
                            "suburb": "Wangaratta",
                            "streetAddress": "1 Steane Street",
                            "postcode": "3677",
                            "state": "VIC",
                            "country": "AU",
                            "geolocation": {"lon": "146.3088965", "lat": "-36.3563499"},
                        }
                    ],
                    "partyId": "dcf06b5b29c3e950e392d6b5f26d3003e8c84c8fed79edf9edf07d9be6c15d5b",
                    "consolidatedPartyId": "293283256248a98e52bd86aa85b77e13cee9bf38db13ab28d967ecd680bde941",
                }
            ],
        },
        {
            "roleType": "orderingCustomer",
            "party": [
                {
                    "partyType": "individual",
                    "account": [
                        {
                            "network": "swift",
                            "branchName": "BANCO MILLENNIUM ATLANTICO, S.A.",
                            "country": "AO",
                            "suburb": "Luanda",
                            "branchId": "PRTLAOLU",
                            "number": "165993405",
                        }
                    ],
                    "name": [{"fullName": "Mrs M ACEDO"}],
                    "gender": "FEMALE",
                    "identification": [
                        {
                            "identificationType": "benefitsCard",
                            "identifier": "2241712983951",
                        }
                    ],
                    "address": [
                        {
                            "suburb": "Chela",
                            "postcode": "",
                            "state": "",
                            "country": "AO",
                            "geolocation": {"lon": "15.43358", "lat": "-12.30261"},
                        }
                    ],
                    "partyId": "9b399e47b71d50fa2978fa18a316278e8b3ff6693e238f4184636a977dd44eeb",
                    "consolidatedPartyId": "f6add6b65ff284fb27588e4188a2f60233fa41cce34cadca484196684b310c01",
                }
            ],
        },
    ],
    "transaction": {
        "direction": "incoming",
        "reference": "0100438011944",
        "amount": "138973.64",
        "transactionDatetime": "2020-04-10T09:08:08+1000",
    },
    "report": {
        "reportNumber": 595146553746,
        "reportType": "internationalFundsTransferInstruction",
        "reporter": "Westpac Banking Corporation",
        "reporterId": "45430",
        "processedDatetime": "2020-04-11T04:33:30+1000",
        "submissionId": "45430-20200411-001",
    },
}
# The Elasticsearch mapping repeats the same "text + .keyword sub-field"
# definition dozens of times; build it with small helpers instead. The
# resulting dict value is identical to the original hand-written literal.

def _keyword_text(copy_to_all=False, analyzer=None):
    """Analysed text field with a 256-char `keyword` sub-field for exact match.

    copy_to_all: also copy the value into the catch-all `all` field.
    analyzer: optional custom analyzer name for the text part.
    """
    field = {"type": "text"}
    if copy_to_all:
        field["copy_to"] = "all"
    if analyzer is not None:
        field["analyzer"] = analyzer
    field["fields"] = {"keyword": {"type": "keyword", "ignore_above": 256}}
    return field


def _nested(properties):
    """Nested-object mapping with the given sub-properties."""
    return {"type": "nested", "properties": properties}


# Dates arrive with an explicit offset, as a bare date, or in ES's default
# optional-time form.
_DATETIME_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ||yyyy-MM-dd||strict_date_optional_time"

_ACCOUNT = _nested({
    "branchName": _keyword_text(),
    "branchId": _keyword_text(),
    "country": _keyword_text(),
    "network": _keyword_text(copy_to_all=True),
    # Account numbers are normalised to digits only via the
    # account_number_filter char filter declared in the index settings.
    "number": _keyword_text(copy_to_all=True, analyzer="account_number_analyzer"),
    "postcode": _keyword_text(copy_to_all=True),
    "state": _keyword_text(copy_to_all=True),
    "streetAddress": _keyword_text(copy_to_all=True),
    "suburb": _keyword_text(copy_to_all=True),
})

_ADDRESS = _nested({
    "country": _keyword_text(copy_to_all=True),
    "geolocation": {"type": "geo_point"},
    "postcode": _keyword_text(copy_to_all=True),
    "state": _keyword_text(copy_to_all=True),
    "streetAddress": _keyword_text(copy_to_all=True),
    "suburb": _keyword_text(copy_to_all=True),
})

_PARTY = _nested({
    "account": _ACCOUNT,
    "address": _ADDRESS,
    "gender": _keyword_text(copy_to_all=True),
    "identification": _nested({
        "identifier": _keyword_text(copy_to_all=True),
        "identificationType": _keyword_text(copy_to_all=True),
        "identificationSubType": _keyword_text(),
    }),
    "jobTitle": _keyword_text(copy_to_all=True),
    "name": _nested({
        "fullName": _keyword_text(copy_to_all=True),
    }),
    "partyId": _keyword_text(copy_to_all=True),
    "partyType": _keyword_text(copy_to_all=True),
})

transaction_report_mapping = {
    "index_patterns": ["transaction-report"],
    "template": {
        "settings": {
            "number_of_shards": 3,
            "number_of_replicas": 0,
            "index.codec": "best_compression",
            "index.refresh_interval": "30s",
            "index.max_result_window": 20000,
            # "sort.field": "report.processedDatetime",
            # "sort.order": "asc",
            "analysis": {
                "analyzer": {
                    "account_number_analyzer": {
                        "tokenizer": "standard",
                        "char_filter": ["account_number_filter"],
                    }
                },
                "char_filter": {
                    # Strip every non-digit so formatted and raw account
                    # numbers tokenize identically.
                    "account_number_filter": {
                        "type": "pattern_replace",
                        "pattern": "[^0-9]",
                        "replacement": "",
                    }
                },
            },
        },
        "mappings": {
            "properties": {
                "role": _nested({
                    "party": _PARTY,
                    "roleType": _keyword_text(copy_to_all=True),
                }),
                "transaction": {
                    "properties": {
                        "amount": {"type": "double"},
                        "direction": _keyword_text(),
                        "transactionDatetime": {"type": "date", "format": _DATETIME_FORMAT},
                    }
                },
                "report": {
                    "properties": {
                        "processedDatetime": {"type": "date", "format": _DATETIME_FORMAT},
                        "reportNumber": {"type": "long"},
                        "reportType": _keyword_text(),
                        "reporter": _keyword_text(),
                        "reporterId": {"type": "integer"},
                        "submissionId": {"type": "keyword"},
                    }
                },
                # Catch-all field populated via copy_to from the fields above.
                "all": {"type": "text"},
            }
        },
    },
}
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 9 22:11:56 2021
@author: Mike
"""
import serial
import time
# Open the Arduino on COM2 at 9600 baud and give the board time to reset
# after the serial port opens.
arduino = serial.Serial("COM2", 9600)
time.sleep(2)
print("Presione 1 para mandar y 2 para apagar: ")  # "Press 1 to send, 2 to quit"
while 1:
    datousuario = input()  # user's menu choice
    if datousuario == "1":
        # Send three semicolon-separated readings as raw bytes.
        arduino.write(b'34.23;34.23;45.22')
        print("Mandar")
    elif datousuario == "2":
        # Close the port and exit the loop.
        arduino.close()
        print("Apagar")
        break
| 458 | 189 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 1 09:15:37 2019
@author: athreya
"""
#1
def tax(bill):
    """Return the restaurant bill with 8% tax applied (also prints it)."""
    total = bill * 1.08
    print("With tax: %f" % total)
    return total
#2
def tip(bill):
    """Return the restaurant bill with a 15% tip applied (also prints it)."""
    total = bill * 1.15
    print("With tip: %f" % total)
    return total
# Worked example: tax first, then tip on the taxed amount.
meal_cost = 100
meal_with_tax = tax(meal_cost) #108.00
meal_with_tip = tip(meal_with_tax) #124.20
#Functions 3
def square(n):
    """Return the square of a number, printing it as a side effect."""
    result = n ** 2
    print("%d squared is %d." % (n, result))
    return result

square(10)  # prints: 10 squared is 100.
#4
def power(base, exponent):
    """Print and return base raised to exponent.

    The original printed the result but returned None, so callers could
    not use the computed value; returning it is backward-compatible.
    """
    result = base ** exponent
    print ("%d to the power of %d is %d." % (base, exponent, result))
    return result

power(5, 3) #5 to the power of 3 is 125.
#5
import math
print(math.sqrt(25)) #5.0
#from math import *  # would import every math name into the namespace
#6
def biggest_number(*args):
    """Print and return the largest of the given arguments."""
    largest = max(args)
    print(largest)
    return largest
def smallest_number(*args):
    """Print and return the smallest of the given arguments."""
    smallest = min(args)
    print(smallest)
    return smallest
def distance_from_zero(arg):
    """Print and return the absolute value of arg."""
    magnitude = abs(arg)
    print(magnitude)
    return magnitude
# Demo calls; each function prints its result (see OUTPUT below).
biggest_number(-10, -5, 5, 10)
smallest_number(-10, -5, 5, 10)
distance_from_zero(-10)
#OUTPUT
#10
#-10
#10
| 1,240 | 544 |
# -*- encoding: utf-8 -*-
"""
Views: Encontramos todas las vistas del sistema de establecimientos.
@author Camilo Ramírez
@contact camilolinchis@gmail.com
camilortte@hotmail.com
@camilortte on Twitter
@copyright Copyright 2014-2015, RecomendadorUD
@license GPL
@date 2014-10-10
@satus Pre-Alpha
@version= 0..215
"""
import datetime
import json
#Django
from django.shortcuts import render,redirect
from django.views.generic.base import View, TemplateView
from django.http import HttpResponse, Http404,HttpResponseRedirect
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from django.forms import DateField
from django.contrib.gis.geos import Polygon, GEOSGeometry
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.urlresolvers import reverse_lazy , lazy,reverse
from django.contrib import messages
from django.views.generic import (
DetailView, CreateView , ListView, UpdateView,
DeleteView)
#externals apps
from vanilla import CreateView as CreateViewVanilla
from vanilla import TemplateView as TemplateViewVanilla
from haystack.query import SearchQuerySet
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
from notifications import notify
#Internal apps
from apps.account_system.models import User
from apps.recommender_system.models import EstablecimientosRecommender
#Serializers
from .serializers import EstablecimientoSerializer, PaginatedEstablecimientoSerializer
#Models
from .models import (
Establecimiento, Comentario, Imagen, SubCategoria,
Categoria, TiposSolicitud, Solicitud)
#Forms
from .forms import (
ComentarioForm, EstablecimientoForm,
UploadImageForm, CategoriasFilterForm,
SolicitudForm, EstablecimientoTemporalForm)
class DetalleEstablecimientoView(DetailView):
    u"""
    Shows all the data of an establishment (including its images) and
    loads the forms (comments, rating) needed for interaction.

    Inherits everything from DetailView.

    Attributes:
        template_name (str): Template that will be rendered.
        model (Model): Model class to use.
    """
    template_name = "establishment/detail.html"
    model= Establecimiento

    def get_context_data(self, **kwargs):
        u"""
        Adds context entries such as the images and the forms to add and
        delete images as well as the forms to add and delete comments.

        Also paginates the comments and performs the checks on who may
        delete and add content.
        """
        context = super(DetalleEstablecimientoView, self).get_context_data(**kwargs)
        establecimiento = self.object
        context['imagenes'] = Imagen.objects.filter(establecimientos=establecimiento)
        count=Imagen.objects.filter(establecimientos=establecimiento).count()
        if count < settings.MAX_IMAGES_PER_PLACE:
            # Placeholder slots for image positions not yet filled.
            context['imagenes_nulas'] = range(count,settings.MAX_IMAGES_PER_PLACE)
        # NOTE(review): this stores the model *class*, not the instance —
        # looks like it was meant to be `establecimiento`; confirm how the
        # template uses it.
        context['establecimiento'] =Establecimiento
        if self.request.user.is_authenticated():
            context['form_image'] = UploadImageForm
            usuario = self.request.user
            usuario_comentario=Comentario.objects.filter(author=usuario,post=establecimiento)
            # Empty queryset: the user has not commented yet, so offer the form.
            if not usuario_comentario:
                data = {
                    'sender':context['object'].id,
                    'is_public':True
                }
                context['form'] = ComentarioForm(initial=data)
            else:
                # Non-empty: the user already commented and may not comment again.
                pass
            if self.request.user.is_organizacional():
                propietario=Establecimiento.objects.filter(administradores=self.request.user,id=establecimiento.id)
                if propietario:
                    # The requesting user owns this establishment.
                    context['propietario']=True
        comentarios=Comentario.objects.filter(post=establecimiento,is_public=True)
        paginator = Paginator(comentarios, settings.MAX_COMMENTS_PER_PAGE) # Show 10 contacts per page
        page = self.request.GET.get('page')
        try:
            comentarios = paginator.page(page)
        except PageNotAnInteger:
            # If page is not an integer, deliver first page.
            comentarios = paginator.page(1)
        except EmptyPage:
            # If page is out of range (e.g. 9999), deliver last page of results.
            comentarios = paginator.page(paginator.num_pages)
        context['comentarios'] = comentarios
        context['can_upload_image']=self.can_upload_image()
        return context

    def can_upload_image(self):
        """
        Checks whether it is possible to upload another image.
        """
        if self.request.user.is_authenticated():
            if self.object.imagen_set.all().count() >= settings.MAX_IMAGES_PER_PLACE:
                return False
            else:
                # Owners and superusers may always upload below the global cap.
                if self.request.user.is_superuser or self.object.administradores.filter(id=self.request.user.id):
                    return True
                else:
                    # Regular users are limited to MAX_UPLOAD_PER_USER per place.
                    if Imagen.objects.filter(usuarios=self.request.user,establecimientos=self.object).count() >= settings.MAX_UPLOAD_PER_USER:
                        return False
                    return True
        else:
            return False

    @method_decorator(cache_control(must_revalidate=True, no_cache=True, no_store=True))
    def dispatch(self, *args, **kwargs):
        # Python 2 print statement: logs every detail-page hit.
        print "Establecimiento consultado"
        return super(DetalleEstablecimientoView, self).dispatch(*args, **kwargs)
class JSONMixin(object):
    u"""
    Mixin that renders the response context (a Django form) as JSON,
    used by views whose clients expect JSON instead of HTML.
    """
    def render_to_response(self, context, **httpresponse_kwargs):
        return self.get_json_response(
            self.convert_context_to_json(context),
            **httpresponse_kwargs
        )

    def get_json_response(self, content, **httpresponse_kwargs):
        # Wrap the serialised payload in an application/json response.
        return HttpResponse(
            content,
            content_type='application/json',
            **httpresponse_kwargs
        )

    def convert_context_to_json(self, context):
        u"""
        Serialises a Django form and returns a JSON object with its
        fields and errors.

        Returns a JSON-encoded string.
        """
        form = context.get('form')
        to_json = {}
        options = context.get('options', {})
        to_json.update(options=options)
        fields = {}
        for field_name, field in form.fields.items():
            # Date values are rendered as dd.mm.YYYY; everything else is
            # passed through as unicode (or the raw falsy value).
            if isinstance(field, DateField) \
                    and isinstance(form[field_name].value(), datetime.date):
                fields[field_name] = \
                    unicode(form[field_name].value().strftime('%d.%m.%Y'))
            else:
                fields[field_name] = \
                    form[field_name].value() \
                    and unicode(form[field_name].value()) \
                    or form[field_name].value()
        to_json.update(fields=fields)
        if form.errors:
            errors = {
                'non_field_errors': form.non_field_errors(),
            }
            fields = {}
            for field_name, text in form.errors.items():
                fields[field_name] = text
            errors.update(fields=fields)
            to_json.update(errors=errors)
        else:
            # On success the field payload is discarded; only the success
            # flag is sent back.
            to_json={}
            context['success'] = True
        to_json.update(success=context.get('success', False))
        # Python 2 print statement used as debug logging.
        print "RETORNA ", json.dumps(to_json)
        return json.dumps(to_json)
class CommentCreateView(JSONMixin, CreateView):
    u"""
    Creates a new comment; uses the JSONMixin to answer with JSON.

    Attributes:
        model (Model): Model class to use.
        form_class (Form): Form class used for creation.
    """
    model = Comentario
    form_class = ComentarioForm

    # once the user submits the form, validate the form and create the new user
    def post(self, request, *args, **kwargs):
        u"""
        Validates the form data and, depending on the result, proceeds
        with creating the comment. validate_comment() enforces the
        one-comment-per-user-per-establishment rule.

        Returns:
            The parent form_valid(form) when the comment is valid,
            otherwise the parent form_invalid(form).
        """
        self.object = None
        # setup the form
        # we can use get_form this time as we no longer need to set the data property
        form = self.get_form(self.form_class)
        # print "KAWARGS: ",kwargs
        # print "ARGS; ",args
        self.establecimiento_id=kwargs['pk']
        self.success_url=reverse('establecimiento_detail_url',kwargs={'pk':self.establecimiento_id})
        form.instance.author = self.request.user
        form.instance.post = Establecimiento.objects.get(id=self.establecimiento_id)
        if form.is_valid() and self.validate_comment():
            return self.form_valid(form)
        else:
            return self.form_invalid(form)

    def form_valid(self, form):
        # Persist the comment and answer via the JSON mixin.
        self.object = form.save()
        messages.success(self.request, u"Comentario creado.")
        return self.render_to_response(self.get_context_data(form=form))

    def validate_comment(self):
        u"""
        Validates that the user has not previously commented on the same
        establishment.

        Returns:
            True if the user may comment,
            False if they already commented and may not.
        """
        comentario=Comentario.objects.filter(author=self.request.user.id,post=self.establecimiento_id)
        print comentario
        if not comentario:
            # No previous comment exists.
            return True
        else:
            # A comment already exists.
            return False

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(CommentCreateView, self).dispatch(*args, **kwargs)
class EliminarEstablecimiento(DeleteView):
    u"""
    Deletes an establishment; only the owning user may do so.
    """
    model = Establecimiento
    # NOTE(review): URL name looks truncated ('..._ur' vs '..._url') —
    # confirm against the URLconf.
    success_url=reverse_lazy('establecimientos_propios_ur')

    def get_object(self, queryset=None):
        u"""
        Validates that the object to delete is owned by the user deleting
        it.

        Returns:
            The object when the user is deleting their own establishment.
        Raises:
            Http404 when an invalid user attempts the deletion.
        """
        establecimiento_id= self.kwargs['pk']
        establecimiento=Establecimiento.objects.filter(id=establecimiento_id,administradores=self.request.user.id)
        # NOTE(review): is_organizacional is not called here (a bound method
        # is always truthy); elsewhere it is invoked with () — confirm.
        if establecimiento and (self.request.user.is_organizacional or self.request.user.is_superuser ):
            context = super(EliminarEstablecimiento, self).get_object(queryset=None)
            return context
        # Otherwise: someone is trying to break the system.
        else:
            print "No puede elimianr el comentario y esta intentando romper el sistema"
            raise Http404

    def delete(self, request, *args, **kwargs):
        # Delegate the deletion, then flash a success message.
        ctx= super(EliminarEstablecimiento, self).delete(request,*args, **kwargs)
        messages.success(self.request, u"Establecimiento Eliminado.")
        return ctx

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(EliminarEstablecimiento, self).dispatch(*args, **kwargs)
class EliminarComentario(DeleteView):
u"""
Clase para eliminar un comentario, este solo podrá ser eliminado
por el autor, el propietario del establecimiento o un usuario
administrador
Atributes:
model (Model): Modelo que se usará.
"""
model = Comentario
def get_object(self, queryset=None):
u"""
Validamos que el objeto que se eliminará sea propiedad del
usuario que lo elminará
Returns:
Context si el usuario es quien eliminara su propio comentario
Http404 si es un usuario invalido intentnaod eliminar.
"""
establecimiento_id= self.kwargs['establecimiento_id']
comentario_id= self.kwargs['comentario_id']
#obj = super(EliminarComentario, self).get_object()
comentario=Comentario.objects.filter(author=self.request.user.id,post=establecimiento_id,id=comentario_id)
#Si comentario no esta vacio
if ( comentario):
#comentario.delete()
context = {'establecimiento_id':establecimiento_id, 'comentario_id':comentario_id}
return context
#De lo contrario
else:
print "No puede elimianr el comentario y esta intentando romper el sistema"
raise Http404
return {'comentario_id':comentario_id}
def delete(self, request, *args, **kwargs):
u"""
se comprueba que el comentario a eliminar sea eliminado por el propietario del comentario
o por un usuario administrador. Si todo es valido se eliminara.
Returns:
HttpResponseRedirect A el establecimiento que alojó el comentario.
"""
comentario_id = self.kwargs['comentario_id']
establecimiento_id = self.kwargs['establecimiento_id']
if request.user.is_superuser:
comentario=Comentario.objects.get(id=comentario_id)
comentario.delete()
else:
comentario=Comentario.objects.filter(author=request.user,
post=Establecimiento.objects.get(id=establecimiento_id),
id=comentario_id)
#No esta vacio
if comentario:
if comentario[0].author.id==request.user.id:
comentario[0].delete()
messages.success(self.request, u"Comentario Eliminado.")
self.success_url = reverse('establecimiento_detail_url', kwargs={'pk': establecimiento_id})
return HttpResponseRedirect(self.success_url)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(EliminarComentario, self).dispatch(*args, **kwargs)
class Establecimientoslist(ListView):
    u"""
    Lists the establishments in the system according to search criteria.

    Attributes:
        paginate_by (int): Number of establishments per page.
        model (Model): Model.
        template_name (str): Template that renders the listing.
    """
    paginate_by = 10
    model = Establecimiento
    template_name = "establishment/list.html"

    def get_context_data(self, **kwargs):
        u"""
        Adds the categories filter form to the context.
        """
        context = super(Establecimientoslist, self).get_context_data(**kwargs)
        #context['now'] = timezone.now()
        context['form_categorias']=CategoriasFilterForm
        return context
class CrearEstablecimiento(CreateViewVanilla):
    u"""
    Creates a new establishment.
    """
    model= Establecimiento
    template_name = "establishment/create.html"
    content_type = None
    form_class = EstablecimientoForm
    success_url = lazy(reverse, str)("home_url") # placeholder; overridden in get_success_url

    def get_success_url(self):
        # Flash a success message and redirect to the new establishment's
        # detail page.
        messages.success(self.request, u"Establecimiento creado.")
        return reverse_lazy('establecimiento_detail_url',
            kwargs={'pk': self.object.id})

    def form_invalid(self, form):
        return super(CrearEstablecimiento, self).form_invalid(form)

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(CrearEstablecimiento, self).dispatch(*args, **kwargs)

    def get_context_data(self, **kwargs):
        u"""
        Adds longitude and latitude to the context.
        """
        ctx = super(CrearEstablecimiento, self).get_context_data(**kwargs)
        form=kwargs.get('form')
        position= form.instance.position
        if position is not None:
            pnt = GEOSGeometry(position) # WKT
            # NOTE(review): GEOS convention is x=lon / y=lat, but here y
            # feeds 'lng' and x feeds 'lat' — confirm the stored
            # coordinate order.
            ctx['lng'] = pnt.y
            ctx['lat'] = pnt.x
        return ctx
"""
DEPRECATE
"""
class RecargarDatosEstablecimiento(TemplateViewVanilla):
    # Marked DEPRECATE in the source; duplicates JSONMixin's form-to-JSON
    # serialisation logic (minus the debug print).
    def render_to_response(self, context, **httpresponse_kwargs):
        return self.get_json_response(
            self.convert_context_to_json(context),
            **httpresponse_kwargs
        )

    def get_json_response(self, content, **httpresponse_kwargs):
        # Wrap the serialised payload in an application/json response.
        return HttpResponse(
            content,
            content_type='application/json',
            **httpresponse_kwargs
        )

    def convert_context_to_json(self, context):
        u""" Serialises a Django form and returns a JSON object with its
        fields and errors.
        """
        form = context.get('form')
        to_json = {}
        options = context.get('options', {})
        to_json.update(options=options)
        fields = {}
        for field_name, field in form.fields.items():
            # Date values are rendered as dd.mm.YYYY; everything else is
            # passed through as unicode (or the raw falsy value).
            if isinstance(field, DateField) \
                    and isinstance(form[field_name].value(), datetime.date):
                fields[field_name] = \
                    unicode(form[field_name].value().strftime('%d.%m.%Y'))
            else:
                fields[field_name] = \
                    form[field_name].value() \
                    and unicode(form[field_name].value()) \
                    or form[field_name].value()
        to_json.update(fields=fields)
        if form.errors:
            errors = {
                'non_field_errors': form.non_field_errors(),
            }
            fields = {}
            for field_name, text in form.errors.items():
                fields[field_name] = text
            errors.update(fields=fields)
            to_json.update(errors=errors)
        else:
            # On success only the flag is returned; field data is discarded.
            to_json={}
            context['success'] = True
        to_json.update(success=context.get('success', False))
        return json.dumps(to_json)
class UpdateEstablecimiento(UpdateView):
    u"""
    Updates an establishment's data.
    """
    model= Establecimiento
    template_name = "establishment/edit.html"
    content_type = None
    form_class = EstablecimientoForm
    success_url = lazy(reverse, str)("home_url") # placeholder; overridden in get_success_url

    def get_success_url(self):
        # Flash a success message and redirect to the detail page.
        messages.success(self.request, u"Establecimiento Actualizado.")
        return reverse_lazy('establecimiento_detail_url',
            kwargs={'pk': self.object.id})

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(UpdateEstablecimiento, self).dispatch(*args, **kwargs)

    def get_context_data(self, **kwargs):
        u"""
        Adds longitude and latitude to the context.
        """
        ctx = super(UpdateEstablecimiento, self).get_context_data(**kwargs)
        pnt = GEOSGeometry(self.object.position) # WKT
        # NOTE(review): GEOS convention is x=lon / y=lat, but here y feeds
        # 'lng' and x feeds 'lat' — confirm the stored coordinate order.
        ctx['lng'] = pnt.y
        ctx['lat'] = pnt.x
        return ctx

    def get_object(self, queryset=None):
        u"""
        Only an organisational user owning the establishment, or an
        administrator, may update the data.

        Returns:
            The Establecimiento object when the user is allowed.
        Raises:
            Http404 when they are not.
        """
        if self.request.user.is_superuser:
            # Administrators may edit anything.
            obj = super(UpdateEstablecimiento, self).get_object()
        else:
            # Organisational users may only edit their own establishments.
            if self.request.user.is_organizacional():
                obj = super(UpdateEstablecimiento, self).get_object()
                establecimientos=Establecimiento.objects.filter(administradores=self.request.user, id=obj.id)
                if not establecimientos:
                    raise Http404
            else:
                print "Intentando romper el sistema"
                raise Http404
        return obj
# class CrearEstablecimiento2(CreateViewVanilla):
# u"""
# DEPRECATE
# ---------
# Crear un establecimiento mediante JSON y AJAX
# """
# model= Establecimiento
# template_name = "establishment/create2.html"
# content_type = None
# form_class = CategoriasForm2
# success_url = lazy(reverse, str)("home_url")
# def form_invalid(self, form):
# # This method is called when valid form data has been POSTed.
# # It should return an HttpResponse.
# return super(CrearEstablecimiento2, self).form_invalid(form)
# @method_decorator(login_required)
# def dispatch(self, *args, **kwargs):
# return super(CrearEstablecimiento2, self).dispatch(*args, **kwargs)
# class Busqueda(TemplateView):
# template_name = "establishment/busqueda.html"
class Autocomplete(View):
    u"""
    Provides search autocompletion results via JSON.
    """
    def get(self, request, *args, **kwargs):
        u"""
        Runs the search queries as follows:
        sqs --> names matching the search characters
        (the email / web page / address / category lookups are currently
        commented out).

        TODO:
            Add categories and sub-categories to the search.
        Notes:
            Take care to return a JSON object and not a bare Python list,
            which would be vulnerable to an XSS attack.
        Returns:
            JSON object with the matching results.
        """
        q=request.GET.get('q', None)
        if q is not None and q != "":
            # NOTE(review): the first queryset below is immediately
            # overwritten by the second assignment and never used.
            sqs = SearchQuerySet().autocomplete(nombre__icontains=q)[:10]
            sqs = SearchQuerySet().autocomplete(nombre=q)[:10]
            # sqs2 = SearchQuerySet().autocomplete(email=q)[:10]
            # sqs3 = SearchQuerySet().autocomplete(web_page=q)[:10]
            # sqs4 = SearchQuerySet().autocomplete(address=q)[:10]
            # sqs5 = SearchQuerySet().autocomplete(sub_categorias=q)[:10]
            # sqs5 = SearchQuerySet().autocomplete(tag=q)[:10]
            establecimientos=[]
            establecimientos=self.get_establecimientis(establecimientos, sqs)
            # establecimientos=self.get_establecimientis(establecimientos, sqs2)
            # establecimientos=self.get_establecimientis(establecimientos, sqs3)
            # establecimientos=self.get_establecimientis(establecimientos, sqs4)
            # establecimientos=self.get_establecimientis(establecimientos, sqs5)
            # establecimientos=self.get_establecimientis(establecimientos, sqs5)
        else:
            # categoria=Categoria.objects.filter(tag__icontains=q)
            # sub_cate=SubCategoria.objects.filter(categorias=categoria)
            # query=establecimientos.objects.filter(sub_categorias=sub_cate)
            # if query:
            #     establecimientos=query
            # else:
            #     establecimientos =[]
            establecimientos= []
        # Make sure you return a JSON object, not a bare list.
        # Otherwise, you could be vulnerable to an XSS attack.
        the_data = json.dumps({
            'results': establecimientos
        })
        return HttpResponse(the_data, content_type='application/json')

    def existe(self,establecimientos,id):
        """
        Checks whether a result id is already present, so information is
        not repeated.
        """
        # NOTE(review): compares element.get('id') against str(id), but
        # get_establecimientis stores the raw pk — types may never match
        # if pk is an int; confirm.
        for element in establecimientos:
            if element.get('id')==str(id):
                return True
        return False

    def get_establecimientis(self,establecimientos,sqs):
        u"""
        Appends new results (establishments) without repeating them.
        """
        for resultado in sqs:
            if not self.existe(establecimientos, resultado.pk):
                temporal={'id':resultado.pk,'nombre':resultado.nombre,'address':resultado.address,
                    'web_page':resultado.web_page,'email':resultado.email,'sub_categorias':resultado.sub_categorias}
                establecimientos.append(temporal)
        return establecimientos
class DeleteImagen(DeleteView):
    u"""
    Delete an image that belongs to an establishment.
    """
    # Fallback redirect; success_url is overridden per request in
    # get_object() once the establishment id is known.
    model=Imagen
    success_url = reverse_lazy('home_url')
    def get_object(self, queryset=None):
        u"""
        Only the uploader of the image, an organizational user that
        administers an establishment, or a superuser may delete it.
        Returns:
            The Imagen object when the requester is allowed.
        Raises:
            Http404: when the requester is not allowed.
        """
        if self.request.user.is_superuser:
            # Superusers may always delete.
            obj = super(DeleteImagen, self).get_object()
        else:
            obj = super(DeleteImagen, self).get_object()
            # Did the requesting user upload this image?
            imagen = Imagen.objects.filter(usuarios=self.request.user,id=obj.id)
            if imagen:
                # Uploader match: deletion allowed.
                pass
            else:
                # Otherwise the user must be an organizational admin.
                if self.request.user.is_organizacional:
                    establecimientos=Establecimiento.objects.filter(administradores=self.request.user)
                    if establecimientos:
                        obj = super(DeleteImagen, self).get_object()
                    else:
                        print "Intentando romper el sistema"
                        raise Http404
                else:
                    print "Intentando romper el sistema"
                    raise Http404
        print " SE BORRARA LA IMAGEN: ",obj.id
        messages.success(self.request, u"Imagen eliminada.")
        # Send the user back to the establishment detail page afterwards.
        self.success_url=reverse_lazy('establecimiento_detail_url',
            kwargs={'pk': self.kwargs['est_id']})
        return obj
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # Authentication is required for every HTTP method.
        return super(DeleteImagen, self).dispatch(*args, **kwargs)
from datetime import datetime as mydatetime
class Solicitar(View):
    u"""
    Create a request ("solicitud") about an establishment, depending on
    the request type. Implements GET (render forms) and POST (process).
    """
    def get(self, request,tipo_solicitud,establecimiento_id):
        u"""
        Load the forms depending on the request type:
        0 --> request to administer the establishment
        1 --> request to edit the establishment; the edit form plus the
              establishment's current longitude/latitude are added to
              the context.
        2 --> deletion request (duplicated place)
        3 --> deletion request (non-existent place)
        A SolicitudForm is included for every request type.
        """
        establecimiento=Establecimiento.objects.get(id=establecimiento_id)
        if tipo_solicitud=='0':
            formulario= SolicitudForm()
        else:
            if tipo_solicitud=='1' and establecimiento_id:
                formulario= SolicitudForm()
                est=Establecimiento.objects.get(id=establecimiento_id)
                # GEOS point: .y is longitude here, .x latitude.
                lng=est.position.y
                lat=est.position.x
                formulario2= EstablecimientoTemporalForm(instance=est)
                return render(request, 'establishment/solicitud.html', {
                    'form':formulario, 'form2':formulario2,
                    'lng':lng,'lat':lat,
                    'tipo_solicitud':tipo_solicitud,
                    'establecimiento':establecimiento
                })
            else:
                if tipo_solicitud=='2':
                    formulario= SolicitudForm()
                else:
                    if tipo_solicitud=='3':
                        formulario= SolicitudForm()
                    else:
                        # Unknown request type.
                        raise Http404
        return render(request, 'establishment/solicitud.html',
            {'form':formulario,
             'tipo_solicitud':tipo_solicitud,
             'establecimiento':establecimiento
            })
    def post(self, request,tipo_solicitud,establecimiento_id):
        u"""
        Process the submitted request depending on its type:
        0 --> administration request
        1 --> edit request (also saves a temporary establishment)
        2 --> deletion request (duplicated place)
        3 --> deletion request (non-existent place)
        """
        enviada=False
        tipo=""
        id_EstablecimientoTemporal=None
        if request.user.is_authenticated():
            """Solicitud de administracion"""
            if tipo_solicitud=='0':
                formulario= SolicitudForm(request.POST)
                if formulario.is_valid():
                    enviada=True
                    tipo="administracion"
            else:
                """Solictud de edicion del establecimiento"""
                if tipo_solicitud=='1':
                    formulario= SolicitudForm(request.POST)
                    formulario2= EstablecimientoTemporalForm(request.POST)#Establecimieto temporal
                    if formulario2.is_valid():
                        # The proposed changes are stored as a temporary
                        # establishment linked to the request.
                        id_EstablecimientoTemporal=formulario2.save()
                        if formulario.is_valid():
                            enviada=True
                            tipo="modificacion"
                    else:
                        # Invalid edit form: re-render with the submitted
                        # position so the map marker is preserved.
                        pnt = GEOSGeometry(formulario2.cleaned_data['position']) # WKT
                        lng=pnt.y
                        lat=pnt.x
                        formulario=SolicitudForm(data=request.POST)
                        formulario2= EstablecimientoTemporalForm(data=request.POST)
                        return render(request, 'establishment/solicitud.html', {
                            'form':formulario, 'form2':formulario2,
                            'lng':lng,'lat':lat
                        })
                else:
                    if tipo_solicitud=='2':
                        formulario= SolicitudForm(request.POST)
                        if formulario.is_valid():
                            enviada=True
                            tipo="eliminacion"
                    else:
                        if tipo_solicitud=='3':
                            formulario= SolicitudForm(request.POST)
                            if formulario.is_valid():
                                enviada=True
                                tipo="eliminacion"
                        else:
                            raise Http404
            if enviada and tipo != "":
                # Notify the requester that the request is being reviewed.
                notify.send(
                    request.user,
                    recipient= request.user,
                    verb="Solicitud Enviada",
                    description="Hola "+request.user.first_name+" para informarte que estamos mirando tu"\
                    "e solicitud de "+tipo+", gracias por tu paciencia.",
                    timestamp=mydatetime.now()
                )
                if not id_EstablecimientoTemporal:
                    self.create_solicitud(tipo.title()+formulario.cleaned_data['contenido'],
                        request.user, establecimiento_id, tipo)
                else:
                    self.create_solicitud(tipo.title()+formulario.cleaned_data['contenido'],
                        request.user, establecimiento_id, tipo,id_EstablecimientoTemporal)
                messages.success(self.request, u"Solicitud enviada.")
                print "Solicitud pendiente"
                return redirect('/establecimientos/'+establecimiento_id+'/')
        # Fallback: not authenticated, or the form did not validate.
        formulario=SolicitudForm(data=request.POST)
        formulario2= EstablecimientoTemporalForm(data=request.POST)
        return render(request, 'establishment/solicitud.html', {
            'form':formulario, 'form2':formulario2,
        })
    def create_solicitud(self,contenido,user,establecimiento_id,tipo_solicitud,establecimientos_temporales=None):
        u"""
        Persist a new Solicitud; attach the temporary establishment when
        the request is an edit request.
        """
        if establecimientos_temporales is None:
            Solicitud.objects.create(
                contenido=contenido,
                usuarios=user,
                establecimientos=Establecimiento.objects.get(id=establecimiento_id),
                tipo_solicitudes=TiposSolicitud.objects.get(nombre=tipo_solicitud)
            )
            print "Solicitud creada"
        else:
            Solicitud.objects.create(
                contenido=contenido,
                usuarios=user,
                establecimientos=Establecimiento.objects.get(id=establecimiento_id),
                tipo_solicitudes=TiposSolicitud.objects.get(nombre=tipo_solicitud),
                establecimientos_temporales=establecimientos_temporales,
            )
            print "Solicitud creada"
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # Authentication is required for every HTTP method.
        return super(Solicitar, self).dispatch(*args, **kwargs)
class EstablecimientosPropios(ListView):
    u"""
    List the establishments owned by an organizational user.
    """
    paginate_by = 10
    model = Establecimiento
    template_name = "establishment/list_own.html"
    context_object_name = "establecimientos_propios"
    def get_context_data(self, **kwargs):
        """
        Add the category-filter form to the template context.
        """
        print "Establecimiento consultado"
        context = super(EstablecimientosPropios, self).get_context_data(**kwargs)
        #context['now'] = timezone.now()
        context['form_categorias']=CategoriasFilterForm
        return context
    def get_queryset(self):
        """
        Restrict the queryset to establishments administered by the
        requesting user.
        """
        query=Establecimiento.objects.filter(administradores=self.request.user)
        print "Query: ",query
        return query
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # Authentication is required for every HTTP method.
        return super(EstablecimientosPropios, self).dispatch(*args, **kwargs)
class UploadImagenView(View):
    u"""
    DEPRECATED.
    Upload a single image for an establishment.
    """
    def post(self, request, pk, *args, **kwargs):
        """Attach the uploaded image to the establishment, then redirect.

        An invalid form is silently ignored; either way the user lands
        back on the establishment detail page.
        """
        form = UploadImageForm(request.POST, request.FILES)
        if form.is_valid():
            establecimiento = Establecimiento.objects.get(id=pk)
            Imagen.objects.create(imagen=request.FILES['imagen'],
                                  establecimientos=establecimiento)
        return redirect('establecimiento_detail_url', pk=pk)

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        """Require an authenticated user for every HTTP method."""
        return super(UploadImagenView, self).dispatch(*args, **kwargs)
from django.db.models import Q
class BusquedarView(TemplateView):
    u"""Search page over establishments, paginated 20 per page."""
    template_name = "establishment/busqueda.html"

    def get(self, request, *args, **kwargs):
        """Render search results for the ``q`` GET parameter.

        Matches name, email, address, web page or sub-category tag.
        """
        q = request.GET.get('q', None)
        if q is not None:
            # BUG FIX: the original ORed Q(email__icontains=q) twice; the
            # duplicate was clearly meant to be the web page (the
            # autocomplete view searches the same four fields).
            establecimientos = Establecimiento.objects.filter(
                Q(nombre__icontains=q) | Q(email__icontains=q) |
                Q(address__icontains=q) | Q(web_page__icontains=q) |
                Q(sub_categorias__icontains=SubCategoria.objects.filter(tag__icontains=q)))
            # NOTE(review): `sub_categorias__icontains` against a queryset
            # looks dubious -- verify it matches as intended.
            paginator = Paginator(establecimientos, 20)
            page = request.GET.get('page')
            try:
                establecimientos = paginator.page(page)
            except PageNotAnInteger:
                # If page is not an integer, deliver first page.
                establecimientos = paginator.page(1)
            except EmptyPage:
                # If page is out of range (e.g. 9999), deliver last page.
                establecimientos = paginator.page(paginator.num_pages)
        else:
            establecimientos = None
        return render(request, "establishment/busqueda.html",
                      {'datos': establecimientos, 'query': q})

    def existe(self, establecimientos, id):
        """Return True if an entry with the given id was already collected."""
        for element in establecimientos:
            if element.get('id') == str(id):
                return True
        return False

    def get_establecimientis(self, establecimientos, sqs):
        u"""Append new (non-duplicate) search hits as plain dicts."""
        for resultado in sqs:
            if not self.existe(establecimientos, resultado.pk):
                temporal = {'id': resultado.pk, 'nombre': resultado.nombre,
                            'address': resultado.address,
                            'web_page': resultado.web_page,
                            'email': resultado.email,
                            'sub_categorias': resultado.sub_categorias}
                establecimientos.append(temporal)
        return establecimientos
#################################################################################################
##################################################### ###########################
##################################################### APIS VIEWS ###########################
##################################################### ###########################
#################################################################################################
class EstablecimientoCreateApiView(APIView):
    u"""
    DEPRECATED.
    Create an establishment through the REST API.
    """
    def post(self, request, format=None):
        """Validate the payload and persist a new establishment."""
        serializer = EstablecimientoSerializer(data=request.DATA)
        if not serializer.is_valid():
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
class CalificacionApiView(APIView):
    u"""
    Rate an establishment via JSON (DRF).
    Attributes:
        authentication_classes: authentication methods accepted for rating.
    """
    authentication_classes = ( SessionAuthentication, BasicAuthentication,)
    def get(self, request, pk,format=None):
        u"""
        Fetch the requesting user's rating and the establishment's overall
        rating.
        Returns:
            JSON object with both values; empty object when the
            establishment cannot be resolved.
        """
        try:
            establecimiento = Establecimiento.objects.get(id=pk)
            user=User.objects.get(id=request.user.id)
            calificacion_usuario= establecimiento.rating.get_rating_for_user(user)
            salida={
                "ratin_estableicimiento":round(establecimiento.rating.get_rating(),1),
                "rating_usuario":calificacion_usuario
            }
        except Exception, e:
            # No per-user rating (e.g. anonymous user): fall back to the
            # overall rating only.
            try:
                salida={
                    "ratin_estableicimiento":round(establecimiento.rating.get_rating(),1),
                    "rating_usuario":0
                }
            except Exception, e:
                # The establishment lookup itself failed.
                salida={}
                print "ERROR: ",e
        return Response(salida,status=status.HTTP_200_OK)
    def post(self, request, pk,format=None):
        u"""
        Update the user's rating if one exists, otherwise create it.
        Returns:
            HTTP_201_CREATED on success, HTTP_400_BAD_REQUEST otherwise.
        """
        try:
            calificacion = request.DATA.get("calificacion")
            respuesta=""
            establecimiento = Establecimiento.objects.get(id=pk)
            if calificacion:
                # Only scores in [1, 5] are accepted.
                calificacion=int(calificacion)
                if calificacion>=1 and calificacion<=5 :
                    recommender=EstablecimientosRecommender()
                    print "BIne"
                    # print establecimiento
                    # print calificacion
                    # print request.user
                    # print request.META['REMOTE_ADDR']
                    # print request.__dict__
                    # print "----------__>\n"
                    # print request.COOKIES
                    establecimiento.rating.add(
                        score=calificacion,
                        user=request.user,
                        ip_address=request.META['REMOTE_ADDR']
                    )
                    print "MAL"
                    print "Actualizando matriz"
                    # Recompute the recommendation matrix with the new vote.
                    recommender.precompute()
                    respuesta="Calificacion realizada"
                    return Response(respuesta, status=status.HTTP_201_CREATED)
                else:
                    respuesta="Valor no valido"
        except Exception, e:
            print "Ell establecimiento no existe "
            respuesta="Algo salio mal ",str(e)
            print e
        return Response(respuesta, status=status.HTTP_400_BAD_REQUEST)
class UploadImagenApiView(APIView):
    u"""
    Upload an image through the REST API (JSON).
    """
    authentication_classes = ( SessionAuthentication, BasicAuthentication,)
    def post(self, request, pk,format=None):
        u"""
        Validate the upload: content type must be jpeg/pjpeg/png/jpg and
        the size at most settings.MAX_UPLOAD_SIZE; a regular user may
        upload at most MAX_UPLOAD_PER_USER images per establishment and an
        establishment holds at most MAX_IMAGES_PER_PLACE images.
        """
        try:
            establecimiento = Establecimiento.objects.get(id=pk)
            imagen_count=Imagen.objects.filter(usuarios=request.user,establecimientos=establecimiento).count()
            propietario=establecimiento.administradores.filter(id=request.user.id)
            if establecimiento.imagen_set.count() < settings.MAX_IMAGES_PER_PLACE:
                # Owners and superusers are exempt from the per-user limit.
                if imagen_count <settings.MAX_UPLOAD_PER_USER or self.request.user.is_superuser or propietario:
                    if request.FILES:
                        #imagen = request.FILES.get("imagen")
                        imagen= request.FILES[u'file']
                        size=int(imagen.size)
                        #validate content type
                        main, sub = imagen.content_type.split('/')
                        if not (main == 'image' and sub.lower() in ['jpeg', 'pjpeg', 'png', 'jpg']):
                            respuesta={"error":'Please use a JPEG, JPG or PNG image.'}
                        else:
                            if size <= settings.MAX_UPLOAD_SIZE:
                                respuesta=""
                                element=Imagen(imagen=imagen,establecimientos=establecimiento,usuarios=request.user)
                                element.save()
                                respuesta="OK"
                                return Response(respuesta, status=status.HTTP_201_CREATED)
                            else:
                                # NOTE(review): this message says 10 MB but
                                # the docstring said 5 MB; the effective
                                # limit is settings.MAX_UPLOAD_SIZE --
                                # confirm which figure is right.
                                print "Supera el tamanio de la image."
                                respuesta={"error":"La imagen no puede ser mayor a 10 MB"}
                    else:
                        respuesta={"error":"No subio nada"}
                else:
                    respuesta={"error":"Limite maximo de imagenes por usuario en este establecimiento"}
            else:
                respuesta={"error":"Limite maximo de imagenes para el establecimiento"}
        except Exception, e:
            print "EL ESTABLEcimiento no existe"
            respuesta="Algo salio mal"
            print e
        return Response(respuesta, status=status.HTTP_400_BAD_REQUEST)
class EstablecimientosByBoung(APIView):
    u"""
    Return (as JSON) the establishments inside the currently visible
    Google Maps viewport, using the map bounds and a GeoDjango
    bounding-box query.
    """
    def get(self, request, format=None):
        u"""
        Read the bounds, run the query ordered by rating_score.
        Returns:
            JSON object (paginated serializer output), or an error object
            when no bounds were supplied.
        """
        # boung_data_x1 = request.GET.get("x1",None)
        # boung_data_y1 = request.GET.get("y1")
        # boung_data_x2 = request.GET.get("x2")
        # boung_data_y2 = request.GET.get("y2")
        # NOTE(review): x/y are deliberately swapped when reading the GET
        # parameters (x1 comes from "y1", etc.) -- presumably to map the
        # client's lat/lng order onto GEOS x/y order; confirm.
        boung_data_x1 = request.GET.get("y1",None)
        boung_data_y1 = request.GET.get("x1")
        boung_data_x2 = request.GET.get("y2")
        boung_data_y2 = request.GET.get("x2")
        number_page=request.GET.get("pagina",None)
        if number_page is None:
            number_page=1
        # print "boung_data_x1: ",boung_data_x1
        # print "boung_data_y1: ",boung_data_y1
        # print "boung_data_x2: ",boung_data_x2
        # print "boung_data_y1: ",boung_data_y2
        if boung_data_x1 is not None:
            nombre=request.GET.get("nombre",None)
            categoria=request.GET.get("categoria",None)
            sub_categoria=request.GET.get("sub_categoria",None)
            # Bounding box for the visible viewport; only visible places.
            box=Polygon.from_bbox((boung_data_x1,boung_data_y1,boung_data_x2,boung_data_y2))
            query=Establecimiento.objects.filter(position__within=box,visible=True).order_by('rating_score')
            if nombre:
                query=query.filter(nombre__icontains=nombre)
            if sub_categoria:
                query=query.filter(sub_categorias=sub_categoria)
            else:
                if categoria:
                    # No explicit sub-category: filter by every
                    # sub-category of the selected category.
                    categoria=Categoria.objects.get(id=categoria)
                    sub_cate=SubCategoria.objects.filter(categorias=categoria)
                    query=query.filter(sub_categorias=sub_cate)
            paginator = Paginator(query, settings.ITEMS_PAGINATE)
            query=paginator.page(number_page)
            serializer=PaginatedEstablecimientoSerializer(query)
            salida=serializer.data
        else:
            salida={"error":"None"}
        return Response(salida,status=status.HTTP_200_OK)
###########################################################################################
##################################################### #####################
##################################################### SIGNALS #####################
##################################################### #####################
###########################################################################################
from django.db.models.signals import pre_delete, post_save
from django.dispatch.dispatcher import receiver
from apps.externals.djangoratings.models import Vote, Score
@receiver(pre_delete, sender=Imagen)
def Imagen_delete(sender, instance, **kwargs):
    u"""
    When an Imagen row is deleted, also delete the image file on disk.
    """
    # save=False: the model row is being deleted anyway.
    instance.imagen.delete(False)
@receiver(pre_delete, sender=Establecimiento)
def establecimiento_delete(sender, instance, **kwargs):
    u"""
    When an establishment is deleted, drop its votes and scores plus its
    stored recommendations, then recompute the recommendation matrix.
    """
    print "Establecimiento eliminado"
    id_establecimiento=instance.id
    # Ratings live in the generic djangoratings tables, keyed by object id.
    Vote.objects.filter(object_id=id_establecimiento).delete()
    Score.objects.filter(object_id=id_establecimiento).delete()
    print "Calificacion eliminada"
    recommender=EstablecimientosRecommender()
    print "Eliminando recomendacion"
    recommender.storage.remove_recommendations(instance)
    print "Computando"
    recommender.precompute()
@receiver(post_save, sender=Establecimiento)
def establecimiento_save(sender, instance, created,**kwargs):
    # Debug trace distinguishing creation from update; no other effect.
    if created:
        print "Estalecimiento creado"
    else:
        print "Establecimiento actualizado"
@receiver(post_save, sender=Solicitud)
def solicitud_created(sender, instance,created, **kwargs):
    # Debug trace only: log when a new Solicitud row is created.
    if created:
        print "Solicitud creada"
@receiver(pre_delete, sender=Solicitud)
def solicitud_delete(sender, instance, **kwargs):
    # Debug trace only: log when a Solicitud row is deleted.
    print "Solicitud eliminada"
| 50,571 | 14,126 |
# Copyright 2020 The Private Cardinality Estimation Framework Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for wfa_cardinality_estimation_evaluation_framework.simulations.simulator."""
import io
from absl.testing import absltest
import numpy as np
import pandas as pd
from wfa_cardinality_estimation_evaluation_framework.estimators.base import EstimateNoiserBase
from wfa_cardinality_estimation_evaluation_framework.estimators.base import EstimatorBase
from wfa_cardinality_estimation_evaluation_framework.estimators.base import SketchBase
from wfa_cardinality_estimation_evaluation_framework.estimators.exact_set import AddRandomElementsNoiser
from wfa_cardinality_estimation_evaluation_framework.estimators.exact_set import ExactMultiSet
from wfa_cardinality_estimation_evaluation_framework.estimators.exact_set import LosslessEstimator
from wfa_cardinality_estimation_evaluation_framework.evaluations.configs import SketchEstimatorConfig
from wfa_cardinality_estimation_evaluation_framework.simulations import set_generator
from wfa_cardinality_estimation_evaluation_framework.simulations import simulator
def get_simple_simulator(sketch_estimator_config=None):
  """Build a one-run, one-set Simulator for the tests below.

  Args:
    sketch_estimator_config: optional SketchEstimatorConfig; defaults to an
      exact-set sketch with a lossless estimator.

  Returns:
    A simulator.Simulator with fixed random states, so results are
    deterministic across test runs.
  """
  if not sketch_estimator_config:
    sketch_estimator_config = SketchEstimatorConfig(
        name='exact_set-lossless', sketch_factory=ExactMultiSet,
        estimator=LosslessEstimator())
  set_generator_factory = (
      set_generator.IndependentSetGenerator.
      get_generator_factory_with_num_and_size(
          universe_size=1, num_sets=1, set_size=1))
  return simulator.Simulator(
      num_runs=1,
      set_generator_factory=set_generator_factory,
      sketch_estimator_config=sketch_estimator_config,
      sketch_random_state=np.random.RandomState(1),
      set_random_state=np.random.RandomState(2))
class RandomSketchForTestRandomSeed(SketchBase):
  """Fake sketch that records the random seed it was constructed with."""

  @classmethod
  def get_sketch_factory(cls):

    def f(random_seed):
      return cls(random_seed=random_seed)

    return f

  def __init__(self, random_seed):
    # Stored where EstimatorForTestRandomSeed reads it back out.
    self.cardinality = random_seed

  def add_ids(self, ids):
    # Ids are irrelevant; only the seed matters for these tests.
    _ = ids
class EstimatorForTestRandomSeed(EstimatorBase):
  """Fake estimator that reports the last sketch's recorded seed."""

  def __call__(self, sketch_list):
    return [sketch_list[-1].cardinality]
class FakeEstimateNoiser(EstimateNoiserBase):
  """Noiser stub: always returns 10 and counts how often it was called."""

  def __init__(self):
    # Number of times the noiser was applied; inspected by tests.
    self._calls = 0

  def __call__(self, cardinality_estimate):
    self._calls += 1
    return 10
class FakeSetGenerator(set_generator.SetGeneratorBase):
  """Generator for a fixed collection of sets."""

  @classmethod
  def get_generator_factory(cls, set_list):
    """Return a factory that ignores its random state."""

    def f(random_state):
      return cls(set_list)

    return f

  def __init__(self, set_list):
    self.set_list = set_list

  def __iter__(self):
    # Yield the preset sets in order.  (A `return self` that followed the
    # loop was removed: inside a generator it only set an unobserved
    # StopIteration value and was dead code.)
    for s in self.set_list:
      yield s
class SimulatorTest(absltest.TestCase):
  """End-to-end tests for simulator.Simulator."""

  def test_simulator_run_one(self):
    # One run over one generated set yields a single-row frame.
    sim = get_simple_simulator()
    data_frame = sim.run_one()
    self.assertLen(data_frame, 1)
    for pub in data_frame['num_sets']:
      self.assertEqual(pub, 1)

  def test_simulator_run_one_with_estimate_noiser(self):
    # The configured estimate noiser must be applied exactly once and its
    # (constant) output recorded as the estimate.
    fake_estimate_noiser = FakeEstimateNoiser()
    sketch_estimator_config = SketchEstimatorConfig(
        name='exact_set-lossless',
        sketch_factory=ExactMultiSet, estimator=LosslessEstimator(),
        estimate_noiser=fake_estimate_noiser)
    sim = get_simple_simulator(sketch_estimator_config)
    data_frame = sim.run_one()
    self.assertLen(data_frame, 1)
    self.assertEqual(
        data_frame[simulator.ESTIMATED_CARDINALITY_BASENAME + '1'].iloc[0], 10)
    self.assertEqual(fake_estimate_noiser._calls, 1)

  def test_simulator_run_all_and_aggregate(self):
    # run_all_and_aggregate returns (raw frame, aggregated frame).
    sim = get_simple_simulator()
    data_frames = sim.run_all_and_aggregate()
    self.assertLen(data_frames, 2)
    for pub in data_frames[0]['num_sets']:
      self.assertEqual(pub, 1)

  def test_simulator_run_all_and_aggregate_with_noise(self):
    # Adding 3 random elements to a 1-element set gives estimate 4,
    # true cardinality 1, hence relative error 3.
    rs = np.random.RandomState(3)
    sketch_estimator_config = SketchEstimatorConfig(
        name='exact_set-lossless',
        sketch_factory=ExactMultiSet,
        estimator=LosslessEstimator(),
        sketch_noiser=AddRandomElementsNoiser(num_random_elements=3,
                                              random_state=rs))
    sim = get_simple_simulator(sketch_estimator_config)
    data_frames = sim.run_all_and_aggregate()
    self.assertLen(data_frames, 2)
    for pub in data_frames[0]['num_sets']:
      self.assertEqual(pub, 1)
    self.assertEqual(
        data_frames[0][simulator.ESTIMATED_CARDINALITY_BASENAME + '1'][0], 4)
    self.assertEqual(
        data_frames[0][simulator.TRUE_CARDINALITY_BASENAME + '1'][0], 1)
    self.assertEqual(
        data_frames[0][simulator.RELATIVE_ERROR_BASENAME + '1'][0], 3)

  def test_simulator_run_all_and_aggregate_multiple_runs(self):
    # Five runs produce five rows in the raw data frame.
    sketch_estimator_config = SketchEstimatorConfig(
        name='exact_set-lossless',
        sketch_factory=ExactMultiSet, estimator=LosslessEstimator())
    set_generator_factory = (
        set_generator.IndependentSetGenerator.
        get_generator_factory_with_num_and_size(
            universe_size=1, num_sets=1, set_size=1))
    sim = simulator.Simulator(
        num_runs=5,
        set_generator_factory=set_generator_factory,
        sketch_estimator_config=sketch_estimator_config)
    data_frames = sim.run_all_and_aggregate()
    self.assertLen(data_frames, 2)
    self.assertLen(data_frames[0], 5)
    for pub in data_frames[0]['num_sets']:
      self.assertEqual(pub, 1)

  def test_simulator_run_all_and_aggregate_write_file(self):
    sketch_estimator_config = SketchEstimatorConfig(
        name='exact_set-lossless',
        sketch_factory=ExactMultiSet, estimator=LosslessEstimator())
    set_generator_factory = (
        set_generator.IndependentSetGenerator.
        get_generator_factory_with_num_and_size(
            universe_size=1, num_sets=1, set_size=1))
    # In-memory file handles stand in for on-disk CSV output.
    file_df = io.StringIO()
    file_df_agg = io.StringIO()
    sim = simulator.Simulator(
        num_runs=5,
        set_generator_factory=set_generator_factory,
        sketch_estimator_config=sketch_estimator_config,
        file_handle_raw=file_df,
        file_handle_agg=file_df_agg)
    df, df_agg = sim()
    # Test if the saved data frame is the same as the one returned from the
    # simulator.
    file_df.seek(0)
    df_from_csv = pd.read_csv(file_df)
    pd.testing.assert_frame_equal(df, df_from_csv)
    file_df_agg.seek(0)
    df_agg_from_csv = pd.read_csv(file_df_agg,
                                  header=[0, 1], index_col=0)
    pd.testing.assert_frame_equal(df_agg, df_agg_from_csv)

  def test_get_sketch_same_run_same_random_state(self):
    # Within one run, every sketch must receive the same random seed.
    sketch_estimator_config = SketchEstimatorConfig(
        name='exact_set-lossless',
        sketch_factory=RandomSketchForTestRandomSeed,
        estimator=EstimatorForTestRandomSeed())
    set_generator_factory = (
        set_generator.IndependentSetGenerator.
        get_generator_factory_with_num_and_size(
            universe_size=1, num_sets=2, set_size=1))
    sim = simulator.Simulator(
        num_runs=1,
        set_generator_factory=set_generator_factory,
        sketch_estimator_config=sketch_estimator_config)
    df, _ = sim()
    self.assertEqual(
        df.loc[df['num_sets'] == 1, simulator.ESTIMATED_CARDINALITY_BASENAME + '1'].values,
        df.loc[df['num_sets'] == 2, simulator.ESTIMATED_CARDINALITY_BASENAME + '1'].values)

  def test_get_sketch_different_runs_different_random_state(self):
    # Across runs the sketch random seed must change.
    sketch_estimator_config = SketchEstimatorConfig(
        name='random_sketch-estimator_for_test_random_seed',
        sketch_factory=RandomSketchForTestRandomSeed,
        estimator=EstimatorForTestRandomSeed())
    set_generator_factory = (
        set_generator.IndependentSetGenerator.
        get_generator_factory_with_num_and_size(
            universe_size=1, num_sets=1, set_size=1))
    sim = simulator.Simulator(
        num_runs=2,
        set_generator_factory=set_generator_factory,
        sketch_estimator_config=sketch_estimator_config)
    df, _ = sim()
    self.assertNotEqual(
        df.loc[df['run_index'] == 0, simulator.ESTIMATED_CARDINALITY_BASENAME + '1'].values,
        df.loc[df['run_index'] == 1, simulator.ESTIMATED_CARDINALITY_BASENAME + '1'].values)

  def test_extend_histogram(self):
    # _extend_histogram truncates or zero-pads to the requested length.
    self.assertEqual(simulator.Simulator._extend_histogram(None, [], 1), [0])
    self.assertEqual(simulator.Simulator._extend_histogram(None, [3, 2, 1], 1), [3])
    self.assertEqual(simulator.Simulator._extend_histogram(None, [3, 2, 1], 2), [3, 2])
    self.assertEqual(simulator.Simulator._extend_histogram(None, [3, 2, 1], 3), [3, 2, 1])
    self.assertEqual(simulator.Simulator._extend_histogram(None, [3, 2, 1], 5), [3, 2, 1, 0, 0])

  def test_shuffle_distance(self):
    # Empty histograms are rejected by assertion.
    with self.assertRaises(AssertionError):
      simulator.Simulator(0,0,0)._shuffle_distance([], [])
    with self.assertRaises(AssertionError):
      simulator.Simulator(0,0,0)._shuffle_distance([1], [])
    self.assertEqual(simulator.Simulator(0,0,0)._shuffle_distance(
        [1], [1]), 0.0)
    self.assertEqual(simulator.Simulator(0,0,0)._shuffle_distance(
        [10], [10]), 0.0)
    self.assertEqual(simulator.Simulator(0,0,0)._shuffle_distance(
        [1, 1], [1]), 1.0)
    self.assertEqual(simulator.Simulator(0,0,0)._shuffle_distance(
        [1, 1], [1, 1]), 0.0)
    self.assertEqual(simulator.Simulator(0,0,0)._shuffle_distance(
        [2, 1, 0], [2, 2, 1]), 0.5)

  def test_multiple_frequencies(self):
    # With max_frequency=3 the simulator reports estimated/true
    # cardinalities and relative errors per frequency level.
    sketch_estimator_config = SketchEstimatorConfig(
        name='exact-set-multiple-frequencies',
        sketch_factory=ExactMultiSet,
        estimator=LosslessEstimator(),
        max_frequency=3)
    set_generator_factory = (
        FakeSetGenerator.get_generator_factory(
            [[1, 1, 1, 2, 2, 3], [1, 1, 1, 3, 3, 4]]))
    sim = simulator.Simulator(
        num_runs=1,
        set_generator_factory=set_generator_factory,
        sketch_estimator_config=sketch_estimator_config)
    df, _ = sim()
    expected_columns = ['num_sets',
                        simulator.ESTIMATED_CARDINALITY_BASENAME + '1',
                        simulator.ESTIMATED_CARDINALITY_BASENAME + '2',
                        simulator.ESTIMATED_CARDINALITY_BASENAME + '3',
                        simulator.TRUE_CARDINALITY_BASENAME + '1',
                        simulator.TRUE_CARDINALITY_BASENAME + '2',
                        simulator.TRUE_CARDINALITY_BASENAME + '3',
                        simulator.SHUFFLE_DISTANCE,
                        'run_index',
                        simulator.RELATIVE_ERROR_BASENAME + '1',
                        simulator.RELATIVE_ERROR_BASENAME + '2',
                        simulator.RELATIVE_ERROR_BASENAME + '3']
    expected_data = [
        [1, 3, 2, 1, 3, 2, 1, 0., 0, 0., 0., 0.],
        [2, 4, 3, 2, 4, 3, 2, 0., 0, 0., 0., 0.]
    ]
    expected_df = pd.DataFrame(expected_data, columns=expected_columns)
    pd.testing.assert_frame_equal(df, expected_df)
if __name__ == '__main__':
absltest.main()
| 11,547 | 4,110 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
if __name__ == '__main__':
    # Two equivalent ways to spell an empty tuple.
    empty_literal = ()
    print(type(empty_literal))
    empty_ctor = tuple()
    print(type(empty_ctor))
    # A parenthesized, comma-separated literal.
    numbers = (1, 2, 3, 4, 5)
    print(type(numbers))
    print(numbers)
    # tuple() also converts other iterables.
    numbers = tuple([1, 2, 3, 4])
    print(numbers)
    # Parentheses alone do NOT make a tuple -- the trailing comma does.
    just_an_int = (42)
    one_tuple = (42,)
    print(type(just_an_int))
    print(type(one_tuple))
| 326 | 161 |
# -*- coding:utf-8 -*-
# @Time : 2019/7/30 9:00 PM
# @Author : __wutonghe__
from django.core.mail import send_mail
def send_email():
    """Placeholder: presumably intended to wrap django.core.mail.send_mail
    (imported above) -- not implemented yet."""
    pass
| 150 | 72 |
'''
Created on May 17, 2019
@author: Tim Kreuzer
'''
def get_token(path):
    """Read the token stored at *path*, stripping trailing whitespace.

    Args:
        path: filesystem path of a file whose whole contents are the token.

    Returns:
        The file contents with any trailing whitespace/newline removed.
    """
    with open(path, 'r') as token_file:
        return token_file.read().rstrip()
| 158 | 64 |
#!/usr/bin/env python3
# Pty buffer size detect script
# From: https://superuser.com/a/1452858
# Results:
# MacOS 11.2.3: pts write blocked after 1023 bytes (0 KiB)
import os
from pty import openpty
from fcntl import fcntl, F_GETFL, F_SETFL
from itertools import count
def set_nonblock(fd):
    """Switch the file descriptor *fd* into non-blocking mode.

    Reads the current status flags, ORs in O_NONBLOCK, and writes them
    back with fcntl.
    """
    fcntl(fd, F_SETFL, fcntl(fd, F_GETFL) | os.O_NONBLOCK)
# Probe the pty output buffer size: write single bytes to the slave end
# (never reading the master) until a non-blocking write would block.
master, slave = openpty()
set_nonblock(slave)
for i in count():
    try:
        os.write(slave, b'a')
    except BlockingIOError:
        # NOTE(review): at this point `i` successful writes already
        # happened, so decrementing appears to under-report by one byte --
        # confirm the intended semantics before trusting the exact figure.
        i -= 1
        break
print("pts write blocked after {} bytes ({} KiB)".format(i, i//1024))
"""This module implements the core ShivyC functionality."""

# Single-source package version string.
__version__ = "0.3.2"
# Generated by Django 3.0.5 on 2020-04-28 19:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the Error, LongRequest and Notification
    tables.

    Auto-generated by Django 3.0.5 -- do not hand-edit once applied.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Error',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=250)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('latest', models.DateTimeField()),
                ('count', models.PositiveIntegerField(default=0)),
                ('message', models.TextField(blank=True, default='')),
                ('to', models.TextField(blank=True, default='')),
                ('error_message', models.TextField(blank=True, default='')),
                ('stack_trace', models.TextField(blank=True, default='')),
                ('fixed', models.BooleanField(default=False, help_text="If you set fixed to True and the error happens again, a new error will be opened. So you should set this once you THINK you've fixed it so you'll find out if you haven't.")),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='LongRequest',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('path', models.CharField(max_length=300)),
                ('total_duration', models.DecimalField(decimal_places=3, help_text='In seconds.', max_digits=10)),
                ('method', models.CharField(max_length=10)),
                ('variables', models.TextField(help_text='request.POST or request.GET, depending on the method')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('latest', models.DateTimeField()),
                ('count', models.PositiveIntegerField(default=1)),
            ],
            options={
                'ordering': ('-created',),
            },
        ),
        migrations.CreateModel(
            name='Notification',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=250)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('latest', models.DateTimeField()),
                ('count', models.PositiveIntegerField(default=0)),
                ('message', models.TextField(blank=True, default='')),
                ('to', models.TextField(blank=True, default='')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| 2,815 | 730 |
#!/usr/bin/env python
"""python module for the LTC1380
created 17, 06, 2021
last modified 17, 06, 2021
Copyright 2021 Andrew Buckin
"""
import smbus
import time
# Control-byte bit that must be set to enable the mux output (bit 3);
# OR'd with the channel number (bits 0-2) in SetChannel.
Enable = 8
class LTC1380:
    """Minimal I2C driver for the LTC1380 8:1 analog multiplexer.

    The control byte written to the device is ``Enable | channel``: the
    module-level ``Enable`` bit turns the switch on and the low bits select
    the input. Writing 0x00 turns the output off.

    NOTE(review): ``Desable`` is a typo for "Disable"; kept because it is
    the public method name callers use.
    """
    def __init__(self, i2cAddress=0x48):
        # 0x48 is the device address with all address pins tied low.
        self.i2cAddress = i2cAddress
        # Bus 1 is the user-facing I2C bus on common SBCs (e.g. Raspberry Pi).
        self.bus = smbus.SMBus(1)
        try:
            # Probe the device by enabling it; smbus raises IOError if absent.
            self.Enable()
        except IOError:
            print("No i2c device at address:", self.i2cAddress,)
            # NOTE(review): if the probe write failed, this disable write will
            # most likely raise IOError as well (uncaught) -- confirm intent.
            self.Desable()
        return
    def Enable(self):
        # Enable the mux output, channel bits left at 0.
        self.bus.write_byte(self.i2cAddress, Enable)
        return
    def Desable(self):
        # Disable the mux output entirely.
        self.bus.write_byte(self.i2cAddress, 0x00)
        return
    def SetChannel(self, Channel):
        # Enable and select input *Channel* (expected 0-7) in a single write.
        self.bus.write_byte(self.i2cAddress, Enable | Channel)
        return
if __name__ == "__main__":
    # Demo sweep: select each mux input 0-7 for half a second, then shut
    # the mux off.
    MUX = LTC1380(i2cAddress=0x48)
    Channel = list(range(0, 8, 1))  # data loop DO>DI
    for i in Channel:
        print(i)
        MUX.SetChannel(i)
        time.sleep(0.5)
    # BUG FIX: was `MUX.Desable` -- a bare attribute access that never called
    # the method, leaving the mux enabled on exit.
    MUX.Desable()
| 1,007 | 396 |
def isprime(x):
    """Return True if ``x`` is a prime number.

    Fixes over the original:
    - ``range(2, x/2+1)`` fails on Python 3 because ``x/2`` is a float;
    - values below 2 (0, 1, negatives) are now correctly non-prime;
    - trial division stops at sqrt(x) instead of x/2, which matters for the
      million-number sweep this script performs.
    """
    if x < 2:
        return False
    for y in range(2, int(x ** 0.5) + 1):
        if x % y == 0:
            return False
    return True
def iscircular(z):
    """Return True when every digit rotation of ``z`` is prime.

    After len(digits) left-rotations the original number itself is tested,
    so the caller does not need to check ``z`` separately.
    """
    digits = str(z)
    for _ in range(len(digits)):
        digits = digits[1:] + digits[0]  # rotate left by one digit
        if not isprime(int(digits)):
            return False
    return True
# Count and print the circular primes below one million (Project Euler 35).
# BUG FIX: `print x` / `print v` were Python 2 print statements, which are a
# SyntaxError on Python 3; the function form below works on both.
v = 0
for x in range(2, 1000000):
    if iscircular(x):
        print(x)
        v += 1
print(v)
| 395 | 166 |
from .misc_util import orthogonal_init, xavier_uniform_init
import torch.nn as nn
import torch
import torch.nn.functional as F
class Flatten(nn.Module):
    """Collapse every dimension after the batch dimension into one."""

    def forward(self, x):
        batch = x.size(0)
        return x.reshape(batch, -1)
class MlpModel(nn.Module):
    """Fully-connected feature extractor: Linear + ReLU per hidden layer.

    input_dims: (int) number of input dimensions
    hidden_dims: (list) sizes of the hidden layers; the last entry becomes
        ``output_dim``
    """

    def __init__(self,
                 input_dims=4,
                 hidden_dims=[64, 64],
                 **kwargs):
        super(MlpModel, self).__init__()
        dims = [input_dims] + hidden_dims
        modules = []
        for n_in, n_out in zip(dims[:-1], dims[1:]):
            modules.append(nn.Linear(n_in, n_out))
            modules.append(nn.ReLU())
        self.layers = nn.Sequential(*modules)
        self.output_dim = dims[-1]
        # Orthogonal weight init, as used project-wide for MLP/Nature trunks.
        self.apply(orthogonal_init)

    def forward(self, x):
        return self.layers(x)
class NatureModel(nn.Module):
    """DQN "Nature" CNN trunk: three conv layers, then a 512-unit projection.

    in_channels: (int) number of channels of the input image
    """

    def __init__(self,
                 in_channels,
                 **kwargs):
        super(NatureModel, self).__init__()
        stack = [
            nn.Conv2d(in_channels=in_channels, out_channels=32, kernel_size=8, stride=4),
            nn.ReLU(),
            nn.Conv2d(in_channels=32, out_channels=64, kernel_size=4, stride=2),
            nn.ReLU(),
            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1),
            nn.ReLU(),
            Flatten(),
            nn.Linear(in_features=64 * 7 * 7, out_features=512),
            nn.ReLU(),
        ]
        self.layers = nn.Sequential(*stack)
        self.output_dim = 512
        self.apply(orthogonal_init)

    def forward(self, x):
        return self.layers(x)
class ResidualBlock(nn.Module):
    """Pre-activation residual block: x + conv(relu(conv(relu(x))))."""

    def __init__(self,
                 in_channels):
        super(ResidualBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_channels=in_channels, out_channels=in_channels,
                               kernel_size=3, stride=1, padding=1)
        self.conv2 = nn.Conv2d(in_channels=in_channels, out_channels=in_channels,
                               kernel_size=3, stride=1, padding=1)

    def forward(self, x):
        residual = F.relu(x)
        residual = self.conv1(residual)
        residual = F.relu(residual)
        residual = self.conv2(residual)
        return residual + x
class ImpalaBlock(nn.Module):
    """IMPALA conv block: conv -> 3x3/stride-2 max-pool -> two residual blocks."""

    def __init__(self, in_channels, out_channels):
        super(ImpalaBlock, self).__init__()
        self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels,
                              kernel_size=3, stride=1, padding=1)
        self.res1 = ResidualBlock(out_channels)
        self.res2 = ResidualBlock(out_channels)

    def forward(self, x):
        pooled = F.max_pool2d(self.conv(x), kernel_size=3, stride=2, padding=1)
        return self.res2(self.res1(pooled))
class ImpalaModel(nn.Module):
    """IMPALA CNN trunk producing a 256-d feature vector.

    in_channels: (int) number of channels of the input image
    """

    def __init__(self,
                 in_channels,
                 **kwargs):
        super(ImpalaModel, self).__init__()
        self.block1 = ImpalaBlock(in_channels=in_channels, out_channels=16)
        self.block2 = ImpalaBlock(in_channels=16, out_channels=32)
        self.block3 = ImpalaBlock(in_channels=32, out_channels=32)
        self.fc = nn.Linear(in_features=32 * 8 * 8, out_features=256)
        self.output_dim = 256
        self.apply(xavier_uniform_init)

    def forward(self, x):
        features = self.block3(self.block2(self.block1(x)))
        features = F.relu(features)
        flat = features.reshape(features.size(0), -1)
        return F.relu(self.fc(flat))
class GRU(nn.Module):
    """Single-layer GRU whose hidden state is reset wherever ``masks`` is 0.

    ``masks`` marks episode boundaries (0 = new episode), so hidden state is
    not carried across episodes either during rollout or during training.
    """
    def __init__(self, input_size, hidden_size):
        super(GRU, self).__init__()
        # orthogonal_init comes from .misc_util; it appears to return the
        # module it initialized -- TODO(review): confirm against misc_util.
        self.gru = orthogonal_init(nn.GRU(input_size, hidden_size), gain=1.0)
    def forward(self, x, hxs, masks):
        # Prediction
        # One observation per environment: x and hxs are both (N, H)-shaped.
        if x.size(0) == hxs.size(0):
            # input for GRU-CELL: (L=sequence_length, N, H)
            # output for GRU-CELL: (output: (L, N, H), hidden: (L, N, H))
            masks = masks.unsqueeze(-1)
            # Zero the hidden state for environments whose mask is 0.
            x, hxs = self.gru(x.unsqueeze(0), (hxs * masks).unsqueeze(0))
            x = x.squeeze(0)
            hxs = hxs.squeeze(0)
        # Training
        # We will recompute the hidden state to allow gradient to be back-propagated through time
        else:
            # x is a (T, N, -1) tensor that has been flatten to (T * N, -1)
            N = hxs.size(0)
            T = int(x.size(0) / N)
            # unflatten
            x = x.view(T, N, x.size(1))
            # Same deal with masks
            masks = masks.view(T, N)
            # Let's figure out which steps in the sequence have a zero for any agent
            # We will always assume t=0 has a zero in it as that makes the logic cleaner
            # (can be interpreted as a truncated back-propagation through time)
            has_zeros = ((masks[1:] == 0.0) \
                        .any(dim=-1)
                        .nonzero()
                        .squeeze()
                        .cpu())
            # +1 to correct the masks[1:]
            if has_zeros.dim() == 0:
                # Deal with scalar
                has_zeros = [has_zeros.item() + 1]
            else:
                has_zeros = (has_zeros + 1).numpy().tolist()
            # add t=0 and t=T to the list
            has_zeros = [0] + has_zeros + [T]
            hxs = hxs.unsqueeze(0)
            outputs = []
            # Run the GRU over each zero-free segment in one call, resetting
            # the hidden state at each segment start via the mask product.
            for i in range(len(has_zeros) - 1):
                # We can now process steps that don't have any zeros in masks together!
                # This is much faster
                start_idx = has_zeros[i]
                end_idx = has_zeros[i + 1]
                rnn_scores, hxs = self.gru(
                    x[start_idx:end_idx],
                    hxs * masks[start_idx].view(1, -1, 1))
                outputs.append(rnn_scores)
            # assert len(outputs) == T
            # x is a (T, N, -1) tensor
            x = torch.cat(outputs, dim=0)
            # flatten
            x = x.view(T * N, -1)
            hxs = hxs.squeeze(0)
        return x, hxs
class ConvBlock(nn.Module):
    """``num_conv`` repetitions of Conv3x3 -> BatchNorm -> ReLU, with an
    optional 2x2 max-pool at the end."""

    def __init__(self, in_features, out_features, num_conv, pool=False):
        super(ConvBlock, self).__init__()
        widths = [in_features] + [out_features] * num_conv
        ops = []
        for n_in, n_out in zip(widths[:-1], widths[1:]):
            ops.append(nn.Conv2d(in_channels=n_in, out_channels=n_out,
                                 kernel_size=3, padding=1, bias=True))
            ops.append(nn.BatchNorm2d(num_features=n_out, affine=True,
                                      track_running_stats=True))
            ops.append(nn.ReLU())
        if pool:
            ops.append(nn.MaxPool2d(kernel_size=2, stride=2, padding=0))
        self.op = nn.Sequential(*ops)

    def forward(self, x):
        return self.op(x)
class LinearAttentionBlock(nn.Module):
    """1x1-conv attention: score each spatial position of ``l`` against the
    global descriptor ``g`` and return (score map, attention-pooled features).
    """

    def __init__(self, in_features):
        super(LinearAttentionBlock, self).__init__()
        self.op = nn.Conv2d(in_channels=in_features, out_channels=1,
                            kernel_size=1, padding=0, bias=False)

    def forward(self, l, g):
        n, ch, w, h = l.size()
        scores = self.op(l + g)  # (N, 1, W, H); g broadcasts over space
        attn = F.softmax(scores.view(n, 1, -1), dim=2).view(n, 1, w, h)
        weighted = attn.expand_as(l) * l
        pooled = weighted.view(n, ch, -1).sum(dim=2)  # (N, C)
        return scores.view(n, 1, w, h), pooled
class AttentionModel(nn.Module):
    """Conv trunk with three linear-attention heads.

    Returns ``[g, c1, c2, c3]``: the 256-d embedding followed by the raw
    attention score maps of the three attended layers.

    NOTE(review): the original in-line size annotations claimed ``g`` ends up
    1x1 spatially; tracing a 64x64 input through conv_block6's single pool
    gives 4x4, and the ``l + g`` broadcast in the attention blocks requires
    compatible spatial sizes -- confirm the intended input resolution.
    """

    def __init__(self, in_channels, **kwargs):
        super(AttentionModel, self).__init__()
        self.conv_block1 = ConvBlock(in_channels, 8, 2)
        self.conv_block2 = ConvBlock(8, 16, 2)
        self.conv_block3 = ConvBlock(16, 32, 2)
        self.conv_block4 = ConvBlock(32, 64, 3)
        self.conv_block5 = ConvBlock(64, 64, 3)
        self.conv_block6 = ConvBlock(64, 64, 3, pool=True)
        self.dense = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=1, padding=0, bias=True)
        # Lifts the 32-channel l1 map to 64 channels so it can be combined
        # with the global descriptor inside the first attention block.
        self.projector = nn.Conv2d(32, 64, kernel_size=1, padding=0, bias=False)
        self.att1 = LinearAttentionBlock(64)
        self.att2 = LinearAttentionBlock(64)
        self.att3 = LinearAttentionBlock(64)
        self.output_dim = 256
        self.embed = nn.Linear(in_features=64 * 4, out_features=self.output_dim)

    def forward(self, x):
        # Trunk: three conv blocks at full resolution.
        x = self.conv_block3(self.conv_block2(self.conv_block1(x)))
        # Progressively pooled intermediate feature maps used for attention.
        l1 = F.max_pool2d(x, kernel_size=2, stride=2, padding=0)
        l2 = F.max_pool2d(self.conv_block4(l1), kernel_size=2, stride=2, padding=0)
        l3 = F.max_pool2d(self.conv_block5(l2), kernel_size=2, stride=2, padding=0)
        # Global descriptor from the deepest block.
        g = self.dense(self.conv_block6(l3))
        c1, g1 = self.att1(self.projector(l1), g)
        c2, g2 = self.att2(l2, g)
        c3, g3 = self.att3(l3, g)
        batch, chans, _, _ = g.size()
        global_feat = g.view(batch, chans, -1).sum(dim=2)
        embedding = self.embed(torch.cat((global_feat, g1, g2, g3), dim=1))
        return [embedding, c1, c2, c3]
import logging
import os
from flask import Flask, request
import pandas as pd
import torch
from torch_template import Model
# Hard-coded deployment paths; the checkpoint directory name embeds its
# training timestamp. TODO(review): make configurable via env vars / CLI.
PATH = "/home/jovyan"
LOAD_PATH = os.path.join(PATH, "runs/03-56 17_02_22/model.pth")
OUTPUT_PATH = os.path.join(PATH, "runs/api")
# Configuring logging
# NOTE(review): assumes OUTPUT_PATH already exists -- basicConfig will fail
# otherwise; confirm the directory is created by the deployment.
logging.basicConfig(
    filename=os.path.join(OUTPUT_PATH, "run.log"),
    format="%(asctime)s - %(levelname)s - %(message)s",
    encoding="utf-8",
    level=logging.INFO,
)
app = Flask(__name__)
# Load the trained model once at import time and put it in eval mode.
model = Model(1, 1).to("cpu")
logging.info(f"Model:\n{model}")
model.load_state_dict(torch.load(LOAD_PATH))
logging.info(f"Loaded model from {LOAD_PATH}")
model.eval()
# One-off smoke-test inference with a single dummy input, logged at startup.
X = torch.tensor([1]).type(torch.LongTensor).to("cpu")
logging.info(f"X: {X.type}")
logging.info(f"{model.forward(X)}")
@app.route("/model")
def model_info():
    """Return a text dump of the loaded model architecture.

    BUG FIX: this view was named ``model``, which rebound the module-level
    ``model`` (the network) to the view function at definition time, so this
    handler and /predict would log/call the function instead of the network.
    Renaming the view restores the global; the HTTP route is unchanged.
    """
    logging.info(f"Model:\n{model}")
    return f"Model:\n{model}"
@app.route("/predict", methods=["POST"])
def predict():
    """Run inference on a JSON-serialized DataFrame.

    The POST body must be JSON readable by ``pd.read_json``; only the first
    row is used as model input. Returns a JSON object with the prediction.
    """
    if request.method == "POST":
        input_json = request.get_json()
        input_df = pd.read_json(input_json)
        logging.info(f"Input DataFrame: {input_df}\n")
        X = torch.tensor(input_df.values)[0]
        logging.info(f"X shape: {X.shape}\n")
        logging.info(f"X: {X}\n")
        with torch.no_grad():  # inference only; skip autograd bookkeeping
            pred = model(X)
        # BUG FIX: a torch.Tensor is not a valid Flask response object;
        # return a JSON-serializable payload instead.
        return {"prediction": pred.tolist()}
if __name__ == "__main__":
    # Flask development server; use a proper WSGI server in production.
    app.run(host="localhost", port=6006)
| 1,350 | 515 |
from keras.models import Sequential
from keras.layers import Conv2D, Conv2DTranspose, Input, BatchNormalization, PReLU
from keras.callbacks import ModelCheckpoint, Callback, TensorBoard
from keras.optimizers import SGD, Adam
import numpy as np
import math
import os
import random
from os import listdir, makedirs
from os.path import isfile, join, exists
from PIL import Image
import os.path, sys
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
from s3sync import S3SyncCallback
def model(scale = 2):
    """Build and compile the FSRCNN-style super-resolution network.

    Structure: feature extraction (d=56 filters, 5x5), shrinking (s=12, 1x1),
    m=4 mapping layers (3x3), expansion back to d (1x1), and a transposed
    convolution that upscales by ``scale``.

    Parameters
    ----------
    scale: int
        Upscaling factor applied by the final Conv2DTranspose.

    Returns
    -------
    A compiled keras ``Sequential`` model (MSE loss, Adam optimizer).
    """
    d = 56  # feature-extraction filter count
    s = 12  # shrinking filter count
    m = 4   # number of mapping layers
    SRCNN = Sequential()
    # FIX: the original mixed removed Keras 1 kwargs (nb_filter/nb_row/nb_col,
    # init, border_mode, bias) with Keras 2 kwargs in the same model; all
    # layers are unified on the Keras 2 API here.
    SRCNN.add(Conv2D(filters=d, kernel_size=(5, 5), kernel_initializer='glorot_uniform',
                     padding='same', use_bias=True, input_shape=(100, 100, 3)))
    SRCNN.add(PReLU(shared_axes=[1, 2]))
    SRCNN.add(Conv2D(filters=s, kernel_size=(1, 1), kernel_initializer='glorot_uniform',
                     padding='same', use_bias=True))
    SRCNN.add(PReLU(shared_axes=[1, 2]))
    for i in range(m):
        SRCNN.add(Conv2D(filters=s, kernel_size=(3, 3), kernel_initializer='glorot_uniform',
                         padding='same', use_bias=True))
        SRCNN.add(PReLU(shared_axes=[1, 2]))
    SRCNN.add(Conv2D(filters=d, kernel_size=(1, 1), kernel_initializer='glorot_uniform',
                     padding='same', use_bias=True))
    SRCNN.add(PReLU(shared_axes=[1, 2]))
    SRCNN.add(Conv2DTranspose(filters=3, kernel_size=(9, 9), strides=(scale, scale),
                              kernel_initializer='glorot_uniform', padding='same', use_bias=True))
    adam = Adam(lr=0.0003)
    SRCNN.compile(optimizer=adam, loss='mean_squared_error', metrics=['mean_squared_error'])
    return SRCNN
class MyDataGenerator(object):
    """Endless generator of (input, label) image batches.

    The input and label directories must contain files with identical names;
    the file list is reshuffled at the start of every pass.
    """

    def flow_from_directory(self, input_dir, label_dir, batch_size=32):
        batch_inputs = []
        batch_labels = []
        while True:
            files = listdir(input_dir)
            random.shuffle(files)
            for name in files:
                batch_inputs.append(self.load_image(input_dir, name))
                batch_labels.append(self.load_image(label_dir, name))
                if len(batch_inputs) == batch_size:
                    yield np.asarray(batch_inputs), np.asarray(batch_labels)
                    batch_inputs = []
                    batch_labels = []

    def load_image(self, src_dir, f):
        """Load one image as an RGB float32 array scaled to [0, 1]."""
        pixels = np.asarray(Image.open(join(src_dir, f)).convert('RGB'), dtype='float32')
        return pixels / 255.
def train(log_dir, model_dir, train_dir, test_dir, eval_img, scale, epochs, steps):
    """Train the super-resolution model from on-disk image directories.

    Parameters
    ----------
    log_dir: str
        TensorBoard log directory (also synced to S3).
    model_dir: str
        Where 'check.h5' (best checkpoint) and 'model.h5' (final) are saved.
    train_dir, test_dir: str
        Each must contain 'input' and 'label' subdirectories of images.
    eval_img: str
        Evaluation image path; currently unused (PredCallback is a no-op).
    scale: int
        Upscaling factor passed to ``model``.
    epochs, steps: int
        Number of epochs and steps per epoch (also used as validation steps).
    """
    srcnn_model = model(scale)
    print(srcnn_model.summary())
    datagen = MyDataGenerator()
    train_gen = datagen.flow_from_directory(os.path.join(
        train_dir, 'input'),
        os.path.join(train_dir, 'label'),
        batch_size = 10)
    val_gen = datagen.flow_from_directory(
        os.path.join(test_dir, 'input'),
        os.path.join(test_dir, 'label'),
        batch_size = 10)
    # Placeholder callback; the per-epoch prediction dump is disabled.
    class PredCallback(Callback):
        def on_epoch_end(self, epoch, logs=None):
            pass
            #pred.predict(self.model, eval_img, 'base-%d.png' % epoch, 'out-%d.png' % epoch, False)
    # Logs a PSNR estimate derived from the epoch's MSE losses.
    # NOTE(review): images are scaled to [0,1], so scaling the MSE by 255
    # (not 255**2) makes this PSNR differ from the standard definition by a
    # constant offset -- confirm the intended formula.
    class PSNRCallback(Callback):
        def on_epoch_end(self, epoch, logs=None):
            loss = logs['loss'] * 255.
            val_loss = logs['val_loss'] * 255.
            psnr = 20 * math.log10(255. / math.sqrt(loss))
            val_psnr = 20 * math.log10(255. / math.sqrt(val_loss))
            print("\n")
            print("PSNR:%s" % psnr)
            print("PSNR(val):%s" % val_psnr)
    pd_cb = PredCallback()
    ps_cb = PSNRCallback()
    # Keep only the best model by validation loss.
    md_cb = ModelCheckpoint(os.path.join(model_dir,'check.h5'), monitor='val_loss', verbose=1, save_best_only=True, save_weights_only=False, mode='min', period=1)
    tb_cb = TensorBoard(log_dir=log_dir)
    s3_cb = S3SyncCallback(s3_base_url='s3://tryswift/super-resolution-kit/log', log_dir=log_dir)
    # NOTE: pd_cb is instantiated but intentionally not registered below.
    srcnn_model.fit_generator(
        generator = train_gen,
        steps_per_epoch = steps,
        validation_data = val_gen,
        validation_steps = steps,
        epochs = epochs,
        callbacks=[ps_cb, md_cb, tb_cb, s3_cb])
    srcnn_model.save(os.path.join(model_dir,'model.h5'))
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser()
    # Required positional paths.
    parser.add_argument("log_dir")
    parser.add_argument("model_dir")
    parser.add_argument("train_dir")
    parser.add_argument("test_dir")
    # Optional evaluation image (currently unused by train's PredCallback).
    parser.add_argument("--eval_img")
    # NOTE(review): single-dash long options are unconventional; argparse
    # accepts them, but --scale/--epochs/--steps would be more standard.
    parser.add_argument("-scale", type=int, default=2)
    parser.add_argument("-epochs", type=int, default=100)
    parser.add_argument("-steps", type=int, default=100)
    args = parser.parse_args()
    print(args)
    if not exists(args.model_dir):
        makedirs(args.model_dir)
    train(args.log_dir, args.model_dir, args.train_dir, args.test_dir, args.eval_img, args.scale, args.epochs, args.steps)
| 4,768 | 1,770 |
import re
import traceback
import discord
from discord.ext import commands
class ErrorHandler(commands.Cog):
    """Global command-error handler cog.

    Turns common discord.py command errors into user-friendly (Japanese)
    embeds in the invoking channel and forwards unhandled tracebacks to a
    fixed logging webhook.
    """
    def __init__(self, bot):
        self.bot = bot
    @commands.Cog.listener()
    async def on_command_error(self, ctx, error):
        # Silently ignore unknown commands and cooldown spam.
        if isinstance(error, (commands.CommandNotFound, commands.CommandOnCooldown)):
            return
        # Placeholder message edited in place once the error is classified.
        waiting = await ctx.send(f"{ctx.author.mention}->エラーが発生しました...原因を解析しています...")
        if isinstance(error, commands.MissingRequiredArgument):
            arg = str(error.param)
            # Maps known parameter names to human-readable (Japanese) hints.
            varname = {
                'object_gos': 'サーバーオブジェクトもしくは文字列',
                'database': '操作したいデータベース',
                'object_mor': '検索したい役職もしくはメンバー',
                'announcedata': 'アナウンスする文章',
                'noteuser': 'Noteのユーザー名',
                'channelname': 'チャンネル名',
                'channel': 'チャンネル',
                'sqlcmd': 'SQLステートメント',
                'roll_data': '抽選するもの',
                '_triger': '絵文字の追加名',
                'code': 'コード',
                'userid': 'ユーザーID',
                'reason': '理由',
                'target': '処置を行う相手',
                'playername': '検索するプレイヤー',
                'artist': '歌手名',
                'song': '曲名',
                'text': '打ち込みたい文章',
                'math_value': '計算させたい式',
                'ip': '検索したいサーバーのIPアドレス',
                'settype': 'タイプ指定',
                'triger': 'メモを呼び出すためのトリガー',
                'role': '役職',
                'onlinetype': 'オンライン表示',
                'playing': 'アクティビティー',
                'check': 'タイプ指定',
                'tododata': 'ToDoの文章',
                'user': 'ユーザー',
                'invite_user': '招待したいユーザー',
                'sentence': '文章',
                'title': 'タイトル',
                'bantype': 'BANのタイプ',
                'badge_type': 'バッジのタイプ',
                'get_type': '付与するタイプ',
                'guild': 'サーバー名',
                'data_id': 'ID',
            }
            # str(error.param) can include annotation/default text; keep only
            # the bare parameter name before '.', ',' or ':'.
            arg = re.split('[.,:]', arg)
            embed = discord.Embed(
                title="引数不足です!", description=f"引数``{arg[0]}``が足りていません!", color=discord.Colour.from_rgb(255, 0, 0))
            # Add a hint only when the parameter name is in the lookup table.
            # NOTE(review): bare except hides unrelated failures; a KeyError
            # check would be safer.
            try:
                desc = varname[arg[0]]
                embed.add_field(name=f"💡もしかしたら...",
                                value=f"``{desc}``が不足していませんか?")
            except:
                pass
            await waiting.edit(content=f"{ctx.author.mention}->", embed=embed)
            return
        elif isinstance(error, commands.BadArgument):
            # Debug output left in place: dumps the error's attributes into
            # the channel. NOTE(review): probably not intended for production.
            await ctx.send(dir(error))
            try:
                await ctx.send(dir(error.__context__))
            except:
                pass
            # Maps converter type names to human-readable (Japanese) labels.
            target_dir = {
                'int': '数値',
                'Member': 'メンバー',
                'user': 'ユーザー',
                'Guild': 'サーバー',
                'Emoji': '絵文字'
            }
            # Extracts the failing converter name from the third word of the
            # error's args string. NOTE(review): fragile -- depends on the
            # exact wording of discord.py's BadArgument message.
            target = str(error.args).split()[2].replace('"', '')
            embed = discord.Embed(
                title=f'取得に失敗しました!', description=f"引数の``{target}``を取得できませんでした!", color=discord.Colour.from_rgb(255, 0, 0))
            try:
                desc = target_dir[target]
                embed.add_field(
                    name="💡もしかして...", value=f"引数の``{desc}``は実際に存在していますか?\n実際に存在しているオブジェクトでも、凜花が認識していないオブジェクトは取得できない場合があります。")
            except:
                pass
            await waiting.edit(content=f"{ctx.author.mention}->", embed=embed)
            return
        elif isinstance(error, (commands.MissingPermissions, commands.BotMissingPermissions)):
            # Translate the first missing permission via the bot-wide lookup
            # table when available; otherwise show the raw permission name.
            perm = error.missing_perms[0]
            try:
                perm = self.bot.permissions_dir[perm]
            except:
                pass
            if isinstance(error, commands.MissingPermissions):
                await waiting.edit(content=f"{ctx.author.mention}->", embed=discord.Embed(title=f"権限不足です!", description=f"このコマンドを実行するには、``{perm}``が必要です!", color=discord.Colour.from_rgb(255, 0, 0)))
            else:
                await waiting.edit(content=f"{ctx.author.mention}->", embed=discord.Embed(title=f"Botの権限不足です!", description=f"このコマンドを実行するには、Botに``{perm}``を付与する必要があります!", color=discord.Colour.from_rgb(255, 0, 0)))
            return
        # Unclassified error: show it raw to the user, then forward the full
        # traceback to the logging webhook.
        try:
            await waiting.edit(content=f"{ctx.author.mention}->{error}")
        except:
            await waiting.edit(content=f"{ctx.author.mention}->エラーが解析できませんでした!")
        s_error = traceback.format_exception(
            type(error), error, error.__traceback__)
        print(s_error)
        # Greedily merge adjacent traceback lines into chunks that stay under
        # 2000-15 characters -- presumably Discord's 2000-char message limit
        # minus formatting overhead.
        for i in range(len(s_error)):
            while len("".join(s_error[i:i+2])) < 2000-15 and len("".join(s_error[i+1:])) != 0:
                s_error[i:i+2] = ["".join(s_error[i:i+2])]
        webhook = await self.bot.fetch_webhook(800731709104324658)
        # Send the chunks three embeds at a time, numbered "k/total".
        for i in range(0, len(s_error), 3):
            await webhook.send(embeds=[discord.Embed(description=f"```py\n{y}```").set_footer(text=f"{i+x+1}/{len(s_error)}") for x, y in enumerate(s_error[i:i+3])])
def setup(bot):
    # discord.py extension entry point: register the cog on the bot.
    bot.add_cog(ErrorHandler(bot))
| 5,023 | 1,924 |
"""
Channel0 is used for connection level communication between RabbitMQ and the
client on channel 0.
"""
import locale
import logging
import sys
from pamqp import header
from pamqp import heartbeat
from pamqp import specification
from rabbitpy import __version__
from rabbitpy import base
from rabbitpy import events
from rabbitpy import exceptions
from rabbitpy.utils import queue
LOGGER = logging.getLogger(__name__)
# Capture the interpreter's locale once at import time, then drop the module
# reference so it does not linger in the package namespace.
DEFAULT_LOCALE = locale.getdefaultlocale()
del locale
class Channel0(base.AMQPChannel):
    """Channel0 is used to negotiate a connection with RabbitMQ and for
    processing and dispatching events on channel 0 once connected.
    :param dict connection_args: Data required to negotiate the connection
    :param events_obj: The shared events coordination object
    :type events_obj: rabbitpy.events.Events
    :param exception_queue: The queue where any pending exceptions live
    :type exception_queue: queue.Queue
    :param write_queue: The queue to place data to write in
    :type write_queue: queue.Queue
    :param write_trigger: The socket to write to, to trigger IO writes
    :type write_trigger: socket.socket
    """
    CHANNEL = 0
    CLOSE_REQUEST_FRAME = specification.Connection.Close
    # Fallback when neither the user nor the system provides a locale.
    DEFAULT_LOCALE = 'en-US'
    def __init__(self, connection_args, events_obj, exception_queue,
                 write_queue, write_trigger, connection):
        super(Channel0, self).__init__(
            exception_queue, write_trigger, connection)
        self._channel_id = 0
        self._args = connection_args
        self._events = events_obj
        self._exceptions = exception_queue
        self._read_queue = queue.Queue()
        self._write_queue = write_queue
        self._write_trigger = write_trigger
        self._state = self.CLOSED
        # Client-requested values; renegotiated in _on_connection_tune.
        self._max_channels = connection_args['channel_max']
        self._max_frame_size = connection_args['frame_max']
        self._heartbeat_interval = connection_args['heartbeat']
        self.properties = None
    def close(self):
        """Close the connection via Channel0 communication."""
        if self.open:
            self._set_state(self.CLOSING)
            self.rpc(specification.Connection.Close())
    @property
    def heartbeat_interval(self):
        """Return the AMQP heartbeat interval for the connection
        :rtype: int
        """
        return self._heartbeat_interval
    @property
    def maximum_channels(self):
        """Return the AMQP maximum channel count for the connection
        :rtype: int
        """
        return self._max_channels
    @property
    def maximum_frame_size(self):
        """Return the AMQP maximum frame size for the connection
        :rtype: int
        """
        return self._max_frame_size
    def on_frame(self, value):
        """Process a RPC frame received from the server
        :param pamqp.message.Message value: The message value
        """
        # Dispatch is by the frame's class name string.
        LOGGER.debug('Received frame: %r', value.name)
        if value.name == 'Connection.Close':
            # Server-initiated close: tear down, then surface the reason as
            # a specific AMQP exception when the reply code is known.
            LOGGER.warning('RabbitMQ closed the connection (%s): %s',
                           value.reply_code, value.reply_text)
            self._set_state(self.CLOSED)
            self._events.set(events.SOCKET_CLOSED)
            self._events.set(events.CHANNEL0_CLOSED)
            self._connection.close()
            if value.reply_code in exceptions.AMQP:
                err = exceptions.AMQP[value.reply_code](value.reply_text)
            else:
                err = exceptions.RemoteClosedException(value.reply_code,
                                                       value.reply_text)
            self._exceptions.put(err)
            self._trigger_write()
        elif value.name == 'Connection.Blocked':
            LOGGER.warning('RabbitMQ has blocked the connection: %s',
                           value.reason)
            self._events.set(events.CONNECTION_BLOCKED)
        elif value.name == 'Connection.CloseOk':
            # Acknowledgment of our own Connection.Close request.
            self._set_state(self.CLOSED)
            self._events.set(events.CHANNEL0_CLOSED)
        elif value.name == 'Connection.OpenOk':
            self._on_connection_open_ok()
        elif value.name == 'Connection.Start':
            self._on_connection_start(value)
        elif value.name == 'Connection.Tune':
            self._on_connection_tune(value)
        elif value.name == 'Connection.Unblocked':
            LOGGER.info('Connection is no longer blocked')
            self._events.clear(events.CONNECTION_BLOCKED)
        elif value.name == 'Heartbeat':
            pass
        else:
            LOGGER.warning('Unexpected Channel0 Frame: %r', value)
            raise specification.AMQPUnexpectedFrame(value)
    def send_heartbeat(self):
        """Send a heartbeat frame to the remote connection."""
        self.write_frame(heartbeat.Heartbeat())
    def start(self):
        """Start the AMQP protocol negotiation"""
        self._set_state(self.OPENING)
        self._write_protocol_header()
    def _build_open_frame(self):
        """Build and return the Connection.Open frame.
        :rtype: pamqp.specification.Connection.Open
        """
        return specification.Connection.Open(self._args['virtual_host'])
    def _build_start_ok_frame(self):
        """Build and return the Connection.StartOk frame.
        :rtype: pamqp.specification.Connection.StartOk
        """
        properties = {
            'product': 'rabbitpy',
            'platform': 'Python {0}.{1}.{2}'.format(*sys.version_info),
            'capabilities': {'authentication_failure_close': True,
                             'basic.nack': True,
                             'connection.blocked': True,
                             'consumer_cancel_notify': True,
                             'publisher_confirms': True},
            'information': 'See https://rabbitpy.readthedocs.io',
            'version': __version__}
        return specification.Connection.StartOk(client_properties=properties,
                                                response=self._credentials,
                                                locale=self._get_locale())
    def _build_tune_ok_frame(self):
        """Build and return the Connection.TuneOk frame.
        :rtype: pamqp.specification.Connection.TuneOk
        """
        return specification.Connection.TuneOk(self._max_channels,
                                               self._max_frame_size,
                                               self._heartbeat_interval)
    @property
    def _credentials(self):
        """Return the marshaled credentials for the AMQP connection.
        NUL-delimited username/password, as used for the PLAIN mechanism.
        :rtype: str
        """
        return '\0%s\0%s' % (self._args['username'], self._args['password'])
    def _get_locale(self):
        """Return the current locale for the python interpreter or the default
        locale.
        :rtype: str
        """
        if not self._args['locale']:
            # System locale first; class default when that is undetermined.
            return DEFAULT_LOCALE[0] or self.DEFAULT_LOCALE
        return self._args['locale']
    @staticmethod
    def _negotiate(client_value, server_value):
        """Return the negotiated value between what the client has requested
        and the server has requested for how the two will communicate.
        Takes the smaller non-zero value; when one side sends 0 (no limit),
        falls back to whichever side specified a value.
        :param int client_value:
        :param int server_value:
        :return: int
        """
        return min(client_value, server_value) or \
            (client_value or server_value)
    def _on_connection_open_ok(self):
        LOGGER.debug('Connection opened')
        self._set_state(self.OPEN)
        self._events.set(events.CHANNEL0_OPENED)
    def _on_connection_start(self, frame_value):
        """Negotiate the Connection.Start process, writing out a
        Connection.StartOk frame when the Connection.Start frame is received.
        :type frame_value: pamqp.specification.Connection.Start
        :raises: rabbitpy.exceptions.ConnectionException
        """
        if not self._validate_connection_start(frame_value):
            LOGGER.error('Could not negotiate a connection, disconnecting')
            raise exceptions.ConnectionResetException()
        self.properties = frame_value.server_properties
        for key in self.properties:
            if key == 'capabilities':
                for capability in self.properties[key]:
                    LOGGER.debug('Server supports %s: %r',
                                 capability, self.properties[key][capability])
            else:
                LOGGER.debug('Server %s: %r', key, self.properties[key])
        self.write_frame(self._build_start_ok_frame())
    def _on_connection_tune(self, frame_value):
        """Negotiate the Connection.Tune frames, waiting for the
        Connection.Tune frame from RabbitMQ and sending the Connection.TuneOk
        frame.
        :param specification.Connection.Tune frame_value: Tune frame
        """
        self._max_frame_size = self._negotiate(self._max_frame_size,
                                               frame_value.frame_max)
        self._max_channels = self._negotiate(self._max_channels,
                                             frame_value.channel_max)
        LOGGER.debug('Heartbeat interval (server/client): %r/%r',
                     frame_value.heartbeat, self._heartbeat_interval)
        # Properly negotiate the heartbeat interval
        # None means "accept the server's value"; 0 on either side disables
        # heartbeats entirely; otherwise the client's request stands.
        if self._heartbeat_interval is None:
            self._heartbeat_interval = frame_value.heartbeat
        elif self._heartbeat_interval == 0 or frame_value.heartbeat == 0:
            self._heartbeat_interval = 0
        self.write_frame(self._build_tune_ok_frame())
        self.write_frame(self._build_open_frame())
    @staticmethod
    def _validate_connection_start(frame_value):
        """Validate the received Connection.Start frame
        :param specification.Connection.Start frame_value: Frame to validate
        :rtype: bool
        """
        if (frame_value.version_major, frame_value.version_minor) != \
                (specification.VERSION[0], specification.VERSION[1]):
            LOGGER.warning('AMQP version error (received %i.%i, expected %r)',
                           frame_value.version_major,
                           frame_value.version_minor,
                           specification.VERSION)
            return False
        return True
    def _write_protocol_header(self):
        """Send the protocol header to the connected server."""
        self.write_frame(header.ProtocolHeader())
| 10,481 | 2,850 |
#!/usr/bin/env python3
from sys import argv
from subprocess import run
from pathlib import Path
def format_source(fp):
    """Run isort and then yapf to format the python files contained in
    fp. Sends the output to /dev/null.

    Parameters
    ----------
    fp: str
        The file path to search recursively for python files.
    """
    from shlex import quote

    style = "'{based_on_style: pep8, column_limit: 120}'"
    for file in Path(fp).rglob("*.py"):
        print(" -", str(file))
        # BUG FIX: quote the path so file names containing spaces or shell
        # metacharacters cannot break (or inject into) the shell command.
        f = quote(str(file))
        run(f"isort --dont-float-to-top {f} > /dev/null; yapf -i --style={style} {f} > /dev/null", shell=True)
def format_docstrings(fp):
    """Use docformatter to format docstrings to PEP-8 convention.

    Parameters
    ----------
    fp: str
        The file path to search recursively for python files.
    """
    from shlex import quote

    for file in Path(fp).rglob("*.py"):
        print(" -", str(file))
        # BUG FIX: quote the path so file names containing spaces or shell
        # metacharacters cannot break (or inject into) the shell command.
        run(f"docformatter -i {quote(str(file))} > /dev/null", shell=True)
def strip_type_hints(fp):
    """Strip type hints from source files.

    Parameters
    ----------
    fp: str
        The file path to search recursively for python files.
    """
    from shlex import quote

    for file in Path(fp).rglob("*.py"):
        print(" -", str(file))
        # BUG FIX: quote the path so file names containing spaces or shell
        # metacharacters cannot break (or inject into) the shell command.
        # NOTE(review): a fixed tmp.txt in the CWD is not concurrency-safe.
        f = quote(str(file))
        run(f"strip-hints {f} > tmp.txt; mv tmp.txt {f}", shell=True)
if "--strip-hints" in argv:
print("Stripping type hints:")
strip_type_hints("pypython")
strip_type_hints("scripts")
print("Reformating source files:")
format_source("pypython")
format_source("scripts")
print("Reformatting docstrings")
format_docstrings("pypython")
format_docstrings("scripts")
| 1,617 | 543 |
import json
from base64 import b64encode
from Crypto.Cipher import AES, DES
from Crypto.Util.Padding import pad
from Crypto.Hash import MD5
from urllib.parse import quote
# Helpers producing the encrypted payloads (Cpdaily-Extension header, form
# body, request signature) expected by the Cpdaily API.
def extensionEncrypt(data):
    """DES-CBC encrypt the JSON form of *data* for the Cpdaily-Extension
    header (form plus personal configuration data).

    Returns the base64-encoded ciphertext as a str.
    """
    key = b"b3L26XNL"
    iv = bytes([1, 2, 3, 4, 5, 6, 7, 8])
    payload = bytes(json.dumps(data), encoding='utf-8')
    # BUG FIX: removed a leftover debug print that dumped the (potentially
    # sensitive) plaintext payload to stdout.
    cipher = DES.new(key, DES.MODE_CBC, iv)
    secret_bytes = cipher.encrypt(pad(payload, DES.block_size))
    encrypted = b64encode(secret_bytes).decode('utf-8')
    return encrypted
def formBodyEncrypt(data):
    """AES-CBC encrypt the JSON form of *data*; return it base64-encoded."""
    key = b'ytUQ7l2ZZu8mLvJZ'
    iv = bytes([1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7])
    plaintext = bytes(json.dumps(data), encoding='utf-8')
    aes = AES.new(key, AES.MODE_CBC, iv)
    ciphertext = aes.encrypt(pad(plaintext, AES.block_size))
    return b64encode(ciphertext).decode('utf-8')
def getSignHash(data):
    """Return the MD5 sign hash (hex string) for *data*.

    *data* is JSON-encoded, percent-encoded, suffixed with the fixed
    ``=&<key>`` string, and the MD5 hex digest of the UTF-8 bytes is
    returned.

    Note: the parameter was renamed from ``str`` to ``data`` to stop
    shadowing the builtin; positional callers are unaffected.
    """
    # stdlib hashlib.md5 produces output identical to Crypto.Hash.MD5.
    import hashlib
    jstr = json.dumps(data)
    temp = bytes(quote(jstr) + '=&ytUQ7l2ZZu8mLvJZ', encoding='utf-8')
    return hashlib.md5(temp).hexdigest()
| 1,091 | 480 |
# X86 disassembler for Python
# Copyright (c) 2011-2012 Rusty Wagner
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# Prefix/status bit flags reported with a decoded instruction.
FLAG_LOCK = 1 << 0
FLAG_REP = 1 << 1
FLAG_REPNE = 1 << 2
FLAG_REPE = 1 << 3
FLAG_OPSIZE = 1 << 4
FLAG_ADDRSIZE = 1 << 5
FLAG_64BIT_ADDRESS = 1 << 6
FLAG_INSUFFICIENT_LENGTH = 1 << 31
FLAG_ANY_REP = FLAG_REP | FLAG_REPE | FLAG_REPNE
# Decode-table flags controlling how an opcode's operands are handled.
DEC_FLAG_LOCK = 1 << 5
DEC_FLAG_REP = 1 << 6
DEC_FLAG_REP_COND = 1 << 7
DEC_FLAG_BYTE = 1 << 8
DEC_FLAG_FLIP_OPERANDS = 1 << 9
DEC_FLAG_IMM_SX = 1 << 10
DEC_FLAG_INC_OPERATION_FOR_64 = 1 << 11
DEC_FLAG_OPERATION_OP_SIZE = 1 << 12
DEC_FLAG_FORCE_16BIT = 1 << 13
DEC_FLAG_INVALID_IN_64BIT = 1 << 14
DEC_FLAG_DEFAULT_TO_64BIT = 1 << 15
# Low two bits of the decode flags select the reg/rm size relationship.
DEC_FLAG_REG_RM_SIZE_MASK = 0x03
DEC_FLAG_REG_RM_2X_SIZE = 0x01
DEC_FLAG_REG_RM_FAR_SIZE = 0x02
DEC_FLAG_REG_RM_NO_SIZE = 0x03
# Register name tables: "cr0".."cr15", "dr0".."dr15", "tr0".."tr15".
ControlRegs = ["cr%d" % i for i in range(16)]
DebugRegs = ["dr%d" % i for i in range(16)]
TestRegs = ["tr%d" % i for i in range(16)]
# Primary (one-byte) opcode table, indexed by the opcode byte 0x00-0xff.
# Each entry is a pair [operation, decode-method]:
#   * a string operation is the instruction mnemonic;
#   * a list of strings selects the mnemonic by operand size (e.g. 16/32/64);
#   * an integer selects a row in a group table (see GroupOperations below);
#   * None marks bytes decoded elsewhere (prefixes, escapes, invalid).
# The second element names the operand-decoding routine to apply; None there
# marks bytes with no table-driven decode (e.g. 0x0f escapes to TwoByteOpcodeMap).
MainOpcodeMap = [
	["add", "rm_reg_8_lock"], ["add", "rm_reg_v_lock"], ["add", "reg_rm_8"], ["add", "reg_rm_v"], # 0x00
	["add", "eax_imm_8"], ["add", "eax_imm_v"], ["push", "push_pop_seg"], ["pop", "push_pop_seg"], # 0x04
	["or", "rm_reg_8_lock"], ["or", "rm_reg_v_lock"], ["or", "reg_rm_8"], ["or", "reg_rm_v"], # 0x08
	["or", "eax_imm_8"], ["or", "eax_imm_v"], ["push", "push_pop_seg"], [None, "two_byte"], # 0x0c
	["adc", "rm_reg_8_lock"], ["adc", "rm_reg_v_lock"], ["adc", "reg_rm_8"], ["adc", "reg_rm_v"], # 0x10
	["adc", "eax_imm_8"], ["adc", "eax_imm_v"], ["push", "push_pop_seg"], ["pop", "push_pop_seg"], # 0x14
	["sbb", "rm_reg_8_lock"], ["sbb", "rm_reg_v_lock"], ["sbb", "reg_rm_8"], ["sbb", "reg_rm_v"], # 0x18
	["sbb", "eax_imm_8"], ["sbb", "eax_imm_v"], ["push", "push_pop_seg"], ["pop", "push_pop_seg"], # 0x1c
	["and", "rm_reg_8_lock"], ["and", "rm_reg_v_lock"], ["and", "reg_rm_8"], ["and", "reg_rm_v"], # 0x20
	["and", "eax_imm_8"], ["and", "eax_imm_v"], [None, None], ["daa", "no_operands"], # 0x24
	["sub", "rm_reg_8_lock"], ["sub", "rm_reg_v_lock"], ["sub", "reg_rm_8"], ["sub", "reg_rm_v"], # 0x28
	["sub", "eax_imm_8"], ["sub", "eax_imm_v"], [None, None], ["das", "no_operands"], # 0x2c
	["xor", "rm_reg_8_lock"], ["xor", "rm_reg_v_lock"], ["xor", "reg_rm_8"], ["xor", "reg_rm_v"], # 0x30
	["xor", "eax_imm_8"], ["xor", "eax_imm_v"], [None, None], ["aaa", "no_operands"], # 0x34
	["cmp", "rm_reg_8"], ["cmp", "rm_reg_v"], ["cmp", "reg_rm_8"], ["cmp", "reg_rm_v"], # 0x38
	["cmp", "eax_imm_8"], ["cmp", "eax_imm_v"], [None, None], ["aas", "no_operands"], # 0x3c
	["inc", "op_reg_v"], ["inc", "op_reg_v"], ["inc", "op_reg_v"], ["inc", "op_reg_v"], # 0x40
	["inc", "op_reg_v"], ["inc", "op_reg_v"], ["inc", "op_reg_v"], ["inc", "op_reg_v"], # 0x44
	["dec", "op_reg_v"], ["dec", "op_reg_v"], ["dec", "op_reg_v"], ["dec", "op_reg_v"], # 0x48
	["dec", "op_reg_v"], ["dec", "op_reg_v"], ["dec", "op_reg_v"], ["dec", "op_reg_v"], # 0x4c
	["push", "op_reg_v_def64"], ["push", "op_reg_v_def64"], ["push", "op_reg_v_def64"], ["push", "op_reg_v_def64"], # 0x50
	["push", "op_reg_v_def64"], ["push", "op_reg_v_def64"], ["push", "op_reg_v_def64"], ["push", "op_reg_v_def64"], # 0x54
	["pop", "op_reg_v_def64"], ["pop", "op_reg_v_def64"], ["pop", "op_reg_v_def64"], ["pop", "op_reg_v_def64"], # 0x58
	["pop", "op_reg_v_def64"], ["pop", "op_reg_v_def64"], ["pop", "op_reg_v_def64"], ["pop", "op_reg_v_def64"], # 0x5c
	[["pusha", "pushad"], "op_size_no64"], [["popa", "popad"], "op_size_no64"], ["bound", "reg_rm2x_v"], ["arpl", "arpl"], # 0x60
	[None, None], [None, None], [None, None], [None, None], # 0x64
	["push", "imm_v_def64"], ["imul", "reg_rm_imm_v"], ["push", "immsx_v_def64"], ["imul", "reg_rm_immsx_v"], # 0x68
	["insb", "edi_dx_8_rep"], [["insw", "insd"], "edi_dx_op_size_rep"], ["outsb", "dx_esi_8_rep"], [["outsw", "outsd"], "dx_esi_op_size_rep"], # 0x6c
	["jo", "relimm_8_def64"], ["jno", "relimm_8_def64"], ["jb", "relimm_8_def64"], ["jae", "relimm_8_def64"], # 0x70
	["je", "relimm_8_def64"], ["jne", "relimm_8_def64"], ["jbe", "relimm_8_def64"], ["ja", "relimm_8_def64"], # 0x74
	["js", "relimm_8_def64"], ["jns", "relimm_8_def64"], ["jpe", "relimm_8_def64"], ["jpo", "relimm_8_def64"], # 0x78
	["jl", "relimm_8_def64"], ["jge", "relimm_8_def64"], ["jle", "relimm_8_def64"], ["jg", "relimm_8_def64"], # 0x7c
	[0, "group_rm_imm_8_lock"], [0, "group_rm_imm_v_lock"], [0, "group_rm_imm_8_no64_lock"], [0, "group_rm_immsx_v_lock"], # 0x80
	["test", "rm_reg_8"], ["test", "rm_reg_v"], ["xchg", "rm_reg_8_lock"], ["xchg", "rm_reg_v_lock"], # 0x84
	["mov", "rm_reg_8"], ["mov", "rm_reg_v"], ["mov", "reg_rm_8"], ["mov", "reg_rm_v"], # 0x88
	["mov", "rm_sreg_v"], ["lea", "reg_rm_0"], ["mov", "sreg_rm_v"], ["pop", "rm_v_def64"], # 0x8c
	["nop", "nop"], ["xchg", "eax_op_reg_v"], ["xchg", "eax_op_reg_v"], ["xchg", "eax_op_reg_v"], # 0x90
	["xchg", "eax_op_reg_v"], ["xchg", "eax_op_reg_v"], ["xchg", "eax_op_reg_v"], ["xchg", "eax_op_reg_v"], # 0x94
	[["cbw", "cwde", "cdqe"], "op_size"], [["cwd", "cdq", "cqo"], "op_size"], ["callf", "far_imm_no64"], ["fwait", "no_operands"], # 0x98
	[["pushf", "pushfd", "pushfq"], "op_size_def64"], [["popf", "popfd", "popfq"], "op_size_def64"], ["sahf", "no_operands"], ["lahf", "no_operands"], # 0x9c
	["mov", "eax_addr_8"], ["mov", "eax_addr_v"], ["mov", "addr_eax_8"], ["mov", "addr_eax_v"], # 0xa0
	["movsb", "edi_esi_8_rep"], [["movsw", "movsd", "movsq"], "edi_esi_op_size_rep"], ["cmpsb", "esi_edi_8_repc"], [["cmpsw", "cmpsd", "cmpsq"], "esi_edi_op_size_repc"], # 0xa4
	["test", "eax_imm_8"], ["test", "eax_imm_v"], ["stosb", "edi_eax_8_rep"], [["stosw", "stosd", "stosq"], "edi_eax_op_size_rep"], # 0xa8
	["lodsb", "eax_esi_8_rep"], [["lodsw", "lodsd", "lodsq"], "eax_esi_op_size_rep"], ["scasb", "eax_edi_8_repc"], [["scasw", "scasd", "scasq"], "eax_edi_op_size_repc"], # 0xac
	["mov", "op_reg_imm_8"], ["mov", "op_reg_imm_8"], ["mov", "op_reg_imm_8"], ["mov", "op_reg_imm_8"], # 0xb0
	["mov", "op_reg_imm_8"], ["mov", "op_reg_imm_8"], ["mov", "op_reg_imm_8"], ["mov", "op_reg_imm_8"], # 0xb4
	["mov", "op_reg_imm_v"], ["mov", "op_reg_imm_v"], ["mov", "op_reg_imm_v"], ["mov", "op_reg_imm_v"], # 0xb8
	["mov", "op_reg_imm_v"], ["mov", "op_reg_imm_v"], ["mov", "op_reg_imm_v"], ["mov", "op_reg_imm_v"], # 0xbc
	[1, "group_rm_imm_8"], [1, "group_rm_imm8_v"], ["retn", "imm_16"], ["retn", "no_operands"], # 0xc0
	["les", "reg_rm_f"], ["lds", "reg_rm_f"], [2, "group_rm_imm_8"], [2, "group_rm_imm_v"], # 0xc4
	["enter", "imm16_imm8"], ["leave", "no_operands"], ["retf", "imm_16"], ["retf", "no_operands"], # 0xc8
	["int3", "no_operands"], ["int", "imm_8"], ["into", "no_operands"], ["iret", "no_operands"], # 0xcc
	[1, "group_rm_one_8"], [1, "group_rm_one_v"], [1, "group_rm_cl_8"], [1, "group_rm_cl_v"], # 0xd0
	["aam", "imm_8"], ["aad", "imm_8"], ["salc", "no_operands"], ["xlat", "al_ebx_al"], # 0xd4
	[0, "fpu"], [1, "fpu"], [2, "fpu"], [3, "fpu"], # 0xd8
	[4, "fpu"], [5, "fpu"], [6, "fpu"], [7, "fpu"], # 0xdc
	["loopne", "relimm_8_def64"], ["loope", "relimm_8_def64"], ["loop", "relimm_8_def64"], [["jcxz", "jecxz", "jrcxz"], "relimm_8_addr_size_def64"], # 0xe0
	["in", "eax_imm8_8"], ["in", "eax_imm8_v"], ["out", "imm8_eax_8"], ["out", "imm8_eax_v"], # 0xe4
	["calln", "relimm_v_def64"], ["jmpn", "relimm_v_def64"], ["jmpf", "far_imm_no64"], ["jmpn", "relimm_8_def64"], # 0xe8
	["in", "eax_dx_8"], ["in", "eax_dx_v"], ["out", "dx_eax_8"], ["out", "dx_eax_v"], # 0xec
	[None, None], ["int1", "no_operands"], [None, None], [None, None], # 0xf0
	["hlt", "no_operands"], ["cmc", "no_operands"], [3, "group_f6"], [3, "group_f7"], # 0xf4
	["clc", "no_operands"], ["stc", "no_operands"], ["cli", "no_operands"], ["sti", "no_operands"], # 0xf8
	["cld", "no_operands"], ["std", "no_operands"], [4, "group_rm_8_lock"], [5, "group_ff"], # 0xfc
]
# Two-byte opcode table (reached from MainOpcodeMap entry 0x0f, "two_byte"),
# indexed by the second opcode byte 0x00-0xff. Entry format matches
# MainOpcodeMap: [operation, decode-method], where an integer operation
# selects a group/SSE table row and a list selects a size-dependent mnemonic.
TwoByteOpcodeMap = [
	[6, "group_0f00"], [7, "group_0f01"], ["lar", "reg_rm_v"], ["lsl", "reg_rm_v"], # 0x00
	[None, None], ["syscall", "no_operands"], ["clts", "no_operands"], ["sysret", "no_operands"], # 0x04
	["invd", "no_operands"], ["wbinvd", "no_operands"], [None, None], ["ud2", "no_operands"], # 0x08
	[None, None], [8, "group_rm_0"], ["femms", "no_operands"], [0, "_3dnow"], # 0x0c
	[0, "sse_table"], [0, "sse_table_flip"], [1, "sse_table"], [2, "sse_table_flip"], # 0x10
	[3, "sse_table"], [4, "sse_table"], [5, "sse_table"], [6, "sse_table_flip"], # 0x14
	[9, "group_rm_0"], [10, "group_rm_0"], [10, "group_rm_0"], [10, "group_rm_0"], # 0x18
	[10, "group_rm_0"], [10, "group_rm_0"], [10, "group_rm_0"], [10, "group_rm_0"], # 0x1c
	[ControlRegs, "reg_cr"], [DebugRegs, "reg_cr"], [ControlRegs, "cr_reg"], [DebugRegs, "cr_reg"], # 0x20
	[TestRegs, "reg_cr"], [None, None], [TestRegs, "cr_reg"], [None, None], # 0x24
	[7, "sse_table"], [7, "sse_table_flip"], [8, "sse_table"], [9, "sse_table_flip"], # 0x28
	[10, "sse_table"], [11, "sse_table"], [12, "sse_table"], [13, "sse_table"], # 0x2c
	["wrmsr", "no_operands"], ["rdtsc", "no_operands"], ["rdmsr", "no_operands"], ["rdpmc", "no_operands"], # 0x30
	["sysenter", "no_operands"], ["sysexit", "no_operands"], [None, None], ["getsec", "no_operands"], # 0x34
	[None, None], [None, None], [None, None], [None, None], # 0x38
	[None, None], [None, None], [None, None], [None, None], # 0x3c
	["cmovo", "reg_rm_v"], ["cmovno", "reg_rm_v"], ["cmovb", "reg_rm_v"], ["cmovae", "reg_rm_v"], # 0x40
	["cmove", "reg_rm_v"], ["cmovne", "reg_rm_v"], ["cmovbe", "reg_rm_v"], ["cmova", "reg_rm_v"], # 0x44
	["cmovs", "reg_rm_v"], ["cmovns", "reg_rm_v"], ["cmovpe", "reg_rm_v"], ["cmovpo", "reg_rm_v"], # 0x48
	["cmovl", "reg_rm_v"], ["cmovge", "reg_rm_v"], ["cmovle", "reg_rm_v"], ["cmovg", "reg_rm_v"], # 0x4c
	[14, "sse_table"], [["sqrtps", "sqrtpd", "sqrtsd", "sqrtss"], "sse"], [["rsqrtps", "rsqrtss"], "sse_single"], [["rcpps", "rcpss"], "sse_single"], # 0x50
	[["andps", "andpd"], "sse_packed"], [["andnps", "andnpd"], "sse_packed"], [["orps", "orpd"], "sse_packed"], [["xorps", "xorpd"], "sse_packed"], # 0x54
	[["addps", "addpd", "addsd", "addss"], "sse"], [["mulps", "mulpd", "mulsd", "mulss"], "sse"], [15, "sse_table"], [16, "sse_table"], # 0x58
	[["subps", "subpd", "subsd", "subss"], "sse"], [["minps", "minpd", "minsd", "minss"], "sse"], [["divps", "divpd", "divsd", "divss"], "sse"], [["maxps", "maxpd", "maxsd", "maxss"], "sse"], # 0x5c
	[17, "sse_table"], [18, "sse_table"], [19, "sse_table"], ["packsswb", "mmx"], # 0x60
	["pcmpgtb", "mmx"], ["pcmpgtw", "mmx"], ["pcmpgtd", "mmx"], ["packuswb", "mmx"], # 0x64
	["punpckhbw", "mmx"], ["punpckhwd", "mmx"], ["punpckhdq", "mmx"], ["packssdw", "mmx"], # 0x68
	["punpcklqdq", "mmx_sseonly"], ["punpckhqdq", "mmx_sseonly"], [20, "sse_table_incop64"], [21, "sse_table"], # 0x6c
	[22, "sse_table_imm_8"], [0, "mmx_group"], [1, "mmx_group"], [2, "mmx_group"], # 0x70
	["pcmpeqb", "mmx"], ["pcmpeqw", "mmx"], ["pcmpeqd", "mmx"], ["emms", "no_operands"], # 0x74
	["vmread", "rm_reg_def64"], ["vmwrite", "rm_reg_def64"], [None, None], [None, None], # 0x78
	[23, "sse_table"], [24, "sse_table"], [25, "sse_table_incop64_flip"], [21, "sse_table_flip"], # 0x7c
	["jo", "relimm_v_def64"], ["jno", "relimm_v_def64"], ["jb", "relimm_v_def64"], ["jae", "relimm_v_def64"], # 0x80
	["je", "relimm_v_def64"], ["jne", "relimm_v_def64"], ["jbe", "relimm_v_def64"], ["ja", "relimm_v_def64"], # 0x84
	["js", "relimm_v_def64"], ["jns", "relimm_v_def64"], ["jpe", "relimm_v_def64"], ["jpo", "relimm_v_def64"], # 0x88
	["jl", "relimm_v_def64"], ["jge", "relimm_v_def64"], ["jle", "relimm_v_def64"], ["jg", "relimm_v_def64"], # 0x8c
	["seto", "rm_8"], ["setno", "rm_8"], ["setb", "rm_8"], ["setae", "rm_8"], # 0x90
	["sete", "rm_8"], ["setne", "rm_8"], ["setbe", "rm_8"], ["seta", "rm_8"], # 0x94
	["sets", "rm_8"], ["setns", "rm_8"], ["setpe", "rm_8"], ["setpo", "rm_8"], # 0x98
	["setl", "rm_8"], ["setge", "rm_8"], ["setle", "rm_8"], ["setg", "rm_8"], # 0x9c
	["push", "push_pop_seg"], ["pop", "push_pop_seg"], ["cpuid", "no_operands"], ["bt", "rm_reg_v"], # 0xa0
	["shld", "rm_reg_imm8_v"], ["shld", "rm_reg_cl_v"], [None, None], [None, None], # 0xa4
	["push", "push_pop_seg"], ["pop", "push_pop_seg"], ["rsm", "no_operands"], ["bts", "rm_reg_v_lock"], # 0xa8
	["shrd", "rm_reg_imm8_v"], ["shrd", "rm_reg_cl_v"], [24, "group_0fae"], ["imul", "reg_rm_v"], # 0xac
	["cmpxchg", "rm_reg_8_lock"], ["cmpxchg", "rm_reg_v_lock"], ["lss", "reg_rm_f"], ["btr", "rm_reg_v_lock"], # 0xb0
	["lfs", "reg_rm_f"], ["lgs", "reg_rm_f"], ["movzx", "movsxzx_8"], ["movzx", "movsxzx_16"], # 0xb4
	["popcnt", "_0fb8"], [None, None], [11, "group_rm_imm8_v"], ["btc", "rm_reg_v_lock"], # 0xb8
	["bsf", "reg_rm_v"], ["bsr", "reg_rm_v"], ["movsx", "movsxzx_8"], ["movsx", "movsxzx_16"], # 0xbc
	["xadd", "rm_reg_8_lock"], ["xadd", "rm_reg_v_lock"], [26, "sse_table_imm_8"], ["movnti", "movnti"], # 0xc0
	[27, "pinsrw"], [28, "sse_table_imm_8"], [29, "sse_table_imm_8"], ["cmpxch8b", "cmpxch8b"], # 0xc4
	["bswap", "op_reg_v"], ["bswap", "op_reg_v"], ["bswap", "op_reg_v"], ["bswap", "op_reg_v"], # 0xc8
	["bswap", "op_reg_v"], ["bswap", "op_reg_v"], ["bswap", "op_reg_v"], ["bswap", "op_reg_v"], # 0xcc
	[30, "sse_table"], ["psrlw", "mmx"], ["psrld", "mmx"], ["psrlq", "mmx"], # 0xd0
	["paddq", "mmx"], ["pmullw", "mmx"], [31, "sse_table"], [32, "sse_table"], # 0xd4
	["psubusb", "mmx"], ["psubusw", "mmx"], ["pminub", "mmx"], ["pand", "mmx"], # 0xd8
	["paddusb", "mmx"], ["paddusw", "mmx"], ["pmaxub", "mmx"], ["pandn", "mmx"], # 0xdc
	["pavgb", "mmx"], ["psraw", "mmx"], ["psrad", "mmx"], ["pavgw", "mmx"], # 0xe0
	["pmulhuw", "mmx"], ["pmulhw", "mmx"], [33, "sse_table"], [34, "sse_table_flip"], # 0xe4
	["psubsb", "mmx"], ["psubsw", "mmx"], ["pminsw", "mmx"], ["por", "mmx"], # 0xe8
	["paddsb", "mmx"], ["paddsw", "mmx"], ["pmaxsw", "mmx"], ["pxor", "mmx"], # 0xec
	[35, "sse_table"], ["psllw", "mmx"], ["pslld", "mmx"], ["psllq", "mmx"], # 0xf0
	["pmuludq", "mmx"], ["pmaddwd", "mmx"], ["psadbw", "mmx"], [36, "sse_table"], # 0xf4
	["psubb", "mmx"], ["psubw", "mmx"], ["psubd", "mmx"], ["psubq", "mmx"], # 0xf8
	["paddb", "mmx"], ["paddw", "mmx"], ["paddd", "mmx"], ["ud", "no_operands"] # 0xfc
]
# Sparse table for three-byte 0x0f 0x38 opcodes. Unlike the dense maps
# above, each entry carries its own opcode byte:
# [opcode byte, operation, decode-method] (operation may be an int index
# into the SSE table, as in the other maps).
ThreeByte0F38Map = [
	[0x00, "pshufb", "mmx"], [0x01, "phaddw", "mmx"], [0x02, "phaddd", "mmx"], [0x03, "phaddsw", "mmx"],
	[0x04, "pmaddubsw", "mmx"], [0x05, "phsubw", "mmx"], [0x06, "phsubd", "mmx"], [0x07, "phsubsw", "mmx"],
	[0x08, "psignb", "mmx"], [0x09, "psignw", "mmx"], [0x0a, "psignd", "mmx"], [0x0b, "pmulhrsw", "mmx"],
	[0x10, "pblendvb", "mmx_sseonly"], [0x14, "blendvps", "mmx_sseonly"], [0x15, "blendvpd", "mmx_sseonly"],
	[0x17, "ptest", "mmx_sseonly"], [0x1c, "pabsb", "mmx"], [0x1d, "pabsw", "mmx"], [0x1e, "pabsd", "mmx"],
	[0x20, 37, "sse_table"], [0x21, 38, "sse_table"], [0x22, 39, "sse_table"], [0x23, 40, "sse_table"],
	[0x24, 41, "sse_table"], [0x25, 42, "sse_table"], [0x28, "pmuldq", "mmx_sseonly"], [0x29, "pcmpeqq", "mmx_sseonly"],
	[0x2a, 43, "sse_table"], [0x2b, "packusdw", "mmx_sseonly"], [0x30, 44, "sse_table"], [0x31, 45, "sse_table"],
	[0x32, 46, "sse_table"], [0x33, 47, "sse_table"], [0x34, 48, "sse_table"], [0x35, 49, "sse_table"],
	[0x37, "pcmpgtq", "mmx_sseonly"], [0x38, "pminsb", "mmx_sseonly"], [0x39, "pminsd", "mmx_sseonly"],
	[0x3a, "pminuw", "mmx_sseonly"], [0x3b, "pminud", "mmx_sseonly"], [0x3c, "pmaxsb", "mmx_sseonly"],
	[0x3d, "pmaxsd", "mmx_sseonly"], [0x3e, "pmaxuw", "mmx_sseonly"], [0x3f, "pmaxud", "mmx_sseonly"],
	[0x40, "pmulld", "mmx_sseonly"], [0x41, "phminposuw", "mmx_sseonly"], [0xf0, "crc32", "crc32_8"], [0xf1, "crc32", "crc32_v"]
]
# Sparse table for three-byte 0x0f 0x3a opcodes; entry format matches
# ThreeByte0F38Map: [opcode byte, operation, decode-method].
ThreeByte0F3AMap = [
	[0x08, "roundps", "mmx_sseonly"], [0x09, "roundpd", "mmx_sseonly"], [0x0a, 50, "sse_table"], [0x0b, 51, "sse_table"],
	[0x0c, "blendps", "mmx_sseonly"], [0x0d, "blendpd", "mmx_sseonly"], [0x0e, "pblendw", "mmx_sseonly"], [0x0f, "palignr", "mmx"],
	[0x14, 52, "sse_table_mem8_flip"], [0x15, 53, "sse_table"], [0x16, 54, "sse_table_incop64_flip"],
	[0x17, 55, "sse_table_flip"], [0x20, 56, "sse_table_mem8"], [0x21, 57, "sse_table"], [0x22, 58, "sse_table_incop64"],
	[0x40, "dpps", "mmx_sseonly"], [0x41, "dppd", "mmx_sseonly"], [0x42, "mpsadbw", "mmx_sseonly"],
	[0x60, "pcmpestrm", "mmx_sseonly"], [0x61, "pcmpestri", "mmx_sseonly"], [0x62, "pcmpistrm", "mmx_sseonly"],
	[0x63, "pcmpistri", "mmx_sseonly"]
]
# x87 FPU opcode tables for escape opcodes 0xd8-0xdf when the instruction
# takes a memory operand. One 8-entry sub-table per escape byte; each
# entry is [mnemonic, memory-operand decode method]. The entries are
# presumably selected by the ModRM reg field (0-7) -- the "# 0" / "# 4"
# markers label entry offsets within each sub-table.
FPUMemOpcodeMap = [
	[ # 0xd8
		["fadd", "mem_32"], ["fmul", "mem_32"], ["fcom", "mem_32"], ["fcomp", "mem_32"], # 0
		["fsub", "mem_32"], ["fsubr", "mem_32"], ["fdiv", "mem_32"], ["fdivr", "mem_32"] # 4
	],
	[ # 0xd9
		["fld", "mem_32"], [None, None], ["fst", "mem_32"], ["fstp", "mem_32"], # 0
		["fldenv", "mem_floatenv"], ["fldcw", "mem_16"], ["fstenv", "mem_floatenv"], ["fstcw", "mem_16"] # 4
	],
	[ # 0xda
		["fiadd", "mem_32"], ["fimul", "mem_32"], ["ficom", "mem_32"], ["ficomp", "mem_32"], # 0
		["fisub", "mem_32"], ["fisubr", "mem_32"], ["fidiv", "mem_32"], ["fidivr", "mem_32"] # 4
	],
	[ # 0xdb
		["fild", "mem_32"], ["fisttp", "mem_32"], ["fist", "mem_32"], ["fistp", "mem_32"], # 0
		[None, None], ["fld", "mem_80"], [None, None], ["fstp", "mem_80"] # 4
	],
	[ # 0xdc
		["fadd", "mem_64"], ["fmul", "mem_64"], ["fcom", "mem_64"], ["fcomp", "mem_64"], # 0
		["fsub", "mem_64"], ["fsubr", "mem_64"], ["fdiv", "mem_64"], ["fdivr", "mem_64"] # 4
	],
	[ # 0xdd
		["fld", "mem_64"], ["fisttp", "mem_64"], ["fst", "mem_64"], ["fstp", "mem_64"], # 0
		["frstor", "mem_floatsave"], [None, None], ["fsave", "mem_floatsave"], ["fstsw", "mem_16"] # 4
	],
	[ # 0xde
		["fiadd", "mem_16"], ["fimul", "mem_16"], ["ficom", "mem_16"], ["ficomp", "mem_16"], # 0
		["fisub", "mem_16"], ["fisubr", "mem_16"], ["fidiv", "mem_16"], ["fidivr", "mem_16"] # 4
	],
	[ # 0xdf
		["fild", "mem_16"], ["fisttp", "mem_16"], ["fist", "mem_16"], ["fistp", "mem_16"], # 0
		["fbld", "mem_80"], ["fild", "mem_64"], ["fbstp", "mem_80"], ["fistp", "mem_64"] # 4
	]
]
# x87 FPU opcode tables for escape opcodes 0xd8-0xdf when the instruction
# takes a register (st(i)) operand. Structure mirrors FPUMemOpcodeMap;
# integer entries select a row in GroupOperations (e.g. 12 -> "fnop" group).
FPURegOpcodeMap = [
	[ # 0xd8
		["fadd", "st0_fpureg"], ["fmul", "st0_fpureg"], ["fcom", "st0_fpureg"], ["fcomp", "st0_fpureg"], # 0
		["fsub", "st0_fpureg"], ["fsubr", "st0_fpureg"], ["fdiv", "st0_fpureg"], ["fdivr", "st0_fpureg"] # 4
	],
	[ # 0xd9
		["fld", "fpureg"], ["fxch", "st0_fpureg"], [12, "reggroup_no_operands"], [None, None], # 0
		[13, "reggroup_no_operands"], [14, "reggroup_no_operands"], [15, "reggroup_no_operands"], [16, "reggroup_no_operands"] # 4
	],
	[ # 0xda
		["fcmovb", "st0_fpureg"], ["fcmove", "st0_fpureg"], ["fcmovbe", "st0_fpureg"], ["fcmovu", "st0_fpureg"], # 0
		[None, None], [17, "reggroup_no_operands"], [None, None], [None, None] # 4
	],
	[ # 0xdb
		["fcmovnb", "st0_fpureg"], ["fcmovne", "st0_fpureg"], ["fcmovnbe", "st0_fpureg"], ["fcmovnu", "st0_fpureg"], # 0
		[18, "reggroup_no_operands"], ["fucomi", "st0_fpureg"], ["fcomi", "st0_fpureg"], [21, "reggroup_no_operands"] # 4
	],
	[ # 0xdc
		["fadd", "fpureg_st0"], ["fmul", "fpureg_st0"], [None, None], [None, None], # 0
		["fsubr", "fpureg_st0"], ["fsub", "fpureg_st0"], ["fdivr", "fpureg_st0"], ["fdiv", "fpureg_st0"] # 4
	],
	[ # 0xdd
		["ffree", "fpureg"], [None, None], ["fst", "fpureg"], ["fstp", "fpureg"], # 0
		["fucom", "st0_fpureg"], ["fucomp", "st0_fpureg"], [None, None], [22, "reggroup_no_operands"] # 4
	],
	[ # 0xde
		["faddp", "fpureg_st0"], ["fmulp", "fpureg_st0"], [None, None], [19, "reggroup_no_operands"], # 0
		["fsubrp", "fpureg_st0"], ["fsubp", "fpureg_st0"], ["fdivrp", "fpureg_st0"], ["fdivp", "fpureg_st0"] # 4
	],
	[ # 0xdf
		["ffreep", "fpureg"], [None, None], [None, None], [None, None], # 0
		[20, "reggroup_ax"], ["fucomip", "st0_fpureg"], ["fcomip", "st0_fpureg"], [23, "reggroup_no_operands"] # 4
	]
]
# Mnemonic tables for grouped opcodes. An integer "operation" in the
# opcode maps above selects one of these 8-entry rows; the mnemonic
# within a row is presumably picked by the ModRM reg field (0-7).
# None marks an undefined encoding within the group.
GroupOperations = [
	["add", "or", "adc", "sbb", "and", "sub", "xor", "cmp"], # Group 0
	["rol", "ror", "rcl", "rcr", "shl", "shr", "shl", "sar"], # Group 1
	["mov", None, None, None, None, None, None, None], # Group 2
	["test", "test", "not", "neg", "mul", "imul", "div", "idiv"], # Group 3
	["inc", "dec", None, None, None, None, None, None], # Group 4
	["inc", "dec", "calln", "callf", "jmpn", "jmpf", "push", None], # Group 5
	["sldt", "str", "lldt", "ltr", "verr", "verw", None, None], # Group 6
	["sgdt", "sidt", "lgdt", "lidt", "smsw", None, "lmsw", "invlpg"], # Group 7
	["prefetch", "prefetchw", "prefetch", "prefetch", "prefetch", "prefetch", "prefetch", "prefetch"], # Group 8
	["prefetchnta", "prefetcht0", "prefetcht1", "prefetcht2", "mmxnop", "mmxnop", "mmxnop", "mmxnop"], # Group 9
	["mmxnop", "mmxnop", "mmxnop", "mmxnop", "mmxnop", "mmxnop", "mmxnop", "mmxnop"], # Group 10
	[None, None, None, None, "bt", "bts", "btr", "btc"], # Group 11
	["fnop", None, None, None, None, None, None, None], # Group 12
	["fchs", "fabs", None, None, "ftst", "fxam", None, None], # Group 13
	["fld1", "fldl2t", "fldl2e", "fldpi", "fldlg2", "fldln2", "fldz", None], # Group 14
	["f2xm1", "fyl2x", "fptan", "fpatan", "fxtract", "fprem1", "fdecstp", "fincstp"], # Group 15
	["fprem", "fyl2xp1", "fsqrt", "fsincos", "frndint", "fscale", "fsin", "fcos"], # Group 16
	[None, "fucompp", None, None, None, None, None, None], # Group 17
	["feni", "fdisi", "fclex", "finit", "fsetpm", "frstpm", None, None], # Group 18
	[None, "fcompp", None, None, None, None, None, None], # Group 19
	["fstsw", "fstdw", "fstsg", None, None, None, None, None], # Group 20
	[None, None, None, None, "frint2", None, None, None], # Group 21
	[None, None, None, None, "frichop", None, None, None], # Group 22
	[None, None, None, None, "frinear", None, None, None], # Group 23
	["fxsave", "fxrstor", "ldmxcsr", "stmxcsr", "xsave", "xrstor", None, "clflush"], # Group 24
	[None, None, None, None, None, "lfence", "mfence", "sfence"] # Group 25
]
# Mnemonics for 0x0f 0x01 encodings with a register-form ModRM byte;
# one 8-entry row per outer index, None marking undefined encodings.
Group0F01RegOperations = [
	[None, "vmcall", "vmlaunch", "vmresume", "vmxoff", None, None, None],
	["monitor", "mwait", None, None, None, None, None, None],
	["xgetbv", "xsetbv", None, None, None, None, None, None],
	[None] * 8,
	[None] * 8,
	[None] * 8,
	[None] * 8,
	["swapgs", "rdtscp", None, None, None, None, None, None]
]
# Mnemonic tables for the MMX/SSE shift groups (reached via "mmx_group"
# entries at two-byte opcodes 0x71-0x73). Each entry is a pair of forms,
# presumably [MMX form, SSE form] -- e.g. psrldq/pslldq exist only in the
# second (SSE) column.
MMXGroupOperations = [
	[ # Group 0
		[None, None], [None, None], ["psrlw", "psrlw"], [None, None],
		["psraw", "psraw"], [None, None], ["psllw", "psllw"], [None, None]
	],
	[ # Group 1
		[None, None], [None, None], ["psrld", "psrld"], [None, None],
		["psrad", "psrad"], [None, None], ["pslld", "pslld"], [None, None]
	],
	[ # Group 2
		[None, None], [None, None], ["psrlq", "psrlq"], [None, "psrldq"],
		[None, None], [None, None], ["psllq", "psllq"], [None, "pslldq"]
	]
]
SSETable = [
[ # Entry 0
[["movups", "sse_128", "sse_128"], ["movupd", "sse_128", "sse_128"], ["movsd", "sse_128", "sse_128"], ["movss", "sse_128", "sse_128"]],
[["movups", "sse_128", "sse_128"], ["movupd", "sse_128", "sse_128"], ["movsd", "sse_128", "sse_64"], ["movss", "sse_128", "sse_32"]]
],
[ # Entry 1
[["movhlps", "sse_128", "sse_128"], [None, 0, 0], ["movddup", "sse_128", "sse_128"], ["movsldup", "sse_128", "sse_128"]],
[["movlps", "sse_128", "sse_64"], ["movlpd", "sse_128", "sse_64"], ["movddup", "sse_128", "sse_64"], ["movsldup", "sse_128", "sse_128"]]
],
[ # Entry 2
[[None, 0, 0], [None, 0, 0], [None, 0, 0], [None, 0, 0]],
[["movlps", "sse_128", "sse_64"], ["movlpd", "sse_128", "sse_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 3
[["unpcklps", "sse_128", "sse_128"], ["unpcklpd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[["unpcklps", "sse_128", "sse_128"], ["unpcklpd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 4
[["unpckhps", "sse_128", "sse_128"], ["unpckhpd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[["unpckhps", "sse_128", "sse_128"], ["unpckhpd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 5
[["movlhps", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0], ["movshdup", "sse_128", "sse_128"]],
[["movhps", "sse_128", "sse_64"], ["movhpd", "sse_128", "sse_64"], [None, 0, 0], ["movshdup", "sse_128", "sse_128"]]
],
[ # Entry 6
[[None, 0, 0], [None, 0, 0], [None, 0, 0], [None, 0, 0]],
[["movhps", "sse_128", "sse_64"], ["movhpd", "sse_128", "sse_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 7
[["movaps", "sse_128", "sse_128"], ["movapd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[["movaps", "sse_128", "sse_128"], ["movapd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 8
[["cvtpi2ps", "sse_128", "mmx_64"], ["cvtpi2pd", "sse_128", "mmx_64"], ["cvtsi2sd", "sse_128", "gpr_32_or_64"], ["cvtsi2ss", "sse_128", "gpr_32_or_64"]],
[["cvtpi2ps", "sse_128", "mmx_64"], ["cvtpi2pd", "sse_128", "mmx_64"], ["cvtsi2sd", "sse_128", "gpr_32_or_64"], ["cvtsi2ss", "sse_128", "gpr_32_or_64"]]
],
[ # Entry 9
[[None, 0, 0], [None, 0, 0], [None, 0, 0], [None, 0, 0]],
[["movntps", "sse_128", "sse_128"], ["movntpd", "sse_128", "sse_128"], ["movntsd", "sse_128", "sse_64"], ["movntss", "see_128", "sse_32"]]
],
[ # Entry 10
[["cvttps2pi", "mmx_64", "sse_128"], ["cvttpd2pi", "mmx_64", "sse_128"], ["cvttsd2si", "gpr_32_or_64", "sse_128"], ["cvttss2si", "gpr_32_or_64", "sse_128"]],
[["cvttps2pi", "mmx_64", "sse_64"], ["cvttpd2pi", "mmx_64", "sse_128"], ["cvttsd2si", "gpr_32_or_64", "sse_64"], ["cvttss2si", "gpr_32_or_64", "sse_32"]]
],
[ # Entry 11
[["cvtps2pi", "mmx_64", "sse_128"], ["cvtpd2pi", "mmx_64", "sse_128"], ["cvtsd2si", "gpr_32_or_64", "sse_128"], ["cvtss2si", "gpr_32_or_64", "sse_128"]],
[["cvtps2pi", "mmx_64", "sse_64"], ["cvtpd2pi", "mmx_64", "sse_128"], ["cvtsd2si", "gpr_32_or_64", "sse_64"], ["cvtss2si", "gpr_32_or_64", "sse_32"]]
],
[ # Entry 12
[["ucomiss", "sse_128", "sse_128"], ["ucomisd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[["ucomiss", "sse_128", "sse_32"], ["ucomisd", "sse_128", "sse_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 13
[["comiss", "sse_128", "sse_128"], ["comisd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[["comiss", "sse_128", "sse_32"], ["comisd", "sse_128", "sse_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 14
[["movmskps", "gpr_32_or_64", "sse_128"], ["movmskpd", "gpr_32_or_64", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], [None, 0, 0], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 15
[["cvtps2pd", "sse_128", "sse_128"], ["cvtpd2ps", "sse_128", "sse_128"], ["cvtsd2ss", "sse_128", "sse_128"], ["cvtss2sd", "sse_128", "sse_128"]],
[["cvtps2pd", "sse_128", "sse_64"], ["cvtpd2ps", "sse_128", "sse_128"], ["cvtsd2ss", "sse_128", "sse_64"], ["cvtss2sd", "sse_128", "sse_32"]]
],
[ # Entry 16
[["cvtdq2ps", "sse_128", "sse_128"], ["cvtps2dq", "sse_128", "sse_128"], [None, 0, 0], ["cvttps2dq", "sse_128", "sse_128"]],
[["cvtdq2ps", "sse_128", "sse_128"], ["cvtps2dq", "sse_128", "sse_128"], [None, 0, 0], ["cvttps2dq", "sse_128", "sse_128"]]
],
[ # Entry 17
[["punpcklbw", "mmx_64", "mmx_64"], ["punpcklbw", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[["punpcklbw", "mmx_64", "mmx_32"], ["punpcklbw", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 18
[["punpcklwd", "mmx_64", "mmx_64"], ["punpcklwd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[["punpcklwd", "mmx_64", "mmx_32"], ["punpcklwd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 19
[["punpckldq", "mmx_64", "mmx_64"], ["punpckldq", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[["punpckldq", "mmx_64", "mmx_32"], ["punpckldq", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 20
[[["movd", "movq"], "mmx_64", "gpr_32_or_64"], [["movd", "movq"], "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]],
[[["movd", "movq"], "mmx_64", "gpr_32_or_64"], [["movd", "movq"], "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 21
[["movq", "mmx_64", "mmx_64"], ["movdqa", "sse_128", "sse_128"], [None, 0, 0], ["movdqu", "sse_128", "sse_128"]],
[["movq", "mmx_64", "mmx_64"], ["movdqa", "sse_128", "sse_128"], [None, 0, 0], ["movdqu", "sse_128", "sse_128"]]
],
[ # Entry 22
[["pshufw", "mmx_64", "mmx_64"], ["pshufd", "sse_128", "sse_128"], ["pshuflw", "sse_128", "sse_128"], ["pshufhw", "sse_128", "sse_128"]],
[["pshufw", "mmx_64", "mmx_64"], ["pshufd", "sse_128", "sse_128"], ["pshuflw", "sse_128", "sse_128"], ["pshufhw", "sse_128", "sse_128"]]
],
[ # Entry 23
[[None, 0, 0], ["haddpd", "sse_128", "sse_128"], ["haddps", "sse_128", "sse_128"], [None, 0, 0]],
[[None, 0, 0], ["haddpd", "sse_128", "sse_128"], ["haddps", "sse_128", "sse_128"], [None, 0, 0]]
],
[ # Entry 24
[[None, 0, 0], ["hsubpd", "sse_128", "sse_128"], ["hsubps", "sse_128", "sse_128"], [None, 0, 0]],
[[None, 0, 0], ["hsubpd", "sse_128", "sse_128"], ["hsubps", "sse_128", "sse_128"], [None, 0, 0]]
],
[ # Entry 25
[[["movd", "movq"], "mmx_64", "gpr_32_or_64"], [["movd", "movq"], "sse_128", "gpr_32_or_64"], [None, 0, 0], ["movq", "sse_128_flip", "sse_128_flip"]],
[[["movd", "movq"], "mmx_64", "gpr_32_or_64"], [["movd", "movq"], "sse_128", "gpr_32_or_64"], [None, 0, 0], ["movq", "sse_128_flip", "sse_128_flip"]]
],
[ # Entry 26
[["cmpps", "sse_128", "sse_128"], ["cmppd", "sse_128", "sse_128"], ["cmpsd", "sse_128", "sse_128"], ["cmpss", "sse_128", "sse_128"]],
[["cmpps", "sse_128", "sse_128"], ["cmppd", "sse_128", "sse_128"], ["cmpsd", "sse_128", "sse_64"], ["cmpss", "sse_128", "sse_32"]]
],
[ # Entry 27
[["pinsrw", "mmx_64", "gpr_32_or_64"], ["pinsrw", "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]],
[["pinsrw", "mmx_64", "gpr_32_or_64"], ["pinsrw", "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 28
[["pextrw", "gpr_32_or_64", "mmx_64"], ["pextrw", "gpr_32_or_64", "sse_128"], [None, 0, 0], [None, 0, 0]],
[["pextrw", "gpr_32_or_64", "mmx_64"], ["pextrw", "gpr_32_or_64", "sse_128"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 29
[["shufps", "sse_128", "sse_128"], ["shufpd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[["shufps", "sse_128", "sse_128"], ["shufpd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 30
[[None, 0, 0], ["addsubpd", "sse_128", "sse_128"], ["addsubps", "sse_128", "sse_128"], [None, 0, 0]],
[[None, 0, 0], ["addsubpd", "sse_128", "sse_128"], ["addsubps", "sse_128", "sse_128"], [None, 0, 0]]
],
[ # Entry 31
[[None, 0, 0], ["movq", "sse_128_flip", "sse_128_flip"], ["movdq2q", "mmx_64", "sse_128"], ["movq2dq", "sse_128", "mmx_64"]],
[[None, 0, 0], ["movq", "sse_128_flip", "sse_128_flip"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 32
[["pmovmskb", "gpr_32_or_64", "mmx_64"], ["pmovmskb", "gpr_32_or_64", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], [None, 0, 0], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 33
[[None, 0, 0], ["cvttpd2dq", "sse_128", "sse_128"], ["cvtpd2dq", "sse_128", "sse_128"], ["cvtdq2pd", "sse_128", "sse_128"]],
[[None, 0, 0], ["cvttpd2dq", "sse_128", "sse_128"], ["cvtpd2dq", "sse_128", "sse_128"], ["cvtdq2pd", "sse_128", "sse_128"]]
],
[ # Entry 34
[[None, 0, 0], [None, 0, 0], [None, 0, 0], [None, 0, 0]],
[["movntq", "mmx_64", "mmx_64"], ["movntdq", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 35
[[None, 0, 0], [None, 0, 0], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], [None, 0, 0], ["lddqu", "sse_128", "sse_128"], [None, 0, 0]]
],
[ # Entry 36
[["maskmovq", "mmx_64", "mmx_64"], ["maskmovdqu", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], [None, 0, 0], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 37
[[None, 0, 0], ["pmovsxbw", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pmovsxbw", "sse_128", "sse_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 38
[[None, 0, 0], ["pmovsxbd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pmovsxbd", "sse_128", "sse_32"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 39
[[None, 0, 0], ["pmovsxbq", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pmovsxbq", "sse_128", "sse_16"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 40
[[None, 0, 0], ["pmovsxwd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pmovsxwd", "sse_128", "sse_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 41
[[None, 0, 0], ["pmovsxwq", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pmovsxwq", "sse_128", "sse_32"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 42
[[None, 0, 0], ["pmovsxdq", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pmovsxdq", "sse_128", "sse_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 43
[[None, 0, 0], [None, 0, 0], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["movntdqa", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 44
[[None, 0, 0], ["pmovzxbw", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pmovzxbw", "sse_128", "sse_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 45
[[None, 0, 0], ["pmovzxbd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pmovzxbd", "sse_128", "sse_32"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 46
[[None, 0, 0], ["pmovzxbq", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pmovzxbq", "sse_128", "sse_16"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 47
[[None, 0, 0], ["pmovzxwd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pmovzxwd", "sse_128", "sse_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 48
[[None, 0, 0], ["pmovzxwq", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pmovzxwq", "sse_128", "sse_32"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 49
[[None, 0, 0], ["pmovzxdq", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pmovzxdq", "sse_128", "sse_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 50
[[None, 0, 0], ["roundss", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["roundss", "sse_128", "sse_32"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 51
[[None, 0, 0], ["roundsd", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["roundsd", "sse_128", "sse_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 52
[[None, 0, 0], ["pextrb", "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pextrb", "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 53
[[None, 0, 0], ["pextrw", "gpr_32_or_64", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pextrw", "sse_16", "sse_128"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 54
[[None, 0, 0], [["pextrd", "pextrq"], "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], [["pextrd", "pextrq"], "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 55
[[None, 0, 0], ["extractps", "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["extractps", "sse_128", "sse_32"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 56
[[None, 0, 0], ["pinsrb", "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["pinsrb", "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 57
[[None, 0, 0], ["insertps", "sse_128", "sse_128"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], ["insertps", "sse_128", "sse_32"], [None, 0, 0], [None, 0, 0]]
],
[ # Entry 58
[[None, 0, 0], [["pinsrd", "pinsrq"], "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]],
[[None, 0, 0], [["pinsrd", "pinsrq"], "sse_128", "gpr_32_or_64"], [None, 0, 0], [None, 0, 0]]
]
]
# 3DNow! instructions are encoded as 0x0f 0x0f <modrm> <imm8>; this sparse
# table maps the trailing imm8 opcode byte to its mnemonic.  It is searched
# with a binary search, so entries must remain sorted by opcode value.
Sparse3DNowOpcodes = [
    [0x0c, "pi2fw"], [0x0d, "pi2fd"],
    [0x1c, "pf2iw"], [0x1d, "pf2id"],
    [0x86, "pfrcpv"], [0x87, "pfrsqrtv"], [0x8a, "pfnacc"], [0x8e, "pfpnacc"],
    [0x90, "pfcmpge"], [0x94, "pfmin"], [0x96, "pfrcp"], [0x97, "pfrsqrt"], [0x9a, "pfsub"], [0x9e, "pfadd"],
    [0xa0, "pfcmpgt"], [0xa4, "pfmax"], [0xa6, "pfrcpit1"], [0xa7, "pfrsqit1"], [0xaa, "pfsubr"], [0xae, "pfacc"],
    [0xb0, "pfcmpeq"], [0xb4, "pfmul"], [0xb6, "pfrcpit2"], [0xb7, "pmulhrw"], [0xbb, "pswapd"], [0xbf, "pavgusb"]
]
# Register name tables, indexed by the 3-bit register field of an opcode or
# ModRM byte; indexes 8-15 are reached via REX prefix extensions.
Reg8List = ["al", "cl", "dl", "bl", "ah", "ch", "dh", "bh"]
# With any REX prefix present, byte registers 4-7 become spl/bpl/sil/dil
# instead of ah/ch/dh/bh.
Reg8List64 = ["al", "cl", "dl", "bl", "spl", "bpl", "sil", "dil", "r8b", "r9b", "r10b", "r11b", "r12b", "r13b", "r14b", "r15b"]
Reg16List = ["ax", "cx", "dx", "bx", "sp", "bp", "si", "di", "r8w", "r9w", "r10w", "r11w", "r12w", "r13w", "r14w", "r15w"]
Reg32List = ["eax", "ecx", "edx", "ebx", "esp", "ebp", "esi", "edi", "r8d", "r9d", "r10d", "r11d", "r12d", "r13d", "r14d", "r15d"]
Reg64List = ["rax", "rcx", "rdx", "rbx", "rsp", "rbp", "rsi", "rdi", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15"]
# MMX and FPU registers only have 8 names; the tables repeat them so that
# REX-extended indexes (8-15) alias back onto the same registers.
MMXRegList = ["mm0", "mm1", "mm2", "mm3", "mm4", "mm5", "mm6", "mm7", "mm0", "mm1", "mm2", "mm3", "mm4", "mm5", "mm6", "mm7"]
XMMRegList = ["xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7", "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", "xmm15"]
FPURegList = ["st0", "st1", "st2", "st3", "st4", "st5", "st6", "st7", "st0", "st1", "st2", "st3", "st4", "st5", "st6", "st7"]
# 16-bit ModRM addressing: [base, index, default segment] per rm value.
# Entry 8 is the displacement-only form (mod=0, rm=6).
RM16Components = [["bx", "si", "ds"], ["bx", "di", "ds"], ["bp", "si", "ss"], ["bp", "di", "ss"], ["si", None, "ds"],
    ["di", None, "ds"], ["bp", None, "ss"], ["bx", None, "ds"], [None, None, "ds"]]
class InstructionOperand:
    """One decoded operand: a register name string, "imm", or "mem"
    (with base/index components, scale, displacement and segment)."""
    def __init__(self):
        self.operand = None           # register name, "imm", "mem", or None if unused
        self.size = 0                 # operand size in bytes
        self.scale = 1                # index scale factor for memory operands
        self.immediate = 0            # immediate value or displacement
        self.segment = None           # segment register name, if any
        self.rip_relative = False     # True for 64-bit RIP-relative addressing
        self.components = [None] * 2  # [base, index] registers for "mem" operands
class Instruction:
    """A fully decoded instruction: mnemonic, up to three operands,
    prefix flags, segment override and encoded length."""
    def __init__(self):
        self.operation = None
        self.flags = 0
        self.segment = None
        self.length = 0
        self.operands = [InstructionOperand(), InstructionOperand(), InstructionOperand()]
    def finalize(self):
        # Trim unused operand slots from the tail of the operand list.
        while self.operands and self.operands[-1].operand is None:
            self.operands.pop()
class DecodeState:
    """Mutable working state threaded through every decoder function."""
    def __init__(self):
        self.result = Instruction()          # the instruction being built
        self.opcode_offset = 0               # bytes consumed so far
        self.flags = 0                       # current encoder's DEC_FLAG_* word
        self.invalid = False                 # set when decoding fails
        self.insufficient_length = False     # set when the buffer ran out
        self.op_prefix = False               # 0x66 operand-size prefix seen
        self.rep = False                     # rep prefix ("repe"/"repne"/None)
        self.using64 = False                 # decoding in 64-bit mode
        # REX prefix bits (B, X, R) as booleans
        self.rex = self.rex_rm1 = self.rex_rm2 = self.rex_reg = False
def get_byte_reg_list(state):
    """Return the 8-bit register table; any REX prefix switches to the
    extended table (spl/bpl/sil/dil and r8b-r15b instead of ah/ch/dh/bh)."""
    return Reg8List64 if state.rex else Reg8List
def get_reg_list_for_final_op_size(state):
    """Map the effective operand size (1/2/4/8 bytes) to its register table."""
    if state.final_op_size == 1:
        return get_byte_reg_list(state)
    return {2: Reg16List, 4: Reg32List, 8: Reg64List}.get(state.final_op_size)
def get_reg_list_for_addr_size(state):
    """Register table used for address components, chosen by address size."""
    return {2: Reg16List, 4: Reg32List, 8: Reg64List}.get(state.addr_size)
def get_final_op_size(state):
    """Effective operand size: byte-only encodings are always 1, otherwise
    the prefix-adjusted op_size applies."""
    return 1 if state.flags & DEC_FLAG_BYTE else state.op_size
def read8(state):
    """Consume one opcode byte.  On buffer exhaustion the state is flagged
    invalid and 0xcc (int3) is returned, which guarantees decoding exits."""
    if not state.opcode:
        state.invalid = True
        state.insufficient_length = True
        state.opcode = ""
        return 0xcc
    val = ord(state.opcode[0])
    state.opcode = state.opcode[1:]
    state.prev_opcode = val
    state.opcode_offset += 1
    return val
def peek8(state):
    """Return the next opcode byte without consuming it; same exhaustion
    behavior as read8 (flags invalid and returns 0xcc)."""
    if not state.opcode:
        state.invalid = True
        state.insufficient_length = True
        state.opcode = ""
        return 0xcc
    return ord(state.opcode[0])
def read16(state):
    """Read a little-endian 16-bit value (low byte first)."""
    low = read8(state)
    return low | (read8(state) << 8)
def read32(state):
    """Read a little-endian 32-bit value (low word first)."""
    low = read16(state)
    return low | (read16(state) << 16)
def read64(state):
    """Read a little-endian 64-bit value (low dword first)."""
    low = read32(state)
    return low | (read32(state) << 32)
def read8_signed(state):
    """Read an 8-bit value and sign-extend it."""
    val = read8(state)
    return val - 0x100 if val & 0x80 else val
def read16_signed(state):
    """Read a 16-bit value and sign-extend it."""
    val = read16(state)
    return val - 0x10000 if val & 0x8000 else val
def read32_signed(state):
    """Read a 32-bit value and sign-extend it."""
    val = read32(state)
    return val - 0x100000000 if val & 0x80000000 else val
def read_final_op_size(state):
    """Read an immediate of the effective operand size.  DEC_FLAG_IMM_SX
    encodings take a sign-extended byte, and 8-byte operands take a
    sign-extended 32-bit immediate (as on real x86-64)."""
    if state.flags & DEC_FLAG_IMM_SX:
        return read8_signed(state)
    size = state.final_op_size
    if size == 1:
        return read8(state)
    if size == 2:
        return read16(state)
    if size == 4:
        return read32(state)
    if size == 8:
        return read32_signed(state)
def read_addr_size(state):
    """Read an address-sized value (2/4/8 bytes)."""
    reader = {2: read16, 4: read32, 8: read64}.get(state.addr_size)
    if reader is not None:
        return reader(state)
def read_signed_final_op_size(state):
    """Read a signed immediate of the effective operand size; 4- and
    8-byte operands both use a sign-extended 32-bit immediate."""
    size = state.final_op_size
    if size == 1:
        return read8_signed(state)
    if size == 2:
        return read16_signed(state)
    if size in (4, 8):
        return read32_signed(state)
def update_operation_for_addr_size(state):
    """Pick the mnemonic variant from a (16-bit, 32-bit, 64-bit) tuple
    keyed by the current address size (used for jcxz/loop-style ops)."""
    index = {4: 1, 8: 2}.get(state.addr_size, 0)
    state.result.operation = state.result.operation[index]
def process_encoding(state, encoding):
    # Apply an [operation, encoder-name] pair: load the encoder's flag word,
    # fix up operand sizes/order, run the encoder to read the operands, then
    # validate rep/lock prefix semantics.  Statement order matters: sizes
    # must be final before the encoder callback consumes immediate bytes.
    state.result.operation = encoding[0]
    encoder = encoding[1]
    state.flags = Encoding[encoder][1]
    if state.using64 and (state.flags & DEC_FLAG_INVALID_IN_64BIT):
        state.invalid = True
        return
    if state.using64 and (state.flags & DEC_FLAG_DEFAULT_TO_64BIT):
        # Instructions that default to 64-bit in long mode; 0x66 drops them
        # to 16-bit rather than 32-bit.
        if state.op_prefix:
            state.op_size = 2
        else:
            state.op_size = 8
    state.final_op_size = get_final_op_size(state)
    if state.flags & DEC_FLAG_FLIP_OPERANDS:
        # Direction-bit variants: operand0/operand1 aliases are swapped so
        # encoders can be shared between both directions.
        state.operand0 = state.result.operands[1]
        state.operand1 = state.result.operands[0]
    else:
        state.operand0 = state.result.operands[0]
        state.operand1 = state.result.operands[1]
    if state.flags & DEC_FLAG_FORCE_16BIT:
        state.final_op_size = 2
    if state.flags & DEC_FLAG_OPERATION_OP_SIZE:
        # Mnemonic is a tuple of per-size variants (16, 32[, 64]-bit).
        if state.final_op_size == 4:
            state.result.operation = state.result.operation[1]
        elif state.final_op_size == 8:
            if len(state.result.operation) < 3:
                # No 64-bit variant exists; fall back to the 32-bit one.
                state.final_op_size = 4
                state.result.operation = state.result.operation[1]
            else:
                state.result.operation = state.result.operation[2]
        else:
            state.result.operation = state.result.operation[0]
    if state.flags & DEC_FLAG_REP:
        # String ops: any rep prefix is meaningful.
        if state.rep != None:
            state.result.flags |= FLAG_REP
    elif state.flags & DEC_FLAG_REP_COND:
        # cmps/scas distinguish repe from repne.
        if state.rep == "repne":
            state.result.flags |= FLAG_REPNE
        elif state.rep == "repe":
            state.result.flags |= FLAG_REPE
    # Invoke the encoder function to decode the operand bytes.
    Encoding[encoder][0](state)
    if state.result.operation == None:
        state.invalid = True
    if state.result.flags & FLAG_LOCK:
        # Ensure instruction allows lock and it has proper semantics
        if (state.flags & DEC_FLAG_LOCK) == 0:
            state.invalid = True
        elif state.result.operation == "cmp":
            state.invalid = True
        elif (state.result.operands[0].operand != "mem") and (state.result.operands[1].operand != "mem"):
            # lock is only valid with a memory destination/source.
            state.invalid = True
def process_opcode(state, map, opcode):
    """Decode through a dense opcode table indexed directly by opcode byte."""
    encoding = map[opcode]
    process_encoding(state, encoding)
def process_sparse_opcode(state, map, opcode):
    """Decode through a sparse [opcode, operation, encoder] table.

    The table is sorted by opcode byte and binary-searched.  If the opcode
    is not found, ``state.result.operation`` is left as None, which the
    caller treats as an invalid instruction.
    """
    state.result.operation = None
    low = 0
    high = len(map) - 1
    while low <= high:
        # Floor division is required: "/" yields a float under Python 3,
        # which then fails as a list index.
        i = (low + high) // 2
        if opcode > map[i][0]:
            low = i + 1
        elif opcode < map[i][0]:
            high = i - 1
        else:
            process_encoding(state, [map[i][1], map[i][2]])
            break
def get_final_segment(state, seg):
    """Return the instruction's segment override, or the default ``seg``
    when no override prefix was seen."""
    override = state.result.segment
    return seg if override is None else override
def set_mem_operand(state, oper, rmdef, immed):
    """Fill ``oper`` as a memory operand from an RM definition triple
    (base, index, default segment) plus a displacement value."""
    base, index, default_seg = rmdef[0], rmdef[1], rmdef[2]
    oper.operand = "mem"
    oper.components = [base, index]
    oper.immediate = immed
    oper.segment = get_final_segment(state, default_seg)
def decode_rm(state, rm_oper, reg_list, rm_size):
    # Decode a ModRM byte (plus any SIB byte and displacement it implies)
    # into rm_oper, and return the 3-bit reg field for the caller.  The
    # order of byte reads must exactly follow the encoding.
    rm_byte = read8(state)
    mod = rm_byte >> 6
    rm = rm_byte & 7
    reg_field = (rm_byte >> 3) & 7
    rm_oper.size = rm_size
    if state.addr_size == 2:
        # 16-bit addressing: base/index pairs come from RM16Components.
        if mod == 0:
            if rm == 6:
                # mod=0, rm=6 is displacement-only (RM16Components entry 8)
                rm = 8
                set_mem_operand(state, rm_oper, RM16Components[rm], read16(state))
            else:
                set_mem_operand(state, rm_oper, RM16Components[rm], 0)
        elif mod == 1:
            # 8-bit signed displacement
            set_mem_operand(state, rm_oper, RM16Components[rm], read8_signed(state))
        elif mod == 2:
            # 16-bit signed displacement
            set_mem_operand(state, rm_oper, RM16Components[rm], read16_signed(state))
        elif mod == 3:
            # Register operand
            rm_oper.operand = reg_list[rm]
        if rm_oper.components[0] == None:
            # No base register: wrap the displacement to the 16-bit space.
            rm_oper.immediate &= 0xffff
    else:
        # 32/64-bit addressing.
        addr_reg_list = get_reg_list_for_addr_size(state)
        # REX.B / REX.X extend base and index register numbers to 8-15.
        if state.rex_rm1:
            rm_reg1_offset = 8
        else:
            rm_reg1_offset = 0
        if state.rex_rm2:
            rm_reg2_offset = 8
        else:
            rm_reg2_offset = 0
        seg = None
        rm_oper.operand = "mem"
        if (mod != 3) and (rm == 4):
            # SIB byte present
            sib_byte = read8(state)
            base = sib_byte & 7
            index = (sib_byte >> 3) & 7
            rm_oper.scale = 1 << (sib_byte >> 6)
            # mod=0 with base=5 means no base register, disp32 instead.
            if (mod != 0) or (base != 5):
                rm_oper.components[0] = addr_reg_list[base + rm_reg1_offset]
            # Index 4 without REX.X encodes "no index".
            if (index + rm_reg2_offset) != 4:
                rm_oper.components[1] = addr_reg_list[index + rm_reg2_offset]
            if mod == 0:
                if base == 5:
                    rm_oper.immediate = read32_signed(state)
            elif mod == 1:
                rm_oper.immediate = read8_signed(state)
            elif mod == 2:
                rm_oper.immediate = read32_signed(state)
            # Stack-pointer/frame-pointer bases default to SS, others to DS.
            if ((base + rm_reg1_offset) == 4) or ((base + rm_reg1_offset) == 5):
                seg = "ss"
            else:
                seg = "ds"
        else:
            if mod == 0:
                if rm == 5:
                    # Displacement-only; RIP-relative in 64-bit mode.
                    rm_oper.immediate = read32_signed(state)
                    if state.addr_size == 8:
                        rm_oper.rip_relative = True
                        state.result.flags |= FLAG_64BIT_ADDRESS
                else:
                    rm_oper.components[0] = addr_reg_list[rm + rm_reg1_offset]
                seg = "ds"
            elif mod == 1:
                rm_oper.components[0] = addr_reg_list[rm + rm_reg1_offset]
                rm_oper.immediate = read8_signed(state)
                # rm=5 (ebp-relative) defaults to the stack segment.
                if rm == 5:
                    seg = "ss"
                else:
                    seg = "ds"
            elif mod == 2:
                rm_oper.components[0] = addr_reg_list[rm + rm_reg1_offset]
                rm_oper.immediate = read32_signed(state)
                if rm == 5:
                    seg = "ss"
                else:
                    seg = "ds"
            elif mod == 3:
                # Register operand
                rm_oper.operand = reg_list[rm + rm_reg1_offset]
        if seg != None:
            rm_oper.segment = get_final_segment(state, seg)
    return reg_field
def decode_rm_reg(state, rm_oper, rm_reg_list, rm_size, reg_oper, reg_list, reg_size):
    """Decode a ModRM byte into an rm operand plus (optionally) the
    register operand selected by the reg field."""
    reg = decode_rm(state, rm_oper, rm_reg_list, rm_size)
    if reg_oper is None:
        return
    # REX.R extends the reg field to registers 8-15.
    offset = 8 if state.rex_reg else 0
    reg_oper.size = reg_size
    reg_oper.operand = reg_list[reg + offset]
def set_operand_to_es_edi(state, oper, size):
    """Set ``oper`` to the ES:[e/rdi] string-destination operand
    (segment overrides do not apply to the destination)."""
    regs = get_reg_list_for_addr_size(state)
    oper.operand = "mem"
    oper.components[0] = regs[7]  # di/edi/rdi
    oper.size = size
    oper.segment = "es"
def set_operand_to_ds_esi(state, oper, size):
    """Set ``oper`` to the DS:[e/rsi] string-source operand (segment
    overrides are honored)."""
    regs = get_reg_list_for_addr_size(state)
    oper.operand = "mem"
    oper.components[0] = regs[6]  # si/esi/rsi
    oper.size = size
    oper.segment = get_final_segment(state, "ds")
def set_operand_to_imm_addr(state, oper):
    """Set ``oper`` to an absolute moffs-style memory operand whose
    displacement is an address-sized immediate."""
    oper.operand = "mem"
    oper.size = state.final_op_size
    oper.segment = get_final_segment(state, "ds")
    oper.immediate = read_addr_size(state)
def set_operand_to_eax_final_op_size(state, oper):
    """Set ``oper`` to the accumulator (al/ax/eax/rax) of the effective size."""
    oper.size = state.final_op_size
    oper.operand = get_reg_list_for_final_op_size(state)[0]
def set_operand_to_op_reg(state, oper):
    """Set ``oper`` to the register encoded in the opcode's low 3 bits,
    extended to r8-r15 by REX.B."""
    regs = get_reg_list_for_final_op_size(state)
    index = state.prev_opcode & 7
    if state.rex_rm1:
        index += 8
    oper.operand = regs[index]
    oper.size = state.final_op_size
def set_operand_to_imm(state, oper):
    """Set ``oper`` to an immediate of the effective operand size."""
    oper.size = state.final_op_size
    oper.operand = "imm"
    oper.immediate = read_final_op_size(state)
def set_operand_to_imm8(state, oper):
    """Set ``oper`` to an 8-bit immediate."""
    oper.size = 1
    oper.operand = "imm"
    oper.immediate = read8(state)
def set_operand_to_imm16(state, oper):
    """Set ``oper`` to a 16-bit immediate."""
    oper.size = 2
    oper.operand = "imm"
    oper.immediate = read16(state)
def decode_sse_prefix(state):
    """Classify and consume the SSE mandatory prefix.

    Returns the table column: 0 = none, 1 = 0x66, 2 = 0xf2 (repne),
    3 = 0xf3 (repe).  The consumed prefix is cleared from the state so it
    is not also interpreted as an operand-size/rep prefix.
    """
    if state.op_prefix:
        state.op_prefix = False
        return 1
    if state.rep == "repne":
        state.rep = None
        return 2
    if state.rep == "repe":
        state.rep = None
        return 3
    return 0
def get_size_for_sse_type(type):
    """Memory operand size per SSE prefix class: 0xf2 scalar-double = 8,
    0xf3 scalar-single = 4, otherwise the full 16-byte vector."""
    return {2: 8, 3: 4}.get(type, 16)
def get_operand_for_sse_entry_type(state, type, operand_index):
    """Pick operand 0 or 1, swapping the two for "sse_128_flip" entries
    (store-direction encodings)."""
    if type == "sse_128_flip":
        operand_index ^= 1
    return state.operand0 if operand_index == 0 else state.operand1
def get_reg_list_for_sse_entry_type(state, type):
    """Register name table for an SSE table entry type."""
    if type in ("mmx_32", "mmx_64"):
        return MMXRegList
    if type == "gpr_32_or_64":
        return Reg64List if state.final_op_size == 8 else Reg32List
    return XMMRegList
def get_size_for_sse_entry_type(state, type):
    """Operand size in bytes for an SSE table entry type; GPR entries
    follow the effective operand size, unknown types default to 16."""
    if type == "gpr_32_or_64":
        return 8 if state.final_op_size == 8 else 4
    sizes = {"sse_16": 2, "sse_32": 4, "mmx_32": 4, "sse_64": 8, "mmx_64": 8}
    return sizes.get(type, 16)
def update_operation_for_sse_entry_type(state, type):
    """For gpr_32_or_64 entries the mnemonic is a (32-bit, 64-bit) pair;
    select the member matching the effective operand size."""
    if type != "gpr_32_or_64":
        return
    index = 1 if state.final_op_size == 8 else 0
    state.result.operation = state.result.operation[index]
def invalid_decode(state):
    """Encoder handler for undefined opcodes: mark the result invalid."""
    state.invalid = True
def decode_two_byte(state):
    """Decode a 0x0f-prefixed opcode, including the 0x0f38/0x0f3a
    three-byte maps (0x0f3a always carries a trailing imm8)."""
    opcode = read8(state)
    if opcode not in (0x38, 0x3a):
        process_opcode(state, TwoByteOpcodeMap, opcode)
        return
    sparse_map = ThreeByte0F38Map if opcode == 0x38 else ThreeByte0F3AMap
    process_sparse_opcode(state, sparse_map, read8(state))
    if opcode == 0x3a:
        set_operand_to_imm8(state, state.result.operands[2])
def decode_fpu(state):
    """Dispatch an x87 opcode: register-form (mod == 3) and memory-form
    encodings use separate tables, each keyed by the ModRM reg field."""
    mod_rm = peek8(state)
    reg = (mod_rm >> 3) & 7
    op = state.result.operation
    reg_form = (mod_rm & 0xc0) == 0xc0
    table = FPURegOpcodeMap[op] if reg_form else FPUMemOpcodeMap[op]
    process_encoding(state, table[reg])
def decode_no_operands(state):
    """Handler for instructions that take no operands; nothing to decode."""
    pass
def decode_reg_rm(state):
    """reg, rm with the rm operand's memory size adjusted by flags
    (2x for bound-style, +2 for far pointers, 0 for lea-style).  When the
    size was adjusted, a register-form rm operand is invalid."""
    regs = get_reg_list_for_final_op_size(state)
    rm_size = state.final_op_size
    size_mode = state.flags & DEC_FLAG_REG_RM_SIZE_MASK
    if size_mode == DEC_FLAG_REG_RM_2X_SIZE:
        rm_size *= 2
    elif size_mode == DEC_FLAG_REG_RM_FAR_SIZE:
        rm_size += 2
    elif size_mode == DEC_FLAG_REG_RM_NO_SIZE:
        rm_size = 0
    decode_rm_reg(state, state.operand1, regs, rm_size, state.operand0, regs, state.final_op_size)
    if (rm_size != state.final_op_size) and (state.operand1.operand != "mem"):
        state.invalid = True
def decode_reg_rm_imm(state):
    """reg, rm, imm (imul-style three-operand form)."""
    regs = get_reg_list_for_final_op_size(state)
    size = state.final_op_size
    decode_rm_reg(state, state.operand1, regs, size, state.operand0, regs, size)
    set_operand_to_imm(state, state.result.operands[2])
def decode_rm_reg_imm8(state):
    """rm, reg, imm8 (shld/shrd-style immediate count)."""
    regs = get_reg_list_for_final_op_size(state)
    size = state.final_op_size
    decode_rm_reg(state, state.operand0, regs, size, state.operand1, regs, size)
    set_operand_to_imm8(state, state.result.operands[2])
def decode_rm_reg_cl(state):
    """rm, reg, cl (shld/shrd-style cl count)."""
    regs = get_reg_list_for_final_op_size(state)
    size = state.final_op_size
    decode_rm_reg(state, state.operand0, regs, size, state.operand1, regs, size)
    third = state.result.operands[2]
    third.operand = "cl"
    third.size = 1
def decode_eax_imm(state):
    """Accumulator with an immediate (short-form ALU encodings)."""
    set_operand_to_eax_final_op_size(state, state.operand0)
    set_operand_to_imm(state, state.operand1)
def decode_push_pop_seg(state):
    """push/pop of a segment register implied by the opcode byte
    (0x06/0x0e/0x16/0x1e; fs/gs live in the two-byte map at 0xa0/0xa8)."""
    segments = ["es", "cs", "ss", "ds", "fs", "gs"]
    index = state.prev_opcode >> 3
    if state.prev_opcode >= 0xa0:
        index -= 16  # rebase the 0x0f 0xa0/0xa8 encodings onto fs/gs
    state.operand0.operand = segments[index]
    state.operand0.size = state.final_op_size
def decode_op_reg(state):
    """Single register operand encoded in the opcode's low bits."""
    set_operand_to_op_reg(state, state.operand0)
def decode_eax_op_reg(state):
    """Accumulator with an opcode-encoded register (xchg eax, reg forms)."""
    set_operand_to_eax_final_op_size(state, state.operand0)
    set_operand_to_op_reg(state, state.operand1)
def decode_op_reg_imm(state):
    """Opcode-encoded register with an immediate (mov reg, imm).  The
    8-byte operand form reads a full 64-bit immediate."""
    set_operand_to_op_reg(state, state.operand0)
    state.operand1.operand = "imm"
    state.operand1.size = state.final_op_size
    reader = read64 if state.final_op_size == 8 else read_final_op_size
    state.operand1.immediate = reader(state)
def decode_nop(state):
    """Opcode 0x90: plain nop, unless REX.B makes it xchg with r8."""
    if not state.rex_rm1:
        return
    state.result.operation = "xchg"
    set_operand_to_eax_final_op_size(state, state.operand0)
    set_operand_to_op_reg(state, state.operand1)
def decode_imm(state):
    """Single immediate operand of the effective size."""
    set_operand_to_imm(state, state.operand0)
def decode_imm16_imm8(state):
    """enter-style operands: a 16-bit immediate then an 8-bit immediate."""
    set_operand_to_imm16(state, state.operand0)
    set_operand_to_imm8(state, state.operand1)
def decode_edi_dx(state):
    """ins: ES:[edi] destination with the dx port register."""
    set_operand_to_es_edi(state, state.operand0, state.final_op_size)
    state.operand1.size = 2
    state.operand1.operand = "dx"
def decode_dx_esi(state):
    """outs: dx port register with the DS:[esi] source."""
    state.operand0.size = 2
    state.operand0.operand = "dx"
    set_operand_to_ds_esi(state, state.operand1, state.final_op_size)
def decode_rel_imm(state):
    """Relative branch target: signed displacement added to the address
    of the next instruction (state.addr plus bytes consumed so far)."""
    oper = state.operand0
    oper.operand = "imm"
    oper.size = state.op_size
    # opcode_offset must be sampled after the displacement bytes are read.
    disp = read_signed_final_op_size(state)
    oper.immediate = disp + state.addr + state.opcode_offset
def decode_rel_imm_addr_size(state):
    """Relative branch whose mnemonic depends on address size (jcxz/loop)."""
    decode_rel_imm(state)
    update_operation_for_addr_size(state)
def decode_group_rm(state):
    """Group opcode with a single rm operand; the ModRM reg field selects
    the actual operation from the group table."""
    regs = get_reg_list_for_final_op_size(state)
    selector = decode_rm(state, state.operand0, regs, state.final_op_size)
    state.result.operation = GroupOperations[state.result.operation][selector]
def decode_group_rm_imm(state):
    """Group opcode: rm operand plus a full-size immediate."""
    regs = get_reg_list_for_final_op_size(state)
    selector = decode_rm(state, state.operand0, regs, state.final_op_size)
    state.result.operation = GroupOperations[state.result.operation][selector]
    set_operand_to_imm(state, state.operand1)
def decode_group_rm_imm8v(state):
    """Group opcode: rm operand plus an 8-bit immediate (shift counts)."""
    regs = get_reg_list_for_final_op_size(state)
    selector = decode_rm(state, state.operand0, regs, state.final_op_size)
    state.result.operation = GroupOperations[state.result.operation][selector]
    set_operand_to_imm8(state, state.operand1)
def decode_group_rm_one(state):
    """Group opcode: rm shifted by an implicit count of 1."""
    regs = get_reg_list_for_final_op_size(state)
    selector = decode_rm(state, state.operand0, regs, state.final_op_size)
    state.result.operation = GroupOperations[state.result.operation][selector]
    one = state.operand1
    one.operand = "imm"
    one.size = 1
    one.immediate = 1
def decode_group_rm_cl(state):
    """Group opcode: rm shifted by the count in cl."""
    regs = get_reg_list_for_final_op_size(state)
    selector = decode_rm(state, state.operand0, regs, state.final_op_size)
    state.result.operation = GroupOperations[state.result.operation][selector]
    state.operand1.size = 1
    state.operand1.operand = "cl"
def decode_group_f6_f7(state):
    """Group 3 (0xf6/0xf7): the test form carries an immediate, the rest
    take only the rm operand.  lock is legal only for not/neg."""
    regs = get_reg_list_for_final_op_size(state)
    selector = decode_rm(state, state.operand0, regs, state.final_op_size)
    state.result.operation = GroupOperations[state.result.operation][selector]
    if state.result.operation == "test":
        set_operand_to_imm(state, state.operand1)
    # lock only has defined semantics for the read-modify-write forms.
    if (state.result.flags & FLAG_LOCK) and state.result.operation not in ("not", "neg"):
        state.invalid = True
def decode_group_ff(state):
    # Group 5 (0xff): inc/dec/call/callf/jmp/jmpf/push on an rm operand,
    # with 64-bit default sizes for the control-transfer and push forms.
    if state.using64:
        # Default to 64-bit for jumps and calls and pushes
        rm = peek8(state)
        reg_field = (rm >> 3) & 7
        if (reg_field == 2) or (reg_field == 4):
            # call / jmp: 0x66 drops these to 32-bit, otherwise 64-bit.
            if state.op_prefix:
                state.final_op_size = 4
                state.op_size = 4
            else:
                state.final_op_size = 8
                state.op_size = 8
        elif reg_field == 6:
            # push: 0x66 drops to 16-bit, otherwise 64-bit.
            if state.op_prefix:
                state.final_op_size = 2
                state.op_size = 2
            else:
                state.final_op_size = 8
                state.op_size = 8
    reg_list = get_reg_list_for_final_op_size(state)
    reg_field = decode_rm(state, state.operand0, reg_list, state.final_op_size)
    state.result.operation = GroupOperations[state.result.operation][reg_field]
    # Check for valid far jump/call semantics
    if (state.result.operation == "callf") or (state.result.operation == "jmpf"):
        if state.operand0.operand != "mem":
            # Far forms require a memory operand (offset + selector).
            state.invalid = True
        # Far pointers carry an extra 16-bit segment selector.
        state.operand0.size += 2
    # Check for valid locking semantics
    if (state.result.flags & FLAG_LOCK) and (state.result.operation != "inc") and (state.result.operation != "dec"):
        state.invalid = True
def decode_group_0f00(state):
    """Group 6 (0x0f 0x00): most forms operate on 16-bit selectors, so
    the operand size is forced to 2 except for the register forms of the
    first two entries."""
    rm = peek8(state)
    mod_field = (rm >> 6) & 3
    reg_field = (rm >> 3) & 7
    selector_sized = ((mod_field != 3) and (reg_field < 2)) or (2 <= reg_field <= 5)
    if selector_sized:
        state.final_op_size = 2
    regs = get_reg_list_for_final_op_size(state)
    reg_field = decode_rm(state, state.operand0, regs, state.final_op_size)
    state.result.operation = GroupOperations[state.result.operation][reg_field]
def decode_group_0f01(state):
    # Group 7 (0x0f 0x01): descriptor-table and system instructions, plus
    # individual register-form specials selected when mod == 3.
    rm = peek8(state)
    mod_field = (rm >> 6) & 3
    reg_field = (rm >> 3) & 7
    rm_field = rm & 7
    if (mod_field == 3) and (reg_field != 4) and (reg_field != 6):
        # Register forms are distinct instructions keyed by reg and rm;
        # the ModRM byte is fully consumed here.
        state.result.operation = Group0F01RegOperations[reg_field][rm_field]
        read8(state)
    else:
        if reg_field < 4:
            # Descriptor-table operand: 2-byte limit plus a 4-byte
            # (or 8-byte in long mode) base.
            if state.using64:
                state.final_op_size = 10
            else:
                state.final_op_size = 6
        elif ((mod_field != 3) and (reg_field == 4)) or (reg_field == 6):
            # 16-bit operand (smsw/lmsw per the group 7 layout).
            state.final_op_size = 2
        elif reg_field == 7:
            # Byte-sized memory operand.
            state.final_op_size = 1
        reg_list = get_reg_list_for_final_op_size(state)
        reg_field = decode_rm(state, state.operand0, reg_list, state.final_op_size)
        state.result.operation = GroupOperations[state.result.operation][reg_field]
def decode_group_0fae(state):
    # Group 15 (0x0f 0xae): memory forms with varying operand sizes, plus
    # register-form fence instructions when mod == 3.
    rm = peek8(state)
    mod_field = (rm >> 6) & 3
    reg_field = (rm >> 3) & 7
    if mod_field == 3:
        # Register form: operations come from the next group table
        # (state.result.operation is an index into GroupOperations here).
        state.result.operation = GroupOperations[state.result.operation + 1][reg_field]
        read8(state)
    else:
        if (reg_field & 2) == 0:
            # 512-byte save area (presumably fxsave/fxrstor -- per table).
            state.final_op_size = 512
        elif (reg_field & 6) == 2:
            # 32-bit operand (presumably ldmxcsr/stmxcsr -- per table).
            state.final_op_size = 4
        else:
            state.final_op_size = 1
        reg_list = get_reg_list_for_final_op_size(state)
        reg_field = decode_rm(state, state.operand0, reg_list, state.final_op_size)
        state.result.operation = GroupOperations[state.result.operation][reg_field]
def decode_0fb8(state):
    # 0x0f 0xb8 decodes differently with and without the repe (0xf3)
    # prefix: without it a relative immediate branch target, with it a
    # reg,rm form (popcnt-style -- confirm against the opcode table).
    if state.rep != "repe":
        if state.using64:
            # Branch targets default to 64-bit in long mode.
            if state.op_prefix:
                state.op_size = 4
            else:
                state.op_size = 8
        state.final_op_size = get_final_op_size(state)
        state.operand0.operand = "imm"
        state.operand0.size = state.final_op_size
        state.operand0.immediate = read_signed_final_op_size(state)
        # Make the target absolute: relative to the next instruction.
        state.operand0.immediate += state.addr + state.opcode_offset
    else:
        size = state.final_op_size
        reg_list = get_reg_list_for_final_op_size(state)
        # NOTE(review): this branch tests DEC_FLAG_RM_SIZE_MASK while the
        # otherwise-identical logic in decode_reg_rm uses
        # DEC_FLAG_REG_RM_SIZE_MASK; confirm both constants are the same
        # mask (or whether one is a typo).
        if (state.flags & DEC_FLAG_RM_SIZE_MASK) == DEC_FLAG_REG_RM_2X_SIZE:
            size *= 2
        elif (state.flags & DEC_FLAG_RM_SIZE_MASK) == DEC_FLAG_REG_RM_FAR_SIZE:
            size += 2
        elif (state.flags & DEC_FLAG_RM_SIZE_MASK) == DEC_FLAG_REG_RM_NO_SIZE:
            size = 0
        decode_rm_reg(state, state.operand1, reg_list, size, state.operand0, reg_list, state.final_op_size)
        if (size != state.final_op_size) and (state.operand1.operand != "mem"):
            state.invalid = True
def decode_rm_sreg_v(state):
    """mov between rm and a segment register selected by the reg field;
    reg values 6-7 are undefined and a move into cs is invalid."""
    regs = get_reg_list_for_final_op_size(state)
    sreg_index = decode_rm(state, state.operand0, regs, state.final_op_size)
    if sreg_index >= 6:
        state.invalid = True
    state.operand1.operand = ["es", "cs", "ss", "ds", "fs", "gs", None, None][sreg_index]
    state.operand1.size = 2
    if state.result.operands[0].operand == "cs":
        state.invalid = True
def decode_rm8(state):
    """Single byte-sized rm operand."""
    decode_rm(state, state.operand0, get_byte_reg_list(state), 1)
def decode_rm_v(state):
    """Single rm operand of the effective operand size."""
    decode_rm(state, state.operand0, get_reg_list_for_final_op_size(state), state.final_op_size)
def decode_far_imm(state):
    """Far pointer immediate: the offset (operand 1) is encoded first in
    the byte stream, then the 16-bit segment selector (operand 0)."""
    set_operand_to_imm(state, state.operand1)
    set_operand_to_imm16(state, state.operand0)
def decode_eax_addr(state):
    """Accumulator with an absolute moffs address."""
    set_operand_to_eax_final_op_size(state, state.operand0)
    set_operand_to_imm_addr(state, state.operand1)
    if state.addr_size == 8:
        state.result.flags |= FLAG_64BIT_ADDRESS
def decode_edi_esi(state):
    """movs: ES:[edi] destination, DS:[esi] source."""
    size = state.final_op_size
    set_operand_to_es_edi(state, state.operand0, size)
    set_operand_to_ds_esi(state, state.operand1, size)
def decode_edi_eax(state):
    """stos: ES:[edi] destination with the accumulator."""
    set_operand_to_es_edi(state, state.operand0, state.final_op_size)
    set_operand_to_eax_final_op_size(state, state.operand1)
def decode_eax_esi(state):
    """lods: accumulator with the DS:[esi] source."""
    set_operand_to_eax_final_op_size(state, state.operand0)
    set_operand_to_ds_esi(state, state.operand1, state.final_op_size)
def decode_al_ebx_al(state):
    """xlat: al with the byte at DS:[e/rbx + al]."""
    regs = get_reg_list_for_addr_size(state)
    dest = state.operand0
    dest.operand = "al"
    dest.size = 1
    mem = state.operand1
    mem.operand = "mem"
    mem.components = [regs[3], "al"]  # bx/ebx/rbx base, al as index
    mem.size = 1
    mem.segment = get_final_segment(state, "ds")
def decode_eax_imm8(state):
    """Accumulator with an 8-bit immediate (in/out port forms)."""
    set_operand_to_eax_final_op_size(state, state.operand0)
    set_operand_to_imm8(state, state.operand1)
def decode_eax_dx(state):
    """Accumulator with the dx port register."""
    set_operand_to_eax_final_op_size(state, state.operand0)
    state.operand1.size = 2
    state.operand1.operand = "dx"
def decode_3dnow(state):
    """Decode a 3DNow! instruction (0x0f 0x0f): the MMX ModRM operands
    come first, then a trailing imm8 selects the operation via a binary
    search of the sorted Sparse3DNowOpcodes table.  Unknown opcode bytes
    leave the operation as None, which marks the instruction invalid.
    """
    decode_rm_reg(state, state.operand1, MMXRegList, 8, state.operand0, MMXRegList, 8)
    op = read8(state)
    state.result.operation = None
    low = 0
    high = len(Sparse3DNowOpcodes) - 1
    while low <= high:
        # Floor division is required: "/" yields a float under Python 3,
        # which then fails as a list index.
        i = (low + high) // 2
        if op > Sparse3DNowOpcodes[i][0]:
            low = i + 1
        elif op < Sparse3DNowOpcodes[i][0]:
            high = i - 1
        else:
            state.result.operation = Sparse3DNowOpcodes[i][1]
            break
def decode_sse_table(state):
    # Decode an SSE instruction through SSETable: the mandatory prefix
    # selects the column and the ModRM mod field (register vs. memory
    # form) selects the row, yielding [operation, dest type, src type].
    type = decode_sse_prefix(state)
    rm = peek8(state)
    mod_field = (rm >> 6) & 3
    entry = SSETable[state.result.operation]
    if mod_field == 3:
        op_entry = entry[0][type]
    else:
        op_entry = entry[1][type]
    state.result.operation = op_entry[0]
    # op_entry[1] describes the destination, op_entry[2] the source.
    decode_rm_reg(state, get_operand_for_sse_entry_type(state, op_entry[2], 1),
        get_reg_list_for_sse_entry_type(state, op_entry[2]), get_size_for_sse_entry_type(state, op_entry[2]),
        get_operand_for_sse_entry_type(state, op_entry[1], 0),
        get_reg_list_for_sse_entry_type(state, op_entry[1]), get_size_for_sse_entry_type(state, op_entry[1]))
    if state.flags & DEC_FLAG_INC_OPERATION_FOR_64:
        # GPR-typed entries carry (32-bit, 64-bit) mnemonic pairs.
        update_operation_for_sse_entry_type(state, op_entry[1])
        update_operation_for_sse_entry_type(state, op_entry[2])
def decode_sse_table_imm8(state):
    # Same table-driven decode as decode_sse_table, plus a trailing imm8.
    # NOTE(review): duplicates decode_sse_table's preamble verbatim --
    # candidate for a shared helper.
    type = decode_sse_prefix(state)
    rm = peek8(state)
    mod_field = (rm >> 6) & 3
    entry = SSETable[state.result.operation]
    if mod_field == 3:
        op_entry = entry[0][type]
    else:
        op_entry = entry[1][type]
    state.result.operation = op_entry[0]
    decode_rm_reg(state, get_operand_for_sse_entry_type(state, op_entry[2], 1),
        get_reg_list_for_sse_entry_type(state, op_entry[2]), get_size_for_sse_entry_type(state, op_entry[2]),
        get_operand_for_sse_entry_type(state, op_entry[1], 0),
        get_reg_list_for_sse_entry_type(state, op_entry[1]), get_size_for_sse_entry_type(state, op_entry[1]))
    if state.flags & DEC_FLAG_INC_OPERATION_FOR_64:
        update_operation_for_sse_entry_type(state, op_entry[1])
        update_operation_for_sse_entry_type(state, op_entry[2])
    # Trailing 8-bit immediate in the third operand slot.
    set_operand_to_imm8(state, state.result.operands[2])
def decode_sse_table_mem8(state):
    # Same table-driven decode as decode_sse_table, but any memory operand
    # is forced to byte size afterwards.
    # NOTE(review): duplicates decode_sse_table's preamble verbatim --
    # candidate for a shared helper.
    type = decode_sse_prefix(state)
    rm = peek8(state)
    mod_field = (rm >> 6) & 3
    entry = SSETable[state.result.operation]
    if mod_field == 3:
        op_entry = entry[0][type]
    else:
        op_entry = entry[1][type]
    state.result.operation = op_entry[0]
    decode_rm_reg(state, get_operand_for_sse_entry_type(state, op_entry[2], 1),
        get_reg_list_for_sse_entry_type(state, op_entry[2]), get_size_for_sse_entry_type(state, op_entry[2]),
        get_operand_for_sse_entry_type(state, op_entry[1], 0),
        get_reg_list_for_sse_entry_type(state, op_entry[1]), get_size_for_sse_entry_type(state, op_entry[1]))
    if state.flags & DEC_FLAG_INC_OPERATION_FOR_64:
        update_operation_for_sse_entry_type(state, op_entry[1])
        update_operation_for_sse_entry_type(state, op_entry[2])
    # Memory operands are byte-sized regardless of the table entry.
    if state.operand0.operand == "mem":
        state.operand0.size = 1
    if state.operand1.operand == "mem":
        state.operand1.size = 1
def decode_sse(state):
    """SSE op whose mnemonic is selected by the mandatory prefix; memory
    forms of the scalar variants use the element size, register forms the
    full 16 bytes."""
    type = decode_sse_prefix(state)
    mod_field = (peek8(state) >> 6) & 3
    state.result.operation = state.result.operation[type]
    size = 16 if mod_field == 3 else get_size_for_sse_type(type)
    decode_rm_reg(state, state.operand1, XMMRegList, size, state.operand0, XMMRegList, 16)
def decode_sse_single(state):
    # SSE op that only exists in the no-prefix and 0xf3 (repe) forms; the
    # 0x66 and 0xf2 prefixes make it invalid.
    type = decode_sse_prefix(state)
    rm = peek8(state)
    mod_field = (rm >> 6) & 3
    if (type == 1) or (type == 2):
        state.invalid = True
    else:
        state.result.operation = state.result.operation[type & 1]
        if mod_field == 3:
            size = 16
        else:
            size = get_size_for_sse_type(type)
        # NOTE(review): `size` is computed but never used -- the call
        # below passes a fixed 16 for the rm operand, unlike decode_sse
        # which passes `size`.  Possibly intentional, possibly a bug;
        # confirm against the reference implementation.
        decode_rm_reg(state, state.operand1, XMMRegList, 16, state.operand0, XMMRegList, 16)
def decode_sse_packed(state):
    """Packed SSE op: only the no-prefix and 0x66 forms exist; the
    repe/repne prefixes make the encoding invalid."""
    type = decode_sse_prefix(state)
    if type in (2, 3):
        state.invalid = True
        return
    state.result.operation = state.result.operation[type & 1]
    decode_rm_reg(state, state.operand1, XMMRegList, 16, state.operand0, XMMRegList, 16)
def decode_mmx(state):
    """MMX op, promoted to the XMM form when the 0x66 prefix is present."""
    regs, size = (XMMRegList, 16) if state.op_prefix else (MMXRegList, 8)
    decode_rm_reg(state, state.operand1, regs, size, state.operand0, regs, size)
def decode_mmx_sse_only(state):
    """Op that exists only in the 0x66 (XMM) form; the MMX form is invalid."""
    if not state.op_prefix:
        state.invalid = True
        return
    decode_rm_reg(state, state.operand1, XMMRegList, 16, state.operand0, XMMRegList, 16)
def decode_mmx_group(state):
    """Group-encoded MMX/SSE shift-by-immediate: the reg field selects
    the operation; column 1 of the group entry is the 0x66 (XMM) form."""
    if state.op_prefix:
        regs, size, column = XMMRegList, 16, 1
    else:
        regs, size, column = MMXRegList, 8, 0
    selector = decode_rm(state, state.operand0, regs, size)
    state.result.operation = MMXGroupOperations[state.result.operation][selector][column]
    set_operand_to_imm8(state, state.operand1)
def decode_pinsrw(state):
    # Decode a pinsrw-style opcode via SSETable: the table row is chosen by
    # the mandatory prefix and by whether rm is a register (mod == 3) or
    # memory, and each entry supplies the operation plus per-operand
    # reg-list/size selectors.  A third operand, an 8-bit immediate, is
    # appended, and a memory source is narrowed to 2 bytes (a word insert).
    type = decode_sse_prefix(state)
    rm = peek8(state)
    mod_field = (rm >> 6) & 3
    entry = SSETable[state.result.operation]
    if mod_field == 3:
        op_entry = entry[0][type]
    else:
        op_entry = entry[1][type]
    state.result.operation = op_entry[0]
    # op_entry[1] describes operand 0 (dest), op_entry[2] operand 1 (src)
    decode_rm_reg(state, get_operand_for_sse_entry_type(state, op_entry[2], 1),
        get_reg_list_for_sse_entry_type(state, op_entry[2]), get_size_for_sse_entry_type(state, op_entry[2]),
        get_operand_for_sse_entry_type(state, op_entry[1], 0),
        get_reg_list_for_sse_entry_type(state, op_entry[1]), get_size_for_sse_entry_type(state, op_entry[1]))
    if state.flags & DEC_FLAG_INC_OPERATION_FOR_64:
        # 64-bit operand forms use the next operation in sequence
        update_operation_for_sse_entry_type(state, op_entry[1])
        update_operation_for_sse_entry_type(state, op_entry[2])
    set_operand_to_imm8(state, state.result.operands[2])
    if state.operand1.operand == "mem":
        state.operand1.size = 2
def decode_reg_cr(state):
    # Decode a mov between a general purpose register and a control/debug
    # register.  Here state.result.operation initially holds the special
    # register name list; it is replaced by "mov" at the end.
    if state.final_op_size == 2:
        # these moves have no 16-bit form; promote to 32-bit
        state.final_op_size = 4
    reg_list = get_reg_list_for_final_op_size(state)
    reg = read8(state)
    if state.result.flags & FLAG_LOCK:
        # NOTE(review): LOCK appears to be treated as an alias for the
        # REX.R bit here (extends the special register number, e.g. for
        # CR8 access); the LOCK flag itself is dropped.  Confirm against
        # the AMD64 manual / upstream asmx86.
        state.result.flags &= ~FLAG_LOCK
        state.rex_reg = True
    # rm field (+8 with REX.B) selects the general purpose register
    if state.rex_rm1:
        state.operand0.operand = reg_list[(reg & 7) + 8]
    else:
        state.operand0.operand = reg_list[(reg & 7)]
    state.operand0.size = state.final_op_size
    # reg field (+8 with REX.R) selects the special register
    if state.rex_reg:
        state.operand1.operand = state.result.operation[((reg >> 3) & 7) + 8]
    else:
        state.operand1.operand = state.result.operation[((reg >> 3) & 7)]
    state.operand1.size = state.final_op_size
    state.result.operation = "mov"
def decode_mov_sx_zx_8(state):
    """movsx/movzx with an 8-bit rm source and an operand-size destination."""
    dest_regs = get_reg_list_for_final_op_size(state)
    decode_rm_reg(state, state.operand1, get_byte_reg_list(state), 1,
        state.operand0, dest_regs, state.final_op_size)
def decode_mov_sx_zx_16(state):
    """movsx/movzx with a 16-bit rm source and an operand-size destination."""
    dest_regs = get_reg_list_for_final_op_size(state)
    decode_rm_reg(state, state.operand1, Reg16List, 2,
        state.operand0, dest_regs, state.final_op_size)
def decode_mem16(state):
    """Decode a 2-byte memory-only operand; a register form is invalid."""
    decode_rm(state, state.operand0, Reg32List, 2)
    state.invalid = state.invalid or (state.operand0.operand != "mem")
def decode_mem32(state):
    """Decode a 4-byte memory-only operand; a register form is invalid."""
    decode_rm(state, state.operand0, Reg32List, 4)
    state.invalid = state.invalid or (state.operand0.operand != "mem")
def decode_mem64(state):
    """Decode an 8-byte memory-only operand; a register form is invalid."""
    decode_rm(state, state.operand0, Reg32List, 8)
    state.invalid = state.invalid or (state.operand0.operand != "mem")
def decode_mem80(state):
    """Decode a 10-byte memory-only operand; a register form is invalid."""
    decode_rm(state, state.operand0, Reg32List, 10)
    state.invalid = state.invalid or (state.operand0.operand != "mem")
def decode_mem_float_env(state):
    """FPU environment block operand: 14 bytes with a 16-bit operand size,
    28 bytes otherwise; must be a memory operand."""
    env_size = 14 if state.final_op_size == 2 else 28
    decode_rm(state, state.operand0, Reg32List, env_size)
    if state.operand0.operand != "mem":
        state.invalid = True
def decode_mem_float_save(state):
    """FPU state block operand: 94 bytes with a 16-bit operand size,
    108 bytes otherwise; must be a memory operand."""
    save_size = 94 if state.final_op_size == 2 else 108
    decode_rm(state, state.operand0, Reg32List, save_size)
    if state.operand0.operand != "mem":
        state.invalid = True
def decode_fpu_reg(state):
    # Single operand: an FPU stack register (10-byte x87 register).
    decode_rm(state, state.operand0, FPURegList, 10)
def decode_fpu_reg_st0(state):
    """Two operands: an FPU stack register plus the implicit st0."""
    decode_rm(state, state.operand0, FPURegList, 10)
    state.operand1.size = 10
    state.operand1.operand = "st0"
def decode_reg_group_no_operands(state):
    """Opcode group with no operands: the low three bits of the ModRM
    byte select the operation from GroupOperations."""
    modrm = read8(state)
    group = GroupOperations[state.result.operation]
    state.result.operation = group[modrm & 7]
def decode_reg_group_ax(state):
    """Opcode group selected by the low three ModRM bits, with an
    implicit 16-bit ax operand."""
    modrm = read8(state)
    group = GroupOperations[state.result.operation]
    state.result.operation = group[modrm & 7]
    state.operand0.size = 2
    state.operand0.operand = "ax"
def decode_cmpxch8b(state):
    # Decode the opcode group shared by cmpxch8b/cmpxch16b and the VMX
    # pointer instructions; the reg field of the ModRM byte selects which.
    # All forms require a memory operand (checked at the end).
    rm = peek8(state)
    reg_field = (rm >> 3) & 7
    if reg_field == 1:
        # cmpxch8b / cmpxch16b: operand is twice the operand size
        if state.final_op_size == 2:
            # no 16-bit form; promote to 32-bit
            state.final_op_size = 4
        elif state.final_op_size == 8:
            state.result.operation = "cmpxch16b"
        decode_rm(state, state.operand0, get_reg_list_for_final_op_size(state), state.final_op_size * 2)
    elif reg_field == 6:
        # prefix selects between the VMX pointer-load family
        if state.op_prefix:
            state.result.operation = "vmclear"
        elif state.rep == "repe":
            state.result.operation = "vmxon"
        else:
            state.result.operation = "vmptrld"
        decode_rm(state, state.operand0, Reg64List, 8)
    elif reg_field == 7:
        state.result.operation = "vmptrst"
        decode_rm(state, state.operand0, Reg64List, 8)
    else:
        state.invalid = True
    if state.operand0.operand != "mem":
        state.invalid = True
def decode_mov_nti(state):
    """movnti: register-to-memory move; there is no 16-bit form and the
    destination must be a memory operand."""
    if state.final_op_size == 2:
        state.final_op_size = 4
    regs = get_reg_list_for_final_op_size(state)
    decode_rm_reg(state, state.operand0, regs, state.final_op_size,
        state.operand1, regs, state.final_op_size)
    if state.operand0.operand != "mem":
        state.invalid = True
def decode_crc32(state):
    """crc32: the source is rm of the current operand size; the
    destination is a 32-bit register, or 64-bit for 8-byte operands."""
    source_regs = get_reg_list_for_final_op_size(state)
    wide = state.final_op_size == 8
    decode_rm_reg(state, state.operand1, source_regs, state.final_op_size,
        state.operand0, Reg64List if wide else Reg32List, 8 if wide else 4)
def decode_arpl(state):
    # This encoding is repurposed in 64-bit mode: there it is movsxd
    # (sign-extend r/m32 into a register); in 16/32-bit modes it is arpl
    # with 16-bit operands (note the flipped operand order).
    if state.using64:
        state.result.operation = "movsxd"
        reg_list = get_reg_list_for_final_op_size(state)
        decode_rm_reg(state, state.operand1, Reg32List, 4, state.operand0, reg_list, state.final_op_size)
    else:
        state.final_op_size = 2
        reg_list = get_reg_list_for_final_op_size(state)
        decode_rm_reg(state, state.operand0, reg_list, 2, state.operand1, reg_list, state.final_op_size)
# Table of operand encodings.  Each key is an encoding name (presumably
# referenced from the opcode maps) and each value is a two-element list:
# [decoder function, DEC_FLAG_* bitmask] applied while decoding.
Encoding = {
    None : [invalid_decode, 0],
    "two_byte" : [decode_two_byte, 0], "fpu" : [decode_fpu, 0],
    "no_operands" : [decode_no_operands, 0], "op_size" : [decode_no_operands, DEC_FLAG_OPERATION_OP_SIZE],
    "op_size_def64" : [decode_no_operands, DEC_FLAG_DEFAULT_TO_64BIT | DEC_FLAG_OPERATION_OP_SIZE],
    "op_size_no64" : [decode_no_operands, DEC_FLAG_INVALID_IN_64BIT | DEC_FLAG_OPERATION_OP_SIZE],
    "reg_rm_8" : [decode_reg_rm, DEC_FLAG_BYTE], "rm_reg_8" : [decode_reg_rm, DEC_FLAG_BYTE | DEC_FLAG_FLIP_OPERANDS],
    "rm_reg_8_lock" : [decode_reg_rm, DEC_FLAG_BYTE | DEC_FLAG_FLIP_OPERANDS | DEC_FLAG_LOCK],
    "rm_reg_16" : [decode_reg_rm, DEC_FLAG_FLIP_OPERANDS | DEC_FLAG_FORCE_16BIT],
    "reg_rm_v" : [decode_reg_rm, 0], "rm_reg_v" : [decode_reg_rm, DEC_FLAG_FLIP_OPERANDS],
    "rm_reg_v_lock" : [decode_reg_rm, DEC_FLAG_FLIP_OPERANDS | DEC_FLAG_LOCK],
    "reg_rm2x_v" : [decode_reg_rm, DEC_FLAG_REG_RM_2X_SIZE], "reg_rm_imm_v" : [decode_reg_rm_imm, 0],
    "reg_rm_immsx_v" : [decode_reg_rm_imm, DEC_FLAG_IMM_SX], "reg_rm_0" : [decode_reg_rm, DEC_FLAG_REG_RM_NO_SIZE],
    "reg_rm_f" : [decode_reg_rm, DEC_FLAG_REG_RM_FAR_SIZE],
    "rm_reg_def64" : [decode_reg_rm, DEC_FLAG_FLIP_OPERANDS | DEC_FLAG_DEFAULT_TO_64BIT],
    "rm_reg_imm8_v" : [decode_rm_reg_imm8, 0], "rm_reg_cl_v" : [decode_rm_reg_cl, 0],
    "eax_imm_8" : [decode_eax_imm, DEC_FLAG_BYTE], "eax_imm_v" : [decode_eax_imm, 0],
    "push_pop_seg" : [decode_push_pop_seg, 0],
    "op_reg_v" : [decode_op_reg, 0], "op_reg_v_def64" : [decode_op_reg, DEC_FLAG_DEFAULT_TO_64BIT],
    "eax_op_reg_v" : [decode_eax_op_reg, 0], "op_reg_imm_8" : [decode_op_reg_imm, DEC_FLAG_BYTE],
    "op_reg_imm_v" : [decode_op_reg_imm, 0], "nop" : [decode_nop, 0],
    "imm_v_def64" : [decode_imm, DEC_FLAG_DEFAULT_TO_64BIT],
    "immsx_v_def64" : [decode_imm, DEC_FLAG_IMM_SX | DEC_FLAG_DEFAULT_TO_64BIT],
    "imm_8" : [decode_imm, DEC_FLAG_BYTE], "imm_16" : [decode_imm, DEC_FLAG_FORCE_16BIT],
    "imm16_imm8" : [decode_imm16_imm8, 0],
    "edi_dx_8_rep" : [decode_edi_dx, DEC_FLAG_BYTE | DEC_FLAG_REP],
    "edi_dx_op_size_rep" : [decode_edi_dx, DEC_FLAG_OPERATION_OP_SIZE | DEC_FLAG_REP],
    "dx_esi_8_rep" : [decode_dx_esi, DEC_FLAG_BYTE | DEC_FLAG_REP],
    "dx_esi_op_size_rep" : [decode_dx_esi, DEC_FLAG_OPERATION_OP_SIZE | DEC_FLAG_REP],
    "relimm_8_def64" : [decode_rel_imm, DEC_FLAG_BYTE | DEC_FLAG_DEFAULT_TO_64BIT],
    "relimm_v_def64" : [decode_rel_imm, DEC_FLAG_DEFAULT_TO_64BIT],
    "relimm_8_addr_size_def64" : [decode_rel_imm_addr_size, DEC_FLAG_BYTE | DEC_FLAG_DEFAULT_TO_64BIT],
    "group_rm_8" : [decode_group_rm, DEC_FLAG_BYTE], "group_rm_v" : [decode_group_rm, 0],
    "group_rm_8_lock" : [decode_group_rm, DEC_FLAG_BYTE | DEC_FLAG_LOCK],
    "group_rm_0" : [decode_group_rm, DEC_FLAG_REG_RM_NO_SIZE],
    "group_rm_imm_8" : [decode_group_rm_imm, DEC_FLAG_BYTE],
    "group_rm_imm_8_lock" : [decode_group_rm_imm, DEC_FLAG_BYTE | DEC_FLAG_LOCK],
    "group_rm_imm_8_no64_lock" : [decode_group_rm_imm, DEC_FLAG_BYTE | DEC_FLAG_INVALID_IN_64BIT | DEC_FLAG_LOCK],
    "group_rm_imm8_v" : [decode_group_rm_imm8v, 0],
    "group_rm_imm_v" : [decode_group_rm_imm, 0], "group_rm_imm_v_lock" : [decode_group_rm_imm, DEC_FLAG_LOCK],
    "group_rm_immsx_v_lock" : [decode_group_rm_imm, DEC_FLAG_IMM_SX | DEC_FLAG_LOCK],
    "group_rm_one_8" : [decode_group_rm_one, DEC_FLAG_BYTE], "group_rm_one_v" : [decode_group_rm_one, 0],
    "group_rm_cl_8" : [decode_group_rm_cl, DEC_FLAG_BYTE], "group_rm_cl_v" : [decode_group_rm_cl, 0],
    "group_f6" : [decode_group_f6_f7, DEC_FLAG_BYTE | DEC_FLAG_LOCK], "group_f7" : [decode_group_f6_f7, DEC_FLAG_LOCK],
    "group_ff" : [decode_group_ff, DEC_FLAG_LOCK],
    "group_0f00" : [decode_group_0f00, 0], "group_0f01" : [decode_group_0f01, 0], "group_0fae" : [decode_group_0fae, 0],
    "_0fb8" : [decode_0fb8, 0],
    "rm_sreg_v" : [decode_rm_sreg_v, 0], "sreg_rm_v" : [decode_rm_sreg_v, DEC_FLAG_FLIP_OPERANDS],
    "rm_8" : [decode_rm8, 0], "rm_v_def64" : [decode_rm_v, DEC_FLAG_DEFAULT_TO_64BIT],
    "far_imm_no64" : [decode_far_imm, DEC_FLAG_INVALID_IN_64BIT],
    "eax_addr_8" : [decode_eax_addr, DEC_FLAG_BYTE], "eax_addr_v" : [decode_eax_addr, 0],
    "addr_eax_8" : [decode_eax_addr, DEC_FLAG_BYTE | DEC_FLAG_FLIP_OPERANDS],
    "addr_eax_v" : [decode_eax_addr, DEC_FLAG_FLIP_OPERANDS],
    "edi_esi_8_rep" : [decode_edi_esi, DEC_FLAG_BYTE | DEC_FLAG_REP],
    "edi_esi_op_size_rep" : [decode_edi_esi, DEC_FLAG_OPERATION_OP_SIZE | DEC_FLAG_REP],
    "esi_edi_8_repc" : [decode_edi_esi, DEC_FLAG_BYTE | DEC_FLAG_FLIP_OPERANDS | DEC_FLAG_REP_COND],
    "esi_edi_op_size_repc" : [decode_edi_esi, DEC_FLAG_FLIP_OPERANDS | DEC_FLAG_OPERATION_OP_SIZE | DEC_FLAG_REP_COND],
    "edi_eax_8_rep" : [decode_edi_eax, DEC_FLAG_BYTE | DEC_FLAG_REP],
    "edi_eax_op_size_rep" : [decode_edi_eax, DEC_FLAG_OPERATION_OP_SIZE | DEC_FLAG_REP],
    "eax_esi_8_rep" : [decode_eax_esi, DEC_FLAG_BYTE | DEC_FLAG_REP],
    "eax_esi_op_size_rep" : [decode_eax_esi, DEC_FLAG_OPERATION_OP_SIZE | DEC_FLAG_REP],
    "eax_edi_8_repc" : [decode_edi_eax, DEC_FLAG_BYTE | DEC_FLAG_FLIP_OPERANDS | DEC_FLAG_REP_COND],
    "eax_edi_op_size_repc" : [decode_edi_eax, DEC_FLAG_FLIP_OPERANDS | DEC_FLAG_OPERATION_OP_SIZE | DEC_FLAG_REP_COND],
    "al_ebx_al" : [decode_al_ebx_al, 0],
    "eax_imm8_8" : [decode_eax_imm8, DEC_FLAG_BYTE], "eax_imm8_v" : [decode_eax_imm8, 0],
    "imm8_eax_8" : [decode_eax_imm8, DEC_FLAG_BYTE | DEC_FLAG_FLIP_OPERANDS],
    "imm8_eax_v" : [decode_eax_imm8, DEC_FLAG_FLIP_OPERANDS],
    "eax_dx_8" : [decode_eax_dx, DEC_FLAG_BYTE], "eax_dx_v" : [decode_eax_dx, 0],
    "dx_eax_8" : [decode_eax_dx, DEC_FLAG_BYTE | DEC_FLAG_FLIP_OPERANDS],
    "dx_eax_v" : [decode_eax_dx, DEC_FLAG_FLIP_OPERANDS], "_3dnow" : [decode_3dnow, 0],
    "sse_table" : [decode_sse_table, 0], "sse_table_flip" : [decode_sse_table, DEC_FLAG_FLIP_OPERANDS],
    "sse_table_imm_8" : [decode_sse_table_imm8, 0], "sse_table_imm_8_flip" : [decode_sse_table_imm8, DEC_FLAG_FLIP_OPERANDS],
    "sse_table_incop64" : [decode_sse_table, DEC_FLAG_INC_OPERATION_FOR_64],
    "sse_table_incop64_flip" : [decode_sse_table, DEC_FLAG_INC_OPERATION_FOR_64 | DEC_FLAG_FLIP_OPERANDS],
    "sse_table_mem8" : [decode_sse_table_mem8, 0], "sse_table_mem8_flip" : [decode_sse_table_mem8, DEC_FLAG_FLIP_OPERANDS],
    "sse" : [decode_sse, 0], "sse_single" : [decode_sse_single, 0], "sse_packed" : [decode_sse_packed, 0],
    "mmx" : [decode_mmx, 0], "mmx_sseonly" : [decode_mmx_sse_only, 0],
    "mmx_group" : [decode_mmx_group, 0], "pinsrw" : [decode_pinsrw, 0],
    "reg_cr" : [decode_reg_cr, DEC_FLAG_DEFAULT_TO_64BIT | DEC_FLAG_LOCK],
    "cr_reg" : [decode_reg_cr, DEC_FLAG_FLIP_OPERANDS | DEC_FLAG_DEFAULT_TO_64BIT | DEC_FLAG_LOCK],
    "movsxzx_8" : [decode_mov_sx_zx_8, 0], "movsxzx_16" : [decode_mov_sx_zx_16, 0],
    "mem_16" : [decode_mem16, 0], "mem_32" : [decode_mem32, 0], "mem_64" : [decode_mem64, 0], "mem_80" : [decode_mem80, 0],
    "mem_floatenv" : [decode_mem_float_env, 0], "mem_floatsave" : [decode_mem_float_save, 0],
    "fpureg" : [decode_fpu_reg, 0], "st0_fpureg" : [decode_fpu_reg_st0, DEC_FLAG_FLIP_OPERANDS],
    "fpureg_st0" : [decode_fpu_reg_st0, 0],
    "reggroup_no_operands" : [decode_reg_group_no_operands, 0], "reggroup_ax" : [decode_reg_group_ax, 0],
    "cmpxch8b" : [decode_cmpxch8b, DEC_FLAG_LOCK], "movnti" : [decode_mov_nti, 0],
    "crc32_8" : [decode_crc32, DEC_FLAG_BYTE], "crc32_v" : [decode_crc32, 0],
    "arpl" : [decode_arpl, 0]
}
def x86_reg_size(reg):
    """Return the size in bytes of a register name; unknown names (the
    FPU stack registers among them) yield 10."""
    sizes = ((Reg8List, 1), (Reg8List64, 1), (Reg16List, 2), (Reg32List, 4),
        (Reg64List, 8), (MMXRegList, 8), (XMMRegList, 16))
    for reg_list, size in sizes:
        if reg in reg_list:
            return size
    return 10
def process_prefixes(state):
    # Consume all legacy and REX prefixes from the front of state.opcode,
    # recording their effects on the decode state.  A REX prefix only
    # counts if it is the *last* prefix before the opcode byte.
    rex = 0
    addr_prefix = False
    while not state.invalid:
        prefix = read8(state)
        if (prefix >= 0x26) and (prefix <= 0x3e) and ((prefix & 7) == 6):
            # Segment prefix (0x26/0x2e/0x36/0x3e)
            state.result.segment = ["es", "cs", "ss", "ds"][(prefix >> 3) - 4]
        elif prefix == 0x64:
            state.result.segment = "fs"
        elif prefix == 0x65:
            state.result.segment = "gs"
        elif prefix == 0x66:
            # operand size override
            state.op_prefix = True
            state.result.flags |= FLAG_OPSIZE
        elif prefix == 0x67:
            # address size override
            addr_prefix = True
            state.result.flags |= FLAG_ADDRSIZE
        elif prefix == 0xf0:
            state.result.flags |= FLAG_LOCK
        elif prefix == 0xf2:
            state.rep = "repne"
        elif prefix == 0xf3:
            state.rep = "repe"
        elif state.using64 and (prefix >= 0x40) and (prefix <= 0x4f):
            # REX prefix; `continue` skips the reset below so it only
            # survives when nothing follows it but the opcode
            rex = prefix
            continue
        else:
            # Not a prefix, continue instruction processing
            # (push the byte back onto the opcode stream -- python 2 str)
            state.opcode = chr(prefix) + state.opcode
            state.opcode_offset -= 1
            break
        # Force ignore REX unless it is the last prefix
        rex = 0
    if state.op_prefix:
        # toggle between 16- and 32-bit operand size
        if state.op_size == 2:
            state.op_size = 4
        else:
            state.op_size = 2
    if addr_prefix:
        # toggle between 16- and 32-bit (or 32/64) address size
        if state.addr_size == 4:
            state.addr_size = 2
        else:
            state.addr_size = 4
    if rex != 0:
        # REX prefix found before opcode
        state.rex = True
        state.rex_rm1 = (rex & 1) != 0   # REX.B
        state.rex_rm2 = (rex & 2) != 0   # REX.X
        state.rex_reg = (rex & 4) != 0   # REX.R
        if (rex & 8) != 0:               # REX.W: 64-bit operand size
            state.op_size = 8
def finish_disassemble(state):
    # Final fixups after decoding: record the instruction length, turn
    # RIP-relative displacements into absolute addresses, and flag
    # truncated or invalid encodings before finalizing the result.
    state.result.length = state.opcode_offset
    for i in state.result.operands:
        if i.rip_relative:
            # immediate holds the displacement; bias it by the address of
            # the *next* instruction
            i.immediate += state.addr + state.result.length
    if state.insufficient_length and (state.orig_len < 15):
        # ran out of bytes before a maximum-length (15 byte) instruction
        state.result.flags |= FLAG_INSUFFICIENT_LENGTH
    if state.invalid:
        state.result.operation = None
    state.result.finalize()
def disassemble16(opcode, addr):
    """Disassemble one instruction from the byte string `opcode` at
    address `addr`, assuming 16-bit mode; returns the decoded result."""
    state = DecodeState()
    state.addr = addr
    state.addr_size = 2
    state.op_size = 2
    state.using64 = False
    # x86 instructions are at most 15 bytes long
    state.opcode = opcode[:15]
    state.orig_len = len(state.opcode)
    process_prefixes(state)
    process_opcode(state, MainOpcodeMap, read8(state))
    finish_disassemble(state)
    state.result.addr_size = state.addr_size
    return state.result
def disassemble32(opcode, addr):
    """Disassemble one instruction from the byte string `opcode` at
    address `addr`, assuming 32-bit mode; returns the decoded result."""
    state = DecodeState()
    state.addr = addr
    state.addr_size = 4
    state.op_size = 4
    state.using64 = False
    # x86 instructions are at most 15 bytes long
    state.opcode = opcode[:15]
    state.orig_len = len(state.opcode)
    process_prefixes(state)
    process_opcode(state, MainOpcodeMap, read8(state))
    finish_disassemble(state)
    state.result.addr_size = state.addr_size
    return state.result
def disassemble64(opcode, addr):
    """Disassemble one instruction from the byte string `opcode` at
    address `addr`, assuming 64-bit mode; returns the decoded result."""
    state = DecodeState()
    state.addr = addr
    state.addr_size = 8
    state.op_size = 4       # default operand size stays 32-bit in long mode
    state.using64 = True
    # x86 instructions are at most 15 bytes long
    state.opcode = opcode[:15]
    state.orig_len = len(state.opcode)
    process_prefixes(state)
    process_opcode(state, MainOpcodeMap, read8(state))
    finish_disassemble(state)
    state.result.addr_size = state.addr_size
    return state.result
def get_size_string(size):
    """Return the assembler size keyword (with a trailing space) for an
    operand width in bytes, or an empty string for unknown widths."""
    names = {1: "byte ", 2: "word ", 4: "dword ", 6: "fword ",
        8: "qword ", 10: "tword ", 16: "oword "}
    return names.get(size, "")
def get_operand_string(type, scale, plus):
    """Render one memory-operand component: an optional leading '+',
    the register name, and a '*scale' suffix when scale is not 1."""
    prefix = "+" if plus else ""
    suffix = "" if scale == 1 else "*%d" % scale
    return prefix + type + suffix
def format_instruction_string(fmt, opcode, addr, instr):
    # Render a decoded instruction as text using the printf-like template
    # `fmt`.  Directives (an optional decimal width between '%' and the
    # letter):
    #   %a - address in hex (default width 8)
    #   %b - raw opcode bytes in hex, space-padded to the width
    #   %i - operation name with lock/rep prefixes, space-padded to the width
    #   %o - comma separated operand list
    # Any other character after '%' is emitted literally.
    result = ""
    i = 0
    while i < len(fmt):
        if fmt[i] == '%':
            width = 0
            i += 1
            # inner loop accumulates width digits, then handles one directive
            while i < len(fmt):
                if fmt[i] == 'a':
                    if width == 0:
                        width = 8
                    result += ("%%.%dx" % width) % addr
                    break
                elif fmt[i] == 'b':
                    for j in range(0, instr.length):
                        # python 2: opcode is a byte string
                        result += "%.2x" % ord(opcode[j])
                    for j in range(instr.length, width):
                        result += "  "
                    break
                elif fmt[i] == 'i':
                    operation = ""
                    if instr.flags & FLAG_LOCK:
                        operation += "lock "
                    if instr.flags & FLAG_ANY_REP:
                        operation += "rep"
                        if instr.flags & FLAG_REPNE:
                            operation += "ne"
                        elif instr.flags & FLAG_REPE:
                            operation += "e"
                        operation += " "
                    operation += instr.operation
                    for j in range(len(operation), width):
                        operation += " "
                    result += operation
                    break
                elif fmt[i] == 'o':
                    for j in range(0, len(instr.operands)):
                        if j != 0:
                            result += ", "
                        if instr.operands[j].operand == "imm":
                            # immediate, masked to its operand size
                            numfmt = "0x%%.%dx" % (instr.operands[j].size * 2)
                            result += numfmt % (instr.operands[j].immediate &
                                ((1 << (instr.operands[j].size * 8)) - 1))
                        elif instr.operands[j].operand == "mem":
                            plus = False
                            result += get_size_string(instr.operands[j].size)
                            # emit segment override (es is never implicit)
                            if (instr.segment != None) or (instr.operands[j].segment == "es"):
                                result += instr.operands[j].segment + ":"
                            result += '['
                            if instr.operands[j].components[0] != None:
                                result += instr.operands[j].components[0]
                                plus = True
                            if instr.operands[j].components[1] != None:
                                result += get_operand_string(instr.operands[j].components[1],
                                    instr.operands[j].scale, plus)
                                plus = True
                            # displacement: printed when nonzero, or when
                            # it is the only addressing component
                            if (instr.operands[j].immediate != 0) or ((instr.operands[j].components[0] == None) and (instr.operands[j].components[1] == None)):
                                if plus and (instr.operands[j].immediate >= -0x80) and (instr.operands[j].immediate < 0):
                                    # small negative offset: print as -0xNN
                                    result += '-'
                                    result += "0x%.2x" % (-instr.operands[j].immediate)
                                elif plus and (instr.operands[j].immediate > 0) and (instr.operands[j].immediate <= 0x7f):
                                    result += '+'
                                    result += "0x%.2x" % instr.operands[j].immediate
                                elif (instr.flags & FLAG_64BIT_ADDRESS) != 0:
                                    if plus:
                                        result += '+'
                                    result += "0x%.16x" % instr.operands[j].immediate
                                else:
                                    if plus:
                                        result += '+'
                                    result += "0x%.8x" % (instr.operands[j].immediate & 0xffffffff)
                            result += ']'
                        else:
                            # plain register operand
                            result += instr.operands[j].operand
                    break
                elif (fmt[i] >= '0') and (fmt[i] <= '9'):
                    # accumulate a decimal width prefix
                    width = (width * 10) + (ord(fmt[i]) - 0x30)
                else:
                    # unknown directive: emit the character itself
                    result += fmt[i]
                    break
                i += 1
        else:
            result += fmt[i]
        i += 1
    return result
def disassemble16_to_string(fmt, opcode, addr):
    """Disassemble in 16-bit mode and format the result per `fmt`."""
    return format_instruction_string(fmt, opcode, addr, disassemble16(opcode, addr))
def disassemble32_to_string(fmt, opcode, addr):
    """Disassemble in 32-bit mode and format the result per `fmt`."""
    return format_instruction_string(fmt, opcode, addr, disassemble32(opcode, addr))
def disassemble64_to_string(fmt, opcode, addr):
    """Disassemble in 64-bit mode and format the result per `fmt`."""
    return format_instruction_string(fmt, opcode, addr, disassemble64(opcode, addr))
| 85,623 | 43,268 |
from sentences.base_sentence import base_sentence
class sentence14(base_sentence):
    """Sentence template: the player captains a dual-function transport."""
    def get_sentence(self, components):
        """Assemble the sentence from the `components` lookup table."""
        parts = [
            'You are the Captain of ', components['transport_article_on_desc'],
            ' ', components['transport_desc'],
            ' ', components['transport'],
            ' from ', components['regime'],
            '. It has a dual function: ', components['transport_function'],
            ', and ', components['transport_function2'],
            '.',
        ]
        return ''.join(parts)
| 590 | 158 |
#encoding=utf8
import datetime
from camper.exceptions import *
import isodate
import pycountry
from slugify import UniqueSlugify
import embeddify
import re
from sfext.babel import T
import pymongo
from mongogogo import *
from bson.code import Code
from tickets import *
# shared embeddify instance used to render media embeds; the config caps
# embed dimensions at 1200x1200
embedder = embeddify.Embedder(config = {'width' : 1200, 'height' : 1200 })
# create the URL regexp
URL_REGEX = r"""(?i)\b((?:https?:(?:/{1,3}|[a-z0-9%])|[a-z0-9.\-]+[.](?:com|net|org|edu|gov|mil|aero|asia|biz|cat|coop|info|int|jobs|mobi|museum|name|post|pro|tel|travel|xxx|ac|ad|ae|af|ag|ai|al|am|an|ao|aq|ar|as|at|au|aw|ax|az|ba|bb|bd|be|bf|bg|bh|bi|bj|bm|bn|bo|br|bs|bt|bv|bw|by|bz|ca|cc|cd|cf|cg|ch|ci|ck|cl|cm|cn|co|cr|cs|cu|cv|cx|cy|cz|dd|de|dj|dk|dm|do|dz|ec|ee|eg|eh|er|es|et|eu|fi|fj|fk|fm|fo|fr|ga|gb|gd|ge|gf|gg|gh|gi|gl|gm|gn|gp|gq|gr|gs|gt|gu|gw|gy|hk|hm|hn|hr|ht|hu|id|ie|il|im|in|io|iq|ir|is|it|je|jm|jo|jp|ke|kg|kh|ki|km|kn|kp|kr|kw|ky|kz|la|lb|lc|li|lk|lr|ls|lt|lu|lv|ly|ma|mc|md|me|mg|mh|mk|ml|mm|mn|mo|mp|mq|mr|ms|mt|mu|mv|mw|mx|my|mz|na|nc|ne|nf|ng|ni|nl|no|np|nr|nu|nz|om|pa|pe|pf|pg|ph|pk|pl|pm|pn|pr|ps|pt|pw|py|qa|re|ro|rs|ru|rw|sa|sb|sc|sd|se|sg|sh|si|sj|Ja|sk|sl|sm|sn|so|sr|ss|st|su|sv|sx|sy|sz|tc|td|tf|tg|th|tj|tk|tl|tm|tn|to|tp|tr|tt|tv|tw|tz|ua|ug|uk|us|uy|uz|va|vc|ve|vg|vi|vn|vu|wf|ws|ye|yt|yu|za|zm|zw)/)(?:[^\s()<>{}\[\]]+|\([^\s()]*?\([^\s()]+\)[^\s()]*?\)|\([^\s]+?\))+(?:\([^\s()]*?\([^\s()]+\)[^\s()]*?\)|\([^\s]+?\)|[^\s`!()\[\]{};:'".,<>?«»“”‘’])|(?:(?<!@)[a-z0-9]+(?:[.\-][a-z0-9]+)*[.](?:com|net|org|edu|gov|mil|aero|asia|biz|cat|coop|info|int|jobs|mobi|museum|name|post|pro|tel|travel|xxx|ac|ad|ae|af|ag|ai|al|am|an|ao|aq|ar|as|at|au|aw|ax|az|ba|bb|bd|be|bf|bg|bh|bi|bj|bm|bn|bo|br|bs|bt|bv|bw|by|bz|ca|cc|cd|cf|cg|ch|ci|ck|cl|cm|cn|co|cr|cs|cu|cv|cx|cy|cz|dd|de|dj|dk|dm|do|dz|ec|ee|eg|eh|er|es|et|eu|fi|fj|fk|fm|fo|fr|ga|gb|gd|ge|gf|gg|gh|gi|gl|gm|gn|gp|gq|gr|gs|gt|gu|gw|gy|hk|hm|hn|hr|ht|hu|id|ie|il|im|in|io|iq|ir|is|it|je|jm|jo|jp|ke|kg|kh|ki|km|kn|kp|kr|kw|ky|kz|la|lb|lc|li|lk|lr|ls|lt|lu|lv|ly|ma|mc|md|me|mg|mh|mk|ml|mm|mn|mo|mp|mq|mr|ms|mt|mu|mv|mw|mx|my|mz|na|nc|ne|nf|ng|ni|nl|no|np|nr|nu|nz|om|pa|pe|pf|pg|ph|pk|pl|pm|pn|pr|ps|pt|pw|py|qa|re|ro|rs|ru|rw|sa|sb|sc|sd|se|sg|sh|si|sj|Ja|sk|sl|sm|sn|so|sr|ss|st|su|sv|sx|sy|sz|tc|td|tf|tg|th|tj|tk|tl|tm|tn|to|tp|tr|tt|tv|tw|tz|ua|ug|uk|us|uy|uz|va|vc|ve|vg|vi|vn|vu|wf|ws|ye|yt|yu|za|zm|zw)\b/?(
?!@)))"""
url_re = re.compile(URL_REGEX, re.IGNORECASE)
HREF_REGX = r'<a.*href=[\'"]?([^\'" >]+)[\'""].*>(.*)</a>'
a_re = re.compile(HREF_REGX, re.IGNORECASE)
__all__=["Barcamp", "BarcampSchema", "Barcamps", "Event"]
class BaseError(Exception):
    """Base class for this module's exceptions.

    The message is stored both on ``self.msg`` (the legacy attribute read
    by existing callers) and passed to ``Exception.__init__`` so that
    ``str(exc)`` shows the message instead of an empty string.
    """
    def __init__(self, msg):
        """initialize the error with a message"""
        Exception.__init__(self, msg)
        self.msg = msg
class WorkflowError(BaseError):
    """Raised when a workflow state transition is not allowed."""
    def __init__(self, msg = u"Transition nicht erlaubt" , old_state = None, new_state = None):
        """store the message together with the states involved"""
        self.msg = msg
        self.old_state = old_state
        self.new_state = new_state
    def __str__(self):
        """return a printable representation"""
        details = (self.msg, self.old_state, self.new_state)
        return "<WorkflowError: %s (old=%s, new=%s)>" % details
class LocationSchema(Schema):
    """schema for a physical location: address, contact data and
    geo coordinates (lat/lng)"""
    name = String()
    street = String()
    city = String()
    zip = String()
    country = String()       # country code; used for pycountry alpha_2 lookup in Location
    url = String()
    phone = String()
    email = String()
    description = String()
    lat = Float()            # latitude
    lng = Float()            # longitude
class Sponsor(Schema):
    """schema for a sponsor: logo asset, display name and link URL"""
    logo = String(required=True) # asset id
    name = String(required=True)
    url = String(required=True)
class BlogLinkSchema(Schema):
    """a link to a blog post documenting the barcamp"""
    title = String(required = True)
    url = String(required = True)
    user_id = String(required = True)    # id of the user who added the link
class RegistrationFieldSchema(Schema):
    """a sub schema describing a field for the registration form"""
    name = String(required=True)         # internal field name
    title = String(required=True)        # label shown to the user
    description = String()
    fieldtype = String(required=True)    # widget/field type identifier
    choices = List(List(String()), default=[])   # list of [value, label] pairs -- presumably; verify against form code
    required = Boolean()
class MailsSchema(Schema):
    """a sub schema holding the configurable email subject/body templates,
    one pair per notification event"""
    # registration mails
    welcome_subject = String()
    welcome_text = String()
    onwaitinglist_subject = String()
    onwaitinglist_text = String()
    fromwaitinglist_subject = String()
    fromwaitinglist_text = String()
    # ticket workflow mails
    ticket_welcome_subject = String()
    ticket_welcome_text = String()
    ticket_pending_subject = String()
    ticket_pending_text = String()
    ticket_confirmed_subject = String()
    ticket_confirmed_text = String()
    ticket_canceled_subject = String()
    ticket_canceled_text = String()
class Location(Record):
    """a location record wrapping LocationSchema data"""
    schema = LocationSchema()
    @property
    def country_name(self):
        """retrieve the country name from the country db. It's not i18n"""
        # NOTE(review): pycountry returns None for an unknown alpha-2 code,
        # which would raise AttributeError here -- assumes self.country is
        # always a valid code.
        return pycountry.countries.get(alpha_2 = self.country).name
class SessionSchema(Schema):
    """a session in a timetable"""
    _id = String(required = True) # the session index made out of timeslot and room
    title = String(required = True, max_length = 255)
    description = String(max_length = 5000)
    moderator = String(default = "") # actually list of names separated by comma
    interested = String(default = "") # ppl interested in a session
    # sid and slug for url referencing, will be computed in before_serialze below in Barcamps
    sid = String(required = True) # the actual unique id
    slug = String(required = True, max_length = 100)
    pad = String() # the pad id for the documentation
    confurl = String(max_length = 500) # video conf url
class RoomSchema(Schema):
    """a room in which sessions take place"""
    id = String(required = True) # uuid
    name = String(required = True, max_length = 100)
    capacity = Integer(required = True, default = 20)   # number of seats
    confurl = String(max_length = 500)                  # video conf url
    description = String(max_length = 1000)
class TimeSlotSchema(Schema):
    """a timeslot in the timetable grid"""
    time = Regexp("^([0-9]|0[0-9]|1[0-9]|2[0-3]):[0-5][0-9]$", required = True) # only HH:MM here
    #time = String()
    reason = String(default = "", max_length = 200) # optional reason for blocking it
    blocked = Boolean(default = False) # is it blocked?
class TimeTableSchema(Schema):
    """a timetable of an event: the room/timeslot grid plus the sessions
    placed on it (keyed by session id)"""
    timeslots = List(TimeSlotSchema())
    rooms = List(RoomSchema())
    sessions = Dict(SessionSchema(), default = {})
class EventSchema(Schema):
    """a sub schema describing one event"""
    _id = String(required=True)
    name = String(required=True, max_length = 255)
    description = String(required=True, max_length = 15000)
    date = DateTime()
    start_time = String(max_length = 5)   # presumably "HH:MM" (cf. TimeSlotSchema) -- verify
    end_time = String(max_length = 5)     # presumably "HH:MM" -- verify
    location = LocationSchema(kls = Location, default = {})
    participants = List(String()) # TODO: ref
    size = Integer()              # maximum number of participants
    maybe = List(String()) # we maybe will implement this
    waiting_list = List(String()) # TODO: ref
    own_location = Boolean() # flag if the barcamp address is used or not
    timetable = TimeTableSchema(default = {
        'rooms' : [],
        'timeslots': [],
        'sessions' : {},
    })
class Event(Record):
    """wraps event data with a class to provider more properties etc."""
    schema = EventSchema()
    # _barcamp is a runtime-only back reference, excluded from persistence
    _protected = ['_barcamp']
    def __init__(self, *args, **kwargs):
        """initialize the event

        :param _barcamp: optional back reference to the owning barcamp;
            used for preregistration checks and the fallback location
        """
        super(Event, self).__init__(*args, **kwargs)
        self._barcamp = kwargs.get('_barcamp', None)
    @property
    def state(self):
        """returns the state of the event which can be one of
        planning -- the event has not started
        active -- the event is active
        finished -- the event has finished
        All of those depend on the date which will be checked in here.
        We only check for days, not timestamps, so if an event starts at 10am it still
        is supposed to be active for the whole day.
        """
        # convert everthing to dates without time
        today = datetime.date.today()
        start = self.start_date.date()
        end = self.end_date.date()
        if today < start:
            return "planning"
        if today > end:
            return "finished"
        return "active"
    def add_participant(self, user):
        """register a new user via the user object
        :param user: user object
        :raises ParticipantListFull: in case the participant list was full. The uses
        is moved to the waiting list then instead
        :returns: nothing which means everthing went ok. Don't forget to save the barcamp
        afterwards
        """
        # python 2: user ids are stored as unicode strings
        uid = unicode(user._id)
        if uid in self.participants:
            return
        # NOTE(review): assumes self.size is an int here; fill_participants
        # below casts it explicitly with int() -- confirm callers always
        # store an int.
        if len(self.participants) >= self.size:
            if uid not in self.waiting_list:
                self.waiting_list.append(uid)
            raise ParticipantListFull()
        # all checks ok, add it to the list of participants
        self.participants.append(uid)
        # any mail will be sent by the application logic
    @property
    def full(self):
        """return whether event is full or not"""
        return len(self.participants) >= self.size
    def set_status(self, uid, status="going", force=False):
        """set the status of the user for this event, read: register the user
        :param uid: the user id of the user (unicode)
        :param status: can be "going", "maybe", "notgoing"
        :param force: if ``True`` then a user can be added to the participants regardless if barcamp is full
        :returns: the final status (going, waitinglist, maybe, notgoing)
        """
        if status=="going":
            # a non-forced registration goes to the waiting list when the
            # event is full or the barcamp requires manual preregistration
            if not force and (len(self.participants) >= self.size or self._barcamp.preregistration):
                # user induced action
                if uid not in self.waiting_list:
                    self.waiting_list.append(uid)
                if uid in self.maybe:
                    self.maybe.remove(uid)
                status = 'waitinglist'
            else:
                # force is only done by admins and can overpop an event.
                if uid not in self.participants:
                    self.participants.append(uid)
                if uid in self.maybe:
                    self.maybe.remove(uid)
                if uid in self.waiting_list:
                    self.waiting_list.remove(uid)
                status = 'going'
            return status
        elif status=="maybe" or status=="notgoing":
            # drop the user from the binding lists either way
            if uid in self.participants:
                self.participants.remove(uid)
            if uid in self.waiting_list:
                self.waiting_list.remove(uid)
            if status=="maybe" and uid not in self.maybe:
                self.maybe.append(uid)
            if status=="notgoing" and uid in self.maybe:
                self.maybe.remove(uid)
            return status
        elif status=="waiting":
            # this is something only the admin can do
            if uid in self.participants:
                self.participants.remove(uid)
            if uid in self.maybe:
                self.maybe.remove(uid)
            if uid not in self.waiting_list:
                self.waiting_list.append(uid)
            return status
        elif status=="deleted":
            # remove a user from the barcamp
            if uid in self.participants:
                self.participants.remove(uid)
            if uid in self.maybe:
                self.maybe.remove(uid)
            if uid in self.waiting_list:
                self.waiting_list.remove(uid)
            return status
    def fill_participants(self):
        """try to fill up the participant list from the waiting list in case
        there is space. This should be called after somebody was removed from the
        participants list or the size was increased.
        It returns a list of user ids so you can send out mails.
        """
        # only fill participation list if we are in preregistration mode
        if self._barcamp.preregistration:
            return []
        uids = []
        # we have to make sure size is an int as it will fill everybody otherwise
        while len(self.participants) < int(self.size) and len(self.waiting_list)>0:
            # promote in FIFO order
            nuid = self.waiting_list.pop(0)
            self.participants.append(nuid)
            uids.append(nuid)
        return uids
    @property
    def rooms(self):
        """return the rooms"""
        return self.timetable.get('rooms', [])
    @property
    def timeslots(self):
        """return the timeslots"""
        return self.timetable.get('timeslots', [])
    @property
    def event_location(self):
        """return the event location or the barcamp location depending on settings"""
        if self.own_location:
            return self.location
        else:
            return self._barcamp.location
class BarcampSchema(Schema):
    """main schema for a barcamp holding all information about core data,
    design, tickets, admins, events and newsletter configuration."""
    created = DateTime()
    updated = DateTime()
    created_by = String() # TODO: should be ref to user
    workflow = String(required = True, default = "created")
    # location
    location = LocationSchema(kls = Location)
    # base data
    name = String(required = True)
    description = String(required = True)
    description2 = String(default="") # compiled description
    slug = String(required = True)
    registration_date = Date() # date when the registration starts
    start_date = Date()
    end_date = Date()
    seo_description = String() # description for meta tags
    seo_keywords = String() # keywords for meta tags
    # social media / external links
    twitter = String() # only the username
    hashtag = String()
    gplus = String()
    facebook = String() # facebook page
    homepage = String() # URL
    twitterwall = String() # URL
    hide_barcamp = Boolean(default=False) # whether the whole barcamp should be visible or not
    preregistration = Boolean(default=False) # if ppl need to be put manually on the participation list
    # ticketmode
    ticketmode_enabled = Boolean(default = False) # is the ticket mode enabled?
    paid_tickets = Boolean(default = False) # if false no prices will be shown
    ticket_classes = List(TicketClassSchema(), default = []) # list of ticket classes
    max_participants = Integer(default = 1000) # max number of participants over all tickets
    # documentation
    planning_pad = String() # ID of the planning etherpad
    documentation_pad = String() # ID of the pad for documentation
    planning_pad_public = Boolean(default = False)
    blogposts = List(BlogLinkSchema())
    # design
    logo = String() # asset id
    link_color = String()
    text_color = String() # NOTE: was declared twice; duplicate removed
    background_image = String()
    background_color = String()
    font = String()
    fb_image = String()
    header_color = String()
    # logo
    logo_color_logo = String()
    logo_color1 = String()
    logo_color2 = String()
    logo_text1 = String()
    logo_text2 = String()
    navbar_link_color = String() # text color of all navbar links
    navbar_active_color = String() # text color of active navbar item
    navbar_border_color = String() # border color of all navbar items
    navbar_active_bg = String() # bg color of active item
    navbar_hover_bg = String() # bg color when hovering
    sp_row_color = String(default="#ffffff") # text color of session plan row
    sp_row_bg = String(default="#222222") # bg color of session plan row
    sp_column_color = String(default="#ffffff") # text color of session plan col
    sp_column_bg = String(default="#6aab58") # bg color of session plan col
    hide_tabs = List(String(), default=[]) # list of tab ids to hide
    gallery = String() # gallery to show on homepage
    # user related
    admins = List(String()) # TODO: ref
    invited_admins = List(String()) # list of invited admins who have not yet accepted TODO: ref
    subscribers = List(String()) # TODO: ref
    # events
    events = Dict(EventSchema(kls=Event))
    # image stuff
    sponsors = List(Sponsor())
    # registration_form
    registration_form = List(RegistrationFieldSchema())
    registration_data = Dict() # user => data
    # default mail templates
    mail_templates = MailsSchema(default = {})
    # notifications
    send_email_to_admins = Boolean(default = False)
    # wizard checks. Elements in this list will disable asking for it again on the wizard screen
    wizard_checked = List(String(), default = [])
    # imprint and barcamp contact email, important for paid tickets
    contact_email = String(default="")
    imprint = String(default="")
    tos = String(default="")
    cancel_policy = String(default="")
    # newsletter related
    newsletter_reply_to = String(default="") # the active reply to address for the newsletter
    newsletter_rt_code = String(default="") # the activation code for the newsletter reply to
    newsletter_reply_to2= String(default="") # the to be set reply to address for the newsletter
class Barcamp(Record):
    """A barcamp record: core data, workflow, design settings, admins,
    events, tickets and newsletter configuration (see ``BarcampSchema``).

    NOTE(review): this module uses Python 2 constructs (``unicode``, ``cmp``,
    ``list.sort(cmpfunc)``) -- do not port individual methods in isolation.
    """
    schema = BarcampSchema()
    # attribute names the Record base must not treat as schema data
    _protected = ['event', 'schema', 'collection', '_protected', '_schemaless', 'default_values', 'admin_users', 'workflow_states', 'initial_workflow_state']
    initial_workflow_state = "created"
    # defaults applied on record creation; callables (datetimes) are invoked
    default_values = {
        'created' : datetime.datetime.utcnow,
        'updated' : datetime.datetime.utcnow,
        'location' : {},
        'workflow' : "created",
        'events' : {},
        'registration_form' : [
            { "title" : T("Show on public participants list"),
              "required" : False,
              "description" : T("Decide whether you want to be displayed on the public list of participants"),
              "name" : "optin_participant",
              "fieldtype" : "checkbox" }
        ],
        'registration_data' : {},
        'planning_pad_public' : False,
        'background_color' : '#fcfcfa',
        'link_color' : '#337CBB',
        'text_color' : '#333',
        'header_color' : '#fff',
        'navbar_link_color' : '#888',
        'navbar_active_bg' : '#555',
        'navbar_active_color' : '#eee',
        'navbar_border_color' : '#f0f0f0',
        'navbar_hover_bg' : '#f8f8f8',
        'sp_row_color' : "#ffffff",
        'sp_row_bg' : "#222222",
        'sp_column_color' : "#ffffff",
        'sp_column_bg' : "#6aab58",
        'hide_tabs' : [],
        'hide_barcamp' : False,
        'seo_description' : '',
        'seo_keywords' : '',
        'wizard_checked' : [],
        'contact_email' : '',
        'imprint' : '',
        'tos' : '',
        'cancel_policy' : '',
    }
    # allowed workflow transitions: current state -> reachable states
    # ('deleted' is terminal: it has no outgoing transitions)
    workflow_states = {
        'created' : ['public', 'deleted', 'canceled'],
        'public' : ['created', 'registration', 'deleted', 'canceled'],
        'registration' : ['deleted', 'canceled', 'created'],
        'canceled' : ['deleted'],
    }
    def set_workflow(self, new_state):
        """set the workflow to a new state

        :param new_state: target state name
        :raises WorkflowError: if the transition is not allowed
        """
        old_state = self.workflow
        if old_state is None:
            old_state = self.initial_workflow_state
        allowed_states = self.workflow_states[old_state]
        # check if transition is allowed
        # optional per-state hook: ``check_wf_<new_state>`` may veto the move
        if hasattr(self, "check_wf_"+new_state):
            m = getattr(self, "check_wf_"+new_state)
            if not m(old_state = old_state): # this should raise WorkflowError if not allowed otherwise return True
                raise WorkflowError(old_state = old_state, new_state = new_state) # fallback
        if new_state not in allowed_states:
            raise WorkflowError(old_state = old_state, new_state = new_state)
        # Trigger
        # optional per-state hook: ``on_wf_<new_state>`` runs after validation
        if hasattr(self, "on_wf_"+new_state):
            m = getattr(self, "on_wf_"+new_state)
            m(old_state = old_state)
        self.workflow = new_state
    def get_event(self, eid):
        """return the event for the given id or None

        NOTE(review): despite the docstring, a missing ``eid`` raises
        KeyError here rather than returning None -- confirm intended.
        """
        e = self.events[eid]
        return Event(e, _barcamp = self)
    @property
    def eventlist(self):
        """return the events as a list sorted by date

        Events on the same date are ordered by ``start_time`` (HH:MM).
        NOTE: uses ``cmp`` and ``list.sort(cmpfunc)`` -- Python 2 only.
        """
        events = self.events.values()
        def s(a,b):
            d = cmp(a['date'], b['date'])
            if d==0:
                # same day: compare start times in minutes since midnight
                tpa = a['start_time'].split(":") + ['00']
                ta = int(tpa[0])*60 + int(tpa[1])
                tpb = b['start_time'].split(":") + ['00']
                tb = int(tpb[0])*60 + int(tpb[1])
                return cmp(ta,tb)
            return d
        events.sort(s)
        events = [Event(e, _barcamp = self) for e in events]
        return events
    @property
    def ticketlist(self):
        """return a list of all ticket classes and whether they are full or not"""
        ub = self._collection.md.app.module_map.userbase
        tickets = [TicketClass(tc, _barcamp = self, _userbase = ub) for tc in self.ticket_classes]
        return tickets
    def get_ticket_class(self, tc_id):
        """return a ticket class by it's id or None if it does not exist"""
        for tc in self.ticket_classes:
            if tc['_id'] == tc_id:
                return TicketClass(tc, _barcamp = self, _userbase = self._collection.md.app.module_map.userbase)
        return None
    def update_ticket_class(self, tc):
        """update an existing ticket class by searching for it in the list and replacing it

        Replaces at most the first entry whose ``_id`` matches; a no-op if
        no entry matches.
        """
        i = 0
        tc_data = tc.schema.serialize(tc)
        for i in range(0,len(self.ticket_classes)):
            if self.ticket_classes[i]['_id'] == tc._id:
                self.ticket_classes[i] = tc_data
                return
    def delete_ticket_class(self, tc):
        """delete an existing ticket class by searching for it in the list and removing it

        Removes at most the first matching entry (returns immediately after
        the ``del``, so iterating the mutated list is safe here).
        """
        i = 0
        tc_data = tc.schema.serialize(tc)
        for i in range(0,len(self.ticket_classes)):
            if self.ticket_classes[i]['_id'] == tc._id:
                del self.ticket_classes[i]
                return
    def get_tickets_for_user(self, user_id, status=["confirmed", "pending", "cancel_request"]):
        """return all the ticket class ids which a users owns
        :param user_id: the user id of the user
        :param status: the status which is either a string or a list of strings
            (NOTE: mutable default -- never mutated here, only passed along)
        :return: a list of ticket classes
        """
        tickets = self._collection.md.app.config.dbs.tickets
        return tickets.get_tickets(user_id = user_id, barcamp_id = self._id, status = status)
    @property
    def paid_allowed(self):
        """check if all necessary fields are present for paid mode

        Requires a contact email plus imprint, ToS and cancel policy each
        longer than 20 characters (after stripping whitespace).
        """
        return self.contact_email \
            and len(self.imprint.strip())>20 \
            and len(self.tos.strip())>20 \
            and len(self.cancel_policy.strip())>20
    @property
    def publish_not_allowed(self):
        """check if publishing a barcamp is allowed. Reasons will be listed in the resulting array. False means
        barcamp can be published

        NOTE: the return type is mixed -- ``False`` when ticket mode is off,
        otherwise a (possibly empty) list of reason dicts.
        """
        if not self.ticketmode_enabled:
            return False
        reasons = []
        if not self.contact_email:
            reasons.append({
                'reason' : T('The contact email is not set'),
                'url' : 'legaledit'
            })
        if not len(self.imprint.strip())>20:
            reasons.append({
                'reason' : T('The Imprint is missing'),
                'url' : 'legaledit'
            })
        if not len(self.tos.strip())>20:
            reasons.append({
                'reason' : T('The terms of service are missing'),
                'url' : 'legaledit'
            })
        if not len(self.cancel_policy.strip())>20:
            reasons.append({
                'reason' : T('The cancel policy is missing'),
                'url' : 'legaledit'
            })
        return reasons
    @property
    def has_imprint(self):
        """return whether the barcamp has a proper imprint or not
        basically it needs to be bigger than 20 chars
        """
        return len(self.imprint.strip())>20
    @property
    def registration_active(self):
        """check if registration is active by checking workflow state and end date"""
        if self.workflow != "registration":
            return False
        # check date
        # registration closes once the barcamp's end date has passed
        today = datetime.date.today()
        if today > self.end_date:
            return False
        return True
    def is_registered(self, user, states=['going', 'maybe', 'waiting']):
        """check if the given user is registered in any event of this barcamp
        :param user: the user object to test
        :param states: give the list of states which count as registered (defaults to all)
        :returns: ``True`` or ``False``
        """
        if user is None:
            return False
        uid = user.user_id
        for event in self.eventlist:
            if uid in event.participants and 'going' in states:
                return True
            elif uid in event.maybe and 'maybe' in states:
                return True
            elif uid in event.waiting_list and 'waiting' in states:
                return True
        return False
    @property
    def live_event(self):
        """returns the active event or None

        An event counts as live if it takes place today and has at least one
        room configured.
        """
        today = datetime.date.today()
        today = datetime.datetime.combine(today, datetime.time.min)
        for event in self.eventlist:
            if event.date == today:
                if len(event.rooms)>0:
                    return event
        return None
    @property
    def public(self):
        """return whether the barcamp is public or not"""
        return self.workflow in ['public', 'registration', 'canceled']
    def add_admin(self, user):
        """add a new admin to the invited admins list

        NOTE(review): appends to ``admins`` without a duplicate check,
        unlike ``activate_admin`` -- confirm intended.
        """
        self.admins.append(unicode(user._id))
    def remove_admin_by_id(self, user_id):
        """remove an admin from the list of admins but only if the list is not empty then and the
        creator of the barcamp is still on it."""
        # silently refuse to remove the last remaining admin
        if len(self.admins)<2:
            return
        self.admins.remove(unicode(user_id))
    def activate_admin(self, user):
        """activate an admin by moving from the invited to the actual list"""
        if user._id in self.invited_admins:
            self.invited_admins.remove(user._id)
            if user._id not in self.admins:
                self.admins.append(unicode(user._id))
    @property
    def admin_users(self):
        """return a list of user objects of the admin users"""
        ub = self._collection.md.app.module_map.userbase
        return list(ub.get_users_by_ids(self.admins))
    @property
    def subscriber_users(self):
        """return a list of user objects of the subscribed users"""
        ub = self._collection.md.app.module_map.userbase
        users = []
        for uid in self.subscribers:
            users.append(ub.get_user_by_id(uid))
        return users
    @property
    def event(self):
        """returns the main event object or None in case there is no event

        NOTE(review): currently stubbed to always return ``{}``; everything
        after the first return is unreachable dead code (and ``raise
        NotImplemented`` would be a TypeError anyway -- NotImplemented is
        not an exception class).
        """
        return {}
        raise NotImplemented
        if self.events == []:
            return None
        event = self.events[0]
        event._barcamp = self
        return event
    def get_events(self):
        """return the events wrapped in the ``Event`` class

        NOTE(review): iterates ``self.events`` (a dict), i.e. wraps the
        event *ids*, not the event payloads -- confirm intended.
        """
        return [Event(e, _barcamp = self) for e in self.events]
    def add_event(self, event):
        """add an event

        Assigns a fresh uuid as ``_id`` when the event has none yet.
        """
        if event.get("_id", None) is None:
            eid = event['_id'] = unicode(uuid.uuid4())
        else:
            eid = event['_id']
        self.events[eid] = event
        return event
    @property
    def state(self):
        """the same as the event state which we compute here for the main event.
        If multiple events are possible in the future then this will check all of the events
        If no event is present, ``planning`` will be returned.

        NOTE(review): ``self.event`` is stubbed to return ``{}`` (never
        None), so the fallthrough accesses ``{}.state`` which raises
        AttributeError -- confirm this property is still in use.
        """
        if self.event is None:
            return "planning"
        return self.event.state
    def subscribe(self, user):
        """subscribe a user to the barcamp

        Persists the record immediately via ``put()``.
        """
        uid = unicode(user._id)
        if uid not in self.subscribers:
            self.subscribers.append(uid)
        self.put()
    def unsubscribe(self, user):
        """unsubscribe a user from the barcamp

        Persists the record immediately via ``put()``.
        """
        uid = unicode(user._id)
        if uid in self.subscribers:
            self.subscribers.remove(uid)
        self.put()
    def set_nl_reply_to(self, email_address):
        """set up a new reply to address for the newsletter
        you need to save the barcamp afterwards
        :param email_address: the email address to be set
        :returns: a uuid code for sending to the user to verify
        """
        # the new address stays in the staging field until verified
        self.newsletter_reply_to2 = email_address
        self.newsletter_rt_code = unicode(uuid.uuid4())
        return self.newsletter_rt_code
    def verify_nl_reply_to(self, code):
        """verify the reply to verification code
        returns True if it's ok and will set the new reply to address then
        you need to save the barcamp object afterwards
        """
        if self.newsletter_rt_code == code:
            # promote the staged address and clear the staging fields
            self.newsletter_reply_to = self.newsletter_reply_to2
            self.newsletter_rt_code = ""
            self.newsletter_reply_to2 = ""
            return True
        else:
            return False
    def remove_nl_reply_to(self):
        """remove the reply to address"""
        self.newsletter_reply_to = ""
class Barcamps(Collection):
    """Collection of ``Barcamp`` records with barcamp specific queries
    and serialization hooks."""
    data_class = Barcamp
    def by_slug(self, slug):
        """find a barcamp by slug"""
        return self.find_one({'slug' : slug})
    def get_by_user_id(self, user_id, participate=True, maybe=False, waiting=False):
        """return all the barcamps the user is participating in
        :param participate: returns the ones the user is in a participants list
        :param maybe: returns the ones the user is in a maybe list
        :param waiting: returns the ones the user is on the waiting list for
        """
        # NOTE(review): ``user_id`` is interpolated directly into the JS map
        # function source -- callers must only pass trusted/sanitized ids.
        map = Code("""
            function () {
                var uid = '%s';
                for (var eid in this.events) {
                    var event = this.events[eid];
                    if (%s && event.participants.indexOf(uid)>-1) {emit(this._id, 1); }
                    if (%s && event.maybe.indexOf(uid)>-1) {emit(this._id, 1); }
                    if (%s && event.waiting_list.indexOf(uid)>-1) {emit(this._id, 1); }
                }
            }
        """ %( user_id,
            'true' if participate else 'false',
            'true' if maybe else 'false',
            'true' if waiting else 'false',
            )
        )
        # sum up the emitted 1s per barcamp id; only the keys are used below
        reduce = Code("""
            function(key, values) {
                var total = 0;
                for (var i=0; i < values.length; i++) {
                    total += values[i];
                }
                return total;
            }
        """)
        result = self.collection.inline_map_reduce(map, reduce)
        ids = [u['_id'] for u in result]
        query = {'_id' : {'$in' : ids}}
        return self.find(query).sort("end_date", pymongo.DESCENDING)
    def before_serialize(self, obj):
        """make sure we have all required data for serializing

        Three passes over the barcamp: compile embeds into ``description2``,
        drop sessions whose room/timeslot vanished, and ensure every session
        has a stable ``sid`` and a unique ``slug``.
        """
        ###
        ### process the embed urls
        ###
        def do_embed(x):
            # replace a matched URL with its embed HTML; keep the original
            # text when no embed plugin handled the URL
            url, t = x.groups()
            html = embedder(url)
            if url == html: # this means no plugin was found
                return x.string[x.start():x.end()]
            return html
        # ``a_re`` is a module level regex matching embeddable links
        obj.description2 = a_re.sub(do_embed, obj.description)
        ###
        ### remove all sessions which have no room or timeslot anymore
        ###
        for event in obj.eventlist:
            tt = event.get('timetable', {})
            rooms = tt.get('rooms', [])
            timeslots = tt.get('timeslots', [])
            all_idxs = [] # list of all possible indexes of room/time
            for room in rooms:
                for timeslot in timeslots:
                    all_idxs.append("%s@%s" %(room['id'], timeslot['time']))
            if 'sessions' in tt:
                # keep only sessions whose "room@time" index still exists
                sessions = {}
                for idx, session in tt['sessions'].items():
                    if idx in all_idxs:
                        sessions[idx] = session
                event['timetable']['sessions'] = sessions
        ###
        ### fix all the sids and slugs in the session plan
        ###
        for event in obj.eventlist:
            sessions = event.get('timetable', {}).get('sessions', {})
            # dict with all session slugs and their id except the new ones
            all_slugs = dict([(s['slug'], s['sid']) for s in sessions.values() if s['slug'] is not None])
            for session_idx, session in sessions.items():
                # compute sid if missing
                if session.get("sid", None) is None:
                    session['sid'] = unicode(uuid.uuid4())
                # compute slug if missing
                slugify = UniqueSlugify(separator='_', uids = all_slugs.keys(), max_length = 50, to_lower = True)
                orig_slug = session.get("slug", None)
                # we need a new slug if a) the slug is None (new) or
                # b) another session with this slug exists already
                # we can solve all this with .get() as the default is None anyway
                my_sid = all_slugs.get(orig_slug, None)
                if my_sid != session['sid']: # for new ones it's None != xyz
                    new_slug = slugify(session['title'])
                    session['slug'] = new_slug
                    all_slugs[new_slug] = session['sid']
                event['timetable']['sessions'][session_idx] = session
        return obj
| 35,847 | 10,893 |
import sys
import os
import random
import re
import time
import torch
from torch.autograd import Variable
from torch import optim
import torch.nn as nn
#from static_model import StaticModel
from CNNencoder import StaticModel
#from dyna_model import DynamicModel
from dynaMerge import DynamicModel
from data_utils import *
from pathlib import Path
# current OS user name, used to build a per-user save directory
username = Path.home().name
# base directory for persisted conversation models
# NOTE(review): only referenced from commented-out code below -- confirm needed
save_dir = Path(f'./data1/{username}/conversation/')
def init_command_line(argv):
    """Build the training option parser and parse *argv*.

    :param argv: argument list (typically ``sys.argv[1:]``)
    :returns: argparse namespace with all training hyperparameters
    """
    from argparse import ArgumentParser
    parser = ArgumentParser("train")
    # data / corpus options
    parser.add_argument("--w2v_file", type=str, default="./data/train_300e.w2v")
    parser.add_argument("--train_file", type=str, default="./data/train_cornell.txt")
    parser.add_argument("--max_context_size", type=int, default=9)
    parser.add_argument("--max_senten_len", type=int, default=15)
    # model options (note: kernel flags deliberately use a single dash)
    parser.add_argument("--batch_size", type=int, default=80)
    parser.add_argument("--hidden_size", type=int, default=1024)
    parser.add_argument("--type_model", type=int, default=1)
    parser.add_argument('-kernel_sizes', type=str, default='2,3')
    parser.add_argument('-kernel_num', type=int, default=512)
    parser.add_argument('-static', action='store_true', default=False)
    # optimization options
    parser.add_argument("--lr", type=float, default=0.001)
    parser.add_argument("--weight_decay", type=float, default=1e-5)
    parser.add_argument("--dropout", type=float, default=0.5)
    parser.add_argument("--epochs", type=int, default=30)
    parser.add_argument("--teach_forcing", type=int, default=1)
    parser.add_argument("--shuffle", type=int, default=1)
    # bookkeeping options
    parser.add_argument("--print_every", type=int, default=200)
    parser.add_argument("--save_model", type=int, default=1)
    parser.add_argument("--weights", type=str, default=None)
    return parser.parse_args(argv)
# parse CLI options once at import time; ``opts`` acts as the module wide
# configuration object for the rest of this script
opts = init_command_line(sys.argv[1:])
# echo the full configuration so training runs are self-documenting in logs
print ("Configure:")
print (" train_file:",opts.train_file)
print (" w2v_file:",opts.w2v_file)
print (" max_context_size:",opts.max_context_size)
print (" batch_size:",opts.batch_size)
print (" hidden_size:",opts.hidden_size)
print (" max_senten_len:",opts.max_senten_len)
if opts.type_model:
	print (" static model")
else:
	print (" dynamic model")
print(" kernel_sizes:", opts.kernel_sizes)
print(" kernel_num:", opts.kernel_num)
print(" static embedding:", opts.static)
print (" learning rate:",opts.lr)
print (" weight_decay:",opts.weight_decay)
print (" dropout:",opts.dropout)
print (" epochs:",opts.epochs)
print (" teach_forcing:",opts.teach_forcing)
print (" shuffle:",opts.shuffle)
print (" print_every:",opts.print_every)
print (" save_model:",opts.save_model)
print (" weights:",opts.weights)
print ("")
# convert the comma separated kernel size string into a list of ints
opts.kernel_sizes = [int(k) for k in opts.kernel_sizes.split(',')]
# NOTE(review): this prints type(...) i.e. "<class 'list'>", not the values
# -- looks like a debug leftover, confirm before removing
print(" kernel_sizes_list:", type(opts.kernel_sizes))
def save_epoch_model(statedict, save_path, epoch):
    """Persist a model state dict as ``<save_path>/<epoch+1>.pkl``.

    :param statedict: model state (e.g. from ``model.state_dict()``)
    :param save_path: target directory; created (including parents) if missing
    :param epoch: zero-based epoch index; the file name is 1-based
    """
    epoch = epoch + 1
    # makedirs handles nested paths and concurrent creation, unlike bare mkdir
    os.makedirs(save_path, exist_ok=True)
    ckpt_path = os.path.join(save_path, f'{epoch}.pkl')
    print(f'Save parameters to {ckpt_path}')
    torch.save(statedict, ckpt_path)
def train_batch(reply_tensor_batch,contexts_tensor_batch,pad_matrix_batch,model,model_optimizer,criterion,ini_idx):
	"""Run one optimization step on a single batch and return its summed loss.

	The loss is accumulated per reply sequence in the batch before a single
	backward pass. Requires CUDA (targets are moved with ``.cuda()``).
	NOTE: ``Variable`` and ``loss.data[0]`` are pre-0.4 PyTorch APIs --
	keep the installed torch version in sync with this code.
	"""
	loss = 0
	model_optimizer.zero_grad()
	list_pred = model(reply_tensor_batch,contexts_tensor_batch,pad_matrix_batch,ini_idx)
	# sum per-sequence losses so one backward() covers the whole batch
	for idx,reply_tensor in enumerate(reply_tensor_batch):
		loss_s = criterion(list_pred[idx],Variable(reply_tensor).cuda())
		loss += loss_s
	loss.backward()
	model_optimizer.step()
	return loss.data[0]
def train_model(word2index,ini_idx,corpus_pairs,model,model_optimizer,criterion,epochs,
				batch_size,max_senten_len,max_context_size,print_every,save_model,shuffle):
	"""Run the full training loop, printing progress and checkpointing.

	For every epoch the corpus is re-batched, each batch is trained via
	``train_batch`` and, when ``save_model`` is truthy, the model state is
	written to ``./<dataset>_<static|dynamic>_parameters/<epoch>.pkl``.
	The dataset name and model type are derived from the module level
	``opts`` (``train_file`` and ``type_model``).
	"""
	print ("start training...")
	model.train()
	state_loss = 10000.0
	for ei in range(epochs):
		print ("Iteration {}: ".format(ei+1))
		epoch_loss = 0
		every_loss = 0
		t0 = time.time()
		pairs_batches,num_batches = buildingPairsBatch(corpus_pairs,batch_size,shuffle=shuffle)
		print ("num_batches:",num_batches)
		idx_batch = 0
		for reply_tensor_batch, contexts_tensor_batch, pad_matrix_batch in getTensorsPairsBatch(word2index,pairs_batches,max_context_size):
			loss = train_batch(reply_tensor_batch,contexts_tensor_batch,pad_matrix_batch,model,model_optimizer,criterion,ini_idx)
			epoch_loss += loss
			every_loss += loss
			if (idx_batch+1)%print_every == 0:
				# running (not windowed) average over all batches so far
				every_avg_loss = every_loss/(max_senten_len*(idx_batch+1))
				print ("{} batches finished, avg_loss:{}".format(idx_batch+1, every_avg_loss))
			idx_batch += 1
		epoch_avg_loss = epoch_loss/(max_senten_len*num_batches)
		print ("epoch_avg_loss:",epoch_avg_loss)
		if save_model:# and epoch_avg_loss < state_loss:
			print ("save model...")
			# derive the checkpoint directory from dataset name and model type
			# (collapses the previously duplicated per-dataset branches)
			if "cornell" in opts.train_file:
				dataset = "cornell"
			elif "ubuntu" in opts.train_file:
				dataset = "ubuntu"
			else:
				dataset = "opensubtitles"
			model_kind = "static" if opts.type_model else "dynamic"
			save_epoch_model(model.state_dict(),
							 "./{}_{}_parameters".format(dataset, model_kind), ei)
			# kept for the (commented out) best-loss gating above
			state_loss = epoch_avg_loss
		print ("Iteration time:",time.time()-t0)
		print ("=============================================" )
		print ("")
if __name__ == '__main__':
	# special tokens: sequence-start marker and unknown-word placeholder
	ini_char = '</i>'
	unk_char = '<unk>'
	t0 = time.time()
	print ("loading word2vec...")
	ctable = W2vCharacterTable(opts.w2v_file,ini_char,unk_char)
	print(" dict size:",ctable.getDictSize())
	print ("  emb size:",ctable.getEmbSize())
	print ("")
	# reading the corpus may extend the character table, hence it is returned
	ctable,corpus_pairs = readingData(ctable,opts.train_file,opts.max_senten_len,opts.max_context_size)
	print (time.time()-t0)
	print ("")
	# type_model selects between the CNN-encoder (static) and merge (dynamic)
	# architectures; both require CUDA
	if opts.type_model:
		# model = StaticModel(ctable.getDictSize(),ctable.getEmbSize(),opts.hidden_size,opts.batch_size,opts.dropout,
		# 					opts.max_senten_len,opts.teach_forcing).cuda()
		model = StaticModel(ctable.getDictSize(),ctable.getEmbSize(),opts.hidden_size,opts.batch_size,opts.dropout,opts.max_senten_len,opts.teach_forcing,opts.kernel_num,opts.kernel_sizes).cuda()
	else:
		model = DynamicModel(ctable.getDictSize(),ctable.getEmbSize(),opts.hidden_size,opts.batch_size,opts.dropout,
							opts.max_senten_len,opts.teach_forcing, opts.kernel_num, opts.kernel_sizes).cuda()
	# either resume from a checkpoint or initialize from the w2v embeddings
	if opts.weights != None:
		print ("load weights...")
		model.load_state_dict(torch.load(opts.weights))
	else:
		model.init_parameters(ctable.getEmbMatrix())
	model_optimizer = optim.Adam(model.parameters(), lr=opts.lr, weight_decay=opts.weight_decay)
	# NLLLoss implies the model outputs log-probabilities
	criterion = nn.NLLLoss()
	print ("")
	word2index = ctable.getWord2Index()
	ini_idx = word2index[ini_char]
	train_model(word2index,ini_idx,corpus_pairs,model,model_optimizer,criterion,opts.epochs,opts.batch_size,
				opts.max_senten_len,opts.max_context_size,opts.print_every,opts.save_model,opts.shuffle)
	print ("")
| 9,123 | 3,075 |
import math
import numpy as np
from experiment.utils.training import BatchedTraining
class SentenceClassificationBatchedTraining(BatchedTraining):
    """Mini-batch training driver for sentence classification.

    Shuffles the training data once per epoch and serves consecutive
    batches through ``get_feed_dict``.
    """

    def __init__(self, config, config_global, logger):
        super(SentenceClassificationBatchedTraining, self).__init__(config, config_global, logger)
        # per-epoch state, populated by prepare_next_epoch()
        self.n_batches = None
        self.data = None
        self.epoch_shuffle_indices = None
        self.batch_i = 0

    def get_feed_dict(self, model, data, sess):
        """Build the feed dict for the next training batch."""
        sents, labels = self.get_next_batch(model, data, sess)
        feed = {
            model.input_sent: sents,
            model.input_label: labels,
            model.dropout_keep_prob: self.dropout_keep_prob,
        }
        return feed

    def prepare_next_epoch(self, model, data, sess, epoch):
        """Reset batching state and (optionally) decay the learning rate."""
        self.epoch_learning_rate = self.initial_learning_rate
        if self.dynamic_learning_rate:
            # simple 1/epoch decay
            self.epoch_learning_rate /= float(epoch)
        # ceiling division: the last batch may be smaller than batchsize
        self.n_batches = (len(data.train) + self.batchsize - 1) // self.batchsize
        if self.data is None:
            self.data = data.train
        self.epoch_shuffle_indices = np.random.permutation(len(self.data))
        self.batch_i = 0

    def get_n_batches(self):
        """Number of batches in the current epoch."""
        return self.n_batches

    def get_next_batch(self, model, data, sess):
        """Return the training data for the next batch
        :return: questions, good answers, bad answers
        :rtype: list, list, list
        """
        start = self.batch_i * self.batchsize
        chosen = self.epoch_shuffle_indices[start:start + self.batchsize]
        batch_data = [self.data[j] for j in chosen]
        self.batch_i += 1
        # transpose of zip(batch_data)
        return zip(*batch_data)
# module-level hook; presumably looked up by the experiment framework when
# loading this training component -- confirm against the loader
component = SentenceClassificationBatchedTraining
| 1,770 | 557 |
import torch
from spec import constants
from spec.explainers.explainer import Explainer
from spec.explainers.utils import filter_word_ids_with_non_zero_probability
class RandomAttentionExplainer(Explainer):
    """Baseline explainer that produces attention weights from a chosen
    random/degenerate strategy instead of learned attention, then emits a
    message built from the top-k attended words."""

    def __init__(self, fields_tuples, options):
        super().__init__(fields_tuples)
        self.words_vocab_size = len(self.fields_dict['words'].vocab)
        self.explainer_attn_top_k = options.explainer_attn_top_k
        self.message_type = options.message_type
        # options.word_embeddings_size is updated in the classifier constructor
        # when a path to pretrained embeddings is passed
        self.emb_size = options.word_embeddings_size
        # which degenerate attention strategy to use (see forward())
        self.random_type = options.explainer_random_type
        # filled by forward(); kept so callers can recover the chosen words
        self.valid_top_word_ids = None

    def build_loss(self, loss_weights=None):
        # this explainer is not trained, so there is no loss to build
        self._loss = None

    def forward(self, batch, classifier):
        # generate random attn_weights according to self.random_type;
        # padding positions are always zeroed via the PAD mask
        if self.random_type == 'beta':
            # i.i.d. Beta(5,5) weights per token (not normalized)
            mask = torch.ne(batch.words, constants.PAD_ID)
            beta = torch.distributions.beta.Beta(5.0, 5.0)
            attn_weights = beta.sample(batch.words.shape)
            attn_weights = attn_weights.squeeze(-1).to(batch.words.device)
            attn_weights[mask == 0] = 0
        elif self.random_type == 'uniform':
            # uniform weights normalized to sum to 1 per sequence
            mask = torch.ne(batch.words, constants.PAD_ID)
            attn_weights = torch.rand(batch.words.shape).to(batch.words.device)
            attn_weights = attn_weights / attn_weights.sum(-1).unsqueeze(-1)
            attn_weights[mask == 0] = 0
        elif self.random_type == 'zero_max_out':
            # real classifier attention with its single peak zeroed out
            _ = classifier(batch)
            attn_weights = classifier.attn_weights.squeeze()
            arange = torch.arange(attn_weights.shape[0]).to(attn_weights.device)
            # maybe we can try zero out k max?
            _, max_idxs = torch.topk(attn_weights, k=1, dim=-1)
            attn_weights[arange, max_idxs.squeeze()] = 0
        elif self.random_type == 'first_states':
            # linearly decreasing weights: earliest tokens weighted highest
            mask = torch.ne(batch.words, constants.PAD_ID)
            _ = classifier(batch)
            bs, ts = batch.words.shape
            attn_weights = torch.arange(ts, 0, -1).repeat(bs, 1).float()
            attn_weights = attn_weights.to(batch.words.device)
            attn_weights = attn_weights / ts
            attn_weights[mask == 0] = 0
        elif self.random_type == 'last_states':
            # linearly increasing weights: latest tokens weighted highest
            mask = torch.ne(batch.words, constants.PAD_ID)
            _ = classifier(batch)
            bs, ts = batch.words.shape
            attn_weights = torch.arange(1, ts + 1).repeat(bs, 1).float()
            attn_weights = attn_weights.to(batch.words.device)
            attn_weights = attn_weights / ts
            attn_weights[mask == 0] = 0
        elif self.random_type == 'mid_states':
            # triangular profile peaking at the middle of each sequence
            # (NOTE: unlike the other branches these weights are not scaled)
            mask = torch.ne(batch.words, constants.PAD_ID)
            lengths = mask.int().sum(-1).tolist()
            bs, ts = batch.words.shape
            attn_weights = torch.zeros(bs, ts).to(batch.words.device)
            for i, ell in enumerate(lengths):
                attn_weight_left = torch.arange(1, ell // 2 + 1)
                attn_weight_right = torch.arange(ell // 2, 0, -1)
                w = [attn_weight_left]
                if ell % 2 != 0:
                    # odd length: insert the single peak value in the middle
                    attn_weight_mid = torch.tensor([(ell + 1) // 2])
                    w.append(attn_weight_mid)
                w.append(attn_weight_right)
                concat_tensors = torch.cat(w).to(attn_weights.device)
                attn_weights[i, :ell] = concat_tensors
            attn_weights = attn_weights.float()
        else:  # shuffle
            # real classifier attention, permuted within each sequence length
            _ = classifier(batch)
            attn_weights = classifier.attn_weights.squeeze()
            mask = torch.ne(batch.words, constants.PAD_ID)
            lengths = mask.int().sum(-1).tolist()
            for i in range(attn_weights.shape[0]):
                valid_random_idx = torch.arange(attn_weights.shape[1])
                idx = torch.randperm(lengths[i])
                valid_random_idx[:lengths[i]] = idx
                attn_weights[i] = attn_weights[i, valid_random_idx]
        # find the topk attn weights using 1 < k < seq_len
        k = min(self.explainer_attn_top_k, attn_weights.shape[-1])
        top_probas, top_idxs = torch.topk(attn_weights, k, dim=-1)
        # recover the word ids from the top indexes
        top_word_ids = batch.words.gather(1, top_idxs)
        # what to do when top ids map to pad ids? or when
        # it returns instances zeroed out by sparsity?
        # for now, hard coded in pure python: filter out these entries
        valid_top_word_ids = filter_word_ids_with_non_zero_probability(
            top_word_ids, top_probas, pad_id=constants.PAD_ID
        )
        # save for getting the words later
        self.valid_top_word_ids = valid_top_word_ids
        # create the message
        message = self.make_message(
            valid_top_word_ids, top_probas, classifier.word_emb
        )
        # create a time dimension of size 1
        message = message.unsqueeze(1)
        return message
| 5,096 | 1,705 |
import pygame
import os
import time
from spaceship import *
from pygame.locals import *
# constants
WIDTH, HEIGHT = 700, 800
# NOTE: the display window is created at import time as a side effect
WINDOW = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption("Failure is Inevitable")
BLACK = (0, 0, 0)
FPS = 60
VEL = 4
# NOTE(review): the values below look unused at module level (main() defines
# its own copies) -- confirm before removing
p_x = 330
p_y = 650
width = 50
height = 50
movement = False
class Player(pygame.sprite.Sprite):
def __init__(self):
super().__init__()
self.sprite = [pygame.image.load(
os.path.join("game_jam", "Assets",
'Spaceship', 'ship', 'ship3.png')),
pygame.image.load(
os.path.join("game_jam", "Assets",
'Spaceship', 'ship', 'ship4.png')),
pygame.image.load(
os.path.join("game_jam", "Assets",
'Spaceship', 'ship', 'ship5.png')),
pygame.image.load(
os.path.join("game_jam", "Assets",
'Spaceship', 'ship', 'ship6.png')),
pygame.image.load(
os.path.join("game_jam", "Assets",
'Spaceship', 'ship', 'ship7.png')),
pygame.image.load(
os.path.join("game_jam", "Assets",
'Spaceship', 'ship', 'ship8.png')),
pygame.image.load(
os.path.join("game_jam", "Assets",
'Spaceship', 'ship', 'ship9.png')),
pygame.image.load(
os.path.join("game_jam", "Assets",
'Spaceship', 'ship', 'ship10.png')),
pygame.image.load(
os.path.join("game_jam", "Assets",
'Spaceship', 'ship', 'ship1.png')),
pygame.image.load(
os.path.join("game_jam", "Assets",
'Spaceship', 'ship', 'ship2.png'))]
self.current_sprite = 0
self.image = self.sprite[self.current_sprite]
self.rect = self.image.get_rect(center=(WIDTH/2, HEIGHT-100))
def update(self):
self.current_sprite += 0.3
if self.current_sprite >= len(self.sprite):
self.current_sprite = 0
self.image = self.sprite[int(self.current_sprite)]
self.rect.center = pygame.mouse.get_pos()
def create_bullet(self):
return Bullet(pygame.mouse.get_pos()[0], pygame.mouse.get_pos()[1])
# bullet parameters
class Bullet(pygame.sprite.Sprite):
    """Laser shot moving straight up; removes itself above the screen."""

    def __init__(self, pos_x, pos_y):
        super().__init__()
        laser_path = os.path.join('game_jam', 'Assets',
                                  'Laser Animations', 'laser1.png')
        self.image = pygame.image.load(laser_path)
        self.rect = self.image.get_rect(center=(pos_x, pos_y))

    def update(self):
        """Move 5px up per frame; kill the sprite once past the top edge."""
        self.rect.y -= 5
        if self.rect.y <= 0:
            self.kill()
# player and bullet groups (module-level so main() can use them)
player = Player()
player_group = pygame.sprite.Group()
player_group.add(player)
pygame.mouse.set_visible(False)  # the animated ship replaces the cursor
bullet_group = pygame.sprite.Group()
# main function
def main():
    """Run the game loop: scrolling background, music, player and bullets.

    Fixes over the original:
    - the whole file had been pasted a second time inside ``main()``, and the
      actual game loop lived in a *nested* ``main()`` that was never called,
      so the game never started;
    - ``player.animate()`` was called on KEYDOWN but ``Player`` defines no
      such method (AttributeError);
    - the music was initialised/loaded twice with conflicting paths;
    - a stray ``main.py`` statement after the __main__ guard raised at runtime.
    """
    # background image, scaled to the window size
    BACKGROUND = pygame.transform.scale(pygame.image.load(os.path.join(
        'game_jam', 'Assets', 'Background', 'Galaxy_bg',
        'Purple_Nebula', 'PN1.png')), (WIDTH, HEIGHT)).convert()
    # music -- played indefinitely at reduced volume
    # NOTE(review): the original loaded two different .wav paths; the OST path
    # below is kept -- confirm it matches the shipped assets.
    pygame.mixer.init()
    pygame.mixer.music.load(os.path.join(
        'game_jam', 'Assets', 'Sounds', 'spaceship_music', 'Far-Out_OST',
        'OST', 'Far-Out-Hurry_Up.wav'))
    pygame.mixer.music.play(-1)  # -1 loops forever
    pygame.mixer.music.set_volume(0.3)
    clock = pygame.time.Clock()
    run = True
    y = 0  # background scroll offset
    while run:
        clock.tick(FPS)
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                run = False
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                run = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                bullet_group.add(player.create_bullet())
        # update background for scrolling effect (two blits stitched vertically)
        WINDOW.fill(BLACK)
        rel_y = y % BACKGROUND.get_rect().height
        WINDOW.blit(BACKGROUND, (0, rel_y - BACKGROUND.get_rect().height))
        if rel_y < HEIGHT:
            WINDOW.blit(BACKGROUND, (0, rel_y))
        y += 1
        # draw and update all sprites, then flip the frame
        bullet_group.draw(WINDOW)
        bullet_group.update()
        player_group.draw(WINDOW)
        player_group.update()
        pygame.display.update()
# starts main function
if __name__ == "__main__":
    main()
| 8,373 | 2,840 |
#
# Code common across shots to handle movement on paths
#
from pymavlink import mavutil
import location_helpers
import shotLogger
from pathHandler import PathHandler
from shotManagerConstants import *
import math
from vector3 import Vector3
logger = shotLogger.logger
# Path accel/decel fallback parameter values; divided by 100 before use in
# VectorPathHandler, presumably cm/s^2 -> m/s^2 -- confirm against vehicle params.
WPNAV_ACCEL = 200
WPNAV_ACCEL_Z = 160
# for 3D max speed (m/s): horizontal and vertical contributions
HIGH_PATH_SPEED = 5.0
LOW_PATH_SPEED = 1.5
MAX_PATH_SPEED = HIGH_PATH_SPEED + LOW_PATH_SPEED
# proportional gain used to correct for drag or other factors
ERROR_P = .01
# special case of PathHandler
class VectorPathHandler(PathHandler):
    """PathHandler that constrains motion to a fixed 3D ray.

    The ray starts at the vehicle's position when the shot begins and points
    along the direction defined by the camera heading and pitch. RC input
    moves the vehicle forward/backward along that ray.
    """
    def __init__(self, vehicle, shotManager, heading, pitch):
        PathHandler.__init__(self, vehicle, shotManager)
        # the initial reference position
        self.initialLocation = vehicle.location.global_relative_frame
        self.heading = heading
        # creates a unit vector from telemetry data
        self.unitVector = self.getUnitVectorFromHeadingAndTilt(heading, pitch)
        # limit speed based on vertical component
        # We can't go full speed vertically
        # this section should be 2.0 to 8.0 m/s
        # to generate a nice speed limiting curve we scale it.
        # pitch is used to generate the vertical portion of the 3d Vector
        pitch = min(pitch, 0) # level
        pitch = max(pitch, -90) # down
        accelXY = shotManager.getParam( "WPNAV_ACCEL", WPNAV_ACCEL ) / 100.0
        accelZ = shotManager.getParam( "WPNAV_ACCEL_Z", WPNAV_ACCEL_Z ) / 100.0
        cos_pitch = math.cos(math.radians(pitch))
        # cos(pitch)^3 blends limits: ~1 when level, ~0 when pointing straight down
        self.maxSpeed = LOW_PATH_SPEED + (cos_pitch**3 * HIGH_PATH_SPEED)
        self.maxSpeed = min(self.maxSpeed, MAX_PATH_SPEED)
        self.accel = accelZ + (cos_pitch**3 * (accelXY - accelZ))
        # convert acceleration into a per-update-tick speed increment
        self.accel *= UPDATE_TIME
        # the current distance from the intitial location
        self.distance = 0.0
        #for synthetic acceleration
        self.currentSpeed = 0.0
        self.desiredSpeed = 0.0
        self.distError = 0.0
    # given RC input, calculate a speed to move along vector
    def move(self, channels):
        """Map RC stick input (and optional cruise speed) to motion along the
        ray, then command the vehicle. Returns the absolute speed (m/s)."""
        # allows travel along the vector
        # use the max of them
        if abs(channels[ROLL]) > abs(channels[PITCH]):
            userInput = channels[ROLL]
        else:
            userInput = -channels[PITCH]
        # user controls speed
        if self.cruiseSpeed == 0.0:
            self.desiredSpeed = userInput * self.maxSpeed
        # cruise control
        else:
            speed = abs(self.cruiseSpeed)
            # if sign of stick and cruiseSpeed don't match then...
            if math.copysign(1, userInput) != math.copysign(1, self.cruiseSpeed): # slow down
                speed *= (1.0 - abs(userInput))
            else: # speed up
                speed += (self.maxSpeed - speed) * abs(userInput)
            # carryover user input sign
            if self.cruiseSpeed < 0:
                speed = -speed
            # limit speed
            if speed > self.maxSpeed:
                speed = self.maxSpeed
            elif -speed > self.maxSpeed:
                speed = -self.maxSpeed
            self.desiredSpeed = speed
        # Synthetic acceleration: step currentSpeed toward desiredSpeed, never overshooting
        if self.desiredSpeed > self.currentSpeed:
            self.currentSpeed += self.accel
            self.currentSpeed = min(self.currentSpeed, self.desiredSpeed)
        elif self.desiredSpeed < self.currentSpeed:
            self.currentSpeed -= self.accel
            self.currentSpeed = max(self.currentSpeed, self.desiredSpeed)
        else:
            self.currentSpeed = self.desiredSpeed
        # the distance to fly along the vectorPath
        self.distance += self.currentSpeed * UPDATE_TIME
        # P-controller: nudge commanded distance toward the measured distance
        self.distance += self.distError * ERROR_P
        # generate Guided mode commands to move the copter
        self.travel()
        # report speed output
        return abs(self.currentSpeed)
    def travel(self):
        ''' generate a new location from our distance offset and initial position '''
        # the location of the vehicle in meters from the origin
        offsetVector = self.unitVector * self.distance
        # Scale unit vector by speed
        velVector = self.unitVector * self.currentSpeed
        # Convert NEU to NED velocity
        #velVector.z = -velVector.z
        # NOTE(review): the NEU->NED flip above is commented out, yet velVector.z
        # is sent below in the 'Down velocity' slot -- verify the sign convention.
        # generate a new Location from our offset vector and initial location
        loc = location_helpers.addVectorToLocation(self.initialLocation, offsetVector)
        # calc dot product so we can assign a sign to the distance
        vectorToTarget = location_helpers.getVectorFromPoints( self.initialLocation, self.vehicle.location.global_relative_frame)
        dp = self.unitVector.x * vectorToTarget.x
        dp += self.unitVector.y * vectorToTarget.y
        dp += self.unitVector.z * vectorToTarget.z
        self.actualDistance = location_helpers.getDistanceFromPoints3d(self.initialLocation, self.vehicle.location.global_relative_frame)
        if (dp < 0):
            self.actualDistance = -self.actualDistance
        # We can now compare the actual vs vector distance
        self.distError = self.actualDistance - self.distance
        # formulate mavlink message for pos-vel controller
        posVelMsg = self.vehicle.message_factory.set_position_target_global_int_encode(
            0, # time_boot_ms (not used)
            0, 1, # target system, target component
            mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, # frame
            0b0000110111000000, # type_mask - enable pos/vel
            int(loc.lat * 10000000), # latitude (degrees*1.0e7)
            int(loc.lon * 10000000), # longitude (degrees*1.0e7)
            loc.alt, # altitude (meters)
            velVector.x, velVector.y, velVector.z, # North, East, Down velocity (m/s)
            0, 0, 0, # x, y, z acceleration (not used)
            0, 0) # yaw, yaw_rate (not used)
        # send pos-vel command to vehicle
        self.vehicle.send_mavlink(posVelMsg)
    def getUnitVectorFromHeadingAndTilt(self, heading, tilt):
        ''' generate a vector from the camera gimbal '''
        # convert compass heading (deg, 0 = North, CW) to math angle (rad, 0 = East, CCW)
        angle = math.radians(90 - heading)
        tilt = math.radians(tilt)
        # create a vector scaled by tilt
        x = math.cos(tilt)
        # Rotate the vector
        nx = x * math.cos(angle)
        ny = x * math.sin(angle)
        # Up
        z = math.sin(tilt)
        # NOTE(review): components returned as (ny, nx, z), which reads as
        # (North, East, Up) given the angle conversion above -- confirm.
        return Vector3(ny, nx, z)
| 6,529 | 1,994 |
from django.apps import AppConfig
class TransferAppConfig(AppConfig):
    """Django application configuration for the ``transfer_app`` app."""
    name = 'transfer_app'
| 98 | 30 |
import asyncio
from imjoy_rpc import api
class ImJoyPlugin:
    """Minimal ImJoy plugin: logs on setup, shows an alert and dialog on run."""
    async def setup(self):
        # Called once by the ImJoy runtime after the plugin connects.
        await api.log("plugin initialized")
    async def run(self, ctx):
        # Called when the user launches the plugin; `ctx` is the ImJoy context.
        await api.alert("hello")
        await api.showDialog(type="external", src="https://imjoy.io")
if __name__ == "__main__":
    # Export the plugin to a locally served ImJoy instance.
    api.export(ImJoyPlugin(), {"debug": True, "url": "http://localhost:9988"})
| 375 | 128 |
from abc import ABC, abstractmethod
class MorePlugin(ABC):
    '''
    A plugin that represents an extra action the user can take on the 'more' prompt.
    '''
    @abstractmethod
    def get_keys(self):
        ''' Returns a list of the keys the user has to enter to trigger this action. '''
        pass
    @abstractmethod
    def build_page(self, page_builder, key_pressed, arguments):
        '''
        Called when the user presses one of the keys to trigger this action.
        Arguments:
        ----------
        page_builder:
            The MorePageBuilder instance.
        key_pressed:
            The key the user pressed to trigger this action.
        arguments:
            A dictionary of arguments the user entered on this line before triggering
            this action.
            By default, the only value that can be in there is 'count',
            which will be set if the user entered a number before your action.
            For example, if the user entered '10 ' then the '<space>' action is triggered
            with argument {'count': 10}.
        '''
        pass
    def wrap_page(self, page):
        '''
        Called when a new page is created.
        Gives the plugin the opportunity to return a wrapper page that can be used to
        modify/register _every_ line, including the ones that are suppressed by other
        plugins. Example usage is counting all the outputted lines.
        Must return a 'Page'. Implementing this method is optional.
        '''
        return page
    @abstractmethod
    def get_help(self):
        '''
        Returns an iterator over 'command', 'help-text' tuples that describe how to use
        this plugin.
        Example:
        yield (' ', 'Display next line of text')
        '''
        pass
| 1,877 | 437 |
#!/usr/bin/env python3
import argparse
import csv
import datetime
import io
import json
import threading

import requests
# DIVI endpoints: JSON report per Bundesland, CSV export per Landkreis
BL_API = 'https://www.intensivregister.de/api/public/reporting/laendertabelle'
LK_API = 'https://diviexchange.blob.core.windows.net/%24web/DIVI_Intensivregister_Auszug_pro_Landkreis.csv'
# Maps state abbreviations to the state names used by the DIVI API
BL_DICT = {'BW': 'BADEN_WUERTTEMBERG','BY' : 'BAYERN','BE': 'BERLIN','BB': 'BRANDENBURG','HB': 'BREMEN','HH': 'HAMBURG','HE': 'HESSEN','MV': 'MECKLENBURG_VORPOMMERN','NI': 'NIEDERSACHSEN','NW': 'NORDRHEIN_WESTFALEN','RP': 'RHEINLAND_PFALZ','SL': 'SAARLAND','SN': 'SACHSEN','ST': 'SACHSEN_ANHALT','SH': 'SCHLESWIG_HOLSTEIN','TH': 'THUERINGEN'}
GS_DICT = {}
# Maps Landkreis/Stadt display names to Gemeindeschluessel; the JSON file
# must ship alongside this script (loading fails at import time otherwise).
with open('ags-dict.json', encoding='utf-8') as json_file:
    GS_DICT = json.load(json_file,)
class IntensivregisterUpdate:
    """Fetch and query DIVI Intensivregister occupancy data.

    Bundesland-level data comes from the JSON reporting API; Landkreis-level
    data comes from the public CSV export. Both are fetched concurrently in
    the constructor.
    """

    def __init__(self):
        self.prefix = ''
        # Bug fix: the original passed the *result* of the call to Thread()
        # (running the downloads sequentially and starting empty threads);
        # `target=` hands the callable to the thread instead.
        th_bl = threading.Thread(target=self.update_bl_data)
        th_lk = threading.Thread(target=self.update_lk_data)
        th_bl.start()
        th_lk.start()
        th_bl.join()
        th_lk.join()

    def update_lk_data(self):
        """Download and parse the per-Landkreis CSV export into self.lk_data."""
        result = requests.get(LK_API)
        self.lk_data = self.parse_csv_to_json(result.text)["data"]

    def update_bl_data(self):
        """Download the per-Bundesland report into self.bl_data."""
        self.bl_data = self.get_data_as_json()

    def get_data_as_json(self):
        """Return the 'data' payload of the Bundesland reporting API."""
        response = requests.get(BL_API)
        return response.json()["data"]

    def _get_bl_field(self, bl, field):
        """Return `field` of the Bundesland abbreviated `bl`, or None if absent."""
        bl_full = BL_DICT[bl]
        for item in self.bl_data:
            if item['bundesland'] == bl_full:
                return item[field]
        return None

    def get_occupancy_by_bl_in_percent(self, bl):
        """Occupancy percentage as reported by the API for one Bundesland."""
        return self._get_bl_field(bl, 'bettenBelegtToBettenGesamtPercent')

    def get_occupancy_by_bl_in_percent_with_7d_emgergancy_beds_in_percent(self, bl):
        """Occupancy percentage for one Bundesland including 7-day emergency beds.

        (Method-name typo kept for backward compatibility with callers.)
        """
        return round(self.get_all_occupied_beds_by_bl(bl) /
                     (self.get_all_beds_by_bl(bl) + self.get_all_emergency_beds_7d_by_bl(bl)) * 100, 1)

    def get_all_beds_by_bl(self, bl):
        """Total ICU beds for one Bundesland."""
        return self._get_bl_field(bl, 'intensivBettenGesamt')

    def get_all_occupied_beds_by_bl(self, bl):
        """Occupied ICU beds for one Bundesland."""
        return self._get_bl_field(bl, 'intensivBettenBelegt')

    def get_all_emergency_beds_7d_by_bl(self, bl):
        """ICU beds that can be activated within 7 days for one Bundesland."""
        return self._get_bl_field(bl, 'intensivBettenNotfall7d')

    def get_all_beds(self):
        """Total ICU beds across all Bundeslaender."""
        return sum(item['intensivBettenGesamt'] for item in self.bl_data)

    def get_all_occupied_beds(self):
        """Occupied ICU beds across all Bundeslaender."""
        return sum(item['intensivBettenBelegt'] for item in self.bl_data)

    def get_all_emergency_beds_7d(self):
        """7-day emergency ICU beds across all Bundeslaender."""
        return sum(item['intensivBettenNotfall7d'] for item in self.bl_data)

    def get_overall_occupancy_in_percent(self):
        """Nationwide occupancy percentage, rounded to one decimal place."""
        return round(self.get_all_occupied_beds() / self.get_all_beds() * 100, 1)

    def get_overall_occupancy_in_percent_with_emergency_beds(self):
        """Nationwide occupancy percentage including 7-day emergency beds."""
        return round(self.get_all_occupied_beds() /
                     (self.get_all_beds() + self.get_all_emergency_beds_7d()) * 100, 1)

    def get_date(self):
        """Creation timestamp of the report (taken from the first entry)."""
        for item in self.bl_data:
            return datetime.datetime.strptime(item['creationTimestamp'], '%Y-%m-%dT%H:%M:%SZ')
        return None

    def parse_csv_to_json(self, csv_as_string):
        """Parse a CSV string into ``{"data": [row_dict, ...]}`` (string values).

        Uses csv.DictReader instead of naive comma-splitting, which also
        handles quoted fields correctly and -- unlike the original
        implementation -- does not silently drop the final data row.
        """
        reader = csv.DictReader(io.StringIO(csv_as_string))
        return {"data": [dict(row) for row in reader]}

    def get_lk_data(self, lk_name):
        """Return the CSV row for a Landkreis/Stadt name, or None if unknown."""
        try:
            gs = GS_DICT[lk_name]
        except KeyError:
            # unknown name -> caller prints a hint via lk_data_formatted(None)
            return None
        for entry in self.lk_data:
            if int(entry["gemeindeschluessel"]) == int(gs):
                return entry
        return None

    def lk_data_formatted(self, lk_data):
        """Format one Landkreis row as 'NN.N% (occupied/total)'."""
        if lk_data is None:
            return "Your Landkreis or Stadt isn't in the list. See -la to list all Landkreise and Städte."
        free_beds = int(lk_data["betten_frei"])
        occupied_beds = int(lk_data["betten_belegt"])
        total_beds = free_beds + occupied_beds
        rate = round(occupied_beds / total_beds * 100, 2)
        return ("{percent}% ({ob}/{ab})").format(percent=rate, ob=occupied_beds, ab=total_beds)

    def lk_data_for_areas(self, areas):
        """Build a multi-line report for a list of area dicts with keys BEZ/GEN."""
        result = ""
        for area in areas:
            BEZ = area["BEZ"]
            GEN = area["GEN"]
            # everything that is not a Landkreis is reported as a Stadt
            if BEZ != "Landkreis":
                BEZ = "Stadt"
            result += "{gen} {bez}: {rate}\n".format(gen=GEN, bez=BEZ, rate=self.lk_data_formatted(self.get_lk_data(GEN + " " + BEZ)))
        return result[:-1]
if __name__ == "__main__":
    # Command-line interface: one reporting mode is used per invocation.
    parser = argparse.ArgumentParser()
    parser.add_argument("-lb", "--listbundeslander", help="Lists all available states and their abbreviations", action="store_true")
    parser.add_argument("-lk", "--landkreis", help="Print Landkreis occupancy rate", type=str)
    parser.add_argument("-s", "--stadt", help="Print Stadt occupancy rate", type=str)
    parser.add_argument("-b", "--bundesland", help="Show the percentage of occupied beds in a specific state. Example: -b BY")
    parser.add_argument("-d", "--deutschland", help="Show the Percentage of all occupied beds in Germany",action="store_true")
    parser.add_argument("-dn", "--deutschlandwithemergency", help="Show the Percentage of all occupied beds in Germany including the 7 day emergency beds",action="store_true")
    parser.add_argument("-bn", "--bundeslandwithemergency", help="Show the percentage of occupied beds in a specific state including the 7 day emergency beds. Example: -bn BY")
    parser.add_argument("-p", "--prefix", help="Print given prefix as String before the actual number. Example: -p 'BY beds' -bn BY")
    parser.add_argument("-la","--listareas", help="Prints all names of the Landreise and Städte",action="store_true")
    parser.add_argument("-a","--areas", help="Receives JSON file with defined areas of interest.")
    # Parse once (the original re-parsed the arguments a second time below).
    args = parser.parse_args()
    iu = IntensivregisterUpdate()
    if args.prefix:
        iu.prefix = args.prefix
    if args.listbundeslander:
        print(json.dumps(BL_DICT,indent=4))
    elif args.bundesland:
        print(iu.prefix + str(iu.get_occupancy_by_bl_in_percent(args.bundesland)))
    elif args.deutschland:
        print(iu.prefix + str(iu.get_overall_occupancy_in_percent()))
    elif args.deutschlandwithemergency:
        print(iu.prefix + str(iu.get_overall_occupancy_in_percent_with_emergency_beds()))
    elif args.bundeslandwithemergency:
        print(iu.prefix + str(iu.get_occupancy_by_bl_in_percent_with_7d_emgergancy_beds_in_percent(args.bundeslandwithemergency)))
    elif args.landkreis:
        result = iu.lk_data_formatted(iu.get_lk_data(args.landkreis + " Landkreis"))
        if result is not None:
            print(iu.prefix + str(result))
    elif args.stadt:
        result = iu.lk_data_formatted(iu.get_lk_data(args.stadt + " Stadt"))
        if result is not None:
            print(iu.prefix + str(result))
    elif args.areas:
        with open(args.areas) as json_file:
            example_area = json.load(json_file)
        result = iu.lk_data_for_areas(example_area)
        print(iu.prefix + str(result))
    elif args.listareas:
        # Alphabetical list of every known Landkreis/Stadt name
        for name in sorted(GS_DICT.keys()):
            print(name)
    else:
        print("Please use help to see your options (--help)")
import abc
import secrets
from pyhanko.pdf_utils import generic
from pyhanko.pdf_utils.crypt._util import (
aes_cbc_decrypt,
aes_cbc_encrypt,
rc4_encrypt,
)
from pyhanko.pdf_utils.crypt.api import CryptFilter, SecurityHandlerVersion
from ._legacy import legacy_derive_object_key
class RC4CryptFilterMixin(CryptFilter, abc.ABC):
    """
    Mixin providing RC4 behaviour for crypt filters.

    RC4 is a symmetric stream cipher, so encryption and decryption are the
    same keystream XOR operation.

    :param keylen:
        Key length, in bytes. Defaults to 5.
    """

    method = generic.NameObject('/V2')
    keylen = None

    def __init__(self, *, keylen=5, **kwargs):
        self.keylen = keylen
        super().__init__(**kwargs)

    def encrypt(self, key, plaintext: bytes, params=None) -> bytes:
        """
        Apply the RC4 keystream derived from *key* to *plaintext*.

        :param key:
            Local encryption key.
        :param plaintext:
            Plaintext to encrypt.
        :param params:
            Ignored.
        :return:
            Ciphertext.
        """
        return rc4_encrypt(key, plaintext)

    def decrypt(self, key, ciphertext: bytes, params=None) -> bytes:
        """
        Apply the RC4 keystream derived from *key* to *ciphertext*.

        :param key:
            Local encryption key.
        :param ciphertext:
            Ciphertext to decrypt.
        :param params:
            Ignored.
        :return:
            Plaintext.
        """
        return rc4_encrypt(key, ciphertext)

    def derive_object_key(self, idnum, generation) -> bytes:
        """
        Compute the per-object key via :func:`.legacy_derive_object_key`.

        :param idnum:
            ID of the object being encrypted.
        :param generation:
            Generation number of the object being encrypted.
        :return:
            The local key.
        """
        return legacy_derive_object_key(self.shared_key, idnum, generation)
class AESCryptFilterMixin(CryptFilter, abc.ABC):
    """Mixin providing AES-CBC behaviour (PKCS#7 padding) for crypt filters."""

    keylen = None
    method = None

    def __init__(self, *, keylen, **kwargs):
        if keylen not in (16, 32):
            raise NotImplementedError("Only AES-128 and AES-256 are supported")
        self.keylen = keylen
        # 16-byte keys correspond to the AESV2 filter, 32-byte keys to AESV3.
        if keylen == 16:
            self.method = generic.NameObject('/AESV2')
        else:
            self.method = generic.NameObject('/AESV3')
        super().__init__(**kwargs)

    def encrypt(self, key, plaintext: bytes, params=None):
        """
        Encrypt *plaintext* with AES-CBC under a freshly generated random IV.

        :param key:
            The key to use.
        :param plaintext:
            The plaintext to be encrypted.
        :param params:
            Ignored.
        :return:
            The 16-byte initialisation vector followed by the ciphertext.
        """
        iv = secrets.token_bytes(16)
        iv_out, ciphertext = aes_cbc_encrypt(key, plaintext, iv)
        return iv_out + ciphertext

    def decrypt(self, key, ciphertext: bytes, params=None) -> bytes:
        """
        Decrypt AES-CBC data whose first 16 bytes are the initialisation vector.

        :param key:
            The key to use.
        :param ciphertext:
            IV-prefixed ciphertext to decrypt.
        :param params:
            Ignored.
        :return:
            The resulting plaintext.
        """
        iv = ciphertext[:16]
        body = ciphertext[16:]
        return aes_cbc_decrypt(key, body, iv)

    def derive_object_key(self, idnum, generation) -> bytes:
        """
        Derive the local key for the given object ID and generation number.

        Handlers at :attr:`.SecurityHandlerVersion.AES256` or newer use the
        shared key directly; older handlers fall back to
        :func:`.legacy_derive_object_key` with AES salting.

        :param idnum:
            ID of the object being encrypted.
        :param generation:
            Generation number of the object being encrypted.
        :return:
            The local key.
        """
        if self._handler.version >= SecurityHandlerVersion.AES256:
            return self.shared_key
        return legacy_derive_object_key(
            self.shared_key, idnum, generation, use_aes=True
        )
#!/usr/bin/env python
import uuid
import os
import subprocess
import shutil
DOC_DELIMITER = '\n##### DOCUMENT #############################################################'
class Scrubber():
    '''This class is a wrapper around the `nlm_scrubber` library.'''

    def __init__(self, working_directory='/tmp/nlm_scrubber'):
        # Base directory under which a unique per-run directory is created.
        self.working_directory = working_directory

    def _setup(self, base_path):
        '''Create the run directory with its input/ and output/ subdirectories.'''
        # exist_ok=True replaces the racy exists()-then-create pattern.
        os.makedirs(base_path, exist_ok=True)
        os.makedirs('%s/input' % (base_path), exist_ok=True)
        os.makedirs('%s/output' % (base_path), exist_ok=True)

    def scrub(self, inputs, docker=True):
        '''Scrub PHI from each string in `inputs`; returns the scrubbed strings.

        With docker=True the radaisystems/nlm-scrubber image is invoked;
        otherwise a local /opt/nlm_scrubber binary is driven via a config file.
        '''
        my_uuid = str(uuid.uuid4())
        base_path = '%s/%s' % (self.working_directory, my_uuid)
        self._setup(base_path)
        if not docker:
            self.config_file = '%s/config' % (base_path)
            with open(self.config_file, 'w') as file:
                file.write('ROOT1 = %s\n' % (base_path))
                file.write('ClinicalReports_dir = ROOT1/input\n')
                file.write('ClinicalReports_files = .*\\.txt\n')
                file.write('nPHI_outdir = ROOT1/output\n')
        for index, text in enumerate(inputs):
            # Write each document to disk for the scrubber to consume.
            # (renamed loop variable: the original shadowed the builtin `input`)
            with open('%s/input/data_%s.txt' % (base_path, index), 'w') as file:
                file.write(text)
        if docker:
            input_path = '%s/input' % base_path
            output_path = '%s/output' % base_path
            # Bug fix: dropped '-t' -- allocating a TTY fails when there is no
            # controlling terminal (e.g. cron/CI); '-i' alone is sufficient.
            run = 'docker run -i --rm -v %s:/tmp/once_off/input -v %s:/tmp/once_off/output --env SCRUBBER_REGEX radaisystems/nlm-scrubber' % (input_path, output_path)
            # Bug fix: preserve the parent environment -- with env={...} alone,
            # PATH is lost and the shell may not find the `docker` binary.
            env = {**os.environ, 'SCRUBBER_REGEX': r'.*\.txt'}
            result = subprocess.run(run, capture_output=True, shell=True, env=env)
        else:
            result = subprocess.run(['/opt/nlm_scrubber', self.config_file], capture_output=True)
        outputs = []
        for index in range(len(inputs)):
            # Collect the per-document scrubber output.
            with open('%s/output/data_%s.nphi.txt' % (base_path, index)) as file:
                output = file.read()
            # The scrubber appends a trailer after a delimiter line; strip it.
            if DOC_DELIMITER in output:
                output = output[:output.find(DOC_DELIMITER)]
            outputs.append(output)
        # Cleanup the per-run working directory.
        shutil.rmtree(base_path)
        return outputs
def scrub(inputs):
    '''Module-level convenience wrapper: scrub `inputs` with a default Scrubber (docker mode).'''
    scrubber = Scrubber()
    return scrubber.scrub(inputs)
if __name__ == "__main__":
    # Smoke test; requires docker (or /opt/nlm_scrubber) to be available.
    print(scrub(['testing', 'My name is Robert Hafner.', 'This string is also a test. 1/19/1998']))
| 2,693 | 890 |
from typing import TYPE_CHECKING
import pandas as pd
from galaxy_crawler.models import utils
from galaxy_crawler.models import v1 as models
if TYPE_CHECKING:
from datetime import datetime
from typing import List, Optional
from sqlalchemy.engine import Engine
def get_roles_df(engine: 'Engine', except_role_types: 'Optional[List[int]]' = None):
    """
    Obtain all roles with repository data as pandas.DataFrame
    :param engine: Database engine for connection
    :param except_role_types: Filtering role type based on given integers.
    :return: pandas.DataFrame
    """
    session = utils.get_scoped_session(engine)
    # str() renders the ORM join query to SQL text for read_sql_query.
    # NOTE(review): this relies on the rendered SQL containing no unbound
    # parameters -- confirm against the Role/Repository models.
    get_all_role_query = str(session.query(models.Role, models.Repository) \
        .join(models.Repository, models.Role.repository_id == models.Repository.repository_id))
    role_df = pd.read_sql_query(get_all_role_query, engine, index_col=['roles_role_id'])
    # Remove column name prefix `roles_`
    role_df.rename(columns=lambda x: x[6:] if x.startswith("roles_") else x, inplace=True)
    if except_role_types is not None:
        # ~series.isin(some) indicate that `series not in some`
        role_df = role_df[~role_df["role_type_id"].isin(except_role_types)]
    return role_df
def filter_roles_df_by_modified_date(roles: 'pd.DataFrame',
                                     from_date: 'datetime',
                                     to_date: 'datetime') -> 'pd.DataFrame':
    """
    Filter roles by their modification date.

    Keeps only rows whose 'modified' value lies between `from_date` and
    `to_date` (inclusive). The bounds may be given in either order.

    :param roles: Roles DataFrame
    :param from_date: one end of the modified-datetime window
    :param to_date: the other end of the modified-datetime window
    :return: pandas.DataFrame
    """
    # Normalise the interval so lo <= hi regardless of argument order.
    lo, hi = (from_date, to_date) if from_date <= to_date else (to_date, from_date)
    modified = roles["modified"]
    in_window = (modified >= lo) & (modified <= hi)
    return roles.loc[in_window]
def filter_roles_df_by_dl_percentile(roles: 'pd.DataFrame',
                                     percentile: 'float' = 0.9) -> 'pd.DataFrame':
    """
    Filter roles by download count.

    Keeps only rows whose 'download_count' is at least the value at the
    requested percentile of the column.

    :param roles: Roles DataFrame
    :param percentile: 0 <= N <= 1
    :return: pandas.DataFrame
    """
    assert 0 <= percentile <= 1, "Percentile should be 0 <= N <= 1."
    downloads = roles["download_count"]
    cutoff = downloads.quantile(percentile)
    return roles.loc[downloads >= cutoff]
| 2,569 | 774 |
import logging
from collections import OrderedDict
import numpy as np
from sklearn.base import BaseEstimator
from sklearn.feature_selection import SelectKBest, chi2
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import Normalizer
from frameworks.tc_scikit.models.tiger import TIGER_TAGSET_SPACY
def build_feature_selection(use_TIGER=True, k=5):
    """Build a named pipeline: dependency histogram -> chi2 top-k -> L2 normalisation."""
    steps = [
        ('transformer', DependencyDistributionSpacy(use_TIGER=use_TIGER)),
        ('feature_selection', SelectKBest(chi2, k=k)),
        ('normalizer', Normalizer()),
    ]
    return ('dependency_distribution_spacy', Pipeline(steps))
def build(use_TIGER=True):
    """Build a named pipeline: dependency histogram -> L2 normalisation (no feature selection)."""
    steps = [
        ('transformer', DependencyDistributionSpacy(use_TIGER=use_TIGER)),
        ('normalizer', Normalizer()),
    ]
    return ('dependency_distribution_spacy', Pipeline(steps))
# Dependency labels that never contribute to the histogram (syntactic noise).
dependency_black_list = ['ROOT', 'punct']
def get_dependency_histogram(pos_list, tag_set):
    """Return a float64 count vector of dependency labels, ordered like `tag_set`.

    Empty labels, black-listed labels, composite labels (containing '||'),
    and labels missing from `tag_set` are ignored. (The original raised
    KeyError on any label outside the tag set.)
    """
    histogram = OrderedDict.fromkeys(tag_set, 0)
    for entry in pos_list:
        if (entry and entry not in dependency_black_list
                and '||' not in entry and entry in histogram):
            histogram[entry] += 1
    # Preserve tag_set order; dtype matches the original implementation.
    return np.array(list(histogram.values()), dtype=np.float64)
class DependencyDistributionSpacy(BaseEstimator):
    """Sklearn-style transformer mapping documents to dependency-label histograms."""
    def __init__(self, use_TIGER=True):
        self.logger = logging.getLogger()
        self.use_TIGER = use_TIGER
    def fit(self, X, y):
        # Stateless transformer: nothing to learn.
        return self
    def transform(self, X):
        # One histogram per document.
        return list(map(lambda x: self.transform_document(x), X))
    def transform_document(self, document):
        if self.use_TIGER:
            # NOTE(review): 'releation' looks like a typo'd attribute name, but
            # it must match the dependency objects produced upstream -- confirm
            # against the document model before renaming.
            dependency_list = list(map(lambda x: x.releation, document.dependencies))
            distribution = get_dependency_histogram(dependency_list, TIGER_TAGSET_SPACY)
            return distribution
        else:
            # Only the TIGER tagset is implemented.
            raise NotImplementedError("")
| 2,065 | 602 |
import numpy as np
from numpy import dot
from scipy.sparse import csr_matrix, vstack as spvstack, hstack as sphstack
from .static import solve
from .logger import msg, warn
def _solver_arc_length_riks(an, silent=False):
    r"""Arc-Length solver using the Riks method

    Traces the non-linear equilibrium path of the analysis object ``an`` by
    advancing a load factor ``lbd`` and displacement increment under a
    spherical arc-length constraint. Converged load factors are appended to
    ``an.increments`` and the corresponding solution vectors to ``an.cs``.
    """
    msg('___________________________________________', level=1, silent=silent)
    msg(' ', level=1, silent=silent)
    msg('Arc-Length solver using Riks implementation', level=1, silent=silent)
    msg('___________________________________________', level=1, silent=silent)
    msg('Initializing...', level=1, silent=silent)
    lbd = 0.
    arc_length = an.initialInc
    length = arc_length
    dlbd = arc_length
    max_arc_length = an.maxArcLength
    modified_NR = an.modified_NR
    kC = an.calc_kC(silent=True)
    fext = an.calc_fext(inc=1., silent=True)
    kT = kC
    # initial linear prediction for the first arc-length step
    c = solve(kC, arc_length*fext, silent=True)
    fint = kC*c
    dc = c
    c_last = 0 * c
    step_num = 1
    # modified Newton-Raphson keeps the tangent matrix frozen within a step
    if modified_NR:
        compute_NL_matrices = False
    else:
        compute_NL_matrices = True
    while step_num < 1000:
        msg('Step %d, lbd %1.5f, arc-length %1.5f' % (step_num, lbd, arc_length), level=1, silent=silent)
        min_Rmax = 1.e6
        prev_Rmax = 1.e6
        converged = False
        iteration = 0
        varlbd = 0
        varc = 0
        phi = 1 # spheric arc-length
        while True:
            iteration += 1
            if iteration > an.maxNumIter:
                warn('Maximum number of iterations achieved!', level=2, silent=silent)
                break
            q = fext
            # Augmented (bordered) system: tangent stiffness with the load
            # vector and the linearized arc-length constraint appended.
            TMP = sphstack((kT, -q[:, None]), format='lil')
            dcext = np.concatenate((dc, [0.]))
            TMP = spvstack((TMP, 2*dcext[None, :]), format='lil')
            TMP[-1, -1] = 2*phi**2*dlbd*np.dot(q, q)
            TMP = TMP.tocsr()
            right_vec = np.zeros(q.shape[0]+1, dtype=q.dtype)
            # R: force residual; A: violation of the spherical constraint
            R = fint - (lbd + dlbd)*q
            A = - (np.dot(dc, dc) + phi**2*dlbd**2*np.dot(q, q) - arc_length**2)
            right_vec[:-1] = -R
            right_vec[-1] = A
            solution = solve(TMP, right_vec, silent=True)
            varc = solution[:-1]
            varlbd = solution[-1]
            dlbd = dlbd + varlbd
            dc = dc + varc
            msg('iter %d, lbd+dlbd %1.5f' % (iteration, lbd+dlbd), level=2, silent=silent)
            # computing the Non-Linear matrices
            if compute_NL_matrices:
                kC = an.calc_kC(c=(c + dc), NLgeom=True, silent=True)
                kG = an.calc_kG(c=(c + dc), NLgeom=True, silent=True)
                kT = kC + kG
                if modified_NR:
                    compute_NL_matrices = False
            else:
                if not modified_NR:
                    compute_NL_matrices = True
            # calculating the residual
            fint = an.calc_fint(c + dc, silent=True)
            Rmax = np.abs((lbd + dlbd)*fext - fint).max()
            if iteration >=2 and Rmax <= an.absTOL:
                converged = True
                break
            if (Rmax > min_Rmax and Rmax > prev_Rmax and iteration > 3):
                warn('Diverged - Rmax value significantly increased', level=2, silent=silent)
                break
            else:
                min_Rmax = min(min_Rmax, Rmax)
            change_rate_Rmax = abs(1 - Rmax/prev_Rmax)
            if (iteration > 2 and change_rate_Rmax < an.too_slow_TOL):
                warn('Diverged - convergence too slow', level=2, silent=silent)
                break
            prev_Rmax = Rmax
        if converged:
            step_num += 1
            msg('Converged at lbd+dlbd of %1.5f, total length %1.5f' % (lbd + dlbd, length), level=2, silent=silent)
            length += arc_length
            lbd = lbd + dlbd
            # heuristic growth factor for the next arc-length increment
            arc_length *= 1.1111
            dlbd = arc_length
            c_last = c.copy()
            c = c + dc
            an.increments.append(lbd)
            an.cs.append(c.copy())
        else:
            msg('Reseting step with reduced arc-length', level=2, silent=silent)
            # heuristic shrink factor after a diverged step
            arc_length *= 0.90
        if length >= max_arc_length:
            msg('Maximum specified arc-length of %1.5f achieved' % max_arc_length, level=2, silent=silent)
            break
        # predictor for the next step reuses the last converged increment
        dc = c - c_last
        dlbd = arc_length
        kC = an.calc_kC(c=c, NLgeom=True, silent=True)
        kG = an.calc_kG(c=c, NLgeom=True, silent=True)
        kT = kC + kG
        fint = an.calc_fint(c=c, silent=True)
        compute_NL_matrices = False
    msg('Finished Non-Linear Static Analysis', silent=silent)
    msg(' total arc-length %1.5f' % length, level=1, silent=silent)
| 4,728 | 1,637 |
from torchsynth.__info__ import ( # noqa: F401
__author__,
__author_email__,
__copyright__,
__docs__,
__homepage__,
__license__,
__version__,
)
| 173 | 70 |
# Import ROS2 libraries
import rclpy
from rclpy.node import Node
from cv_bridge import CvBridge, CvBridgeError
from rclpy.qos import QoSProfile
from rclpy.callback_groups import ReentrantCallbackGroup
from rclpy.executors import MultiThreadedExecutor
# Import message files
from sensor_msgs.msg import Image
from autonomous_exploration_msgs.msg import MapData
from nav_msgs.msg import OccupancyGrid
# Import other libraries
import numpy as np
import cv2 as cv
class VisualizeInteractiveMap(Node):
    """
    Convert the map published from Unity to an image topic
    """
    def __init__(self):
        super().__init__("visualize_interactive_map")
        # Initialize the variables
        self.bridge = CvBridge()  # converts OpenCV images to sensor_msgs/Image
        qos = QoSProfile(depth=10)
        # Create subscribers
        ## /rosbridge_msgs_unity/interactive_map
        self.create_subscription(MapData, "rosbridge_msgs_unity/interactive_map", self._mapCallback, qos)
        # Create publishers
        ## /interactive_map/image
        self.interactiveMap_Imagepub = self.create_publisher(Image, "/interactive_map/image", qos)
        ## /interactive_map/map
        self.interactiveMap_Mappub = self.create_publisher(OccupancyGrid, "/interactive_map/map", qos)
        self.get_logger().info("Interactive map to image converter initiated")
    def _mapCallback(self, data:MapData):
        # Re-publish the incoming Unity map both as a randomly-colored RGB
        # image and as a nav_msgs/OccupancyGrid.
        # Store the map Info
        # NOTE(review): width/height are deliberately taken swapped from the
        # message (width=data.height) — presumably to match Unity's row/column
        # order; confirm against the publisher.
        width = data.height
        height = data.width
        # Rearrange the data to be visible correctly on unity
        # (`map` shadows the builtin of the same name — local to this method)
        map = np.array(data.map).reshape(width, height)
        map = np.flip(map, 0)  # vertical flip so the image is upright
        map = map.flatten()
        map_img = np.zeros((width * height, 3))
        # Generate the colors randomly, one RGB color per map label
        # (labels are assumed to be 1..max(map); 0 stays black)
        colors = 255 * np.random.rand(max(map), 1, 3)
        for i in range(max(map)):
            map_img[map == (i + 1)] = colors[i, :, :]
        # Reshape the map image to width * height * 3
        map_img = np.reshape(map_img, (width, height, 3))
        #map_img = np.flip(map_img, 1)
        map_img = map_img.astype(np.uint8)
        # Create the interactive map
        intMap = OccupancyGrid()
        intMap.header.frame_id = 'map'
        intMap.data = [int(el) for el in map]
        intMap.info.resolution = data.resolution
        intMap.info.width = width
        intMap.info.height = height
        intMap.info.origin.position.x = float(data.origin[0])
        intMap.info.origin.position.y = float(data.origin[1])
        # Publish the image
        self.interactiveMap_Imagepub.publish(self.bridge.cv2_to_imgmsg(map_img, "rgb8"))
        # Publish the map
        self.interactiveMap_Mappub.publish(intMap)
###################################################################################################
def main(args=None):
    """Entry point: spin the VisualizeInteractiveMap node until Ctrl+C."""
    rclpy.init(args=args)
    VIM = VisualizeInteractiveMap()
    # NOTE(review): this executor is created but never used — rclpy.spin()
    # below runs the default single-threaded executor. Either pass the node
    # to executor.add_node()/executor.spin() or drop the executor.
    executor = MultiThreadedExecutor()
    try:
        rclpy.spin(VIM)
    except KeyboardInterrupt:
        pass
    #rclpy.spin_until_future_complete(SR, )
    # Destroy the node explicitly
    # (optional - otherwise it will be done automatically
    # when the garbage collector destroys the node object)
    #SR.destroy_node()
    rclpy.shutdown()
if __name__ == '__main__':
    main()
# Legacy Django hook (deprecated since Django 3.2): names the AppConfig to
# use when this package is listed in INSTALLED_APPS.
default_app_config = 'rest_framework_captcha.apps.RestFrameworkCaptchaConfig'
| 79 | 25 |
#!/usr/bin/env python3
# Libraries
from base64 import b64encode, b64decode
# Messages
warn = '!!!!!!!!!!!!!!!!!!!!!!!! WARNING: '  # prefix printed before exceptions
valid = '\nPlease Enter a Valid Entry!\n'  # shown for an unrecognized menu choice
# Menu/Encode/Decode Logic
def app(st):
    """Handle one menu selection.

    '1' encodes user input to base64, '2' decodes it, 'q' quits the
    program; anything else prints the invalid-entry message.
    """
    if st == '1':
        raw = input('Enter Stuff to Encode: ').encode()
        print('\n')
        try:
            encoded = b64encode(raw)
            # strip the leading b' and trailing ' from the bytes repr
            print('\n', str(encoded)[2:-1], '\n')
            return
        except Exception as exc:
            print(warn, exc, '\n')
    elif st == '2':
        raw = input('Enter Stuff to Decode: ').encode()
        print('\n')
        try:
            decoded = b64decode(raw)
            print('\n', str(decoded)[2:-1], '\n')
            return
        except Exception as exc:
            print(warn, exc, '\n')
    elif st == 'q':
        exit()
    else:
        print(valid)
        return
#Main Menu Loop
while True:
try:
app(input('\nEnter 1 to Encode\n\nEnter 2 to Decode\n\nEnter q to Quit\n'))
except KeyboardInterrupt:
break
| 1,027 | 380 |
import os
import sys
import subprocess
sys.path.append(snakemake.config['args']['mcc_path'])
import scripts.mccutils as mccutils
def main():
    """Concatenate the three snakemake input fasta files (order defined by
    the Snakemake rule) into the combined PopoolationTE reference fasta."""
    mccutils.log("processing","making PopoolationTE reference fasta")
    # `cat` the three inputs; run_command_stdout redirects stdout to the
    # output path.
    command = ["cat", snakemake.input[0], snakemake.input[1], snakemake.input[2]]
    mccutils.run_command_stdout(command, snakemake.output[0])
    mccutils.log("processing","PopoolationTE reference fasta created")
if __name__ == "__main__":
    main()
"""
Control servo using potentiometer
"""
from machine import Pin, ADC, PWM
pot = ADC(Pin(32), atten = ADC.ATTN_11DB) # 电位器 - ADC
servo = PWM(Pin(33), freq = 50) # 舵机
while True:
adc_value = pot.read()
pulse_width_value = (125 - 25)/4095 * adc_value + 25
servo.duty(int(pulse_width_value))
| 325 | 156 |
from gurobipy import *
from sys import argv
import json
import math
import drawful
def read_lst(fn):
    """Load and return (n, frames) from a JSON file written by write_lst."""
    with open(fn, 'r') as handle:
        n, tp = json.load(handle)
    return (n, tp)
def write_lst(fn, lst):
    """Serialize *lst* as JSON to the file *fn* (overwriting it)."""
    with open(fn, 'w') as handle:
        json.dump(lst, handle)
def distance(v1, v2):
    """Return the Euclidean distance between two points.

    Generalized from the original hard-coded 3-D formula to points of any
    (equal) dimensionality; for 3-D inputs the result is identical.
    """
    return math.sqrt(sum((b - a) ** 2 for a, b in zip(v1, v2)))
def distance_squared(v1, v2):
    """Return the squared Euclidean distance between two 3-D points."""
    return sum((v2[axis] - v1[axis]) ** 2 for axis in range(3))
def get_permutation(edges, last_perm, last_frame, frame, n):
    """Propagate point identities along the matched edges between two frames.

    Each edge (v1, v2) says that point v1 of the previous frame became v2 in
    the current frame; the identity last_perm held for v1 is carried over to
    v2's position in the returned permutation.
    """
    perm = [0] * n
    for src, dst in edges:
        src_idx = last_frame.index(list(src))
        dst_idx = frame.index(list(dst))
        # j = last_perm.index(v1i)
        perm[dst_idx] = last_perm[src_idx]
    return perm
def main():
    """Frame-to-frame point matching.

    For each consecutive pair of frames, solve a binary assignment problem
    with Gurobi minimizing total squared distance between matched points,
    chain the resulting permutations, and write them next to the input file.
    Returns (orig_frames, first permutation, second permutation) for drawing.
    """
    def optimize_single(f):
        # Build and solve the assignment model between frames f and f+1;
        # returns the permutation carrying identities forward.
        m = Model('project7')
        print("Adding variables...")
        edge_vars = {}
        point_edges = {}
        t1, f1 = frames[f]
        t2, f2 = frames[f + 1]
        for i in range(n):
            v1 = tuple(f1[i])
            point_edges[v1] = []
            for j in range(n):
                v2 = tuple(f2[j])
                # Squared distance as cost: same argmin as the true
                # distance, but avoids the sqrt.
                cost = distance_squared(v1, v2)
                # if (v1, v2) in edge_vars[f]:
                # print("Duplicate vertex!")
                # return
                edge_vars[v1, v2] = m.addVar(obj=cost, vtype=GRB.BINARY)
                point_edges[v1].append(edge_vars[v1, v2])
        m.update()
        print("Adding constraints...")
        '''
        # There must be n edges from one frame to the next
        for frame in edge_vars:
            m.addConstr(quicksum(frame.values()) == n)
        '''
        # There must be one incoming edge per point in the last n-1 frames
        for v2 in frames[f + 1][1]:
            v2 = tuple(v2)
            v2_edges = []
            for v1 in frames[f][1]:
                v1 = tuple(v1)
                v2_edges.append(edge_vars[v1, v2])
            m.addConstr(quicksum(v2_edges) == 1)
        # There must be one outgoing edge per point in the first n-1 frames
        for edges in point_edges:
            m.addConstr(quicksum(point_edges[edges]) == 1)
        m.optimize()
        # Keep only the edges the solver selected (binary value ~1).
        edges = m.getAttr('x', edge_vars).items()
        selected = []
        for edge, value in edges:
            if value:
                selected.append(edge)
        # Calculate cost
        cost = 0
        for v1, v2 in selected:
            cost += distance(v1, v2)
        print("cost", f, ":", cost)
        return get_permutation(selected, last_perm, frames[f][1], frames[f + 1][1], n)
    # fn = 'data-n2-t3.json'
    # fn = 'example-points.lst'
    # fn = 'points-00125-0.lst'
    # fn = 'points-10400-0.lst'
    # fn = 'points-00125-0.lst'
    # fn = 'new/points-00020-0.lst'
    # fn = 'points-02500-0.lst'
    fn = 'points_v-209-0.3.lst'
    if len(argv) == 2:
        fn = argv[1]
    n, frames = read_lst(fn)
    orig_frames = [[tuple(u) for u in ss[1]] for ss in frames]
    nf = len(frames) - 1
    print("n:", n)
    print("frames: t0-t" + str(nf))
    # solution[0] is n; solution[i+1] is the permutation for frame pair i.
    solution = [n]
    last_perm = [i for i in range(n)]
    for f in range(nf):
        last_perm = optimize_single(f)
        solution.append(last_perm)
    # print(solution)
    write_lst(fn + '.sol', solution)
    return (orig_frames, solution[1], solution[2])
if __name__ == '__main__':
    import time
    # time.clock() was deprecated in Python 3.3 and removed in 3.8;
    # perf_counter() is its documented replacement for elapsed-time timing.
    start = time.perf_counter()
    (orig_frames, solution1, solution2) = main()
    end = time.perf_counter()
    print("time: {0:.3f} s".format(end - start))
    drawful.drawWithIndices(orig_frames, solution1, solution2)
| 3,720 | 1,478 |
def verb_post(s):
    """Return the simple past tense of an English verb.

    A small irregular-verb table is checked first; otherwise the regular
    rules apply: final "c" doubles to "cked", final "e" takes "d",
    consonant + "y" becomes "ied", everything else takes "ed".
    """
    irregular = {
        "go": "went",
        "put": "put",
        "write": "wrote",
        "find": "found",
        "read": "read",
    }
    past = irregular.get(s)
    if past is not None:
        return past
    if s.endswith("c"):
        # "picnic" -> "picnicked"
        return s + "ked"
    if s.endswith("e"):
        return s + "d"
    if s.endswith("y") and s[-2] not in "aiueo":
        # consonant + y: "try" -> "tried"
        return s[:-1] + "ied"
    return s + "ed"
print(verb_post("play"))
print(verb_post("like"))
print(verb_post("try"))
print(verb_post("picnic"))
print(verb_post("write"))
print(verb_post("go"))
print(verb_post("read"))
| 601 | 235 |
import select
import socket
import struct
import traceback
import logging
import time
import numpy as np
import queue
import random,threading,time
from translate import pose_predict
import csv
import time
def health_check(s):
    """Raise socket.error if socket *s* does not look healthy.

    Healthy means: readable AND writable AND no exceptional condition,
    checked with a non-blocking select().
    """
    readable, writable, errored = select.select([s], [s], [s], 0)
    if not readable or not writable or errored:
        raise socket.error("discon")
def getbytes(s, num):
    """Read exactly *num* bytes from socket *s* and return them.

    Raises ConnectionError if the peer closes the connection before *num*
    bytes arrive. (The original looped forever once recv() started
    returning b"" on a closed socket, spinning at 100% CPU.)
    """
    received = b""
    while len(received) < num:
        chunk = s.recv(num - len(received))
        if not chunk:
            # recv() returning b"" means the peer performed an orderly
            # shutdown — no more data will ever arrive.
            raise ConnectionError(
                "socket closed before %d bytes were received" % num)
        received += chunk
    return received
def receivepacket(s):
    """Read one 76-byte packet from *s*.

    Layout: little-endian uint32 id followed by 18 float32 pose values.
    Returns (id, pose) on success; prints an error and returns None on
    failure.
    """
    try:
        raw = getbytes(s, 76)
        packet_id = struct.unpack('<I', raw[:4])[0]
        # Interpret the remaining 72 bytes as float32 pose data.
        pose = np.frombuffer(raw[4:], dtype=np.float32)
        return packet_id, pose
    except Exception:
        print("receiving packet error!")
def sending(s, _id, result):
    """Send the packet id followed by a 25x18 float32 result matrix on *s*.

    On failure the traceback is logged and a short message printed; no
    exception propagates.
    """
    try:
        payload = struct.pack('<I', _id)
        for row in range(25):
            for col in range(18):
                payload += struct.pack('<f', result[row][col])
        s.sendall(payload)  # sending back
    except Exception:
        logging.error(traceback.format_exc())
        print("sending result error!")
def interpolation(data_queue, time_queue):
    """Return a copy of data_queue with gap-filling midpoints inserted.

    Whenever two consecutive samples are more than 40 ms apart (per
    time_queue), their average is inserted between them.
    """
    filled = []
    for idx, sample in enumerate(data_queue):
        if idx and (time_queue[idx] - time_queue[idx - 1]) > 40:
            filled.append((sample + data_queue[idx - 1]) / 2)
        filled.append(sample)
    return filled
class MLService(threading.Thread):
    """Background thread: once two clients are buffering poses, interpolate
    each client's window, run the model on a full 100-sample window and send
    the prediction back over the socket."""

    def __init__(self, s, queue_map, queue_time_map, model):
        threading.Thread.__init__(self, name="mlservice")
        self.s = s                            # connected client socket
        self.queue_map = queue_map            # id -> list of pose arrays
        self.queue_time_map = queue_time_map  # id -> list of ms timestamps
        self.model = model
        self.doRun = True                     # cleared by the owner to stop run()

    def run(self):
        print("ML running!!\n")
        while self.doRun:
            if(len(self.queue_map)==2):
                for _id, queue in self.queue_map.items():
                    print("ml _id: ", _id, ", length: ", len(queue))
                    # BUG FIX: the original called
                    # interpolation(queue_map, queue_time_map), passing the
                    # whole dicts instead of this id's lists —
                    # interpolation() indexes its arguments by position, so
                    # that raised KeyError/TypeError for non-integer ids.
                    interpolated_data_queue = interpolation(
                        queue, self.queue_time_map[_id])
                    if len(interpolated_data_queue) == 100:
                        poses = np.array(interpolated_data_queue)
                        result = self.model.sample(poses)
                        sending(self.s, _id, result)
if __name__=="__main__":
# create model
model = pose_predict()
# create socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(5)
s.bind(("127.0.0.1", 60000))
print('socket created ')
s.listen()
queue_map = {}
queue_time_map = {}
print('socket listensing ... ')
while True: # for connect multiple times
try:
conn, addr = s.accept()
print(addr[0] + 'connect!!')
mlservice = MLService(conn,queue_map,queue_time_map,model)
mlservice.start()
#handle one client!!
while True:
try:
# health_check(conn)
_id, input_pose = receivepacket(conn)
print("Input ")
print(_id)
if _id not in queue_map.keys():
queue_map[_id]=[]
data_ = queue_map[_id]
if(len(data_)==100):
data_[0:99] = data_[1:100]
data_[99] = input_pose
else:
data_.append(input_pose)
if _id not in queue_time_map.keys():
queue_time_map[_id]=[]
time_data_ = queue_time_map[_id]
if(len(time_data_)==100):
time_data_[0:99] = time_data_[1:100]
time_data_[99] = int(round(time.time() * 1000))
else:
time_data_.append(int(round(time.time() * 1000)))
except Exception as e:
logging.error(traceback.format_exc())
queue_map.clear()
break
#end of handle client
mlservice.doRun=False
mlservice.join()
except socket.timeout:
pass | 4,515 | 1,396 |
import csv, glob, re
import matplotlib.pyplot as plt
import numpy as np
from intersection import intersection
# NOTE: this is a Python 2 script (print statements below); the csv/map
# usage also relies on py2's eager map().
# Regex to pull the device name out of a bracketed file label
# (defined here but not used in this chunk — presumably used elsewhere).
devre = re.compile(r'\[(.*)\(')
# Read in data
devices = ["Al_%.1f_%d" % (t1, t2) for t1 in np.arange(0.5, 2.5, 0.5) for t2 in np.arange(5, 15, 5)]
devices += ["Al2O3_%.1f_%d" % (t1, t2) for t1 in np.arange(0.5, 2.5, 0.5) for t2 in np.arange(5, 15, 5)]
for dev in ['Al', 'Al2O3']:
    for t2 in np.arange(5, 15, 5):
        for t1 in np.arange(0, 2.5, 0.5):
            # Produce both a linear and a semilog-y version of each plot.
            for plotter in [plt.plot, plt.semilogy]:
                bvs = []
                for f in glob.glob('data/Breakdown [%s_%.1f_%d*.csv' % (dev, t1, t2)):
                    # Read data
                    data = csv.reader(open(f))
                    # Keep only 'DataValue' rows; columns 1/2 are V and I.
                    iv = map(lambda entry: (float(entry[1]), float(entry[2])), filter(lambda entry: entry[0] == 'DataValue', data))
                    vvals, ivals = zip(*iv)
                    # Compute breakdown voltage based on 1mA threshold
                    bv = intersection(np.array(vvals), np.array(ivals), np.array([0, 50]), np.array([1e-3, 1e-3]))
                    bvs.append(bv[0][0])
                # Set the font dictionaries (for plot title and axis titles)
                title_font = {'fontname':'Arial', 'size':'16', 'color':'black', 'weight':'bold', 'verticalalignment':'bottom'}
                axis_font = {'fontname':'Arial', 'size':'12'}
                # Plot data
                plt.title('MOSCAP Breakdown I-V %s_%.1f_%d' % (dev, t1, t2))
                plt.xlabel('$V_{G}$ (V)', **axis_font)
                plt.ylabel('$I_{GB}$ (A)', **axis_font)
                plotter(vvals, ivals)
                plt.savefig('imgs/%s_%.1f_%d_breakdown_%s.pdf' % (dev, t1, t2, plotter.__name__), format='pdf')
                plt.close()
                # Print breakdown-voltage statistics once per device (the
                # linear-plot pass only, to avoid duplicate output).
                if bvs != [] and plotter == plt.plot:
                    print dev, t1, t2
                    print len(bvs)
                    print np.mean(bvs)
                    print np.std(bvs)
                    print bvs
import torch.nn as nn
import torch.nn.functional as F
from torchvision import models
import pretrainedmodels
from . import resnet_fpn
from . import dla
def get_model(name, heads, head_conv=128, num_filters=[256, 256, 256],
              dcn=False, gn=False, ws=False, freeze_bn=False, **kwargs):
    """Build a detection model from its name.

    Names containing both "res" and "fpn" select a ResNet-FPN (everything
    before the final underscore token is the backbone name); names
    containing "dla" select DLA-34 (everything after the first token is the
    pretraining spec). Anything else raises NotImplementedError.
    """
    tokens = name.split('_')
    if 'res' in name and 'fpn' in name:
        backbone = '_'.join(tokens[:-1])
        return resnet_fpn.ResNetFPN(backbone, heads, head_conv, num_filters,
                                    dcn=dcn, gn=gn, ws=ws,
                                    freeze_bn=freeze_bn)
    if 'dla' in name:
        pretrained = '_'.join(tokens[1:])
        return dla.get_dla34(heads, pretrained, head_conv, num_filters,
                             gn=gn, ws=ws, freeze_bn=freeze_bn)
    raise NotImplementedError
def get_pose_model(model_name='resnet18', num_outputs=None, pretrained=True,
                   freeze_bn=False, dropout_p=0, **kwargs):
    """Build a regression model with `num_outputs` outputs on top of a
    torchvision densenet or a `pretrainedmodels` backbone.

    The backbone's classifier head is replaced; for dpn models that head is
    a 1x1 conv, otherwise a linear layer (optionally preceded by dropout).
    With freeze_bn=True, all BatchNorm affine parameters stop training.
    """
    if 'densenet' in model_name:
        # torchvision densenets expose the head as `.classifier`.
        model = models.__dict__[model_name](num_classes=1000,
                                            pretrained=pretrained)
        in_features = model.classifier.in_features
        model.classifier = nn.Linear(in_features, num_outputs)
    else:
        # pretrainedmodels uses the string 'imagenet' (or None) here.
        pretrained = 'imagenet' if pretrained else None
        model = pretrainedmodels.__dict__[model_name](num_classes=1000,
                                                      pretrained=pretrained)
        if 'dpn' in model_name:
            # DPN heads are convolutional, not linear.
            in_channels = model.last_linear.in_channels
            model.last_linear = nn.Conv2d(in_channels, num_outputs,
                                          kernel_size=1, bias=True)
        else:
            # Make pooling input-size agnostic before swapping the head.
            if 'resnet' in model_name:
                model.avgpool = nn.AdaptiveAvgPool2d(1)
            else:
                model.avg_pool = nn.AdaptiveAvgPool2d(1)
            in_features = model.last_linear.in_features
            if dropout_p == 0:
                model.last_linear = nn.Linear(in_features, num_outputs)
            else:
                model.last_linear = nn.Sequential(
                    nn.Dropout(p=dropout_p),
                    nn.Linear(in_features, num_outputs),
                )
    if freeze_bn:
        for m in model.modules():
            if isinstance(m, nn.BatchNorm2d):
                m.weight.requires_grad = False
                m.bias.requires_grad = False
    return model
| 2,421 | 802 |
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/15_interp_latent.ipynb (unless otherwise specified).
__all__ = ['CosineSearch', 'InterpEmbeddings']
# Cell
import numpy as np
import pandas as pd
from typing import Dict, List, Any
from forgebox.html import DOM
# Cell
class CosineSearch:
    """
    Build a index search on cosine distance
        cos = CosineSearch(base_array)
        idx_order = cos(vec)
    """

    def __init__(self, base: np.ndarray):
        """
        base: np.ndarray, embedding matrix of shape:
            (num_items, hidden_size)
        """
        assert len(base.shape) == 2,\
            f"Base array has to be 2 dimentional, input is {len(base.shape)}"
        self.base = base
        self.base_norm = self.calc_base_norm(self.base)
        # Row-normalize once so every query reduces to one matrix product.
        self.normed_base = self.base/self.base_norm[:, None]
        self.dim = self.base.shape[1]

    def __repr__(self):
        return f"[Consine Similarity Search] ({len(self)} items)"

    def __len__(self): return self.base.shape[0]

    @staticmethod
    def calc_base_norm(base: np.ndarray) -> np.ndarray:
        """Per-row Euclidean norms of *base*."""
        return np.sqrt(np.power(base, 2).sum(1))

    def search(self, vec: np.ndarray, return_similarity: bool = False):
        """Return the index order (closest first); with
        return_similarity=True also return the cosine similarities in that
        order."""
        if return_similarity:
            # BUG FIX: the original divided by np.power(vec, 2).sum()
            # (the *squared* norm of vec, no sqrt), so the returned values
            # were cosine similarity scaled by 1/|vec| rather than true
            # cosine similarity in [-1, 1]. The ranking was unaffected.
            vec_norm = np.sqrt(np.power(vec, 2).sum())
            similarity = (vec * self.normed_base / vec_norm).sum(1)
            order = similarity.argsort()[::-1]
            return order, similarity[order]
        return self(vec)

    def __call__(self, vec: np.ndarray) -> np.ndarray:
        """
        Return the order index of the closest vector to the furthest
        vec: an 1 dimentional vector, marks the closest index
            to the further ones
        """
        return (vec * self.normed_base).sum(1).argsort()[::-1]
# Cell
class InterpEmbeddings:
    """
    interp = InterpEmbeddings(embedding_matrix, vocab_dict)
    interp.search("computer")
    # visualize the embedding with tensorboard
    interp.visualize_in_tb()
    """
    def __init__(
        self,
        embedding_matrix: np.ndarray,
        vocab: Dict[int, str]
    ):
        """
        embedding_matrix: np.ndarray, embedding matrix of shape:
            (num_items, hidden_size)
        vocab: mapping of token id -> token string
        """
        self.base = embedding_matrix
        self.cosine = CosineSearch(embedding_matrix)
        self.vocab = vocab
        # reverse map: token string -> token id
        self.c2i = dict((v, k) for k, v in vocab.items())
    def __repr__(self) -> str:
        cls = self.__class__.__name__
        return f"{cls} with\n\t{self.cosine}"
    def search(
        self,
        category: str,
        top_k: int = 20,
    ) -> pd.DataFrame:
        """
        search for similar words with embedding and vocabulary dictionary

        If *category* is not an exact vocabulary entry, all tokens whose
        text contains it (case-insensitive) are collected and their mean
        embedding is used as the query vector.
        Returns a DataFrame with columns tokens / idx / similarity.
        """
        token_id = self.c2i.get(category)
        if token_id is None:
            # Fall back to substring matching over the whole vocabulary.
            match_list = []
            for token_id, token in self.vocab.items():
                if category.lower() in str(token).lower():
                    match_list.append({"token": token, "token_id": token_id})
            if len(match_list)==0:
                raise KeyError(
                    f"[UnpackAI] category: {category} not in vocabulary")
            else:
                match_df = pd.DataFrame(match_list)
                DOM("Search with the following categories","h3")()
                # NOTE(review): `display` is the IPython builtin — this
                # method only works inside a notebook/IPython session.
                display(match_df)
                token_ids = list(match_df.token_id)
        else:
            DOM(f"Search with token id {token_id}","h3")()
            token_ids = [token_id,]
        # combine multiple tokens into 1
        vec = self.base[token_ids].mean(0)
        # distance search
        closest, similarity = self.cosine.search(vec, return_similarity=True)
        closest = closest[:top_k]
        similarity = similarity[:top_k]
        tokens = list(self.vocab.get(idx) for idx in closest)
        return pd.DataFrame({
            "tokens": tokens,
            "idx": closest,
            "similarity": similarity})
    def visualize_in_tb(
        self,
        log_dir:str="./logs",
        selection: np.ndarray=None,
        first_k:int=500,
    ) -> None:
        """
        Visualize the embedding in tensorboard
        For now this function is only supported on colab

        log_dir: tensorboard log directory (wiped before writing)
        selection: optional integer indices restricting which rows to show
        first_k: cap on how many vectors/labels are written
        """
        # since this won't be excute too many times within a notebook
        # in large chances... so to avoid missing library when import
        # other function under this module: we import related stuff here
        from torch.utils.tensorboard import SummaryWriter
        # this version's pd has vc for quick value counts
        from forgebox.imports import pd
        import tensorflow as tf
        import tensorboard as tb
        import os
        # possible tensorflow version error
        tf.io.gfile = tb.compat.tensorflow_stub.io.gfile
        # NOTE(review): `rm -rf` on a caller-supplied path — the log dir is
        # deleted wholesale before each run.
        os.system(f"rm -rf {log_dir}")
        writer = SummaryWriter(log_dir=log_dir,)
        self.i2c = dict((v,k) for k,v in self.c2i.items())
        tokens = list(self.i2c.get(i) for i in range(len(self.i2c)))
        if selection is None:
            vecs = self.base[:first_k]
            tokens = tokens[:first_k]
        else:
            selection = np.array(selection).astype(dtype=np.int64)
            # select a pool of tokens for visualizaiton
            tokens = list(np.array(tokens)[selection][:first_k])
            vecs = self.base[selection][:first_k]
        writer.add_embedding(vecs, metadata=tokens,)
        # prompts for next step
        print(f"Please run the the following command in a cell")
        print("%load_ext tensorboard")
        print(f"%tensorboard --logdir {log_dir}")
#!/usr/bin/env python
# $Id$
"""
1 solution.
"""
import puzzler
from puzzler.puzzles.tritrigs import TritrigsTriangle4
# Hand the puzzle class to the puzzler front-end, which parses CLI options
# and runs the solver.
puzzler.run(TritrigsTriangle4)
# -*- coding: utf-8 -*-
from datetime import datetime
import json
import re
import requests
from django.http import HttpResponse
from accounts.models import AuthToken
from moderations.models import Moderation, ModerationAction
from moderations.utils import timedelta_to_str
class SlackSdk(object):
    """Thin wrapper over the Slack chat.* web API plus helpers for posting
    moderation messages and the moderator leaderboard."""
    @staticmethod
    def get_channel_data(channel):
        """Look up the stored (token, channel_id) pair for a channel name;
        returns (None, None) if the channel was never authorized."""
        auth_token_object = AuthToken.objects.filter(
            service_name='slack', service_entity_auth_name=channel
        ).first()
        if auth_token_object:
            channel_id = auth_token_object.service_entity_auth_id
            token = auth_token_object.service_auth_token
            return token, channel_id
        else:
            return None, None
    @staticmethod
    def post_moderation(text):
        """Post *text* to #mod-inbox with Approve/Reject buttons.

        Returns the Slack API response JSON, or a {'success': False, ...}
        dict when the channel was never authorized."""
        attachments = [
            {
                'fallback': "Moderator actions",
                'callback_id': 'mod-inbox',
                'attachment_type': 'default',
                'actions': [
                    {
                        'name': 'approve',
                        'text': "Approve",
                        'type': 'button',
                        'value': 'approve',
                        'style': 'primary'
                    },
                    {
                        'name': 'reject',
                        'text': "Reject",
                        'type': 'button',
                        'value': 'reject'
                    }
                ]
            }
        ]
        token, channel_id = SlackSdk.get_channel_data('#mod-inbox')
        if channel_id:
            response = SlackSdk.create_message(token,
                channel_id, text, attachments)
            return response.json()
        else:
            data = {
                'success': False,
                'message': "{} is not a valid channel or "
                           "was not previously authorized".format(channel_id)
            }
            return data
    @staticmethod
    def post_leaderboard(leaderboard):
        """
        Post the moderator leaderboard + speed/quality reports to
        #mod-leaderboard as preformatted text tables.

        leaderboard = [
            {'@jared': 12,345},
        ]
        """
        def render_board(leaderboard, title):
            # ASCII box table, sorted by score descending.
            text = '┌----------------------┬----------------------┐\n'
            text += '│ {0: <20} | {1: <20} │\n'.format('Mod', title)
            sorted_leaderboard = sorted(leaderboard.items(),
                                        key=lambda x: x[1],
                                        reverse=True)
            for k, v in sorted_leaderboard:
                if k:
                    text += '├----------------------┼----------------------┤\n'
                    text += '│ {0: <20} │ {1: <20} │\n'.format(k, v)
            text += '└----------------------┴----------------------┘\n'
            return text
        def avg(a, b):
            # Percentage of a over b, guarding against division by zero.
            if b > 0.0:
                return a/float(b) * 100.0
            else:
                return 0
        text = (
            "LEADERBOARD as of {date}\n"
            "```\n"
            "{all_time}\n"
            "{seven_days}\n"
            "```\n"
        )
        text = text.format(
            date=datetime.utcnow(),
            all_time=render_board(leaderboard['all_time'], 'All Time'),
            seven_days=render_board(leaderboard['seven_days'], 'Last 7 Days')
        )
        text += 'MOD TEAM SPEED REPORT AS OF {} UTC\n'.format(datetime.utcnow())
        text += '```\n'
        text += 'Average time to first mod review (all-time): %s over %i pieces of content\n' \
                % (timedelta_to_str(leaderboard['avg']['all_time']['review'][0]),
                   leaderboard['avg']['all_time']['review'][1])
        text += 'Average time to first mod review (last 7 days): %s over %i pieces of content\n' \
                % (timedelta_to_str(leaderboard['avg']['seven_days']['review'][0]),
                   leaderboard['avg']['seven_days']['review'][1])
        text += 'Average time to first mod resolution (all-time): %s over %i pieces of content\n' \
                % (timedelta_to_str(leaderboard['avg']['all_time']['resolution'][0]),
                   leaderboard['avg']['all_time']['resolution'][1])
        text += 'Average time to first mod resolution (last 7 days): %s over %i pieces of content\n' \
                % (timedelta_to_str(leaderboard['avg']['seven_days']['resolution'][0]),
                   leaderboard['avg']['seven_days']['resolution'][1])
        text += '```\n'
        text += 'CONTENT QUALITY REPORT AS OF {} UTC\n'.format(datetime.utcnow())
        counts = leaderboard['counts']
        text += '```\n'
        text += 'Past 7 days content: %i\n' \
                % counts['total']
        text += 'Past 7 days flagged by mods: %i (%.2f%%)\n' \
                % (counts['total_flagged'],
                   avg(counts['total_flagged'], counts['total']))
        text += 'Reason: Off topic: %i (%.2f%% of flags)\n' \
                % (counts['off_topic'],
                   avg(counts['off_topic'], counts['total_flagged']))
        text += 'Reason: Inappropriate: %i (%.2f%% of flags)\n' \
                % (counts['inappropriate'],
                   avg(counts['inappropriate'], counts['total_flagged']))
        text += 'Reason: Contact info: %i (%.2f%% of flags)\n' \
                % (counts['contact_info'],
                   avg(counts['contact_info'], counts['total_flagged']))
        text += 'Reason: Other: %i (%.2f%% of flags)\n' \
                % (counts['other'],
                   avg(counts['other'], counts['total_flagged']))
        text += '```\n'
        token, channel_id = SlackSdk.get_channel_data('#mod-leaderboard')
        return SlackSdk.create_message(token, channel_id,
                                       text, [], in_channel=True)
    @staticmethod
    def create_message(access_token, channel_id,
                       text='', attachments=[], in_channel=False):
        """Call Slack chat.postMessage.

        Oversized messages (>= 3500 chars) that match the "X posted the
        <link|type>" shape are replaced with a short warning + link so the
        request stays within Slack's limits. Cloudinary links get media
        unfurling enabled."""
        is_image = False
        if 'https://res.cloudinary.com/' in text:
            is_image = True
        if len(text) >= 3500:
            search_text = re.findall(
                '^(.* posted the) <(https://.*)\|(.*)>.*:\n',
                text
            )
            if search_text:
                new_content_text = search_text[0][0]
                link = search_text[0][1]
                new_content_type = search_text[0][2]
                text = '%s %s. WARNING: this content cannot be displayed, ' \
                       'please read the complete content <%s|HERE>' \
                       % (new_content_text, new_content_type, link)
        params = {
            'token': access_token,
            'channel': channel_id,
            'text': text,
            'attachments': json.dumps(attachments),
            'unfurl_links': False,
            'unfurl_media': is_image,
        }
        if in_channel:
            params['response_type'] = 'in_channel'
        return requests.get(
            url='https://slack.com/api/chat.postMessage',
            params=params
        )
    @staticmethod
    def delete_message(access_token, channel_id, ts):
        """Call Slack chat.delete for the message with timestamp *ts*."""
        return requests.get(
            url='https://slack.com/api/chat.delete',
            params={
                'token': access_token,
                'ts': ts,
                'channel': channel_id,
            }
        )
    @staticmethod
    def update_message(access_token, channel_id, ts,
                       text='', attachments=[]):
        """Call Slack chat.update to replace text/attachments of *ts*."""
        return requests.get(
            url='https://slack.com/api/chat.update',
            params={
                'token': access_token,
                'ts': ts,
                'channel': channel_id,
                'text': text,
                'attachments': json.dumps(attachments),
                'parse': 'none',
            }
        )
def mod_inbox_approved(data, moderation):
    """Handle an Approve click from #mod-inbox.

    Reposts the message to #mod-approved with an approval footer, records
    the moderation action, then deletes the original from #mod-inbox.
    Always returns an empty HttpResponse (Slack expects a 200)."""
    original_message = data.get('original_message')
    text = original_message.get('text')
    approved_by = data.get('user').get('name')
    # action_ts is "<seconds>.<fraction>"; keep whole seconds only.
    approved_time = float(data.get('action_ts').split('.')[0])
    approved_time = datetime.utcfromtimestamp(approved_time)
    approved_time = approved_time.strftime('%Y-%m-%d %I:%M%p')
    ts = data.get('message_ts')
    attachments = [
        {
            "fallback": "Please moderate this.",
            "text": ":white_check_mark: _Approved by @%s %s UTC_" %
                    (approved_by, approved_time),
            "callback_id": "mod-approved",
            "attachment_type": "default",
            "mrkdwn_in": [
                "text"
            ]
        }
    ]
    token, channel_id = SlackSdk.get_channel_data('#mod-approved')
    response = SlackSdk.create_message(token, channel_id, text, attachments)
    if response.status_code == 200:
        data = response.json()
        if data.get('ok'):
            token, channel_id = SlackSdk.get_channel_data('#mod-inbox')
            save_moderation_action(moderation, approved_by, channel_id,
                                   'approve', data.get('ts'))
            # Fixed the misspelled (and unused) local `reponse`; the
            # return value of delete_message is not needed.
            SlackSdk.delete_message(token, channel_id, ts)
    return HttpResponse('')
def mod_inbox_reject(data, moderation):
    """Handle a Reject click from #mod-inbox: swap the Approve/Reject
    buttons for a reject-reason button row (plus Undo) in place.

    Always returns an empty HttpResponse."""
    original_message = data.get('original_message')
    text = original_message.get('text')
    ts = data.get('message_ts')
    attachments = [
        {
            "fallback": "Moderator actions",
            "text": "_Reject: Select a reason_",
            "callback_id": "mod-inbox",
            "attachment_type": "default",
            "mrkdwn_in": [
                "text"
            ],
            "actions": [
                {
                    "name": "Off topic",
                    "text": "Off topic",
                    "type": "button",
                    "value": "off_topic",
                    "style": "danger"
                },
                {
                    "name": "Inappropriate",
                    "text": "Inappropriate",
                    "type": "button",
                    "value": "inappropriate",
                    "style": "danger"
                },
                {
                    "name": "Contact info",
                    "text": "Contact info",
                    "type": "button",
                    "value": "contact_info",
                    "style": "danger"
                },
                {
                    "name": "Other",
                    "text": "Other",
                    "type": "button",
                    "value": "other",
                    "style": "danger"
                },
                {
                    "name": "Undo",
                    "text": "Undo",
                    "type": "button",
                    "value": "undo"
                }
            ]
        }
    ]
    token, channel_id = SlackSdk.get_channel_data('#mod-inbox')
    response = SlackSdk.update_message(token, channel_id, ts,
                                       text=text, attachments=attachments)
    # NOTE(review): the parsed JSON below is never used; the assignment
    # only exists to force .json() parsing of the response.
    data = response.json()
    return HttpResponse('')
def mod_inbox_reject_undo(data):
    """Handle an Undo click: restore the original Approve/Reject buttons on
    the #mod-inbox message. Always returns an empty HttpResponse."""
    original_message = data.get('original_message')
    message_ts = data.get('message_ts')
    action_buttons = [
        {
            "name": "approve",
            "text": "Approve",
            "type": "button",
            "value": "approve",
            "style": "primary"
        },
        {
            "name": "reject",
            "text": "Reject",
            "type": "button",
            "value": "reject"
        }
    ]
    attachments = [
        {
            "fallback": "Moderator actions",
            "callback_id": "mod-inbox",
            "attachment_type": "default",
            "actions": action_buttons
        }
    ]
    token, channel_id = SlackSdk.get_channel_data('#mod-inbox')
    SlackSdk.update_message(token, channel_id, message_ts,
                            text=original_message.get('text'),
                            attachments=attachments)
    return HttpResponse('')
def mod_inbox_reject_reason(data, moderation):
    """Handle a reject-reason click: repost the message to #mod-flagged
    with the reason + a "Mark resolved" button, record the action, and
    delete the original from #mod-inbox.

    Always returns an empty HttpResponse."""
    original_message = data.get('original_message')
    text = original_message.get('text')
    rejected_by = data.get('user').get('name')
    # action_ts is "<seconds>.<fraction>"; keep whole seconds only.
    rejected_time = float(data.get('action_ts').split('.')[0])
    rejected_time = datetime.utcfromtimestamp(rejected_time)
    rejected_time = rejected_time.strftime('%Y-%m-%d %I:%M%p')
    rejected_reason = data.get('actions')[0]['value']
    ts = data.get('message_ts')
    attachments = [
        {
            "fallback": "Moderator actions",
            "text": "_%s UTC: @%s rejected this with the reason: \"%s\"_" %
                    (rejected_time, rejected_by, rejected_reason),
            "callback_id": "mod-flagged",
            "attachment_type": "default",
            "mrkdwn_in": [
                "text"
            ],
            "actions": [
                {
                    "name": "Resolve",
                    "text": "Mark resolved",
                    "type": "button",
                    "value": "resolve",
                    "style": "primary"
                }
            ]
        }
    ]
    token, channel_id = SlackSdk.get_channel_data('#mod-flagged')
    response = SlackSdk.create_message(token, channel_id,
                                       text=text, attachments=attachments)
    if response.status_code == 200:
        data = response.json()
        if data.get('ok'):
            token, channel_id = SlackSdk.get_channel_data('#mod-inbox')
            save_moderation_action(moderation, rejected_by,
                                   channel_id, rejected_reason, data.get('ts'))
            SlackSdk.delete_message(token, channel_id, ts)
    return HttpResponse('')
def mod_inbox(data, action, moderation):
    """Route a #mod-inbox button press to its handler.

    Unknown actions fall through and return None.
    """
    if action == 'approve':
        return mod_inbox_approved(data, moderation)
    if action == 'reject':
        return mod_inbox_reject(data, moderation)
    if action == 'undo':
        return mod_inbox_reject_undo(data)
    if action in ('off_topic', 'inappropriate', 'contact_info', 'other'):
        return mod_inbox_reject_reason(data, moderation)
def mod_flagged_resolve(data, moderation):
    """Handle a "Mark resolved" click from #mod-flagged: repost the message
    to #mod-resolved (keeping the reject reason, appending the resolver
    footer), record the action, and delete the #mod-flagged copy.

    Always returns an empty HttpResponse."""
    original_message = data.get('original_message')
    text = original_message.get('text')
    resolved_by = data.get('user').get('name')
    # action_ts is "<seconds>.<fraction>"; keep whole seconds only.
    resolved_time = float(data.get('action_ts').split('.')[0])
    resolved_time = datetime.utcfromtimestamp(resolved_time)
    resolved_time = resolved_time.strftime('%Y-%m-%d %I:%M%p')
    # First attachment's text holds the original rejection footer.
    rejected_reason = original_message.get('attachments')[0]['text']
    message_ts = data.get('message_ts')
    attachments = [
        {
            "fallback": "Please moderate this.",
            "text": "%s\n_%s UTC: @%s marked this \"Resolved\"_" %
                    (rejected_reason, resolved_time, resolved_by),
            "callback_id": "mod-resolved",
            "attachment_type": "default",
            "mrkdwn_in": [
                "text"
            ]
        }
    ]
    token, channel_id = SlackSdk.get_channel_data('#mod-resolved')
    response = SlackSdk.create_message(token, channel_id, text=text,
                                       attachments=attachments)
    if response.status_code == 200:
        data = response.json()
        if data.get('ok'):
            token, channel_id = SlackSdk.get_channel_data('#mod-flagged')
            ts = data.get('ts')
            save_moderation_action(moderation, resolved_by, channel_id,
                                   'resolve', ts)
            SlackSdk.delete_message(token, channel_id, message_ts)
    return HttpResponse('')
def mod_flagged(data, action, moderation):
    """Dispatch a #mod-flagged button press ('resolve' is the only action).

    Raises ValueError for an unknown action. (The original used
    `assert False, action`, which is silently stripped under `python -O`;
    an explicit raise always fails loudly.)
    """
    if action == 'resolve':
        return mod_flagged_resolve(data, moderation)
    raise ValueError("unexpected mod-flagged action: %r" % (action,))
def save_moderation_action(moderation, username, channel_id,
                           action, message_id):
    """Persist the outcome of a moderator click.

    The Moderation record's status is set to the channel the content now
    lives in, and a ModerationAction row is created for the audit trail.
    """
    moderation.status = channel_id
    moderation.status_reason = action
    moderation.message_id = message_id
    moderation.save()
    ModerationAction.objects.create(moderation=moderation,
                                    action=action,
                                    action_author_id=username)
def moderate(data):
    """Entry point for Slack interactive-message callbacks.

    Parses the JSON 'payload' field, looks up the Moderation record by the
    message timestamp, and dispatches on callback_id ('mod-inbox' or
    'mod-flagged'). Unhandled payloads are echoed back as pretty JSON.
    """
    data = data.get('payload')
    data = json.loads(data)
    if data:
        action = data.get('actions')[0].get('value')
        message_id = data.get('message_ts')
        moderation = Moderation.objects.get_by_message_id(message_id)
        callback_id = data.get('callback_id')
        if callback_id == 'mod-inbox':
            return mod_inbox(data, action, moderation)
        elif callback_id == 'mod-flagged':
            return mod_flagged(data, action, moderation)
    return HttpResponse(json.dumps(data, indent=4))
| 16,775 | 4,879 |
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect
from django.template import loader
from django.http import HttpResponse
from django.contrib import messages
from django.contrib.auth.models import User
@login_required(login_url='signin')
def home(request):
    """Render the home page (login required)."""
    return render(request, 'home.html')
def register(request):
    """Create a new user account from the registration form.

    On success the user is redirected to the sign-in page; on a password
    mismatch (or on GET) the registration form is rendered.
    """
    if request.method == "POST":
        username = request.POST['username']
        email = request.POST['email']
        password1 = request.POST['password1']
        password2 = request.POST['password2']
        if password1 == password2:
            # NOTE(review): no duplicate-username check — create_user will
            # raise an IntegrityError for an existing name; confirm the
            # desired handling.
            newUser = User.objects.create_user(username, email, password1)
            newUser.save()
            messages.success(request, "Account successfully created.")
            return redirect('signin')
        else:
            # Previously a mismatch silently re-rendered the form with no
            # feedback; surface the problem to the user.
            messages.error(request, "Passwords do not match.")
    return render(request, 'register.html')
def signin(request):
    """Authenticate a user from the sign-in form.

    On success, log the session in; on failure, flash an error and redirect
    back to the sign-in page.
    """
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST.get('password')
        user = authenticate(username=username, password=password)
        if user is not None:
            login(request, user)
            # NOTE(review): this renders home.html directly instead of
            # redirecting (post/redirect/get), so a browser refresh will
            # resubmit the credentials — consider redirect('home').
            return render(request, "home.html", {'username': username})
        else:
            messages.error(request, "Invalid credentials")
            return redirect('signin')
    return render(request, 'signin.html')
def signout(request):
    """Log the current user out, flash a notice, and go to sign-in."""
    logout(request)
    messages.info(request, "You have succesfully logged out.")
    return redirect("signin")
@login_required(login_url='signin')
def leaderboard(request):
    """Render the leaderboard page (login required)."""
    return render(request, 'leaderboard.html')
@login_required(login_url='signin')
def scan(request):
    """Render the scan page (login required)."""
    return render(request, 'scan.html')
| 1,790 | 483 |