code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from itertools import chain
from nose.tools import *
from hawkweed.monads.either import Either, Left, Right, is_right,\
is_left, is_either, either, lefts, rights, partition_eithers
from hawkweed.functional.primitives import identity
def test_right():
    # Right.bind applies the given function to the wrapped value.
    assert_equal(Right(10).bind(identity), 10)
def test_nothing():
    # Left.bind ignores the function and propagates the Left unchanged.
    l = Left("failure")
    assert_equal(l.bind(lambda _: "lol"), l)
def test_is_right():
    # is_right is True only for Right instances (not Left, not plain values).
    assert_true(is_right(Right(10)))
    assert_false(is_right(Left("no")))
    assert_false(is_right(10))
def test_is_left():
    # is_left is True only for Left instances (not Right, not plain values).
    assert_true(is_left(Left("yes")))
    assert_false(is_left(Right(10)))
    assert_false(is_left(10))
def test_is_either():
    # is_either accepts both Left and Right, but rejects plain values.
    assert_true(is_either(Right(10)))
    assert_true(is_either(Left("yes")))
    assert_false(is_either(10))
def test_either():
    # either() dispatches on the Either subtype: the first callback handles
    # Left, the second handles Right; a non-Either input raises ValueError.
    v = "val"
    either(lambda x: assert_equal(Left(v), x), None, Left(v))
    either(None, lambda x: assert_equal(v, x), Right(v))
    with assert_raises(ValueError):
        either(None, None, 10)
def test_lefts():
    # lefts() keeps only the Left values, preserving their order.
    l = [Left("failure"), Left("i died"), Left("noes")]
    lr = l + [Right(1)]
    assert_equal(list(lefts(lr)), l)
def test_rights():
    # rights() keeps only the Right values, preserving their order.
    r = [Right(x) for x in range(4)]
    rl = [Left("moo")] + r
    assert_equal(list(rights(rl)), r)
def test_partition_eithers():
    # partition_eithers() splits an interleaved sequence into (lefts, rights),
    # each partition preserving encounter order.
    r = [Right(x) for x in range(4)]
    l = [Left(x) for x in ["failure"] * 4]
    rl = list(chain.from_iterable(zip(r, l)))
    assert_equal([list(x) for x in partition_eithers(rl)], [l, r])
| [
"hawkweed.monads.either.either",
"hawkweed.monads.either.rights",
"hawkweed.monads.either.Left",
"hawkweed.monads.either.Right",
"hawkweed.monads.either.is_either",
"hawkweed.monads.either.lefts",
"hawkweed.monads.either.is_right",
"hawkweed.monads.either.is_left",
"hawkweed.monads.either.partition_... | [((336, 351), 'hawkweed.monads.either.Left', 'Left', (['"""failure"""'], {}), "('failure')\n", (340, 351), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((512, 524), 'hawkweed.monads.either.is_right', 'is_right', (['(10)'], {}), '(10)\n', (520, 524), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((639, 650), 'hawkweed.monads.either.is_left', 'is_left', (['(10)'], {}), '(10)\n', (646, 650), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((770, 783), 'hawkweed.monads.either.is_either', 'is_either', (['(10)'], {}), '(10)\n', (779, 783), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((872, 879), 'hawkweed.monads.either.Left', 'Left', (['v'], {}), '(v)\n', (876, 879), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((928, 936), 'hawkweed.monads.either.Right', 'Right', (['v'], {}), '(v)\n', (933, 936), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((982, 1004), 'hawkweed.monads.either.either', 'either', (['None', 'None', '(10)'], {}), '(None, None, 10)\n', (988, 1004), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((1033, 1048), 'hawkweed.monads.either.Left', 'Left', (['"""failure"""'], {}), "('failure')\n", (1037, 1048), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((1050, 1064), 
'hawkweed.monads.either.Left', 'Left', (['"""i died"""'], {}), "('i died')\n", (1054, 1064), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((1066, 1078), 'hawkweed.monads.either.Left', 'Left', (['"""noes"""'], {}), "('noes')\n", (1070, 1078), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((1170, 1178), 'hawkweed.monads.either.Right', 'Right', (['x'], {}), '(x)\n', (1175, 1178), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((1303, 1311), 'hawkweed.monads.either.Right', 'Right', (['x'], {}), '(x)\n', (1308, 1311), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((1340, 1347), 'hawkweed.monads.either.Left', 'Left', (['x'], {}), '(x)\n', (1344, 1347), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((444, 453), 'hawkweed.monads.either.Right', 'Right', (['(10)'], {}), '(10)\n', (449, 453), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((482, 492), 'hawkweed.monads.either.Left', 'Left', (['"""no"""'], {}), "('no')\n", (486, 492), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((571, 582), 'hawkweed.monads.either.Left', 'Left', (['"""yes"""'], {}), "('yes')\n", (575, 582), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((610, 619), 'hawkweed.monads.either.Right', 'Right', (['(10)'], {}), '(10)\n', (615, 619), False, 'from 
hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((701, 710), 'hawkweed.monads.either.Right', 'Right', (['(10)'], {}), '(10)\n', (706, 710), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((739, 750), 'hawkweed.monads.either.Left', 'Left', (['"""yes"""'], {}), "('yes')\n", (743, 750), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((1094, 1102), 'hawkweed.monads.either.Right', 'Right', (['(1)'], {}), '(1)\n', (1099, 1102), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((1126, 1135), 'hawkweed.monads.either.lefts', 'lefts', (['lr'], {}), '(lr)\n', (1131, 1135), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((1208, 1219), 'hawkweed.monads.either.Left', 'Left', (['"""moo"""'], {}), "('moo')\n", (1212, 1219), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((1247, 1257), 'hawkweed.monads.either.rights', 'rights', (['rl'], {}), '(rl)\n', (1253, 1257), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((277, 286), 'hawkweed.monads.either.Right', 'Right', (['(10)'], {}), '(10)\n', (282, 286), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n'), ((853, 860), 'hawkweed.monads.either.Left', 'Left', (['v'], {}), '(v)\n', (857, 860), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, 
partition_eithers\n'), ((1455, 1476), 'hawkweed.monads.either.partition_eithers', 'partition_eithers', (['rl'], {}), '(rl)\n', (1472, 1476), False, 'from hawkweed.monads.either import Either, Left, Right, is_right, is_left, is_either, either, lefts, rights, partition_eithers\n')] |
import numpy as np
import pandas as pd
from .libbinding import t_schema, t_dtype, t_table
class Perspective(object):
    """Columnar table wrapper around the native ``t_table`` binding.

    Maps python/numpy types to native ``t_dtype`` values, builds a
    ``t_schema``, and exposes helpers to load columns and convert to/from
    pandas DataFrames.
    """

    def __init__(self, column_names, types):
        """Create an empty table with the given column names and types.

        Args:
            column_names: list of column name strings.
            types: list of types (python type, numpy type, t_dtype, or
                sample data) parallel to ``column_names``.

        Raises:
            Exception: if the two lists differ in length or a type
                cannot be mapped to a ``t_dtype``.
        """
        # Detect a length mismatch up front: zip() silently truncates to
        # the shorter input, so comparing against the derived dtypes list
        # (as before) missed the case where ``types`` was the longer one.
        if len(column_names) != len(types):
            raise Exception('column name/dtype length mismatch')
        self._columns = {}
        dtypes = []
        for name, _type in zip(column_names, types):
            dtypes.append(self._type_to_dtype(_type))
            self._columns[name] = _type
        _schema = t_schema(column_names, dtypes)
        self._t_table = t_table(_schema)
        self._t_table.init()

    @classmethod
    def _type_to_dtype(cls, _type):
        """Map ``_type`` to a native ``t_dtype``.

        Accepts a ``t_dtype`` (returned as-is), an ndarray or list (dtype
        inferred from the elements), or a plain type / numpy dtype.
        """
        if isinstance(_type, t_dtype):
            return _type
        if isinstance(_type, np.ndarray):
            # np.str / np.bool / np.object were aliases of the builtins
            # and were removed in NumPy 1.20+ (errors since 1.24); compare
            # against the builtins, which is equivalent on older NumPy.
            if _type.dtype == np.int64:
                return t_dtype.INT64
            if _type.dtype == np.float64:
                return t_dtype.FLOAT64
            if _type.dtype == str:
                return t_dtype.STR
            if _type.dtype == bool:
                return t_dtype.BOOL
            if _type.dtype == object:
                return t_dtype.STR
            raise Exception('Type not recognized - %s' % _type)
        if isinstance(_type, list):
            # Check bool before int: bool is a subclass of int, so the
            # previous int-first ordering made the BOOL branch unreachable
            # and mapped lists of bools to INT64.
            if isinstance(_type[0], bool):
                return t_dtype.BOOL
            if isinstance(_type[0], int):
                return t_dtype.INT64
            if isinstance(_type[0], float):
                return t_dtype.FLOAT64
            if isinstance(_type[0], str):
                return t_dtype.STR
            raise Exception('Type not recognized - %s' % _type)
        if _type == np.int64:
            return t_dtype.INT64
        if _type == np.float64:
            return t_dtype.FLOAT64
        if _type == int:
            return t_dtype.INT64
        if _type == float:
            return t_dtype.FLOAT64
        if _type == bool:
            return t_dtype.BOOL
        if _type == str:
            return t_dtype.STR
        if _type == object:
            return t_dtype.STR
        raise Exception('%s not currently supported' % _type)

    def _validate_col(self, col):
        """Raise if consecutive elements of ``col`` differ in type."""
        for i in range(len(col)):
            if i == 0:
                continue
            if not isinstance(col[i-1], type(col[i])) and not (isinstance(col[i], type(col[i-1]))):
                raise Exception('data must be homogenous type')

    def load(self, col, data):
        """Load ``data`` into existing column ``col``, growing the table.

        Raises:
            Exception: if ``col`` is not already in the schema (schema
                changes are not implemented) or ``data`` is heterogeneous.
        """
        if self._t_table.size() < len(data):
            self._t_table.extend(len(data))
        if not self._t_table.get_schema().has_column(col):
            raise Exception('schema change not implemented')
        self._validate_col(data)
        self._t_table.load_column(col, data, self._type_to_dtype(data))

    def print(self):
        # Pretty-print the native table (side effect only).
        self._t_table.pprint()

    def __repr__(self):
        # FIXME: repr should return the table text rather than print it.
        self.print()
        return ''

    def __getitem__(self, col):
        """Return the native column object for ``col``."""
        if not self._t_table.get_schema().has_column(col):
            raise Exception('col not in table - %s' % col)
        return self._t_table.get_column(col)

    def to_df(self):
        """Convert the table to a pandas DataFrame, column by column."""
        df = pd.DataFrame()
        for col in self._columns:
            df[col] = self[col]
        return df

    @staticmethod
    def from_df(df):
        """Build a Perspective table from a pandas DataFrame."""
        cols = []
        types = []
        for k, v in dict(df.dtypes).items():
            cols.append(k)
            types.append(Perspective._type_to_dtype(v))
        t = Perspective(cols, types)
        t._t_table.extend(len(df.index))
        for col in cols:
            t.load(col, df[col].values)
        return t
| [
"pandas.DataFrame"
] | [((3233, 3247), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (3245, 3247), True, 'import pandas as pd\n')] |
import os
import shutil
import tempfile
import unittest
from django.test import TestCase
from ...plugins import DatabaseCacheLayer
from .handler import ShelfDatabasePlugin
class ShelfDatabaseTest(TestCase):
    """Round-trip tests for the shelf-backed database plugin."""
    def setUp(self):
        # Fresh temp directory per test; skip the suite entirely when the
        # underlying database module is not installed.
        self.temp_path = tempfile.mkdtemp()
        self.db_path = os.path.join(self.temp_path, "db")
        try:
            self.db = self.open_database()
        except ImportError:
            raise unittest.SkipTest("Database not installed")
    def tearDown(self):
        shutil.rmtree(self.temp_path)
    def open_database(self):
        # Overridden by subclasses to wrap the plugin differently.
        return ShelfDatabasePlugin({"path": self.db_path})
    def reOpen(self):
        # Flush to disk, close and reopen -- used to verify persistence.
        self.db.sync()
        self.db.close()
        self.db = self.open_database()
    def test_set_get_keys(self):
        # Binary-looking and plain string keys both round-trip.
        self.db["\x01\x03"] = "\x01\x03"
        self.db["test"] = "test"
        self.db.sync()
        self.assertEqual(self.db["\x01\x03"], "\x01\x03")
        self.assertEqual(self.db["test"], "test")
        self.assertEqual(len(self.db), 2)
    def test_delete_keys(self):
        # Deleting one key leaves the other intact.
        self.test_set_get_keys()
        del self.db["test"]
        self.db.sync()
        self.assertEqual(len(self.db), 1)
        self.assertNotIn("test", self.db)
        self.assertIn("\x01\x03", self.db)
    def test_reopen_database(self):
        # Writes and deletes must survive a sync/close/reopen cycle.
        self.db["\x01\x03"] = "\x01\x03"
        self.db["test"] = "test"
        self.reOpen()
        self.assertEqual(self.db["test"], "test")
        self.assertEqual(len(self.db), 2)
        self.assertEqual(self.db["\x01\x03"], "\x01\x03")
        self.db["test2"] = "abc"
        del self.db["test"]
        self.reOpen()
        self.assertEqual(len(self.db), 2)
        self.assertNotIn("test", self.db)
        self.assertIn("\x01\x03", self.db)
        self.assertIn("test2", self.db)
    def test_complex_types(self):
        # Non-string values (sets, dicts) should round-trip unchanged.
        self.db["set"] = set([1, 2, 3])
        self.assertEqual(self.db["set"], set([1, 2, 3]))
        self.db["dict"] = dict(a=1, b=2)
        self.assertEqual(self.db["dict"], dict(a=1, b=2))
class ShelfDatabaseCacheTest(ShelfDatabaseTest):
    """Re-runs the whole suite with the cache layer wrapping the plugin."""
    def open_database(self):
        return DatabaseCacheLayer(super(ShelfDatabaseCacheTest, self).open_database())
| [
"os.path.join",
"tempfile.mkdtemp",
"unittest.SkipTest",
"shutil.rmtree"
] | [((257, 275), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (273, 275), False, 'import tempfile\n'), ((299, 333), 'os.path.join', 'os.path.join', (['self.temp_path', '"""db"""'], {}), "(self.temp_path, 'db')\n", (311, 333), False, 'import os\n'), ((513, 542), 'shutil.rmtree', 'shutil.rmtree', (['self.temp_path'], {}), '(self.temp_path)\n', (526, 542), False, 'import shutil\n'), ((436, 479), 'unittest.SkipTest', 'unittest.SkipTest', (['"""Database not installed"""'], {}), "('Database not installed')\n", (453, 479), False, 'import unittest\n')] |
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE
from sqlalchemy.orm import relationship
from database import Base
class User(Base):
    """ORM model for an application user (table ``users``)."""
    __tablename__ = "users"
    # username doubles as the primary key
    username = Column(String, primary_key=True, index=True)
    email = Column(String, unique=True, index=True)
    name = Column(String, index=True)
    last_name = Column(String, index=True)
    # phone number ("celular")
    celular = Column(String, index=True)
    # NOTE(review): stored password -- presumably hashed upstream; confirm.
    user_password = Column(String)
    # one-to-many: documents owned by this user
    documents = relationship("Documents", back_populates="owner")
class Documents(Base):
    """ORM model for a user-owned document (table ``documents``)."""
    __tablename__ = "documents"
    id = Column(Integer, primary_key=True, index=True)
    # document name
    name_doc = Column(String, index=True)
    # expiry date
    exp = Column(DATE, index=True)
    # NOTE(review): looks like a notification setting -- confirm semantics.
    notif = Column(Integer)
    # free-text description
    descrip = Column(String, index=True)
    owner_username = Column(String, ForeignKey("users.username"))
owner = relationship("User", back_populates="documents") | [
"sqlalchemy.orm.relationship",
"sqlalchemy.ForeignKey",
"sqlalchemy.Column"
] | [((205, 249), 'sqlalchemy.Column', 'Column', (['String'], {'primary_key': '(True)', 'index': '(True)'}), '(String, primary_key=True, index=True)\n', (211, 249), False, 'from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE\n'), ((262, 301), 'sqlalchemy.Column', 'Column', (['String'], {'unique': '(True)', 'index': '(True)'}), '(String, unique=True, index=True)\n', (268, 301), False, 'from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE\n'), ((313, 339), 'sqlalchemy.Column', 'Column', (['String'], {'index': '(True)'}), '(String, index=True)\n', (319, 339), False, 'from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE\n'), ((356, 382), 'sqlalchemy.Column', 'Column', (['String'], {'index': '(True)'}), '(String, index=True)\n', (362, 382), False, 'from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE\n'), ((397, 423), 'sqlalchemy.Column', 'Column', (['String'], {'index': '(True)'}), '(String, index=True)\n', (403, 423), False, 'from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE\n'), ((444, 458), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (450, 458), False, 'from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE\n'), ((476, 525), 'sqlalchemy.orm.relationship', 'relationship', (['"""Documents"""'], {'back_populates': '"""owner"""'}), "('Documents', back_populates='owner')\n", (488, 525), False, 'from sqlalchemy.orm import relationship\n'), ((593, 638), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)', 'index': '(True)'}), '(Integer, primary_key=True, index=True)\n', (599, 638), False, 'from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE\n'), ((654, 680), 'sqlalchemy.Column', 'Column', (['String'], {'index': '(True)'}), '(String, index=True)\n', (660, 680), False, 'from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE\n'), ((691, 715), 'sqlalchemy.Column', 'Column', 
(['DATE'], {'index': '(True)'}), '(DATE, index=True)\n', (697, 715), False, 'from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE\n'), ((728, 743), 'sqlalchemy.Column', 'Column', (['Integer'], {}), '(Integer)\n', (734, 743), False, 'from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE\n'), ((758, 784), 'sqlalchemy.Column', 'Column', (['String'], {'index': '(True)'}), '(String, index=True)\n', (764, 784), False, 'from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE\n'), ((864, 912), 'sqlalchemy.orm.relationship', 'relationship', (['"""User"""'], {'back_populates': '"""documents"""'}), "('User', back_populates='documents')\n", (876, 912), False, 'from sqlalchemy.orm import relationship\n'), ((821, 849), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""users.username"""'], {}), "('users.username')\n", (831, 849), False, 'from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DATE\n')] |
from openpathsampling.high_level.network import FixedLengthTPSNetwork
from openpathsampling.high_level.transition import FixedLengthTPSTransition
import openpathsampling as paths
class PartInBFixedLengthTPSTransition(FixedLengthTPSTransition):
    """Fixed length TPS transition accepting any frame in the final state.

    Transition that builds an ensemble used to facilitate the rate
    calculation in fixed-length TPS. [1]_ Details in
    :class:`.PartInBFixedLengthTPSNetwork`.

    See also
    --------
    PartInBFixedLengthTPSNetwork

    References
    ----------
    .. [1] <NAME>, <NAME>, and <NAME>. J. Chem. Phys. 110,
        6617 (1999). http://dx.doi.org/10.1063/1.478569
    """
    def _tps_ensemble(self, stateA, stateB):
        # Frame 1 must lie in stateA; the remaining (length - 1) frames
        # must contain at least one frame anywhere in stateB.
        return paths.SequentialEnsemble([
            paths.LengthEnsemble(1) & paths.AllInXEnsemble(stateA),
            paths.LengthEnsemble(self.length - 1) \
                & paths.PartInXEnsemble(stateB)
        ])
class PartInBFixedLengthTPSNetwork(FixedLengthTPSNetwork):
    """Network for fixed-length TPS accepting any frame in the final state

    This network samples a single path ensemble where the paths must begin
    in an initial state, run for a fixed total number of frames, and must
    have at least one frame in a final state. This was used to assist in
    the flux part of the TPS rate calculation. [1]_ This version is
    generalized to multiple states.

    Parameters
    ----------
    initial_states : (list of) :class:`.Volume`
        acceptable initial states
    final_states : (list of) :class:`.Volume`
        acceptable final states
    length : int
        length of paths in the path ensemble, in frames
    allow_self_transitions : bool
        whether self-transitions (A->A) are allowed; default is False. For
        this network, A->B->A transitions are *always* allowed.

    References
    ----------
    .. [1] <NAME>, <NAME>, and <NAME>. J. Chem. Phys. 110,
        6617 (1999). http://dx.doi.org/10.1063/1.478569
    """
    # Use the part-in-B transition class to build the sampled ensemble.
    TransitionType = PartInBFixedLengthTPSTransition
| [
"openpathsampling.LengthEnsemble",
"openpathsampling.AllInXEnsemble",
"openpathsampling.PartInXEnsemble"
] | [((802, 825), 'openpathsampling.LengthEnsemble', 'paths.LengthEnsemble', (['(1)'], {}), '(1)\n', (822, 825), True, 'import openpathsampling as paths\n'), ((828, 856), 'openpathsampling.AllInXEnsemble', 'paths.AllInXEnsemble', (['stateA'], {}), '(stateA)\n', (848, 856), True, 'import openpathsampling as paths\n'), ((870, 907), 'openpathsampling.LengthEnsemble', 'paths.LengthEnsemble', (['(self.length - 1)'], {}), '(self.length - 1)\n', (890, 907), True, 'import openpathsampling as paths\n'), ((928, 957), 'openpathsampling.PartInXEnsemble', 'paths.PartInXEnsemble', (['stateB'], {}), '(stateB)\n', (949, 957), True, 'import openpathsampling as paths\n')] |
from django.contrib import admin
from players.models import Player
class PlayerAdmin(admin.ModelAdmin):
    """Default admin configuration for the Player model (no customization)."""
    pass
# Expose Player in the Django admin site.
admin.site.register(Player, PlayerAdmin)
| [
"django.contrib.admin.site.register"
] | [((117, 157), 'django.contrib.admin.site.register', 'admin.site.register', (['Player', 'PlayerAdmin'], {}), '(Player, PlayerAdmin)\n', (136, 157), False, 'from django.contrib import admin\n')] |
from . import Checksum
from django.conf import settings
import requests
import json
def VerifyPaytmResponse(response):
    """Validate a Paytm payment callback request.

    Verifies the checksum of the POSTed parameters and, when valid,
    confirms the transaction status with Paytm's status endpoint.

    Returns a dict with key ``'verified'`` (bool) and, when the checksum
    was valid, key ``'paytm'`` holding Paytm's status response.
    """
    result = {'verified': False}
    # Only POST callbacks can carry a Paytm response.
    if response.method != "POST":
        return result
    post_data = {key: response.POST[key] for key in response.POST}
    mid = post_data['MID']
    order_id = post_data['ORDERID']
    checksum_ok = Checksum.verify_checksum(
        post_data, settings.PAYTM_MERCHANT_KEY, post_data['CHECKSUMHASH'])
    if not checksum_ok:
        return result
    # Checksum is valid -- double-check the transaction with Paytm.
    headers = {
        'Content-Type': 'application/json',
    }
    payload = '{"MID":"%s","ORDERID":"%s"}' % (mid, order_id)
    status_response = requests.post(
        settings.PAYTM_TRANSACTION_STATUS_URL,
        data=payload, headers=headers).json()
    result['paytm'] = status_response
    result['verified'] = status_response['STATUS'] == 'TXN_SUCCESS'
    return result
"requests.post"
] | [((722, 775), 'requests.post', 'requests.post', (['STATUS_URL'], {'data': 'data', 'headers': 'headers'}), '(STATUS_URL, data=data, headers=headers)\n', (735, 775), False, 'import requests\n')] |
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
def entropy(p):
    """Bernoulli (binary) entropy of success probability ``p``, in nats."""
    failure = 1. - p
    return -p * np.log(p) - failure * np.log(failure)
def jsd(p, q):
    """Jensen-Shannon divergence of Bernoulli(p) and Bernoulli(q), as a list."""
    mixture = p / 2. + q / 2.
    divergence = entropy(mixture) - entropy(p) / 2. - entropy(q) / 2.
    return [divergence]
def jsd_grad(go, o, pq_list):
    """Backward pass for the Bernoulli JSD: d(jsd)/dp scaled by ``go``.

    ``o`` (the forward output) is unused; the ``q`` input gets no gradient.
    """
    p, q = pq_list
    mixture = (p + q) / 2.
    ratio = p * (1 - mixture) / (1 - p) / mixture
    return [np.log(ratio) / 2. * go, None]
class TestJSDOps(hu.HypothesisTestCase):
    """Checks the BernoulliJSD operator against the numpy reference above."""
    @given(n=st.integers(10, 100), **hu.gcs_cpu_only)
    def test_bernoulli_jsd(self, n, gc, dc):
        # Random Bernoulli parameters in (0, 1), float32 to match the op.
        p = np.random.rand(n).astype(np.float32)
        q = np.random.rand(n).astype(np.float32)
        op = core.CreateOperator("BernoulliJSD", ["p", "q"], ["l"])
        # Forward output is compared with jsd(); the gradient of 'l' with
        # jsd_grad().
        self.assertReferenceChecks(
            device_option=gc,
            op=op,
            inputs=[p, q],
            reference=jsd,
            output_to_grad='l',
            grad_reference=jsd_grad,
        )
| [
"numpy.random.rand",
"numpy.log",
"hypothesis.strategies.integers",
"caffe2.python.core.CreateOperator"
] | [((1536, 1590), 'caffe2.python.core.CreateOperator', 'core.CreateOperator', (['"""BernoulliJSD"""', "['p', 'q']", "['l']"], {}), "('BernoulliJSD', ['p', 'q'], ['l'])\n", (1555, 1590), False, 'from caffe2.python import core\n'), ((1031, 1040), 'numpy.log', 'np.log', (['p'], {}), '(p)\n', (1037, 1040), True, 'import numpy as np\n'), ((1047, 1056), 'numpy.log', 'np.log', (['q'], {}), '(q)\n', (1053, 1056), True, 'import numpy as np\n'), ((1339, 1359), 'hypothesis.strategies.integers', 'st.integers', (['(10)', '(100)'], {}), '(10, 100)\n', (1350, 1359), True, 'import hypothesis.strategies as st\n'), ((1232, 1265), 'numpy.log', 'np.log', (['(p * (1 - m) / (1 - p) / m)'], {}), '(p * (1 - m) / (1 - p) / m)\n', (1238, 1265), True, 'import numpy as np\n'), ((1437, 1454), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (1451, 1454), True, 'import numpy as np\n'), ((1486, 1503), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (1500, 1503), True, 'import numpy as np\n')] |
from collections import deque
import networkx as nx
import numpy as np
def random_subtree(T, alpha, beta, subtree_mark):
    """ Random subtree of T according to Algorithm X in [1].

    Args:
        T (NetworkX graph): the tree of which the subtree is taken
        alpha (float): probability of continuing to a neighbor
        beta (float): probability of non empty subtree
        subtree_mark: marker for nodes of the sampled subtree (currently
            unused; the marking line is commented out below)

    Returns:
        tuple: (subtree, subtree_nodes, subtree_edges, subtree_adjlist,
        separators, probtree).  Note that for a non-empty draw ``subtree``
        itself is None; callers use the node/edge lists.

    References:
        [1] <NAME>., <NAME>. Pavlenko Bayesian structure learning in graphical models using sequential Monte Carlo.
    """
    # Take empty subtree with prob 1-beta (category 1 of the multinomial
    # has probability 1-beta, which matches the returned probability and
    # the p == 0 case of pdf()).
    empty = np.random.multinomial(1, [beta, 1-beta]).argmax()
    subtree_edges = []
    subtree_nodes = []
    if empty == 1:
        separators = {}
        subtree = nx.Graph()
        return (subtree, [], [], {}, separators, 1-beta)
    # Take non-empty subtree: BFS from a uniformly random start node,
    # crossing each tree edge independently with probability alpha.
    n = T.order()
    w = 0.0  # number of edges declined at the subtree boundary
    visited = set()  # cliques
    q = deque([])
    start = np.random.randint(n)  # then n means new component
    separators = {}
    #start_node = T.nodes()[start] # nx < 2.x
    start_node = list(T.nodes())[start]  # nx > 2.x
    q.append(start_node)
    subtree_adjlist = {start_node: []}
    while len(q) > 0:
        node = q.popleft()
        visited.add(node)
        subtree_nodes.append(node)
        #T.node[node]["subnode"] = subtree_mark
        for neig in T.neighbors(node):
            b = np.random.multinomial(1, [1-alpha, alpha]).argmax()
            if neig not in visited:
                if b == 1:
                    subtree_edges.append((node, neig))
                    subtree_adjlist[node].append(neig)
                    subtree_adjlist[neig] = [node]
                    q.append(neig)
                    # Add separator.  NOTE(review): nodes appear to be
                    # set-like (cliques of a junction tree), so '&' is
                    # set intersection -- confirm.
                    sep = neig & node
                    if not sep in separators:
                        separators[sep] = []
                    separators[sep].append((neig, node))
                else:
                    w += 1
    # subtree = T.subgraph(subtree_nodes)
    # assert subtree_nodes in T.nodes()
    subtree = None
    v = len(subtree_nodes)
    # np.float was removed in NumPy 1.20+; the builtin float is equivalent.
    probtree = beta * v * np.power(alpha, v-1) / float(n)
    probtree *= np.power(1-alpha, w)
    return (subtree, subtree_nodes, subtree_edges, subtree_adjlist, separators, probtree)
def pdf(subtree, T, alpha, beta):
    """ Returns the probability of the subtree ``subtree`` generated by
        random_subtree(T, alpha, beta).

    Args:
        T (NetworkX graph): A tree
        subtree (NetworkX graph): a subtree of T drawn by the subtree kernel
        alpha (float): Subtree kernel parameter
        beta (float): Subtree kernel parameter

    Returns:
        float
    """
    p = subtree.order()
    if p == 0:
        # The empty subtree is drawn with probability 1 - beta.
        return 1.0 - beta
    # Remaining forest after removing the subtree's nodes from T.
    forest = T.subgraph(set(T.nodes()) - set(subtree.nodes()))
    #components = nx.connected_components(forest)
    # NOTE(review): plain networkx graphs have no connected_components()
    # method (the commented-out line above is the networkx spelling); this
    # presumably relies on a custom graph class -- confirm.
    components = forest.connected_components()
    # w = number of boundary components; each was declined with prob 1-alpha.
    w = float(len(list(components)))
    v = float(subtree.order())
    alpha = float(alpha)
    beta = float(beta)
    n = float(T.order())
    prob = beta * v * np.power(alpha, v-1) * np.power(1-alpha, w) / n
return prob | [
"numpy.float",
"collections.deque",
"numpy.power",
"networkx.Graph",
"numpy.random.multinomial",
"numpy.random.randint"
] | [((937, 946), 'collections.deque', 'deque', (['[]'], {}), '([])\n', (942, 946), False, 'from collections import deque\n'), ((959, 979), 'numpy.random.randint', 'np.random.randint', (['n'], {}), '(n)\n', (976, 979), True, 'import numpy as np\n'), ((2194, 2216), 'numpy.power', 'np.power', (['(1 - alpha)', 'w'], {}), '(1 - alpha, w)\n', (2202, 2216), True, 'import numpy as np\n'), ((770, 780), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (778, 780), True, 'import networkx as nx\n'), ((2166, 2177), 'numpy.float', 'np.float', (['n'], {}), '(n)\n', (2174, 2177), True, 'import numpy as np\n'), ((612, 654), 'numpy.random.multinomial', 'np.random.multinomial', (['(1)', '[beta, 1 - beta]'], {}), '(1, [beta, 1 - beta])\n', (633, 654), True, 'import numpy as np\n'), ((2143, 2165), 'numpy.power', 'np.power', (['alpha', '(v - 1)'], {}), '(alpha, v - 1)\n', (2151, 2165), True, 'import numpy as np\n'), ((3110, 3132), 'numpy.power', 'np.power', (['(1 - alpha)', 'w'], {}), '(1 - alpha, w)\n', (3118, 3132), True, 'import numpy as np\n'), ((3087, 3109), 'numpy.power', 'np.power', (['alpha', '(v - 1)'], {}), '(alpha, v - 1)\n', (3095, 3109), True, 'import numpy as np\n'), ((1405, 1449), 'numpy.random.multinomial', 'np.random.multinomial', (['(1)', '[1 - alpha, alpha]'], {}), '(1, [1 - alpha, alpha])\n', (1426, 1449), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Constants parameter functions
DRS Import Rules:
- only from apero.lang and apero.core.constants
Created on 2019-01-17 at 15:24
@author: cook
"""
from collections import OrderedDict
import copy
import numpy as np
import os
import pkg_resources
import shutil
import sys
from typing import Union, List, Type
from pathlib import Path
from apero.core.constants import constant_functions
from apero.lang import drs_exceptions
# =============================================================================
# Define variables
# =============================================================================
# Define script name
__NAME__ = 'param_functions.py'
# Define package name
PACKAGE = 'apero'
# Define relative path to 'const' sub-package
CONST_PATH = './core/instruments/'
CORE_PATH = './core/instruments/default/'
# Define config/constant/keyword scripts to open
SCRIPTS = ['default_config.py', 'default_constants.py', 'default_keywords.py']
USCRIPTS = ['user_config.ini', 'user_constants.ini', 'user_keywords.ini']
PSEUDO_CONST_FILE = 'pseudo_const.py'
PSEUDO_CONST_CLASS = 'PseudoConstants'
# get the Drs Exceptions (local aliases for the lang-package exceptions)
ArgumentError = drs_exceptions.ArgumentError
ArgumentWarning = drs_exceptions.ArgumentWarning
DRSError = drs_exceptions.DrsError
DRSWarning = drs_exceptions.DrsWarning
TextError = drs_exceptions.TextError
TextWarning = drs_exceptions.TextWarning
ConfigError = drs_exceptions.ConfigError
ConfigWarning = drs_exceptions.ConfigWarning
# get the logger
BLOG = drs_exceptions.basiclogger
# relative folder cache
# NOTE(review): these module-level dicts are mutable caches shared across
# the package -- presumably populated lazily elsewhere in this module.
REL_CACHE = dict()
CONFIG_CACHE = dict()
PCONFIG_CACHE = dict()
# cache some settings
# Keys whose values are retained in SETTINGS_CACHE between loads.
SETTINGS_CACHE_KEYS = ['DRS_DEBUG', 'ALLOW_BREAKPOINTS']
SETTINGS_CACHE = dict()
# =============================================================================
# Define Custom classes
# =============================================================================
# case insensitive dictionary
class CaseInsensitiveDict(dict):
"""
Custom dictionary with string keys that are case insensitive
"""
    def __init__(self, *arg, **kw):
        """
        Construct the case insensitive dictionary class

        :param arg: arguments passed to dict
        :param kw: keyword arguments passed to dict
        """
        # set function name
        # NOTE(review): display_func's return value is discarded; it
        # presumably records/announces the call for debugging -- confirm.
        _ = display_func(None, '__init__', __NAME__, 'CaseInsensitiveDict')
        # super from dict
        super(CaseInsensitiveDict, self).__init__(*arg, **kw)
        # force keys to be capitals (internally)
        self.__capitalise_keys__()
    def __getitem__(self, key: str) -> object:
        """
        Method used to get the value of an item using "key"
        used as x.__getitem__(y) <==> x[y]
        where key is case insensitive

        :param key: string, the key for the value returned (case insensitive)

        :type key: str

        :return value: object, the value stored at position "key"
        """
        # set function name
        _ = display_func(None, '__getitem__', __NAME__, 'CaseInsensitiveDict')
        # make key capitals (lookup is case insensitive because storage
        # always capitalises keys)
        key = _capitalise_key(key)
        # return from supers dictionary storage
        return super(CaseInsensitiveDict, self).__getitem__(key)
    def __setitem__(self, key: str, value: object, source: str = None):
        """
        Sets an item wrapper for self[key] = value

        :param key: string, the key to set for the parameter
        :param value: object, the object to set (as in dictionary) for the
            parameter
        :param source: string, the source for the parameter
            (NOTE: not used by this base implementation -- accepted but
            ignored here)

        :type key: str
        :type value: object
        :type source: str

        :return: None
        """
        # set function name
        _ = display_func(None, '__setitem__', __NAME__, 'CaseInsensitiveDict')
        # capitalise string keys so later lookups are case insensitive
        key = _capitalise_key(key)
        # then do the normal dictionary setting
        super(CaseInsensitiveDict, self).__setitem__(key, value)
def __contains__(self, key: str) -> bool:
"""
Method to find whether CaseInsensitiveDict instance has key="key"
used with the "in" operator
if key exists in CaseInsensitiveDict True is returned else False
is returned
:param key: string, "key" to look for in CaseInsensitiveDict instance
:type key: str
:return bool: True if CaseInsensitiveDict instance has a key "key",
else False
:rtype: bool
"""
# set function name
_ = display_func(None, '__contains__', __NAME__, 'CaseInsensitiveDict')
# capitalize key first
key = _capitalise_key(key)
# return True if key in keys else return False
return super(CaseInsensitiveDict, self).__contains__(key)
def __delitem__(self, key: str):
"""
Deletes the "key" from CaseInsensitiveDict instance, case insensitive
:param key: string, the key to delete from ParamDict instance,
case insensitive
:type key: str
:return None:
"""
# set function name
_ = display_func(None, '__delitem__', __NAME__, 'CaseInsensitiveDict')
# capitalize key first
key = _capitalise_key(key)
# delete key from keys
super(CaseInsensitiveDict, self).__delitem__(key)
def get(self, key: str, default: Union[None, object] = None):
"""
Overrides the dictionary get function
If "key" is in CaseInsensitiveDict instance then returns this value,
else returns "default" (if default returned source is set to None)
key is case insensitive
:param key: string, the key to search for in ParamDict instance
case insensitive
:param default: object or None, if key not in ParamDict instance this
object is returned
:type key: str
:type default: Union[None, object]
:return value: if key in ParamDict instance this value is returned else
the default value is returned (None if undefined)
"""
# set function name
_ = display_func(None, 'get', __NAME__, 'CaseInsensitiveDict')
# capitalise string keys
key = _capitalise_key(key)
# if we have the key return the value
if key in self.keys():
return self.__getitem__(key)
# else return the default key (None if not defined)
else:
return default
def __capitalise_keys__(self):
"""
Capitalizes all keys in ParamDict (used to make ParamDict case
insensitive), only if keys entered are strings
:return None:
"""
# set function name
_ = display_func(None, '__capitalise_keys__', __NAME__,
'CaseInsensitiveDict')
# make keys a list
keys = list(self.keys())
# loop around key in keys
for key in keys:
# check if key is a string
if type(key) == str:
# get value
value = super(CaseInsensitiveDict, self).__getitem__(key)
# delete old key
super(CaseInsensitiveDict, self).__delitem__(key)
# if it is a string set it to upper case
key = key.upper()
# set the new key
super(CaseInsensitiveDict, self).__setitem__(key, value)
class ListCaseInsensitiveDict(CaseInsensitiveDict):
    """
    Case insensitive dictionary whose values are always handled as lists
    """
    def __getitem__(self, key: str) -> list:
        """
        Return the value stored under "key" (case insensitive) as a list copy
        used as x.__getitem__(y) <==> x[y]
        :param key: string, the key for the value returned (case insensitive)
        :type key: str
        :return value: list, the value stored at position "key"
        """
        # set function name
        _ = display_func(None, '__getitem__', __NAME__,
                         'ListCaseInsensitiveDict')
        # fetch via the parent (case-insensitive) storage
        stored = super(ListCaseInsensitiveDict, self).__getitem__(key)
        # return a list copy so callers cannot mutate internal storage
        # noinspection PyTypeChecker
        return list(stored)
    def __setitem__(self, key: str, value: list, source: str = None):
        """
        Store self[key] = value, forcing the value to a list
        :param key: string, the key to set for the parameter
        :param value: object, the object to set (as in dictionary) for the
                      parameter
        :param source: string, the source for the parameter
        :type key: str
        :type value: list
        :type source: str
        :return: None
        """
        # set function name
        _ = display_func(None, '__setitem__', __NAME__,
                         'ListCaseInsensitiveDict')
        # cast to a list before delegating to the parent setter
        aslist = list(value)
        super(ListCaseInsensitiveDict, self).__setitem__(key, aslist)
class ParamDict(CaseInsensitiveDict):
    """
    Custom dictionary to retain source of a parameter (added via setSource,
    retrieved via getSource). String keys are case insensitive.
    """
    def __init__(self, *arg, **kw):
        """
        Constructor for parameter dictionary, calls dict.__init__
        i.e. the same as running dict(*arg, *kw)
        :param arg: arguments passed to CaseInsensitiveDict
        :param kw: keyword arguments passed to CaseInsensitiveDict
        """
        # set function name
        _ = display_func(None, '__init__', __NAME__, 'ParamDict')
        # storage for the sources
        self.sources = CaseInsensitiveDict()
        # storage for the source history
        self.source_history = ListCaseInsensitiveDict()
        # storage for the instances
        self.instances = CaseInsensitiveDict()
        # the print format
        self.pfmt = '\t{0:30s}{1:45s} # {2}'
        # the print format for list items
        self.pfmt_ns = '\t{1:45s}'
        # whether the parameter dictionary is locked for editing
        self.locked = False
        # get text entry from constants (manual database)
        self.textentry = constant_functions.DisplayText()
        # run the super class (CaseInsensitiveDict <-- dict)
        super(ParamDict, self).__init__(*arg, **kw)
    def __getitem__(self, key: str) -> object:
        """
        Method used to get the value of an item using "key"
        used as x.__getitem__(y) <==> x[y]
        where key is case insensitive
        :param key: string, the key for the value returned (case insensitive)
        :type key: str
        :return value: object, the value stored at position "key"
        :raises ConfigError: if key not found
        """
        # set function name
        _ = display_func(None, '__getitem__', __NAME__, 'ParamDict')
        # try to get item from super
        try:
            return super(ParamDict, self).__getitem__(key)
        except KeyError:
            # log that parameter was not found in parameter dictionary
            emsg = self.textentry('00-003-00024', args=[key])
            raise ConfigError(emsg, level='error')
    def __setitem__(self, key: str, value: object,
                    source: Union[None, str] = None,
                    instance: Union[None, object] = None):
        """
        Sets an item wrapper for self[key] = value
        :param key: string, the key to set for the parameter
        :param value: object, the object to set (as in dictionary) for the
                      parameter
        :param source: string, the source for the parameter
        :type key: str
        :type source: Union[None, str]
        :type instance: Union[None, object]
        :return: None
        :raises ConfigError: if parameter dictionary is locked
        """
        global SETTINGS_CACHE
        # set function name
        _ = display_func(None, '__setitem__', __NAME__, 'ParamDict')
        # deal with parameter dictionary being locked
        if self.locked:
            # log that parameter dictionary is locked so we cannot set key
            raise ConfigError(self.textentry('00-003-00025', args=[key, value]))
        # if we dont have the key in sources set it regardless
        if key not in self.sources:
            self.sources[key] = source
            self.instances[key] = instance
        # if we do have the key only set it if source is not None
        elif source is not None:
            self.sources[key] = source
            self.instances[key] = instance
        # if setting in cached settings add (deep copy so later mutation of
        # the stored value cannot change the cached copy)
        if key in SETTINGS_CACHE_KEYS:
            SETTINGS_CACHE[key] = copy.deepcopy(value)
        # then do the normal dictionary setting
        super(ParamDict, self).__setitem__(key, value)
    def __contains__(self, key: str) -> bool:
        """
        Method to find whether ParamDict instance has key="key"
        used with the "in" operator
        if key exists in ParamDict True is returned else False is returned
        :param key: string, "key" to look for in ParamDict instance
        :return bool: True if ParamDict instance has a key "key", else False
        """
        # set function name
        _ = display_func(None, '__contains__', __NAME__, 'ParamDict')
        # run contains command from super
        return super(ParamDict, self).__contains__(key)
    def __delitem__(self, key: str):
        """
        Deletes the "key" from ParamDict instance, case insensitive
        :param key: string, the key to delete from ParamDict instance,
                    case insensitive
        :return None:
        """
        # set function name
        _ = display_func(None, '__delitem__', __NAME__, 'ParamDict')
        # delete item using super
        super(ParamDict, self).__delitem__(key)
    def __repr__(self):
        """
        Get the offical string representation for this instance
        :return: return the string representation
        :rtype: str
        """
        # set function name
        _ = display_func(None, '__repr__', __NAME__, 'ParamDict')
        # get string from string print
        return self._string_print()
    def __str__(self) -> str:
        """
        Get the informal string representation for this instance
        :return: return the string representation
        :rtype: str
        """
        # set function name (was wrongly reported as '__repr__')
        _ = display_func(None, '__str__', __NAME__, 'ParamDict')
        # get string from string print
        return self._string_print()
    def set(self, key: str, value: object,
            source: Union[None, str] = None,
            instance: Union[None, object] = None):
        """
        Set an item even if params is locked
        :param key: str, the key to set
        :param value: object, the value of the key to set
        :param source: str, the source of the value/key to set
        :param instance: object, the instance of the value/key to set
        :type key: str
        :type source: str
        :type instance: object
        :return: None
        """
        # set function name
        _ = display_func(None, 'set', __NAME__, 'ParamDict')
        # if we dont have the key in sources set it regardless
        if key not in self.sources:
            self.sources[key] = source
            self.instances[key] = instance
        # if we do have the key only set it if source is not None
        elif source is not None:
            self.sources[key] = source
            self.instances[key] = instance
        # then do the normal dictionary setting (bypasses the lock check in
        # self.__setitem__ by calling the grandparent class directly)
        super(ParamDict, self).__setitem__(key, value)
    def lock(self):
        """
        Locks the parameter dictionary
        :return:
        """
        # set function name
        _ = display_func(None, 'lock', __NAME__, 'ParamDict')
        # set locked to True
        self.locked = True
    def unlock(self):
        """
        Unlocks the parameter dictionary
        :return:
        """
        # set function name
        _ = display_func(None, 'unlock', __NAME__, 'ParamDict')
        # set locked to False
        self.locked = False
    def get(self, key: str, default: Union[None, object] = None) -> object:
        """
        Overrides the dictionary get function
        If "key" is in ParamDict instance then returns this value, else
        returns "default" (if default returned source is set to None)
        key is case insensitive
        :param key: string, the key to search for in ParamDict instance
                    case insensitive
        :param default: object or None, if key not in ParamDict instance this
                        object is returned
        :type key: str
        :return value: if key in ParamDict instance this value is returned else
                       the default value is returned (None if undefined)
        """
        # set function name
        _ = display_func(None, 'get', __NAME__, 'ParamDict')
        # if we have the key return the value
        if key in self.keys():
            return self.__getitem__(key)
        # else return the default key (None if not defined)
        else:
            # record that the returned value had no source (see docstring)
            self.sources[key] = None
            return default
    def set_source(self, key: str, source: str):
        """
        Set a key to have sources[key] = source
        raises a ConfigError if key not found
        :param key: string, the main dictionary string
        :param source: string, the source to set
        :type key: str
        :type source: str
        :return None:
        :raises ConfigError: if key not found
        """
        # set function name
        _ = display_func(None, 'set_source', __NAME__, 'ParamDict')
        # capitalise
        key = _capitalise_key(key)
        # don't put full path for sources in package
        source = _check_mod_source(source)
        # only add if key is in main dictionary
        if key in self.keys():
            self.sources[key] = source
            # add to history
            if key in self.source_history:
                self.source_history[key].append(source)
            else:
                self.source_history[key] = [source]
        else:
            # log error: source cannot be added for key
            emsg = self.textentry('00-003-00026', args=[key])
            raise ConfigError(emsg, level='error')
    def set_instance(self, key: str, instance: object):
        """
        Set a key to have instance[key] = instance
        raise a Config Error if key not found
        :param key: str, the key to add
        :param instance: object, the instance to store (normally Const/Keyword)
        :type key: str
        :return None:
        :raises ConfigError: if key not found
        """
        # set function name
        _ = display_func(None, 'set_instance', __NAME__, 'ParamDict')
        # capitalise
        key = _capitalise_key(key)
        # only add if key is in main dictionary
        if key in self.keys():
            self.instances[key] = instance
        else:
            # log error: instance cannot be added for key
            emsg = self.textentry('00-003-00027', args=[key])
            raise ConfigError(emsg, level='error')
    def append_source(self, key: str, source: str):
        """
        Adds source to the source of key (appends if exists)
        i.e. sources[key] = oldsource + source
        :param key: string, the main dictionary string
        :param source: string, the source to set
        :type key: str
        :type source: str
        :return None:
        """
        # set function name
        _ = display_func(None, 'append_source', __NAME__, 'ParamDict')
        # capitalise
        key = _capitalise_key(key)
        # if key exists append source to it
        if key in self.keys() and key in list(self.sources.keys()):
            self.sources[key] += ' {0}'.format(source)
        else:
            self.set_source(key, source)
    def set_sources(self, keys: List[str],
                    sources: Union[str, List[str], dict]):
        """
        Set a list of keys sources
        raises a ConfigError if key not found
        :param keys: list of strings, the list of keys to add sources for
        :param sources: string or list of strings or dictionary of strings,
                        the source or sources to add,
                        if a dictionary source = sources[key] for key = keys[i]
                        if list source = sources[i]  for keys[i]
                        if string all sources with these keys will = source
        :type keys: list
        :type sources: Union[str, list, dict]
        :return None:
        """
        # set function name
        _ = display_func(None, 'set_sources', __NAME__, 'ParamDict')
        # loop around each key in keys
        for k_it in range(len(keys)):
            # assign the key from k_it
            key = keys[k_it]
            # capitalise
            key = _capitalise_key(key)
            # Get source for this iteration
            if type(sources) == list:
                source = sources[k_it]
            elif type(sources) == dict:
                source = sources[key]
            else:
                source = str(sources)
            # set source
            self.set_source(key, source)
    def set_instances(self, keys: List[str],
                      instances: Union[object, list, dict]):
        """
        Set a list of keys sources
        raises a ConfigError if key not found
        :param keys: list of strings, the list of keys to add sources for
        :param instances: object or list of objects or dictionary of objects,
                          the source or sources to add,
                          if a dictionary source = sources[key] for key = keys[i]
                          if list source = sources[i]  for keys[i]
                          if object all sources with these keys will = source
        :type keys: list
        :type instances: Union[object, list, dict]
        :return None:
        """
        # set function name
        _ = display_func(None, 'set_instances', __NAME__, 'ParamDict')
        # loop around each key in keys
        for k_it in range(len(keys)):
            # assign the key from k_it
            key = keys[k_it]
            # capitalise
            key = _capitalise_key(key)
            # Get source for this iteration
            if type(instances) == list:
                instance = instances[k_it]
            elif type(instances) == dict:
                instance = instances[key]
            else:
                instance = instances
            # set source
            self.set_instance(key, instance)
    def append_sources(self, keys: str, sources: Union[str, List[str], dict]):
        """
        Adds list of keys sources (appends if exists)
        raises a ConfigError if key not found
        :param keys: list of strings, the list of keys to add sources for
        :param sources: string or list of strings or dictionary of strings,
                        the source or sources to add,
                        if a dictionary source = sources[key] for key = keys[i]
                        if list source = sources[i]  for keys[i]
                        if string all sources with these keys will = source
        :type keys: list
        :type sources: Union[str, List[str], dict]
        :return None:
        """
        # set function name
        _ = display_func(None, 'append_sources', __NAME__, 'ParamDict')
        # loop around each key in keys
        for k_it in range(len(keys)):
            # assign the key from k_it
            key = keys[k_it]
            # capitalise
            key = _capitalise_key(key)
            # Get source for this iteration
            if type(sources) == list:
                source = sources[k_it]
            elif type(sources) == dict:
                source = sources[key]
            else:
                source = str(sources)
            # append key
            self.append_source(key, source)
    def set_all_sources(self, source: str):
        """
        Set all keys in dictionary to this source
        :param source: string, all keys will be set to this source
        :type source: str
        :return None:
        """
        # set function name
        _ = display_func(None, 'set_all_sources', __NAME__, 'ParamDict')
        # loop around each key in keys
        for key in self.keys():
            # capitalise
            key = _capitalise_key(key)
            # set key
            self.sources[key] = source
    def append_all_sources(self, source: str):
        """
        Sets all sources to this "source" value
        :param source: string, the source to set
        :type source: str
        :return None:
        """
        # set function name
        _ = display_func(None, 'append_all_sources', __NAME__, 'ParamDict')
        # loop around each key in keys
        for key in self.keys():
            # capitalise
            key = _capitalise_key(key)
            # set key
            self.sources[key] += ' {0}'.format(source)
    def get_source(self, key: str) -> str:
        """
        Get a source from the parameter dictionary (must be set)
        raises a ConfigError if key not found
        :param key: string, the key to find (must be set)
        :return source: string, the source of the parameter
        """
        # set function name
        _ = display_func(None, 'get_source', __NAME__, 'ParamDict')
        # capitalise
        key = _capitalise_key(key)
        # if key in keys and sources then return source
        if key in self.keys() and key in self.sources.keys():
            return str(self.sources[key])
        # else raise a Config Error
        else:
            # log error: no source set for key
            emsg = self.textentry('00-003-00028', args=[key])
            raise ConfigError(emsg, level='error')
    def get_instance(self, key: str) -> object:
        """
        Get a source from the parameter dictionary (must be set)
        raises a ConfigError if key not found
        :param key: string, the key to find (must be set)
        :return source: string, the source of the parameter
        """
        # set function name
        _ = display_func(None, 'get_instance', __NAME__, 'ParamDict')
        # capitalise
        key = _capitalise_key(key)
        # if key in keys and sources then return source
        if key in self.keys() and key in self.instances.keys():
            return self.instances[key]
        # else raise a Config Error
        else:
            emsg = self.textentry('00-003-00029', args=[key])
            raise ConfigError(emsg, level='error')
    def source_keys(self) -> List[str]:
        """
        Get a dict_keys for the sources for this parameter dictionary
        order the same as self.keys()
        :return sources: values of sources dictionary
        """
        # set function name
        _ = display_func(None, 'source_keys', __NAME__, 'ParamDict')
        # return all keys in source dictionary
        return list(self.sources.keys())
    def source_values(self) -> List[object]:
        """
        Get a dict_values for the sources for this parameter dictionary
        order the same as self.keys()
        :return sources: values of sources dictionary
        """
        # set function name
        _ = display_func(None, 'source_values', __NAME__, 'ParamDict')
        # return all values in source dictionary
        return list(self.sources.values())
    def startswith(self, substring: str) -> List[str]:
        """
        Return all keys that start with this substring
        :param substring: string, the prefix that the keys start with
        :type substring: str
        :return keys: list of strings, the keys with this substring at the start
        """
        # set function name
        _ = display_func(None, 'startswith', __NAME__, 'ParamDict')
        # define return list
        return_keys = []
        # loop around keys
        for key in self.keys():
            # make sure key is string
            if type(key) != str:
                continue
            # if first
            if str(key).startswith(substring.upper()):
                return_keys.append(key)
        # return keys
        return return_keys
    def contains(self, substring: str) -> List[str]:
        """
        Return all keys that contain this substring
        :param substring: string, the sub-string to look for in all keys
        :type substring: str
        :return keys: list of strings, the keys which contain this substring
        """
        # set function name
        _ = display_func(None, 'contains', __NAME__, 'ParamDict')
        # define return list
        return_keys = []
        # loop around keys
        for key in self.keys():
            # make sure key is string
            if type(key) != str:
                continue
            # if first
            if substring.upper() in key:
                return_keys.append(key)
        # return keys
        return return_keys
    def endswith(self, substring: str) -> List[str]:
        """
        Return all keys that end with this substring
        :param substring: string, the suffix that the keys ends with
        :type substring: str
        :return keys: list of strings, the keys with this substring at the end
        """
        # set function name
        _ = display_func(None, 'endswith', __NAME__, 'ParamDict')
        # define return list
        return_keys = []
        # loop around keys
        for key in self.keys():
            # make sure key is string
            if type(key) != str:
                continue
            # if first
            if str(key).endswith(substring.upper()):
                return_keys.append(key)
        # return keys
        return return_keys
    def copy(self):
        """
        Copy a parameter dictionary (deep copy parameters)
        :return: the copy of the parameter dictionary
        :rtype: ParamDict
        """
        # set function name
        _ = display_func(None, 'copy', __NAME__, 'ParamDict')
        # make new copy of param dict
        pp = ParamDict()
        keys = list(self.keys())
        values = list(self.values())
        # loop around keys and add to new copy
        for k_it, key in enumerate(keys):
            value = values[k_it]
            # try to deep copy parameter
            if isinstance(value, ParamDict):
                pp[key] = value.copy()
            else:
                # noinspection PyBroadException
                try:
                    pp[key] = copy.deepcopy(value)
                except Exception as _:
                    # fall back to re-constructing from the value's own type
                    pp[key] = type(value)(value)
            # copy source
            if key in self.sources:
                pp.set_source(key, str(self.sources[key]))
            else:
                pp.set_source(key, 'Unknown')
            # copy source history
            if key in self.source_history:
                pp.source_history[key] = list(self.source_history[key])
            else:
                pp.source_history[key] = []
            # copy instance
            if key in self.instances:
                pp.set_instance(key, self.instances[key])
            else:
                pp.set_instance(key, None)
        # return new param dict filled
        return pp
    def merge(self, paramdict, overwrite: bool = True):
        """
        Merge another parameter dictionary with this one
        :param paramdict: ParamDict, another parameter dictionary to merge
                          with this one
        :param overwrite: bool, if True (default) allows overwriting of
                          parameters, else skips ones already present
        :type paramdict: ParamDict
        :type overwrite: bool
        :return: None
        """
        # set function name
        _ = display_func(None, 'merge', __NAME__, 'ParamDict')
        # add param dict to self
        for key in paramdict:
            # deal with no overwriting
            # BUGFIX: was "key in self.keys" (membership test on the bound
            # method object) which raised TypeError whenever overwrite=False
            if not overwrite and key in self.keys():
                continue
            # copy source
            if key in paramdict.sources:
                ksource = paramdict.sources[key]
            else:
                ksource = None
            # copy instance
            if key in paramdict.instances:
                kinst = paramdict.instances[key]
            else:
                kinst = None
            # add to self
            self.set(key, paramdict[key], ksource, kinst)
    def _string_print(self) -> str:
        """
        Constructs a string representation of the instance
        :return: a string representation of the instance
        :rtype: str
        """
        # set function name
        _ = display_func(None, '_string_print', __NAME__, 'ParamDict')
        # get keys and values
        keys = list(self.keys())
        values = list(self.values())
        # string storage
        return_string = 'ParamDict:\n'
        strvalues = []
        # loop around each key in keys
        for k_it, key in enumerate(keys):
            # get this iterations values
            value = values[k_it]
            # deal with no source
            if key not in self.sources:
                self.sources[key] = 'None'
            # print value
            if type(value) in [list, np.ndarray]:
                sargs = [key, list(value), self.sources[key], self.pfmt]
                strvalues += _string_repr_list(*sargs)
            elif type(value) in [dict, OrderedDict, ParamDict]:
                strvalue = list(value.keys()).__repr__()[:40]
                sargs = [key + '[DICT]', strvalue, self.sources[key]]
                strvalues += [self.pfmt.format(*sargs)]
            else:
                strvalue = str(value)[:40]
                sargs = [key + ':', strvalue, self.sources[key]]
                strvalues += [self.pfmt.format(*sargs)]
        # combine list into single string
        for string_value in strvalues:
            return_string += '\n {0}'.format(string_value)
        # return string
        return return_string + '\n'
    def listp(self, key: str, separator: str = ',',
              dtype: Union[None, Type] = None) -> list:
        """
        Turn a string list parameter (separated with `separator`) into a list
        of objects (of data type `dtype`)
        i.e. ParamDict['MYPARAM'] = '1, 2, 3, 4'
        x = ParamDict.listp('my_parameter', dtype=int)
        gives:
        x = list([1, 2, 3, 4])
        :param key: str, the key that contains a string list
        :param separator: str, the character that separates
        :param dtype: type, the type to cast the list element to
        :return: the list of values extracted from the string for `key`
        :rtype: list
        """
        # set function name
        _ = display_func(None, 'listp', __NAME__, 'ParamDict')
        # if key is present attempt str-->list
        if key in self.keys():
            item = self.__getitem__(key)
        else:
            # log error: parameter not found in parameter dict (via listp)
            emsg = self.textentry('00-003-00030', args=[key])
            raise ConfigError(emsg, level='error')
        # convert string
        if key in self.keys() and isinstance(item, str):
            return _map_listparameter(str(item), separator=separator,
                                      dtype=dtype)
        elif isinstance(item, list):
            return item
        else:
            # log error: parameter not found in parameter dict (via listp)
            emsg = self.textentry('00-003-00032', args=[key])
            raise ConfigError(emsg, level='error')
    def dictp(self, key: str, dtype: Union[str, None] = None) -> dict:
        """
        Turn a string dictionary parameter into a python dictionary
        of objects (of data type `dtype`)
        i.e. ParamDict['MYPARAM'] = '{"varA":1, "varB":2}'
        x = ParamDict.listp('my_parameter', dtype=int)
        gives:
        x = dict(varA=1, varB=2)
        Note string dictionary must be in the {"key":value} format
        :param key: str, the key that contains a string list
        :param dtype: type, the type to cast the list element to
        :return: the list of values extracted from the string for `key`
        :rtype: dict
        """
        # set function name
        _ = display_func(None, 'dictp', __NAME__, 'ParamDict')
        # if key is present attempt str-->dict
        if key in self.keys():
            item = self.__getitem__(key)
        else:
            # log error message: parameter not found in param dict (via dictp)
            emsg = self.textentry('00-003-00031', args=[key])
            raise ConfigError(emsg.format(key), level='error')
        # convert string
        if isinstance(item, str):
            return _map_dictparameter(str(item), dtype=dtype)
        elif isinstance(item, dict):
            return item
        else:
            # log error message: parameter not found in param dict (via dictp)
            emsg = self.textentry('00-003-00033', args=[key])
            raise ConfigError(emsg.format(key), level='error')
    def get_instanceof(self, lookup: object, nameattr: str = 'name') -> dict:
        """
        Get all instances of object instance lookup
        i.e. perform isinstance(object, lookup)
        :param lookup: object, the instance to lookup
        :param nameattr: str, the attribute in instance that we will return
                         as the key
        :return: a dictionary of keys/value pairs where each value is an
                 instance that belongs to instance of `lookup`
        :rtype: dict
        """
        # set function name
        _ = display_func(None, 'get_instanceof', __NAME__, 'ParamDict')
        # output storage
        keydict = dict()
        # loop around all keys
        for key in list(self.instances.keys()):
            # get the instance for this key
            instance = self.instances[key]
            # skip None
            if instance is None:
                continue
            # else check instance type
            if isinstance(instance, type(lookup)):
                if hasattr(instance, nameattr):
                    name = getattr(instance, nameattr)
                    keydict[name] = instance
            else:
                continue
        # return keyworddict
        return keydict
    def info(self, key: str):
        """
        Display the information related to a specific key
        :param key: str, the key to display information about
        :type key: str
        :return: None
        """
        # set function name
        _ = display_func(None, 'info', __NAME__, 'ParamDict')
        # deal with key not existing
        if key not in self.keys():
            print(self.textentry('40-000-00001', args=[key]))
            return
        # print key title
        print(self.textentry('40-000-00002', args=[key]))
        # print value stats
        value = self.__getitem__(key)
        # print the data type
        print(self.textentry('40-000-00003', args=[type(value).__name__]))
        # deal with lists and numpy array
        if isinstance(value, (list, np.ndarray)):
            sargs = [key, list(value), None, self.pfmt_ns]
            wargs = [np.nanmin(value), np.nanmax(value),
                     np.sum(np.isnan(value)) > 0, _string_repr_list(*sargs)]
            print(self.textentry('40-000-00004', args=wargs))
        # deal with dictionaries
        elif isinstance(value, (dict, OrderedDict, ParamDict)):
            strvalue = list(value.keys()).__repr__()[:40]
            sargs = [key + '[DICT]', strvalue, None]
            wargs = [len(list(value.keys())), self.pfmt_ns.format(*sargs)]
            print(self.textentry('40-000-00005', args=wargs))
        # deal with everything else
        else:
            strvalue = str(value)[:40]
            sargs = [key + ':', strvalue, None]
            wargs = [self.pfmt_ns.format(*sargs)]
            print(self.textentry('40-000-00006', args=wargs))
        # add source info
        if key in self.sources:
            print(self.textentry('40-000-00007', args=[self.sources[key]]))
        # add instances info
        if key in self.instances:
            print(self.textentry('40-000-00008', args=[self.instances[key]]))
    def history(self, key: str):
        """
        Display the history of where key was defined (using source)
        :param key: str, the key to print history of
        :type key: str
        :return: None
        """
        # set function name
        _ = display_func(None, 'history', __NAME__, 'ParamDict')
        # if history found then print it
        if key in self.source_history:
            # print title: History for key
            print(self.textentry('40-000-00009', args=[key]))
            # loop around history and print row by row
            for it, entry in enumerate(self.source_history[key]):
                print('{0}: {1}'.format(it + 1, entry))
        # else display that there was not history found
        else:
            print(self.textentry('40-000-00010', args=[key]))
# =============================================================================
# Define functions
# =============================================================================
def update_paramdicts(*args, **kwargs):
    """
    Push one key/value (with optional source and instance) into every
    ParamDict passed in `args`; non-ParamDict arguments are ignored
    :param args: ParamDict instances to update
    :param kwargs: may contain 'key', 'value', 'source', 'instance'
    :return: None
    """
    # set function name (cannot break here --> no access to inputs)
    _ = display_func(None, 'update_paramdicts', __NAME__)
    # pull the optional settings out of the keyword arguments
    key = kwargs.get('key', None)
    value = kwargs.get('value', None)
    source = kwargs.get('source', None)
    instance = kwargs.get('instance', None)
    # push the new value into every parameter dictionary given
    for pdict in args:
        # skip anything that is not a parameter dictionary
        if not isinstance(pdict, ParamDict):
            continue
        pdict.set(key, value=value, source=source, instance=instance)
def load_config(instrument=None, from_file=True, cache=True):
    """
    Load all constants for `instrument` into a ParamDict (module defaults,
    optionally overridden by user configuration files)
    :param instrument: str or None, the instrument name ('None' string means
                       no instrument)
    :param from_file: bool, if True also load user configuration files
    :param cache: bool, if True read/write the module-level CONFIG_CACHE
    :return: ParamDict, the loaded parameter dictionary
    """
    global CONFIG_CACHE
    # set function name (cannot break here --> no access to inputs)
    _ = display_func(None, 'load_config', __NAME__)
    # use the cached parameters if we already loaded this instrument
    if cache and instrument in CONFIG_CACHE:
        return CONFIG_CACHE[instrument].copy()
    # a string 'None' (any case) means no instrument
    if isinstance(instrument, str) and instrument.upper() == 'NONE':
        instrument = None
    # get instrument sub-package constants files
    modules = get_module_names(instrument)
    # read the default constants from those modules
    try:
        keys, values, sources, instances = _load_from_module(modules, True)
    except ConfigError:
        sys.exit(1)
    # build the parameter dictionary and record where each key came from
    params = ParamDict(zip(keys, values))
    params.set_sources(keys=keys, sources=sources)
    # set instance (Const/Keyword instance) for every key
    for it in range(len(keys)):
        params.set_instance(keys[it], instances[it])
    # optionally override with the user configuration files
    if from_file:
        # get instrument user config files
        files = _get_file_names(params, instrument)
        try:
            keys, values, sources, instances = _load_from_file(files, modules)
        except ConfigError:
            sys.exit(1)
        # apply the file values and their instances
        for it in range(len(keys)):
            params[keys[it]] = values[it]
            params.set_instance(keys[it], instances[it])
        params.set_sources(keys=keys, sources=sources)
    # save sources to params
    params = _save_config_params(params)
    # store a copy in the cache for next time
    if cache:
        CONFIG_CACHE[instrument] = params.copy()
    # return the parameter dictionary
    return params
def load_pconfig(instrument=None):
    """
    Load (and cache) the pseudo-constants object for `instrument`
    :param instrument: str or None, the instrument name ('None' string means
                       no instrument)
    :return: the instrument pseudo-constants instance
    """
    global PCONFIG_CACHE
    # set function name (cannot break here --> no access to inputs)
    func_name = display_func(None, 'load_pconfig', __NAME__)
    # return the cached object if we have already built one
    if instrument in PCONFIG_CACHE:
        return PCONFIG_CACHE[instrument]
    # a string 'None' (any case) means no instrument
    if isinstance(instrument, str) and instrument.upper() == 'NONE':
        instrument = None
    # get instrument sub-package constants files
    modules = get_module_names(instrument, mod_list=[PSEUDO_CONST_FILE])
    # import module
    mod = constant_functions.import_module(func_name, modules[0])
    # the module must define the pseudo-constants class
    if not hasattr(mod, PSEUDO_CONST_CLASS):
        emsg = 'Module "{0}" is required to have class "{1}"'
        # NOTE(review): this ConfigError is constructed but never raised --
        # presumably its constructor logs the message; confirm before changing
        ConfigError(emsg.format(modules[0], PSEUDO_CONST_CLASS))
        sys.exit(1)
    # get the class and build an instance of it
    psconst = getattr(mod, PSEUDO_CONST_CLASS)
    pconfig = psconst(instrument=instrument)
    # update cache
    PCONFIG_CACHE[instrument] = pconfig
    return pconfig
def get_config_all():
    """
    Print a ready-to-paste __all__ statement for every constants module
    :return: None
    """
    # set function name (cannot break here --> no access to inputs)
    _ = display_func(None, 'get_config_all', __NAME__)
    # loop over every constants module (no instrument)
    for module in get_module_names(None):
        # generate a list of all functions in a module
        rawlist = constant_functions.generate_consts(module)[0]
        # print a header then the __all__ line to std-out
        print('=' * 50)
        print('MODULE: {0}'.format(module))
        print('=' * 50)
        print('')
        print('__all__ = [\'{0}\']'.format('\', \''.join(rawlist)))
        print('')
def get_file_names(instrument=None, file_list=None, instrument_path=None,
                   default_path=None):
    """
    Get the absolute path of every file in ``file_list``.

    :param instrument: str or None, instrument name; None means use the
                       core (default) constants directory
    :param file_list: list of str, file names to locate
    :param instrument_path: str, relative path of the instrument constants
    :param default_path: str, relative path of the core constants
    :return: list of str, absolute file paths (all guaranteed to exist)
    :raises ConfigError: if any requested file is missing or none were found
    """
    # set function name (cannot break here --> no access to inputs)
    func_name = display_func(None, 'get_file_names', __NAME__)
    # get core path
    core_path = get_relative_folder(PACKAGE, default_path)
    # construct the directory to search: core path or instrument sub-folder
    # NOTE: a previous version also listed the directory contents and tested
    #       os.path.isdir() on bare file names (i.e. relative to the current
    #       working directory) -- the result was never used and the check was
    #       wrong, so that dead code has been removed
    if instrument is None:
        filepath = os.path.join(core_path, '')
    else:
        const_path = get_relative_folder(PACKAGE, instrument_path)
        filepath = os.path.join(const_path, instrument.lower())
    # build the absolute path for every requested file
    paths = []
    for filename in file_list:
        # get file path
        fpath = os.path.join(filepath, filename)
        # every requested file must exist
        if not os.path.exists(fpath):
            emsgs = ['DevError: Filepath "{0}" does not exist.'
                     ''.format(fpath),
                     '\tfunction = {0}'.format(func_name)]
            raise ConfigError(emsgs, level='error')
        # append paths
        paths.append(fpath)
    # make sure we found something
    if len(paths) == 0:
        emsgs = ['DevError: No files found',
                 '\tfunction = {0}'.format(func_name)]
        raise ConfigError(emsgs, level='error')
    # return the absolute file paths
    return paths
def get_module_names(instrument=None, mod_list=None, instrument_path=None,
                     default_path=None, path=True):
    """
    Get module import names (or file paths) for each script in ``mod_list``.

    :param instrument: str or None, instrument name; None means core modules
    :param mod_list: list of str or None, script names (defaults to SCRIPTS)
    :param instrument_path: str or None, relative instrument constants path
                            (defaults to CONST_PATH)
    :param default_path: str or None, relative core constants path
                         (defaults to CORE_PATH)
    :param path: bool, if True return file paths, else module import names
    :return: list of str, file paths (path=True) or module names (path=False)
    :raises ConfigError: if any script is missing or nothing was found
    """
    # set function name (cannot break here --> no access to inputs)
    func_name = display_func(None, '_get_module_names', __NAME__)
    # deal with no module list
    if mod_list is None:
        mod_list = SCRIPTS
    # deal with no paths given
    if instrument_path is None:
        instrument_path = CONST_PATH
    if default_path is None:
        default_path = CORE_PATH
    # get constants package paths (these raise if the folders do not exist)
    const_path = get_relative_folder(PACKAGE, instrument_path)
    core_path = get_relative_folder(PACKAGE, default_path)
    # NOTE: a previous version also listed const_path and collected
    #       sub-directories via os.path.isdir() on bare file names (relative
    #       to the current working directory); the result was unused and the
    #       check was wrong, so that dead code has been removed
    # construct dotted sub-module prefixes from the relative paths
    relpath = os.path.normpath(instrument_path).replace('.', '')
    relpath = relpath.replace(os.sep, '.').strip('.')
    corepath = os.path.normpath(default_path).replace('.', '')
    corepath = corepath.replace(os.sep, '.').strip('.')
    # construct module import name
    if instrument is None:
        modpath = '{0}.{1}'.format(PACKAGE, corepath)
        filepath = os.path.join(core_path, '')
    else:
        modpath = '{0}.{1}.{2}'.format(PACKAGE, relpath, instrument.lower())
        filepath = os.path.join(const_path, instrument.lower())
    # get module names
    mods, paths = [], []
    for script in mod_list:
        # make sure script doesn't end with .py
        mscript = script.split('.')[0]
        # get mod path
        mod = '{0}.{1}'.format(modpath, mscript)
        # get file path
        fpath = os.path.join(filepath, script)
        # the script must exist, optionally after appending a .py suffix
        # BUGFIX: the fallback previously marked existing paths as "not
        # found" (misplaced else); now a missing path without a .py suffix
        # is retried with the suffix, and only then declared missing
        found = True
        if not os.path.exists(fpath):
            if not fpath.endswith('.py'):
                fpath += '.py'
                if not os.path.exists(fpath):
                    found = False
            else:
                found = False
        # deal with no file found
        if not found:
            emsgs = ['DevError: Const mod path "{0}" does not exist.'
                     ''.format(mod),
                     '\tpath = {0}'.format(fpath),
                     '\tfunction = {0}'.format(func_name)]
            raise ConfigError(emsgs, level='error')
        # append mods
        mods.append(mod)
        paths.append(fpath)
    # make sure we found something
    if len(mods) == 0:
        emsgs = ['DevError: No config dirs found',
                 '\tfunction = {0}'.format(func_name)]
        raise ConfigError(emsgs, level='error')
    if len(mods) != len(mod_list):
        emsgs = ['DevError: Const mod scrips missing found=[{0}]'
                 ''.format(','.join(mods)),
                 '\tfunction = {0}'.format(func_name)]
        raise ConfigError(emsgs, level='error')
    # return modules (paths or dotted names)
    if path:
        return paths
    else:
        return mods
def print_error(error):
    """
    Pretty-print a configuration error to standard output, framed by
    banner lines.

    :param error: exception-like object with ``level`` and ``message``
                  attributes; ``message`` may be a string or list of strings
    :return: None
    """
    # set function name (cannot break here --> no access to inputs)
    _ = display_func(None, 'print_error', __NAME__)
    # banner naming the error level
    print('\n')
    print('=' * 70)
    print(' Configuration file {0}:'.format(error.level))
    print('=' * 70, '\n')
    # normalise the message into a list of strings
    estring = error.message
    if type(estring) is not list:
        estring = [estring]
    # print each message, indenting every line (including continuations)
    for emsg in estring:
        print('\t' + emsg.replace('\n', '\n\t'))
    # print a gap between this and next lines
    print('=' * 70, '\n')
def break_point(params=None, allow=None, level=2):
    """
    Drop into an interactive debugger (ipdb if installed, else pdb).

    :param params: ParamDict or None, parameter dictionary; loaded via
                   load_config() when not given
    :param allow: bool or None, whether the breakpoint is allowed; when None
                  it defaults to params['ALLOW_BREAKPOINTS'], which is forced
                  to True below -- so only an explicit allow=False skips
    :param level: int, number of stack frames the debugger moves "up" so it
                  stops at the caller rather than inside this helper

    :return: None
    :raises drs_exceptions.DebugExit: when the user quits the debugger
    """
    # set function name (cannot break inside break function)
    _ = str(__NAME__) + '.break_point()'
    # if we don't have parameters load them from config file
    if params is None:
        params = load_config()
    # force to True
    params['ALLOW_BREAKPOINTS'] = True
    # if allow is not set
    if allow is None:
        allow = params['ALLOW_BREAKPOINTS']
    # if still not allowed the return
    if not allow:
        return
    # copy pdbrc (configures the debugger to start 'level' frames up)
    _copy_pdb_rc(params, level=level)
    # catch bdb quit
    # noinspection PyPep8
    try:
        _execute_ipdb()
    except Exception:
        emsg = 'USER[00-000-00000]: Debugger breakpoint exit.'
        raise drs_exceptions.DebugExit(emsg)
    finally:
        # delete pdbrc (always clean up the copied rc file)
        _remove_pdb_rc(params)
# noinspection PyUnusedLocal
def catch_sigint(signal_received, frame):
    """
    Signal handler: turn SIGINT / CTRL-C into a KeyboardInterrupt.

    :param signal_received: the signal number (unused, required signature)
    :param frame: the current stack frame (unused, required signature)
    :raises KeyboardInterrupt: always
    """
    # set function name (cannot break here --> no access to inputs)
    _ = display_func(None, 'catch_sigint', __NAME__)
    # re-raise as KeyboardInterrupt so normal exception handling applies
    raise KeyboardInterrupt('\nSIGINT or CTRL-C detected. Exiting\n')
def window_size(drows=80, dcols=80):
    """
    Measure the current terminal size.

    :param drows: int, fallback number of rows
    :param dcols: int, fallback number of columns
    :return: tuple of two ints, (rows, columns)
    """
    # set function name (cannot break here --> no access to inputs)
    _ = display_func(None, 'window_size', __NAME__)
    # unix: ask the stty command for the terminal size
    if os.name == 'posix':
        # without stty we cannot measure anything
        if shutil.which('stty') is None:
            return drows, dcols
        # try to open via open and split output back to rows and columns
        # noinspection PyPep8,PyBroadException
        try:
            rows, columns = os.popen('stty size', 'r').read().split()
            return int(rows), int(columns)
        except Exception:
            # fall through to the defaults below
            pass
    # windows: query the console screen buffer via the win32 api
    elif os.name == 'nt':
        # taken from: https://gist.github.com/jtriley/1108174
        # noinspection PyPep8,PyBroadException
        try:
            import struct
            from ctypes import windll, create_string_buffer
            # stderr handle is -12 (stdin is -10, stdout is -11)
            h = windll.kernel32.GetStdHandle(-12)
            csbi = create_string_buffer(22)
            res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
            if res:
                out = struct.unpack("hhhhHhhhhhh", csbi.raw)
                # the window rectangle occupies fields 5..8 of the buffer
                left, top, right, bottom = out[5:9]
                sizex = right - left + 1
                sizey = bottom - top + 1
                return int(sizey), int(sizex)
        except Exception:
            # fall through to the defaults below
            pass
    # if we have reached this point return the default rows and columns
    return drows, dcols
def display_func(params=None, name=None, program=None, class_name=None,
                 wlog=None, textentry=None):
    """
    Construct the display name of the current function
    (e.g. "program.Class.name()") and, in debug mode, log it.

    Also triggers a breakpoint when params['INPUTS']['BREAKFUNC'] matches
    the constructed name, and keeps per-function call counts in
    params['DEBUG_FUNC_LIST'] / params['DEBUG_FUNC_DICT'].

    :param params: ParamDict or None; when None the name is returned with
                   no breakpoint handling and no logging
    :param name: str or None, the function name ('Unknown' when None)
    :param program: str or None, the program/recipe name prefix
    :param class_name: str or None, the class name inserted after program
    :param wlog: logger callable or None (defaults to wlogbasic)
    :param textentry: text-entry callable or None (defaults to DisplayText)

    :return: str, the constructed function display name
    """
    # set function name (cannot break here --> no access to inputs)
    func_name = str(__NAME__) + '.display_func()'
    # deal with no wlog defined
    if wlog is None:
        wlog = drs_exceptions.wlogbasic
    # deal with not text entry defined
    if textentry is None:
        textentry = constant_functions.DisplayText()
    # start the string function
    strfunc = ''
    # deal with no file name
    if name is None:
        name = 'Unknown'
    # ----------------------------------------------------------------------
    # add the program
    # NOTE(review): when program is None the result keeps a leading '.'
    # (e.g. ".name()") -- presumably intentional, confirm against callers
    if program is not None:
        strfunc = str(program)
    if class_name is not None:
        strfunc += '.{0}'.format(class_name)
    # add the name
    strfunc += '.{0}'.format(name)
    # add brackets to show function
    if not strfunc.endswith('()'):
        strfunc += '()'
    # ----------------------------------------------------------------------
    # deal with adding a break point
    if params is not None:
        if 'INPUTS' in params and 'BREAKFUNC' in params['INPUTS']:
            # get break function
            breakfunc = params['INPUTS']['BREAKFUNC']
            # only deal with break function if it is set
            if breakfunc not in [None, 'None', '']:
                # get function name (without ending)
                funcname = strfunc.replace('()', '')
                # if function name endwith break function then we break here
                if funcname.endswith(breakfunc):
                    # log we are breaking due to break function
                    wargs = [breakfunc]
                    msg = textentry('10-005-00004', args=wargs)
                    wlog(params, 'warning', msg)
                    break_point(params, allow=True, level=3)
    # ----------------------------------------------------------------------
    # deal with no params (do not log)
    if params is None:
        return strfunc
    # deal with debug level too low (just return here)
    if params['DRS_DEBUG'] < params['DEBUG_MODE_FUNC_PRINT']:
        return strfunc
    # ----------------------------------------------------------------------
    # below here just for debug mode func print
    # ----------------------------------------------------------------------
    # add the string function to param dict
    if 'DEBUG_FUNC_LIST' not in params:
        params.set('DEBUG_FUNC_LIST', value=[None], source=func_name)
    if 'DEBUG_FUNC_DICT' not in params:
        params.set('DEBUG_FUNC_DICT', value=dict(), source=func_name)
    # append to list
    params['DEBUG_FUNC_LIST'].append(strfunc)
    # update debug dictionary
    if strfunc in params['DEBUG_FUNC_DICT']:
        params['DEBUG_FUNC_DICT'][strfunc] += 1
    else:
        params['DEBUG_FUNC_DICT'][strfunc] = 1
    # get count
    count = params['DEBUG_FUNC_DICT'][strfunc]
    # find previous entry
    previous = params['DEBUG_FUNC_LIST'][-2]
    # find out whether we have the same entry
    same_entry = previous == strfunc
    # add count
    strfunc += ' (N={0})'.format(count)
    # if we don't have a list then just print
    if params['DEBUG_FUNC_LIST'][-2] is None:
        # log in func
        wlog(params, 'debug', textentry('90-000-00004', args=[strfunc]),
             wrap=False)
    elif not same_entry:
        # get previous set of counts
        previous_count = _get_prev_count(params, previous)
        # only log if count is greater than 1
        if previous_count > 1:
            # log how many of previous there were
            dargs = [previous_count]
            wlog(params, 'debug', textentry('90-000-00005', args=dargs))
        # log in func
        wlog(params, 'debug', textentry('90-000-00004', args=[strfunc]),
             wrap=False)
    # return func_name
    return strfunc
# =============================================================================
# Config loading private functions
# =============================================================================
def _get_file_names(params, instrument=None):
    """
    List the user configuration files for ``instrument``.

    Looks first in the directory pointed to by the DRS_USERENV environment
    variable, then in the DRS_USER_DEFAULT directory, and returns the
    absolute paths of any USCRIPTS config files found in the instrument's
    sub-folder (an empty list when nothing applies).

    :param params: ParamDict, must contain 'DRS_USERENV',
                   'DRS_USER_DEFAULT' and 'DRS_PACKAGE'
    :param instrument: str or None, the instrument name (None returns [])

    :return: list of str, absolute paths of user config files found
    """
    # set function name (cannot break here --> no access to inputs)
    _ = display_func(params, '_get_file_names', __NAME__)
    # deal with no instrument
    if instrument is None:
        return []
    # get user environmental path
    user_env = params['DRS_USERENV']
    # get user default path (if environmental path unset)
    user_dpath = params['DRS_USER_DEFAULT']
    # get the package name
    drs_package = params['DRS_PACKAGE']
    # change user_dpath to a absolute path
    user_dpath = get_relative_folder(drs_package, user_dpath)
    # deal with no user environment and no default path
    if user_env is None and user_dpath is None:
        return []
    # set empty directory
    directory = None
    # -------------------------------------------------------------------------
    # User environmental path
    # -------------------------------------------------------------------------
    # check environmental path exists
    if user_env in os.environ:
        # get value
        path = os.environ[user_env]
        # check that directory linked exists
        if os.path.exists(path):
            # set directory
            directory = path
    # -------------------------------------------------------------------------
    # if directory is not empty then we need to get instrument specific files
    # -------------------------------------------------------------------------
    if directory is not None:
        # look for sub-directories (and if not found directory set to None so
        # that we check the user default path)
        source = 'environmental variables ({0})'.format(user_env)
        subdir = _get_subdir(directory, instrument, source=source)
        if subdir is None:
            directory = None
    # -------------------------------------------------------------------------
    # User default path
    # -------------------------------------------------------------------------
    # check default path exists
    if directory is None:
        # check the directory linked exists
        if os.path.exists(user_dpath):
            # set directory
            directory = user_dpath
    # if directory is still empty return empty list
    if directory is None:
        return []
    # -------------------------------------------------------------------------
    # if directory is not empty then we need to get instrument specific files
    # -------------------------------------------------------------------------
    # look for sub-directories (This time if not found we have none and should
    # return an empty set of files
    source = 'default user config file ({0})'.format(user_dpath)
    subdir = _get_subdir(directory, instrument, source=source)
    if subdir is None:
        return []
    # -------------------------------------------------------------------------
    # look for user configurations within instrument sub-folder
    # -------------------------------------------------------------------------
    files = []
    for script in USCRIPTS:
        # construct path
        path = os.path.join(directory, subdir, script)
        # check that it exists
        if os.path.exists(path):
            files.append(path)
    # deal with no files found
    if len(files) == 0:
        wmsg1 = ('User config defined but instrument "{0}" directory '
                 'has no configurations files')
        wmsg2 = '\tValid config files: {0}'.format(','.join(USCRIPTS))
        ConfigWarning([wmsg1.format(instrument), wmsg2])
    # return files
    return files
def _get_subdir(directory, instrument, source):
    """
    Find the sub-directory of ``directory`` named after ``instrument``
    (case-insensitive).

    :param directory: str, the directory to search
    :param instrument: str, the instrument name to match
    :param source: str, where ``directory`` was defined (used in the warning)

    :return: str or None, the matching sub-directory name, or None (after
             emitting a ConfigWarning) when it is not found
    """
    # set function name (cannot break here --> no access to inputs)
    # BUGFIX: previously reported 'catch_sigint' here -- a copy/paste error
    _ = display_func(None, '_get_subdir', __NAME__)
    # get display text
    textentry = constant_functions.DisplayText()
    # search the directory for an entry named after the instrument
    subdir = None
    for item in np.sort(os.listdir(directory)):
        # the entry must be a directory (checked via its full path)
        cond1 = os.path.isdir(os.path.join(directory, item))
        # and named the same as the instrument (case-insensitive)
        cond2 = item.lower() == instrument.lower()
        if cond1 and cond2:
            subdir = item
    # warn if the instrument sub-folder was not found
    if subdir is None:
        wargs = [source, instrument.lower(), directory]
        ConfigWarning(textentry('10-002-00001', args=wargs))
    # return the subdir
    return subdir
def get_relative_folder(package, folder: Union[str, Path]):
    """
    Get the absolute path of folder defined at relative path
    folder from package

    :param package: string, the python package name
    :param folder: string or Path, the relative path of the config folder

    :return data: string, the absolute path and filename of the default
                  config file
    :raises ConfigError: if the package or the resolved folder is missing
    """
    global REL_CACHE
    # TODO: update to pathlib.Path
    if isinstance(folder, Path):
        folder = str(folder)
    # set function name (cannot break here --> no access to inputs)
    func_name = display_func(None, 'get_relative_folder', __NAME__)
    # get text entry
    textentry = constant_functions.DisplayText()
    # ----------------------------------------------------------------------
    # check relative folder cache
    if package in REL_CACHE and folder in REL_CACHE[package]:
        return REL_CACHE[package][folder]
    # ----------------------------------------------------------------------
    # get the package.__init__ file path
    try:
        init = pkg_resources.resource_filename(package, '__init__.py')
    except ImportError:
        eargs = [package, func_name]
        raise ConfigError(textentry('00-008-00001', args=eargs), level='error')
    # Get the config_folder from relative path
    current = os.getcwd()
    # get directory name of folder
    dirname = os.path.dirname(init)
    # change to directory in init
    os.chdir(dirname)
    # get the absolute path of the folder
    data_folder = os.path.abspath(folder)
    # change back to working dir
    os.chdir(current)
    # test that folder exists
    if not os.path.exists(data_folder):
        # raise exception
        eargs = [os.path.basename(data_folder), os.path.dirname(data_folder)]
        raise ConfigError(textentry('00-003-00005', args=eargs), level='error')
    # ----------------------------------------------------------------------
    # update REL_CACHE
    if package not in REL_CACHE:
        REL_CACHE[package] = dict()
    # update entry
    # BUGFIX: previously wrote REL_CACHE[folder] = data_folder, so the
    # per-package cache tested at the top was never populated (every call
    # redid the full lookup and polluted REL_CACHE's top level)
    REL_CACHE[package][folder] = data_folder
    # ----------------------------------------------------------------------
    # return the absolute data_folder path
    return data_folder
def _load_from_module(modules, quiet=False):
    """
    Load, validate and collect all constants defined in ``modules``.

    :param modules: list of str, module import names to read constants from
    :param quiet: bool, passed to Const/Keyword validation
    :return: tuple of four lists -- keys, values, sources, instances
    :raises ConfigError: if a constant name appears in more than one module
    """
    # set function name (cannot break here --> no access to inputs)
    func_name = display_func(None, '_load_from_module', __NAME__)
    # get text entry
    textentry = constant_functions.DisplayText()
    # storage for returned values
    keys, values, sources, instances = [], [], [], []
    # loop around modules
    for module in modules:
        # get the paired lists of constant names and Const/Keyword objects
        mkeys, mvalues = constant_functions.generate_consts(module)
        # validate each constant in turn
        for key, mvalue in zip(mkeys, mvalues):
            # a constant may only be defined once across all modules
            if key in keys:
                eargs = [key, module, ','.join(modules), func_name]
                raise ConfigError(textentry('00-003-00006', args=eargs),
                                  level='error')
            # keep only constants that validate successfully
            if mvalue.validate(quiet=quiet):
                keys.append(key)
                values.append(mvalue.true_value)
                sources.append(mvalue.source)
                instances.append(mvalue)
    # return keys
    return keys, values, sources, instances
def _load_from_file(files, modules):
    """
    Read constants from user config ``files`` and validate them against the
    Const/Keyword definitions generated from ``modules``.

    :param files: list of str, user configuration file paths
    :param modules: list of str, module names defining the valid constants

    :return: tuple of four lists -- keys, values, sources, instances --
             one entry per file constant that matches a module definition
    """
    # set function name (cannot break here --> no access to inputs)
    _ = display_func(None, '_load_from_file', __NAME__)
    # get text entry
    textentry = constant_functions.DisplayText()
    # -------------------------------------------------------------------------
    # load constants from file
    # -------------------------------------------------------------------------
    fkeys, fvalues, fsources = [], [], []
    for filename in files:
        # get keys/values from script
        fkey, fvalue = constant_functions.get_constants_from_file(filename)
        # add to fkeys and fvalues (loop around fkeys)
        for it in range(len(fkey)):
            # get this iterations values
            fkeyi, fvaluei = fkey[it], fvalue[it]
            # if this is not a new constant print warning
            # (later files override earlier ones since both are kept and the
            # last matching entry wins in the validation loop below)
            if fkeyi in fkeys:
                # log warning message
                wargs = [fkeyi, filename, ','.join(set(fsources)), filename]
                ConfigWarning(textentry('10-002-00002', args=wargs),
                              level='warning')
            # append to list
            fkeys.append(fkeyi)
            fvalues.append(fvaluei)
            fsources.append(filename)
    # -------------------------------------------------------------------------
    # Now need to test the values are correct
    # -------------------------------------------------------------------------
    # storage for returned values
    keys, values, sources, instances = [], [], [], []
    # loop around modules
    for module in modules:
        # get a list of keys values
        mkeys, mvalues = constant_functions.generate_consts(module)
        # loop around each value and test type
        for it in range(len(mkeys)):
            # get iteration values
            mvalue = mvalues[it]
            # loop around the file values
            for jt in range(len(fkeys)):
                # if we are not dealing with the same key skip
                if fkeys[jt] != mkeys[it]:
                    continue
                # if we are then we need to validate
                value = mvalue.validate(fvalues[jt], source=fsources[jt])
                # now append to output lists
                keys.append(fkeys[jt])
                values.append(value)
                sources.append(fsources[jt])
                instances.append(mvalue)
    # return keys values and sources
    return keys, values, sources, instances
def _save_config_params(params):
    """
    Record every unique constant source (file/module) in
    params['DRS_CONFIG'].

    :param params: ParamDict, the parameter dictionary of constants
    :return: ParamDict, params with 'DRS_CONFIG' set to the unique sources
    """
    # set function name (cannot break here --> no access to inputs)
    func_name = display_func(params, '_save_config_params', __NAME__)
    # collect the unique set of sources used by the parameters
    usources = set(params.sources.values())
    # store them under DRS_CONFIG (sourced from this function)
    params['DRS_CONFIG'] = []
    params.set_source('DRS_CONFIG', func_name)
    for source in usources:
        params['DRS_CONFIG'].append(source)
    # return the parameters
    return params
def _check_mod_source(source: str) -> str:
    """
    Rewrite a file-system source path into a dotted, package-rooted form
    (e.g. ``<abs>/apero/core/...`` becomes ``apero.core...``).

    :param source: str or None, the source path to normalise
    :return: str or None, the normalised source; returned unchanged when
             None, non-existent, or outside the package
    """
    # set function name (cannot break here --> no access to inputs)
    _ = display_func(None, '_check_mod_source', __NAME__)
    # nothing to do for undefined or non-existent sources
    if source is None or not os.path.exists(source):
        return source
    # get package path
    package_path = get_relative_folder(PACKAGE, '')
    # paths outside the package are left untouched
    if package_path not in source:
        return source
    # swap the absolute package path for the package name, dot-separated
    source = source.replace(package_path, PACKAGE.lower())
    source = source.replace(os.sep, '.')
    # collapse any runs of repeated dots
    while '..' in source:
        source = source.replace('..', '.')
    # return edited source
    return source
def _execute_ipdb():
    """Start an interactive debugger: ipdb when installed, pdb otherwise."""
    # set function name (cannot break here --> within break function)
    _ = str(__NAME__) + '._execute_ipdb()'
    # prefer the ipython debugger when it is available
    # noinspection PyBroadException
    try:
        # noinspection PyUnresolvedReferences
        import ipdb
        ipdb.set_trace()
    except Exception as _:
        # fall back to the standard python debugger
        import pdb
        pdb.set_trace()
# =============================================================================
# Other private functions
# =============================================================================
# capitalisation function (for case insensitive keys)
def _capitalise_key(key: str) -> str:
    """
    Capitalizes "key" (used to make ParamDict case insensitive), only if
    key is a string

    :param key: string or object, if string then key is capitalized else
                nothing is done

    :return key: capitalized string (or unchanged object)
    """
    # set function name (cannot break here --> no access to inputs)
    _ = display_func(None, '_capitalise_key', __NAME__)
    # capitalise string keys (isinstance also covers str subclasses,
    # unlike the previous exact type(key) == str comparison)
    if isinstance(key, str):
        key = key.upper()
    return key
def _string_repr_list(key: str, values: Union[list, np.ndarray], source: str,
                      fmt: str) -> List[str]:
    """
    Represent a list (or array) as a string, truncated to the first
    40 characters.

    :param key: str, the key the list (values) came from
    :param values: vector, the list or numpy array to print as a string
    :param source: str, the source where the values were defined
    :param fmt: str, the format for the printed list

    :return: list of one str, the formatted entry
    """
    # set function name (cannot break here --> no access to inputs)
    # BUGFIX: previously reported '_load_from_file' -- a copy/paste error
    _ = display_func(None, '_string_repr_list', __NAME__)
    # get the list representation, truncated to 40 characters
    str_value = list(values).__repr__()
    if len(str_value) > 40:
        str_value = str_value[:40] + '...'
    # return the string as a list entry
    return [fmt.format(key, str_value, source)]
def _map_listparameter(value, separator=',', dtype=None):
    """
    Map a string list into a python list

    :param value: str or list, if list returns if string tries to evaluate
    :param separator: str, where to split the str at to make a list
    :param dtype: type, if set forces elements of list to this data type
    :return: list, the mapped list (or None after a BLOG error when the
             dtype conversion fails)
    """
    # set function name (cannot break here --> no access to inputs)
    func_name = display_func(None, '_map_listparameter', __NAME__)
    # get text entry
    textentry = constant_functions.DisplayText()
    # return list if already a list
    if isinstance(value, (list, np.ndarray)):
        return list(value)
    # try evaluating is a list
    # NOTE(review): eval() executes config-file text -- acceptable only
    # because config files are trusted local input; never feed untrusted
    # strings through this function
    # noinspection PyBroadException
    try:
        # evaluate value
        rawvalue = eval(value)
        # if it is a list return as a list
        if isinstance(rawvalue, list):
            return list(rawvalue)
    # if it is not pass
    except Exception as _:
        pass
    # deal with an empty value i.e. ''
    if value == '':
        return []
    # try to return dtyped data
    try:
        # first split by separator
        listparameter = value.split(separator)
        # return the stripped down values
        if dtype is not None and isinstance(dtype, type):
            return list(map(lambda x: dtype(x.strip()), listparameter))
        else:
            return list(map(lambda x: x.strip(), listparameter))
    except Exception as e:
        eargs = [value, type(e), e, func_name]
        BLOG(message=textentry('00-003-00002', args=eargs), level='error')
def _map_dictparameter(value: str, dtype: Union[None, Type] = None) -> dict:
    """
    Map a string dictionary into a python dictionary

    :param value: str, tries to evaluate string into a dictionary
                  i.e. "dict(a=1, b=2)" or {'a':1, 'b': 2}
    :param dtype: type, if set forces elements of list to this data type
    :return: dict, the mapped dictionary; NOTE an implicit None is returned
             when the evaluated value is not a dict or when BLOG handles an
             error without raising
    """
    # set function name (cannot break here --> no access to inputs)
    func_name = display_func(None, '_map_dictparameter', __NAME__)
    # get text entry
    textentry = constant_functions.DisplayText()
    # deal with an empty value i.e. ''
    if value == '':
        return dict()
    # try evaluating as a dict
    # NOTE(review): eval() executes config-file text -- acceptable only
    # because config files are trusted local input; never feed untrusted
    # strings through this function
    try:
        rawvalue = eval(value)
        if isinstance(rawvalue, dict):
            returndict = dict()
            # optionally coerce every value to the requested dtype
            for key in rawvalue.keys():
                if dtype is not None and isinstance(dtype, type):
                    returndict[key] = dtype(rawvalue[key])
                else:
                    returndict[key] = rawvalue[key]
            return returndict
    except Exception as e:
        eargs = [value, type(e), e, func_name]
        BLOG(message=textentry('00-003-00003', args=eargs), level='error')
def _copy_pdb_rc(params, level=0):
    """
    Copy the package .pdbrc template into the current working directory,
    substituting 'up' commands so the debugger starts ``level`` frames up.

    :param params: ParamDict, must contain 'DRS_PACKAGE',
                   'DRS_PDB_RC_FILE' and 'DRS_PDB_RC_FILENAME'
    :param level: int, number of stack frames to move up on debugger start

    :return: None (side effects: writes the rc file into the current
             directory and records it in the module-level CURRENT_PATH)
    """
    # set function name (cannot break here --> no access to inputs)
    _ = str(__NAME__) + '_copy_pdb_rc()'
    # set global CURRENT_PATH (read later by _remove_pdb_rc)
    global CURRENT_PATH
    # get package
    package = params['DRS_PACKAGE']
    # get path
    path = params['DRS_PDB_RC_FILE']
    filename = params['DRS_PDB_RC_FILENAME']
    # get current path
    CURRENT_PATH = os.getcwd()
    # get absolute path
    oldsrc = get_relative_folder(package, path)
    tmppath = oldsrc + '_tmp'
    # get newsrc
    newsrc = os.path.join(CURRENT_PATH, filename)
    # read the lines
    with open(oldsrc, 'r') as f:
        lines = f.readlines()
    # deal with levels (one 'up' command per frame to climb)
    if level == 0:
        upkey = ''
    else:
        upkey = 'up\n' * level
    # loop around lines and replace the {up} placeholder
    newlines = []
    for line in lines:
        newlines.append(line.format(up=upkey))
    # write the lines
    with open(tmppath, 'w') as f:
        f.writelines(newlines)
    # copy
    shutil.copy(tmppath, newsrc)
    # remove tmp file
    os.remove(tmppath)
def _remove_pdb_rc(params):
    """
    Remove the temporary .pdbrc file copied into the working directory by
    _copy_pdb_rc (if it is present).

    :param params: ParamDict, must contain 'DRS_PDB_RC_FILENAME'
    :return: None
    """
    # set function name (cannot break here --> no access to inputs)
    _ = str(__NAME__) + '_remove_pdb_rc()'
    # construct the copied rc file path in the stored working directory
    newsrc = os.path.join(CURRENT_PATH, params['DRS_PDB_RC_FILENAME'])
    # delete it if it exists
    if os.path.exists(newsrc):
        os.remove(newsrc)
def _get_prev_count(params, previous):
    """
    Count how many times ``previous`` occurs consecutively at the end of
    params['DEBUG_FUNC_LIST'] (ignoring the final, current entry).

    :param params: ParamDict, must contain 'DEBUG_FUNC_LIST'
    :param previous: str, the function-name entry to count
    :return: int, the length of the trailing run of ``previous``
    """
    # set function name (cannot break here --> no access to inputs)
    _ = str(__NAME__) + '._get_prev_count()'
    # the last entry is the current function, so skip it
    history = params['DEBUG_FUNC_LIST'][:-1]
    # walk backwards counting consecutive matches
    n_elements = 0
    for entry in reversed(history):
        if entry != previous:
            break
        n_elements += 1
    # return number of element founds
    return n_elements
# =============================================================================
# Start of code
# =============================================================================
# Main code here
if __name__ == "__main__":
    # placeholder entry point: confirms the module imports and runs cleanly
    print("Hello World!")
# =============================================================================
# End of code
# =============================================================================
| [
"ctypes.create_string_buffer",
"copy.deepcopy",
"sys.exit",
"numpy.nanmin",
"os.remove",
"os.path.exists",
"ctypes.windll.kernel32.GetConsoleScreenBufferInfo",
"os.listdir",
"apero.core.constants.constant_functions.generate_consts",
"os.path.normpath",
"apero.core.constants.constant_functions.Di... | [((44313, 44368), 'apero.core.constants.constant_functions.import_module', 'constant_functions.import_module', (['func_name', 'modules[0]'], {}), '(func_name, modules[0])\n', (44345, 44368), False, 'from apero.core.constants import constant_functions\n'), ((61893, 61925), 'apero.core.constants.constant_functions.DisplayText', 'constant_functions.DisplayText', ([], {}), '()\n', (61923, 61925), False, 'from apero.core.constants import constant_functions\n'), ((63423, 63455), 'apero.core.constants.constant_functions.DisplayText', 'constant_functions.DisplayText', ([], {}), '()\n', (63453, 63455), False, 'from apero.core.constants import constant_functions\n'), ((64071, 64082), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (64080, 64082), False, 'import os\n'), ((64132, 64153), 'os.path.dirname', 'os.path.dirname', (['init'], {}), '(init)\n', (64147, 64153), False, 'import os\n'), ((64192, 64209), 'os.chdir', 'os.chdir', (['dirname'], {}), '(dirname)\n', (64200, 64209), False, 'import os\n'), ((64270, 64293), 'os.path.abspath', 'os.path.abspath', (['folder'], {}), '(folder)\n', (64285, 64293), False, 'import os\n'), ((64331, 64348), 'os.chdir', 'os.chdir', (['current'], {}), '(current)\n', (64339, 64348), False, 'import os\n'), ((65189, 65221), 'apero.core.constants.constant_functions.DisplayText', 'constant_functions.DisplayText', ([], {}), '()\n', (65219, 65221), False, 'from apero.core.constants import constant_functions\n'), ((66562, 66594), 'apero.core.constants.constant_functions.DisplayText', 'constant_functions.DisplayText', ([], {}), '()\n', (66592, 66594), False, 'from apero.core.constants import constant_functions\n'), ((72966, 72998), 'apero.core.constants.constant_functions.DisplayText', 'constant_functions.DisplayText', ([], {}), '()\n', (72996, 72998), False, 'from apero.core.constants import constant_functions\n'), ((74554, 74586), 'apero.core.constants.constant_functions.DisplayText', 
'constant_functions.DisplayText', ([], {}), '()\n', (74584, 74586), False, 'from apero.core.constants import constant_functions\n'), ((75621, 75632), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (75630, 75632), False, 'import os\n'), ((75765, 75801), 'os.path.join', 'os.path.join', (['CURRENT_PATH', 'filename'], {}), '(CURRENT_PATH, filename)\n', (75777, 75801), False, 'import os\n'), ((76214, 76242), 'shutil.copy', 'shutil.copy', (['tmppath', 'newsrc'], {}), '(tmppath, newsrc)\n', (76225, 76242), False, 'import shutil\n'), ((76269, 76287), 'os.remove', 'os.remove', (['tmppath'], {}), '(tmppath)\n', (76278, 76287), False, 'import os\n'), ((76524, 76560), 'os.path.join', 'os.path.join', (['CURRENT_PATH', 'filename'], {}), '(CURRENT_PATH, filename)\n', (76536, 76560), False, 'import os\n'), ((76581, 76603), 'os.path.exists', 'os.path.exists', (['newsrc'], {}), '(newsrc)\n', (76595, 76603), False, 'import os\n'), ((10081, 10113), 'apero.core.constants.constant_functions.DisplayText', 'constant_functions.DisplayText', ([], {}), '()\n', (10111, 10113), False, 'from apero.core.constants import constant_functions\n'), ((44674, 44685), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (44682, 44685), False, 'import sys\n'), ((46536, 46563), 'os.path.join', 'os.path.join', (['core_path', '""""""'], {}), "(core_path, '')\n", (46548, 46563), False, 'import os\n'), ((46748, 46780), 'os.path.join', 'os.path.join', (['filepath', 'filename'], {}), '(filepath, filename)\n', (46760, 46780), False, 'import os\n'), ((48084, 48106), 'os.listdir', 'os.listdir', (['const_path'], {}), '(const_path)\n', (48094, 48106), False, 'import os\n'), ((48170, 48193), 'os.path.isdir', 'os.path.isdir', (['filename'], {}), '(filename)\n', (48183, 48193), False, 'import os\n'), ((48642, 48669), 'os.path.join', 'os.path.join', (['core_path', '""""""'], {}), "(core_path, '')\n", (48654, 48669), False, 'import os\n'), ((49097, 49127), 'os.path.join', 'os.path.join', (['filepath', 'script'], {}), '(filepath, 
script)\n', (49109, 49127), False, 'import os\n'), ((54424, 54456), 'apero.core.constants.constant_functions.DisplayText', 'constant_functions.DisplayText', ([], {}), '()\n', (54454, 54456), False, 'from apero.core.constants import constant_functions\n'), ((59227, 59247), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (59241, 59247), False, 'import os\n'), ((60189, 60215), 'os.path.exists', 'os.path.exists', (['user_dpath'], {}), '(user_dpath)\n', (60203, 60215), False, 'import os\n'), ((61208, 61247), 'os.path.join', 'os.path.join', (['directory', 'subdir', 'script'], {}), '(directory, subdir, script)\n', (61220, 61247), False, 'import os\n'), ((61290, 61310), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (61304, 61310), False, 'import os\n'), ((62059, 62080), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (62069, 62080), False, 'import os\n'), ((63813, 63868), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['package', '"""__init__.py"""'], {}), "(package, '__init__.py')\n", (63844, 63868), False, 'import pkg_resources\n'), ((64390, 64417), 'os.path.exists', 'os.path.exists', (['data_folder'], {}), '(data_folder)\n', (64404, 64417), False, 'import os\n'), ((65424, 65466), 'apero.core.constants.constant_functions.generate_consts', 'constant_functions.generate_consts', (['module'], {}), '(module)\n', (65458, 65466), False, 'from apero.core.constants import constant_functions\n'), ((66916, 66968), 'apero.core.constants.constant_functions.get_constants_from_file', 'constant_functions.get_constants_from_file', (['filename'], {}), '(filename)\n', (66958, 66968), False, 'from apero.core.constants import constant_functions\n'), ((68014, 68056), 'apero.core.constants.constant_functions.generate_consts', 'constant_functions.generate_consts', (['module'], {}), '(module)\n', (68048, 68056), False, 'from apero.core.constants import constant_functions\n'), ((69699, 69721), 'os.path.exists', 
'os.path.exists', (['source'], {}), '(source)\n', (69713, 69721), False, 'import os\n'), ((70583, 70599), 'ipdb.set_trace', 'ipdb.set_trace', ([], {}), '()\n', (70597, 70599), False, 'import ipdb\n'), ((76613, 76630), 'os.remove', 'os.remove', (['newsrc'], {}), '(newsrc)\n', (76622, 76630), False, 'import os\n'), ((12570, 12590), 'copy.deepcopy', 'copy.deepcopy', (['value'], {}), '(value)\n', (12583, 12590), False, 'import copy\n'), ((42643, 42654), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (42651, 42654), False, 'import sys\n'), ((45208, 45250), 'apero.core.constants.constant_functions.generate_consts', 'constant_functions.generate_consts', (['module'], {}), '(module)\n', (45242, 45250), False, 'from apero.core.constants import constant_functions\n'), ((46008, 46030), 'os.listdir', 'os.listdir', (['const_path'], {}), '(const_path)\n', (46018, 46030), False, 'import os\n'), ((46106, 46129), 'os.path.isdir', 'os.path.isdir', (['filename'], {}), '(filename)\n', (46119, 46129), False, 'import os\n'), ((46287, 46308), 'os.listdir', 'os.listdir', (['core_path'], {}), '(core_path)\n', (46297, 46308), False, 'import os\n'), ((46384, 46407), 'os.path.isdir', 'os.path.isdir', (['filename'], {}), '(filename)\n', (46397, 46407), False, 'import os\n'), ((46828, 46849), 'os.path.exists', 'os.path.exists', (['fpath'], {}), '(fpath)\n', (46842, 46849), False, 'import os\n'), ((48282, 48315), 'os.path.normpath', 'os.path.normpath', (['instrument_path'], {}), '(instrument_path)\n', (48298, 48315), False, 'import os\n'), ((48402, 48432), 'os.path.normpath', 'os.path.normpath', (['default_path'], {}), '(default_path)\n', (48418, 48432), False, 'import os\n'), ((49196, 49217), 'os.path.exists', 'os.path.exists', (['fpath'], {}), '(fpath)\n', (49210, 49217), False, 'import os\n'), ((51927, 51957), 'apero.lang.drs_exceptions.DebugExit', 'drs_exceptions.DebugExit', (['emsg'], {}), '(emsg)\n', (51951, 51957), False, 'from apero.lang import drs_exceptions\n'), ((52598, 52618), 
'shutil.which', 'shutil.which', (['"""stty"""'], {}), "('stty')\n", (52610, 52618), False, 'import shutil\n'), ((62167, 62200), 'os.path.join', 'os.path.join', (['directory', 'filename'], {}), '(directory, filename)\n', (62179, 62200), False, 'import os\n'), ((64462, 64491), 'os.path.basename', 'os.path.basename', (['data_folder'], {}), '(data_folder)\n', (64478, 64491), False, 'import os\n'), ((64493, 64521), 'os.path.dirname', 'os.path.dirname', (['data_folder'], {}), '(data_folder)\n', (64508, 64521), False, 'import os\n'), ((70743, 70758), 'pdb.set_trace', 'pdb.set_trace', ([], {}), '()\n', (70756, 70758), False, 'import pdb\n'), ((39290, 39306), 'numpy.nanmin', 'np.nanmin', (['value'], {}), '(value)\n', (39299, 39306), True, 'import numpy as np\n'), ((39308, 39324), 'numpy.nanmax', 'np.nanmax', (['value'], {}), '(value)\n', (39317, 39324), True, 'import numpy as np\n'), ((43211, 43222), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (43219, 43222), False, 'import sys\n'), ((53406, 53439), 'ctypes.windll.kernel32.GetStdHandle', 'windll.kernel32.GetStdHandle', (['(-12)'], {}), '(-12)\n', (53434, 53439), False, 'from ctypes import windll, create_string_buffer\n'), ((53459, 53483), 'ctypes.create_string_buffer', 'create_string_buffer', (['(22)'], {}), '(22)\n', (53479, 53483), False, 'from ctypes import windll, create_string_buffer\n'), ((53502, 53553), 'ctypes.windll.kernel32.GetConsoleScreenBufferInfo', 'windll.kernel32.GetConsoleScreenBufferInfo', (['h', 'csbi'], {}), '(h, csbi)\n', (53544, 53553), False, 'from ctypes import windll, create_string_buffer\n'), ((30575, 30595), 'copy.deepcopy', 'copy.deepcopy', (['value'], {}), '(value)\n', (30588, 30595), False, 'import copy\n'), ((49315, 49336), 'os.path.exists', 'os.path.exists', (['fpath'], {}), '(fpath)\n', (49329, 49336), False, 'import os\n'), ((53596, 53634), 'struct.unpack', 'struct.unpack', (['"""hhhhHhhhhhh"""', 'csbi.raw'], {}), "('hhhhHhhhhhh', csbi.raw)\n", (53609, 53634), False, 'import struct\n'), 
((39354, 39369), 'numpy.isnan', 'np.isnan', (['value'], {}), '(value)\n', (39362, 39369), True, 'import numpy as np\n'), ((52821, 52847), 'os.popen', 'os.popen', (['"""stty size"""', '"""r"""'], {}), "('stty size', 'r')\n", (52829, 52847), False, 'import os\n')] |
from math import sin   # sin() -- the function whose evaluation is being timed
from time import time  # wall-clock timestamps to measure elapsed time

x = list(range(0, 100))  # input values 0..99
tiempo_inicial = time()
# Idiomatic replacement for the preallocate-then-index loop. This also
# removes the duplicated magic number: the original iterated over
# range(100) independently of len(x), which would silently break if the
# size of x ever changed.
y = [sin(value) for value in x]
tiempo_final = time()
tiempo_ejecucion = tiempo_final - tiempo_inicial
print('El tiempo de ejecucion fue:', tiempo_ejecucion)  # in seconds
"math.sin",
"time.time"
] | [((290, 296), 'time.time', 'time', ([], {}), '()\n', (294, 296), False, 'from time import time\n'), ((361, 367), 'time.time', 'time', ([], {}), '()\n', (365, 367), False, 'from time import time\n'), ((334, 343), 'math.sin', 'sin', (['x[i]'], {}), '(x[i])\n', (337, 343), False, 'from math import sin\n')] |
# -*- coding: utf-8 -*-
# Copyright (C) <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Django views for application nagios.
"""
# Std imports
import logging
import json
import time
from base64 import urlsafe_b64decode
# Django imports
from django.http import HttpResponse
from django.views.generic import ListView
from django.views.decorators.csrf import csrf_exempt
from django.core import serializers
from django.shortcuts import render
# Models imports
from apps.nagios.models import Satellite, SecurityPort
# Utilities
from cipher import AESCipher
logger = logging.getLogger(__name__)
# View definitions
# ================
#
def acknowledge_token(request, token):
    """Acknowledge a host or service alert using an encrypted token.

    The token is a urlsafe-base64, AES-encrypted JSON payload describing
    the acknowledgement: hostname, optional service_description, the
    sticky/notify/persistent flags, author, Nagios command name and an
    expiry timestamp. Expired tokens render the "expired" page; tokens
    that cannot be decrypted/parsed yield HTTP 400.
    """
    time_now = time.time()
    # SECURITY NOTE(review): hard-coded AES key and IV in source -- these
    # should come from settings/environment, not the code base.
    cipher = AESCipher('ABCDEF0123456789', iv='iv1234567890ABCD')
    # Nagios external-command templates for host and service acks.
    host_command_line = "COMMAND [{timestamp}] {command};" \
                        "{hostname};" \
                        "{sticky};" \
                        "{notify};" \
                        "{persistent};" \
                        "{author};" \
                        "Ack by email, working on it."
    svc_command_line = "COMMAND [{timestamp}] {command};" \
                       "{hostname};" \
                       "{service_description};" \
                       "{sticky};" \
                       "{notify};" \
                       "{persistent};" \
                       "{author};" \
                       "Ack by email, working on it."
    # Try to decode the encrypted token to a python object (dict)
    try:
        token = str(token)
        json_token = cipher.decrypt(urlsafe_b64decode(token))
        ack_data = json.loads(json_token)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any base64/decrypt/JSON failure still
        # results in a 400 response.
        logger.exception("Unable to decrypt the provided token !")
        logger.debug("Token received: %s", token)
        return HttpResponse('Token is not valid !\n', status=400)
    # Check token validity in time
    if time_now > ack_data['expire_time']:
        if 'service_description' in ack_data:
            logger.warning(
                "Token validity for service alert \"%s / %s\" has expired !",
                ack_data['hostname'],
                ack_data['service_description'])
        else:
            logger.warning("Token validity for host alert \"%s\" has expired !",
                           ack_data['hostname'])
        return render(request, 'nagios/ack_email_expired.html', ack_data)
    # Build the ack command; the presence of service_description selects
    # the service template over the host template.
    if 'service_description' in ack_data:
        command_line = svc_command_line.format(timestamp=time_now, **ack_data)
    else:
        command_line = host_command_line.format(timestamp=time_now, **ack_data)
    # Establish a connection to satellites and broadcast the command.
    try:
        satellites = Satellite.live_connect()
        for conn in satellites.connections:
            site = conn[0]
            satellites.command(command_line, sitename=site)
    except Satellite.SatelliteConnectError as e:
        logger.exception('Error connecting on satellites !')
        return HttpResponse('Unable to connect to Nagios.\n'
                            'Error: {}\n'.format(e), status=400)
    logger.info("Processed ack by email: %s", command_line)
    return render(request, 'nagios/ack_email_passed.html', ack_data)
def get_satellite_list(request, format='json'):
    """Return the list of active satellites serialized as *format*.

    Any serializer format understood by Django is accepted (``json`` by
    default); ``csv`` is rendered by hand as ``name;fqdn;alias;live_port``
    lines, one satellite per line.
    """
    satellites = Satellite.objects.filter(active=True)
    # BUG FIX: the original test was ``format not in "csv"`` -- a substring
    # check, so e.g. format="c" or format="sv" silently fell into the CSV
    # branch. Compare for equality instead. (``format`` shadows the
    # builtin but is part of the URL-kwarg interface, so it keeps its name.)
    if format != "csv":
        return HttpResponse(serializers.serialize(format, satellites))
    csv = ""
    for sat in satellites:
        csv += "%s;%s;%s;%s\n" % (sat.name, sat.fqdn, sat.alias, sat.live_port)
    return HttpResponse(csv)
@csrf_exempt
def send_passive(request):
    """
    Web API that uses HTTP POST requests to send passive check results to Nagios.
    **Note**
    As this is POST data but we have no form, CSRF protection is off for Django using decorator ``@csrf_exempt``.
    You should provide the following POST variables to the URL of the Web API:
    - host
    - service
    - status
    - message
    The view logs any received HTTP TRAP to file ``~django/optools/log/http_trap.log`` on the Central server. File is rotated.
    How to use with **cURL**
    ------------------------
    This example sends a WARNING alert to the service CPU of host NAGIOS_DC_SATELLITE_EDC1::
        curl -f \
        -d host=NAGIOS_DC_SATELLITE_EDC1 \
        -d service=CPU \
        -d status=1 \
        -d message="Test TRAP HTTP" \
        http://canuxcheng.com/optools/nagios/passive/
    """
    # Log the incoming TRAP with its origin details
    logger.info('-------------------------------')
    logger.info('-- Receiving a new HTTP TRAP --')
    logger.info('-------------------------------')
    logger.info('From IP: %s', request.META.get('REMOTE_ADDR'))
    logger.info('User-Agent: %s', request.META.get('HTTP_USER_AGENT'))
    logger.debug('Request body: %s', request.body)
    # Livestatus queries: the external command to submit, plus two lookup
    # queries used to verify the host and service exist before submitting.
    command_line = 'COMMAND [{timestamp}] PROCESS_SERVICE_CHECK_RESULT;{host};{service};{status};{message}\n'
    query_find_host = 'GET hosts\nColumns: name services\nFilter: name = {host}\n'
    query_find_service = 'GET services\nColumns: description host_name\nFilter: host_name = {host}\nFilter: description = {service}\nAnd: 2\n'
    # Get POST data (host names are upper-cased to match Nagios naming)
    try:
        params = {
            'host': request.POST['host'].upper(),
            'service': request.POST['service'],
            'status': int(request.POST['status']),
            'message': request.POST['message'],
            'timestamp': int(time.time()),
        }
        logger.debug('Cleaned data: %s', params)
    except KeyError:
        # One of the required POST variables is missing
        logger.exception('Incomplete POST data !')
        return HttpResponse('Incomplete POST data ! Missing key.\n', status=400)
    except ValueError:
        # "status" was present but not an integer
        logger.exception('Incorrect value type for data !')
        return HttpResponse('The key \"status\" should be an integer within 0 (OK), 1 (WARNING), 2 (CRITICAL) and 3 (UNKNOWN).\n', status=400)
    # Prepare data to be sent to Nagios
    try:
        satellites = Satellite.live_connect()
        satellites.set_prepend_site(True)
    except Satellite.SatelliteConnectError as e:
        logger.exception('Error connecting on satellites !')
        return HttpResponse('Unable to connect to Nagios.\nError: {}\n'.format(e), status=400)
    # Check if host and service exist in Nagios
    host_dest = satellites.query(query_find_host.format(**params))
    if host_dest:
        # The host is found, check if the service exists on it
        service_dest = satellites.query(query_find_service.format(**params))
        if service_dest:
            # First column of the (site-prepended) result row is the site name
            sat = service_dest.pop()[0]
            logger.info('Preparing command for Nagios.')
            logger.debug('Command: %s', command_line.format(**params))
            logger.debug('Satellite: %s', sat)
            satellites.command(command_line.format(**params), sitename=sat)
        else:
            # Service not found
            message = 'Service \"{service}\" does not exist on host \"{host}\".\n'.format(**params)
            logger.error(message)
            return HttpResponse(message, status=400)
    else:
        # Host not found
        message = 'The host \"{host}\" does not exist in Nagios.\n'.format(**params)
        logger.error(message)
        return HttpResponse(message, status=400)
    # Everything is OK
    logger.info('HTTP TRAP processed successfully.')
    return HttpResponse()
# Class-based views
class SatelliteListView(ListView):
    """
    Show the list of satellites.
    The queryset (all Satellite rows) is exposed to the template as
    ``systems_list`` together with extra navigation/context data.
    """
    context_object_name = "systems_list"
    model = Satellite
    def get_context_data(self, **kwargs):
        # Call the base implementation first to get a context
        context = super(SatelliteListView, self).get_context_data(**kwargs)
        # Adding extra context data to the view
        context['section'] = {'systems': 'active'}  # presumably marks the "systems" nav entry active -- confirm in template
        context['base_url'] = self.request.build_absolute_uri('/').strip('/')  # scheme://host with no trailing slash
        context['ports'] = SecurityPort.objects.all()  # all security ports, for display alongside satellites
        return context
| [
"logging.getLogger",
"django.shortcuts.render",
"json.loads",
"base64.urlsafe_b64decode",
"django.http.HttpResponse",
"apps.nagios.models.SecurityPort.objects.all",
"apps.nagios.models.Satellite.live_connect",
"apps.nagios.models.Satellite.objects.filter",
"cipher.AESCipher",
"django.core.serializ... | [((1599, 1626), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1616, 1626), False, 'import logging\n'), ((1795, 1806), 'time.time', 'time.time', ([], {}), '()\n', (1804, 1806), False, 'import time\n'), ((1820, 1872), 'cipher.AESCipher', 'AESCipher', (['"""ABCDEF0123456789"""'], {'iv': '"""iv1234567890ABCD"""'}), "('ABCDEF0123456789', iv='iv1234567890ABCD')\n", (1829, 1872), False, 'from cipher import AESCipher\n'), ((4389, 4446), 'django.shortcuts.render', 'render', (['request', '"""nagios/ack_email_passed.html"""', 'ack_data'], {}), "(request, 'nagios/ack_email_passed.html', ack_data)\n", (4395, 4446), False, 'from django.shortcuts import render\n'), ((4596, 4633), 'apps.nagios.models.Satellite.objects.filter', 'Satellite.objects.filter', ([], {'active': '(True)'}), '(active=True)\n', (4620, 4633), False, 'from apps.nagios.models import Satellite, SecurityPort\n'), ((8680, 8694), 'django.http.HttpResponse', 'HttpResponse', ([], {}), '()\n', (8692, 8694), False, 'from django.http import HttpResponse\n'), ((2724, 2746), 'json.loads', 'json.loads', (['json_token'], {}), '(json_token)\n', (2734, 2746), False, 'import json\n'), ((3419, 3477), 'django.shortcuts.render', 'render', (['request', '"""nagios/ack_email_expired.html"""', 'ack_data'], {}), "(request, 'nagios/ack_email_expired.html', ack_data)\n", (3425, 3477), False, 'from django.shortcuts import render\n'), ((3924, 3948), 'apps.nagios.models.Satellite.live_connect', 'Satellite.live_connect', ([], {}), '()\n', (3946, 3948), False, 'from apps.nagios.models import Satellite, SecurityPort\n'), ((4890, 4907), 'django.http.HttpResponse', 'HttpResponse', (['csv'], {}), '(csv)\n', (4902, 4907), False, 'from django.http import HttpResponse\n'), ((7307, 7331), 'apps.nagios.models.Satellite.live_connect', 'Satellite.live_connect', ([], {}), '()\n', (7329, 7331), False, 'from apps.nagios.models import Satellite, SecurityPort\n'), ((8558, 8591), 
'django.http.HttpResponse', 'HttpResponse', (['message'], {'status': '(400)'}), '(message, status=400)\n', (8570, 8591), False, 'from django.http import HttpResponse\n'), ((9250, 9276), 'apps.nagios.models.SecurityPort.objects.all', 'SecurityPort.objects.all', ([], {}), '()\n', (9274, 9276), False, 'from apps.nagios.models import Satellite, SecurityPort\n'), ((2679, 2703), 'base64.urlsafe_b64decode', 'urlsafe_b64decode', (['token'], {}), '(token)\n', (2696, 2703), False, 'from base64 import urlsafe_b64decode\n'), ((2891, 2941), 'django.http.HttpResponse', 'HttpResponse', (['"""Token is not valid !\n"""'], {'status': '(400)'}), "('Token is not valid !\\n', status=400)\n", (2903, 2941), False, 'from django.http import HttpResponse\n'), ((4690, 4731), 'django.core.serializers.serialize', 'serializers.serialize', (['format', 'satellites'], {}), '(format, satellites)\n', (4711, 4731), False, 'from django.core import serializers\n'), ((6944, 7009), 'django.http.HttpResponse', 'HttpResponse', (['"""Incomplete POST data ! Missing key.\n"""'], {'status': '(400)'}), "('Incomplete POST data ! Missing key.\\n', status=400)\n", (6956, 7009), False, 'from django.http import HttpResponse\n'), ((7108, 7246), 'django.http.HttpResponse', 'HttpResponse', (['"""The key "status" should be an integer within 0 (OK), 1 (WARNING), 2 (CRITICAL) and 3 (UNKNOWN).\n"""'], {'status': '(400)'}), '(\n """The key "status" should be an integer within 0 (OK), 1 (WARNING), 2 (CRITICAL) and 3 (UNKNOWN).\n"""\n , status=400)\n', (7120, 7246), False, 'from django.http import HttpResponse\n'), ((8359, 8392), 'django.http.HttpResponse', 'HttpResponse', (['message'], {'status': '(400)'}), '(message, status=400)\n', (8371, 8392), False, 'from django.http import HttpResponse\n'), ((6784, 6795), 'time.time', 'time.time', ([], {}), '()\n', (6793, 6795), False, 'import time\n')] |
from flask_wtf import Form
from flask_wtf import FlaskForm
from wtforms import StringField,TextAreaField,SubmitField,SelectField
#from flask_wtf import import Required
#from wtforms.validators import Required
class PitchForm(FlaskForm):
    """Form for submitting a new pitch: a title, the pitch text and a category."""
    title = StringField('Pitch title')
    text = TextAreaField('Text')
    # Choices are (value, label) pairs rendered as a <select> element.
    category = SelectField('Type',choices=[('interview','Interview pitch'),('product','Product pitch'),('promotion','Promotion pitch')])
    submit = SubmitField('Submit')
class UpdateProfile(FlaskForm):
    """Form for editing the user's profile biography."""
    bio = TextAreaField('Bio.')
    submit = SubmitField('Submit')
class CommentForm(FlaskForm):
    """Form for leaving a comment on a pitch."""
    text = TextAreaField('Leave a comment:')
    submit = SubmitField('Submit')
| [
"wtforms.SelectField",
"wtforms.SubmitField",
"wtforms.StringField",
"wtforms.TextAreaField"
] | [((252, 278), 'wtforms.StringField', 'StringField', (['"""Pitch title"""'], {}), "('Pitch title')\n", (263, 278), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField\n'), ((290, 311), 'wtforms.TextAreaField', 'TextAreaField', (['"""Text"""'], {}), "('Text')\n", (303, 311), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField\n'), ((327, 458), 'wtforms.SelectField', 'SelectField', (['"""Type"""'], {'choices': "[('interview', 'Interview pitch'), ('product', 'Product pitch'), (\n 'promotion', 'Promotion pitch')]"}), "('Type', choices=[('interview', 'Interview pitch'), ('product',\n 'Product pitch'), ('promotion', 'Promotion pitch')])\n", (338, 458), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField\n'), ((462, 483), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (473, 483), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField\n'), ((527, 548), 'wtforms.TextAreaField', 'TextAreaField', (['"""Bio."""'], {}), "('Bio.')\n", (540, 548), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField\n'), ((562, 583), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (573, 583), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField\n'), ((626, 659), 'wtforms.TextAreaField', 'TextAreaField', (['"""Leave a comment:"""'], {}), "('Leave a comment:')\n", (639, 659), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField\n'), ((673, 694), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (684, 694), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField\n')] |
import json
import os
import sys
import yaml
def parseYaml(file):
    """Parse the YAML document at path *file* into Python objects.

    Uses FullLoader, which resolves standard YAML tags but refuses
    arbitrary object construction (unlike the legacy unsafe Loader).
    The ``with`` block guarantees the handle is closed even if parsing
    raises -- the original leaked the handle on error.
    """
    with open(file) as yFile:
        return yaml.load(yFile, Loader=yaml.FullLoader)
return yml
def generateCosts(tosca, deploymentInfo):
    """Build the per-node cost table for the TOSCA topology.

    Compute nodes receive a single zero-consumption cost record carrying
    the deployment's cluster size; every other node inherits its
    consumption list from ``deploymentInfo``. A "bottom" entry with an
    empty cost list is always present.
    """
    node_templates = tosca["topology_template"]["node_templates"]
    costs = {"bottom": []}
    for name, template in node_templates.items():
        if "Compute" in template["type"]:
            costs[name] = [{
                "name": name,
                "memory": 0,
                "storage": 0,
                "clusterNodes": deploymentInfo["clusterNodes"],
            }]
        else:
            costs[name] = deploymentInfo["consumption"][name]
    return costs
def generateCostsPy(costs, costsFilePath):
    """Write *costs* as a Python module at *costsFilePath*.

    The generated file defines ``costs = <json literal>`` followed by the
    contents of ``config/costs-template-getter.py`` (read relative to the
    current working directory). Reading the template first means a missing
    template no longer leaves a half-written output file behind, and the
    ``with`` blocks close both handles even on error.
    """
    with open("config/costs-template-getter.py") as getter:
        getter_source = getter.read()
    with open(costsFilePath, "w+") as costsFile:
        costsFile.write("costs = " + json.dumps(costs))
        costsFile.write("\n")
        costsFile.write(getter_source)
def generateCompositors(tosca, deploymentInfo):
    """Return the compositor-function table for every node type in the model.

    Each entry maps a node type to the names of its horizontal ("h") and
    vertical ("v") composition functions. A fixed "bottom" entry is always
    included; Compute types differ from other types only in their vertical
    function. ``deploymentInfo`` is accepted for interface symmetry with
    generateCosts but is not consulted.
    """
    comps = {
        "bottom": {"h": "union_placements", "v": "union_placements"},
    }
    node_templates = tosca["topology_template"]["node_templates"]
    for template in node_templates.values():
        node_type = template["type"]
        if node_type in comps:
            continue  # type already handled
        if "Compute" in node_type:
            comps[node_type] = {"h": "sum_consumptions", "v": "update_placement"}
        else:
            comps[node_type] = {"h": "sum_consumptions", "v": "sum_consumptions"}
    return comps
def generateCompositorsPy(comps, compsFilePath):
    """Write *comps* as a Python module at *compsFilePath* and return the path.

    The generated file is: the contents of
    ``config/compositors-template-functions.py``, then a ``compositors``
    dict literal whose keys are the node types lower-cased with dots
    removed and whose h/v values are bare function names (so they resolve
    against the template's functions), then the getter template.
    Templates are read up-front so a missing file cannot leave a partial
    output behind; ``with`` blocks make all handles exception-safe.
    """
    with open("config/compositors-template-functions.py") as f:
        functions_source = f.read()
    with open("config/compositors-template-getter.py") as f:
        getter_source = f.read()
    with open(compsFilePath, "w+") as compsFile:
        compsFile.write(functions_source)
        compsFile.write("compositors = { ")
        for nodeType in comps:
            # normalise the key exactly as the solver expects it
            compsFile.write(" '" + nodeType.replace(".", "").lower() + "': {")
            compsFile.write(" 'h': " + comps[nodeType]["h"] + ",")
            compsFile.write(" 'v': " + comps[nodeType]["v"])
            compsFile.write("}, ")
        compsFile.write(" }")
        compsFile.write("\n")
        compsFile.write(getter_source)
    return compsFilePath
def main(args):
    """Generate compositors.py and costs.py for a TOSCA model.

    args -- [toscaFile, deploymentInfoFile, targetFolder]; all three are
    resolved to absolute paths before the working directory is switched
    to ``loader/`` (so the config/ templates resolve) and back afterwards.
    """
    # parsing command line input
    if len(args) < 3:
        print("usage: loader.py <toscaFile> <deploymentInfoFile> <targetFolder>")
        exit(2)
    toscaFile = os.path.abspath(args[0])
    deploymentInfoFile = os.path.abspath(args[1])
    targetFolder = os.path.abspath(args[2])
    os.chdir("loader")  # template files are read relative to loader/
    # parsing input TOSCA and deployment info files
    tosca = parseYaml(toscaFile)
    deploymentInfo = parseYaml(deploymentInfoFile)
    # creating target folder
    if not(os.path.exists(targetFolder)):
        os.mkdir(targetFolder)
    # generating file compositors.py
    comps = generateCompositors(tosca, deploymentInfo)
    targetCompsFilePath = targetFolder + "/compositors.py"
    generateCompositorsPy(comps, targetCompsFilePath)
    # generating file costs.py
    costs = generateCosts(tosca, deploymentInfo)
    targetCostsFilePath = targetFolder + "/costs.py"
    generateCostsPy(costs, targetCostsFilePath)
    os.chdir("..")  # restore the working directory (assumes cwd was loader/'s parent -- confirm)
main(sys.argv[1:])
| [
"os.path.exists",
"json.dumps",
"yaml.load",
"os.chdir",
"os.mkdir",
"os.path.abspath"
] | [((100, 140), 'yaml.load', 'yaml.load', (['yFile'], {'Loader': 'yaml.FullLoader'}), '(yFile, Loader=yaml.FullLoader)\n', (109, 140), False, 'import yaml\n'), ((3150, 3174), 'os.path.abspath', 'os.path.abspath', (['args[0]'], {}), '(args[0])\n', (3165, 3174), False, 'import os\n'), ((3200, 3224), 'os.path.abspath', 'os.path.abspath', (['args[1]'], {}), '(args[1])\n', (3215, 3224), False, 'import os\n'), ((3244, 3268), 'os.path.abspath', 'os.path.abspath', (['args[2]'], {}), '(args[2])\n', (3259, 3268), False, 'import os\n'), ((3274, 3292), 'os.chdir', 'os.chdir', (['"""loader"""'], {}), "('loader')\n", (3282, 3292), False, 'import os\n'), ((3926, 3940), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (3934, 3940), False, 'import os\n'), ((3471, 3499), 'os.path.exists', 'os.path.exists', (['targetFolder'], {}), '(targetFolder)\n', (3485, 3499), False, 'import os\n'), ((3510, 3532), 'os.mkdir', 'os.mkdir', (['targetFolder'], {}), '(targetFolder)\n', (3518, 3532), False, 'import os\n'), ((1032, 1049), 'json.dumps', 'json.dumps', (['costs'], {}), '(costs)\n', (1042, 1049), False, 'import json\n')] |
import sys
import os
sys.path = [os.path.abspath(os.path.join(__file__, '..', '..')), ] + sys.path
import INoDS_model as inods
import numpy as np
import time
##################################################
## NOTE: INoDS requires the network and health data to be formatted in
## a specific manner. Check example files.
###############################################
#############################################
### Please edit these based on your input files
#############################################
# Provide the network hypothesis stored as an edgelist
edge_filename = "Edge_connections_poisson.csv"
# Provide the health data
health_filename = "Health_data_nolag.csv"
# provide filename for output files
output_filename = "complete_data_SI_beta0.045"
###########################################
### Model parameters
###########################################
## the number of null networks to create a null distribution of predictive power
## NOTE: edge connections of networks are completely randomized, i.e., Jaccard index=0
## If complete randomization is not possible, then the model will throw an error
null_networks = 100
## do you know the true values? If not set it to [0,0,0]
truth = [0.045, 0, 0.01]
infection_type = "SI"
## specify chain length and burn-in
burnin = 50
# number of iterations after burnin
iteration = 100
#####################################
#### run INoDS
######################################
start = time.time()
inods.run_inods_sampler(edge_filename, health_filename, output_filename, infection_type, null_networks, burnin, iteration, truth = truth,verbose=True, diagnosis_lag=False, null_comparison=True, normalize_edge_weight=False, is_network_dynamic=True, parameter_estimate = True)
end = time.time()
# BUG FIX: the original line was ``print ("total run time="), end-start``
# -- a Python-2-style statement that printed only the label and discarded
# the elapsed time as an unused tuple. Print both values together.
print("total run time=", end - start)
| [
"os.path.join",
"time.time",
"INoDS_model.run_inods_sampler"
] | [((1442, 1453), 'time.time', 'time.time', ([], {}), '()\n', (1451, 1453), False, 'import time\n'), ((1454, 1739), 'INoDS_model.run_inods_sampler', 'inods.run_inods_sampler', (['edge_filename', 'health_filename', 'output_filename', 'infection_type', 'null_networks', 'burnin', 'iteration'], {'truth': 'truth', 'verbose': '(True)', 'diagnosis_lag': '(False)', 'null_comparison': '(True)', 'normalize_edge_weight': '(False)', 'is_network_dynamic': '(True)', 'parameter_estimate': '(True)'}), '(edge_filename, health_filename, output_filename,\n infection_type, null_networks, burnin, iteration, truth=truth, verbose=\n True, diagnosis_lag=False, null_comparison=True, normalize_edge_weight=\n False, is_network_dynamic=True, parameter_estimate=True)\n', (1477, 1739), True, 'import INoDS_model as inods\n'), ((1739, 1750), 'time.time', 'time.time', ([], {}), '()\n', (1748, 1750), False, 'import time\n'), ((49, 83), 'os.path.join', 'os.path.join', (['__file__', '""".."""', '""".."""'], {}), "(__file__, '..', '..')\n", (61, 83), False, 'import os\n')] |
import os
import random as rn
def clear(): return os.system('cls')
# Load the word list: one candidate word per line, newlines stripped.
lines = [x.replace('\n', '') for x in tuple(open('words.txt', 'r'))]
clear()
words = lines
# Per-round game state (reinitialised by reset()):
char_set = set()  # letters the player has tried so far
word = ''  # the secret word (empty until reset() picks one)
word_set = set(word)  # distinct letters of the secret word
word_len = len(word)
trie = 6  # remaining wrong guesses ("tries"; name keeps the original spelling)
score = [0, 0]  # [wins, losses] across rounds
def reset():
    """Pick a fresh secret word and reinitialise all per-round game state."""
    global word, trie, char_set, word_set, word_len
    word = words[rn.randint(0, len(words) - 1)]
    char_set = set()
    word_set = set(word)
    word_len = len(word)
    trie = 6
reset()
def propose():
    """Show the running score and ask whether to play another round.

    Loops until the player answers 'y' (returns True) or 'n' (returns False).
    """
    print('Score [{0} : {1}]\n Would like to play again? y/n'.format(
        score[0], score[1]))
    while True:
        answer = input()
        if answer == 'y':
            return True
        if answer == 'n':
            return False
        print('Sorry i didn\'t get that')
def ask_for_letter():
    """Prompt until the player enters a letter they have not tried before.

    The accepted guess is recorded in ``char_set`` and returned.
    BUG FIX: the original returned an already-tried letter anyway, so a
    repeated wrong guess was fed back into check() and cost a second try;
    now we keep prompting until a new guess arrives.
    NOTE(review): multi-character input is still accepted as a single
    "letter", as in the original -- confirm whether it should be rejected.
    """
    print("Enter letter?")
    while True:
        c = input()
        if len(c) == 0:
            continue  # ignore empty input
        if c in char_set:
            print('you allready tried that letter')
            continue
        char_set.add(c)
        return c
def check(c):
    """Apply a guess: deduct a try on a miss and report whether the word is complete.

    Returns True once every letter of the secret word has been guessed.
    """
    global trie
    if c not in word_set:
        trie -= 1
    # the round is won when the secret word's letters are a subset of the guesses
    return word_set <= char_set
def print_word():
    """Render the guessed letters, the remaining tries and the masked word."""
    tried = [letter + ' ' for letter in char_set]
    print('You tried: ' + ''.join(reversed(tried)))
    print('Tries left:', trie)
    # unguessed letters are masked with the section sign (chr(167))
    masked = [(letter + ' ') if letter in char_set else chr(167) + ' '
              for letter in word]
    print(''.join(masked))
print('wellcome to hang man')
print_word()
# Main game loop: one guess per iteration until the player declines a rematch.
while True:
    c = ask_for_letter()
    if check(c):
        # Word fully revealed -- round won.
        clear()
        print('You win')
        print_word()
        score[0] += 1
        if (not propose()):
            break
        else:
            reset()
    if trie == 0:
        # Out of tries -- round lost.
        print('You dead')
        score[1] += 1
        if (not propose()):
            break
        else:
            reset()
    clear()
    print_word()
| [
"os.system"
] | [((52, 68), 'os.system', 'os.system', (['"""cls"""'], {}), "('cls')\n", (61, 68), False, 'import os\n')] |
import os
import sys
import fcntl
fh=0
def run_once():
    """Guarantee that only one instance of this script runs at a time.

    Takes an exclusive, non-blocking flock on the script file itself and
    keeps the handle in the module-global ``fh`` (the lock is released
    automatically when the process exits). If another instance already
    holds the lock, exit immediately with status 0.
    """
    global fh
    fh=open(os.path.realpath(__file__),'r')
    try:
        fcntl.flock(fh,fcntl.LOCK_EX|fcntl.LOCK_NB)
    except OSError:
        # Narrowed from a bare ``except:``; flock signals contention with
        # OSError (BlockingIOError) under LOCK_NB. Another instance is
        # running -- leave quietly.
        os._exit(0)
| [
"os.path.realpath",
"os._exit",
"fcntl.flock"
] | [((81, 107), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (97, 107), False, 'import os\n'), ((130, 176), 'fcntl.flock', 'fcntl.flock', (['fh', '(fcntl.LOCK_EX | fcntl.LOCK_NB)'], {}), '(fh, fcntl.LOCK_EX | fcntl.LOCK_NB)\n', (141, 176), False, 'import fcntl\n'), ((194, 205), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (202, 205), False, 'import os\n')] |
import pytest
import asyncio
@pytest.mark.asyncio
async def test_faulty_setup(Player):
    # Build a pipeline whose middle element is named "faulty" -- presumably
    # an intentionally invalid/misbehaving element; confirm against the
    # Player fixture's contract. The test passes if play/sleep/stop all
    # complete without raising.
    p = Player.from_description("videotestsrc ! faulty ! fakesink")
    await p.play()
    # let the pipeline run for a few seconds before tearing it down
    await asyncio.sleep(5)
    await p.stop()
"asyncio.sleep"
] | [((186, 202), 'asyncio.sleep', 'asyncio.sleep', (['(5)'], {}), '(5)\n', (199, 202), False, 'import asyncio\n')] |
# Library imports
from flask import Flask, request, make_response, Response
from flask_cors import CORS
import json
import os
import sys
# Add James to path
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
# Project imports
from api.jamesClasses import inputCorpus
from api.jamesConfig import cfg
from api.jamesCSV import makeCSV
from api.jamesMain import process
from api.jamesPreProcessing import separateSentences
# Flask backend setup
app = Flask(__name__)  # the backend's Flask application object
cors = CORS(app)  # flask-cors: permit cross-origin requests (frontend is served separately)
# POST request handling for uploaded files
@app.route('/upload', methods=['GET', 'POST'])
def index():
    """Handle corpus upload: build an inputCorpus from posted .txt files
    and run topic/sentiment processing over it.

    Expects POST fields ``fileCount``, ``numTopics``, ``datasetChoice``
    and files named ``file0`` .. ``file<fileCount-1>``. Returns the
    processing results as JSON on success, or a (message, 500) pair on
    any validation or processing failure.
    """
    try:
        if request.method == 'POST':
            # Initialize an empty inputCorpus object, imported from jamesClasses
            corpus = inputCorpus()
            # Running total of sentences across all uploaded documents
            sentenceCount = 0
            for x in range(int(request.form["fileCount"])):
                # Files are named 'file0', 'file1', 'file2', ... by the
                # frontend (the original comment wrongly said 'file1'-first)
                file = 'file' + str(x)
                if file not in request.files:
                    return 'Error with attached file(s)', 500
                # Only .txt uploads are accepted
                if request.files.get(file).filename.split(".")[-1] != 'txt':
                    return 'Only .txt files accepted', 500
                # Read and decode the contents of each file
                try:
                    contents = request.files.get(file).read().decode("utf-8")
                except Exception:
                    # narrowed from a bare except: read/decode failures only
                    return 'Error with attached file(s)', 500
                if contents == "":
                    return 'File empty', 500
                # Use the filename without its extension as the document title
                title = request.files.get(file).filename.split(".")[0]
                corpus.addDoc(title, contents)
                sentenceCount += len(separateSentences(contents))
            # The number of topics is taken from the request
            try:
                numTopics = int(request.form["numTopics"])
            except (KeyError, ValueError):
                return "Error with number of topics", 500
            # The topic number cannot exceed the configured maximum...
            if numTopics > cfg['topicmax']:
                return "Topic number greater than topic max " + str(cfg['topicmax']), 500
            # ...nor the total number of sentences in the corpus
            if numTopics > sentenceCount:
                return "Topic number greater than sentence count", 500
            # The dataset selected for sentiment analysis
            try:
                datasetChoice = request.form["datasetChoice"]
            except KeyError:
                return 'Error with selected dataset', 500
            # process() (jamesMain) produces results from the input corpus
            results = process(corpus, numTopics, datasetChoice)
            if results is None:
                return 'Error with attached file(s)', 500
            # Return the results to the frontend as JSON
            return json.dumps(results), 200
        # A GET request simply reports that nothing was uploaded
        if request.method == 'GET':
            return "No files received", 200
    except Exception as e:
        # top-level boundary: report any unexpected failure as a 500
        return "Error processing attached files: " + str(e), 500
# POST request handling for downloading results
@app.route('/download', methods=['POST'])
def download():
# Try to download csv of given results
try:
# Load the results and hidden topics from the request json object
results = json.loads(request.form["results"])
hidden = json.loads(request.form["hiddenTopics"])
# If the user has hidden all topics, return an error
if len(hidden) >= len(results["topics"]):
return "All topics hidden", 500
# Construct the csv
data = makeCSV(results,hidden)
# Construct the response
output = make_response(data)
output.headers["Content-Disposition"] = "attachment; filename=export.csv"
output.headers["Content-type"] = "text/csv"
# Return the response
return output
# If csv generation fails, return error result
except:
return "Error downloading results", 500
# Backend main
if __name__ == '__main__':
app.run(host=cfg['host']['ip'], port=cfg['host']['port'], threaded=True)
| [
"api.jamesClasses.inputCorpus",
"flask.request.files.get",
"json.loads",
"flask_cors.CORS",
"flask.Flask",
"json.dumps",
"api.jamesCSV.makeCSV",
"api.jamesMain.process",
"os.path.dirname",
"flask.make_response",
"api.jamesPreProcessing.separateSentences"
] | [((457, 472), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (462, 472), False, 'from flask import Flask, request, make_response, Response\n'), ((480, 489), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (484, 489), False, 'from flask_cors import CORS\n'), ((189, 214), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (204, 214), False, 'import os\n'), ((4123, 4158), 'json.loads', 'json.loads', (["request.form['results']"], {}), "(request.form['results'])\n", (4133, 4158), False, 'import json\n'), ((4176, 4216), 'json.loads', 'json.loads', (["request.form['hiddenTopics']"], {}), "(request.form['hiddenTopics'])\n", (4186, 4216), False, 'import json\n'), ((4415, 4439), 'api.jamesCSV.makeCSV', 'makeCSV', (['results', 'hidden'], {}), '(results, hidden)\n', (4422, 4439), False, 'from api.jamesCSV import makeCSV\n'), ((4489, 4508), 'flask.make_response', 'make_response', (['data'], {}), '(data)\n', (4502, 4508), False, 'from flask import Flask, request, make_response, Response\n'), ((778, 791), 'api.jamesClasses.inputCorpus', 'inputCorpus', ([], {}), '()\n', (789, 791), False, 'from api.jamesClasses import inputCorpus\n'), ((3279, 3320), 'api.jamesMain.process', 'process', (['corpus', 'numTopics', 'datasetChoice'], {}), '(corpus, numTopics, datasetChoice)\n', (3286, 3320), False, 'from api.jamesMain import process\n'), ((3516, 3535), 'json.dumps', 'json.dumps', (['results'], {}), '(results)\n', (3526, 3535), False, 'import json\n'), ((2279, 2306), 'api.jamesPreProcessing.separateSentences', 'separateSentences', (['contents'], {}), '(contents)\n', (2296, 2306), False, 'from api.jamesPreProcessing import separateSentences\n'), ((2086, 2109), 'flask.request.files.get', 'request.files.get', (['file'], {}), '(file)\n', (2103, 2109), False, 'from flask import Flask, request, make_response, Response\n'), ((1653, 1676), 'flask.request.files.get', 'request.files.get', (['file'], {}), '(file)\n', (1670, 1676), False, 'from flask 
import Flask, request, make_response, Response\n'), ((1354, 1377), 'flask.request.files.get', 'request.files.get', (['file'], {}), '(file)\n', (1371, 1377), False, 'from flask import Flask, request, make_response, Response\n')] |
from os import name
from django.shortcuts import render,redirect
# Create your views here.
# from django.contrib.auth.forms import UserCreationForm
from django.contrib import messages
from .forms import UserRegisterForm,UserUpdateForm,ProfileUpdateForm
from django.contrib.auth.decorators import login_required
# from .models import Profile
from .models import *
def home(request):
return render(request,'neighapp/base.html')
def register(request):
if request.method == 'POST':
# if form as method post and there is data posted return
form=UserRegisterForm(request.POST)
if form.is_valid():
form.save()
username=form.cleaned_data.get('username')
messages.success(request,f'Account created for {username}!')
return redirect('profileacc')
else:
form=UserRegisterForm()
context={'form':form}
return render(request, 'neighapp/register.html',context)
@login_required
def profileacc(request):
Profile.objects.get_or_create(user=request.user)
if request.method == 'POST':
u_form=UserUpdateForm(request.POST,instance=request.user)
p_form=ProfileUpdateForm(request.POST,request.FILES,instance=request.user.profile)
if u_form.is_valid()and p_form.is_valid():
u_form.save()
p_form.save()
messages.success(request,f'Your account has been updated')
return redirect('index')
else:
u_form=UserUpdateForm(instance=request.user)
p_form=ProfileUpdateForm(instance=request.user)
context={
'u_form':u_form,
'p_form':p_form
}
return render(request,'neighapp/profile.html',context)
#
@login_required
def index(request):
current_user = request.user
profile = Profile.objects.filter(user_id=current_user.id).first()
if profile is None:
profile = Profile.objects.filter(
user_id=current_user.id).first()
posts = Post.objects.filter(user_id=current_user.id)
locations = Location.objects.all()
neighbourhood = Neighbourhood.objects.all()
category = Category.objects.all()
business = Business.objects.filter(user_id=current_user.id)
contacts = Contact.objects.filter(user_id=current_user.id)
return render(request, "neighapp/mainprofile.html", {"danger": "Update Profile ", "locations": locations, "neighbourhood": neighbourhood, "categories": category, "business": business, "contacts": contacts, "posts": posts})
else:
neighbourhood = profile.neighbourhood
posts = Post.objects.filter(neighbourhood=neighbourhood).order_by("-created_at")
return render(request, 'neighapp/index.html', {'posts': posts})
@login_required
def profile(request):
current_user = request.user
profile = Profile.objects.filter(
user_id=current_user.id).first()
posts = Post.objects.filter(user_id=current_user.id)
locations = Location.objects.all()
neighbourhood = Neighbourhood.objects.all()
category = Category.objects.all()
business = Business.objects.filter(user_id=current_user.id)
contacts = Contact.objects.filter(user_id=current_user.id)
return render(request, 'neighapp/mainprofile.html', {'profile': profile, 'posts': posts, 'locations': locations, 'neighbourhood': neighbourhood, 'categories': category, 'business': business, 'contacts': contacts})
@login_required
def update_profile(request):
if request.method == "POST":
current_user = request.user
first_name = request.POST["first_name"]
last_name = request.POST["last_name"]
username = request.POST["username"]
email = request.POST["email"]
name = request.POST["first_name"] + " " + request.POST["last_name"]
neighbourhood = request.POST["neighbourhood"]
location = request.POST["location"]
if location == "":
location = None
else:
location = Location.objects.get(name=location)
#
if neighbourhood == "":
neighbourhood = None
else:
neighbourhood = Neighbourhood.objects.get(name=neighbourhood)
profile_image = request.FILES["profile_pic"]
profile_image = cloudinary.uploader.upload(profile_image)
profile_url = profile_image["url"]
user = User.objects.get(id=current_user.id)
if Profile.objects.filter(user_id=current_user.id).exists():
profile = Profile.objects.get(user_id=current_user.id)
profile.profile_pic = profile_url
profile.neighbourhood = neighbourhood
profile.location = location
profile.save()
else:
profile = Profile(
user_id=current_user.id,
name=name,
profile_pic=profile_url,
neighbourhood=neighbourhood,
location=location,
)
profile.save_profile()
user.first_name = first_name
user.last_name = last_name
user.username = username
user.email = email
user.save()
return redirect("/profile", {"success": "Profile Updated Successfully"})
else:
return render(request, "neighapp/profile.html", {"danger": "Update Failed"})
@login_required
def create_post(request):
if request.method == "POST":
current_user = request.user
title = request.POST["title"]
content = request.POST["content"]
category = request.POST["category"]
location = request.POST["location"]
profile = Profile.objects.filter(user_id=current_user.id).first()
if profile is None:
profile = Profile.objects.filter(
user_id=current_user.id).first()
posts = Post.objects.filter(user_id=current_user.id)
locations = Location.objects.all()
neighbourhood = Neighbourhood.objects.all()
category = Category.objects.all()
business = Business.objects.filter(user_id=current_user.id)
contacts = Contact.objects.filter(user_id=current_user.id)
return render(request, "neighapp/profile.html", {"danger": "Update Profile", "locations": locations, "neighbourhood": neighbourhood, "categories": category, "business": business, "contacts": contacts, "posts": posts})
else:
neighbourhood = profile.neighbourhood
if category == "":
category = None
else:
category = Category.objects.get(name=category)
if location == "":
location = None
else:
location = Location.objects.get(name=location)
if request.FILES:
image = request.FILES["image"]
#
image = cloudinary.uploader.upload(
image, crop="limit", width=800, height=600)
image_url = image["url"]
post = Post(
user_id=current_user.id,
title=title,
content=content,
category=category,
location=location,
image=image_url,
neighbourhood=neighbourhood,
)
post.create_post()
return redirect("/profile", {"success": "Post Created Successfully"})
else:
post = Post(
user_id=current_user.id,
title=title,
content=content,
category=category,
location=location,
neighbourhood=neighbourhood,
)
post.create_post()
return redirect("/profile", {"success": "Post Created Successfully"})
else:
return render(request, "profile.html", {"danger": "Post Creation Failed"})
# create business
@login_required
def create_business(request):
if request.method == "POST":
current_user = request.user
name = request.POST["name"]
email = request.POST["email"]
profile = Profile.objects.filter(user_id=current_user.id).first()
if profile is None:
profile = Profile.objects.filter(
user_id=current_user.id).first()
posts = Post.objects.filter(user_id=current_user.id)
locations = Location.objects.all()
neighbourhood = Neighbourhood.objects.all()
category = Category.objects.all()
business = Business.objects.filter(user_id=current_user.id)
contacts = Contact.objects.filter(user_id=current_user.id)
return render(request, "neighapp/profile.html", {"danger": "Update Profile", "locations": locations, "neighbourhood": neighbourhood, "categories": category, "business": business, "contacts": contacts, "posts": posts})
else:
neighbourhood = profile.neighbourhood
if neighbourhood == "":
neighbourhood = None
else:
neighbourhood = Neighbourhood.objects.get(name=neighbourhood)
business = Business(
user_id=current_user.id,
name=name,
email=email,
neighbourhood=neighbourhood,
)
business.create_business()
return redirect("/profile", {"success": "Business Created Successfully"})
else:
return render(request, "neighapp/profile.html", {"danger": "Failed"})
@login_required
def create_contact(request):
if request.method == "POST":
current_user = request.user
name = request.POST["name"]
email = request.POST["email"]
phone = request.POST["phone"]
profile = Profile.objects.filter(user_id=current_user.id).first()
if profile is None:
profile = Profile.objects.filter(
user_id=current_user.id).first()
posts = Post.objects.filter(user_id=current_user.id)
locations = Location.objects.all()
neighbourhood = Neighbourhood.objects.all()
category = Category.objects.all()
business = Business.objects.filter(user_id=current_user.id)
contacts = Contact.objects.filter(user_id=current_user.id)
return render(request, "neighapp/profile.html", {"danger": "Update Profile ", "locations": locations, "neighbourhood": neighbourhood, "categories": category, "business": business, "contacts": contacts, "posts": posts})
else:
neighbourhood = profile.neighbourhood
if neighbourhood == "":
neighbourhood = None
else:
neighbourhood = Neighbourhood.objects.get(name=neighbourhood)
contact = Contact(
user_id=current_user.id,
name=name,
email=email,
phone=phone,
neighbourhood=neighbourhood,
)
contact.create_contact()
return redirect("/profile", {"success": "Contact Created Successfully"})
else:
return render(request, "neighapp/profile.html", {"danger": "Contact Creation Failed"})
@login_required
def get_business(request):
current_user = request.user
profile = Profile.objects.filter(user_id=current_user.id).first()
if profile is None:
profile = Profile.objects.filter(
user_id=current_user.id).first()
posts = Post.objects.filter(user_id=current_user.id)
locations = Location.objects.all()
business = Business.objects.filter(user_id=current_user.id)
contacts = Contact.objects.filter(user_id=current_user.id)
neighbourhood = Neighbourhood.objects.all()
category = Category.objects.all()
return render(request, "neighapp/profile.html", {"danger": "Update Profile ", "locations": locations, "neighbourhood": neighbourhood, "categories": category, "business": business, "contacts": contacts, "posts": posts})
else:
neighbourhood = profile.neighbourhood
business = Business.objects.filter(
neighbourhood=profile.neighbourhood)
return render(request, "neighapp/business.html", {"business": business})
@login_required
def get_contact(request):
current_user = request.user
profile = Profile.objects.filter(user_id=current_user.id).first()
if profile is None:
profile = Profile.objects.filter(
user_id=current_user.id).first()
posts = Post.objects.filter(user_id=current_user.id)
locations = Location.objects.all()
business = Business.objects.filter(user_id=current_user.id)
contacts = Contact.objects.filter(user_id=current_user.id)
neighbourhood = Neighbourhood.objects.all()
category = Category.objects.all()
return render(request, "neighapp/profile.html", {"danger": "Update Profile ", "locations": locations, "neighbourhood": neighbourhood, "categories": category, "business": business, "contacts": contacts, "posts": posts})
else:
neighbourhood = profile.neighbourhood
contacts = Contact.objects.filter(neighbourhood=profile.neighbourhood).order_by("-created_at")
return render(request, "neighapp/contacts.html", {"contacts": contacts, "neighbourhood": profile.neighbourhood})
@login_required
def search(request):
if 'search_term' in request.GET and request.GET["search_term"]:
search_term = request.GET.get("search_term")
searched_business = Business.objects.filter(name__icontains=search_term)
message = f"Search For: {search_term}"
return render(request, "neighapp/search.html", {"message": message, "business": searched_business})
else:
message = "You haven't searched for any term"
return render(request, "neighapp/search.html", {"message": message})
| [
"django.shortcuts.render",
"django.shortcuts.redirect",
"django.contrib.messages.success"
] | [((398, 435), 'django.shortcuts.render', 'render', (['request', '"""neighapp/base.html"""'], {}), "(request, 'neighapp/base.html')\n", (404, 435), False, 'from django.shortcuts import render, redirect\n'), ((922, 972), 'django.shortcuts.render', 'render', (['request', '"""neighapp/register.html"""', 'context'], {}), "(request, 'neighapp/register.html', context)\n", (928, 972), False, 'from django.shortcuts import render, redirect\n'), ((1714, 1763), 'django.shortcuts.render', 'render', (['request', '"""neighapp/profile.html"""', 'context'], {}), "(request, 'neighapp/profile.html', context)\n", (1720, 1763), False, 'from django.shortcuts import render, redirect\n'), ((3274, 3488), 'django.shortcuts.render', 'render', (['request', '"""neighapp/mainprofile.html"""', "{'profile': profile, 'posts': posts, 'locations': locations,\n 'neighbourhood': neighbourhood, 'categories': category, 'business':\n business, 'contacts': contacts}"], {}), "(request, 'neighapp/mainprofile.html', {'profile': profile, 'posts':\n posts, 'locations': locations, 'neighbourhood': neighbourhood,\n 'categories': category, 'business': business, 'contacts': contacts})\n", (3280, 3488), False, 'from django.shortcuts import render, redirect\n'), ((2368, 2591), 'django.shortcuts.render', 'render', (['request', '"""neighapp/mainprofile.html"""', "{'danger': 'Update Profile ', 'locations': locations, 'neighbourhood':\n neighbourhood, 'categories': category, 'business': business, 'contacts':\n contacts, 'posts': posts}"], {}), "(request, 'neighapp/mainprofile.html', {'danger': 'Update Profile ',\n 'locations': locations, 'neighbourhood': neighbourhood, 'categories':\n category, 'business': business, 'contacts': contacts, 'posts': posts})\n", (2374, 2591), False, 'from django.shortcuts import render, redirect\n'), ((2744, 2800), 'django.shortcuts.render', 'render', (['request', '"""neighapp/index.html"""', "{'posts': posts}"], {}), "(request, 'neighapp/index.html', {'posts': posts})\n", (2750, 2800), 
False, 'from django.shortcuts import render, redirect\n'), ((5234, 5299), 'django.shortcuts.redirect', 'redirect', (['"""/profile"""', "{'success': 'Profile Updated Successfully'}"], {}), "('/profile', {'success': 'Profile Updated Successfully'})\n", (5242, 5299), False, 'from django.shortcuts import render, redirect\n'), ((5326, 5395), 'django.shortcuts.render', 'render', (['request', '"""neighapp/profile.html"""', "{'danger': 'Update Failed'}"], {}), "(request, 'neighapp/profile.html', {'danger': 'Update Failed'})\n", (5332, 5395), False, 'from django.shortcuts import render, redirect\n'), ((7929, 7996), 'django.shortcuts.render', 'render', (['request', '"""profile.html"""', "{'danger': 'Post Creation Failed'}"], {}), "(request, 'profile.html', {'danger': 'Post Creation Failed'})\n", (7935, 7996), False, 'from django.shortcuts import render, redirect\n'), ((9424, 9490), 'django.shortcuts.redirect', 'redirect', (['"""/profile"""', "{'success': 'Business Created Successfully'}"], {}), "('/profile', {'success': 'Business Created Successfully'})\n", (9432, 9490), False, 'from django.shortcuts import render, redirect\n'), ((9516, 9578), 'django.shortcuts.render', 'render', (['request', '"""neighapp/profile.html"""', "{'danger': 'Failed'}"], {}), "(request, 'neighapp/profile.html', {'danger': 'Failed'})\n", (9522, 9578), False, 'from django.shortcuts import render, redirect\n'), ((11050, 11115), 'django.shortcuts.redirect', 'redirect', (['"""/profile"""', "{'success': 'Contact Created Successfully'}"], {}), "('/profile', {'success': 'Contact Created Successfully'})\n", (11058, 11115), False, 'from django.shortcuts import render, redirect\n'), ((11141, 11220), 'django.shortcuts.render', 'render', (['request', '"""neighapp/profile.html"""', "{'danger': 'Contact Creation Failed'}"], {}), "(request, 'neighapp/profile.html', {'danger': 'Contact Creation Failed'})\n", (11147, 11220), False, 'from django.shortcuts import render, redirect\n'), ((11836, 12055), 
'django.shortcuts.render', 'render', (['request', '"""neighapp/profile.html"""', "{'danger': 'Update Profile ', 'locations': locations, 'neighbourhood':\n neighbourhood, 'categories': category, 'business': business, 'contacts':\n contacts, 'posts': posts}"], {}), "(request, 'neighapp/profile.html', {'danger': 'Update Profile ',\n 'locations': locations, 'neighbourhood': neighbourhood, 'categories':\n category, 'business': business, 'contacts': contacts, 'posts': posts})\n", (11842, 12055), False, 'from django.shortcuts import render, redirect\n'), ((12212, 12277), 'django.shortcuts.render', 'render', (['request', '"""neighapp/business.html"""', "{'business': business}"], {}), "(request, 'neighapp/business.html', {'business': business})\n", (12218, 12277), False, 'from django.shortcuts import render, redirect\n'), ((12893, 13112), 'django.shortcuts.render', 'render', (['request', '"""neighapp/profile.html"""', "{'danger': 'Update Profile ', 'locations': locations, 'neighbourhood':\n neighbourhood, 'categories': category, 'business': business, 'contacts':\n contacts, 'posts': posts}"], {}), "(request, 'neighapp/profile.html', {'danger': 'Update Profile ',\n 'locations': locations, 'neighbourhood': neighbourhood, 'categories':\n category, 'business': business, 'contacts': contacts, 'posts': posts})\n", (12899, 13112), False, 'from django.shortcuts import render, redirect\n'), ((13279, 13388), 'django.shortcuts.render', 'render', (['request', '"""neighapp/contacts.html"""', "{'contacts': contacts, 'neighbourhood': profile.neighbourhood}"], {}), "(request, 'neighapp/contacts.html', {'contacts': contacts,\n 'neighbourhood': profile.neighbourhood})\n", (13285, 13388), False, 'from django.shortcuts import render, redirect\n'), ((13690, 13786), 'django.shortcuts.render', 'render', (['request', '"""neighapp/search.html"""', "{'message': message, 'business': searched_business}"], {}), "(request, 'neighapp/search.html', {'message': message, 'business':\n 
searched_business})\n", (13696, 13786), False, 'from django.shortcuts import render, redirect\n'), ((13862, 13923), 'django.shortcuts.render', 'render', (['request', '"""neighapp/search.html"""', "{'message': message}"], {}), "(request, 'neighapp/search.html', {'message': message})\n", (13868, 13923), False, 'from django.shortcuts import render, redirect\n'), ((726, 787), 'django.contrib.messages.success', 'messages.success', (['request', 'f"""Account created for {username}!"""'], {}), "(request, f'Account created for {username}!')\n", (742, 787), False, 'from django.contrib import messages\n'), ((806, 828), 'django.shortcuts.redirect', 'redirect', (['"""profileacc"""'], {}), "('profileacc')\n", (814, 828), False, 'from django.shortcuts import render, redirect\n'), ((1382, 1441), 'django.contrib.messages.success', 'messages.success', (['request', 'f"""Your account has been updated"""'], {}), "(request, f'Your account has been updated')\n", (1398, 1441), False, 'from django.contrib import messages\n'), ((1460, 1477), 'django.shortcuts.redirect', 'redirect', (['"""index"""'], {}), "('index')\n", (1468, 1477), False, 'from django.shortcuts import render, redirect\n'), ((6302, 6520), 'django.shortcuts.render', 'render', (['request', '"""neighapp/profile.html"""', "{'danger': 'Update Profile', 'locations': locations, 'neighbourhood':\n neighbourhood, 'categories': category, 'business': business, 'contacts':\n contacts, 'posts': posts}"], {}), "(request, 'neighapp/profile.html', {'danger': 'Update Profile',\n 'locations': locations, 'neighbourhood': neighbourhood, 'categories':\n category, 'business': business, 'contacts': contacts, 'posts': posts})\n", (6308, 6520), False, 'from django.shortcuts import render, redirect\n'), ((7455, 7517), 'django.shortcuts.redirect', 'redirect', (['"""/profile"""', "{'success': 'Post Created Successfully'}"], {}), "('/profile', {'success': 'Post Created Successfully'})\n", (7463, 7517), False, 'from django.shortcuts import render, 
redirect\n'), ((7840, 7902), 'django.shortcuts.redirect', 'redirect', (['"""/profile"""', "{'success': 'Post Created Successfully'}"], {}), "('/profile', {'success': 'Post Created Successfully'})\n", (7848, 7902), False, 'from django.shortcuts import render, redirect\n'), ((8780, 8998), 'django.shortcuts.render', 'render', (['request', '"""neighapp/profile.html"""', "{'danger': 'Update Profile', 'locations': locations, 'neighbourhood':\n neighbourhood, 'categories': category, 'business': business, 'contacts':\n contacts, 'posts': posts}"], {}), "(request, 'neighapp/profile.html', {'danger': 'Update Profile',\n 'locations': locations, 'neighbourhood': neighbourhood, 'categories':\n category, 'business': business, 'contacts': contacts, 'posts': posts})\n", (8786, 8998), False, 'from django.shortcuts import render, redirect\n'), ((10383, 10602), 'django.shortcuts.render', 'render', (['request', '"""neighapp/profile.html"""', "{'danger': 'Update Profile ', 'locations': locations, 'neighbourhood':\n neighbourhood, 'categories': category, 'business': business, 'contacts':\n contacts, 'posts': posts}"], {}), "(request, 'neighapp/profile.html', {'danger': 'Update Profile ',\n 'locations': locations, 'neighbourhood': neighbourhood, 'categories':\n category, 'business': business, 'contacts': contacts, 'posts': posts})\n", (10389, 10602), False, 'from django.shortcuts import render, redirect\n')] |
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import animation
import seaborn as sns
import numpy as np
import cmocean
import os
from mpl_toolkits.axes_grid1 import AxesGrid
from mpl_toolkits.axes_grid1 import make_axes_locatable
import scipy
import scipy.ndimage
from scipy.stats import norm
import matplotlib.image as mpimg
class Plotter():
def __init__(self, dic_data, deck, data_modes,
plot_deltas = False):
self.zz = deck.targetplot
plot_contour_linear = deck.doc["Plots"]["Contour Plots"]["Linear"]["Plot_it"]
plot_contour_log = deck.doc["Plots"]["Contour Plots"]["Log"]["Plot_it"]
plot_quiver = deck.doc["Plots"]["Quiver"]["Plot_it"]
plot_streamplots = deck.doc["Plots"]["Streamplots"]["Plot_it"]
gif_heatmaps = deck.doc["Plots"]["Heatmaps"]["Gif_it"]
gif_contourlin = deck.doc["Plots"]["Contour Plots"]["Linear"]["Gif_it"]
gif_contourlog = deck.doc["Plots"]["Contour Plots"]["Log"]["Gif_it"]
for self.index, dic_image in enumerate(dic_data.dataframe):
index = self.index
if plot_contour_linear.lower() == "true":
self.create_contourplot_linear(dic_data.dic_paths[index], dic_image, deck, data_modes)
if plot_contour_log.lower() == "true":
self.create_contourplot_log(dic_data.dic_paths[index], dic_image, deck, data_modes)
if plot_quiver.lower() == "true":
self.create_quiver(dic_data.dic_paths[index], dic_image, deck)
if plot_streamplots.lower() == "true":
self.create_streamplot(dic_data.dic_paths[index], dic_image, deck)
# Do we really need this ?
self.plot_dataset(dic_data.dic_paths[index], dic_image, deck)
if plot_deltas == True:
if index == 0:
pass
else:
self.plot_deltas(dic_data.dic_paths[index], dic_image, deck)
if deck.plot_heatmaps.lower() == "true":
for index2, gdf in enumerate(data_modes.grouped):
if index == index2:
self.build_deltaheatmaps(dic_data.dic_paths[index], gdf, deck, data_modes.scale_min, data_modes.scale_max)
if gif_heatmaps == "true":
self.create_heatmaps_gif(data_modes.grouped, deck, data_modes.scale_min, data_modes.scale_max)
if gif_contourlin.lower() == "true":
self.create_contourplotlin_gif(dic_data.dataframe, deck, data_modes, dic_data.dic_paths)
if gif_contourlog.lower() == "true":
self.create_contourplotlog_gif(dic_data.dataframe, deck, data_modes, dic_data.dic_paths)
def filter_NaN_Matrix(self, U, sigVal):
#Fonction pour limiter la propagation des NaNs dans le filtre gaussien lissant l'image
V=U.copy()
V[np.isnan(U)]=0
VV=scipy.ndimage.gaussian_filter(V,sigma=sigVal)
W=0*U.copy()+1
W[np.isnan(U)]=0
WW=scipy.ndimage.gaussian_filter(W,sigma=sigVal)
np.seterr(divide='ignore', invalid='ignore') #enleve le pb de division /0
Z=VV/WW
return Z
def create_contourplot_log(self, file_name, df, deck, data_modes):
x = list(sorted(set( df["x"].values )))
y = list(sorted(set( df["y"].values )))
img_name = file_name[0 : len(file_name) -10] + '.tif'
img = plt.imread(img_name)
fig, ax = plt.subplots(dpi=300,)
ax.imshow(img, alpha = 1, cmap = 'gray')
df.loc[df["sigma"] == -1, deck.doc["Plots"]['Target Plot'] ] = np.nan
e1 = np.array(df[deck.doc["Plots"]['Target Plot']].values)
e1 = e1.reshape(len(y), len(x))
levels = np.sort(np.append( np.append( -np.logspace(0.1, abs(data_modes.vmin_0),10) , np.linspace(-0.01,0.01,5) ), np.logspace(0.1,data_modes.vmax_0,15)))
ax.contour(x, y, e1, colors = 'k', linewidths = 0.5, levels = levels)
pcm = ax.pcolormesh(x,y,e1,norm=matplotlib.colors.SymLogNorm(linthresh=0.001, linscale=0.1, vmin=data_modes.vmin_0, vmax=data_modes.vmax_0),
cmap='plasma')
fig.colorbar(pcm, ax=ax, extend = 'both')
plt.title(deck.doc["Plots"]['Target Plot']+", "+str(self.index))
plot_dir = "./plots/"
check_folder = os.path.isdir(plot_dir)
if not check_folder:
os.makedirs(plot_dir)
plt.savefig("./plots/"+self.zz.strip('"')+"-"+file_name[:-4]+"-contourplot-log"+".png")
plt.close()
def create_contourplot_linear(self, file_name, df, deck, data_modes):
x = list(sorted(set( df["x"].values )))
y = list(sorted(set( df["y"].values )))
img_name = file_name[0 : len(file_name) -10] + '.tif'
img = plt.imread(img_name)
fig, ax = plt.subplots(dpi=300,)
ax.imshow(img, alpha = 1, cmap = 'gray')
df.loc[df["sigma"] == -1, deck.doc["Plots"]['Target Plot'] ] = np.nan
e1 = np.array(df[deck.doc["Plots"]['Target Plot']].values)
e1 = e1.reshape(len(y), len(x))
levels = np.linspace(data_modes.vmin_0, data_modes.vmax_0,10)
cs = plt.contourf(x, y, e1, origin = 'lower', extend = 'both', cmap = 'plasma', alpha = 0.5)
plt.contour(x, y, e1, levels = levels, colors = 'k', linewidths = 0.5)
fig.colorbar(cs)
plt.title(deck.doc["Plots"]['Target Plot']+", "+str(self.index))
plot_dir = "./plots/"
check_folder = os.path.isdir(plot_dir)
if not check_folder:
os.makedirs(plot_dir)
plt.savefig("./plots/"+self.zz.strip('"')+"-"+file_name[:-4]+"-contourplot-linear"+".png")
plt.close()
def create_quiver(self, file_name, df, deck):
x = list(sorted(set( df["x"].values )))
y = list(sorted(set( df["y"].values )))
df.loc[df["sigma"] == -1, "gamma" ] = np.nan
self.teta_ = np.array(df["gamma"].values)
teta_1 = np.cos(self.teta_)
self.teta_1 = teta_1.reshape(len(y), len(x))
teta_2 = np.sin(self.teta_)
self.teta_2 = teta_2.reshape(len(y), len(x))
contour_ = np.array(df[self.zz].values)
self.contour_ = contour_.reshape((len(y), len(x)))
img_name = file_name[0 : len(file_name) -10] + '.tif'
img = plt.imread(img_name)
fig, ax = plt.subplots(dpi=300)
ax.imshow(img, cmap = plt.get_cmap('gray'), alpha = 1)
skip1 = ( slice(None, None, 20))
skip2 = ( slice(None, None, 20), slice(None, None,20) )
tf1 = self.filter_NaN_Matrix(np.array(self.teta_1),7)
tf2 = self.filter_NaN_Matrix(np.array(self.teta_2),7)
contourf = self.filter_NaN_Matrix(np.array(self.contour_),7)
plt.quiver(np.array(x[skip1]),np.array(y[skip1]),tf1[skip2], tf2[skip2], contourf[skip2], cmap='plasma', scale = 50)
plt.colorbar()
plt.title(deck.doc["Plots"]['Target Plot']+", "+str(self.index))
plot_dir = "./plots/"
check_folder = os.path.isdir(plot_dir)
if not check_folder:
os.makedirs(plot_dir)
plt.savefig("./plots/"+self.zz.strip('"')+"-"+file_name[:-4]+"-quiver"+".png")
plt.close()
def create_streamplot(self, file_name, df, deck):
x = list(sorted(set( df["x"].values )))
y = list(sorted(set( df["y"].values )))
img_name = file_name[0 : len(file_name) -10] + '.tif'
img = plt.imread(img_name)
fig, ax = plt.subplots(dpi=300)
ax.imshow(img, cmap = plt.get_cmap('gray'), alpha = 1)
tf1 = self.filter_NaN_Matrix(np.array(self.teta_1),7)
tf2 = self.filter_NaN_Matrix(np.array(self.teta_2),7)
contourf = self.filter_NaN_Matrix(np.array(self.contour_),7)
fig = plt.streamplot(np.array(x), np.array(y), tf1, tf2,
color=contourf,
linewidth=1,
cmap='plasma',
density=1.3,
arrowsize=0.5)
plt.title(deck.doc["Plots"]['Target Plot']+", "+str(self.index))
plt.colorbar()
plot_dir = "./plots/"
check_folder = os.path.isdir(plot_dir)
if not check_folder:
os.makedirs(plot_dir)
plt.savefig("./plots/"+self.zz.strip('"')+"-"+file_name[:-4]+"-stream"+".png")
plt.close()
def plot_dataset(self, file_name, df, deck):
df = df.sort_index(axis=1, level='"x"', ascending=False)
x = list(sorted(set( df["x"].values )))
y = list(sorted(set( df["y"].values )))
df.loc[df["sigma"] == -1, deck.doc["Plots"]['Target Plot'] ] = np.nan
zv = 100*(df[deck.doc["Plots"]['Target Plot']].values)
zv = zv.reshape((len(y), len(x)))
fig = plt.contour(x, y, zv, levels=8, linewidths=0.4, colors="black")
cs = plt.contourf(x, y, zv, origin = 'lower', extend = 'both', cmap = 'plasma', alpha = 0.5)
cbar = plt.colorbar(cs)
cbar.ax.set_xlabel('Strain (%)')
plt.title(deck.doc["Plots"]['Target Plot'])
plt.clabel(fig, inline=0.1, fontsize=5)
plt.legend()
plot_dir = "./plots/"
check_folder = os.path.isdir(plot_dir)
if not check_folder:
os.makedirs(plot_dir)
plt.savefig("./plots/"+self.zz.strip('"')+"-"+file_name[:-3]+"_contour.png")
plt.close()
def plot_deltas(self, file_name, df, deck):
df = df.sort_index(axis=1, level='"x"', ascending=False)
x = list(sorted(set( df["x"].values )))
y = list(sorted(set( df["y"].values )))
df.loc[df["sigma"] == -1, deck.plot_inccontour_target ] = np.nan
zv = 100*(df[deck.plot_inccontour_target].values)
fig = plt.contour(x, y, zv, levels=8, linewidths=0.4, colors="black")
cs = plt.contourf(x, y, zv, origin = 'lower', extend = 'both', cmap = 'plasma', alpha = 0.5)
cbar = plt.colorbar(cs)
cbar.ax.set_xlabel('Strain (%)')
plt.title(deck.plot_inccontour_target)
plt.clabel(fig, inline=0.1, fontsize=5)
plt.legend()
plot_dir = "./plots/"
check_folder = os.path.isdir(plot_dir)
if not check_folder:
os.makedirs(plot_dir)
plt.savefig("./plots/"+self.zz.strip('"')+"-"+file_name[:-4]+"_deltas"+".png")
plt.close()
def build_deltaheatmaps(self, file_name, df, deck, vmin, vmax):
'''
Plots a heatmap for each image with delta variations over the x and y splitting regions
df = pandas data frame with set index, one column and target values.
'''
df = df.pivot('region_y', 'region_x', deck.target)
#df = df.sort_index(ascending=False)
fig, ax = plt.subplots(figsize=(9,6))
sns.set()
# bug of matplotlib 3.1 forces to manually set ylim to avoid cut-off top and bottom
# might remove this later
sns.heatmap(df, linewidths= .5, vmin = float(vmin), vmax = float(vmax), annot = True, annot_kws={"size": 9}, cmap = cmocean.cm.curl, ax = ax)
ax.set_ylim(len(df), 0)
plot_dir = "./plots/"
check_folder = os.path.isdir(plot_dir)
if not check_folder:
os.makedirs(plot_dir)
fig.savefig( "./plots/"+self.zz.strip('"')+"-"+file_name[:-4]+"_heatmap"+".png")
plt.close()
    def create_heatmaps_gif(self, dfs, deck, vmin, vmax):
        """Animate one region heatmap per DataFrame and save the result as
        ./plots/heatmaps.gif.

        Arguments:
            dfs: sequence of DataFrames, one per frame, each pivotable by
                'region_y'/'region_x' with the configured target column.
            deck: input deck naming the target column to plot.
            vmin, vmax: shared colour-scale bounds for every frame.
        """
        #set base plotting space
        fig = plt.figure(figsize=(9,6))
        # create iterator shared by all frame callbacks below
        data_frames_iterator = iter(dfs)
        # set up formatting of the gif later
        writer='matplotlib.animation.PillowWriter'
        #'imagemagick'
        # Per-frame callback for FuncAnimation: consumes the NEXT DataFrame
        # from the shared iterator (the frame index i itself is unused).
        def update_frame(i):
            plt.clf()
            heatmap_data = next(data_frames_iterator)
            heatmap_data = heatmap_data.pivot('region_y', 'region_x', deck.doc["Plots"]["Incremental Contour"]["Target Plot"])
            ax = sns.heatmap(heatmap_data,
                        linewidths= 0,
                        vmin = float(vmin),
                        vmax = float(vmax),
                        annot = True,
                        annot_kws={"size": 9},
                        cmap = "YlGnBu",
                        )
            #need to manually set y_lim to avoid cropping of top and bottom cells
            ax.set_ylim(heatmap_data.shape[0], 0)
        # frames=len(dfs)-1 keeps the animation from exhausting the iterator.
        animation.FuncAnimation(fig, update_frame, frames=len(dfs)-1, interval=400).save('./plots/heatmaps.gif', writer = writer)
    def create_contourplotlin_gif(self, dfs, deck, data_modes, filenames):
        """Animate linearly-scaled pcolormesh/contour frames over the source
        images and save the result as ./plots/contourplotlin.gif.

        Arguments:
            dfs: one DataFrame per frame with "x", "y", "sigma" and the
                target column.
            deck: input deck; deck.doc["Plots"]['Target Plot'] names the
                column to plot.
            data_modes: supplies the shared vmin_0/vmax_0 colour bounds.
            filenames: per-frame result file names; each frame's .tif image
                shares the stem (last 10 characters dropped).
        """
        #set base plotting space
        fig, ax = plt.subplots(dpi=200, figsize=(12,10))
        # Grid coordinates come from the first frame and are reused for all.
        x = list(sorted(set( dfs[0]["x"].values )))
        y = list(sorted(set( dfs[0]["y"].values )))
        # create iterator shared by all frame callbacks below
        data_frames_iterator = iter(dfs)
        # set up formatting of the gif later
        writer='matplotlib.animation.PillowWriter'
        # Per-frame callback: draws image i, then overlays the NEXT DataFrame
        # pulled from the shared iterator.
        def update_frame_log(i):
            plt.clf()
            img_name = filenames[i][0 : len(filenames[i]) -10] + '.tif'
            img = plt.imread(img_name)
            plt.imshow(img, alpha = 1, cmap = 'gray')
            df = next(data_frames_iterator)
            # Mask points flagged invalid by the correlation (sigma == -1).
            df.loc[df["sigma"] == -1, deck.doc["Plots"]['Target Plot'] ] = np.nan
            e1 = np.array(df[deck.doc["Plots"]['Target Plot']].values)
            e1 = e1.reshape(len(y), len(x))
            # 20 linear contour levels between the shared bounds.
            levels = np.sort(np.linspace(data_modes.vmin_0, data_modes.vmax_0,20))
            cont = plt.pcolormesh(x,y,e1,vmin=data_modes.vmin_0, vmax=data_modes.vmax_0,cmap='plasma')
            plt.contour(x, y, e1, levels = levels, colors = 'k', linewidths = 0.5)
            plt.colorbar(cont)
            return cont
        animation.FuncAnimation(fig, update_frame_log, frames=len(dfs)-1, interval=600).save('./plots/contourplotlin.gif', writer = writer)
    def create_contourplotlog_gif(self, dfs, deck, data_modes, filenames):
        """Animate symmetric-log-scaled pcolormesh/contour frames over the
        source images and save the result as ./plots/contourplotlog.gif.

        Same arguments as create_contourplotlin_gif; this variant uses a
        SymLogNorm colour scale and log-spaced contour levels so that data
        spanning positive and negative decades stays readable.
        """
        #set base plotting space
        fig, ax = plt.subplots(dpi=92, figsize=(12,10))
        # Grid coordinates come from the first frame and are reused for all.
        x = list(sorted(set( dfs[0]["x"].values )))
        y = list(sorted(set( dfs[0]["y"].values )))
        # create iterator shared by all frame callbacks below
        data_frames_iterator = iter(dfs)
        # set up formatting of the gif later
        writer='matplotlib.animation.PillowWriter'
        # Per-frame callback: draws image i, then overlays the NEXT DataFrame
        # pulled from the shared iterator.
        def update_frame_log(i):
            plt.clf()
            img_name = filenames[i][0 : len(filenames[i]) -10] + '.tif'
            img = plt.imread(img_name)
            plt.imshow(img, alpha = 1, cmap = 'gray')
            df = next(data_frames_iterator)
            # Mask points flagged invalid by the correlation (sigma == -1).
            df.loc[df["sigma"] == -1, deck.doc["Plots"]['Target Plot'] ] = np.nan
            e1 = np.array(df[deck.doc["Plots"]['Target Plot']].values)
            e1 = e1.reshape(len(y), len(x))
            # Contour levels: log-spaced on both sides of zero plus a few
            # linear levels across the near-zero band.
            levels = np.sort(np.append( np.append( -np.logspace(0.1, abs(data_modes.vmin_0),10) , np.linspace(-0.01,0.01,5) ), np.logspace(0.1,data_modes.vmax_0,15)))
            # SymLogNorm: linear within +/-linthresh, logarithmic outside.
            cont = plt.pcolormesh(x,y,e1,norm=matplotlib.colors.SymLogNorm(linthresh=0.001, linscale=0.1, vmin=data_modes.vmin_0, vmax=data_modes.vmax_0), vmin=data_modes.vmin_0, vmax=data_modes.vmax_0,cmap='plasma')
            plt.contour(x, y, e1, levels = levels, colors = 'k', linewidths = 0.5)
            plt.colorbar(cont)
            return cont
        animation.FuncAnimation(fig, update_frame_log, frames=len(dfs)-1, interval=600).save('./plots/contourplotlog.gif', writer = writer)
| [
"matplotlib.pyplot.pcolormesh",
"numpy.array",
"scipy.ndimage.gaussian_filter",
"numpy.sin",
"matplotlib.pyplot.imshow",
"matplotlib.pyplot.contourf",
"seaborn.set",
"matplotlib.pyplot.close",
"matplotlib.pyplot.contour",
"numpy.linspace",
"os.path.isdir",
"matplotlib.pyplot.clabel",
"numpy.... | [((2947, 2993), 'scipy.ndimage.gaussian_filter', 'scipy.ndimage.gaussian_filter', (['V'], {'sigma': 'sigVal'}), '(V, sigma=sigVal)\n', (2976, 2993), False, 'import scipy\n'), ((3053, 3099), 'scipy.ndimage.gaussian_filter', 'scipy.ndimage.gaussian_filter', (['W'], {'sigma': 'sigVal'}), '(W, sigma=sigVal)\n', (3082, 3099), False, 'import scipy\n'), ((3108, 3152), 'numpy.seterr', 'np.seterr', ([], {'divide': '"""ignore"""', 'invalid': '"""ignore"""'}), "(divide='ignore', invalid='ignore')\n", (3117, 3152), True, 'import numpy as np\n'), ((3470, 3490), 'matplotlib.pyplot.imread', 'plt.imread', (['img_name'], {}), '(img_name)\n', (3480, 3490), True, 'import matplotlib.pyplot as plt\n'), ((3509, 3530), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'dpi': '(300)'}), '(dpi=300)\n', (3521, 3530), True, 'import matplotlib.pyplot as plt\n'), ((3681, 3734), 'numpy.array', 'np.array', (["df[deck.doc['Plots']['Target Plot']].values"], {}), "(df[deck.doc['Plots']['Target Plot']].values)\n", (3689, 3734), True, 'import numpy as np\n'), ((4372, 4395), 'os.path.isdir', 'os.path.isdir', (['plot_dir'], {}), '(plot_dir)\n', (4385, 4395), False, 'import os\n'), ((4565, 4576), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (4574, 4576), True, 'import matplotlib.pyplot as plt\n'), ((4858, 4878), 'matplotlib.pyplot.imread', 'plt.imread', (['img_name'], {}), '(img_name)\n', (4868, 4878), True, 'import matplotlib.pyplot as plt\n'), ((4897, 4918), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'dpi': '(300)'}), '(dpi=300)\n', (4909, 4918), True, 'import matplotlib.pyplot as plt\n'), ((5063, 5116), 'numpy.array', 'np.array', (["df[deck.doc['Plots']['Target Plot']].values"], {}), "(df[deck.doc['Plots']['Target Plot']].values)\n", (5071, 5116), True, 'import numpy as np\n'), ((5174, 5227), 'numpy.linspace', 'np.linspace', (['data_modes.vmin_0', 'data_modes.vmax_0', '(10)'], {}), '(data_modes.vmin_0, data_modes.vmax_0, 10)\n', (5185, 5227), True, 'import numpy 
as np\n'), ((5241, 5320), 'matplotlib.pyplot.contourf', 'plt.contourf', (['x', 'y', 'e1'], {'origin': '"""lower"""', 'extend': '"""both"""', 'cmap': '"""plasma"""', 'alpha': '(0.5)'}), "(x, y, e1, origin='lower', extend='both', cmap='plasma', alpha=0.5)\n", (5253, 5320), True, 'import matplotlib.pyplot as plt\n'), ((5338, 5402), 'matplotlib.pyplot.contour', 'plt.contour', (['x', 'y', 'e1'], {'levels': 'levels', 'colors': '"""k"""', 'linewidths': '(0.5)'}), "(x, y, e1, levels=levels, colors='k', linewidths=0.5)\n", (5349, 5402), True, 'import matplotlib.pyplot as plt\n'), ((5562, 5585), 'os.path.isdir', 'os.path.isdir', (['plot_dir'], {}), '(plot_dir)\n', (5575, 5585), False, 'import os\n'), ((5756, 5767), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (5765, 5767), True, 'import matplotlib.pyplot as plt\n'), ((6008, 6036), 'numpy.array', 'np.array', (["df['gamma'].values"], {}), "(df['gamma'].values)\n", (6016, 6036), True, 'import numpy as np\n'), ((6063, 6081), 'numpy.cos', 'np.cos', (['self.teta_'], {}), '(self.teta_)\n', (6069, 6081), True, 'import numpy as np\n'), ((6161, 6179), 'numpy.sin', 'np.sin', (['self.teta_'], {}), '(self.teta_)\n', (6167, 6179), True, 'import numpy as np\n'), ((6262, 6290), 'numpy.array', 'np.array', (['df[self.zz].values'], {}), '(df[self.zz].values)\n', (6270, 6290), True, 'import numpy as np\n'), ((6431, 6451), 'matplotlib.pyplot.imread', 'plt.imread', (['img_name'], {}), '(img_name)\n', (6441, 6451), True, 'import matplotlib.pyplot as plt\n'), ((6470, 6491), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'dpi': '(300)'}), '(dpi=300)\n', (6482, 6491), True, 'import matplotlib.pyplot as plt\n'), ((6989, 7003), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (7001, 7003), True, 'import matplotlib.pyplot as plt\n'), ((7131, 7154), 'os.path.isdir', 'os.path.isdir', (['plot_dir'], {}), '(plot_dir)\n', (7144, 7154), False, 'import os\n'), ((7315, 7326), 'matplotlib.pyplot.close', 'plt.close', ([], {}), 
'()\n', (7324, 7326), True, 'import matplotlib.pyplot as plt\n'), ((7562, 7582), 'matplotlib.pyplot.imread', 'plt.imread', (['img_name'], {}), '(img_name)\n', (7572, 7582), True, 'import matplotlib.pyplot as plt\n'), ((7610, 7631), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'dpi': '(300)'}), '(dpi=300)\n', (7622, 7631), True, 'import matplotlib.pyplot as plt\n'), ((8222, 8236), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (8234, 8236), True, 'import matplotlib.pyplot as plt\n'), ((8291, 8314), 'os.path.isdir', 'os.path.isdir', (['plot_dir'], {}), '(plot_dir)\n', (8304, 8314), False, 'import os\n'), ((8474, 8485), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (8483, 8485), True, 'import matplotlib.pyplot as plt\n'), ((8914, 8977), 'matplotlib.pyplot.contour', 'plt.contour', (['x', 'y', 'zv'], {'levels': '(8)', 'linewidths': '(0.4)', 'colors': '"""black"""'}), "(x, y, zv, levels=8, linewidths=0.4, colors='black')\n", (8925, 8977), True, 'import matplotlib.pyplot as plt\n'), ((8991, 9070), 'matplotlib.pyplot.contourf', 'plt.contourf', (['x', 'y', 'zv'], {'origin': '"""lower"""', 'extend': '"""both"""', 'cmap': '"""plasma"""', 'alpha': '(0.5)'}), "(x, y, zv, origin='lower', extend='both', cmap='plasma', alpha=0.5)\n", (9003, 9070), True, 'import matplotlib.pyplot as plt\n'), ((9094, 9110), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['cs'], {}), '(cs)\n', (9106, 9110), True, 'import matplotlib.pyplot as plt\n'), ((9161, 9204), 'matplotlib.pyplot.title', 'plt.title', (["deck.doc['Plots']['Target Plot']"], {}), "(deck.doc['Plots']['Target Plot'])\n", (9170, 9204), True, 'import matplotlib.pyplot as plt\n'), ((9213, 9252), 'matplotlib.pyplot.clabel', 'plt.clabel', (['fig'], {'inline': '(0.1)', 'fontsize': '(5)'}), '(fig, inline=0.1, fontsize=5)\n', (9223, 9252), True, 'import matplotlib.pyplot as plt\n'), ((9261, 9273), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (9271, 9273), True, 'import matplotlib.pyplot as 
plt\n'), ((9336, 9359), 'os.path.isdir', 'os.path.isdir', (['plot_dir'], {}), '(plot_dir)\n', (9349, 9359), False, 'import os\n'), ((9516, 9527), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (9525, 9527), True, 'import matplotlib.pyplot as plt\n'), ((9896, 9959), 'matplotlib.pyplot.contour', 'plt.contour', (['x', 'y', 'zv'], {'levels': '(8)', 'linewidths': '(0.4)', 'colors': '"""black"""'}), "(x, y, zv, levels=8, linewidths=0.4, colors='black')\n", (9907, 9959), True, 'import matplotlib.pyplot as plt\n'), ((9973, 10052), 'matplotlib.pyplot.contourf', 'plt.contourf', (['x', 'y', 'zv'], {'origin': '"""lower"""', 'extend': '"""both"""', 'cmap': '"""plasma"""', 'alpha': '(0.5)'}), "(x, y, zv, origin='lower', extend='both', cmap='plasma', alpha=0.5)\n", (9985, 10052), True, 'import matplotlib.pyplot as plt\n'), ((10076, 10092), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['cs'], {}), '(cs)\n', (10088, 10092), True, 'import matplotlib.pyplot as plt\n'), ((10143, 10181), 'matplotlib.pyplot.title', 'plt.title', (['deck.plot_inccontour_target'], {}), '(deck.plot_inccontour_target)\n', (10152, 10181), True, 'import matplotlib.pyplot as plt\n'), ((10190, 10229), 'matplotlib.pyplot.clabel', 'plt.clabel', (['fig'], {'inline': '(0.1)', 'fontsize': '(5)'}), '(fig, inline=0.1, fontsize=5)\n', (10200, 10229), True, 'import matplotlib.pyplot as plt\n'), ((10238, 10250), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (10248, 10250), True, 'import matplotlib.pyplot as plt\n'), ((10305, 10328), 'os.path.isdir', 'os.path.isdir', (['plot_dir'], {}), '(plot_dir)\n', (10318, 10328), False, 'import os\n'), ((10487, 10498), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (10496, 10498), True, 'import matplotlib.pyplot as plt\n'), ((10903, 10931), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(9, 6)'}), '(figsize=(9, 6))\n', (10915, 10931), True, 'import matplotlib.pyplot as plt\n'), ((10939, 10948), 'seaborn.set', 'sns.set', ([], {}), 
'()\n', (10946, 10948), True, 'import seaborn as sns\n'), ((11310, 11333), 'os.path.isdir', 'os.path.isdir', (['plot_dir'], {}), '(plot_dir)\n', (11323, 11333), False, 'import os\n'), ((11494, 11505), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (11503, 11505), True, 'import matplotlib.pyplot as plt\n'), ((11613, 11639), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(9, 6)'}), '(figsize=(9, 6))\n', (11623, 11639), True, 'import matplotlib.pyplot as plt\n'), ((12828, 12867), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'dpi': '(200)', 'figsize': '(12, 10)'}), '(dpi=200, figsize=(12, 10))\n', (12840, 12867), True, 'import matplotlib.pyplot as plt\n'), ((14198, 14236), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'dpi': '(92)', 'figsize': '(12, 10)'}), '(dpi=92, figsize=(12, 10))\n', (14210, 14236), True, 'import matplotlib.pyplot as plt\n'), ((2921, 2932), 'numpy.isnan', 'np.isnan', (['U'], {}), '(U)\n', (2929, 2932), True, 'import numpy as np\n'), ((3027, 3038), 'numpy.isnan', 'np.isnan', (['U'], {}), '(U)\n', (3035, 3038), True, 'import numpy as np\n'), ((4439, 4460), 'os.makedirs', 'os.makedirs', (['plot_dir'], {}), '(plot_dir)\n', (4450, 4460), False, 'import os\n'), ((5627, 5648), 'os.makedirs', 'os.makedirs', (['plot_dir'], {}), '(plot_dir)\n', (5638, 5648), False, 'import os\n'), ((6699, 6720), 'numpy.array', 'np.array', (['self.teta_1'], {}), '(self.teta_1)\n', (6707, 6720), True, 'import numpy as np\n'), ((6761, 6782), 'numpy.array', 'np.array', (['self.teta_2'], {}), '(self.teta_2)\n', (6769, 6782), True, 'import numpy as np\n'), ((6828, 6851), 'numpy.array', 'np.array', (['self.contour_'], {}), '(self.contour_)\n', (6836, 6851), True, 'import numpy as np\n'), ((6875, 6893), 'numpy.array', 'np.array', (['x[skip1]'], {}), '(x[skip1])\n', (6883, 6893), True, 'import numpy as np\n'), ((6894, 6912), 'numpy.array', 'np.array', (['y[skip1]'], {}), '(y[skip1])\n', (6902, 6912), True, 'import numpy as np\n'), ((7198, 7219), 
'os.makedirs', 'os.makedirs', (['plot_dir'], {}), '(plot_dir)\n', (7209, 7219), False, 'import os\n'), ((7733, 7754), 'numpy.array', 'np.array', (['self.teta_1'], {}), '(self.teta_1)\n', (7741, 7754), True, 'import numpy as np\n'), ((7795, 7816), 'numpy.array', 'np.array', (['self.teta_2'], {}), '(self.teta_2)\n', (7803, 7816), True, 'import numpy as np\n'), ((7862, 7885), 'numpy.array', 'np.array', (['self.contour_'], {}), '(self.contour_)\n', (7870, 7885), True, 'import numpy as np\n'), ((7927, 7938), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (7935, 7938), True, 'import numpy as np\n'), ((7940, 7951), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (7948, 7951), True, 'import numpy as np\n'), ((8357, 8378), 'os.makedirs', 'os.makedirs', (['plot_dir'], {}), '(plot_dir)\n', (8368, 8378), False, 'import os\n'), ((9401, 9422), 'os.makedirs', 'os.makedirs', (['plot_dir'], {}), '(plot_dir)\n', (9412, 9422), False, 'import os\n'), ((10370, 10391), 'os.makedirs', 'os.makedirs', (['plot_dir'], {}), '(plot_dir)\n', (10381, 10391), False, 'import os\n'), ((11375, 11396), 'os.makedirs', 'os.makedirs', (['plot_dir'], {}), '(plot_dir)\n', (11386, 11396), False, 'import os\n'), ((11869, 11878), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (11876, 11878), True, 'import matplotlib.pyplot as plt\n'), ((13182, 13191), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (13189, 13191), True, 'import matplotlib.pyplot as plt\n'), ((13283, 13303), 'matplotlib.pyplot.imread', 'plt.imread', (['img_name'], {}), '(img_name)\n', (13293, 13303), True, 'import matplotlib.pyplot as plt\n'), ((13316, 13353), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {'alpha': '(1)', 'cmap': '"""gray"""'}), "(img, alpha=1, cmap='gray')\n", (13326, 13353), True, 'import matplotlib.pyplot as plt\n'), ((13503, 13556), 'numpy.array', 'np.array', (["df[deck.doc['Plots']['Target Plot']].values"], {}), "(df[deck.doc['Plots']['Target Plot']].values)\n", (13511, 13556), True, 'import numpy 
as np\n'), ((13704, 13795), 'matplotlib.pyplot.pcolormesh', 'plt.pcolormesh', (['x', 'y', 'e1'], {'vmin': 'data_modes.vmin_0', 'vmax': 'data_modes.vmax_0', 'cmap': '"""plasma"""'}), "(x, y, e1, vmin=data_modes.vmin_0, vmax=data_modes.vmax_0,\n cmap='plasma')\n", (13718, 13795), True, 'import matplotlib.pyplot as plt\n'), ((13800, 13864), 'matplotlib.pyplot.contour', 'plt.contour', (['x', 'y', 'e1'], {'levels': 'levels', 'colors': '"""k"""', 'linewidths': '(0.5)'}), "(x, y, e1, levels=levels, colors='k', linewidths=0.5)\n", (13811, 13864), True, 'import matplotlib.pyplot as plt\n'), ((13884, 13902), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['cont'], {}), '(cont)\n', (13896, 13902), True, 'import matplotlib.pyplot as plt\n'), ((14551, 14560), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (14558, 14560), True, 'import matplotlib.pyplot as plt\n'), ((14652, 14672), 'matplotlib.pyplot.imread', 'plt.imread', (['img_name'], {}), '(img_name)\n', (14662, 14672), True, 'import matplotlib.pyplot as plt\n'), ((14685, 14722), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {'alpha': '(1)', 'cmap': '"""gray"""'}), "(img, alpha=1, cmap='gray')\n", (14695, 14722), True, 'import matplotlib.pyplot as plt\n'), ((14872, 14925), 'numpy.array', 'np.array', (["df[deck.doc['Plots']['Target Plot']].values"], {}), "(df[deck.doc['Plots']['Target Plot']].values)\n", (14880, 14925), True, 'import numpy as np\n'), ((15367, 15431), 'matplotlib.pyplot.contour', 'plt.contour', (['x', 'y', 'e1'], {'levels': 'levels', 'colors': '"""k"""', 'linewidths': '(0.5)'}), "(x, y, e1, levels=levels, colors='k', linewidths=0.5)\n", (15378, 15431), True, 'import matplotlib.pyplot as plt\n'), ((15451, 15469), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['cont'], {}), '(cont)\n', (15463, 15469), True, 'import matplotlib.pyplot as plt\n'), ((3899, 3938), 'numpy.logspace', 'np.logspace', (['(0.1)', 'data_modes.vmax_0', '(15)'], {}), '(0.1, data_modes.vmax_0, 15)\n', (3910, 3938), True, 'import 
numpy as np\n'), ((4058, 4170), 'matplotlib.colors.SymLogNorm', 'matplotlib.colors.SymLogNorm', ([], {'linthresh': '(0.001)', 'linscale': '(0.1)', 'vmin': 'data_modes.vmin_0', 'vmax': 'data_modes.vmax_0'}), '(linthresh=0.001, linscale=0.1, vmin=data_modes\n .vmin_0, vmax=data_modes.vmax_0)\n', (4086, 4170), False, 'import matplotlib\n'), ((6522, 6542), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (6534, 6542), True, 'import matplotlib.pyplot as plt\n'), ((7662, 7682), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (7674, 7682), True, 'import matplotlib.pyplot as plt\n'), ((13631, 13684), 'numpy.linspace', 'np.linspace', (['data_modes.vmin_0', 'data_modes.vmax_0', '(20)'], {}), '(data_modes.vmin_0, data_modes.vmax_0, 20)\n', (13642, 13684), True, 'import numpy as np\n'), ((3870, 3897), 'numpy.linspace', 'np.linspace', (['(-0.01)', '(0.01)', '(5)'], {}), '(-0.01, 0.01, 5)\n', (3881, 3897), True, 'import numpy as np\n'), ((15098, 15137), 'numpy.logspace', 'np.logspace', (['(0.1)', 'data_modes.vmax_0', '(15)'], {}), '(0.1, data_modes.vmax_0, 15)\n', (15109, 15137), True, 'import numpy as np\n'), ((15184, 15296), 'matplotlib.colors.SymLogNorm', 'matplotlib.colors.SymLogNorm', ([], {'linthresh': '(0.001)', 'linscale': '(0.1)', 'vmin': 'data_modes.vmin_0', 'vmax': 'data_modes.vmax_0'}), '(linthresh=0.001, linscale=0.1, vmin=data_modes\n .vmin_0, vmax=data_modes.vmax_0)\n', (15212, 15296), False, 'import matplotlib\n'), ((15069, 15096), 'numpy.linspace', 'np.linspace', (['(-0.01)', '(0.01)', '(5)'], {}), '(-0.01, 0.01, 5)\n', (15080, 15096), True, 'import numpy as np\n')] |
from creature import *
from nn_model import model_simple_predator
class Predator(Creature):
    """A predator creature whose food-seeking direction comes from a small
    neural network (model_simple_predator) fed with the prey direction.

    BUGFIX: the original signature evaluated np.random.* inside the default
    argument expressions, so the "random" speed/size/sense/weights were drawn
    ONCE at class-definition time and shared by every default-constructed
    predator (and starting_pos was a shared mutable list).  Defaults are now
    None sentinels resolved per instance.
    """

    def __init__(self, starting_pos=None, speed=None, size=None,
                 sense=None, weights_food=None):
        # Resolve random defaults per call instead of at import time.
        if starting_pos is None:
            starting_pos = [0, 0]
        if speed is None:
            speed = np.random.randint(10, 20)
        if size is None:
            size = np.random.randint(1, 10)
        if sense is None:
            sense = np.random.uniform(0, 20)
        if weights_food is None:
            weights_food = np.random.uniform(-1, 1, (2, 6))
        super(Predator, self).__init__(starting_pos, speed, size)
        self.color = (0, 0, 0)  # predators are drawn black
        # The original accepted `sense` but never stored it, while move()
        # reads self.sense; store it here (assumes Creature does not own it).
        self.sense = sense
        self.nn_food = model_simple_predator(weights_food)

    def draw(self, gameDisplay):
        "Draw the predator: body circle plus an inner speed-coloured circle."
        draw.circle(gameDisplay, self.color, self.getPos(), self.size)
        draw.circle(gameDisplay, self.speed_color, self.getPos(), self.size // 2)

    def eat(self):
        """Consume prey while fertility is below 100: the first meal makes
        the predator content, a second one stops it moving; fertility is
        restored to 100 either way."""
        if self.fertility < 100:
            if not self.content:
                self.content = True
            else:
                self.moveflag = False
            self.fertility = 100

    def compute_nn_food_speed(self):
        "Unit steering direction from the food net, fed the prey direction."
        out = self.nn_food.forward(self.towards_prey_velocity.reshape(2, 1))
        out = out / (np.linalg.norm(out) + 0.001)  # +0.001 avoids div-by-zero
        return out.reshape(2,)

    def compute_nn_food_custom(self, x):
        "Unit steering direction from the food net for an arbitrary 2-vector."
        out = self.nn_food.forward(x.reshape(2, 1))
        out = out / (np.linalg.norm(out) + 0.001)
        return out.reshape(2,)

    def move(self, worldSz):
        """Advance one step inside worldSz = (width, height): jitter the
        velocity, add the network steering scaled by self.sense, renormalise
        to self.speed, integrate, and bounce off the world borders.  Moving
        costs health; at 0 health the predator stops."""
        if self.moveflag == True or (self.away_from_predator_velocity[0] != 0 and self.away_from_predator_velocity[1] != 0):
            # decrease in health with every step
            self.health = self.health - self.speed / 75
            if self.health <= 0:
                self.moveflag = False
            self.velocity = self.velocity + np.random.normal(0, 1, self.velocity.shape) + self.sense * self.compute_nn_food_speed()
            # Reset the one-step steering accumulators.
            self.away_from_predator_velocity = np.array([0, 0])
            self.towards_prey_velocity = np.array([0, 0])
            # Rescale so the step length always equals self.speed.
            multiplier = self.speed / np.linalg.norm(self.velocity)
            self.velocity = multiplier * self.velocity
            self.pos = self.pos + self.velocity * 0.06
            # Reflect off the world borders.
            if self.pos[0] <= 0:
                self.pos[0] = 0
                self.velocity[0] = -self.velocity[0]
            if self.pos[0] >= worldSz[0]:
                self.pos[0] = worldSz[0] - 1
                self.velocity[0] = -self.velocity[0]
            if self.pos[1] < 0:  # NOTE: '<' here vs '<=' for x; kept as-is
                self.pos[1] = 0
                self.velocity[1] = -self.velocity[1]
            if self.pos[1] >= worldSz[1]:
                self.pos[1] = worldSz[1] - 1
                self.velocity[1] = -self.velocity[1]
| [
"nn_model.model_simple_predator"
] | [((395, 430), 'nn_model.model_simple_predator', 'model_simple_predator', (['weights_food'], {}), '(weights_food)\n', (416, 430), False, 'from nn_model import model_simple_predator\n')] |
# encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
"""
Project management models
"""
from django.db import models
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from treeio.core.models import Object, User
from treeio.identities.models import Contact
from datetime import datetime, timedelta
# Project Model
class Project(Object):
    """A project: the top-level container for milestones and tasks.
    Projects may be nested via ``parent`` and carry an optional manager
    and client contact plus free-form details.
    """
    name = models.CharField(max_length=255)
    # Optional parent project; children are reachable via ``child_set``.
    parent = models.ForeignKey(
        'self', blank=True, null=True, related_name='child_set')
    # SET_NULL keeps the project alive when the referenced contact is deleted.
    manager = models.ForeignKey(
        Contact, related_name='manager', null=True, blank=True, on_delete=models.SET_NULL)
    client = models.ForeignKey(
        Contact, related_name='client', null=True, blank=True, on_delete=models.SET_NULL)
    details = models.TextField(max_length=255, null=True, blank=True)
    class Meta:
        "Project"
        ordering = ['name']
    def __unicode__(self):
        return self.name
    def get_absolute_url(self):
        "Returns absolute URL for the Project"
        try:
            return reverse('projects_project_view', args=[self.id])
        # Deliberate best-effort: returns None if the URL cannot be reversed.
        except Exception:
            pass
# TaskStatus model
class TaskStatus(Object):
    """A status shared by tasks and milestones (e.g. open/closed states).
    Tasks moved to a ``hidden`` status are treated as closed elsewhere in
    this module (see Task.save and Task.depends).
    """
    name = models.CharField(max_length=255)
    details = models.TextField(max_length=255, null=True, blank=True)
    # active/hidden flags; `hidden` statuses are excluded from dependency
    # choices and cascade closure in Task.save.  `active` presumably marks
    # in-progress statuses -- it only affects ordering here.
    active = models.BooleanField()
    hidden = models.BooleanField()
    class Meta:
        "TaskStatus"
        ordering = ('hidden', '-active', 'name')
    def __unicode__(self):
        return self.name
    def get_absolute_url(self):
        "Returns absolute URL for the Task Status"
        try:
            return reverse('projects_index_by_status', args=[self.id])
        # Deliberate best-effort: returns None if the URL cannot be reversed.
        except Exception:
            pass
# Milestone model
class Milestone(Object):
    """A milestone within a Project; tasks may optionally belong to one.
    Saving a milestone whose project changed re-homes all of its tasks.
    """
    project = models.ForeignKey(Project)
    name = models.CharField(max_length=255)
    status = models.ForeignKey(TaskStatus)
    details = models.TextField(max_length=255, null=True, blank=True)
    start_date = models.DateTimeField(null=True, blank=True)
    end_date = models.DateTimeField(null=True, blank=True)
    # Permissions are inherited from the project (framework convention).
    access_inherit = ('project', '*module', '*user')
    class Meta:
        "Milestone"
        ordering = ['start_date', 'name']
    def __unicode__(self):
        return self.name
    def save(self, *args, **kwargs):
        "Override save to update all included tickets if Milestone.project changed"
        if self.id:
            # Snapshot the stored row BEFORE saving so the old project is known.
            original = Milestone.objects.get(pk=self.id)
            super(Milestone, self).save(*args, **kwargs)
            if self.project != original.project:
                # Re-home every task of this milestone to the new project.
                for task in self.task_set.all():
                    task.project = self.project
                    task.save()
        else:
            super(Milestone, self).save(*args, **kwargs)
    def get_absolute_url(self):
        "Returns absolute URL for the Milestone"
        try:
            return reverse('projects_milestone_view', args=[self.id])
        # Deliberate best-effort: returns None if the URL cannot be reversed.
        except Exception:
            pass
# Task model
class Task(Object):
    """A single unit of work within a Project.

    Tasks form a tree via ``parent``, may belong to a Milestone (kept in
    sync with the milestone's project on save) and may block other tasks
    via ``depends``.  Time spent is recorded in related TaskTimeSlot rows.
    """
    parent = models.ForeignKey(
        'self', blank=True, null=True, related_name='child_set')
    project = models.ForeignKey(Project)
    milestone = models.ForeignKey(Milestone, null=True, blank=True)
    status = models.ForeignKey(TaskStatus, default=26)
    name = models.CharField(max_length=255)
    details = models.TextField(max_length=255, null=True, blank=True)
    assigned = models.ManyToManyField(User, blank=True, null=True)
    # A task may be blocked by another task; only non-hidden (open)
    # statuses are offered as dependency choices.
    depends = models.ForeignKey('Task', blank=True, null=True, related_name='blocked_set',
                                limit_choices_to={'status__hidden': False})
    caller = models.ForeignKey(
        Contact, blank=True, null=True, on_delete=models.SET_NULL)
    start_date = models.DateTimeField(null=True, blank=True)
    end_date = models.DateTimeField(null=True, blank=True)
    priority = models.IntegerField(default=3,
                                choices=((5, _('Highest')), (4, _('High')), (3, _('Normal')),
                                         (2, _('Low')), (1, _('Lowest'))))
    # Estimate in minutes; NULL when no estimate was given.
    estimated_time = models.IntegerField(null=True, blank=True)
    # Permissions are inherited up the tree (framework convention).
    access_inherit = ('parent', 'milestone', 'project', '*module', '*user')

    class Meta:
        "Task"
        ordering = ('-priority', 'name')

    def __unicode__(self):
        return self.name

    @staticmethod
    def _format_hours_minutes(hours, minutes):
        "Render an (hours, minutes) pair as a human-friendly string."
        if not (hours or minutes):
            return _("Less than 1 minute")
        string = ""
        if hours:
            string += _("%2i hours ") % (hours)
        if minutes:
            string += _("%2i minutes") % (minutes)
        return string

    def priority_human(self):
        "Returns a Human-friendly priority name (None for unknown values)"
        names = {5: _('Highest'), 4: _('High'), 3: _('Normal'),
                 2: _('Low'), 1: _('Lowest')}
        return names.get(self.priority)

    def get_estimated_time(self):
        "Converts the estimated time (minutes) to a human-friendly string"
        # BUGFIX: estimated_time is nullable; timedelta(minutes=None) raised
        # TypeError in the original.  Return an empty string instead.
        if self.estimated_time is None:
            return ""
        time = timedelta(minutes=self.estimated_time)
        hours = time.days * 24 + time.seconds // (60 * 60)
        minutes = (time.seconds % (60 * 60)) // 60
        return self._format_hours_minutes(hours, minutes)

    def save(self, *args, **kwargs):
        "Override save method to check for Milestone-Project links and auto-Status child Tasks"
        original = None
        if self.id:
            # Existing task: compare against the stored row.
            original = Task.objects.get(pk=self.id)
            if self.project_id != original.project_id:
                # Project changed: drop a milestone from another project.
                if self.milestone_id and self.milestone.project_id != self.project_id:
                    self.milestone = None
            elif self.milestone_id and self.milestone_id != original.milestone_id:
                # Milestone changed: follow the milestone's project.
                if self.milestone.project_id != self.project_id:
                    self.project_id = self.milestone.project_id
            if self.status_id != original.status_id:
                # Changed status
                if self.status.hidden:
                    # Moving to a 'hidden' status cascades to subtasks...
                    for task in self.child_set.exclude(status=self.status):
                        task.status_id = self.status_id
                        task.save()
                    # ...and closes any still-open time slots.
                    for slot in self.tasktimeslot_set.filter(time_to__isnull=True):
                        slot.time_to = datetime.now()
                        slot.save()
        else:
            # New task: the milestone's project wins over a mismatched one.
            if self.milestone_id and self.milestone.project_id != self.project_id:
                self.project_id = self.milestone.project_id
        # Inherit Project and Milestone from parent if present
        if self.parent_id:
            if self.project_id != self.parent.project_id:
                self.project_id = self.parent.project_id
            if self.milestone_id != self.parent.milestone_id:
                self.milestone_id = self.parent.milestone_id
        super(Task, self).save(*args, **kwargs)

    def get_absolute_url(self):
        "Returns absolute URL"
        try:
            return reverse('projects_task_view', args=[self.id])
        # Deliberate best-effort: returns None if the URL cannot be reversed.
        except Exception:
            pass

    def get_total_time(self):
        "Returns total time spent on the task, based on assigned TimeSlots"
        total = timedelta()
        for slot in self.tasktimeslot_set.all():
            total += slot.get_time()
        return total

    def get_total_time_tuple(self):
        "Returns total time as an (hours, minutes, seconds) tuple, or None when zero"
        time = self.get_total_time()
        if not time:
            return None
        hours = time.days * 24 + time.seconds // (60 * 60)
        minutes = (time.seconds % (60 * 60)) // 60
        seconds = time.seconds % 60
        return (hours, minutes, seconds)

    def get_total_time_string(self):
        "Returns total time as a string with number of full hours and minutes"
        time = self.get_total_time_tuple()
        if not time:
            return _("0 minutes")
        return self._format_hours_minutes(time[0], time[1])

    def is_being_done_by(self, user):
        "Returns True if the given user has an open time slot on this task"
        # An open slot (no time_to yet) means work is in progress.
        return self.tasktimeslot_set.filter(user=user, time_to__isnull=True).exists()
# TaskTimeSlot model
class TaskTimeSlot(Object):
    """A span of time a user spent on a Task.
    ``time_to`` is NULL while the slot is still running (see is_open()).
    """
    task = models.ForeignKey(Task)
    user = models.ForeignKey(User)
    time_from = models.DateTimeField()
    # NULL time_to marks an open (still running) slot.
    time_to = models.DateTimeField(null=True, blank=True)
    timezone = models.IntegerField(default=0)
    details = models.TextField(max_length=255, null=True, blank=True)
    # Permissions are inherited from the task (framework convention).
    access_inherit = ('task', '*module', '*user')
    # Framework flags (not DB fields); presumably exclude slots from search
    # and render them attached to their Task -- confirm in treeio.core.
    searchable = False
    attached = True
    class Meta:
        "TaskTimeSlot"
        ordering = ['-date_created']
    def __unicode__(self):
        return unicode(self.task)
    def get_absolute_url(self):
        "Returns absolute URL"
        try:
            # Time slots have no page of their own; link to the parent task.
            return reverse('projects_task_view', args=[self.task_id])
        except Exception:
            pass
    def get_time_secs(self):
        "Return time from epoch"
        # Seconds elapsed since time_from up to now (ignores time_to).
        time = datetime.now() - self.time_from
        seconds = time.days * 24 * 3600 + time.seconds
        return seconds
    def get_time(self):
        "Returns time"
        # Duration of a closed slot; zero timedelta while the slot is open.
        if self.time_from and self.time_to:
            return self.time_to - self.time_from
        else:
            return timedelta()
    def get_time_tuple(self, time=None):
        "Returns time as a tuple with number of full hours and minutes"
        if not time:
            time = self.get_time()
        if not time:
            # Zero/empty duration yields no tuple.
            return None
        days = time.days
        seconds = time.seconds
        hours = days * 24 + (seconds // (60 * 60))
        seconds %= (60 * 60)
        minutes = seconds // 60
        seconds %= 60
        return (hours, minutes, seconds)
    def get_time_string(self, time=None):
        "Returns time in string format"
        time = self.get_time_tuple(time)
        if not time and self.time_from:
            # Open slot: recurse once with the time elapsed so far.
            return self.get_time_string(datetime.now() - self.time_from)
        elif not time:
            return ""
        hours = time[0]
        minutes = time[1]
        string = ""
        if hours or minutes:
            if hours:
                string += _("%2i hours ") % (hours)
            if minutes:
                string += _("%2i minutes") % (minutes)
        else:
            string = _("Less than 1 minute")
        return string
    def is_open(self):
        "If task is open"
        # Open means no end time has been recorded yet.
        if self.time_from and self.time_to:
            return False
        return True
| [
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.ManyToManyField",
"django.core.urlresolvers.reverse",
"django.db.models.BooleanField",
"django.utils.translation.ugettext",
"datetime.datetime.now",
"django.db.models.DateTimeField",
"da... | [((482, 514), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (498, 514), False, 'from django.db import models\n'), ((528, 602), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""self"""'], {'blank': '(True)', 'null': '(True)', 'related_name': '"""child_set"""'}), "('self', blank=True, null=True, related_name='child_set')\n", (545, 602), False, 'from django.db import models\n'), ((626, 730), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Contact'], {'related_name': '"""manager"""', 'null': '(True)', 'blank': '(True)', 'on_delete': 'models.SET_NULL'}), "(Contact, related_name='manager', null=True, blank=True,\n on_delete=models.SET_NULL)\n", (643, 730), False, 'from django.db import models\n'), ((749, 852), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Contact'], {'related_name': '"""client"""', 'null': '(True)', 'blank': '(True)', 'on_delete': 'models.SET_NULL'}), "(Contact, related_name='client', null=True, blank=True,\n on_delete=models.SET_NULL)\n", (766, 852), False, 'from django.db import models\n'), ((872, 927), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(255)', 'null': '(True)', 'blank': '(True)'}), '(max_length=255, null=True, blank=True)\n', (888, 927), False, 'from django.db import models\n'), ((1360, 1392), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (1376, 1392), False, 'from django.db import models\n'), ((1407, 1462), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(255)', 'null': '(True)', 'blank': '(True)'}), '(max_length=255, null=True, blank=True)\n', (1423, 1462), False, 'from django.db import models\n'), ((1476, 1497), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (1495, 1497), False, 'from django.db import models\n'), ((1511, 1532), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (1530, 1532), 
False, 'from django.db import models\n'), ((1983, 2009), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Project'], {}), '(Project)\n', (2000, 2009), False, 'from django.db import models\n'), ((2021, 2053), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (2037, 2053), False, 'from django.db import models\n'), ((2067, 2096), 'django.db.models.ForeignKey', 'models.ForeignKey', (['TaskStatus'], {}), '(TaskStatus)\n', (2084, 2096), False, 'from django.db import models\n'), ((2111, 2166), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(255)', 'null': '(True)', 'blank': '(True)'}), '(max_length=255, null=True, blank=True)\n', (2127, 2166), False, 'from django.db import models\n'), ((2184, 2227), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (2204, 2227), False, 'from django.db import models\n'), ((2243, 2286), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (2263, 2286), False, 'from django.db import models\n'), ((3261, 3335), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""self"""'], {'blank': '(True)', 'null': '(True)', 'related_name': '"""child_set"""'}), "('self', blank=True, null=True, related_name='child_set')\n", (3278, 3335), False, 'from django.db import models\n'), ((3359, 3385), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Project'], {}), '(Project)\n', (3376, 3385), False, 'from django.db import models\n'), ((3402, 3453), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Milestone'], {'null': '(True)', 'blank': '(True)'}), '(Milestone, null=True, blank=True)\n', (3419, 3453), False, 'from django.db import models\n'), ((3467, 3508), 'django.db.models.ForeignKey', 'models.ForeignKey', (['TaskStatus'], {'default': '(26)'}), '(TaskStatus, default=26)\n', (3484, 3508), False, 'from 
django.db import models\n'), ((3520, 3552), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (3536, 3552), False, 'from django.db import models\n'), ((3567, 3622), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(255)', 'null': '(True)', 'blank': '(True)'}), '(max_length=255, null=True, blank=True)\n', (3583, 3622), False, 'from django.db import models\n'), ((3638, 3689), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['User'], {'blank': '(True)', 'null': '(True)'}), '(User, blank=True, null=True)\n', (3660, 3689), False, 'from django.db import models\n'), ((3704, 3828), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Task"""'], {'blank': '(True)', 'null': '(True)', 'related_name': '"""blocked_set"""', 'limit_choices_to': "{'status__hidden': False}"}), "('Task', blank=True, null=True, related_name='blocked_set',\n limit_choices_to={'status__hidden': False})\n", (3721, 3828), False, 'from django.db import models\n'), ((3870, 3946), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Contact'], {'blank': '(True)', 'null': '(True)', 'on_delete': 'models.SET_NULL'}), '(Contact, blank=True, null=True, on_delete=models.SET_NULL)\n', (3887, 3946), False, 'from django.db import models\n'), ((3973, 4016), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (3993, 4016), False, 'from django.db import models\n'), ((4032, 4075), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (4052, 4075), False, 'from django.db import models\n'), ((4318, 4360), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (4337, 4360), False, 'from django.db import models\n'), ((9202, 9225), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Task'], {}), 
'(Task)\n', (9219, 9225), False, 'from django.db import models\n'), ((9237, 9260), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {}), '(User)\n', (9254, 9260), False, 'from django.db import models\n'), ((9277, 9299), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (9297, 9299), False, 'from django.db import models\n'), ((9314, 9357), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (9334, 9357), False, 'from django.db import models\n'), ((9373, 9403), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (9392, 9403), False, 'from django.db import models\n'), ((9418, 9473), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(255)', 'null': '(True)', 'blank': '(True)'}), '(max_length=255, null=True, blank=True)\n', (9434, 9473), False, 'from django.db import models\n'), ((4977, 5015), 'datetime.timedelta', 'timedelta', ([], {'minutes': 'self.estimated_time'}), '(minutes=self.estimated_time)\n', (4986, 5015), False, 'from datetime import datetime, timedelta\n'), ((7782, 7793), 'datetime.timedelta', 'timedelta', ([], {}), '()\n', (7791, 7793), False, 'from datetime import datetime, timedelta\n'), ((1157, 1205), 'django.core.urlresolvers.reverse', 'reverse', (['"""projects_project_view"""'], {'args': '[self.id]'}), "('projects_project_view', args=[self.id])\n", (1164, 1205), False, 'from django.core.urlresolvers import reverse\n'), ((1790, 1841), 'django.core.urlresolvers.reverse', 'reverse', (['"""projects_index_by_status"""'], {'args': '[self.id]'}), "('projects_index_by_status', args=[self.id])\n", (1797, 1841), False, 'from django.core.urlresolvers import reverse\n'), ((3094, 3144), 'django.core.urlresolvers.reverse', 'reverse', (['"""projects_milestone_view"""'], {'args': '[self.id]'}), "('projects_milestone_view', args=[self.id])\n", (3101, 3144), False, 'from 
django.core.urlresolvers import reverse\n'), ((5444, 5467), 'django.utils.translation.ugettext', '_', (['"""Less than 1 minute"""'], {}), "('Less than 1 minute')\n", (5445, 5467), True, 'from django.utils.translation import ugettext as _\n'), ((7570, 7615), 'django.core.urlresolvers.reverse', 'reverse', (['"""projects_task_view"""'], {'args': '[self.id]'}), "('projects_task_view', args=[self.id])\n", (7577, 7615), False, 'from django.core.urlresolvers import reverse\n'), ((8529, 8543), 'django.utils.translation.ugettext', '_', (['"""0 minutes"""'], {}), "('0 minutes')\n", (8530, 8543), True, 'from django.utils.translation import ugettext as _\n'), ((8831, 8854), 'django.utils.translation.ugettext', '_', (['"""Less than 1 minute"""'], {}), "('Less than 1 minute')\n", (8832, 8854), True, 'from django.utils.translation import ugettext as _\n'), ((9805, 9855), 'django.core.urlresolvers.reverse', 'reverse', (['"""projects_task_view"""'], {'args': '[self.task_id]'}), "('projects_task_view', args=[self.task_id])\n", (9812, 9855), False, 'from django.core.urlresolvers import reverse\n'), ((9977, 9991), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9989, 9991), False, 'from datetime import datetime, timedelta\n'), ((10261, 10272), 'datetime.timedelta', 'timedelta', ([], {}), '()\n', (10270, 10272), False, 'from datetime import datetime, timedelta\n'), ((11288, 11311), 'django.utils.translation.ugettext', '_', (['"""Less than 1 minute"""'], {}), "('Less than 1 minute')\n", (11289, 11311), True, 'from django.utils.translation import ugettext as _\n'), ((4668, 4680), 'django.utils.translation.ugettext', '_', (['"""Highest"""'], {}), "('Highest')\n", (4669, 4680), True, 'from django.utils.translation import ugettext as _\n'), ((4687, 4696), 'django.utils.translation.ugettext', '_', (['"""High"""'], {}), "('High')\n", (4688, 4696), True, 'from django.utils.translation import ugettext as _\n'), ((4716, 4727), 'django.utils.translation.ugettext', '_', 
(['"""Normal"""'], {}), "('Normal')\n", (4717, 4727), True, 'from django.utils.translation import ugettext as _\n'), ((4734, 4742), 'django.utils.translation.ugettext', '_', (['"""Low"""'], {}), "('Low')\n", (4735, 4742), True, 'from django.utils.translation import ugettext as _\n'), ((4749, 4760), 'django.utils.translation.ugettext', '_', (['"""Lowest"""'], {}), "('Lowest')\n", (4750, 4760), True, 'from django.utils.translation import ugettext as _\n'), ((4170, 4182), 'django.utils.translation.ugettext', '_', (['"""Highest"""'], {}), "('Highest')\n", (4171, 4182), True, 'from django.utils.translation import ugettext as _\n'), ((4189, 4198), 'django.utils.translation.ugettext', '_', (['"""High"""'], {}), "('High')\n", (4190, 4198), True, 'from django.utils.translation import ugettext as _\n'), ((4205, 4216), 'django.utils.translation.ugettext', '_', (['"""Normal"""'], {}), "('Normal')\n", (4206, 4216), True, 'from django.utils.translation import ugettext as _\n'), ((4267, 4275), 'django.utils.translation.ugettext', '_', (['"""Low"""'], {}), "('Low')\n", (4268, 4275), True, 'from django.utils.translation import ugettext as _\n'), ((4282, 4293), 'django.utils.translation.ugettext', '_', (['"""Lowest"""'], {}), "('Lowest')\n", (4283, 4293), True, 'from django.utils.translation import ugettext as _\n'), ((5304, 5319), 'django.utils.translation.ugettext', '_', (['"""%2i hours """'], {}), "('%2i hours ')\n", (5305, 5319), True, 'from django.utils.translation import ugettext as _\n'), ((5380, 5396), 'django.utils.translation.ugettext', '_', (['"""%2i minutes"""'], {}), "('%2i minutes')\n", (5381, 5396), True, 'from django.utils.translation import ugettext as _\n'), ((8691, 8706), 'django.utils.translation.ugettext', '_', (['"""%2i hours """'], {}), "('%2i hours ')\n", (8692, 8706), True, 'from django.utils.translation import ugettext as _\n'), ((8767, 8783), 'django.utils.translation.ugettext', '_', (['"""%2i minutes"""'], {}), "('%2i minutes')\n", (8768, 8783), True, 
'from django.utils.translation import ugettext as _\n'), ((10923, 10937), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (10935, 10937), False, 'from datetime import datetime, timedelta\n'), ((11148, 11163), 'django.utils.translation.ugettext', '_', (['"""%2i hours """'], {}), "('%2i hours ')\n", (11149, 11163), True, 'from django.utils.translation import ugettext as _\n'), ((11224, 11240), 'django.utils.translation.ugettext', '_', (['"""%2i minutes"""'], {}), "('%2i minutes')\n", (11225, 11240), True, 'from django.utils.translation import ugettext as _\n'), ((6864, 6878), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6876, 6878), False, 'from datetime import datetime, timedelta\n')] |
# <NAME> <<EMAIL>>
# Seg 14 Mar 2011 21:46:55 BRT
import logging, os, zc.buildout
class CreateSymbolicLinks:
    """
    Buildout recipe that creates directories and symbolic links.

    With only ``target``/``directory`` a single link is created; with
    ``target_names`` one link per listed name is created inside
    ``directory``, each pointing into ``target``. Example:

    [buildout]
    parts =
        django
        tipfy

    [django]
    recipe = recipes:ln
    target = /home/carlo/django/trunk/django
    directory = app/lib/dist/django

    [tipfy]
    recipe = recipes:ln
    directory = app/lib/dist
    target = /home/carlo/tipfy/tipfy
    target_names =
        tipfy
        tipfyext
    """
    def __init__(self, buildout, name, options):
        """Validate the recipe options and remember link/target settings."""
        self.name, self.options = name, options
        self.logger = logging.getLogger(self.name)
        if 'directory' not in self.options:
            raise zc.buildout.UserError('Link directory must be provided')
        self.directory = self.options['directory']
        if 'target' not in self.options:
            raise zc.buildout.UserError('Target directory must be provided')
        self.target = self.options['target']
        if 'target_names' in self.options:
            raw_names = self.options['target_names'].split('\n')
            self.targets = [entry.strip() for entry in raw_names if len(entry) > 0]
        else:
            self.targets = None
    def create_symbolic_link(self, target, directory):
        """Create *directory* as a symlink to *target*, making parent dirs as needed."""
        parent = os.path.dirname(directory)
        if not os.path.exists(parent):
            os.makedirs(parent)
        if os.path.exists(directory):
            # Link (or something else) already there -- leave it alone.
            return
        resolved = os.path.realpath(target)
        self.logger.info('Creating symbolic link %s -> %s', directory, resolved)
        os.symlink(resolved, directory)
    def install(self):
        """Create the configured link(s); return what buildout should track."""
        if not self.targets:
            self.create_symbolic_link(self.target, self.directory)
        else:
            for link_name in self.targets:
                self.create_symbolic_link(
                    os.path.join(self.target, link_name),
                    os.path.join(self.directory, link_name),
                )
        return self.targets or self.target
    def update(self):
        """Nothing to do on update; links persist between runs."""
        pass
def uninstall(name, options):
    """Buildout uninstall hook: remove the symlink(s) created by this recipe.

    Only actual symbolic links are removed; regular files or directories at
    the same path are left untouched (``os.readlink`` fails on them before
    ``os.unlink`` runs).

    Args:
        name: part name (used for the logger only).
        options: recipe options; must contain 'directory' and 'target',
            optionally 'target_names' (newline-separated link names).

    Raises:
        zc.buildout.UserError: if 'directory' or 'target' is missing.
    """
    logger = logging.getLogger(name)
    if 'directory' not in options:
        raise zc.buildout.UserError('Link directory must be provided')
    directory = options['directory']
    if 'target' not in options:
        raise zc.buildout.UserError('Target directory must be provided')
    target = options['target']
    if 'target_names' in options:
        targets = [s.strip() for s in options['target_names'].split('\n') if len(s) > 0]
    else:
        targets = None
    def _remove_link(path):
        "Remove *path* if it is a symlink; silently skip anything else."
        try:
            # readlink raises OSError when *path* is not a symlink (or is
            # missing), so regular files/dirs are never unlinked.
            os.readlink(path)
            os.unlink(path)
        except OSError:
            # BUG FIX: the original used a bare `except:` which also
            # swallowed KeyboardInterrupt/SystemExit; only OS errors are
            # expected here.
            pass
    if targets:
        for link_name in targets:
            _remove_link(os.path.join(directory, link_name))
    else:
        _remove_link(directory)
| [
"logging.getLogger",
"os.path.exists",
"os.readlink",
"os.path.join",
"os.symlink",
"os.path.realpath",
"os.path.dirname",
"os.unlink"
] | [((1774, 1797), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (1791, 1797), False, 'import logging, os, zc.buildout\n'), ((559, 587), 'logging.getLogger', 'logging.getLogger', (['self.name'], {}), '(self.name)\n', (576, 587), False, 'import logging, os, zc.buildout\n'), ((1202, 1227), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (1216, 1227), False, 'import logging, os, zc.buildout\n'), ((1241, 1265), 'os.path.realpath', 'os.path.realpath', (['target'], {}), '(target)\n', (1257, 1265), False, 'import logging, os, zc.buildout\n'), ((1343, 1372), 'os.symlink', 'os.symlink', (['target', 'directory'], {}), '(target, directory)\n', (1353, 1372), False, 'import logging, os, zc.buildout\n'), ((2246, 2280), 'os.path.join', 'os.path.join', (['directory', 'link_name'], {}), '(directory, link_name)\n', (2258, 2280), False, 'import logging, os, zc.buildout\n'), ((2368, 2390), 'os.readlink', 'os.readlink', (['directory'], {}), '(directory)\n', (2379, 2390), False, 'import logging, os, zc.buildout\n'), ((2394, 2414), 'os.unlink', 'os.unlink', (['directory'], {}), '(directory)\n', (2403, 2414), False, 'import logging, os, zc.buildout\n'), ((1121, 1147), 'os.path.dirname', 'os.path.dirname', (['directory'], {}), '(directory)\n', (1136, 1147), False, 'import logging, os, zc.buildout\n'), ((1165, 1191), 'os.path.dirname', 'os.path.dirname', (['directory'], {}), '(directory)\n', (1180, 1191), False, 'import logging, os, zc.buildout\n'), ((1460, 1496), 'os.path.join', 'os.path.join', (['self.target', 'link_name'], {}), '(self.target, link_name)\n', (1472, 1496), False, 'import logging, os, zc.buildout\n'), ((1513, 1552), 'os.path.join', 'os.path.join', (['self.directory', 'link_name'], {}), '(self.directory, link_name)\n', (1525, 1552), False, 'import logging, os, zc.buildout\n'), ((2293, 2310), 'os.readlink', 'os.readlink', (['path'], {}), '(path)\n', (2304, 2310), False, 'import logging, os, zc.buildout\n'), ((2315, 2330), 
'os.unlink', 'os.unlink', (['path'], {}), '(path)\n', (2324, 2330), False, 'import logging, os, zc.buildout\n')] |
from django.shortcuts import render,redirect,HttpResponse
from django.contrib.auth.decorators import login_required
from django.urls import reverse
from .forms import NewsForm
from .models import News
from django.contrib import messages
from RoboClub.decorators import has_role_head_or_coordinator
from django.template.loader import render_to_string
from django.contrib.auth.models import User
from django.core.mail import EmailMessage,send_mass_mail
from django.conf.global_settings import EMAIL_HOST_USER
from django.http import JsonResponse
# Create your views here.
def news(request):
    """Public notice board: open notices plus the full archive, newest first."""
    open_items = News.objects.filter(is_open=True).order_by('-pk')
    every_item = News.objects.all().order_by('-pk')
    context = {'newslist': open_items, 'newslistall': every_item}
    return render(request, "news/notice.html", context)
@login_required
def collegenews(request):
    """College-only notices (is_open=False), newest first; login required."""
    closed_items = News.objects.filter(is_open=False).order_by('-pk')
    return render(request, "news/notice_college.html", {'newslist': closed_items})
@has_role_head_or_coordinator
def createNews(request):
    """Create a news item; re-render the form with errors on invalid input.

    GET renders an empty form; POST validates and saves it, then redirects
    to the notice board.
    """
    if request.method == 'POST':
        form = NewsForm(request.POST)
        # BUG FIX: the original called form.save() without form.is_valid();
        # ModelForm.save() raises ValueError on unvalidated/invalid data,
        # crashing the view instead of showing form errors.
        if form.is_valid():
            form.save()
            return redirect('news:news_page')
        # Fall through with the bound form so validation errors are shown.
    else:
        form = NewsForm()
    return render(request, 'news/notice_form.html', {'form': form})
@has_role_head_or_coordinator
def broadCastNews(request, pk):
    """E-mail the notice identified by *pk* to every active user, then redirect."""
    news = News.objects.get(id=pk)
    mail_subject = news.title
    message = render_to_string('news/notice_email.html', context={'body': news.content})
    # Collect recipients, skipping the host/sender address itself.
    # BUG FIX: the original compared e-mail strings with `is not` (identity),
    # which is almost always True for two distinct string objects, so the
    # host address was never actually excluded.
    to_users = [
        user.email
        for user in User.objects.all()
        if user.is_active and user.email and user.email != EMAIL_HOST_USER
    ]
    try:
        email = EmailMessage(subject=mail_subject, body=message, to=to_users)
        email.content_subtype = "html"
        email.send()
    except Exception:
        # Best-effort delivery: a mail backend failure must not break the view.
        pass
    messages.success(request, f'Notice has been broadcast to all users')
    return redirect('news:news_page')
@has_role_head_or_coordinator
def deleteNews(request, pk):
    """Delete a notice; only users with profile.role > 1 may delete."""
    news = News.objects.get(pk=pk)
    if request.user.profile.role > 1:
        news.delete()
    else:
        # Consistency fix: updateNews tells the user when permission is
        # missing, but deleteNews silently redirected. Use the same message.
        messages.info(request, "Sorry you dont have permission")
    return redirect('news:news_page')
@has_role_head_or_coordinator
def updateNews(request, pk):
    """Edit an existing notice; only users with profile.role > 1 may edit.

    GET renders the pre-filled form; POST validates and saves the changes,
    then redirects to the notice board.
    """
    news = News.objects.get(pk=pk)
    # Guard clause: reject callers without sufficient role up front.
    if request.user.profile.role <= 1:
        messages.info(request, "Sorry you dont have permission")
        return redirect('news:news_page')
    if request.method == "GET":
        form = NewsForm(instance=news)
    else:
        form = NewsForm(request.POST, instance=news)
        # BUG FIX: the original saved without is_valid(); ModelForm.save()
        # raises ValueError on unvalidated/invalid data.
        if form.is_valid():
            form.save()
            return redirect('news:news_page')
        # Fall through with the bound form so validation errors are shown.
    return render(request, 'news/notice_form.html', {'form': form})
"django.shortcuts.render",
"django.contrib.messages.info",
"django.shortcuts.redirect",
"django.contrib.messages.success",
"django.core.mail.EmailMessage",
"django.contrib.auth.models.User.objects.all",
"django.template.loader.render_to_string"
] | [((754, 798), 'django.shortcuts.render', 'render', (['request', '"""news/notice.html"""', 'context'], {}), "(request, 'news/notice.html', context)\n", (760, 798), False, 'from django.shortcuts import render, redirect, HttpResponse\n'), ((945, 997), 'django.shortcuts.render', 'render', (['request', '"""news/notice_college.html"""', 'context'], {}), "(request, 'news/notice_college.html', context)\n", (951, 997), False, 'from django.shortcuts import render, redirect, HttpResponse\n'), ((1272, 1321), 'django.shortcuts.render', 'render', (['request', '"""news/notice_form.html"""', 'context'], {}), "(request, 'news/notice_form.html', context)\n", (1278, 1321), False, 'from django.shortcuts import render, redirect, HttpResponse\n'), ((1498, 1572), 'django.template.loader.render_to_string', 'render_to_string', (['"""news/notice_email.html"""'], {'context': "{'body': news.content}"}), "('news/notice_email.html', context={'body': news.content})\n", (1514, 1572), False, 'from django.template.loader import render_to_string\n'), ((1607, 1625), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (1623, 1625), False, 'from django.contrib.auth.models import User\n'), ((1985, 2053), 'django.contrib.messages.success', 'messages.success', (['request', 'f"""Notice has been broadcast to all users"""'], {}), "(request, f'Notice has been broadcast to all users')\n", (2001, 2053), False, 'from django.contrib import messages\n'), ((2065, 2091), 'django.shortcuts.redirect', 'redirect', (['"""news:news_page"""'], {}), "('news:news_page')\n", (2073, 2091), False, 'from django.shortcuts import render, redirect, HttpResponse\n'), ((2255, 2281), 'django.shortcuts.redirect', 'redirect', (['"""news:news_page"""'], {}), "('news:news_page')\n", (2263, 2281), False, 'from django.shortcuts import render, redirect, HttpResponse\n'), ((1171, 1197), 'django.shortcuts.redirect', 'redirect', (['"""news:news_page"""'], {}), "('news:news_page')\n", (1179, 1197), False, 
'from django.shortcuts import render, redirect, HttpResponse\n'), ((1811, 1872), 'django.core.mail.EmailMessage', 'EmailMessage', ([], {'subject': 'mail_subject', 'body': 'message', 'to': 'to_users'}), '(subject=mail_subject, body=message, to=to_users)\n', (1823, 1872), False, 'from django.core.mail import EmailMessage, send_mass_mail\n'), ((2716, 2742), 'django.shortcuts.redirect', 'redirect', (['"""news:news_page"""'], {}), "('news:news_page')\n", (2724, 2742), False, 'from django.shortcuts import render, redirect, HttpResponse\n'), ((2761, 2817), 'django.contrib.messages.info', 'messages.info', (['request', '"""Sorry you dont have permission"""'], {}), "(request, 'Sorry you dont have permission')\n", (2774, 2817), False, 'from django.contrib import messages\n'), ((2832, 2858), 'django.shortcuts.redirect', 'redirect', (['"""news:news_page"""'], {}), "('news:news_page')\n", (2840, 2858), False, 'from django.shortcuts import render, redirect, HttpResponse\n'), ((2559, 2608), 'django.shortcuts.render', 'render', (['request', '"""news/notice_form.html"""', 'context'], {}), "(request, 'news/notice_form.html', context)\n", (2565, 2608), False, 'from django.shortcuts import render, redirect, HttpResponse\n')] |
from websocket import WebSocket
from pymongo import MongoClient
import json
import re

# Connect to MongoDB (posts collection) and to the blockchain websocket.
db = MongoClient().golos
ws = WebSocket()
ws.connect('wss://api.golos.cf')

print('Sending for 100')
# Fetch the 100 most recent posts.
ws.send(json.dumps({"id": 6, "method": "get_discussions_by_created",
                    "params": [{"tag": "", "limit": "100"}]}))
# SECURITY FIX: the reply is JSON from an external service; the original
# eval()-ed it (arbitrary code execution). json.loads parses it safely, and
# the true/false/null eval shims are no longer needed.
posts = json.loads(ws.recv())['result']

# Get the needed params for each post.
for i in posts:
    ids = i['id']
    try:
        tags = json.loads(i['json_metadata'])['tags']
    except (ValueError, KeyError, TypeError):
        # Missing or malformed metadata -- treat as untagged.
        tags = []
    write = {'id': ids,
             'author': i['author'],
             'permlink': i['permlink'],
             'timestamp': i['created'].split('T')[0],
             'title': i['title'],
             'votes': i['net_votes'],
             'comments': i['children'],
             # BUG FIX: the original referenced an undefined name `info`
             # here, raising NameError on the first iteration.
             'reward': float(i['pending_payout_value'].split()[0]),
             'body': re.sub(r"<.*?>", ' ', i['body']),
             'tags': tags
             }
    # NOTE(review): the insert was commented out in the original; kept as-is.
    # db.posts.insert_one(write)

print('Got 100, going in WHILE')
# Stream new posts: poll the latest post and store it when the id changes.
post = None
while True:
    # Remember the previously seen id (after the first poll succeeds).
    if post is not None:
        ids = post['id']
    ws.send(json.dumps({"id": 6, "method": "get_discussions_by_created",
                        "params": [{"tag": "", "limit": "1"}]}))
    post = json.loads(ws.recv())['result'][0]
    # Check whether a new post appeared.
    if ids != post['id']:
        print('Got new one')
        try:
            tags = json.loads(post['json_metadata'])['tags']
        except (ValueError, KeyError, TypeError):
            tags = []
        # BUG FIX: the original stored the *previous* post id ('id': ids);
        # store the id of the newly fetched post instead.
        write = {'id': post['id'],
                 'author': post['author'],
                 'permlink': post['permlink'],
                 'timestamp': post['created'].split('T')[0],
                 'title': post['title'],
                 'body': re.sub(r"<.*?>", ' ', post['body']),
                 'tags': tags
                 }
        db.posts.insert_one(write)
| [
"re.sub",
"pymongo.MongoClient",
"json.dumps",
"websocket.WebSocket"
] | [((235, 246), 'websocket.WebSocket', 'WebSocket', ([], {}), '()\n', (244, 246), False, 'from websocket import WebSocket\n'), ((210, 223), 'pymongo.MongoClient', 'MongoClient', ([], {}), '()\n', (221, 223), False, 'from pymongo import MongoClient\n'), ((354, 461), 'json.dumps', 'json.dumps', (["{'id': 6, 'method': 'get_discussions_by_created', 'params': [{'tag': '',\n 'limit': '100'}]}"], {}), "({'id': 6, 'method': 'get_discussions_by_created', 'params': [{\n 'tag': '', 'limit': '100'}]})\n", (364, 461), False, 'import json\n'), ((953, 984), 're.sub', 're.sub', (['"""<.*?>"""', '""" """', "i['body']"], {}), "('<.*?>', ' ', i['body'])\n", (959, 984), False, 'import re\n'), ((1268, 1373), 'json.dumps', 'json.dumps', (["{'id': 6, 'method': 'get_discussions_by_created', 'params': [{'tag': '',\n 'limit': '1'}]}"], {}), "({'id': 6, 'method': 'get_discussions_by_created', 'params': [{\n 'tag': '', 'limit': '1'}]})\n", (1278, 1373), False, 'import json\n'), ((1917, 1951), 're.sub', 're.sub', (['"""<.*?>"""', '""" """', "post['body']"], {}), "('<.*?>', ' ', post['body'])\n", (1923, 1951), False, 'import re\n')] |
'''
Description:
Given an array A of integers, return true if and only if it is a valid mountain array.
Recall that A is a mountain array if and only if:
A.length >= 3
There exists some i with 0 < i < A.length - 1 such that:
A[0] < A[1] < ... A[i-1] < A[i]
A[i] > A[i+1] > ... > A[A.length - 1]
Example 1:
Input: [2,1]
Output: false
Example 2:
Input: [3,5,5]
Output: false
Example 3:
Input: [0,3,2,1]
Output: true
Note:
0 <= A.length <= 10000
0 <= A[i] <= 10000
Hint #1
It's very easy to keep track of a monotonically increasing or decreasing ordering of elements. You just need to be able to determine the start of the valley in the mountain and from that point onwards, it should be a valley i.e. no mini-hills after that.
'''
from typing import List
class Solution:
    def validMountainArray(self, A: List[int]) -> bool:
        """Return True iff A strictly increases to a peak, then strictly decreases.

        The peak may not sit at either end, so at least three elements are
        required; any plateau (equal neighbours) disqualifies the array.
        """
        size = len(A)
        idx = 0
        # Climb the strictly-increasing slope.
        while idx + 1 < size and A[idx] < A[idx + 1]:
            idx += 1
        # The peak cannot be the first or last element (covers size < 3 too).
        if idx == 0 or idx == size - 1:
            return False
        # Descend the strictly-decreasing slope.
        while idx + 1 < size and A[idx] > A[idx + 1]:
            idx += 1
        # Valid only if the descent consumed the rest of the array.
        return idx == size - 1
# n : the length of input array, A
## Time Complexity: O( n )
#
# The overhead in time is the cost of linear scan, which is of O( n )
## Space Complexity: O( 1 )
#
# The overhead in space is the storage for loop index and temporary flag, which is of O( 1 )
from collections import namedtuple

TestEntry = namedtuple('TestEntry', 'sequence')

def test_bench():
    """Print the solver's verdict for each case (expected value noted inline)."""
    cases = [
        TestEntry(sequence=[2, 1]),           # False
        TestEntry(sequence=[3, 5, 5]),        # False
        TestEntry(sequence=[0, 3, 2, 1]),     # True
        TestEntry(sequence=[1, 2, 3, 4]),     # False
        TestEntry(sequence=[4, 3, 2, 1]),     # False
        TestEntry(sequence=[1, 2, 3, 2, 1]),  # True
        TestEntry(sequence=[1, 3, 3, 2, 1]),  # False
        TestEntry(sequence=[1, 2, 3, 3, 1]),  # False
        TestEntry(sequence=[1, 5, 3, 5, 1]),  # False
        TestEntry(sequence=[]),               # corner case, False
    ]
    solver = Solution()
    for entry in cases:
        print(solver.validMountainArray(A=entry.sequence))

if __name__ == '__main__':
    test_bench()
"collections.namedtuple"
] | [((2007, 2042), 'collections.namedtuple', 'namedtuple', (['"""TestEntry"""', '"""sequence"""'], {}), "('TestEntry', 'sequence')\n", (2017, 2042), False, 'from collections import namedtuple\n')] |
from typing import TypeVar, Generic, Iterable, Tuple

T = TypeVar('T')

class ImmutableSequence(Generic[T]):
    """An immutable sequence backed by a tuple.

    Supports ``len()``, iteration, indexing/slicing, equality and — since
    it is immutable — hashing, so instances can be dict keys or set members
    (provided the contained items are themselves hashable).
    """

    def __init__(self, items: Iterable[T]):
        # Copy into a tuple so later mutation of *items* cannot affect us.
        self._items: Tuple[T, ...] = tuple(items)

    def __eq__(self, other: object) -> bool:
        """Equal when *other* is an ImmutableSequence with identical attributes."""
        if not isinstance(other, ImmutableSequence):
            # FIX: return NotImplemented (not False) for foreign types so
            # Python can try the reflected comparison on `other`.
            return NotImplemented
        return self.__dict__ == other.__dict__

    def __ne__(self, other: object) -> bool:
        """Inverse of __eq__, delegating foreign types to the other operand."""
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __hash__(self) -> int:
        # Consistent with __eq__: instances that compare equal necessarily
        # hold equal item tuples, hence equal hashes.
        return hash(self._items)

    def __repr__(self) -> str:
        class_name = self.__class__.__name__
        attrs = {k: getattr(self, k) for k in vars(self)}
        if "__immutable__" in attrs:  # pragma: no cover
            del attrs["__immutable__"]
        attrs_string = " ".join(f"{k}={v}" for k, v in attrs.items())
        return f"{class_name}({attrs_string})"

    def __len__(self) -> int:
        return len(self._items)

    def __iter__(self):
        # Yield items in insertion order.
        yield from self._items

    def __getitem__(self, key) -> T:
        return self._items[key]
| [
"typing.TypeVar"
] | [((58, 70), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (65, 70), False, 'from typing import TypeVar, Generic, Iterable, Tuple\n')] |
#!/usr/bin/env python3
"""
ROS component that implement a ball detector
"""
# Import of libraries
import sys
import time
import numpy as np
from scipy.ndimage import filters
import imutils
import cv2
import roslib
import rospy
from sensor_msgs.msg import CompressedImage
from sensoring.srv import DetectImage,DetectImageResponse
## Variable for logging purpose
VERBOSE = False
class image_feature:
    """
    Detects a green ball in the robot camera stream.

    Subscribes to the compressed camera topic, thresholds each frame in HSV
    space for green, and keeps the centre (x pixel coordinate) and radius of
    the largest green blob found.

    Attributes
    -----
    @param subscriber: subscriber to the compressed camera topic
    @type subscriber: Subscriber
    @param resp_center: x pixel coordinate of the ball centre (-1 if not seen)
    @type resp_center: int
    @param resp_radius: radius of the ball in pixels (-1 if not seen)
    @type resp_radius: int

    Methods
    -----
    getCenter():
        Get the center of the ball
    getRadius()
        Get the radius of the ball
    callback(ros_data)
        Callback of the camera topic: decode the image and detect the ball
    """
    def __init__(self):
        '''
        Constructor. Initialize the node and the attributes and subscribe
        to the compressed-image topic of the camera.
        '''
        rospy.init_node('image_detector', anonymous=True)
        ## ROS Subscriber object for getting the images
        self.subscriber = rospy.Subscriber("/robot/camera1/image_raw/compressed",CompressedImage, self.callback, queue_size=1)
        ## Center (x coordinate) of the ball; -1 when the ball is not visible
        self.resp_center = -1
        ## Radius of the ball; -1 when the ball is not visible
        self.resp_radius = -1
    def getCenter(self):
        '''
        Get the center of the ball
        @returns: x pixel coordinate of the ball center (-1 if not visible)
        @rtype: int
        '''
        return self.resp_center
    def getRadius(self):
        '''
        Get the radius of the ball
        @returns: radius of the ball in pixels (-1 if not visible)
        @rtype: int
        '''
        return self.resp_radius
    def callback(self, ros_data):
        '''
        Callback function: decode the compressed image, threshold it for
        green in HSV space and track the largest green blob.
        '''
        if VERBOSE:
            print ('received image of type: "%s"' % ros_data.format)
        #### direct conversion to CV2 ####
        # BUG FIX: np.fromstring is deprecated for binary input (emits a
        # DeprecationWarning and was later removed); np.frombuffer is the
        # documented replacement and avoids copying the byte buffer.
        np_arr = np.frombuffer(ros_data.data, np.uint8)
        image_np = cv2.imdecode(np_arr, cv2.IMREAD_COLOR)  # OpenCV >= 3.0:
        # HSV range considered "green".
        greenLower = (50, 50, 20)
        greenUpper = (70, 255, 255)
        # Blur to reduce noise, build a binary mask of green pixels, then
        # clean it up with erosion/dilation.
        blurred = cv2.GaussianBlur(image_np, (11, 11), 0)
        hsv = cv2.cvtColor(blurred, cv2.COLOR_BGR2HSV)
        mask = cv2.inRange(hsv, greenLower, greenUpper)
        mask = cv2.erode(mask, None, iterations=2)
        mask = cv2.dilate(mask, None, iterations=2)
        #cv2.imshow('mask', mask)
        cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                cv2.CHAIN_APPROX_SIMPLE)
        cnts = imutils.grab_contours(cnts)
        center = None
        # only proceed if at least one contour was found
        if len(cnts) > 0:
            # find the largest contour in the mask, then use
            # it to compute the minimum enclosing circle and
            # centroid
            c = max(cnts, key=cv2.contourArea)
            ((x, y), radius) = cv2.minEnclosingCircle(c)
            M = cv2.moments(c)
            center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))
            # only proceed if the radius meets a minimum size
            if radius > 10:
                # draw the circle and centroid on the frame
                cv2.circle(image_np, (int(x), int(y)), int(radius),
                    (0, 255, 255), 2)
                cv2.circle(image_np, center, 5, (0, 0, 255), -1)
                self.resp_center = center[0]
                self.resp_radius = radius
            # NOTE(review): when a contour exists but radius <= 10 the
            # previous detection is kept (not reset to -1) -- confirm
            # this is intended.
        else:
            self.resp_center = -1
            self.resp_radius = -1
        cv2.imshow('window', image_np)
        cv2.waitKey(2)
class ball_info:
"""
A class used to represent a service for providing the radius
and center of the ball
Attributes
-----
@param ic: istance of class image_feature
@type ic: image_feature
@param s: service object
@type s: Service
Methods
-----
handle_object(req):
Received a request and reply with the center and radius
of the ball
"""
def __init__(self):
'''
Constuctor. Initialize the node and service, create an instance of the class
image_feature
'''
rospy.init_node('image_detector', anonymous=True)
## Image feature object
self.ic = image_feature()
## ROS service object
self.s = rospy.Service('detect_image', DetectImage, self.handle_object)
def handle_object(self,req):
"""
Received a request and reply with the center and radius
of the ball(the request is empty)
@returns: radius and center of the ball
@rtype: DetectImageResponse
"""
resp = DetectImageResponse()
resp.object = str(self.ic.getCenter())+" "+str(self.ic.getRadius())
return resp
def main(args):
'''
Main function.Starting the nodes
'''
c = ball_info()
try:
rospy.spin()
except KeyboardInterrupt:
print ("Shutting down ROS Image feature detector module")
cv2.destroyAllWindows()
if __name__ == '__main__':
main(sys.argv) | [
"rospy.init_node",
"cv2.imshow",
"cv2.destroyAllWindows",
"cv2.imdecode",
"cv2.erode",
"rospy.Service",
"imutils.grab_contours",
"rospy.spin",
"numpy.fromstring",
"rospy.Subscriber",
"cv2.waitKey",
"cv2.minEnclosingCircle",
"sensoring.srv.DetectImageResponse",
"cv2.circle",
"cv2.cvtColor... | [((5511, 5534), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (5532, 5534), False, 'import cv2\n'), ((1227, 1276), 'rospy.init_node', 'rospy.init_node', (['"""image_detector"""'], {'anonymous': '(True)'}), "('image_detector', anonymous=True)\n", (1242, 1276), False, 'import rospy\n'), ((1361, 1466), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/robot/camera1/image_raw/compressed"""', 'CompressedImage', 'self.callback'], {'queue_size': '(1)'}), "('/robot/camera1/image_raw/compressed', CompressedImage,\n self.callback, queue_size=1)\n", (1377, 1466), False, 'import rospy\n'), ((2277, 2315), 'numpy.fromstring', 'np.fromstring', (['ros_data.data', 'np.uint8'], {}), '(ros_data.data, np.uint8)\n', (2290, 2315), True, 'import numpy as np\n'), ((2335, 2373), 'cv2.imdecode', 'cv2.imdecode', (['np_arr', 'cv2.IMREAD_COLOR'], {}), '(np_arr, cv2.IMREAD_COLOR)\n', (2347, 2373), False, 'import cv2\n'), ((2482, 2521), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['image_np', '(11, 11)', '(0)'], {}), '(image_np, (11, 11), 0)\n', (2498, 2521), False, 'import cv2\n'), ((2536, 2576), 'cv2.cvtColor', 'cv2.cvtColor', (['blurred', 'cv2.COLOR_BGR2HSV'], {}), '(blurred, cv2.COLOR_BGR2HSV)\n', (2548, 2576), False, 'import cv2\n'), ((2592, 2632), 'cv2.inRange', 'cv2.inRange', (['hsv', 'greenLower', 'greenUpper'], {}), '(hsv, greenLower, greenUpper)\n', (2603, 2632), False, 'import cv2\n'), ((2648, 2683), 'cv2.erode', 'cv2.erode', (['mask', 'None'], {'iterations': '(2)'}), '(mask, None, iterations=2)\n', (2657, 2683), False, 'import cv2\n'), ((2699, 2735), 'cv2.dilate', 'cv2.dilate', (['mask', 'None'], {'iterations': '(2)'}), '(mask, None, iterations=2)\n', (2709, 2735), False, 'import cv2\n'), ((2906, 2933), 'imutils.grab_contours', 'imutils.grab_contours', (['cnts'], {}), '(cnts)\n', (2927, 2933), False, 'import imutils\n'), ((3952, 3982), 'cv2.imshow', 'cv2.imshow', (['"""window"""', 'image_np'], {}), "('window', image_np)\n", (3962, 3982), False, 
'import cv2\n'), ((3991, 4005), 'cv2.waitKey', 'cv2.waitKey', (['(2)'], {}), '(2)\n', (4002, 4005), False, 'import cv2\n'), ((4649, 4698), 'rospy.init_node', 'rospy.init_node', (['"""image_detector"""'], {'anonymous': '(True)'}), "('image_detector', anonymous=True)\n", (4664, 4698), False, 'import rospy\n'), ((4814, 4876), 'rospy.Service', 'rospy.Service', (['"""detect_image"""', 'DetectImage', 'self.handle_object'], {}), "('detect_image', DetectImage, self.handle_object)\n", (4827, 4876), False, 'import rospy\n'), ((5168, 5189), 'sensoring.srv.DetectImageResponse', 'DetectImageResponse', ([], {}), '()\n', (5187, 5189), False, 'from sensoring.srv import DetectImage, DetectImageResponse\n'), ((5398, 5410), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (5408, 5410), False, 'import rospy\n'), ((3262, 3287), 'cv2.minEnclosingCircle', 'cv2.minEnclosingCircle', (['c'], {}), '(c)\n', (3284, 3287), False, 'import cv2\n'), ((3304, 3318), 'cv2.moments', 'cv2.moments', (['c'], {}), '(c)\n', (3315, 3318), False, 'import cv2\n'), ((3731, 3779), 'cv2.circle', 'cv2.circle', (['image_np', 'center', '(5)', '(0, 0, 255)', '(-1)'], {}), '(image_np, center, 5, (0, 0, 255), -1)\n', (3741, 3779), False, 'import cv2\n')] |
import logging
import datetime
from concurrent.futures import ThreadPoolExecutor
thread_pool = ThreadPoolExecutor()
def debug():
logger = logging.getLogger('discord')
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(filename=f'Logs/{datetime.date.today()}-debug.log', encoding='utf-8')
handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
def info():
logger = logging.getLogger('discord')
logger.setLevel(logging.INFO)
handler = logging.FileHandler(filename=f'Logs/{datetime.date.today()}-info.log', encoding='utf-8')
handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
def print_to_console(message):
print(message)
with open(f"Logs/{datetime.date.today()}-console.log", "a+", encoding='utf-8') as file:
output = f"{datetime.datetime.now()} -- {message}\n"
file.write(output)
def console(message):
# run in a separate thread to avoid blocking.
(thread_pool.submit(print_to_console, message)).result()
def logfile(message):
with open(f"Logs/{datetime.date.today()}-info.log", "a+", encoding='utf-8') as file:
output = f"{datetime.datetime.now()} -- {message}\n"
file.write(output)
| [
"logging.getLogger",
"concurrent.futures.ThreadPoolExecutor",
"logging.Formatter",
"datetime.datetime.now",
"datetime.date.today"
] | [((96, 116), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {}), '()\n', (114, 116), False, 'from concurrent.futures import ThreadPoolExecutor\n'), ((145, 173), 'logging.getLogger', 'logging.getLogger', (['"""discord"""'], {}), "('discord')\n", (162, 173), False, 'import logging\n'), ((466, 494), 'logging.getLogger', 'logging.getLogger', (['"""discord"""'], {}), "('discord')\n", (483, 494), False, 'import logging\n'), ((338, 406), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s:%(levelname)s:%(name)s: %(message)s"""'], {}), "('%(asctime)s:%(levelname)s:%(name)s: %(message)s')\n", (355, 406), False, 'import logging\n'), ((657, 725), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s:%(levelname)s:%(name)s: %(message)s"""'], {}), "('%(asctime)s:%(levelname)s:%(name)s: %(message)s')\n", (674, 725), False, 'import logging\n'), ((922, 945), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (943, 945), False, 'import datetime\n'), ((1258, 1281), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1279, 1281), False, 'import datetime\n'), ((260, 281), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (279, 281), False, 'import datetime\n'), ((580, 601), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (599, 601), False, 'import datetime\n'), ((832, 853), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (851, 853), False, 'import datetime\n'), ((1171, 1192), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (1190, 1192), False, 'import datetime\n')] |
from model.user_form import UserForm
import random
import string
# def random_string(prefix, maxlen):
# symbols = string.ascii_letters + string.digits + " " * 10
# return prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))])
#
#
# testdata =[
# UserForm(firstname=random_string("firstname", 10), lastname=random_string("lastname", 10),
# middlename=random_string("middlename:", 10),homepage=random_string("homepage:", 10),
# nickname=random_string("nickname:", 10), address=random_string("address:", 10),
# company=random_string("company:", 10), email=random_string("email:", 10))
# for i in range(5)
# ]
testdata = [
UserForm(firstname="firstname", lastname="lastname", middlename="miDname", nickname="nickname",
homepage="homePage")
] | [
"model.user_form.UserForm"
] | [((714, 834), 'model.user_form.UserForm', 'UserForm', ([], {'firstname': '"""firstname"""', 'lastname': '"""lastname"""', 'middlename': '"""miDname"""', 'nickname': '"""nickname"""', 'homepage': '"""homePage"""'}), "(firstname='firstname', lastname='lastname', middlename='miDname',\n nickname='nickname', homepage='homePage')\n", (722, 834), False, 'from model.user_form import UserForm\n')] |
# project/tests/test_users.py
import json
from project.tests.base import BaseTestCase
from project import create_app
app = create_app()
class TestSendersService(BaseTestCase):
"""Tests for the Users Service."""
def test_all_senders(self):
"""Ensure get all senders behaves correctly."""
with self.client:
response = self.client.get('/settings/senders')
if response.status == 'error':
self.assertEqual(response.status, "error")
return
data = json.loads(response.data.decode())
self.assertTrue('id' in data['data']['senders'][0])
self.assertTrue('status' in data['data']['senders'][0])
self.assertTrue('login' in data['data']['senders'][0])
self.assertTrue('label' in data['data']['senders'][0])
self.assertIn('s170503t03', data['data']['senders'][0]['label'])
| [
"project.create_app"
] | [((127, 139), 'project.create_app', 'create_app', ([], {}), '()\n', (137, 139), False, 'from project import create_app\n')] |
from django.test import TestCase
from django.core.urlresolvers import reverse
from http import HTTPStatus
from django.contrib.auth.models import Group
class SurveyRenderTestCase(TestCase):
@classmethod
def setUpTestData(cls):
Group.objects.create(name='admin')
def setUp(self):
pass
class WellTestCase(SurveyRenderTestCase):
fixtures = ['well_detail_fixture', 'survey_well_fixture']
def test_well(self):
url = reverse('well_detail', kwargs={'pk': '123'})
response = self.client.get(url)
self.assertEqual(response.status_code, HTTPStatus.OK)
survey_id = ' id="survey:'
self.assertContains(response, survey_id, count=1) # one and only one survey included
survey_id = survey_id + '495a9927-5a13-490e-bf1d-08bf2048b098'
self.assertContains(response, survey_id) # the right survey included
class DisabledSurveyTestCase(SurveyRenderTestCase):
fixtures = ['well_detail_fixture', 'survey_well_disabled_survey_fixture']
def test_disabled_survey(self):
url = reverse('well_detail', kwargs={'pk': '123'})
response = self.client.get(url)
self.assertEqual(response.status_code, HTTPStatus.OK)
survey_id = 'id="survey:'
self.assertContains(response, survey_id, count=0) # one and only one survey included
class RegistryTestCase(SurveyRenderTestCase):
fixtures = ['survey_registry_fixture']
def test_registry(self):
url = reverse('registry-legacy')
response = self.client.get(url)
self.assertEqual(response.status_code, HTTPStatus.OK)
survey_id = 'id="survey:'
self.assertContains(response, survey_id, count=1) # one and only one survey included
survey_id = survey_id + '495a9927-5a13-490e-bf1d-08bf2048b098'
self.assertContains(response, survey_id) # the right survey included
class SearchTestCase(SurveyRenderTestCase):
fixtures = ['survey_search_fixture']
def test_search(self):
url = reverse('search')
response = self.client.get(url)
self.assertEqual(response.status_code, HTTPStatus.OK)
survey_id = 'id="survey:'
self.assertContains(response, survey_id, count=1) # one and only one survey included
survey_id = survey_id + '495a9927-5a13-490e-bf1d-08bf2048b098'
self.assertContains(response, survey_id) # the right survey included
class NoSurveysTestCase(SurveyRenderTestCase):
fixtures = ['well_detail_fixture']
def test_no_surveys(self):
url = reverse('well_detail', kwargs={'pk': '123'})
response = self.client.get(url)
self.assertEqual(response.status_code, HTTPStatus.OK)
survey_id = 'id="survey:'
self.assertContains(response, survey_id, count=0)
class MultipleSurveysTestCase(SurveyRenderTestCase):
fixtures = ['well_detail_fixture', 'survey_well_fixture1', 'survey_well_fixture2']
def test_multiple_surveys(self):
url = reverse('well_detail', kwargs={'pk': '123'})
response = self.client.get(url)
self.assertEqual(response.status_code, HTTPStatus.OK)
survey_id = 'id="survey:'
self.assertContains(response, survey_id, count=2) # 2 surveys included
survey_id1 = survey_id + '495a9927-5a13-490e-bf1d-08bf2048b098'
self.assertContains(response, survey_id1) # the first survey is included
survey_id2 = survey_id + '6ec24102-129d-44ea-ad3d-67c2517e3fb9'
self.assertContains(response, survey_id2) # the second survey is included
| [
"django.core.urlresolvers.reverse",
"django.contrib.auth.models.Group.objects.create"
] | [((244, 278), 'django.contrib.auth.models.Group.objects.create', 'Group.objects.create', ([], {'name': '"""admin"""'}), "(name='admin')\n", (264, 278), False, 'from django.contrib.auth.models import Group\n'), ((460, 504), 'django.core.urlresolvers.reverse', 'reverse', (['"""well_detail"""'], {'kwargs': "{'pk': '123'}"}), "('well_detail', kwargs={'pk': '123'})\n", (467, 504), False, 'from django.core.urlresolvers import reverse\n'), ((1068, 1112), 'django.core.urlresolvers.reverse', 'reverse', (['"""well_detail"""'], {'kwargs': "{'pk': '123'}"}), "('well_detail', kwargs={'pk': '123'})\n", (1075, 1112), False, 'from django.core.urlresolvers import reverse\n'), ((1478, 1504), 'django.core.urlresolvers.reverse', 'reverse', (['"""registry-legacy"""'], {}), "('registry-legacy')\n", (1485, 1504), False, 'from django.core.urlresolvers import reverse\n'), ((2013, 2030), 'django.core.urlresolvers.reverse', 'reverse', (['"""search"""'], {}), "('search')\n", (2020, 2030), False, 'from django.core.urlresolvers import reverse\n'), ((2544, 2588), 'django.core.urlresolvers.reverse', 'reverse', (['"""well_detail"""'], {'kwargs': "{'pk': '123'}"}), "('well_detail', kwargs={'pk': '123'})\n", (2551, 2588), False, 'from django.core.urlresolvers import reverse\n'), ((2977, 3021), 'django.core.urlresolvers.reverse', 'reverse', (['"""well_detail"""'], {'kwargs': "{'pk': '123'}"}), "('well_detail', kwargs={'pk': '123'})\n", (2984, 3021), False, 'from django.core.urlresolvers import reverse\n')] |
import numpy as np
import openpnm as op
from porespy.filters import trim_nonpercolating_paths
import collections
def tortuosity(im, axis, return_im=False, **kwargs):
r"""
Calculates tortuosity of given image in specified direction
Parameters
----------
im : ND-image
The binary image to analyze with ``True`` indicating phase of interest
axis : int
The axis along which to apply boundary conditions
return_im : boolean
If ``True`` then the resulting tuple contains a copy of the input
image with the concentration profile.
Returns
-------
results : tuple
A named-tuple containing:
* ``tortuosity`` calculated using the ``effective_porosity`` as
* ``effective_porosity`` of the image after applying
``trim_nonpercolating_paths``. This removes disconnected
voxels which cause singular matrices.
:math:`(D_{AB}/D_{eff}) \cdot \varepsilon`.
* ``original_porosity`` of the image as given
* ``formation_factor`` found as :math:`D_{AB}/D_{eff}`.
* ``image`` containing the concentration values from the simulation.
This is only returned if ``return_im`` is ``True``.
"""
if axis > (im.ndim - 1):
raise Exception("Axis argument is too high")
# Obtain original porosity
porosity_orig = im.sum()/im.size
# removing floating pores
im = trim_nonpercolating_paths(im, inlet_axis=axis, outlet_axis=axis)
# porosity is changed because of trimmimg floating pores
porosity_true = im.sum()/im.size
if porosity_true < porosity_orig:
print('Caution, True porosity is:', porosity_true,
'and volume fraction filled:',
abs(porosity_orig-porosity_true)*100, '%')
# cubic network generation
net = op.network.CubicTemplate(template=im, spacing=1)
# adding phase
water = op.phases.Water(network=net)
water['throat.diffusive_conductance'] = 1 # dummy value
# running Fickian Diffusion
fd = op.algorithms.FickianDiffusion(network=net, phase=water)
# choosing axis of concentration gradient
inlets = net['pore.coords'][:, axis] <= 1
outlets = net['pore.coords'][:, axis] >= im.shape[axis]-1
# boundary conditions on concentration
C_in = 1.0
C_out = 0.0
fd.set_value_BC(pores=inlets, values=C_in)
fd.set_value_BC(pores=outlets, values=C_out)
# Use specified solver if given
if 'solver_family' in kwargs.keys():
fd.settings.update(kwargs)
else: # Use pyamg otherwise, if presnet
try:
import pyamg
fd.settings['solver_family'] = 'pyamg'
except ModuleNotFoundError: # Use scipy cg as last resort
fd.settings['solver_family'] = 'scipy'
fd.settings['solver_type'] = 'cg'
op.utils.tic()
fd.run()
op.utils.toc()
# calculating molar flow rate, effective diffusivity and tortuosity
rate_out = fd.rate(pores=outlets)[0]
rate_in = fd.rate(pores=inlets)[0]
if not np.allclose(-rate_out, rate_in):
raise Exception('Something went wrong, inlet and outlet rate do not match')
delta_C = C_in - C_out
L = im.shape[axis]
A = np.prod(im.shape)/L
N_A = A/L*delta_C
Deff = rate_in/N_A
tau = porosity_true/(Deff)
result = collections.namedtuple('tortuosity_result', ['tortuosity',
'effective_porosity',
'original_porosity',
'formation_factor',
'image'])
result.tortuosity = tau
result.formation_factor = 1/Deff
result.original_porosity = porosity_orig
result.effective_porosity = porosity_true
if return_im:
conc = np.zeros([im.size, ], dtype=float)
conc[net['pore.template_indices']] = fd['pore.concentration']
conc = np.reshape(conc, newshape=im.shape)
result.image = conc
else:
result.image = None
return result
| [
"numpy.prod",
"openpnm.utils.toc",
"openpnm.phases.Water",
"openpnm.utils.tic",
"collections.namedtuple",
"numpy.allclose",
"numpy.reshape",
"porespy.filters.trim_nonpercolating_paths",
"numpy.zeros",
"openpnm.algorithms.FickianDiffusion",
"openpnm.network.CubicTemplate"
] | [((1416, 1480), 'porespy.filters.trim_nonpercolating_paths', 'trim_nonpercolating_paths', (['im'], {'inlet_axis': 'axis', 'outlet_axis': 'axis'}), '(im, inlet_axis=axis, outlet_axis=axis)\n', (1441, 1480), False, 'from porespy.filters import trim_nonpercolating_paths\n'), ((1819, 1867), 'openpnm.network.CubicTemplate', 'op.network.CubicTemplate', ([], {'template': 'im', 'spacing': '(1)'}), '(template=im, spacing=1)\n', (1843, 1867), True, 'import openpnm as op\n'), ((1899, 1927), 'openpnm.phases.Water', 'op.phases.Water', ([], {'network': 'net'}), '(network=net)\n', (1914, 1927), True, 'import openpnm as op\n'), ((2030, 2086), 'openpnm.algorithms.FickianDiffusion', 'op.algorithms.FickianDiffusion', ([], {'network': 'net', 'phase': 'water'}), '(network=net, phase=water)\n', (2060, 2086), True, 'import openpnm as op\n'), ((2825, 2839), 'openpnm.utils.tic', 'op.utils.tic', ([], {}), '()\n', (2837, 2839), True, 'import openpnm as op\n'), ((2857, 2871), 'openpnm.utils.toc', 'op.utils.toc', ([], {}), '()\n', (2869, 2871), True, 'import openpnm as op\n'), ((3319, 3454), 'collections.namedtuple', 'collections.namedtuple', (['"""tortuosity_result"""', "['tortuosity', 'effective_porosity', 'original_porosity',\n 'formation_factor', 'image']"], {}), "('tortuosity_result', ['tortuosity',\n 'effective_porosity', 'original_porosity', 'formation_factor', 'image'])\n", (3341, 3454), False, 'import collections\n'), ((3035, 3066), 'numpy.allclose', 'np.allclose', (['(-rate_out)', 'rate_in'], {}), '(-rate_out, rate_in)\n', (3046, 3066), True, 'import numpy as np\n'), ((3210, 3227), 'numpy.prod', 'np.prod', (['im.shape'], {}), '(im.shape)\n', (3217, 3227), True, 'import numpy as np\n'), ((3872, 3904), 'numpy.zeros', 'np.zeros', (['[im.size]'], {'dtype': 'float'}), '([im.size], dtype=float)\n', (3880, 3904), True, 'import numpy as np\n'), ((3992, 4027), 'numpy.reshape', 'np.reshape', (['conc'], {'newshape': 'im.shape'}), '(conc, newshape=im.shape)\n', (4002, 4027), True, 'import 
numpy as np\n')] |
import unittest
import numpy
from oo_trees.dataset import *
from oo_trees.decision_tree import *
from oo_trees.attribute import *
class TestDecisionTree(unittest.TestCase):
def test_classification(self):
X = numpy.array([[0, 1], [0, 0], [1, 0], [1, 1]])
y = numpy.array(['H', 'H', 'H', 'T'])
dataset = Dataset(X, y)
tree = DecisionTree(dataset)
self.assertEqual(len(tree.branches), 2)
self.assertEqual(len(tree.branches[1].branches), 0)
self.assertEqual(len(tree.branches[0].branches), 2)
self.assertEqual(len(tree.branches[0].branches[1].branches), 0)
self.assertEqual(len(tree.branches[0].branches[0].branches), 0)
self.assertEqual(tree.classify([0, 0]), 'H')
self.assertEqual(tree.classify([0, 1]), 'H')
self.assertEqual(tree.classify([1, 0]), 'H')
self.assertEqual(tree.classify([1, 1]), 'T')
self.assertEqual(tree.classify([2, 0]), 'H') # it can handle unknown values too
def test_min_points(self):
X = numpy.array([[0], [1], [1]])
y = numpy.array(['H', 'T', 'T'])
dataset = Dataset(X, y)
tree = DecisionTree(dataset, min_samples_split=0)
self.assertEqual(len(tree.branches), 2)
tree = DecisionTree(dataset, min_samples_split=5)
self.assertEqual(len(tree.branches), 0)
self.assertEqual(tree.leaf_value(), 'T')
def test_max_depth(self):
X = numpy.array([[0], [1], [1]])
y = numpy.array(['H', 'T', 'T'])
dataset = Dataset(X, y)
tree = DecisionTree(dataset, max_depth=3)
self.assertEqual(len(tree.branches), 2)
numpy.testing.assert_array_equal([2, 2],
[t.depth for t in tree.branches.values()])
tree = DecisionTree(dataset, max_depth=1)
self.assertEqual(len(tree.branches), 0)
self.assertEqual(tree.leaf_value(), 'T')
def test_performance_on(self):
# x1 < 0.25 => 'a'
# x1 >= 0.25, x2 = 0 => 'b'
# x1 < 0.50, x2 = 1 => 'c'
# x1 >= 0.50, x2 = 1 => 'a'
Xtrain = numpy.array([[0.15, 0], [0.232, 1], [0.173, 0], [0.263, 0], [0.671, 0], [0.9, 0], [0.387, 1], [0.482, 1], [0.632, 1], [0.892, 1]])
ytrain = numpy.array([ 'a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'a', 'a'])
training_dataset = Dataset(Xtrain, ytrain, [NumericAttribute(0), CategoricalAttribute(1)])
tree = DecisionTree(training_dataset)
# expecting
# Real
# a b c
#Pred a 2 0 2
#
# b 1 2 0
#
# c 1 0 2
# accuracy: 6/10
# a,a a,a a,c a,c b,a b,b b,b c,a c,c c,c
Xtest = numpy.array([[0.13, 0], [0.73, 1], [0.47, 1], [0.33, 1], [0.7, 1], [0.3, 0], [0.5, 0], [0.1, 1], [0.476, 1], [0.265, 1]])
ytest = numpy.array([ 'a', 'a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'c'])
test_dataset = Dataset(Xtest, ytest, [NumericAttribute(0), CategoricalAttribute(1)])
performance = tree.performance_on(test_dataset)
self.assertEqual(performance.accuracy, 0.6)
numpy.testing.assert_array_equal(performance.to_array(), [[2,0,2], [1,2,0], [1,0,2]])
| [
"numpy.array"
] | [((221, 266), 'numpy.array', 'numpy.array', (['[[0, 1], [0, 0], [1, 0], [1, 1]]'], {}), '([[0, 1], [0, 0], [1, 0], [1, 1]])\n', (232, 266), False, 'import numpy\n'), ((279, 312), 'numpy.array', 'numpy.array', (["['H', 'H', 'H', 'T']"], {}), "(['H', 'H', 'H', 'T'])\n", (290, 312), False, 'import numpy\n'), ((1039, 1067), 'numpy.array', 'numpy.array', (['[[0], [1], [1]]'], {}), '([[0], [1], [1]])\n', (1050, 1067), False, 'import numpy\n'), ((1080, 1108), 'numpy.array', 'numpy.array', (["['H', 'T', 'T']"], {}), "(['H', 'T', 'T'])\n", (1091, 1108), False, 'import numpy\n'), ((1445, 1473), 'numpy.array', 'numpy.array', (['[[0], [1], [1]]'], {}), '([[0], [1], [1]])\n', (1456, 1473), False, 'import numpy\n'), ((1486, 1514), 'numpy.array', 'numpy.array', (["['H', 'T', 'T']"], {}), "(['H', 'T', 'T'])\n", (1497, 1514), False, 'import numpy\n'), ((2089, 2224), 'numpy.array', 'numpy.array', (['[[0.15, 0], [0.232, 1], [0.173, 0], [0.263, 0], [0.671, 0], [0.9, 0], [\n 0.387, 1], [0.482, 1], [0.632, 1], [0.892, 1]]'], {}), '([[0.15, 0], [0.232, 1], [0.173, 0], [0.263, 0], [0.671, 0], [\n 0.9, 0], [0.387, 1], [0.482, 1], [0.632, 1], [0.892, 1]])\n', (2100, 2224), False, 'import numpy\n'), ((2237, 2300), 'numpy.array', 'numpy.array', (["['a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'a', 'a']"], {}), "(['a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'a', 'a'])\n", (2248, 2300), False, 'import numpy\n'), ((2855, 2980), 'numpy.array', 'numpy.array', (['[[0.13, 0], [0.73, 1], [0.47, 1], [0.33, 1], [0.7, 1], [0.3, 0], [0.5, 0],\n [0.1, 1], [0.476, 1], [0.265, 1]]'], {}), '([[0.13, 0], [0.73, 1], [0.47, 1], [0.33, 1], [0.7, 1], [0.3, 0],\n [0.5, 0], [0.1, 1], [0.476, 1], [0.265, 1]])\n', (2866, 2980), False, 'import numpy\n'), ((2993, 3056), 'numpy.array', 'numpy.array', (["['a', 'a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'c']"], {}), "(['a', 'a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'c'])\n", (3004, 3056), False, 'import numpy\n')] |
#!/usr/local/bin/python
from sprint import sprint as print
import string
import uuid
import basics
import config
import useful
# ------ user editor
def print_users(pif):
table_info = pif.dbh.table_info['user']
entries = []
for user in pif.dbh.fetch_users():
user['user_id'] = '<a href="user.cgi?id={}">{}</a>'.format(user.id, user.user_id)
flags = [x[1] for x in table_info.get('bits', {}).get('flags', []) if (user['flags'] & int(x[0], 16))]
user['flags'] = '<br>'.join(flags)
entries.append(user)
lsection = dict(columns=table_info['columns'], headers=table_info['title'], range=[{'entry': entries}], note='')
llineup = {'section': [lsection], 'columns': lsection['columns']}
return pif.render.format_template('simplelistix.html', llineup=llineup)
def print_user_form(pif, id):
user = pif.dbh.fetch_user(id)
if not user:
return print_users(pif)
cols = ['title', 'value']
heads = dict(zip(cols, ['Titles', 'Values']))
entries = []
table_info = pif.dbh.table_info['user']
for col in table_info['columns']:
title = table_info['title'][col]
if col == 'id':
value = '<input type="hidden" name="id" value="{}"><div class="lefty">{}</div>'.format(user[col], user[col])
value += '<a href="user.cgi?delete=1&id={}">{}</a>'.format(
id, pif.render.format_button('delete', also={'style': 'float:right'}))
elif col in table_info.get('bits', {}):
value = pif.render.format_checkbox(col, table_info['bits'][col], useful.bit_list(user[col], format='%04x'))
elif col == 'email':
value = '<input type="text" name="{}" value="{}" size=60>'.format(col, user[col])
else:
value = pif.render.format_text_input(col, 80, value=user[col])
entries.append({'title': title, 'value': value})
lrange = dict(entry=entries, note='')
lsection = dict(columns=cols, headers=heads, range=[lrange], note='',
header='<form name="userform" method="post" action="/cgi-bin/user.cgi">' + pif.create_token())
llineup = dict(
section=[lsection],
footer='{} -\n{} -\n{}</form>'.format(
pif.render.format_button_input("save changes", "submit"),
pif.render.format_button_reset("userform"),
pif.render.format_button("change password", pif.secure_host + "/cgi-bin/chpass.cgi?id={}".format(id))))
return pif.render.format_template('simplelistix.html', llineup=llineup)
def delete_user(pif):
pif.dbh.delete_user(pif.form.get_str('id'))
def update_user(pif):
newuser = pif.dbh.fetch_user(user_id=pif.form.get_str('user_id'))
if newuser and newuser.id != pif.form.get_int('id'):
raise useful.SimpleError('The requested user ID is already in use.')
pif.form.set_val('flags', pif.form.get_bits('flags'))
pif.dbh.update_user(pif.form.get_int('id'), **pif.form.get_dict(keylist=pif.dbh.table_info['user']['columns']))
@basics.web_page
def user_main(pif):
pif.render.set_button_comment(pif)
pif.restrict('a')
pif.render.set_page_extra(pif.render.reset_button_js)
pif.render.print_html()
if pif.form.has('user_id'):
update_user(pif)
elif pif.form.has('delete'):
delete_user(pif)
elif pif.form.has('id'):
return print_user_form(pif, pif.form.get_str('id'))
return print_users(pif)
# ------ login
@basics.web_page
def login_main(pif):
if pif.form.has('user_id') and pif.form.has('p'):
user = pif.dbh.fetch_user(user_id=pif.form.get_str('user_id'), passwd=pif.form.get_str('p'))
if user:
pif.dbh.update_user_last_login(user.id)
pif.create_cookie(user)
if not user.flags & config.FLAG_USER_VERIFIED:
raise useful.Redirect('/cgi-bin/validate.cgi')
raise useful.Redirect(pif.form.get_str('dest', '/index.php'))
useful.warn("Login Failed!")
pif.render.print_html()
return pif.render.format_template('login.html', dest=pif.form.get_str('dest', '/index.php'),
register='signup.cgi?dest=' + pif.form.get_str('dest', '/index.php'),
forgot='recover.cgi')
# ------ logout
@basics.web_page
def logout_main(pif):
pif.dbh.delete_cookie(pif.user_id, ip=pif.remote_addr)
pif.render.set_cookie(pif.render.secure.clear_cookie(['id']))
raise useful.Redirect(pif.form.get_str('dest', '/'))
# ------ signup
def create(pif):
# os.environ['PYTHON_EGG_CACHE'] = '/var/tmp'
user_id = pif.form.get_str('user_id')
p1 = pif.form.get_str('p')
p2 = pif.form.get_str('p2')
email = pif.form.get_str('email')
if not user_id or (set(user_id) - set(string.ascii_letters + string.digits + '._')):
raise useful.SimpleError('That is not a legal user ID.')
if pif.dbh.fetch_user(user_id=user_id):
raise useful.SimpleError('That ID is already in use.')
if not email:
raise useful.SimpleError('Please specify an email address.')
if not p1 or p1 != p2:
raise useful.SimpleError('Please specify the same password in both password boxes.')
vkey = useful.generate_token(10)
rec_id = pif.dbh.create_user(passwd=p1, vkey=vkey, privs='b', **pif.form.form)
if rec_id:
user = pif.dbh.fetch_user(id=rec_id)
generate_signup_email(pif, user)
useful.warn("Your account has been created. Please check your email for the verification.")
raise useful.Redirect("/cgi-bin/validate.cgi")
return pif.render.format_template('signup.html', dest=pif.form.get_str('dest'))
def generate_signup_email(pif, user):
user['host'] = pif.server_name
user['secure_host'] = pif.secure_host
user['validate'] = "{secure_host}/cgi-bin/validate.cgi?user_id={user_id}&vkey={vkey}".format(**user.todict())
# user = {k: useful.url_quote(str(v), plus=True) for k, v in user.todict().items()}
print(user)
msg = '''To: "{user_id}" <{email}>
From: "Account Verification" <webmaster@{host}>
Subject: Verify your account
Content-Type: text/plain; charset=us-ascii
Content-Transfer-Encoding: 7bit
Dear {first_name} {last_name},
You have registered for an account on {host}. Please verify your new account
by visiting the following link:
<a href="{validate}">{validate}</a>
Or, the next time you log in, you will be taken to the verification page.
Enter this code to verify your account.
{vkey}
Thank you!
'''.format(**user.todict())
useful.simple_process(('/usr/sbin/sendmail', '-t',), msg)
@basics.web_page
def register_main(pif):
pif.render.print_html()
if pif.form.get_str('user_id'):
return create(pif)
return pif.render.format_template('signup.html', dest=pif.form.get_str('dest'))
# ------ chpass
@basics.web_page
def change_password_main(pif):
    # Page controller for /cgi-bin/chpass.cgi: lets a logged-in user change
    # their own password, or an admin ('a' privilege) change another user's.
    pif.render.title = 'Change Password'
    pif.render.hide_title = False
    pif.render.print_html()
    if not pif.user_id:
        raise useful.SimpleError("It doesn't look like you're logged in!")
    # Admins may target someone else's record by passing ?id=<user id>.
    if pif.form.has('id') and pif.is_allowed('a') and pif.form.get_int('id') != pif.user_id:
        user = pif.dbh.fetch_user(pif.form.get_int('id'))
    else:
        user = pif.user
    if not user:
        raise useful.SimpleError('That user record ({}) was not found.'.format(pif.user_id))
    if pif.is_allowed('a') and 'p1' in pif.form:
        # Admin path: no old-password check, just require the two new fields to match.
        user_id = pif.form.get_int('id')
        if pif.form.get_str('p1') != pif.form.get_str('p2'):
            useful.warn("The new passwords don't match!")
        else:
            pif.dbh.update_password(user_id, pif.form.get_str('p2'))
            useful.warn("This password has been changed.")
    elif 'op' in pif.form:
        # Self-service path: re-authenticate with the old password ('op') first.
        newuser = pif.dbh.fetch_user(user_id=pif.user_id, passwd=pif.form.get_str('op'))
        if not newuser:
            useful.warn("That password isn't correct!")
        elif pif.form.get_str('p1') != pif.form.get_str('p2'):
            useful.warn("The new passwords don't match!")
        else:
            pif.dbh.update_password(pif.user_id, pif.form.get_str('p2'))
            # Rotate the cookie key so any other live sessions are invalidated,
            # then re-issue this session's cookie.
            pif.dbh.update_user(pif.user_id, ckey=uuid.uuid4())
            pif.create_cookie()
            useful.warn("Your password has been changed.")
    # Render the (possibly re-shown) three-field password form.
    entries = [
        {'title': 'Old password:', 'value': '<input type="password" name="op">'},
        {'title': 'New password:', 'value': '<input type="password" name="p1">'},
        {'title': 'Retry new password:', 'value': '<input type="password" name="p2">'},
    ]
    lsection = {
        'columns': ['title', 'value'],
        'range': [{'entry': entries}],
        'note': '',
        'noheaders': True,
        'header': pif.render.format_form_start(method='post', token=pif.dbh.create_token()),
        'footer': pif.render.format_hidden_input({'id': user['id']}) + pif.render.format_button_input() + "</form>",
    }
    return pif.render.format_template(
        'simplelistix.html',
        header='''<br>You have requested to change your password.<br>''',
        llineup={'section': [lsection]}, nofooter=True)
# ------ validate
@basics.web_page
def validate_main(pif):
    """Account-verification page: check a submitted vkey, optionally resend the
    verification email, then render the validate form."""
    pif.render.print_html()
    if not pif.user_id:
        raise useful.Redirect("/cgi-bin/login.cgi")
    user = pif.user
    if 'vkey' in pif.form:
        code_matches = user and user.vkey == pif.form.get_str('vkey')
        if code_matches:
            pif.dbh.verify_user(user.id)
            useful.warn("Your account has been verified!")
            raise useful.Redirect("/", delay=5)
        useful.warn("That code is not correct. Please try again.")
    if 'resend' in pif.form:
        generate_signup_email(pif, pif.user)
        useful.warn("The code has been resent.")
    return pif.render.format_template('validate.html', user_id=pif.user.user_id, dest=pif.form.get_str('dest'))
# def verify(pif, user_id, vkey):
# user = pif.dbh.fetch_user(vkey=vkey, user_id=user_id)
# if user:
# rec_id = user.id
# pif.dbh.verify_user(rec_id)
# useful.warn("Your account has been verified! Now please log in.<br><hr>")
# raise useful.Redirect("/cgi-bin/login.cgi", delay=5)
#
# useful.warn("You have not verified your account. Please contact <EMAIL> for help.")
# raise useful.Redirect("/", delay=5)
# ------ recover
@basics.web_page
def recover_main(pif):
    # Password-recovery flow, in three request phases:
    #  1. user_id only            -> flag the account, email a recovery code
    #  2. user_id + vkey          -> show the new-password form (vkey hidden)
    #  3. user_id + vkey + p1/p2  -> set the password and clear the flag
    pif.render.print_html()
    hide_vkey = recovering = False
    user_id = None
    if pif.form.has('user_id'):
        if pif.form.has('vkey'):
            # Phases 2/3: the request carries the emailed recovery code.
            user = pif.dbh.fetch_user(user_id=pif.form.get_alnum('user_id'), vkey=pif.form.get_alnum('vkey'))
            if user:
                if pif.form.has('p1') and pif.form.get_str('p1') == pif.form.get_str('p2'):
                    pif.dbh.update_password(user.id, pif.form.get_str('p2'))
                    # Recovery complete: drop the recovery flag and clear the
                    # session cookie so the user must log in again.
                    pif.dbh.update_user(rec_id=user.id, flags=user.flags & ~config.FLAG_USER_PASSWORD_RECOVERY)
                    pif.render.set_cookie(pif.render.secure.clear_cookie(['id']))
                    useful.warn("Your password has been changed.")
                    raise useful.Redirect('/cgi-bin/login.cgi', delay=5)
                else:
                    user_id = user.user_id
                    recovering = hide_vkey = True
        else:
            # Phase 1: the single field may hold an email address or a user id;
            # try email first, then fall back to user id.
            user = pif.dbh.fetch_user(email=pif.form.get_str('user_id'))
            if not user:
                user = pif.dbh.fetch_user(user_id=pif.form.get_alnum('user_id'))
            if user:
                pif.dbh.update_user(rec_id=user.id, flags=user.flags | config.FLAG_USER_PASSWORD_RECOVERY)
                generate_recovery_email(pif, user)
                recovering = True
                user_id = user.user_id
    return pif.render.format_template('recover.html', recovering=recovering, user_id=user_id, show_vkey=not hide_vkey)
def generate_recovery_email(pif, user):
    # Build and send the password-reset email for a flagged user record.
    # NOTE(review): mutates the passed user record (adds 'host', 'secure_host'
    # and 'recover' keys) as a side effect of building the message.
    user['host'] = pif.server_name
    user['secure_host'] = pif.secure_host
    # Recovery link embeds the user id and the one-time vkey token.
    user['recover'] = "{secure_host}/cgi-bin/recover.cgi?user_id={user_id}&vkey={vkey}".format(**user.todict())
    # user = {k: useful.url_quote(str(v), plus=True) for k, v in user.todict().items()}
    print(user)  # debug dump of the record ('print' may be aliased at module level)
    msg = '''To: "{user_id}" <{email}>
From: "Account Verification" <webmaster@{host}>
Subject: Reset your password
Content-Type: text/plain; charset=us-ascii
Content-Transfer-Encoding: 7bit
Dear {first_name} {last_name},
A request has been made to reset your password on {host}. Please verify that you
made this request by visiting the following link:
<a href="{recover}">{recover}</a>
Or, the page where you requested to change your password will now ask for a verification code.
Enter this code in the verification input as you change your password.
{vkey}
Thank you!
'''.format(**user.todict())
    # Hand the fully-formed message to sendmail; '-t' reads recipients from it.
    useful.simple_process(('/usr/sbin/sendmail', '-t',), msg)
# ------ user profile
@basics.web_page
def profile_main(pif):
    # Show and (on POST) update the logged-in user's profile.
    pif.render.title = 'User Profile'
    pif.render.hide_title = False
    pif.render.print_html()
    if not pif.user_id:
        raise useful.SimpleError("It doesn't look like you're logged in!")
    table_info = pif.dbh.table_info['user']
    user = pif.user
    if not user:
        raise useful.SimpleError('That user record ({}) was not found.'.format(pif.user_id))
    if 'user_id' in pif.form:
        # Form was posted: refuse a user_id already owned by someone else.
        newuser = pif.dbh.fetch_user(user_id=pif.form.get_str('user_id'))
        if newuser and newuser.id != pif.form.get_int('id'):
            raise useful.SimpleError('Sorry, but that user ID is already in use.')
        if pif.dbh.update_profile(user, **pif.form.form):
            useful.warn('Your profile has been updated.')
        else:
            useful.warn('Updating your profile failed.')
    # if email changed, clear verified
    header = pif.render.format_form_start(method='post', token=pif.dbh.create_token())
    rows = table_info['editable']
    desc = pif.dbh.describe_dict('user')
    def prof_row(row):
        # One text-input form row per editable column; the 'email' row carries
        # a note that changing it will require re-verification.
        return {'title': table_info['title'][row], 'value': pif.render.format_text_input(
            row, desc[row]['length'], 80, value=user[row]) + (
                '<br>If you change your email address, you will have to verify the new one.' if row == 'email' else '')}
    entries = [prof_row(row) for row in rows]
    if user['flags'] & config.FLAG_USER_BAMCA_MEMBER:
        # Decorate the first row with the member badge art.
        entries[0]['value'] += ' ' + pif.render.fmt_art('bamca_member')
    footer = pif.render.format_hidden_input({'id': user['id']})
    footer += pif.render.format_button_input() + "</form>"
    footer += pif.render.format_button('change password', '/cgi-bin/chpass.cgi')
    if user['photographer_id']:
        footer += pif.render.format_button(
            'your pictures', '/cgi-bin/photogs.cgi?id={}'.format(user['photographer_id']))
    lsection = dict(columns=['title', 'value'], range=[{'entry': entries}], note='',
                    noheaders=True, header=header, footer=footer)
    return pif.render.format_template(
        'simplelistix.html',
        header=('''<br>Currently this information is only available to administrators of this website. We're '''
                '''looking at possibly doing more in the future though.<br><br>'''),
        llineup=dict(section=[lsection]), nofooter=True)
# ------
def user_list(pif):
    """CLI helper: dump every user record to stdout."""
    all_users = pif.dbh.fetch_users()
    print(all_users)
# Command table for basics.process_command_list: (key, handler, help text).
cmds = [
    ('l', user_list, "list users"),
]
if __name__ == '__main__': # pragma: no cover
    basics.process_command_list(cmds=cmds)
| [
"useful.generate_token",
"useful.SimpleError",
"useful.bit_list",
"basics.process_command_list",
"uuid.uuid4",
"sprint.sprint",
"useful.warn",
"useful.simple_process",
"useful.Redirect"
] | [((5222, 5247), 'useful.generate_token', 'useful.generate_token', (['(10)'], {}), '(10)\n', (5243, 5247), False, 'import useful\n'), ((5996, 6007), 'sprint.sprint', 'print', (['user'], {}), '(user)\n', (6001, 6007), True, 'from sprint import sprint as print\n'), ((6563, 6619), 'useful.simple_process', 'useful.simple_process', (["('/usr/sbin/sendmail', '-t')", 'msg'], {}), "(('/usr/sbin/sendmail', '-t'), msg)\n", (6584, 6619), False, 'import useful\n'), ((12224, 12235), 'sprint.sprint', 'print', (['user'], {}), '(user)\n', (12229, 12235), True, 'from sprint import sprint as print\n'), ((12863, 12919), 'useful.simple_process', 'useful.simple_process', (["('/usr/sbin/sendmail', '-t')", 'msg'], {}), "(('/usr/sbin/sendmail', '-t'), msg)\n", (12884, 12919), False, 'import useful\n'), ((15479, 15517), 'basics.process_command_list', 'basics.process_command_list', ([], {'cmds': 'cmds'}), '(cmds=cmds)\n', (15506, 15517), False, 'import basics\n'), ((2772, 2834), 'useful.SimpleError', 'useful.SimpleError', (['"""The requested user ID is already in use."""'], {}), "('The requested user ID is already in use.')\n", (2790, 2834), False, 'import useful\n'), ((3948, 3976), 'useful.warn', 'useful.warn', (['"""Login Failed!"""'], {}), "('Login Failed!')\n", (3959, 3976), False, 'import useful\n'), ((4845, 4895), 'useful.SimpleError', 'useful.SimpleError', (['"""That is not a legal user ID."""'], {}), "('That is not a legal user ID.')\n", (4863, 4895), False, 'import useful\n'), ((4954, 5002), 'useful.SimpleError', 'useful.SimpleError', (['"""That ID is already in use."""'], {}), "('That ID is already in use.')\n", (4972, 5002), False, 'import useful\n'), ((5035, 5089), 'useful.SimpleError', 'useful.SimpleError', (['"""Please specify an email address."""'], {}), "('Please specify an email address.')\n", (5053, 5089), False, 'import useful\n'), ((5131, 5209), 'useful.SimpleError', 'useful.SimpleError', (['"""Please specify the same password in both password boxes."""'], {}), 
"('Please specify the same password in both password boxes.')\n", (5149, 5209), False, 'import useful\n'), ((5440, 5542), 'useful.warn', 'useful.warn', (['"""Your account has been created. Please check your email for the verification."""'], {}), "(\n 'Your account has been created. Please check your email for the verification.'\n )\n", (5451, 5542), False, 'import useful\n'), ((5547, 5587), 'useful.Redirect', 'useful.Redirect', (['"""/cgi-bin/validate.cgi"""'], {}), "('/cgi-bin/validate.cgi')\n", (5562, 5587), False, 'import useful\n'), ((7050, 7110), 'useful.SimpleError', 'useful.SimpleError', (['"""It doesn\'t look like you\'re logged in!"""'], {}), '("It doesn\'t look like you\'re logged in!")\n', (7068, 7110), False, 'import useful\n'), ((9278, 9315), 'useful.Redirect', 'useful.Redirect', (['"""/cgi-bin/login.cgi"""'], {}), "('/cgi-bin/login.cgi')\n", (9293, 9315), False, 'import useful\n'), ((9767, 9807), 'useful.warn', 'useful.warn', (['"""The code has been resent."""'], {}), "('The code has been resent.')\n", (9778, 9807), False, 'import useful\n'), ((13126, 13186), 'useful.SimpleError', 'useful.SimpleError', (['"""It doesn\'t look like you\'re logged in!"""'], {}), '("It doesn\'t look like you\'re logged in!")\n', (13144, 13186), False, 'import useful\n'), ((7570, 7615), 'useful.warn', 'useful.warn', (['"""The new passwords don\'t match!"""'], {}), '("The new passwords don\'t match!")\n', (7581, 7615), False, 'import useful\n'), ((7711, 7757), 'useful.warn', 'useful.warn', (['"""This password has been changed."""'], {}), "('This password has been changed.')\n", (7722, 7757), False, 'import useful\n'), ((9503, 9549), 'useful.warn', 'useful.warn', (['"""Your account has been verified!"""'], {}), "('Your account has been verified!')\n", (9514, 9549), False, 'import useful\n'), ((9568, 9597), 'useful.Redirect', 'useful.Redirect', (['"""/"""'], {'delay': '(5)'}), "('/', delay=5)\n", (9583, 9597), False, 'import useful\n'), ((9624, 9683), 'useful.warn', 
'useful.warn', (['"""That code is not correct. Please try again."""'], {}), "('That code is not correct. Please try again.')\n", (9635, 9683), False, 'import useful\n'), ((13544, 13608), 'useful.SimpleError', 'useful.SimpleError', (['"""Sorry, but that user ID is already in use."""'], {}), "('Sorry, but that user ID is already in use.')\n", (13562, 13608), False, 'import useful\n'), ((13679, 13724), 'useful.warn', 'useful.warn', (['"""Your profile has been updated."""'], {}), "('Your profile has been updated.')\n", (13690, 13724), False, 'import useful\n'), ((13751, 13795), 'useful.warn', 'useful.warn', (['"""Updating your profile failed."""'], {}), "('Updating your profile failed.')\n", (13762, 13795), False, 'import useful\n'), ((3825, 3865), 'useful.Redirect', 'useful.Redirect', (['"""/cgi-bin/validate.cgi"""'], {}), "('/cgi-bin/validate.cgi')\n", (3840, 3865), False, 'import useful\n'), ((7910, 7953), 'useful.warn', 'useful.warn', (['"""That password isn\'t correct!"""'], {}), '("That password isn\'t correct!")\n', (7921, 7953), False, 'import useful\n'), ((1578, 1619), 'useful.bit_list', 'useful.bit_list', (['user[col]'], {'format': '"""%04x"""'}), "(user[col], format='%04x')\n", (1593, 1619), False, 'import useful\n'), ((8029, 8074), 'useful.warn', 'useful.warn', (['"""The new passwords don\'t match!"""'], {}), '("The new passwords don\'t match!")\n', (8040, 8074), False, 'import useful\n'), ((8270, 8316), 'useful.warn', 'useful.warn', (['"""Your password has been changed."""'], {}), "('Your password has been changed.')\n", (8281, 8316), False, 'import useful\n'), ((11102, 11148), 'useful.warn', 'useful.warn', (['"""Your password has been changed."""'], {}), "('Your password has been changed.')\n", (11113, 11148), False, 'import useful\n'), ((11175, 11221), 'useful.Redirect', 'useful.Redirect', (['"""/cgi-bin/login.cgi"""'], {'delay': '(5)'}), "('/cgi-bin/login.cgi', delay=5)\n", (11190, 11221), False, 'import useful\n'), ((8212, 8224), 'uuid.uuid4', 
'uuid.uuid4', ([], {}), '()\n', (8222, 8224), False, 'import uuid\n')] |
import tensorflow as tf # for tf libs
import pathlib # for giving path of the images
# a closs to wrap dataset functions
class LoadImages:
    """Build a shuffled, batched tf.data pipeline from a directory tree whose
    sub-directory names are the class labels."""

    def __init__(self, data_path):
        # Root directory; each immediate sub-directory names one label.
        self.__data_root = pathlib.Path(data_path)

    def __process_image(self, image_data):
        """Read one image file, decode it, resize to 192x192 and scale to [0, 1]."""
        raw = tf.io.read_file(image_data)
        decoded = tf.image.decode_image(raw)
        return tf.image.resize(decoded, [192, 192]) / 255.0

    def __retrive_image_labels(self):
        """Collect per-image label indices and the label-name -> index map."""
        # All image paths (as strings), cached for __make_ds.
        self.__all_image_data = [str(path) for path in self.__data_root.glob("*/*")]
        # Encode each directory name to an integer index, in glob order.
        root_labels = {
            name: index
            for index, name in enumerate(
                entry.name for entry in self.__data_root.glob("*/") if entry.is_dir())
        }
        # Label of an image is the index of its parent directory.
        all_images_labels = [
            root_labels[pathlib.Path(image).parent.name]
            for image in self.__all_image_data
        ]
        return all_images_labels, root_labels

    def __make_ds(self, images_labels):
        """Zip decoded image tensors with their labels into a shuffled, batched dataset."""
        label_ds = tf.data.Dataset.from_tensor_slices(images_labels)
        # Images are decoded eagerly here, one tensor per file.
        decoded = [self.__process_image(path) for path in self.__all_image_data]
        image_only_ds = tf.data.Dataset.from_tensor_slices(decoded)
        paired = tf.data.Dataset.zip((image_only_ds, label_ds))
        return paired.shuffle(100).batch(64, drop_remainder=True)

    def get_processed_data(self):
        """Return (batched dataset, label map) for the configured directory."""
        all_image_labels, root_labels = self.__retrive_image_labels()
        return self.__make_ds(all_image_labels), root_labels
| [
"tensorflow.data.Dataset.zip",
"tensorflow.data.Dataset.from_tensor_slices",
"pathlib.Path",
"tensorflow.image.resize",
"tensorflow.io.read_file",
"tensorflow.image.decode_image"
] | [((249, 272), 'pathlib.Path', 'pathlib.Path', (['data_path'], {}), '(data_path)\n', (261, 272), False, 'import pathlib\n'), ((401, 428), 'tensorflow.io.read_file', 'tf.io.read_file', (['image_data'], {}), '(image_data)\n', (416, 428), True, 'import tensorflow as tf\n'), ((453, 485), 'tensorflow.image.decode_image', 'tf.image.decode_image', (['image_raw'], {}), '(image_raw)\n', (474, 485), True, 'import tensorflow as tf\n'), ((1590, 1639), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['images_labels'], {}), '(images_labels)\n', (1624, 1639), True, 'import tensorflow as tf\n'), ((1876, 1915), 'tensorflow.data.Dataset.zip', 'tf.data.Dataset.zip', (['(imageds, labelds)'], {}), '((imageds, labelds))\n', (1895, 1915), True, 'import tensorflow as tf\n'), ((537, 579), 'tensorflow.image.resize', 'tf.image.resize', (['image_decoded', '[192, 192]'], {}), '(image_decoded, [192, 192])\n', (552, 579), True, 'import tensorflow as tf\n'), ((1282, 1301), 'pathlib.Path', 'pathlib.Path', (['image'], {}), '(image)\n', (1294, 1301), False, 'import pathlib\n')] |
#!/usr/bin/env python3
from setuptools import find_packages, setup
VERSION = '0.0.3'
PYTHON_REQUIRES = '3.8'
# Static package metadata; dynamic fields (long_description, install_requires)
# are filled in below before calling setup().
packagedata = {
    'include_package_data': True,
    'name': "snowshu",
    'version': VERSION,
    'author': "Health Union Data Team",
    'author_email': '<EMAIL>',
    'url': 'https://snowshu.readthedocs.io/en/master/index.html',
    'description': "Sample image management for data transform TDD.",
    'classifiers': ["Development Status :: 4 - Beta", "License :: OSI Approved :: Apache Software License",
                    "Operating System :: OS Independent"],
    'python_requires': f'>={PYTHON_REQUIRES}',
    'install_requires': [],
    'packages': find_packages(exclude=['tests', ]),
    'entry_points': {'console_scripts': ['snowshu=snowshu.core.main:cli']}
}
# The long description comes straight from the README.
with open('./README.md') as readme:
    packagedata['long_description'] = readme.read()
    packagedata['long_description_content_type'] = 'text/markdown'
# Collect pinned requirements. Lines are stripped so install_requires holds
# clean requirement strings; '-r' include directives, comments and blank lines
# are skipped. (Previously raw lines, including trailing newlines and blanks,
# were appended verbatim.)
for file_name in ['base.txt', 'snowflake_pins.txt']:
    with open(f'./requirements/{file_name}') as requirements:
        for line in requirements:
            requirement = line.strip()
            if requirement and not requirement.startswith(('-r', '#')):
                packagedata['install_requires'].append(requirement)
setup(**packagedata)
| [
"setuptools.find_packages",
"setuptools.setup"
] | [((1209, 1229), 'setuptools.setup', 'setup', ([], {}), '(**packagedata)\n', (1214, 1229), False, 'from setuptools import find_packages, setup\n'), ((674, 706), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests']"}), "(exclude=['tests'])\n", (687, 706), False, 'from setuptools import find_packages, setup\n')] |
from ivy import ivy_module as im
from ivy.ivy_compiler import ivy_from_string
from ivy.tk_ui import new_ui
from ivy import ivy_utils as iu
# Minimal Ivy 1.6 program: foo's initializer assigns x(me) := me and then
# asserts false, so safety checking of isolate iso_foo is expected to fail.
prog = """#lang ivy1.6
type t
individual x(X:t) : t
object foo(me:t) = {
after init {
x(me) := me;
assert false
}
}
isolate iso_foo(me:t) = foo(me) with x(me)
"""
with im.Module():
    # Check the iso_foo isolate in induction mode and dump the compiled code.
    iu.set_parameters({'mode':'induction','isolate':'iso_foo','show_compiled':'true'})
    main_ui = new_ui()
    ui = main_ui.add(ivy_from_string(prog))
    main_ui.tk.update_idletasks()
    main_ui.answer("OK")
    # The 'assert false' in init must make node 0 unsafe.
    ui.check_safety_node(ui.node(0))
    assert not ui.node(0).safe
    # ui.check_inductiveness()
    # # ui = ui.cti
    # cg = ui.current_concept_graph
    # cg.show_relation(cg.relation('link(X,Y)'),'+')
    # cg.gather()
    # main_ui.answer("OK")
    # cg.strengthen()
    # main_ui.answer("OK")
    # ui.check_inductiveness()
    # # cg.show_relation(cg.relation('semaphore'),'+')
    # cg.gather()
    # main_ui.answer("View")
    # cg.bmc_conjecture(bound=1)
    # main_ui.mainloop()
| [
"ivy.ivy_module.Module",
"ivy.tk_ui.new_ui",
"ivy.ivy_compiler.ivy_from_string",
"ivy.ivy_utils.set_parameters"
] | [((339, 350), 'ivy.ivy_module.Module', 'im.Module', ([], {}), '()\n', (348, 350), True, 'from ivy import ivy_module as im\n'), ((356, 447), 'ivy.ivy_utils.set_parameters', 'iu.set_parameters', (["{'mode': 'induction', 'isolate': 'iso_foo', 'show_compiled': 'true'}"], {}), "({'mode': 'induction', 'isolate': 'iso_foo',\n 'show_compiled': 'true'})\n", (373, 447), True, 'from ivy import ivy_utils as iu\n'), ((453, 461), 'ivy.tk_ui.new_ui', 'new_ui', ([], {}), '()\n', (459, 461), False, 'from ivy.tk_ui import new_ui\n'), ((483, 504), 'ivy.ivy_compiler.ivy_from_string', 'ivy_from_string', (['prog'], {}), '(prog)\n', (498, 504), False, 'from ivy.ivy_compiler import ivy_from_string\n')] |
#
# Copyright 2017 Import.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from importio2.commands import AdBase
import csv
import logging
import json
import os
from datetime import datetime
# Module-level logger for this command.
logger = logging.getLogger(__name__)
# Placeholder resource id (32 hex 'F's) used for documents built from CSV.
DEFAULT_RESOURCE_ID = 'F' * 32
class CsvToJson(AdBase):
    """Command that converts an extractor CSV export into newline-delimited
    JSON documents, grouping consecutive rows that share the same URL.

    The CSV's first column must be named 'url'; the remaining columns become
    the extractor group fields of each document.
    """

    def __init__(self):
        super(CsvToJson, self).__init__()
        self._csv_path = None     # source CSV file
        self._json_path = None    # destination JSON file
        self._overwrite = False   # allow clobbering an existing destination
        self._date = None         # reserved; no CLI flag currently sets it
        self._remove_bom = False  # strip a leading byte order mark from the CSV

    def handle_arguments(self):
        """Register this command's CLI arguments on the parser from AdBase."""
        self._parser.add_argument('--csv-path', action='store', dest='csv_path', metavar='path', required=True,
                                  help='Path to CSV file to transform')
        self._parser.add_argument('--json-path', action='store', dest='json_path', metavar='path', required=True,
                                  help='Destination path of JSON document')
        self._parser.add_argument('--overwrite', action='store_true', dest='overwrite',
                                  help='Flag to explicitly set to overwrite file if it exists')
        self._parser.add_argument('--remove-bom', action='store_true', dest='remove_bom', default=False,
                                  help='Remove byte order mark')
        super(CsvToJson, self).handle_arguments()

    def get_arguments(self):
        """Copy parsed CLI arguments into instance attributes."""
        super(CsvToJson, self).get_arguments()
        if 'csv_path' in self._args:
            self._csv_path = self._args.csv_path
        if 'json_path' in self._args:
            self._json_path = self._args.json_path
        if 'overwrite' in self._args:
            self._overwrite = self._args.overwrite
        if 'date' in self._args:
            self._date = self._args.date
        # Bug fix: this previously read "if 'remove_bom':", an always-true
        # string literal rather than a membership test.
        if 'remove_bom' in self._args:
            self._remove_bom = self._args.remove_bom

    def base_document(self, url, resource_id, timestamp, sequence_number, status_code=200):
        """
        Generate the skeleton document for one URL.
        :param url: Source URL of the result
        :param resource_id: Import.io resource Id
        :param timestamp: Fetch time as Unix epoch in milliseconds
        :param sequence_number: Row number
        :param status_code: HTTP status code from fetching the URL
        :return: Document dict with an empty extractorData group to fill in
        """
        extractor_data = {
            'url': url,
            'resourceId': resource_id,
            'data': [{'group': []}],
        }
        result = {
            'timestamp': timestamp,
            'sequenceNumber': sequence_number,
            'pageData': {'resourceId': resource_id, 'statusCode': status_code, 'timestamp': timestamp},
            'extractorData': extractor_data,
        }
        return {'url': url, 'result': result}

    def row_to_group_instance(self, row, columns):
        """
        Convert one CSV row into the extractor group representation:
        {column: [{'text': value}], ...}.
        :param row: A single row of data from the CSV file
        :param columns: The columns from the header of the CSV file
        :return: A dictionary of the transformed row
        """
        return {col: [{'text': row[col]}] for col in columns}

    def write_document(self, document_list):
        """Write documents to the destination path as newline-delimited JSON."""
        with open(self._json_path, 'w') as json_file:
            first = True
            for document in document_list:
                if first:
                    first = False
                else:
                    json_file.write('\n')
                # Append in compact form with no spaces after separators
                json.dump(document, json_file, separators=(',', ':'))

    def csv_to_json(self):
        """Read the CSV and emit one JSON document per run of identical URLs."""
        with open(self._csv_path) as csv_file:
            if self._remove_bom:
                csv_file.read(1)  # consume the BOM character
            csv_reader = csv.DictReader(csv_file)
            # First column holds the URL; the rest become group fields.
            fields = csv_reader.fieldnames
            headers = fields[1:]
            logger.debug("CSV headers: {0}".format(headers))
            last_url = None
            group = None
            document_list = []
            sequence_number = 0
            for row in csv_reader:
                url = row['url']
                if last_url != url:
                    # New URL: start a fresh document.
                    # NOTE(review): strftime('%s') is platform-specific (not
                    # portable to Windows) — confirm target platforms.
                    ts = int(datetime.now().strftime('%s')) * 1000
                    document = self.base_document(url=url, resource_id=DEFAULT_RESOURCE_ID,
                                                  timestamp=ts, sequence_number=sequence_number)
                    group = document['result']['extractorData']['data'][0]['group']
                    document_list.append(document)
                    sequence_number += 1
                group.append(self.row_to_group_instance(row, headers))
                last_url = url
        # Refuse to clobber an existing file unless --overwrite was given.
        if not self._overwrite and os.path.exists(self._json_path):
            msg = "File: {0} exists. Use --overwrite flag to allow".format(self._json_path)
            raise Exception(msg)
        # Previously this method duplicated write_document() inline.
        self.write_document(document_list)

    def run(self, csv_path, json_path, overwrite=True):
        """Programmatic entry point that bypasses CLI argument parsing."""
        self._csv_path = csv_path
        self._json_path = json_path
        self._overwrite = overwrite
        self.csv_to_json()

    def execute(self):
        """CLI entry point: parse arguments, then convert, logging any failure."""
        try:
            self.handle_arguments()
            self.csv_to_json()
        except Exception as e:
            logger.exception(e)
def main():
    """Console entry point: build the command object and run it."""
    CsvToJson().execute()

if __name__ == '__main__':
    main()
| [
"logging.getLogger",
"os.path.exists",
"csv.DictReader",
"datetime.datetime.now",
"json.dump"
] | [((704, 731), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (721, 731), False, 'import logging\n'), ((4582, 4606), 'csv.DictReader', 'csv.DictReader', (['csv_file'], {}), '(csv_file)\n', (4596, 4606), False, 'import csv\n'), ((5767, 5798), 'os.path.exists', 'os.path.exists', (['self._json_path'], {}), '(self._json_path)\n', (5781, 5798), False, 'import os\n'), ((4354, 4407), 'json.dump', 'json.dump', (['document', 'json_file'], {'separators': "(',', ':')"}), "(document, json_file, separators=(',', ':'))\n", (4363, 4407), False, 'import json\n'), ((6252, 6305), 'json.dump', 'json.dump', (['document', 'json_file'], {'separators': "(',', ':')"}), "(document, json_file, separators=(',', ':'))\n", (6261, 6305), False, 'import json\n'), ((5135, 5149), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5147, 5149), False, 'from datetime import datetime\n')] |
#!/usr/bin/env python
import glob
import os
import shutil
import subprocess
import uuid
def renameFiles(tooldir, dadir, fromnames, toname):
    """Recursively rename files and rewrite contents under ``dadir``.

    Each name in ``fromnames`` is replaced with ``toname`` in file names and
    file contents, in four spellings: CamelCase, spaced, underscored and
    all-lower-case. Descends into every sub-directory.
    """
    # We descend one directory per call, so the relative tool path gains one '..'.
    tooldir = os.path.join('..', tooldir)
    os.chdir(dadir)

    def spaced(name):
        # "KillCutie" -> "Kill Cutie"
        return "".join(c if c.islower() else ' ' + c for c in name).strip()

    tospaced = spaced(toname)
    tounder = tospaced.lower().replace(' ', '_')
    tolo = toname.lower()
    for fromname in fromnames:
        fromspaced = spaced(fromname)
        fromunder = fromspaced.lower().replace(' ', '_')
        fromlo = fromname.lower()
        print("Renaming '%s' to '%s' in dir '%s'." % (fromname, toname, dadir))
        subprocess.call(['python', tooldir + '/renamefiles.py', '*' + fromname, '*' + toname])
        subprocess.call(['python', tooldir + '/renamefiles.py', fromname + '*', toname + '*'])
        subprocess.call(['python', tooldir + '/regexpfiles.py', fromname, toname, '*'])
        subprocess.call(['python', tooldir + '/regexpfiles.py', fromspaced, tospaced, '*'])
        subprocess.call(['python', tooldir + '/regexpfiles.py', fromunder, tounder, '*'])
        subprocess.call(['python', tooldir + '/regexpfiles.py', fromlo, tolo, '*'])
    # Recurse into sub-directories, then restore the working directory.
    for entry in glob.glob('*'):
        if os.path.isdir(entry):
            renameFiles(tooldir, entry, fromnames, toname)
    os.chdir('..')
def add_makefile_generator(filename, fromname, toname):
    """Duplicate every line of ``filename`` mentioning ``fromname``, appending
    a copy with ``fromname`` replaced by ``toname`` directly after it."""
    tmpname = filename + ".tmp"
    with open(filename, "rt") as src:
        with open(tmpname, "wt") as dst:
            for line in src:
                dst.write(line)
                if fromname in line:
                    dst.write(line.replace(fromname, toname))
    # Atomically-ish swap the rewritten file into place.
    os.remove(filename)
    os.rename(tmpname, filename)
def add_builders(filename, fromname, toname):
    """Clone project-builder blocks in ``filename``.

    Finds every ``def *_<abbrev>():`` block belonging to ``fromname`` and
    inserts a copy rewritten for ``toname`` right after it. Short names
    (<= 5 chars) are abbreviated as their lower-case form; longer names use
    their upper-case initials (e.g. "KillCutie" -> "kc").

    Bug fix: a matching block that runs to end-of-file is now flushed; it
    was previously dropped because the copy was only written when the next
    ``def`` line appeared.
    """
    def _block_suffix(name):
        # Suffix that identifies a builder function for this project name.
        abbrev = name.lower() if len(name) <= 5 else "".join(filter(str.isupper, name)).lower()
        return '_' + abbrev + '():'

    def _spaced(name):
        # "KillCutie" -> "Kill Cutie"
        return "".join(x if x.islower() else ' ' + x for x in name).strip()

    fromsuffix = _block_suffix(fromname)
    tosuffix = _block_suffix(toname)
    fromspaced = _spaced(fromname)
    tospaced = _spaced(toname)

    def _transform(block):
        # Rewrite the copied block for the new project name, all spellings.
        return (block.replace(fromname, toname)
                     .replace(fromspaced, tospaced)
                     .replace(fromsuffix, tosuffix))

    outfn = filename + ".tmp"
    with open(filename, "rt") as r:
        with open(outfn, "wt") as w:
            inblock = False
            block = ""
            for line in r:
                wasinblock = inblock
                if line.startswith("def "):
                    inblock = line.strip().endswith(fromsuffix)
                if inblock:
                    block += line
                if wasinblock and not inblock:
                    # Emit the rewritten copy just before the next def.
                    w.write(_transform(block))
                    block = ""
                w.write(line)
            if block:
                # Matching block reached end-of-file: flush its copy too.
                w.write(_transform(block))
    os.remove(filename)
    os.rename(outfn, filename)
def fix_guids(toname):
    """Give every Visual C++ project/filter file under ``toname`` fresh GUIDs
    so the clone does not collide with the original project."""
    markers = ('ProjectGUID="{', '<ProjectGuid>{', '<UniqueIdentifier>{')
    for filename in glob.glob(toname + '/' + toname + '*.vc*proj*'):
        tmpname = filename + ".tmp"
        with open(filename, "rt") as src:
            with open(tmpname, "wt") as dst:
                for line in src:
                    for marker in markers:
                        pos = line.find(marker)
                        if pos >= 0:
                            # Overwrite the 36-char GUID in place, keeping the tail.
                            fresh = str(uuid.uuid1()).upper()
                            start = pos + len(marker)
                            line = line[:start] + fresh + line[start + len(fresh):]
                            break
                    dst.write(line)
        os.remove(filename)
        os.rename(tmpname, filename)
def clone_project(fromnames, toname):
    """Copy project ``fromnames[0]`` to ``toname`` and rewrite every name,
    makefile entry, builder block and project GUID for the clone."""
    print("Copying files...")
    fromname = fromnames[0]
    # Build artifacts and generated files are never copied.
    ignored = ('*.user', 'makefile', '*.mesh', '*.class', '*.phys', 'Unicode Debug', 'Unicode Release Candiate', 'Unicode Final', 'Final', 'Debug', 'Release')
    shutil.copytree(fromname, toname, ignore=shutil.ignore_patterns(*ignored))
    renameFiles('Tools/Util', toname, fromnames, toname)
    print("Files and contents renamed successfully.")
    add_makefile_generator('Tools/GCC/generate_makefile.py', fromname, toname)
    print("Makefile generation added successfully.")
    add_builders('Tools/Build/rgo.py', fromname, toname)
    print("Builders added successfully.")
    fix_guids(toname)
    print("GUIDs changed.")
    print(fromname, "->", toname, "done!")
if __name__ == "__main__":
    import sys
    # Need at least one source name plus the destination name.
    args = sys.argv[1:]
    if len(args) < 2:
        print("Usage: %s <fromname> [fromname...] <toname>" % sys.argv[0])
        print("Example: %s KillCutie GrenadeRun TireFire" % sys.argv[0])
        sys.exit(1)
    clone_project(args[:-1], args[-1])
| [
"os.rename",
"shutil.ignore_patterns",
"os.path.join",
"os.chdir",
"uuid.uuid1",
"os.path.isdir",
"subprocess.call",
"sys.exit",
"glob.glob",
"os.remove"
] | [((201, 228), 'os.path.join', 'os.path.join', (['""".."""', 'tooldir'], {}), "('..', tooldir)\n", (213, 228), False, 'import os\n'), ((230, 245), 'os.chdir', 'os.chdir', (['dadir'], {}), '(dadir)\n', (238, 245), False, 'import os\n'), ((1167, 1181), 'glob.glob', 'glob.glob', (['"""*"""'], {}), "('*')\n", (1176, 1181), False, 'import glob\n'), ((1272, 1286), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (1280, 1286), False, 'import os\n'), ((1561, 1580), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (1570, 1580), False, 'import os\n'), ((1582, 1608), 'os.rename', 'os.rename', (['outfn', 'filename'], {}), '(outfn, filename)\n', (1591, 1608), False, 'import os\n'), ((2636, 2655), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (2645, 2655), False, 'import os\n'), ((2657, 2683), 'os.rename', 'os.rename', (['outfn', 'filename'], {}), '(outfn, filename)\n', (2666, 2683), False, 'import os\n'), ((2725, 2772), 'glob.glob', 'glob.glob', (["(toname + '/' + toname + '*.vc*proj*')"], {}), "(toname + '/' + toname + '*.vc*proj*')\n", (2734, 2772), False, 'import glob\n'), ((672, 762), 'subprocess.call', 'subprocess.call', (["['python', tooldir + '/renamefiles.py', '*' + fromname, '*' + toname]"], {}), "(['python', tooldir + '/renamefiles.py', '*' + fromname, '*' +\n toname])\n", (687, 762), False, 'import subprocess\n'), ((755, 846), 'subprocess.call', 'subprocess.call', (["['python', tooldir + '/renamefiles.py', fromname + '*', toname + '*']"], {}), "(['python', tooldir + '/renamefiles.py', fromname + '*', \n toname + '*'])\n", (770, 846), False, 'import subprocess\n'), ((838, 917), 'subprocess.call', 'subprocess.call', (["['python', tooldir + '/regexpfiles.py', fromname, toname, '*']"], {}), "(['python', tooldir + '/regexpfiles.py', fromname, toname, '*'])\n", (853, 917), False, 'import subprocess\n'), ((918, 1005), 'subprocess.call', 'subprocess.call', (["['python', tooldir + '/regexpfiles.py', fromspaced, tospaced, '*']"], {}), 
"(['python', tooldir + '/regexpfiles.py', fromspaced,\n tospaced, '*'])\n", (933, 1005), False, 'import subprocess\n'), ((1002, 1087), 'subprocess.call', 'subprocess.call', (["['python', tooldir + '/regexpfiles.py', fromunder, tounder, '*']"], {}), "(['python', tooldir + '/regexpfiles.py', fromunder, tounder,\n '*'])\n", (1017, 1087), False, 'import subprocess\n'), ((1084, 1159), 'subprocess.call', 'subprocess.call', (["['python', tooldir + '/regexpfiles.py', fromlo, tolo, '*']"], {}), "(['python', tooldir + '/regexpfiles.py', fromlo, tolo, '*'])\n", (1099, 1159), False, 'import subprocess\n'), ((1205, 1222), 'os.path.isdir', 'os.path.isdir', (['fn'], {}), '(fn)\n', (1218, 1222), False, 'import os\n'), ((3229, 3248), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (3238, 3248), False, 'import os\n'), ((3251, 3277), 'os.rename', 'os.rename', (['outfn', 'filename'], {}), '(outfn, filename)\n', (3260, 3277), False, 'import os\n'), ((4220, 4231), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4228, 4231), False, 'import sys\n'), ((3563, 3591), 'shutil.ignore_patterns', 'shutil.ignore_patterns', (['*pat'], {}), '(*pat)\n', (3585, 3591), False, 'import shutil\n'), ((3030, 3042), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (3040, 3042), False, 'import uuid\n')] |
import json
from automator_class import Automator
# fetch('https://jsonplaceholder.typicode.com/posts/1')
# .then(res => res.json())
# .then(console.log)
if __name__ == "__main__":
url = "https://jsonplaceholder.typicode.com/posts/1"
automator = Automator()
automator.start_html_session()
res_status_code = 200
res = automator.send_get_html_request(url=url, res_status_code=res_status_code)
# use strict=false if response contains backslashes
readable_response = json.loads(res.content.decode("unicode_escape"), strict=False)
print(f"Response: {readable_response}")
automator.close_html_session()
| [
"automator_class.Automator"
] | [((262, 273), 'automator_class.Automator', 'Automator', ([], {}), '()\n', (271, 273), False, 'from automator_class import Automator\n')] |
import unittest
import hashids
import time
import os
hashid = hashids.Hashids()
__all__ = ['user_tester', 'NormTestCase']
os.environ['NORM_DATA_STORAGE_ROOT'] = 'data'
os.environ['NORM_DB_PATH'] = 'norm/db/norm.db'
from norm.config import session
def user_tester():
from norm.models.user import User
tester = session.query(User).filter(User.username == 'tester',
User.email == '<EMAIL>').first()
if tester is None:
tester = User(username='tester', first_name='tester', last_name='norm',
email='<EMAIL>')
session.add(tester)
session.commit()
return tester
class NormTestCase(unittest.TestCase):
def setUp(self):
from norm.engine import NormCompiler
self.session = session
# override norm configuration
self.user = user_tester()
self.context_id = hashid.encode(int(time.time() * 1000))
self.executor = NormCompiler(self.context_id, self.user, self.session)
def tearDown(self):
self.session.rollback()
self.session.close()
def execute(self, script):
return self.executor.execute(script)
| [
"norm.config.session.add",
"norm.models.user.User",
"norm.config.session.commit",
"hashids.Hashids",
"time.time",
"norm.config.session.query",
"norm.engine.NormCompiler"
] | [((63, 80), 'hashids.Hashids', 'hashids.Hashids', ([], {}), '()\n', (78, 80), False, 'import hashids\n'), ((490, 569), 'norm.models.user.User', 'User', ([], {'username': '"""tester"""', 'first_name': '"""tester"""', 'last_name': '"""norm"""', 'email': '"""<EMAIL>"""'}), "(username='tester', first_name='tester', last_name='norm', email='<EMAIL>')\n", (494, 569), False, 'from norm.models.user import User\n'), ((600, 619), 'norm.config.session.add', 'session.add', (['tester'], {}), '(tester)\n', (611, 619), False, 'from norm.config import session\n'), ((628, 644), 'norm.config.session.commit', 'session.commit', ([], {}), '()\n', (642, 644), False, 'from norm.config import session\n'), ((963, 1017), 'norm.engine.NormCompiler', 'NormCompiler', (['self.context_id', 'self.user', 'self.session'], {}), '(self.context_id, self.user, self.session)\n', (975, 1017), False, 'from norm.engine import NormCompiler\n'), ((323, 342), 'norm.config.session.query', 'session.query', (['User'], {}), '(User)\n', (336, 342), False, 'from norm.config import session\n'), ((918, 929), 'time.time', 'time.time', ([], {}), '()\n', (927, 929), False, 'import time\n')] |
import numpy as np
k_i = np.array([0.20, 0.22, 0.78, 0.80,
0.30, 0.32, 0.96, 1.00,
1.20, 1.43, 1.80, 1.88,
0.40, 0.50, 3.24, 3.50,
0.38, 0.43, 2.24, 4.90,
0.40, 0.44, 1.22, 4.00,
0.39, 0.44, 0.96, 1.80,
0.39, 0.45, 0.80, 1.60,
0.40, 0.47, 0.60, 1.60], dtype=float)
c_i = np.linspace(0, 160, 9)
t_i = np.array([16, 25, 50, 75], dtype=float)
def phi_ij(c_ii, c_j, t_ii, t_j):
return np.sqrt(1 + (c_ii - c_j)**2 + (t_ii - t_j)**2)
def calculate_aj():
b_ij = np.zeros((36, 36), dtype=float)
i = 0
for c_j_val in c_i:
for t_j_val in t_i:
j = 0
for c_i_val in c_i:
for t_i_val in t_i:
b_ij[i, j] = phi_ij(c_i_val, c_j_val, t_i_val, t_j_val)
j += 1
i += 1
a_ij = np.linalg.solve(b_ij, k_i)
return a_ij
def tk_ct(a_ij, c, t):
i = 0
function_value = 0
for c_j in c_i:
for t_j in t_i:
function_value += a_ij[i] * phi_ij(c, c_j, t, t_j)
i += 1
return function_value
def check():
a_ij = calculate_aj()
k_test = np.zeros(36, dtype=float)
i = 0
for c in c_i:
for t in t_i:
k_test[i] = tk_ct(a_ij, c, t)
i += 1
print(k_test)
| [
"numpy.linalg.solve",
"numpy.sqrt",
"numpy.array",
"numpy.linspace",
"numpy.zeros"
] | [((26, 255), 'numpy.array', 'np.array', (['[0.2, 0.22, 0.78, 0.8, 0.3, 0.32, 0.96, 1.0, 1.2, 1.43, 1.8, 1.88, 0.4, 0.5,\n 3.24, 3.5, 0.38, 0.43, 2.24, 4.9, 0.4, 0.44, 1.22, 4.0, 0.39, 0.44, \n 0.96, 1.8, 0.39, 0.45, 0.8, 1.6, 0.4, 0.47, 0.6, 1.6]'], {'dtype': 'float'}), '([0.2, 0.22, 0.78, 0.8, 0.3, 0.32, 0.96, 1.0, 1.2, 1.43, 1.8, 1.88,\n 0.4, 0.5, 3.24, 3.5, 0.38, 0.43, 2.24, 4.9, 0.4, 0.44, 1.22, 4.0, 0.39,\n 0.44, 0.96, 1.8, 0.39, 0.45, 0.8, 1.6, 0.4, 0.47, 0.6, 1.6], dtype=float)\n', (34, 255), True, 'import numpy as np\n'), ((401, 423), 'numpy.linspace', 'np.linspace', (['(0)', '(160)', '(9)'], {}), '(0, 160, 9)\n', (412, 423), True, 'import numpy as np\n'), ((430, 469), 'numpy.array', 'np.array', (['[16, 25, 50, 75]'], {'dtype': 'float'}), '([16, 25, 50, 75], dtype=float)\n', (438, 469), True, 'import numpy as np\n'), ((517, 567), 'numpy.sqrt', 'np.sqrt', (['(1 + (c_ii - c_j) ** 2 + (t_ii - t_j) ** 2)'], {}), '(1 + (c_ii - c_j) ** 2 + (t_ii - t_j) ** 2)\n', (524, 567), True, 'import numpy as np\n'), ((597, 628), 'numpy.zeros', 'np.zeros', (['(36, 36)'], {'dtype': 'float'}), '((36, 36), dtype=float)\n', (605, 628), True, 'import numpy as np\n'), ((913, 939), 'numpy.linalg.solve', 'np.linalg.solve', (['b_ij', 'k_i'], {}), '(b_ij, k_i)\n', (928, 939), True, 'import numpy as np\n'), ((1233, 1258), 'numpy.zeros', 'np.zeros', (['(36)'], {'dtype': 'float'}), '(36, dtype=float)\n', (1241, 1258), True, 'import numpy as np\n')] |
'''
A command library help user upload their results to dashboard.
'''
#!/usr/bin/env python
import argparse
from ..file_utils import get_resource_file_path, get_resource_list
from . import cli_constant as cli
def entry(args):
'''Entrance of show resources path and whether resource is cached or not'''
resource_names = get_resource_list()
parser = argparse.ArgumentParser(prog="cotk resources", \
description="check resources site and whether s specific resource cache is available")
parser.add_argument("--show_all", action="store_true", help="Show path of all resources")
parser.add_argument("--show_stored", action="store_true", help="Show path of all stored resource")
parser.add_argument("--show", type=str, help="Show path of a specific resource")
cargs = parser.parse_args(args)
if cargs.show_all:
cli.LOGGER.info("{:30}\t{:100}".format(
"Resource IDs", "Cache paths"))
for resource in resource_names:
cache_path = get_resource_file_path("resources://"+resource, download=False)
if cache_path is not None:
cli.LOGGER.info("{:30}\t{:100}".format(
resource, cache_path))
else:
cli.LOGGER.info("{:30}\t{:100}".format(
resource, "Not cached"))
elif cargs.show_stored:
cli.LOGGER.info("{:30}\t{:100}".format(
"Resource IDs", "Cache paths"))
for resource in resource_names:
cache_path = get_resource_file_path("resources://"+resource, download=False)
if cache_path is not None:
cli.LOGGER.info("{:30}\t{:100}".format(
resource, cache_path))
elif cargs.show is not None:
if cargs.show[:12] != ("resources://"):
raise RuntimeError('Please input a string starting with "resources://"')
if cargs.show[12:] not in resource_names:
raise RuntimeError("Unkown resource name {}".format(cargs.show[12:]))
cache_path = get_resource_file_path(cargs.show, download=False)
if cache_path is not None:
cli.LOGGER.info("{:30}\t{:100}".format(
"Resource IDs", "Cache paths"))
cli.LOGGER.info("{:30}\t{:100}".format(
cargs.show, cache_path))
else:
cli.LOGGER.info("resource {} is not cached.".format(cargs.show))
else:
raise RuntimeError("Unkown params.")
| [
"argparse.ArgumentParser"
] | [((355, 493), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""cotk resources"""', 'description': '"""check resources site and whether s specific resource cache is available"""'}), "(prog='cotk resources', description=\n 'check resources site and whether s specific resource cache is available')\n", (378, 493), False, 'import argparse\n')] |
from Bio.Data.IUPACData import protein_letters_3to1_extended
import os
import pandas as pd
import re
import sys
# Note: the 'logging' module does not work with unit tests for some reason, replaced to 'print' for now
# logging.basicConfig(level=logging.DEBUG)
COL__INFO = 'INFO'
def eprint(*a):
"""Print message to stderr (in cases when stdout is used for json generation, etc.)"""
print(*a, file=sys.stderr)
class VcfParser:
"""
Class for loading and parsing specified VCF file, with ability to extract mutations.
"""
def __init__(self, expected_vcf_format='##fileformat=VCFv4.2'):
self.df_vcf = None
self.expected_vcf_format = expected_vcf_format # First expected line. May be None to skip the check.
def read_vcf_file(self, file_name, verbose=False):
# Check if the file exists
if not os.path.isfile(file_name):
raise FileNotFoundError(f"Cannot find the specified file: '{file_name}'")
# Count number of comment lines at file beginning
comment_lines_cnt = 0
with open(file_name, 'r') as f:
for i, s in enumerate(f):
# Check the first line (format) if required
if (i == 0) and (self.expected_vcf_format is not None) and (self.expected_vcf_format != s.strip()):
raise ValueError(f"Unexpected first line of file: '{s}' instead of '{self.expected_vcf_format}'. " +
f"To skip this check, reconstruct the class with expected_vcf_format=None")
if not s.startswith('##'):
comment_lines_cnt = i
break
assert comment_lines_cnt > 0 # At least one non-comment line must be present!
# Read the file into pandas DataFrame
self.df_vcf = pd.read_csv(file_name, sep='\t', skiprows=comment_lines_cnt)
# Sanity check
for col in ['#CHROM', 'POS', 'ID', 'REF', 'ALT']:
assert col in self.df_vcf.columns, f"Cannot find column '{col}' in the file header: '{self.df_vcf.columns}'"
if verbose:
eprint(f'Success. Loaded data shape: {self.df_vcf.shape}')
def get_mutations(self, notation=None, verbose=False):
# Checks
assert notation is None, 'Currently only None is supported as notation'
if self.df_vcf is None:
raise Exception('No VCF data is loaded. Use read_vcf_file first')
# Remove duplicates (use only major fields, ignoring annotations, etc.)
df = self.df_vcf[['POS', 'ID', 'REF', 'ALT']].drop_duplicates()
cnt_duplicates = len(self.df_vcf) - len(df)
if verbose:
eprint(f'Original mutations: {len(self.df_vcf)}, unique: {len(df)}, duplicates removed: {cnt_duplicates}')
# Return pandas Series object with mutation strings
mutations_series = df.POS.astype(str) + df.REF + '>' + df.ALT
return mutations_series.tolist()
@staticmethod
def convert_protein_mutations_from_3_to_1_letters(muts: [list, set], is_strict_check=True):
"""
Convert protein mutations from 3-letter acids to 1-letter acid format. Example: "p.Thr5262Ile" -> "T5262I"
"""
new_muts = []
for mut in muts:
m = re.match(r"p\.(?P<acid1>[A-Z][a-z][a-z])(?P<pos>\d+)(?P<acid2>[A-Z][a-z][a-z])", mut)
try:
assert m, "Unexpected format (correct example: 'p.Thr42Ser')."
acid1 = m['acid1']
acid2 = m['acid2']
assert acid1 in protein_letters_3to1_extended, f'Cannot recognize acid1: {acid1}'
assert acid2 in protein_letters_3to1_extended, f'Cannot recognize acid2: {acid2}'
new_acid1 = protein_letters_3to1_extended[acid1]
new_acid2 = protein_letters_3to1_extended[acid2]
new_mut = f"{new_acid1}{m['pos']}{new_acid2}"
new_muts.append(new_mut)
except AssertionError as e:
if is_strict_check:
raise ValueError(f"Error while parsing protein mutation '{mut}': {e}.")
# Cannot sort out stderr from stdout on the backend side, so no warnings for now
#else:
# eprint(f"Warning while parsing protein mutation '{mut}' -> it will be skipped. Details: {e}")
return new_muts
@staticmethod
def _extract_protein_mutations(info_text):
"""
Example: QNAME=hCoV-19...;QSTART=274;QSTRAND=+;ANN=
T|synonymous_variant|LOW|ORF1ab|GU280_gp01|transcript|GU280_gp01|
protein_coding|1/2|c.9C>T|p.Ser3Ser|9/21291|9/21291|3/7096||,
T|synonymous_variant|LOW|ORF1ab|GU280_gp01|transcript|YP_009725297.1|
protein_coding|1/1|c.9C>T|p.Ser3Ser|9/540|9/540|3/179||WARNING_TRANSCRIPT_NO_STOP_CODON,
...,
T|upstream_gene_variant|MODIFIER|ORF1ab|GU280_gp01|transcript|YP_009742610.1|
protein_coding||c.-2446C>T|||||2446|WARNING_TRANSCRIPT_NO_START_CODON
"""
# Use regexp to find nucleotide mutations (for future use) and protein mutations
res_list = []
# for m in re.findall(r"protein_coding\|\d+/\d+\|(?P<nuc_mut>[c.\dACGT>]*)\|(?P<prot_mut>[^|]*)", info_text):
for m in re.finditer(r"protein_coding\|\d+/\d+\|(?P<nuc_mut>[c.\dACGT>]*)\|(?P<prot_mut>[^|]*)", info_text):
res_list.append(m.group('prot_mut'))
return res_list
def get_protein_mutations(self, is_strict_check=True, verbose=False):
# Checks
if self.df_vcf is None:
raise Exception('No VCF data is loaded. Use read_vcf_file first')
if COL__INFO not in self.df_vcf.columns:
raise Exception(f"Cannot find column '{COL__INFO}' in the file header: '{self.df_vcf.columns}'")
# For each row - extract information text and parse it
resulting_set = set()
for i, (_, row) in enumerate(self.df_vcf.iterrows()):
if len(row['REF']) > 1 or row['REF'] == 'N' or len(row['ALT']) > 1 or row['ALT'] == 'N':
# Skip non-relevant mutations
continue
nuc_mutation = str(row['POS']) + row['REF'] + '>' + row['ALT']
prot_mutations = self._extract_protein_mutations(row[COL__INFO])
if verbose:
eprint(f'DBG: processing row {i}. Found muts: {prot_mutations}')
unique_prot_mutations = set()
# Save only unique protein mutations
unique_prot_mutations.update(prot_mutations)
# Convert 3-letter acids to 1-letter
unique_prot_mutations_converted = self.convert_protein_mutations_from_3_to_1_letters(unique_prot_mutations, is_strict_check=is_strict_check)
# Keep nucleotide and protein mutations as one enumerator (comma separated)
resulting_string = nuc_mutation if len(prot_mutations) == 0 else nuc_mutation + ',' + ','.join(unique_prot_mutations_converted)
# Put the connected mutations in unique output set
resulting_set.update([resulting_string])
if verbose:
eprint(f'DBG: total number of found muts: {len(resulting_set)}')
resulting_list = list(resulting_set)
resulting_list = sorted(resulting_list)
return resulting_list # List enumerators (comma separated)
@staticmethod
def write_mutations_to_file(mutations: list, output_file: str):
df = pd.Series(mutations)
df.to_csv(output_file, index=False, header=False)
print(f'Success. Mutations written to file {output_file}')
| [
"pandas.Series",
"pandas.read_csv",
"re.match",
"os.path.isfile",
"re.finditer"
] | [((1814, 1874), 'pandas.read_csv', 'pd.read_csv', (['file_name'], {'sep': '"""\t"""', 'skiprows': 'comment_lines_cnt'}), "(file_name, sep='\\t', skiprows=comment_lines_cnt)\n", (1825, 1874), True, 'import pandas as pd\n'), ((5270, 5383), 're.finditer', 're.finditer', (['"""protein_coding\\\\|\\\\d+/\\\\d+\\\\|(?P<nuc_mut>[c.\\\\dACGT>]*)\\\\|(?P<prot_mut>[^|]*)"""', 'info_text'], {}), "(\n 'protein_coding\\\\|\\\\d+/\\\\d+\\\\|(?P<nuc_mut>[c.\\\\dACGT>]*)\\\\|(?P<prot_mut>[^|]*)'\n , info_text)\n", (5281, 5383), False, 'import re\n'), ((7442, 7462), 'pandas.Series', 'pd.Series', (['mutations'], {}), '(mutations)\n', (7451, 7462), True, 'import pandas as pd\n'), ((857, 882), 'os.path.isfile', 'os.path.isfile', (['file_name'], {}), '(file_name)\n', (871, 882), False, 'import os\n'), ((3270, 3365), 're.match', 're.match', (['"""p\\\\.(?P<acid1>[A-Z][a-z][a-z])(?P<pos>\\\\d+)(?P<acid2>[A-Z][a-z][a-z])"""', 'mut'], {}), "(\n 'p\\\\.(?P<acid1>[A-Z][a-z][a-z])(?P<pos>\\\\d+)(?P<acid2>[A-Z][a-z][a-z])',\n mut)\n", (3278, 3365), False, 'import re\n')] |
import os
import shutil
import json
import pandas as pd
import ast
import numpy as np
from utils.read_convergence import plot_convergence, parse, get_cffl_best
fairness_keys = [
'standalone_vs_fedavg_mean',
'standalone_vs_rrdssgd_mean',
'standalone_vs_final_mean',
]
def collect_and_compile_performance(dirname):
fairness_rows = []
performance_rows = []
for folder in os.listdir(dirname):
if os.path.isfile(os.path.join(dirname, folder)) or not 'complete.txt' in os.listdir(os.path.join(dirname, folder)):
continue
setup = parse(dirname, folder)
n_participants = setup['P']
fl_epochs = setup['Communication Rounds']
theta = setup['theta']
try:
with open(os.path.join(dirname, folder, 'aggregate_dict.txt')) as dict_log:
aggregate_dict = json.loads(dict_log.read())
with open(os.path.join(dirname, folder, 'aggregate_dict_pretrain.txt')) as dict_log:
aggregate_dict_pretrain = json.loads(dict_log.read())
f_data_row = ['P' + str(n_participants) + '_' + str(theta)] + [aggregate_dict[f_key][0] for f_key in fairness_keys]
f_data_row.append(aggregate_dict_pretrain['standalone_vs_final_mean'][0])
p_data_row = ['P' + str(n_participants) + '_' + str(theta)] + [aggregate_dict['rr_fedavg_best'][0],
aggregate_dict['rr_dssgd_best'][0],
aggregate_dict['standalone_best_participant'][0],
aggregate_dict['CFFL_best_participant'][0],
aggregate_dict_pretrain['CFFL_best_participant'][0]
]
fairness_rows.append(f_data_row)
performance_rows.append(p_data_row)
except Exception as e:
print("Compiling fairness and accuracy csvs")
print(e)
shorthand_f_keys = ['Fedavg', 'DSSGD', 'CFFL', 'CFFL pretrain']
fair_df = pd.DataFrame(fairness_rows, columns=[' '] + shorthand_f_keys).set_index(' ')
fair_df = fair_df.sort_values(' ')
print(fair_df.to_markdown())
print(os.path.join(dirname, 'fairness.csv'))
fair_df.to_csv( os.path.join(dirname, 'fairness.csv'))
shorthand_p_keys = ['Fedavg', 'DSSGD', 'Standalone', 'CFFL', 'CFFL pretrain']
pd.options.display.float_format = '{:,.2f}'.format
perf_df = pd.DataFrame(performance_rows, columns=[' '] + shorthand_p_keys).set_index(' ').T
perf_df = perf_df[sorted(perf_df.columns)]
print(perf_df.to_markdown())
perf_df.to_csv( os.path.join(dirname, 'performance.csv'))
return fair_df, perf_df
def collate_pngs(dirname):
os.makedirs(os.path.join(dirname, 'figures'), exist_ok=True)
figures_dir = os.path.join(dirname, 'figures')
for directory in os.listdir(dirname):
if os.path.isfile(os.path.join(dirname, directory)) or not 'complete.txt' in os.listdir(os.path.join(dirname, directory)):
continue
setup = parse(dirname, directory)
subdir = os.path.join(dirname, directory)
figure_name = '{}_{}_p{}e{}_cffl_localepoch{}_localbatch{}_lr{}_upload{}_pretrain0.png'.format(
setup['dataset'], setup['model'],
setup['P'], setup['Communication Rounds'],
setup['E'], setup['B'],
str(setup['lr']).replace('.', ''),
str(setup['theta']).replace('.', '').rstrip('0'))
pastfig_name = figure_name.replace('_pretrain0','')
if os.path.exists(os.path.join(figures_dir, pastfig_name)):
os.remove(os.path.join(figures_dir, pastfig_name))
shutil.copy(os.path.join(subdir,'figure.png'), os.path.join(figures_dir, figure_name) )
shutil.copy(os.path.join(subdir,'figure_pretrain.png'), os.path.join(figures_dir, figure_name.replace('pretrain0','pretrain1')) )
standalone_name = '{}_{}_p{}e{}_standalone.png'.format(
setup['dataset'], setup['model'],
setup['P'], setup['Communication Rounds'])
shutil.copy(os.path.join(subdir,'standlone.png'), os.path.join(figures_dir, standalone_name) )
convergence_name = '{}_{}_p{}e{}_upload{}_convergence.png'.format(
setup['dataset'], setup['model'],
setup['P'], setup['Communication Rounds'],
str(setup['theta']).replace('.', '').rstrip('0'))
shutil.copy(os.path.join(subdir,'convergence_for_one.png'), os.path.join(figures_dir, convergence_name) )
return
def examine(dirname):
experiment_results = plot_convergence(dirname)
collate_pngs(dirname)
fair_df, perf_df = collect_and_compile_performance(dirname)
if __name__ == '__main__':
"""
Give the directory to the experiment to dirname
"""
dirname = 'cifar10/Experiments_2020-08-06-01:21'
examine(dirname)
| [
"os.listdir",
"os.path.join",
"utils.read_convergence.plot_convergence",
"utils.read_convergence.parse",
"pandas.DataFrame"
] | [((383, 402), 'os.listdir', 'os.listdir', (['dirname'], {}), '(dirname)\n', (393, 402), False, 'import os\n'), ((2500, 2532), 'os.path.join', 'os.path.join', (['dirname', '"""figures"""'], {}), "(dirname, 'figures')\n", (2512, 2532), False, 'import os\n'), ((2553, 2572), 'os.listdir', 'os.listdir', (['dirname'], {}), '(dirname)\n', (2563, 2572), False, 'import os\n'), ((4101, 4126), 'utils.read_convergence.plot_convergence', 'plot_convergence', (['dirname'], {}), '(dirname)\n', (4117, 4126), False, 'from utils.read_convergence import plot_convergence, parse, get_cffl_best\n'), ((546, 568), 'utils.read_convergence.parse', 'parse', (['dirname', 'folder'], {}), '(dirname, folder)\n', (551, 568), False, 'from utils.read_convergence import plot_convergence, parse, get_cffl_best\n'), ((1915, 1952), 'os.path.join', 'os.path.join', (['dirname', '"""fairness.csv"""'], {}), "(dirname, 'fairness.csv')\n", (1927, 1952), False, 'import os\n'), ((1971, 2008), 'os.path.join', 'os.path.join', (['dirname', '"""fairness.csv"""'], {}), "(dirname, 'fairness.csv')\n", (1983, 2008), False, 'import os\n'), ((2326, 2366), 'os.path.join', 'os.path.join', (['dirname', '"""performance.csv"""'], {}), "(dirname, 'performance.csv')\n", (2338, 2366), False, 'import os\n'), ((2436, 2468), 'os.path.join', 'os.path.join', (['dirname', '"""figures"""'], {}), "(dirname, 'figures')\n", (2448, 2468), False, 'import os\n'), ((2722, 2747), 'utils.read_convergence.parse', 'parse', (['dirname', 'directory'], {}), '(dirname, directory)\n', (2727, 2747), False, 'from utils.read_convergence import plot_convergence, parse, get_cffl_best\n'), ((2760, 2792), 'os.path.join', 'os.path.join', (['dirname', 'directory'], {}), '(dirname, directory)\n', (2772, 2792), False, 'import os\n'), ((1763, 1824), 'pandas.DataFrame', 'pd.DataFrame', (['fairness_rows'], {'columns': "([' '] + shorthand_f_keys)"}), "(fairness_rows, columns=[' '] + shorthand_f_keys)\n", (1775, 1824), True, 'import pandas as pd\n'), ((3168, 
3207), 'os.path.join', 'os.path.join', (['figures_dir', 'pastfig_name'], {}), '(figures_dir, pastfig_name)\n', (3180, 3207), False, 'import os\n'), ((3278, 3312), 'os.path.join', 'os.path.join', (['subdir', '"""figure.png"""'], {}), "(subdir, 'figure.png')\n", (3290, 3312), False, 'import os\n'), ((3314, 3352), 'os.path.join', 'os.path.join', (['figures_dir', 'figure_name'], {}), '(figures_dir, figure_name)\n', (3326, 3352), False, 'import os\n'), ((3369, 3412), 'os.path.join', 'os.path.join', (['subdir', '"""figure_pretrain.png"""'], {}), "(subdir, 'figure_pretrain.png')\n", (3381, 3412), False, 'import os\n'), ((3645, 3682), 'os.path.join', 'os.path.join', (['subdir', '"""standlone.png"""'], {}), "(subdir, 'standlone.png')\n", (3657, 3682), False, 'import os\n'), ((3685, 3727), 'os.path.join', 'os.path.join', (['figures_dir', 'standalone_name'], {}), '(figures_dir, standalone_name)\n', (3697, 3727), False, 'import os\n'), ((3950, 3997), 'os.path.join', 'os.path.join', (['subdir', '"""convergence_for_one.png"""'], {}), "(subdir, 'convergence_for_one.png')\n", (3962, 3997), False, 'import os\n'), ((4000, 4043), 'os.path.join', 'os.path.join', (['figures_dir', 'convergence_name'], {}), '(figures_dir, convergence_name)\n', (4012, 4043), False, 'import os\n'), ((424, 453), 'os.path.join', 'os.path.join', (['dirname', 'folder'], {}), '(dirname, folder)\n', (436, 453), False, 'import os\n'), ((2153, 2217), 'pandas.DataFrame', 'pd.DataFrame', (['performance_rows'], {'columns': "([' '] + shorthand_p_keys)"}), "(performance_rows, columns=[' '] + shorthand_p_keys)\n", (2165, 2217), True, 'import pandas as pd\n'), ((2594, 2626), 'os.path.join', 'os.path.join', (['dirname', 'directory'], {}), '(dirname, directory)\n', (2606, 2626), False, 'import os\n'), ((3223, 3262), 'os.path.join', 'os.path.join', (['figures_dir', 'pastfig_name'], {}), '(figures_dir, pastfig_name)\n', (3235, 3262), False, 'import os\n'), ((689, 740), 'os.path.join', 'os.path.join', (['dirname', 'folder', 
'"""aggregate_dict.txt"""'], {}), "(dirname, folder, 'aggregate_dict.txt')\n", (701, 740), False, 'import os\n'), ((818, 878), 'os.path.join', 'os.path.join', (['dirname', 'folder', '"""aggregate_dict_pretrain.txt"""'], {}), "(dirname, folder, 'aggregate_dict_pretrain.txt')\n", (830, 878), False, 'import os\n'), ((491, 520), 'os.path.join', 'os.path.join', (['dirname', 'folder'], {}), '(dirname, folder)\n', (503, 520), False, 'import os\n'), ((2664, 2696), 'os.path.join', 'os.path.join', (['dirname', 'directory'], {}), '(dirname, directory)\n', (2676, 2696), False, 'import os\n')] |
#!/usr/bin/python
import sys
import requests
import json
import kerberos
"""
Queries the YARN Job History Server (JHS) for a given jobId and returns the Hive counters
Works with unauthenticated and SPNEGO (Kerberos) authenticated API endpoints
"""
__author__ = "<NAME>"
__copyright__ = "Copyright 2016, <NAME>"
__license__ = "Apache License Version 2.0"
# Global configuration
hostname="db-secure.local" # Hostname of job history server
port="19888" # Job history server port number
# getNegotiateString
# Returns the Negotiate header payload for SPNEGO authentication
def getNegotiateString(service, hostname):
negotiate = None
__, krb_context = kerberos.authGSSClientInit("%s@%s" % (service, hostname))
kerberos.authGSSClientStep(krb_context, "")
return kerberos.authGSSClientResponse(krb_context)
# getHttpResponse
# Attempts an unauthenticated call to url, then attempts SPNEGO auth if required
# This does not attempt a Kerberos login, you need to kinit before running this script
def getHttpResponse(url):
response = requests.get(url)
# Check to see if the API endpoint requires Kerberos (SPNEGO) authentication
if (response.status_code == 401 and response.headers["www-authenticate"].startswith("Negotiate")):
# SPNEGO authentication required, let's get a HTTP ticket for the API
negotiateString = getNegotiateString("HTTP", hostname)
if (negotiateString == None):
sys.stderr.write("Error: Unable to get Kerberos authentication header. Did you kinit?")
# Build a new HTTP response using SPNEGO
headers = {"Authorization": "Negotiate " + negotiateString}
response = requests.get(url, headers=headers)
return response
# getHiveCounters
# Extracts the Hive counters from the JSON received from the job history server
def getHiveCounters(jData):
hiveCounters = None
for counterGroup in jData['jobCounters']['counterGroup']:
if counterGroup['counterGroupName'] == "HIVE":
hiveCounters = counterGroup['counter']
return hiveCounters
def main():
if len(sys.argv) != 2:
sys.stderr.write("Usage: get_counters.py <job_id>")
exit(1)
# The script takes one argument, a YARN jobId for a Hive job
jobIds=sys.argv[1]
allMetrics = {}
allMetrics['hiveJobCounters'] = []
for jobId in jobIds.split(","):
url = 'http://%s:%s/ws/v1/history/mapreduce/jobs/%s/counters' % (hostname, port, jobId)
response = getHttpResponse(url)
# We should either have a non-secure or a SPNEGO response object at this point
if (response.status_code != 200):
# A 404 response indicates the jobId was not found on the server
if (response.status_code == 404):
sys.stderr.write("Error: jobId %s not found on job history server" % (jobId))
else:
sys.stderr.write("HTTP %d: Unable to get counters" % (response.status_code))
exit(1)
jData = json.loads(response.content)
hiveCounters = getHiveCounters(jData)
if (hiveCounters == None):
sys.stderr.write("No Hive counters in job output, was %s really a Hive job?" % jobId)
exit(2)
metrics = {}
counters = {}
metrics['jobId'] = jobId
for counter in hiveCounters:
counters[counter['name']] = counter['totalCounterValue']
metrics['jobId'] = jobId
metrics['counters'] = counters
allMetrics['hiveJobCounters'].append(metrics)
result = "metrics=" + str(allMetrics) + "\n"
# We can log the result to a file
f = open('/tmp/output.txt', 'a')
f.write(result)
f.close()
# Alternatively we can write the result to stdout
sys.stdout.write(result)
if __name__ == '__main__': main()
| [
"json.loads",
"kerberos.authGSSClientInit",
"requests.get",
"sys.stderr.write",
"kerberos.authGSSClientStep",
"kerberos.authGSSClientResponse",
"sys.stdout.write"
] | [((662, 719), 'kerberos.authGSSClientInit', 'kerberos.authGSSClientInit', (["('%s@%s' % (service, hostname))"], {}), "('%s@%s' % (service, hostname))\n", (688, 719), False, 'import kerberos\n'), ((722, 765), 'kerberos.authGSSClientStep', 'kerberos.authGSSClientStep', (['krb_context', '""""""'], {}), "(krb_context, '')\n", (748, 765), False, 'import kerberos\n'), ((775, 818), 'kerberos.authGSSClientResponse', 'kerberos.authGSSClientResponse', (['krb_context'], {}), '(krb_context)\n', (805, 818), False, 'import kerberos\n'), ((1045, 1062), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1057, 1062), False, 'import requests\n'), ((3561, 3585), 'sys.stdout.write', 'sys.stdout.write', (['result'], {}), '(result)\n', (3577, 3585), False, 'import sys\n'), ((1628, 1662), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (1640, 1662), False, 'import requests\n'), ((2050, 2101), 'sys.stderr.write', 'sys.stderr.write', (['"""Usage: get_counters.py <job_id>"""'], {}), "('Usage: get_counters.py <job_id>')\n", (2066, 2101), False, 'import sys\n'), ((2865, 2893), 'json.loads', 'json.loads', (['response.content'], {}), '(response.content)\n', (2875, 2893), False, 'import json\n'), ((1416, 1508), 'sys.stderr.write', 'sys.stderr.write', (['"""Error: Unable to get Kerberos authentication header. Did you kinit?"""'], {}), "(\n 'Error: Unable to get Kerberos authentication header. Did you kinit?')\n", (1432, 1508), False, 'import sys\n'), ((2973, 3063), 'sys.stderr.write', 'sys.stderr.write', (["('No Hive counters in job output, was %s really a Hive job?' % jobId)"], {}), "(\n 'No Hive counters in job output, was %s really a Hive job?' 
% jobId)\n", (2989, 3063), False, 'import sys\n'), ((2663, 2738), 'sys.stderr.write', 'sys.stderr.write', (["('Error: jobId %s not found on job history server' % jobId)"], {}), "('Error: jobId %s not found on job history server' % jobId)\n", (2679, 2738), False, 'import sys\n'), ((2761, 2835), 'sys.stderr.write', 'sys.stderr.write', (["('HTTP %d: Unable to get counters' % response.status_code)"], {}), "('HTTP %d: Unable to get counters' % response.status_code)\n", (2777, 2835), False, 'import sys\n')] |
#!/usr/bin/python
# -*- coding: utf8 -*-
"""
Functions and classes for handling next generation sequencing data.
:author: <NAME>
:license: FreeBSD
License
----------
Copyright (c) 2016, <NAME>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from __future__ import division, absolute_import, unicode_literals, print_function
import datetime
from .Formats import GFF, GFF3Entry
def extractProm(inGFF, name, path, upstram=499, downstream=100, filter={}):
    """
    Extract promoter regions from a GFF file as provided by GENCODE
    (http://www.gencodegenes.org) and write them to a new GFF file.

    Genes can be filtered by GFF fields and attributes before extraction.

    Parameters
    ----------
    inGFF : GFF
        GFF object with a filled dataset. Each entry describes a gene
        region; the promoter is extracted relative to its TSS.
    name : str
        Filename of the resulting GFF file.
    path : str
        Path where the result is saved.
    upstram : int
        Number of bases upstream of the TSS (in front of the gene) to
        include in the promoter.  NOTE(review): the parameter name carries
        a historical typo ("upstram"); kept for API compatibility.
    downstream : int
        Number of bases downstream of the TSS (inside the gene) to
        include in the promoter.
    filter : dict
        Regions are only considered if they match the filter entries.
        Keys can be any attribute of a GFF entry.  For most keys membership
        is tested (the entry value must be contained in the filter value);
        for "start" and "score" the entry value must be greater than the
        filter value, for "end" it must be smaller.
        NOTE: mutable default argument; harmless here because it is only
        read, never mutated.

    Raises
    ------
    ValueError
        If `inGFF` contains no entries.
    """
    def check_attr(filter_attr, item_attr):
        # Every filtered attribute must match for the entry to pass.
        for key in filter_attr.keys():
            if not item_attr[key] in filter_attr[key]:
                return False
        return True
    if len(inGFF) == 0:
        raise ValueError("No data in input GFF")
    outGFF = GFF()
    # Record the extraction date in the file header for provenance.
    outGFF._header_general.append("## Promotors extracted at: " + str(datetime.date.today()) + "\n")
    for gff in inGFF:
        # Apply the requested filters; skip entries that do not match.
        # Membership filters:
        if "seqid" in filter:
            if not gff.seqid in filter["seqid"]:
                continue
        if "source" in filter:
            if not gff.source in filter["source"]:
                continue
        if "type" in filter:
            if not gff.type in filter["type"]:
                continue
        # Threshold filters: start/score must exceed, end must be below.
        if "start" in filter:
            if not gff.start > filter["start"]:
                continue
        if "end" in filter:
            if not gff.end < filter["end"]:
                continue
        if "score" in filter:
            if gff.score < filter["score"]:
                continue
        if "strand" in filter:
            if not gff.strand in filter["strand"]:
                continue
        if "phase" in filter:
            if not gff.phase in filter["phase"]:
                continue
        if "attributes" in filter:
            if not check_attr(filter["attributes"], gff.attributes):
                continue
        seqid = gff.seqid
        source = "ROI"
        type = "promotor"
        strand = gff.strand
        attributes = {}
        attributes["ID"] = gff.attributes["ID"]
        # The TSS position differs between + and - strands, so the
        # up/downstream window is mirrored for the - strand.
        if gff.strand == '-':
            start = gff.end - downstream
            end = gff.end + upstram
        else: # '+' strand behavior is assumed for '.'
            start = gff.start - upstram
            end = gff.start + downstream
        entry = GFF3Entry(seqid, source, type, start, end, strand=strand, attributes=attributes)
        outGFF.append(entry)
    outGFF.sort()
    outGFF.write(name, path) | [
"datetime.date.today"
] | [((3222, 3243), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (3241, 3243), False, 'import datetime\n')] |
import numpy as np
def fit_MRF_pseudolikelihood(adj_exc, adj_inh, y):
    '''
    Fit a Markov random field using maximum pseudolikelihood estimation,
    also known as logistic regression. The conditional probabilities
    follow
    y_i ~ Logistic(B[0] + B[1] A1_{ij} y_j + B[2] A1_{ij} (1-y_j)
                   + B[3] A2_{ij} y_j + B[4] A2_{ij} (1-y_j)),
    where A1 = adj_exc and A2 = adj_inh and each term is summed over
    j.

    Params
    ======
    adj_exc: excitatory adjacency matrix (N x N)
    adj_inh: inhibitory adjacency matrix (N x N)
    y: site variables, 0 or 1 (length N)

    Returns
    =======
    B: logistic regression coefficients [intercept, coef_1..coef_4], or an
       all-NaN array when y contains fewer than two distinct classes
       (logistic regression cannot be fit in that case)
    '''
    # FIX: the dead `from sklearn import cross_validation` import was removed;
    # that module was dropped in scikit-learn 0.20 and made this function
    # raise ImportError on modern installs even though it was never used.
    from sklearn.linear_model import LogisticRegression

    if len(np.unique(y)) < 2:
        # Only one class present: nothing to fit, signal failure with NaNs.
        B = np.array([np.nan, np.nan, np.nan, np.nan, np.nan])
    else:
        N = y.shape[0]
        # After the transpose, ytile[i, j] = y[i], so summing over axis 0
        # gives X1[j] = sum_i adj_exc[i, j] * y[i] (active neighbours) and
        # X2[j] the analogous sum over inactive neighbours; same for adj_inh.
        ytile = np.tile(y, (N, 1)).T
        X1 = np.array(np.sum(np.multiply(adj_exc, ytile), 0)).flatten()
        X2 = np.array(np.sum(np.multiply(adj_exc, 1 - ytile), 0)).flatten()
        X3 = np.array(np.sum(np.multiply(adj_inh, ytile), 0)).flatten()
        X4 = np.array(np.sum(np.multiply(adj_inh, 1 - ytile), 0)).flatten()
        model = LogisticRegression(penalty='l2')
        X = np.column_stack((X1, X2, X3, X4))
        model.fit(X, y)
        # Intercept first, then the four coupling coefficients.
        B = np.hstack((model.intercept_, model.coef_.flatten()))
    return B
def predict_MRF(B, adj_exc, adj_inh, burn_in=4e3, steps=1e4,
                skip_multiple=3):
    '''
    Perform prediction with an MRF (Markov random field). Uses Gibbs sampling
    to sample from the distribution P(y) = 1/Z exp( -H(y) ).
    The Hamiltonian is:
    H = \sum_{ij} y_i (B[0] + B[1] A1_{ji} y_j + B[2] A1_{ji} (1-y_j)
    + B[3] A2_{ji} y_j + B[4] A2_{ji} (1-y_j))

    Params
    ======
    B: coefficients of the MRF
    adj_exc: excitatory adjacency matrix
    adj_inh: inhibitory adjacency matrix
    burn_in: number of burn-in steps to take (default: 4000)
    steps: total number of Gibbs steps to take (default: 10000)
    skip_multiple: skips skip_multiple * num_neuron steps between samples

    Returns
    =======
    ypostmean: posterior mean of state

    NOTE(review): gibbs_proba below treats B[4] as the intercept and
    B[0..3] as the couplings, while fit_MRF_pseudolikelihood returns B
    with the intercept at B[0] -- verify the coefficient ordering before
    chaining the two functions.
    NOTE: adj_exc and adj_inh are mutated in place (diagonals zeroed).
    '''
    import numpy.random
    def gibbs_proba(y,B,adj_exc,adj_inh):
        # Conditional probability P(y_i = 1 | y_{-i}) via the logistic link.
        term0=B[0]*np.dot(adj_exc.T,y)
        term1=B[1]*np.dot(adj_exc.T,(1-y))
        term2=B[2]*np.dot(adj_inh.T,y)
        term3=B[3]*np.dot(adj_inh.T,(1-y))
        e=B[4]+term0+term1+term2+term3
        return np.exp(e)/(np.exp(e)+1.0)
    N=adj_exc.shape[0]
    steps=int(steps)
    # run a Gibbs sampler, starting from a random (continuous) state
    y=np.random.rand(N,1)
    samples=np.zeros((N,steps))
    # zero diagonals (should be 0 already); mutates the caller's arrays
    np.fill_diagonal(adj_exc,0)
    np.fill_diagonal(adj_inh,0)
    for ii in range(steps):
        yt=y
        # Gibbs update: resample every site against its conditional probability.
        # NOTE(review): np.float was removed in NumPy 1.24; this line needs
        # plain `float` on modern NumPy.
        proba=gibbs_proba(y,B,adj_exc,adj_inh)
        yt=np.array(np.random.rand(N,1) < proba,dtype=np.float)
        y=yt
        samples[:,ii]=y.flatten()
    # Posterior mean over thinned, post-burn-in samples (every
    # skip_multiple*N-th step to reduce autocorrelation).
    use_indices=np.arange(burn_in,steps,skip_multiple*N, dtype=int)
    final_samples=samples[:,use_indices]
    ypostmean=np.mean(final_samples,axis=1)
    return ypostmean
def fit_logistic_graph_features():
    """Placeholder: logistic-regression fitting on graph features is not implemented yet."""
def get_node_features(adj_exc, adj_inh, normalize_centrality=True):
    '''
    Get node-based features to train a logistic classifier.

    Params
    ======
    adj_exc: excitatory adjacency matrix
    adj_inh: inhibitory adjacency matrix
    normalize_centrality: normalize load/betweenness centrality? (default: True)

    Returns
    =======
    X: numneuron x numfeatures array to be used with logistic regression
    X_labels: list of feature names, one per column of X
    '''
    import networkx as nx
    G_exc = nx.DiGraph(adj_exc)
    G_inh = nx.DiGraph(adj_inh)

    def dict_to_array(d):
        # Order values by sorted node label so rows of X stay aligned.
        return np.array([d[i] for i in sorted(d)])

    def features(G, normalize_centrality):
        '''
        Return the per-node features of interest as a dict of
        {feature_name: {node: value}} mappings.
        '''
        d = {}
        # dict() makes this work with both networkx 1.x (dict) and 2.x
        # (DegreeView) return types.
        d['in_degree'] = dict(G.in_degree())
        d['out_degree'] = dict(G.out_degree())
        d['load_centrality'] = nx.load_centrality(G, normalized=normalize_centrality)
        # FIX: the key was previously assigned from the misspelled name
        # `betweennes_centrality`, which raised NameError.
        d['betweenness_centrality'] = nx.betweenness_centrality(G, normalized=normalize_centrality)
        # FIX: eigenvector_centrality_numpy takes no `normalized` keyword;
        # its result is always normalized to unit Euclidean norm.
        d['eigenvector_centrality'] = nx.eigenvector_centrality_numpy(G)
        # FIX: modern networkx closeness_centrality has no `normalized`
        # keyword either (it normalizes by default).
        d['closeness_centrality'] = nx.closeness_centrality(G)
        d['core_number'] = nx.core_number(G)
        return d

    # FIX: features() was previously called without its required second
    # argument, raising TypeError.
    d_exc = features(G_exc, normalize_centrality)
    d_inh = features(G_inh, normalize_centrality)
    # Set up the output matrix: one column per (feature, graph-class) pair.
    num_features = len(d_exc) + len(d_inh)
    # FIX: previously referenced an undefined `G`; both graphs have the
    # same node count, so use the excitatory one.
    num_nodes = G_exc.number_of_nodes()
    # FIX: np.float was removed in NumPy 1.24; use the builtin float.
    X = np.zeros((num_nodes, num_features), dtype=float)
    X_labels = []
    # Fill in X and X_labels column by column, excitatory features first.
    feature_index = 0
    for gclass in ('exc', 'inh'):
        d = d_exc if gclass == 'exc' else d_inh
        for feature in sorted(d):
            X_labels.append(feature + "_" + gclass)
            X[:, feature_index] = dict_to_array(d[feature])
            feature_index += 1
    return X, X_labels
| [
"numpy.random.rand",
"numpy.column_stack",
"numpy.array",
"networkx.closeness_centrality",
"networkx.betweenness_centrality",
"numpy.arange",
"networkx.eigenvector_centrality_numpy",
"numpy.mean",
"numpy.multiply",
"networkx.clustering",
"networkx.DiGraph",
"numpy.exp",
"numpy.dot",
"netwo... | [((2585, 2605), 'numpy.random.rand', 'np.random.rand', (['N', '(1)'], {}), '(N, 1)\n', (2599, 2605), True, 'import numpy as np\n'), ((2617, 2637), 'numpy.zeros', 'np.zeros', (['(N, steps)'], {}), '((N, steps))\n', (2625, 2637), True, 'import numpy as np\n'), ((2684, 2712), 'numpy.fill_diagonal', 'np.fill_diagonal', (['adj_exc', '(0)'], {}), '(adj_exc, 0)\n', (2700, 2712), True, 'import numpy as np\n'), ((2717, 2745), 'numpy.fill_diagonal', 'np.fill_diagonal', (['adj_inh', '(0)'], {}), '(adj_inh, 0)\n', (2733, 2745), True, 'import numpy as np\n'), ((3001, 3056), 'numpy.arange', 'np.arange', (['burn_in', 'steps', '(skip_multiple * N)'], {'dtype': 'int'}), '(burn_in, steps, skip_multiple * N, dtype=int)\n', (3010, 3056), True, 'import numpy as np\n'), ((3108, 3138), 'numpy.mean', 'np.mean', (['final_samples'], {'axis': '(1)'}), '(final_samples, axis=1)\n', (3115, 3138), True, 'import numpy as np\n'), ((3681, 3700), 'networkx.DiGraph', 'nx.DiGraph', (['adj_exc'], {}), '(adj_exc)\n', (3691, 3700), True, 'import networkx as nx\n'), ((3711, 3730), 'networkx.DiGraph', 'nx.DiGraph', (['adj_inh'], {}), '(adj_inh)\n', (3721, 3730), True, 'import networkx as nx\n'), ((4975, 5026), 'numpy.zeros', 'np.zeros', (['(num_nodes, num_features)'], {'dtype': 'np.float'}), '((num_nodes, num_features), dtype=np.float)\n', (4983, 5026), True, 'import numpy as np\n'), ((791, 841), 'numpy.array', 'np.array', (['[np.nan, np.nan, np.nan, np.nan, np.nan]'], {}), '([np.nan, np.nan, np.nan, np.nan, np.nan])\n', (799, 841), True, 'import numpy as np\n'), ((1192, 1224), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'penalty': '"""l2"""'}), "(penalty='l2')\n", (1210, 1224), False, 'from sklearn.linear_model import LogisticRegression\n'), ((1235, 1268), 'numpy.column_stack', 'np.column_stack', (['(X1, X2, X3, X4)'], {}), '((X1, X2, X3, X4))\n', (1250, 1268), True, 'import numpy as np\n'), ((3968, 4022), 'networkx.load_centrality', 'nx.load_centrality', (['G'], 
{'normalized': 'normalize_centrality'}), '(G, normalized=normalize_centrality)\n', (3986, 4022), True, 'import networkx as nx\n'), ((4053, 4114), 'networkx.betweenness_centrality', 'nx.betweenness_centrality', (['G'], {'normalized': 'normalize_centrality'}), '(G, normalized=normalize_centrality)\n', (4078, 4114), True, 'import networkx as nx\n'), ((4145, 4212), 'networkx.eigenvector_centrality_numpy', 'nx.eigenvector_centrality_numpy', (['G'], {'normalized': 'normalize_centrality'}), '(G, normalized=normalize_centrality)\n', (4176, 4212), True, 'import networkx as nx\n'), ((4241, 4300), 'networkx.closeness_centrality', 'nx.closeness_centrality', (['G'], {'normalized': 'normalize_centrality'}), '(G, normalized=normalize_centrality)\n', (4264, 4300), True, 'import networkx as nx\n'), ((4386, 4403), 'networkx.core_number', 'nx.core_number', (['G'], {}), '(G)\n', (4400, 4403), True, 'import networkx as nx\n'), ((4423, 4439), 'networkx.clustering', 'nx.clustering', (['G'], {}), '(G)\n', (4436, 4439), True, 'import networkx as nx\n'), ((762, 774), 'numpy.unique', 'np.unique', (['y'], {}), '(y)\n', (771, 774), True, 'import numpy as np\n'), ((883, 901), 'numpy.tile', 'np.tile', (['y', '(N, 1)'], {}), '(y, (N, 1))\n', (890, 901), True, 'import numpy as np\n'), ((2279, 2299), 'numpy.dot', 'np.dot', (['adj_exc.T', 'y'], {}), '(adj_exc.T, y)\n', (2285, 2299), True, 'import numpy as np\n'), ((2318, 2342), 'numpy.dot', 'np.dot', (['adj_exc.T', '(1 - y)'], {}), '(adj_exc.T, 1 - y)\n', (2324, 2342), True, 'import numpy as np\n'), ((2361, 2381), 'numpy.dot', 'np.dot', (['adj_inh.T', 'y'], {}), '(adj_inh.T, y)\n', (2367, 2381), True, 'import numpy as np\n'), ((2400, 2424), 'numpy.dot', 'np.dot', (['adj_inh.T', '(1 - y)'], {}), '(adj_inh.T, 1 - y)\n', (2406, 2424), True, 'import numpy as np\n'), ((2478, 2487), 'numpy.exp', 'np.exp', (['e'], {}), '(e)\n', (2484, 2487), True, 'import numpy as np\n'), ((2489, 2498), 'numpy.exp', 'np.exp', (['e'], {}), '(e)\n', (2495, 2498), True, 
'import numpy as np\n'), ((2876, 2896), 'numpy.random.rand', 'np.random.rand', (['N', '(1)'], {}), '(N, 1)\n', (2890, 2896), True, 'import numpy as np\n'), ((929, 956), 'numpy.multiply', 'np.multiply', (['adj_exc', 'ytile'], {}), '(adj_exc, ytile)\n', (940, 956), True, 'import numpy as np\n'), ((997, 1028), 'numpy.multiply', 'np.multiply', (['adj_exc', '(1 - ytile)'], {}), '(adj_exc, 1 - ytile)\n', (1008, 1028), True, 'import numpy as np\n'), ((1067, 1094), 'numpy.multiply', 'np.multiply', (['adj_inh', 'ytile'], {}), '(adj_inh, ytile)\n', (1078, 1094), True, 'import numpy as np\n'), ((1135, 1166), 'numpy.multiply', 'np.multiply', (['adj_inh', '(1 - ytile)'], {}), '(adj_inh, 1 - ytile)\n', (1146, 1166), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds the UploadImage model to the `wuqian` app; depends on the
    # migration that added the image field to WuqianBusiness.
    dependencies = [
        ('wuqian', '0008_wuqianbusiness_image'),
    ]
    operations = [
        migrations.CreateModel(
            name='UploadImage',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # verbose_name bytes decode (UTF-8) to "upload image" in Chinese;
                # uploads land under media/<year>/<month>/<day>.
                ('image', models.ImageField(upload_to=b'media/%Y/%m/%d', null=True, verbose_name=b'\xe4\xb8\x8a\xe4\xbc\xa0\xe5\x9b\xbe\xe7\x89\x87')),
            ],
            options={
                # Escapes decode to "image upload" in Chinese.
                'verbose_name': '\u56fe\u7247\u4e0a\u4f20',
                'verbose_name_plural': '\u56fe\u7247\u4e0a\u4f20',
            },
        ),
    ]
| [
"django.db.models.ImageField",
"django.db.models.AutoField"
] | [((352, 445), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (368, 445), False, 'from django.db import models, migrations\n'), ((470, 598), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': "b'media/%Y/%m/%d'", 'null': '(True)', 'verbose_name': "b'\\xe4\\xb8\\x8a\\xe4\\xbc\\xa0\\xe5\\x9b\\xbe\\xe7\\x89\\x87'"}), "(upload_to=b'media/%Y/%m/%d', null=True, verbose_name=\n b'\\xe4\\xb8\\x8a\\xe4\\xbc\\xa0\\xe5\\x9b\\xbe\\xe7\\x89\\x87')\n", (487, 598), False, 'from django.db import models, migrations\n')] |
from __future__ import print_function
import numpy as np
import treegp
from treegp_test_helper import timer
from treegp_test_helper import get_correlation_length_matrix
from treegp_test_helper import make_1d_grf
from treegp_test_helper import make_2d_grf
@timer
def test_hyperparameter_search_1d():
    # End-to-end check: simulate a 1D Gaussian random field from a known
    # kernel, refit the hyperparameters with each optimizer, and validate
    # interpolation/extrapolation behavior. Each of the 4 kernels is run
    # with both optimizers (npoints is indexed by optimizer, the rest by
    # kernel).
    optimizer = ['log-likelihood', 'two-pcf']
    npoints = [100, 2000]
    noise = 0.01
    sigma = [1., 2., 1., 2.]
    l = [0.5, 0.8, 8., 10.]
    kernels = ['RBF', 'RBF', 'VonKarman', 'VonKarman']
    max_sep = [1.75, 1.75, 1.25, 1.25]
    for n, opt in enumerate(optimizer):
        for i, ker in enumerate(kernels):
            # Generate 1D gaussian random fields.
            kernel = "%f**2 * %s(%f)"%((sigma[i], ker, l[i]))
            kernel_skl = treegp.eval_kernel(kernel)
            x, y, y_err = make_1d_grf(kernel_skl,
                                      noise=noise,
                                      seed=42, npoints=npoints[n])
            # Do gp interpolation with hyperparameter fitting
            # (truth is put in initially).
            gp = treegp.GPInterpolation(kernel=kernel, optimizer=opt,
                                        normalize=True, nbins=15, min_sep=0.1,
                                        max_sep=max_sep[i])
            gp.initialize(x, y, y_err=y_err)
            gp.solve()
            # Test if found hyperparameters are close to the true hyperparameters.
            np.testing.assert_allclose(kernel_skl.theta, gp.kernel.theta, atol=7e-1)
            if opt == "two-pcf":
                xi, xi_weight, distance, coord, mask = gp.return_2pcf()
                np.testing.assert_allclose(xi, gp._optimizer._2pcf, atol=1e-10)
            if opt == "log-likelihood":
                logL = gp.return_log_likelihood()
                np.testing.assert_allclose(logL, gp._optimizer._logL, atol=1e-10)
            # Predict at same position as the simulated data.
            # Predictions are strictly equal to the input data
            # in the case of no noise. With noise you should expect
            # to have a pull distribution with mean around 0
            # and std < 1 (you use the same data to train and validate, and
            # the data are well sampled compared to the input correlation
            # length).
            y_predict, y_cov = gp.predict(x, return_cov=True)
            y_std = np.sqrt(np.diag(y_cov))
            pull = y - y_predict
            mean_pull = np.mean(pull)
            std_pull = np.std(pull)
            # Test that the mean of the pull is close to zero and its std below 1.
            np.testing.assert_allclose(0., mean_pull, atol=3.*(std_pull)/np.sqrt(npoints[n]))
            if std_pull > 1.:
                raise ValueError("std_pull is > 1. Current value std_pull = %f"%(std_pull))
            # Test that for extrapolation (6-7 correlation lengths beyond the
            # data), the interpolation returns the mean function and the
            # predicted std matches the fitted kernel amplitude.
            new_x = np.linspace(np.max(x)+6.*l[i], np.max(x)+7.*l[i], npoints[n]).reshape((npoints[n],1))
            y_predict, y_cov = gp.predict(new_x, return_cov=True)
            y_std = np.sqrt(np.diag(y_cov))
            np.testing.assert_allclose(np.mean(y)*np.ones_like(y_std), y_predict, atol=1e-5)
            sig = np.sqrt(np.exp(gp.kernel.theta[0]))
            np.testing.assert_allclose(sig*np.ones_like(y_std), y_std, atol=1e-5)
@timer
def test_hyperparameter_search_2d():
    # Same idea as the 1D test but with anisotropic 2D kernels: simulate a
    # field from a known anisotropic kernel, refit, then validate
    # interpolation and extrapolation. All three configs are indexed by n.
    optimizer = ['log-likelihood', 'anisotropic', 'anisotropic']
    npoints = [600, 2000, 2000]
    noise = 0.01
    sigma = 2.
    size = [0.5, 0.5, 1.5]
    g1 = 0.2
    g2 = 0.2
    ker = ['AnisotropicRBF', 'AnisotropicRBF', 'AnisotropicVonKarman']
    for n, opt in enumerate(optimizer):
        # Generate 2D gaussian random fields.
        L = get_correlation_length_matrix(size[n], g1, g2)
        invL = np.linalg.inv(L)
        kernel = "%f**2*%s"%((sigma, ker[n]))
        kernel += "(invLam={0!r})".format(invL)
        kernel_skl = treegp.eval_kernel(kernel)
        x, y, y_err = make_2d_grf(kernel_skl,
                                  noise=noise,
                                  seed=42, npoints=npoints[n])
        # Do gp interpolation with hyperparameter fitting
        # (truth is put in initially).
        gp = treegp.GPInterpolation(kernel=kernel, optimizer=opt,
                                    normalize=True, nbins=21, min_sep=0.,
                                    max_sep=1., p0=[0.3, 0.,0.])
        gp.initialize(x, y, y_err=y_err)
        gp.solve()
        # Test if found hyperparameters are close to the true hyperparameters.
        np.testing.assert_allclose(kernel_skl.theta, gp.kernel.theta, atol=5e-1)
        # Predict at same position as the simulated data.
        # Predictions are strictly equal to the input data
        # in the case of no noise. With noise you should expect
        # to have a pull distribution with mean around 0
        # and std < 1 (you use the same data to train and validate, and
        # the data are well sampled compared to the input correlation
        # length).
        y_predict, y_cov = gp.predict(x, return_cov=True)
        y_std = np.sqrt(np.diag(y_cov))
        pull = y - y_predict
        pull /= np.sqrt(y_err**2 + y_std**2)
        mean_pull = np.mean(pull)
        std_pull = np.std(pull)
        # Test that the mean of the pull is close to zero and its std below 1.
        np.testing.assert_allclose(0., mean_pull, atol=3.*(std_pull)/np.sqrt(npoints[n]))
        if std_pull > 1.:
            raise ValueError("std_pull is > 1. Current value std_pull = %f"%(std_pull))
        # Test that for extrapolation, the interpolation returns the mean
        # function and the predicted std matches the fitted kernel amplitude.
        np.random.seed(42)
        # NOTE(review): low and high bounds are identical, so np.random.uniform
        # degenerates to a constant; the 1D test uses +6 and +7 correlation
        # lengths, so the second bound here was probably meant to be 7 -- confirm.
        x1 = np.random.uniform(np.max(x)+6.*size[n],
                               np.max(x)+6.*size[n], npoints[n])
        x2 = np.random.uniform(np.max(x)+6.*size[n],
                               np.max(x)+6.*size[n], npoints[n])
        new_x = np.array([x1, x2]).T
        y_predict, y_cov = gp.predict(new_x, return_cov=True)
        y_std = np.sqrt(np.diag(y_cov))
        np.testing.assert_allclose(np.mean(y), y_predict, atol=1e-5)
        sig = np.sqrt(np.exp(gp.kernel.theta[0]))
        np.testing.assert_allclose(sig*np.ones_like(y_std), y_std, atol=1e-5)
if __name__ == "__main__":
    # Run both hyperparameter-search smoke tests when executed as a script.
    test_hyperparameter_search_1d()
    test_hyperparameter_search_2d()
| [
"numpy.mean",
"treegp.GPInterpolation",
"treegp.eval_kernel",
"numpy.sqrt",
"numpy.ones_like",
"numpy.testing.assert_allclose",
"treegp_test_helper.make_1d_grf",
"numpy.diag",
"numpy.exp",
"treegp_test_helper.get_correlation_length_matrix",
"numpy.array",
"numpy.linalg.inv",
"numpy.max",
"... | [((3968, 4014), 'treegp_test_helper.get_correlation_length_matrix', 'get_correlation_length_matrix', (['size[n]', 'g1', 'g2'], {}), '(size[n], g1, g2)\n', (3997, 4014), False, 'from treegp_test_helper import get_correlation_length_matrix\n'), ((4030, 4046), 'numpy.linalg.inv', 'np.linalg.inv', (['L'], {}), '(L)\n', (4043, 4046), True, 'import numpy as np\n'), ((4162, 4188), 'treegp.eval_kernel', 'treegp.eval_kernel', (['kernel'], {}), '(kernel)\n', (4180, 4188), False, 'import treegp\n'), ((4212, 4277), 'treegp_test_helper.make_2d_grf', 'make_2d_grf', (['kernel_skl'], {'noise': 'noise', 'seed': '(42)', 'npoints': 'npoints[n]'}), '(kernel_skl, noise=noise, seed=42, npoints=npoints[n])\n', (4223, 4277), False, 'from treegp_test_helper import make_2d_grf\n'), ((4458, 4587), 'treegp.GPInterpolation', 'treegp.GPInterpolation', ([], {'kernel': 'kernel', 'optimizer': 'opt', 'normalize': '(True)', 'nbins': '(21)', 'min_sep': '(0.0)', 'max_sep': '(1.0)', 'p0': '[0.3, 0.0, 0.0]'}), '(kernel=kernel, optimizer=opt, normalize=True, nbins=\n 21, min_sep=0.0, max_sep=1.0, p0=[0.3, 0.0, 0.0])\n', (4480, 4587), False, 'import treegp\n'), ((4794, 4865), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['kernel_skl.theta', 'gp.kernel.theta'], {'atol': '(0.5)'}), '(kernel_skl.theta, gp.kernel.theta, atol=0.5)\n', (4820, 4865), True, 'import numpy as np\n'), ((5421, 5453), 'numpy.sqrt', 'np.sqrt', (['(y_err ** 2 + y_std ** 2)'], {}), '(y_err ** 2 + y_std ** 2)\n', (5428, 5453), True, 'import numpy as np\n'), ((5470, 5483), 'numpy.mean', 'np.mean', (['pull'], {}), '(pull)\n', (5477, 5483), True, 'import numpy as np\n'), ((5503, 5515), 'numpy.std', 'np.std', (['pull'], {}), '(pull)\n', (5509, 5515), True, 'import numpy as np\n'), ((6066, 6084), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (6080, 6084), True, 'import numpy as np\n'), ((765, 791), 'treegp.eval_kernel', 'treegp.eval_kernel', (['kernel'], {}), '(kernel)\n', (783, 791), False, 'import 
treegp\n'), ((818, 883), 'treegp_test_helper.make_1d_grf', 'make_1d_grf', (['kernel_skl'], {'noise': 'noise', 'seed': '(42)', 'npoints': 'npoints[n]'}), '(kernel_skl, noise=noise, seed=42, npoints=npoints[n])\n', (829, 883), False, 'from treegp_test_helper import make_1d_grf\n'), ((1084, 1200), 'treegp.GPInterpolation', 'treegp.GPInterpolation', ([], {'kernel': 'kernel', 'optimizer': 'opt', 'normalize': '(True)', 'nbins': '(15)', 'min_sep': '(0.1)', 'max_sep': 'max_sep[i]'}), '(kernel=kernel, optimizer=opt, normalize=True, nbins=\n 15, min_sep=0.1, max_sep=max_sep[i])\n', (1106, 1200), False, 'import treegp\n'), ((1438, 1509), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['kernel_skl.theta', 'gp.kernel.theta'], {'atol': '(0.7)'}), '(kernel_skl.theta, gp.kernel.theta, atol=0.7)\n', (1464, 1509), True, 'import numpy as np\n'), ((2483, 2496), 'numpy.mean', 'np.mean', (['pull'], {}), '(pull)\n', (2490, 2496), True, 'import numpy as np\n'), ((2520, 2532), 'numpy.std', 'np.std', (['pull'], {}), '(pull)\n', (2526, 2532), True, 'import numpy as np\n'), ((5360, 5374), 'numpy.diag', 'np.diag', (['y_cov'], {}), '(y_cov)\n', (5367, 5374), True, 'import numpy as np\n'), ((6337, 6355), 'numpy.array', 'np.array', (['[x1, x2]'], {}), '([x1, x2])\n', (6345, 6355), True, 'import numpy as np\n'), ((6445, 6459), 'numpy.diag', 'np.diag', (['y_cov'], {}), '(y_cov)\n', (6452, 6459), True, 'import numpy as np\n'), ((6497, 6507), 'numpy.mean', 'np.mean', (['y'], {}), '(y)\n', (6504, 6507), True, 'import numpy as np\n'), ((6553, 6579), 'numpy.exp', 'np.exp', (['gp.kernel.theta[0]'], {}), '(gp.kernel.theta[0])\n', (6559, 6579), True, 'import numpy as np\n'), ((1645, 1708), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['xi', 'gp._optimizer._2pcf'], {'atol': '(1e-10)'}), '(xi, gp._optimizer._2pcf, atol=1e-10)\n', (1671, 1708), True, 'import numpy as np\n'), ((1815, 1880), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['logL', 
'gp._optimizer._logL'], {'atol': '(1e-10)'}), '(logL, gp._optimizer._logL, atol=1e-10)\n', (1841, 1880), True, 'import numpy as np\n'), ((2410, 2424), 'numpy.diag', 'np.diag', (['y_cov'], {}), '(y_cov)\n', (2417, 2424), True, 'import numpy as np\n'), ((3312, 3326), 'numpy.diag', 'np.diag', (['y_cov'], {}), '(y_cov)\n', (3319, 3326), True, 'import numpy as np\n'), ((3460, 3486), 'numpy.exp', 'np.exp', (['gp.kernel.theta[0]'], {}), '(gp.kernel.theta[0])\n', (3466, 3486), True, 'import numpy as np\n'), ((6116, 6125), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (6122, 6125), True, 'import numpy as np\n'), ((6169, 6178), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (6175, 6178), True, 'import numpy as np\n'), ((6234, 6243), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (6240, 6243), True, 'import numpy as np\n'), ((6287, 6296), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (6293, 6296), True, 'import numpy as np\n'), ((6620, 6639), 'numpy.ones_like', 'np.ones_like', (['y_std'], {}), '(y_std)\n', (6632, 6639), True, 'import numpy as np\n'), ((3380, 3390), 'numpy.mean', 'np.mean', (['y'], {}), '(y)\n', (3387, 3390), True, 'import numpy as np\n'), ((3391, 3410), 'numpy.ones_like', 'np.ones_like', (['y_std'], {}), '(y_std)\n', (3403, 3410), True, 'import numpy as np\n'), ((3531, 3550), 'numpy.ones_like', 'np.ones_like', (['y_std'], {}), '(y_std)\n', (3543, 3550), True, 'import numpy as np\n'), ((5671, 5690), 'numpy.sqrt', 'np.sqrt', (['npoints[n]'], {}), '(npoints[n])\n', (5678, 5690), True, 'import numpy as np\n'), ((2696, 2715), 'numpy.sqrt', 'np.sqrt', (['npoints[n]'], {}), '(npoints[n])\n', (2703, 2715), True, 'import numpy as np\n'), ((3135, 3144), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (3141, 3144), True, 'import numpy as np\n'), ((3154, 3163), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (3160, 3163), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# *-* coding: utf-8 *-*
from os import path
from setuptools import setup, find_packages
# The package version is kept in a plain-text VERSION file next to setup.py.
with open('VERSION') as v_file:
    version = v_file.read().strip()
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory.

    FIX: the file is now opened via a context manager so the handle is
    always closed (the original leaked the open file object).
    """
    with open(path.join(path.dirname(__file__), fname)) as handle:
        return handle.read()
# Package metadata; version comes from the VERSION file and the long
# description from README.rst (both read at build time).
setup(
    name='vbox_sdk',
    packages=find_packages(exclude=['docs', ]),
    version=version,
    license='Unlicense',
    description='The VirtualBox Software Developer Kit as available from https://www.virtualbox.org/wiki/Downloads',
    long_description=read('README.rst'),
    author='Oracle Corp.',
    author_email='<EMAIL>',
    maintainer='<NAME>',
    maintainer_email='<EMAIL>',
    url='https://github.com/mmabey/vbox_sdk',
    download_url='https://github.com/mmabey/vbox_sdk/archive/v{}.tar.gz'.format(version),
    # install_requires=[x.strip() for x in open('requirements.txt')],
    keywords=[
        'vbox',
        'vbox api',
        'vbox sdk',
        'VirtualBox',
        'Oracle',
        'virtual machine manager',
    ],
    classifiers=[ # Full list at: https://pypi.python.org/pypi?%3Aaction=list_classifiers
        'Intended Audience :: Developers',
        # Operating systems supported
        'Operating System :: POSIX',
        'Operating System :: POSIX :: Linux',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: MacOS',
        # Versions of Python supported
        'Programming Language :: Python :: 2.7',
    ],
)
| [
"os.path.dirname",
"setuptools.find_packages"
] | [((307, 338), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['docs']"}), "(exclude=['docs'])\n", (320, 338), False, 'from setuptools import setup, find_packages\n'), ((225, 247), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (237, 247), False, 'from os import path\n')] |
import numpy as np
import pandas as pd
from collections import OrderedDict
import tabulate
# Disable tabulate's LaTeX escaping for the characters used in our math
# macros, so commands like $\conf_{...}$ pass through verbatim.
# NOTE: relies on tabulate's private LATEX_ESCAPE_RULES dict.
del(tabulate.LATEX_ESCAPE_RULES[u'$'])
del(tabulate.LATEX_ESCAPE_RULES[u'\\'])
del(tabulate.LATEX_ESCAPE_RULES[u'{'])
del(tabulate.LATEX_ESCAPE_RULES[u'}'])
del(tabulate.LATEX_ESCAPE_RULES[u'^'])
data = {}
scens = ["SPEAR-SWV","SPEAR-IBM","CPLEX-RCW","CPLEX-REG","CPLEX-CORLAT"]
models = ["RF", "DNN"]
EVA_BUDGETs = [1,3600]
WC_BUDGET = 172800 # sec
RUNS = 3
# Parse the RMSE/RMSLE metrics and training times out of the per-run log
# files and keep, for each (scenario, model, budget), the run with the
# lowest RMSE on quadrant I.
for scen in scens:
    for model in models:
        for EVA_BUDGET in EVA_BUDGETs:
            rep_options = [True] if model=="DNN" else [False]
            for reg in rep_options:
                print(scen,model,EVA_BUDGET,reg)
                data[scen] = data.get(scen,{})
                data[scen][model] = data[scen].get(model,{})
                data[scen][model][EVA_BUDGET] = data[scen][model].get(EVA_BUDGET,{})
                # Each metric list starts with np.inf at index 0 so that the
                # argmin index below stays aligned across lists even when no
                # run parsed successfully.
                t = [np.inf]
                rmse_I = [np.inf]
                rmsle_I = [np.inf]
                rmse_II = [np.inf]
                rmsle_II = [np.inf]
                rmse_III = [np.inf]
                rmsle_III = [np.inf]
                rmse_IV = [np.inf]
                rmsle_IV = [np.inf]
                for seed in range(1,RUNS+1):
                    with open("{0}_{1}_{2}_{3}_{4}_{5}.log".format(scen, model, reg, EVA_BUDGET,WC_BUDGET, seed)) as fp:
                        for line in fp:
                            if line.startswith("Training Time: "):
                                t.append(float(line.split(":")[1]))
                            elif line.startswith("RMSE (I)"):
                                rmse_I.append(float(line.split(":")[1]))
                            elif line.startswith("RMSLE (I)"):
                                rmsle_I.append(float(line.split(":")[1]))
                            elif line.startswith("RMSE (II)"):
                                rmse_II.append(float(line.split(":")[1]))
                            elif line.startswith("RMSLE (II)"):
                                rmsle_II.append(float(line.split(":")[1]))
                            elif line.startswith("RMSE (III)"):
                                rmse_III.append(float(line.split(":")[1]))
                            elif line.startswith("RMSLE (III)"):
                                rmsle_III.append(float(line.split(":")[1]))
                            elif line.startswith("RMSE (IV)"):
                                rmse_IV.append(float(line.split(":")[1]))
                            elif line.startswith("RMSLE (IV)"):
                                rmsle_IV.append(float(line.split(":")[1]))
                # Pick the run with the best quadrant-I RMSE; indexing the
                # other lists with the same position assumes every log file
                # contributed exactly one value per metric -- TODO confirm.
                best_run = np.argmin(rmse_I)
                data[scen][model][EVA_BUDGET]["time"] = t[best_run]
                # NOTE(review): the stored key is "RMSEL" while the log label
                # is "RMSLE" -- kept as-is since the table code below reads
                # the same "RMSEL" keys.
                data[scen][model][EVA_BUDGET]["RMSE (I)"] = rmse_I[best_run]
                data[scen][model][EVA_BUDGET]["RMSEL (I)"] = rmsle_I[best_run]
                data[scen][model][EVA_BUDGET]["RMSE (II)"] = rmse_II[best_run]
                data[scen][model][EVA_BUDGET]["RMSEL (II)"] = rmsle_II[best_run]
                data[scen][model][EVA_BUDGET]["RMSE (III)"] = rmse_III[best_run]
                data[scen][model][EVA_BUDGET]["RMSEL (III)"] = rmsle_III[best_run]
                data[scen][model][EVA_BUDGET]["RMSE (IV)"] = rmse_IV[best_run]
                data[scen][model][EVA_BUDGET]["RMSEL (IV)"] = rmsle_IV[best_run]
# Emit one LaTeX booktabs table per evaluation budget: RMSLE on training
# configs (quadrants I/III) and test configs (quadrants II/IV), for both
# training and test instances.
for budget in EVA_BUDGETs:
    table_data = [["","","\multicolumn{2}{c}{$\conf_{\\text{train}}$}","\multicolumn{2}{c}{$\conf_{\\text{test}}$}"],
                  ["Domain", "Instances","RF","DNN","RF","DNN"]
                  ]
    for scen in scens:
        row = [scen, "$\insts_{\\text{train}}$"]
        for quad in ["I","III"]:
            for model in models:
                row.append("$%.2f$" %(data[scen][model][budget]["RMSEL (%s)" %(quad)]))
        table_data.append(row)
        row = [scen, "$\insts_{\\text{test}}$"]
        for quad in ["II","IV"]:
            for model in models:
                row.append("$%.2f$" %(data[scen][model][budget]["RMSEL (%s)" %(quad)]))
        table_data.append(row)
    print(tabulate.tabulate(tabular_data=table_data, tablefmt="latex_booktabs"))
| [
"numpy.argmin",
"tabulate.tabulate"
] | [((4159, 4228), 'tabulate.tabulate', 'tabulate.tabulate', ([], {'tabular_data': 'table_data', 'tablefmt': '"""latex_booktabs"""'}), "(tabular_data=table_data, tablefmt='latex_booktabs')\n", (4176, 4228), False, 'import tabulate\n'), ((2679, 2696), 'numpy.argmin', 'np.argmin', (['rmse_I'], {}), '(rmse_I)\n', (2688, 2696), True, 'import numpy as np\n')] |
import numpy as np
from utils import Data
import keras.optimizers
from keras.callbacks import LambdaCallback, ModelCheckpoint
from keras.models import Sequential
from keras.layers import Dense, Embedding, GlobalMaxPooling1D, CuDNNLSTM, Dropout, BatchNormalization, Activation, LSTM
import getopt
import sys
from sklearn.utils import shuffle
# Default hyperparameters; each can be overridden from the command line below.
seq_len = 50
num_epoch = 25
batch_size = 32
num_char = 1000
num_gen = 20
use_generator = True
# Command-line parsing: -s/--seq, -c/--char, -e/--epoch, -b/--batch,
# -g/--gen, and -u/--use_g (1/0 toggle for the data generator).
try:
    opts, args = getopt.getopt(sys.argv[1:], 's:c:e:b:g:u:', \
        ['seq=', 'char=', 'epoch=', 'batch=', 'gen=', 'use_g='])
except getopt.GetoptError:
    sys.exit(2)
for opt, arg in opts:
    if opt in ('-s', '--seq'):
        seq_len = int(arg)
    elif opt in ('-c', '--char'):
        num_char = int(arg)
    elif opt in ('-e', '--epoch'):
        num_epoch = int(arg)
    elif opt in ('-b', '--batch'):
        batch_size = int(arg)
    elif opt in ('-g', '--gen'):
        num_gen = int(arg)
    elif opt in ('-u', '--use_g'):
        if int(arg) == 1:
            use_generator = True
        elif int(arg) == 0:
            use_generator = False
        else:
            # Any value other than 0/1 is a usage error.
            sys.exit(2)
# Load the training text and derive vocabulary/one-hot sizes from it.
data = Data.Data(seq_len, hm_char=num_char, which_text='bible')
num_classes = data.get_num_classes()
training_size = data.get_training_size()
print('Training size: ', training_size)
data.save_things(100000)
# Only materialize the full training arrays when not streaming batches.
if use_generator == False:
    x, y = data.generate_data()
    x, y = shuffle(x, y, random_state=42)
# Two-layer LSTM character model with a softmax over the vocabulary.
# NOTE(review): CuDNNLSTM is the GPU/cuDNN-only layer -- confirm a GPU is
# available, or swap in LSTM for CPU runs.
model = Sequential()
model.add(CuDNNLSTM(256, input_shape=(seq_len, num_classes), return_sequences=True))
model.add(Activation('relu'))
model.add(BatchNormalization())
model.add(Dropout(.5))
model.add(CuDNNLSTM(256))
model.add(Activation('relu'))
model.add(BatchNormalization())
model.add(Dropout(.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
print(model.summary())
# Keep only the best weights (by validation loss) and sample some text
# after every epoch.
checkpoint_callback = ModelCheckpoint('char_rnn_model_weights.h5', monitor='val_loss',\
    save_best_only=True, save_weights_only=True)
generate_callback = LambdaCallback(on_epoch_end=lambda batch, logs: \
    data.predict(model=model, num_gen=num_gen, use_empty_vector=False))
callback_list = [generate_callback, checkpoint_callback]
if use_generator:
    model.fit_generator(data.get_generator(), epochs=num_epoch, \
        steps_per_epoch=training_size/batch_size, callbacks=callback_list, verbose=1,\
        validation_data=data.get_generator(), validation_steps=training_size*.01)
else:
    # NOTE(review): this branch omits checkpoint_callback, so no best-weights
    # file is written during non-generator training -- confirm intended.
    model.fit(x, y, epochs=num_epoch, batch_size=batch_size, verbose=1, callbacks=[generate_callback])
print('Saving model...')
# NOTE(review): final weights go to '../char_rnn_model_weights.h5' while the
# checkpoint writes 'char_rnn_model_weights.h5' in the cwd -- two different
# files; confirm which one downstream code loads.
model.save_weights('../char_rnn_model_weights.h5')
print('Finished saving!') | [
"keras.layers.CuDNNLSTM",
"getopt.getopt",
"keras.callbacks.ModelCheckpoint",
"sklearn.utils.shuffle",
"keras.models.Sequential",
"utils.Data.Data",
"keras.layers.Activation",
"sys.exit",
"keras.layers.Dense",
"keras.layers.BatchNormalization",
"keras.layers.Dropout"
] | [((1049, 1105), 'utils.Data.Data', 'Data.Data', (['seq_len'], {'hm_char': 'num_char', 'which_text': '"""bible"""'}), "(seq_len, hm_char=num_char, which_text='bible')\n", (1058, 1105), False, 'from utils import Data\n'), ((1354, 1366), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (1364, 1366), False, 'from keras.models import Sequential\n'), ((1836, 1949), 'keras.callbacks.ModelCheckpoint', 'ModelCheckpoint', (['"""char_rnn_model_weights.h5"""'], {'monitor': '"""val_loss"""', 'save_best_only': '(True)', 'save_weights_only': '(True)'}), "('char_rnn_model_weights.h5', monitor='val_loss',\n save_best_only=True, save_weights_only=True)\n", (1851, 1949), False, 'from keras.callbacks import LambdaCallback, ModelCheckpoint\n'), ((459, 563), 'getopt.getopt', 'getopt.getopt', (['sys.argv[1:]', '"""s:c:e:b:g:u:"""', "['seq=', 'char=', 'epoch=', 'batch=', 'gen=', 'use_g=']"], {}), "(sys.argv[1:], 's:c:e:b:g:u:', ['seq=', 'char=', 'epoch=',\n 'batch=', 'gen=', 'use_g='])\n", (472, 563), False, 'import getopt\n'), ((1314, 1344), 'sklearn.utils.shuffle', 'shuffle', (['x', 'y'], {'random_state': '(42)'}), '(x, y, random_state=42)\n', (1321, 1344), False, 'from sklearn.utils import shuffle\n'), ((1377, 1450), 'keras.layers.CuDNNLSTM', 'CuDNNLSTM', (['(256)'], {'input_shape': '(seq_len, num_classes)', 'return_sequences': '(True)'}), '(256, input_shape=(seq_len, num_classes), return_sequences=True)\n', (1386, 1450), False, 'from keras.layers import Dense, Embedding, GlobalMaxPooling1D, CuDNNLSTM, Dropout, BatchNormalization, Activation, LSTM\n'), ((1462, 1480), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (1472, 1480), False, 'from keras.layers import Dense, Embedding, GlobalMaxPooling1D, CuDNNLSTM, Dropout, BatchNormalization, Activation, LSTM\n'), ((1492, 1512), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (1510, 1512), False, 'from keras.layers import Dense, Embedding, GlobalMaxPooling1D, CuDNNLSTM, 
Dropout, BatchNormalization, Activation, LSTM\n'), ((1524, 1536), 'keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (1531, 1536), False, 'from keras.layers import Dense, Embedding, GlobalMaxPooling1D, CuDNNLSTM, Dropout, BatchNormalization, Activation, LSTM\n'), ((1548, 1562), 'keras.layers.CuDNNLSTM', 'CuDNNLSTM', (['(256)'], {}), '(256)\n', (1557, 1562), False, 'from keras.layers import Dense, Embedding, GlobalMaxPooling1D, CuDNNLSTM, Dropout, BatchNormalization, Activation, LSTM\n'), ((1574, 1592), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (1584, 1592), False, 'from keras.layers import Dense, Embedding, GlobalMaxPooling1D, CuDNNLSTM, Dropout, BatchNormalization, Activation, LSTM\n'), ((1604, 1624), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (1622, 1624), False, 'from keras.layers import Dense, Embedding, GlobalMaxPooling1D, CuDNNLSTM, Dropout, BatchNormalization, Activation, LSTM\n'), ((1636, 1648), 'keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (1643, 1648), False, 'from keras.layers import Dense, Embedding, GlobalMaxPooling1D, CuDNNLSTM, Dropout, BatchNormalization, Activation, LSTM\n'), ((1660, 1700), 'keras.layers.Dense', 'Dense', (['num_classes'], {'activation': '"""softmax"""'}), "(num_classes, activation='softmax')\n", (1665, 1700), False, 'from keras.layers import Dense, Embedding, GlobalMaxPooling1D, CuDNNLSTM, Dropout, BatchNormalization, Activation, LSTM\n'), ((598, 609), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (606, 609), False, 'import sys\n'), ((1029, 1040), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (1037, 1040), False, 'import sys\n')] |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import concurrent.futures
import os
import random
import textwrap
import threading
import time
import grpc
import grpc_reflection.v1alpha.reflection
import users_pb2
import users_pb2_grpc

# IDs are drawn from the full unsigned 64-bit range; 0 is reserved to mean
# "unset" (see ``Users.AddUser`` and ``_new_id``).
_MAX_ID = 2 ** 64 - 1
# Sleep interval for the keep-alive loop in ``wait_for_termination``.
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
def _new_id(locked_container, max_attempts):
    """Draw a random ID that is not already present in *locked_container*.

    The caller must hold the mutex guarding ``locked_container`` for the
    entire duration of this call; nothing here synchronizes access.

    Args:
        locked_container (Container): Supports ``some_id in locked_container``.
        max_attempts (int): How many random draws to try before giving up.

    Returns:
        int: A fresh, unused ID in ``[1, _MAX_ID]``.

    Raises:
        RuntimeError: If every attempt collided with an existing ID.
    """
    attempts_left = max_attempts
    while attempts_left > 0:
        # 0 is excluded on purpose: the 0-value corresponds to "unset".
        candidate = random.randint(1, _MAX_ID)
        if candidate not in locked_container:
            return candidate
        attempts_left -= 1
    raise RuntimeError(
        f"Failed to generate a new ID in {max_attempts} attempts"
    )
class Users:
    """In-memory backing store and business logic for the Users service.

    ``_database`` maps an int ID to a ``(first_name, last_name)`` tuple;
    ``_lock`` guards every access to that dict.
    """

    def __init__(self, *args, **kwargs):
        self._database = {}
        # NOTE: We hold this lock for **all** operations on ``_database``.
        self._lock = threading.Lock()
        # Cooperative multiple inheritance: forward to the next class in the
        # MRO (the generated servicer base when mixed into ``UsersServicer``).
        super().__init__(*args, **kwargs)

    def AddUser(self, request, context):
        """Add a user to the database.

        Args:
            request (users_pb2.User): The request from the API.
            context (grpc._server._Context): A request context.

        Returns:
            users_pb2.AddUserResponse: The response containing the ID in the
            DB for the inserted user.
        """
        print(f"AddUser:\n{textwrap.indent(str(request), '    ')}")
        # IDs are server-assigned; a pre-set (non-zero) ID is a client error.
        if request.id != 0:
            message = "`id` cannot be set on user creation"
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            context.set_details(message)
            return users_pb2.AddUserResponse()
        with self._lock:
            # ``_new_id`` requires the lock to be held (see its docstring).
            inserted_id = _new_id(self._database, 5)
            self._database[inserted_id] = (
                request.first_name,
                request.last_name,
            )
        return users_pb2.AddUserResponse(user_id=inserted_id)

    def GetUsers(self, request, unused_context):
        """Get all users from the database.

        Args:
            request (google.protobuf.empty_pb2.Empty): The request from
                the API.
            unused_context (grpc._server._Context): A request context.

        Returns:
            Generator[users_pb2.User]: The response stream containing all
            users in the DB.
        """
        print("GetUsers: (empty)")
        # Snapshot the IDs under the lock; the streaming loop below runs
        # without it so slow consumers don't block writers.
        with self._lock:
            user_ids = sorted(self._database.keys())
        for user_id in user_ids:
            # NOTE: This **does not** try to catch a ``KeyError`` because
            # it assumes the ID **must** be in there.  That holds here since
            # no delete RPC is defined on this class.
            first_name, last_name = self._database[user_id]
            yield users_pb2.User(
                id=user_id, first_name=first_name, last_name=last_name
            )
class UsersServicer(Users, users_pb2_grpc.UsersServicer):
    # Concrete servicer: business logic from ``Users`` combined with the
    # generated gRPC base class; no extra behavior of its own.
    pass
def wait_for_termination(server):
    """Sleep forever; on KeyboardInterrupt stop *server* with no grace period.

    Workaround for grpc versions that lack ``Server.wait_for_termination``.
    """
    # See:
    #   https://github.com/grpc/grpc/pull/19299
    #   https://github.com/grpc/grpc/pull/19852
    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except KeyboardInterrupt:
        server.stop(0)
def enable_reflection(server):
    """Register gRPC server reflection for the Users service on *server*."""
    names = (
        users_pb2.DESCRIPTOR.services_by_name["Users"].full_name,
        grpc_reflection.v1alpha.reflection.SERVICE_NAME,
    )
    grpc_reflection.v1alpha.reflection.enable_server_reflection(names, server)
def serve(grpc_port):
    """Build a Users gRPC server on *grpc_port* and block until interrupted."""
    executor = concurrent.futures.ThreadPoolExecutor(max_workers=10)
    server = grpc.server(executor)
    users_pb2_grpc.add_UsersServicer_to_server(UsersServicer(), server)
    enable_reflection(server)
    server.add_insecure_port(f"[::]:{grpc_port}")
    print(f"Running Users service on port {grpc_port}")
    server.start()
    wait_for_termination(server)
def main():
    """Entry point: read the listening port from ``GRPC_PORT`` and serve.

    NOTE(review): the environment value arrives as ``str`` while the default
    is ``int``; both format fine in ``serve`` -- confirm nothing compares
    the port numerically.
    """
    serve(os.environ.get("GRPC_PORT", 50051))


if __name__ == "__main__":
    main()
| [
"users_pb2.User",
"threading.Lock",
"os.environ.get",
"time.sleep",
"users_pb2.AddUserResponse",
"random.randint"
] | [((4843, 4877), 'os.environ.get', 'os.environ.get', (['"""GRPC_PORT"""', '(50051)'], {}), "('GRPC_PORT', 50051)\n", (4857, 4877), False, 'import os\n'), ((1548, 1574), 'random.randint', 'random.randint', (['(1)', '_MAX_ID'], {}), '(1, _MAX_ID)\n', (1562, 1574), False, 'import random\n'), ((1921, 1937), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (1935, 1937), False, 'import threading\n'), ((2628, 2655), 'users_pb2.AddUserResponse', 'users_pb2.AddUserResponse', ([], {}), '()\n', (2653, 2655), False, 'import users_pb2\n'), ((2883, 2929), 'users_pb2.AddUserResponse', 'users_pb2.AddUserResponse', ([], {'user_id': 'inserted_id'}), '(user_id=inserted_id)\n', (2908, 2929), False, 'import users_pb2\n'), ((4078, 4109), 'time.sleep', 'time.sleep', (['_ONE_DAY_IN_SECONDS'], {}), '(_ONE_DAY_IN_SECONDS)\n', (4088, 4109), False, 'import time\n'), ((3720, 3790), 'users_pb2.User', 'users_pb2.User', ([], {'id': 'user_id', 'first_name': 'first_name', 'last_name': 'last_name'}), '(id=user_id, first_name=first_name, last_name=last_name)\n', (3734, 3790), False, 'import users_pb2\n')] |
#!/usr/bin/python3
import os
import sys
# Make sibling modules (enums, mymemcache, servo, ...) importable regardless
# of the working directory the script is launched from.
file_dir = os.path.dirname(__file__)
sys.path.append(file_dir)
import io
import logging
import time
import traceback
from flask import Flask, render_template, send_file, Response
from threading import Condition, Lock
from enums import Position
from mymemcache import MemCache
from servo import Servo, MOVEMENT_DELAY

HTTP_HOST = '0.0.0.0'  # listen on every interface
HTTP_PORT = 8000
HTTP_DEBUG = True

logging.basicConfig(level=logging.INFO)
logging.info("starting smart camera...")
# Pan/tilt servos; populated by the hardware probe below.
isServoAvailable = False
h_servo = None
v_servo = None
logging.info("creating memcache and locks...")
ptz_lock = Lock()  # serializes pan/tilt movements
camera_lock = Lock()  # serializes access to the camera hardware
image_cache = MemCache()  # caches captured stills, keyed by image name
logging.info("starting servo detection...")
try:
    # Probe the real hardware first.
    h_servo = Servo(0, 0.7, 2.3) # sg90, hw address 1, duty 1-2ms
    v_servo = Servo(1, 0.7, 2.3) # mg90s, hw address 0, duty 1-2ms
    isServoAvailable = True
    logging.info("servo is available")
except Exception as e:
    # No PTZ hardware: fall back to stand-in servos (last arg True --
    # presumably an emulation flag; TODO confirm in servo.Servo).
    logging.info("servo not available: {0}".format(e))
    exc_type, exc_value, exc_traceback = sys.exc_info()
    traceback.print_tb(exc_traceback, file=sys.stdout)
    h_servo = Servo(0, 1.0, 2.0, True)
    v_servo = Servo(1, 1.0, 2.0, True)
    isServoAvailable = False
logging.info("checking for camera")
camera = None
class StreamingOutput(object):
    """File-like sink for an MJPEG stream that publishes complete frames.

    ``write`` accumulates bytes in ``buffer``; whenever a chunk opens with a
    JPEG start-of-image marker, the bytes gathered so far form one complete
    frame, which is published as ``frame`` and announced on ``condition``.
    """

    def __init__(self):
        self.frame = None          # last complete JPEG frame (bytes)
        self.buffer = io.BytesIO()  # in-progress frame data
        self.condition = Condition()  # notified once per published frame

    def write(self, buf):
        if not buf.startswith(b'\xff\xd8'):
            # Mid-frame data: just keep accumulating.
            return self.buffer.write(buf)
        # A new JPEG begins, so the buffer holds a full frame: drop any
        # leftover bytes past the write position, publish, and rewind.
        self.buffer.truncate()
        with self.condition:
            self.frame = self.buffer.getvalue()
            self.condition.notify_all()
        self.buffer.seek(0)
        return self.buffer.write(buf)
try:
    # Real Raspberry Pi camera if the picamera package is importable.
    import picamera
    camera = picamera.PiCamera()
    camera_video_output_stream = StreamingOutput()
except Exception as e:
    # Fall back to the bundled emulation (e.g. for development on a PC).
    logging.info("camera not available: {0}".format(e))
    exc_type, exc_value, exc_traceback = sys.exc_info()
    traceback.print_tb(exc_traceback, file=sys.stdout)
    import camera_emulation
    camera = camera_emulation.PiCamera()
    camera_video_output_stream = camera_emulation.EmulatedStreamingOutput()
# NOTE(review): as pasted, this region's indentation was syntactically
# invalid (the Flask defs were dedented out of the inner ``try``).  The
# structure below follows the standard picamera streaming recipe:
# try/finally *inside* ``with camera:`` so ``stop_recording`` runs while the
# camera is still open.  Confirm against the original file.
try:
    with camera:
        camera.rotation = 180
        camera.resolution = (2592, 1944) # WaveShare J 2592 × 1944
        camera.framerate = 24
        logging.info("start camera recording...")
        # Record continuously into the MJPEG sink; stills are captured
        # separately via camera.capture() below.
        camera.start_recording(camera_video_output_stream, format='mjpeg', resize=(640, 480))
        try:
            logging.info("starting http server")
            app = Flask("server")

            def position_to_servo(servo, position):
                # Map the logical Position enum onto a servo end/center stop.
                if position == Position.MIN:
                    servo.min()
                elif position == Position.MAX:
                    servo.max()
                else:
                    servo.center()

            def ptz_center():
                # Re-center both axes and wait for the movement to settle.
                position_to_servo(h_servo, Position.CENTER)
                position_to_servo(v_servo, Position.CENTER)
                time.sleep(MOVEMENT_DELAY)

            def stream():
                # Generator yielding multipart MJPEG parts, one per frame.
                while True:
                    # logging.info("requested frame")
                    camera.annotate_text = "ISO " + str(camera.iso) \
                        + ", exposure " + str(camera.exposure_speed) \
                        + ", analog gain " + str(camera.analog_gain) \
                        + ", awb mode " + str(camera.awb_mode)
                    # + ", shutter " + str(camera.shutter_speed) \
                    # + ", awb gains " + ''.join(str(x) for x in camera.awb_gains)
                    with camera_lock:
                        with camera_video_output_stream.condition:
                            camera_video_output_stream.condition.wait()
                            frame = camera_video_output_stream.frame
                    # logging.info("sending bytes len " + str(len(frame)))
                    yield (b'--frame\r\n'
                           b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')

            @app.after_request
            def add_header(response):
                # Disable client-side caching for every response.
                response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
                response.headers['Pragma'] = 'no-cache'
                return response

            @app.route('/', endpoint="root")
            def root_endpoint():
                logging.info("requested root")
                return render_template("index.html")

            @app.route('/images/<name>', endpoint="camera_image")
            def image_endpoint(name):
                # Serve a cached still, or move the PTZ head to the position
                # encoded in the image name and capture a fresh one.
                logging.info("requested image " + name)
                try:
                    if image_cache.has(name):
                        logging.info("getting image from cache")
                        image_bytes = image_cache.get(name)
                    else:
                        if name == "current.jpg":
                            position_h = Position.CENTER
                            position_v = Position.CENTER
                        elif name == "left.jpg":
                            position_h = Position.MAX
                            position_v = Position.CENTER
                        elif name == "right.jpg":
                            position_h = Position.MIN
                            position_v = Position.CENTER
                        elif name == "up.jpg":
                            position_h = Position.CENTER
                            position_v = Position.MIN
                        elif name == "down.jpg":
                            position_h = Position.CENTER
                            position_v = Position.MAX
                        else:
                            return 'invalid position!', 400
                        if not isServoAvailable and name != "current.jpg":
                            # Without PTZ hardware only the centered shot is
                            # real; other angles get a placeholder image.
                            logging.info("ptz not available, return dummy image")
                            with open("templates/ptz_na.jpg", mode='rb') as file:
                                image_bytes = file.read()
                        else:
                            with camera_lock:
                                with ptz_lock:
                                    # NOTE(review): log text copied from the
                                    # "left" case; it runs for every position.
                                    logging.info("requested left image")
                                    position_to_servo(h_servo, position_h)
                                    position_to_servo(v_servo, position_v)
                                    time.sleep(MOVEMENT_DELAY)
                                image_stream = io.BytesIO()
                                camera.capture(image_stream, format='jpeg')
                                image_stream.seek(0)
                                image_bytes = image_stream.read()
                            image_cache.put(name, image_bytes)
                    return send_file(
                        io.BytesIO(image_bytes),
                        mimetype="image/jpeg",
                        as_attachment=True,
                        attachment_filename=name)
                except Exception as ie:
                    logging.info("can't send image: {0}".format(ie))
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    traceback.print_tb(exc_traceback, file=sys.stdout)
                    return 'bad request!', 500

            @app.route('/stream', endpoint="camera_stream")
            def stream_endpoint():
                # Center the head, then hand the client the live MJPEG stream.
                logging.info("requested stream")
                try:
                    logging.info("requested left image")
                    position_to_servo(h_servo, Position.CENTER)
                    position_to_servo(v_servo, Position.CENTER)
                    time.sleep(MOVEMENT_DELAY)
                    return Response(stream(),
                                    mimetype='multipart/x-mixed-replace; boundary=frame')
                except Exception as ie:
                    logging.info("can't start stream: {0}".format(ie))
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    traceback.print_tb(exc_traceback, file=sys.stdout)
                    return "can't start stream", 500

            app.run(host=HTTP_HOST, port=HTTP_PORT, debug=HTTP_DEBUG, use_reloader=False)
        finally:
            logging.info("stop camera recording")
            camera.stop_recording()
except Exception as e:
    logging.error("camera error {0}".format(e))
    exc_type, exc_value, exc_traceback = sys.exc_info()
    traceback.print_tb(exc_traceback, file=sys.stdout)
| [
"logging.basicConfig",
"flask.render_template",
"camera_emulation.PiCamera",
"flask.Flask",
"threading.Lock",
"io.BytesIO",
"logging.info",
"mymemcache.MemCache",
"servo.Servo",
"picamera.PiCamera",
"os.path.dirname",
"sys.exc_info",
"traceback.print_tb",
"time.sleep",
"camera_emulation.... | [((53, 78), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (68, 78), False, 'import os\n'), ((79, 104), 'sys.path.append', 'sys.path.append', (['file_dir'], {}), '(file_dir)\n', (94, 104), False, 'import sys\n'), ((421, 460), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (440, 460), False, 'import logging\n'), ((462, 502), 'logging.info', 'logging.info', (['"""starting smart camera..."""'], {}), "('starting smart camera...')\n", (474, 502), False, 'import logging\n'), ((560, 606), 'logging.info', 'logging.info', (['"""creating memcache and locks..."""'], {}), "('creating memcache and locks...')\n", (572, 606), False, 'import logging\n'), ((618, 624), 'threading.Lock', 'Lock', ([], {}), '()\n', (622, 624), False, 'from threading import Condition, Lock\n'), ((639, 645), 'threading.Lock', 'Lock', ([], {}), '()\n', (643, 645), False, 'from threading import Condition, Lock\n'), ((660, 670), 'mymemcache.MemCache', 'MemCache', ([], {}), '()\n', (668, 670), False, 'from mymemcache import MemCache\n'), ((673, 716), 'logging.info', 'logging.info', (['"""starting servo detection..."""'], {}), "('starting servo detection...')\n", (685, 716), False, 'import logging\n'), ((1226, 1261), 'logging.info', 'logging.info', (['"""checking for camera"""'], {}), "('checking for camera')\n", (1238, 1261), False, 'import logging\n'), ((736, 754), 'servo.Servo', 'Servo', (['(0)', '(0.7)', '(2.3)'], {}), '(0, 0.7, 2.3)\n', (741, 754), False, 'from servo import Servo, MOVEMENT_DELAY\n'), ((804, 822), 'servo.Servo', 'Servo', (['(1)', '(0.7)', '(2.3)'], {}), '(1, 0.7, 2.3)\n', (809, 822), False, 'from servo import Servo, MOVEMENT_DELAY\n'), ((891, 925), 'logging.info', 'logging.info', (['"""servo is available"""'], {}), "('servo is available')\n", (903, 925), False, 'import logging\n'), ((1772, 1791), 'picamera.PiCamera', 'picamera.PiCamera', ([], {}), '()\n', (1789, 1791), False, 
'import picamera\n'), ((1046, 1060), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1058, 1060), False, 'import sys\n'), ((1065, 1115), 'traceback.print_tb', 'traceback.print_tb', (['exc_traceback'], {'file': 'sys.stdout'}), '(exc_traceback, file=sys.stdout)\n', (1083, 1115), False, 'import traceback\n'), ((1131, 1155), 'servo.Servo', 'Servo', (['(0)', '(1.0)', '(2.0)', '(True)'], {}), '(0, 1.0, 2.0, True)\n', (1136, 1155), False, 'from servo import Servo, MOVEMENT_DELAY\n'), ((1170, 1194), 'servo.Servo', 'Servo', (['(1)', '(1.0)', '(2.0)', '(True)'], {}), '(1, 1.0, 2.0, True)\n', (1175, 1194), False, 'from servo import Servo, MOVEMENT_DELAY\n'), ((1381, 1393), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (1391, 1393), False, 'import io\n'), ((1419, 1430), 'threading.Condition', 'Condition', ([], {}), '()\n', (1428, 1430), False, 'from threading import Condition, Lock\n'), ((1965, 1979), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1977, 1979), False, 'import sys\n'), ((1984, 2034), 'traceback.print_tb', 'traceback.print_tb', (['exc_traceback'], {'file': 'sys.stdout'}), '(exc_traceback, file=sys.stdout)\n', (2002, 2034), False, 'import traceback\n'), ((2077, 2104), 'camera_emulation.PiCamera', 'camera_emulation.PiCamera', ([], {}), '()\n', (2102, 2104), False, 'import camera_emulation\n'), ((2138, 2180), 'camera_emulation.EmulatedStreamingOutput', 'camera_emulation.EmulatedStreamingOutput', ([], {}), '()\n', (2178, 2180), False, 'import camera_emulation\n'), ((2342, 2383), 'logging.info', 'logging.info', (['"""start camera recording..."""'], {}), "('start camera recording...')\n", (2354, 2383), False, 'import logging\n'), ((8528, 8542), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (8540, 8542), False, 'import sys\n'), ((8547, 8597), 'traceback.print_tb', 'traceback.print_tb', (['exc_traceback'], {'file': 'sys.stdout'}), '(exc_traceback, file=sys.stdout)\n', (8565, 8597), False, 'import traceback\n'), ((2503, 2539), 'logging.info', 'logging.info', 
(['"""starting http server"""'], {}), "('starting http server')\n", (2515, 2539), False, 'import logging\n'), ((2558, 2573), 'flask.Flask', 'Flask', (['"""server"""'], {}), "('server')\n", (2563, 2573), False, 'from flask import Flask, render_template, send_file, Response\n'), ((8341, 8378), 'logging.info', 'logging.info', (['"""stop camera recording"""'], {}), "('stop camera recording')\n", (8353, 8378), False, 'import logging\n'), ((3007, 3033), 'time.sleep', 'time.sleep', (['MOVEMENT_DELAY'], {}), '(MOVEMENT_DELAY)\n', (3017, 3033), False, 'import time\n'), ((4458, 4488), 'logging.info', 'logging.info', (['"""requested root"""'], {}), "('requested root')\n", (4470, 4488), False, 'import logging\n'), ((4512, 4541), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (4527, 4541), False, 'from flask import Flask, render_template, send_file, Response\n'), ((4663, 4702), 'logging.info', 'logging.info', (["('requested image ' + name)"], {}), "('requested image ' + name)\n", (4675, 4702), False, 'import logging\n'), ((7487, 7519), 'logging.info', 'logging.info', (['"""requested stream"""'], {}), "('requested stream')\n", (7499, 7519), False, 'import logging\n'), ((7562, 7598), 'logging.info', 'logging.info', (['"""requested left image"""'], {}), "('requested left image')\n", (7574, 7598), False, 'import logging\n'), ((7748, 7774), 'time.sleep', 'time.sleep', (['MOVEMENT_DELAY'], {}), '(MOVEMENT_DELAY)\n', (7758, 7774), False, 'import time\n'), ((4795, 4835), 'logging.info', 'logging.info', (['"""getting image from cache"""'], {}), "('getting image from cache')\n", (4807, 4835), False, 'import logging\n'), ((6909, 6932), 'io.BytesIO', 'io.BytesIO', (['image_bytes'], {}), '(image_bytes)\n', (6919, 6932), False, 'import io\n'), ((7242, 7256), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (7254, 7256), False, 'import sys\n'), ((7277, 7327), 'traceback.print_tb', 'traceback.print_tb', (['exc_traceback'], {'file': 'sys.stdout'}), 
'(exc_traceback, file=sys.stdout)\n', (7295, 7327), False, 'import traceback\n'), ((8081, 8095), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (8093, 8095), False, 'import sys\n'), ((8116, 8166), 'traceback.print_tb', 'traceback.print_tb', (['exc_traceback'], {'file': 'sys.stdout'}), '(exc_traceback, file=sys.stdout)\n', (8134, 8166), False, 'import traceback\n'), ((5919, 5972), 'logging.info', 'logging.info', (['"""ptz not available, return dummy image"""'], {}), "('ptz not available, return dummy image')\n", (5931, 5972), False, 'import logging\n'), ((6571, 6583), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (6581, 6583), False, 'import io\n'), ((6272, 6308), 'logging.info', 'logging.info', (['"""requested left image"""'], {}), "('requested left image')\n", (6284, 6308), False, 'import logging\n'), ((6496, 6522), 'time.sleep', 'time.sleep', (['MOVEMENT_DELAY'], {}), '(MOVEMENT_DELAY)\n', (6506, 6522), False, 'import time\n')] |
"""Storage registry."""
from typing import Type
from mode.utils.imports import FactoryMapping
from faust.types import StoreT
__all__ = ['by_name', 'by_url']
STORES: FactoryMapping[Type[StoreT]] = FactoryMapping(
memory='faust.stores.memory:Store',
rocksdb='faust.stores.rocksdb:Store',
)
STORES.include_setuptools_namespace('faust.stores')
by_name = STORES.by_name
by_url = STORES.by_url
| [
"mode.utils.imports.FactoryMapping"
] | [((198, 291), 'mode.utils.imports.FactoryMapping', 'FactoryMapping', ([], {'memory': '"""faust.stores.memory:Store"""', 'rocksdb': '"""faust.stores.rocksdb:Store"""'}), "(memory='faust.stores.memory:Store', rocksdb=\n 'faust.stores.rocksdb:Store')\n", (212, 291), False, 'from mode.utils.imports import FactoryMapping\n')] |
# Copyright (C) 2016 <NAME> <<EMAIL>>
# Released under the terms of the BSD license.
import os
import re
import sys
import click
import string
import datetime
import subprocess
from . import __version__
from .remote import get_remote
from .crypto import fernet_encrypt, fernet_decrypt, sha512_hash, digest
from .formatter import make_html
from .utils import get_password, copy_to_clipboard
from .fs import (
find_in_path, load_configuration, is_encrypted, list_files, test_mode,
fix_mode, asset_path
)
DEFAULT_CSS = 'stylesheet.css'
DATE_YMD = '%Y-%m-%d'
def die(message):
    """Emit ``Error: <message>`` on stdout and abort with exit status 1."""
    click.echo('Error: %s' % message)
    sys.exit(1)
def echo(message=''):
    """Print *message* to stdout.

    Dicts, lists and tuples are pretty-printed as indented JSON; everything
    else is forwarded to ``click.echo`` unchanged.
    """
    # isinstance (rather than an exact type() comparison) also accepts
    # subclasses such as OrderedDict, which json.dumps serializes fine.
    if isinstance(message, (dict, list, tuple)):
        import json
        message = json.dumps(message, indent=2)
    click.echo(message)
def main():
    """Console entry point for the no-cloud CLI."""
    try:
        __import__('weasyprint')
    except ValueError as exc:
        if 'unknown locale' not in str(exc):
            raise exc
        # Work around the "unknown locale" crash weasyprint hits on Mac OS.
        os.environ['LC_CTYPE'] = 'en_US'
    try:
        cli(obj={})
    except AssertionError as exc:
        # Assertions are the CLI's input-validation mechanism; report them
        # as plain errors instead of tracebacks.
        die(exc)
@click.group(invoke_without_command=True)
@click.option('-v', '--version', is_flag=True, help='Print program version.')
def cli(version):
    # Top-level command group; run without a subcommand it only (optionally)
    # prints the program version.  No docstring on purpose: click would show
    # it as help text.
    if version:
        echo(__version__)
@cli.command()
def remote():
    '''Remote configuration for `pull` and `push` commands.
    Both `pull` and `push` commands rely on `.no-cloud.yml` (which can be
    transparently encrypted for figuring out remote information. Configuration
    files are looked for recursively starting from the path provided to said
    commands.
    Sample configuration for S3:
    \b
    driver: s3
    bucket: bucket-xyz
    region: eu-west-1
    key: PRIVATE_KEY
    secret: SECRET
    Sample configuration for Minio (also uses S3):
    \b
    driver: minio
    endpoint: https://minio.example.com
    bucket: documents
    key: PRIVATE_KEY
    secret: SECRET
    '''
    # The command's own docstring *is* its output; do not edit it casually.
    doc = remote.__doc__
    # Strip the leading indentation the docstring carries in source.
    doc = re.sub(r'^ ', '', doc, flags=re.M)
    # Remove click's no-rewrap markers ('\b' is a literal backspace here).
    doc = re.sub(' \b\n', '', doc, flags=re.M)
    echo(doc)
@cli.command()
@click.argument('paths', nargs=-1)
def push(paths):
    '''Push files to remote storage.
    This command will push files to remote storage, overriding any previously
    existing remote file.
    $ no-cloud push ~/Documents/passwords
    Remote configuration is found recursively starting from the path provided.
    See `remote` for more information.
    '''
    # One remote per path: locate the (possibly encrypted) configuration by
    # walking upwards from the path, then upload every file below it.
    for path in paths:
        config_dir, config_file = find_in_path(path, '.no-cloud.yml.crypt',
                                               '.no-cloud.yml')
        assert config_dir and config_file, 'no configuration found'
        config = load_configuration(config_dir + '/' + config_file)
        with get_remote(config, config_dir) as remote:
            for filename in list_files(path):
                remote.push(filename)
@cli.command()
@click.argument('paths', nargs=-1)
def pull(paths):
    '''Pull files from remote storage.
    This command will pull files from remote storage, overriding any previously
    existing local file.
    $ no-cloud pull ~/Documents/passwords
    Remote configuration is found recursively starting from the path provided.
    See `remote` for more information.
    '''
    # Mirror of `push`: resolve the remote configuration, then fetch the path.
    for path in paths:
        config_dir, config_file = find_in_path(path, '.no-cloud.yml.crypt',
                                               '.no-cloud.yml')
        assert config_dir and config_file, 'no configuration found'
        config = load_configuration(config_dir + '/' + config_file)
        with get_remote(config, config_dir) as remote:
            remote.pull(path)
@cli.command()
@click.option('-d', '--dry-run', is_flag=True, help='Do not perform anything.')
@click.option('-k', '--keep', is_flag=True, help='Leave clear files behind.')
@click.argument('paths', nargs=-1)
def encrypt(dry_run, keep, paths):
    '''Encrypt files using a passphrase.
    Encrypt files using Fernet encryption. Unless `--keep` is passed, the
    command will remove the clear version of the file.
    Encrypted files have the `.crypt` extension.
    \b
    $ no-cloud encrypt ~/Documents/letters
    Encryption password: ***
    Confirmation: ***
    /home/benoit/Documents/letters/2016-12-20-santa.md
    '''
    # No password prompt needed when nothing will actually be written.
    password = None if dry_run else get_password('Encryption password',
                                                 confirm=True)
    for clear_name in list_files(paths):
        if is_encrypted(clear_name):
            continue  # already a .crypt file
        echo(clear_name)
        if dry_run:
            continue
        with open(clear_name, 'rb') as source:
            plaintext = source.read()
        ciphertext = fernet_encrypt(plaintext, password)
        with open(clear_name + '.crypt', 'wb') as target:
            target.write(ciphertext)
        if not keep:
            os.remove(clear_name)
@cli.command()
@click.option('-d', '--dry-run', is_flag=True, help='Do not perform anything.')
@click.option('-k', '--keep', is_flag=True, help='Leave encrypted files '
    'behind.')
@click.argument('paths', nargs=-1)
def decrypt(dry_run, keep, paths):
    '''Decrypt files using a password.
    Decrypt a Fernet encrypted files. Unless `--keep` is passed, the command
    will remove the encrypted version of the file.
    Encrypted files must have the `.crypt` extension.
    \b
    $ no-cloud decrypt ~/Documents/letters
    Decryption password: ***
    /home/benoit/Documents/letters/2016-12-20-santa.md.crypt
    '''
    password = None if dry_run else get_password('Decryption Password')
    for crypt_name in list_files(paths):
        if not is_encrypted(crypt_name):
            continue  # only .crypt files are candidates
        echo(crypt_name)
        if dry_run:
            continue
        with open(crypt_name, 'rb') as source:
            ciphertext = source.read()
        plaintext = fernet_decrypt(ciphertext, password)
        # Drop the trailing .crypt extension for the clear file name.
        clear_name, ext = os.path.splitext(crypt_name)
        with open(clear_name, 'wb') as target:
            target.write(plaintext)
        if not keep:
            os.remove(clear_name + ext)
@cli.command()
# BUGFIX: `help` and `default` were transposed for -s/-u -- the descriptive
# sentence used to be the *default value*, so a missing --service silently
# generated a password for the literal string 'Service to generate a
# password for.' instead of tripping the 'missing service' assertion below.
@click.option('-s', '--service', default=None, help='Service to generate a '
    'password for.')
@click.option('-u', '--username', default=None, help='User name to generate a '
    'password for.')
@click.option('-i', '--iterations', help='Number of iterations for the SHA512 '
    'algorithm (defaults to 100000).', default=100000)
@click.option('-c', '--characters', help='Characters classes to use for the '
    'digest; `l` for lowercase, `u` for uppercase, `d` for digits and `p` '
    'for punctuation (defaults to `ludp`).', default='ludp')
@click.option('-l', '--length', help='Length of the digest (defaults to 32).',
    default=32)
@click.option('-f', '--filename', help='YAML file to read the above '
    'information from.', default=None)
@click.option('-v', '--version', help='YAML document starting at zero '
    '(defaults to 0).', default=0)
@click.option('-n', '--no-clipboard', is_flag=True, help='Disable clipboard '
    'copy, password is printed to stdout.')
def password(service, username, iterations, characters, length, filename,
             version, no_clipboard):
    '''Reproducibly generate passwords.
    Passwords are built using the SHA512 hashing function and a configurable
    digest function (depending on what characters should be supported).
    To compute passwords, it uses the service name, the user name and a master
    password. The number of iterations of the algorithm can be tweaked which is
    especially useful for password rotation (you should keep it above 100000
    which is the default).
    The hashing function is ran twice, first on the user name using the master
    password as salt and then on the service name using the initial result as
    salt.
    This command will print sensitive information to standard output so you
    *must* make sure this does not represent a security issue.
    \b
    - Set your terminal output history (or scrollback) to a sensible value with
      no saving or restoration.
    - Activate history skipping in your shell and put a whitespace before the
      command (or whatever it supports).
    Passwords are copied to the clipboard unless `--no-clipboard` is passed.
    \b
    $ no-cloud password --service example.com --username <EMAIL>
    Master password: ***
    Confirmation: ***
    service: example.com
    username: <EMAIL>
    password: *<PASSWORD>*
    This command also supports reading credentials from a YAML file through the
    `--filename` option. It can be transparently encrypted (highly
    recommended). The master password will *always* be prompted for.
    When reading credentials from a YAML file, the `--version` can be used to
    determine what YAML document should be used (by default, the first version
    found is used).
    \b
    $ cat ~/Documents/passwords/example.yml
    service: example.com
    username: <EMAIL>
    iterations: 110000
    comment: >
      Updated on 2016-12-20
    ---
    service: example.com
    username: <EMAIL>
    comment: >
      Updated on 2016-11-20
    We can now encrypt this file:
    \b
    $ no-cloud encrypt ~/Documents/passwords/example.yml
    Encryption password: ***
    Confirmation: ***
    /home/benoit/Documents/passwords/example.yml
    And passwords can be generated:
    \b
    $ no-cloud password -f ~/Documents/passwords/example.yml.crypt
    Decryption password: ***
    Master password: ***
    Confirmation: ***
    service: example.com
    username: <EMAIL>
    password: *<PASSWORD>*
    comment: >
      Updated on 2016-12-20
    '''
    # CLI flags first; values from --filename (if any) override them below.
    config = {
        'service': service,
        'username': username,
        'iterations': iterations,
        'characters': characters,
        'length': length
    }

    def done(password):
        # Report the credentials; the password goes to the clipboard unless
        # explicitly disabled.
        if not no_clipboard:
            password = copy_to_clipboard(password)
        echo('service: %s' % config['service'])
        echo('username: %s' % config['username'])
        echo('password: %s' % password)
        if 'comment' in config:
            echo('comment: >')
            echo('  %s' % config['comment'].strip())

    if filename:
        data = load_configuration(filename, version)
        config.update(data)
    # Input validation; AssertionError is turned into a clean error in main().
    assert config['service'], 'missing service'
    assert config['username'], 'missing username'
    assert config['length'] > 0, 'invalid length'
    # A literal password stored in the YAML file short-circuits generation.
    if 'password' in config:
        return done(config['password'])
    # Build the digest alphabet; letters/digits are tripled and punctuation
    # doubled to weight the resulting character distribution.
    pool = ''
    if 'l' in config['characters']:
        pool += string.ascii_lowercase * 3
    if 'u' in config['characters']:
        pool += string.ascii_uppercase * 3
    if 'd' in config['characters']:
        pool += string.digits * 3
    if 'p' in config['characters']:
        pool += string.punctuation * 2
    assert len(pool), 'invalid characters'
    password = get_password('<PASSWORD>', confirm=True)
    # Two chained hashes: username salted by the master password, then the
    # service name salted by that intermediate result.
    hashed = sha512_hash(config['username'], password, config['iterations'])
    hashed = sha512_hash(config['service'], hashed, config['iterations'])
    hashed = digest(hashed, pool, config['length'])
    done(hashed)
@cli.command()
@click.option('-d', '--dry-run', is_flag=True, help='Do not perform anything.')
@click.option('-f', '--force', is_flag=True, help='Force renaming, possibly '
              'overwriting existing files.')
@click.argument('pattern', nargs=1)
@click.argument('paths', nargs=-1)
def rename(dry_run, force, pattern, paths):
    '''Rename files using a substitution pattern.
    Substitution patterns follow the form `s/pattern/replacement/`. Unless
    `--force` is passed, the command will not overwrite existing files.
    $ no-cloud rename 's/monica/hillary/' *.png
    The special `$i` replacement variable holds the current iteration starting
    at one and left-padded with zeros according to the number of target files.
    $ no-cloud rename 's/^/$i-/' *.png
    '''
    assert pattern.startswith('s/'), 'invalid pattern'
    assert pattern.endswith('/'), 'invalid pattern'
    filenames = tuple(list_files(paths))
    # Zero-pad `$i` to the width of the file count (e.g. '007' for 100+ files).
    fmt = '%0' + str(len(str(len(filenames)))) + 'd'
    # Strip the leading 's/' and trailing '/', then split on the first '/'.
    pattern, replacement = pattern[2:-1].split('/', 1)
    regex = re.compile(pattern)
    for i, filename in enumerate(filenames, start=1):
        dirname = os.path.dirname(filename)
        basename = os.path.basename(filename)
        # The pattern is applied to the basename only, never the directory.
        repl = replacement.replace('$i', fmt % i)
        # Bug fix: os.path.join instead of manual '/' concatenation, which
        # produced absolute paths like '/foo.txt' for bare relative filenames.
        src = os.path.join(dirname, basename)
        dst = os.path.join(dirname, regex.sub(repl, basename))
        echo(dst)
        if not dry_run:
            if not force:
                assert not os.path.isfile(dst), \
                    'destination exists `%s`' % dst
            os.rename(src, dst)
@cli.command()
@click.option('-d', '--dry-run', is_flag=True, help='Do not perform anything '
              '(ie.: not file mode fixing).')
@click.argument('paths', nargs=-1)
def audit(dry_run, paths):
    '''Audit files for security issues.
    Files that are not encrypted (c) or have an incorrect mode set (m) are
    printed to stdout. File modes are fixed by default.
    \b
    $ no-cloud audit ~/Documents
    m /home/benoit/Documents/.no-cloud.yml.crypt
    c /home/benoit/Documents/diamond.db
      /home/benoit/Documents/letters/2016-12-20-santa.md.crypt
    '''
    for filename in list_files(paths):
        # Two independent findings per file: cleartext content and bad mode.
        is_clear = not is_encrypted(filename)
        bad_mode = not test_mode(filename)
        flags = ('c' if is_clear else ' ') + ('m' if bad_mode else ' ')
        echo('%s %s' % (flags, filename))
        # Unlike encryption, an incorrect mode is repaired automatically.
        if bad_mode and not dry_run:
            fix_mode(filename)
@cli.command()
@click.option('-p', '--preview', is_flag=True, help='Automatically preview '
              'document.')
@click.option('-t', '--timestamp', is_flag=True, help='Timestamp PDF file.')
@click.option('-s', '--stylesheet', help='CSS stylesheet.', default='default')
@click.argument('paths', nargs=-1)
def render(preview, timestamp, stylesheet, paths):
    '''Render a Markdown file as a PDF.
    Sample usage:
    \b
    $ no-cloud render -p ~/Documents/letters/2016-12-20-santa.md
    /home/benoit/Documents/letters/2016-12-20-santa.pdf
    Markdown rendering supports custom classes through annotations (eg.
    `{right}`); here are some classes defined in the default CSS:
    - `right`: align a block of text on the right-half of the page
    - `letter`: add 3em worth of indentation for the first line in
      paragraphs
    - `t-2` to `t-10`: add 2 to 10 em worth of top margin
    - `b-2` to `b-10`: add 2 to 10 em worth of bottom margin
    - `l-pad-1` to `l-pad-3`: add 1 to 3 em worth of left padding
    - `signature`: limit an image's width to 10em
    - `pull-right`: make an element float to the right
    - `break`: insert a page break before an element
    - `centered`: centered text
    - `light`: lighter gray text
    - `small`: smaller texter (0.9em)
    It also contains rules for links, code, citations, tables and horizontal
    rules.
    Please note that this feature may not work on Python2/Mac OS.
    '''
    # Imported lazily: weasyprint is heavy and may be unavailable on some
    # platforms (see the docstring note about Python2/Mac OS).
    from weasyprint import HTML, CSS
    for filename in list_files(paths):
        # Bug fix: the original assertion carried an empty message.
        assert filename.endswith('.md'), 'not a Markdown file `%s`' % filename
        with open(filename) as file:
            data = file.read()
        html = make_html(data)
        filename, _ext = os.path.splitext(filename)
        if timestamp:
            # Prefix the basename with today's date: <dir>/<DATE_YMD>-<name>.
            now = datetime.datetime.now()
            dirname = os.path.dirname(filename)
            basename = os.path.basename(filename)
            filename = os.path.join(dirname, now.strftime(DATE_YMD) + '-' + basename)
        filename = filename + '.pdf'
        if stylesheet == 'default':
            stylesheet = asset_path('stylesheet.css')
        echo(filename)
        HTML(string=html) \
            .write_pdf(filename, stylesheets=[
                CSS(stylesheet)
            ])
        if preview:
            # 'open' is the macOS document launcher.
            subprocess.call(['open', filename])
| [
"click.argument",
"re.compile",
"click.group",
"click.option",
"os.rename",
"os.path.splitext",
"os.path.isfile",
"click.echo",
"datetime.datetime.now",
"os.path.dirname",
"os.path.basename",
"subprocess.call",
"sys.exit",
"weasyprint.HTML",
"re.sub",
"weasyprint.CSS",
"os.remove"
] | [((1098, 1138), 'click.group', 'click.group', ([], {'invoke_without_command': '(True)'}), '(invoke_without_command=True)\n', (1109, 1138), False, 'import click\n'), ((1140, 1216), 'click.option', 'click.option', (['"""-v"""', '"""--version"""'], {'is_flag': '(True)', 'help': '"""Print program version."""'}), "('-v', '--version', is_flag=True, help='Print program version.')\n", (1152, 1216), False, 'import click\n'), ((2148, 2181), 'click.argument', 'click.argument', (['"""paths"""'], {'nargs': '(-1)'}), "('paths', nargs=-1)\n", (2162, 2181), False, 'import click\n'), ((2910, 2943), 'click.argument', 'click.argument', (['"""paths"""'], {'nargs': '(-1)'}), "('paths', nargs=-1)\n", (2924, 2943), False, 'import click\n'), ((3621, 3699), 'click.option', 'click.option', (['"""-d"""', '"""--dry-run"""'], {'is_flag': '(True)', 'help': '"""Do not perform anything."""'}), "('-d', '--dry-run', is_flag=True, help='Do not perform anything.')\n", (3633, 3699), False, 'import click\n'), ((3701, 3777), 'click.option', 'click.option', (['"""-k"""', '"""--keep"""'], {'is_flag': '(True)', 'help': '"""Leave clear files behind."""'}), "('-k', '--keep', is_flag=True, help='Leave clear files behind.')\n", (3713, 3777), False, 'import click\n'), ((3779, 3812), 'click.argument', 'click.argument', (['"""paths"""'], {'nargs': '(-1)'}), "('paths', nargs=-1)\n", (3793, 3812), False, 'import click\n'), ((4792, 4870), 'click.option', 'click.option', (['"""-d"""', '"""--dry-run"""'], {'is_flag': '(True)', 'help': '"""Do not perform anything."""'}), "('-d', '--dry-run', is_flag=True, help='Do not perform anything.')\n", (4804, 4870), False, 'import click\n'), ((4872, 4957), 'click.option', 'click.option', (['"""-k"""', '"""--keep"""'], {'is_flag': '(True)', 'help': '"""Leave encrypted files behind."""'}), "('-k', '--keep', is_flag=True, help='Leave encrypted files behind.'\n )\n", (4884, 4957), False, 'import click\n'), ((4965, 4998), 'click.argument', 'click.argument', (['"""paths"""'], 
{'nargs': '(-1)'}), "('paths', nargs=-1)\n", (4979, 4998), False, 'import click\n'), ((5984, 6076), 'click.option', 'click.option', (['"""-s"""', '"""--service"""'], {'help': '""""""', 'default': '"""Service to generate a password for."""'}), "('-s', '--service', help='', default=\n 'Service to generate a password for.')\n", (5996, 6076), False, 'import click\n'), ((6084, 6179), 'click.option', 'click.option', (['"""-u"""', '"""--username"""'], {'help': '""""""', 'default': '"""User name to generate a password for."""'}), "('-u', '--username', help='', default=\n 'User name to generate a password for.')\n", (6096, 6179), False, 'import click\n'), ((6187, 6322), 'click.option', 'click.option', (['"""-i"""', '"""--iterations"""'], {'help': '"""Number of iterations for the SHA512 algorithm (defaults to 100000)."""', 'default': '(100000)'}), "('-i', '--iterations', help=\n 'Number of iterations for the SHA512 algorithm (defaults to 100000).',\n default=100000)\n", (6199, 6322), False, 'import click\n'), ((6326, 6535), 'click.option', 'click.option', (['"""-c"""', '"""--characters"""'], {'help': '"""Characters classes to use for the digest; `l` for lowercase, `u` for uppercase, `d` for digits and `p` for punctuation (defaults to `ludp`)."""', 'default': '"""ludp"""'}), "('-c', '--characters', help=\n 'Characters classes to use for the digest; `l` for lowercase, `u` for uppercase, `d` for digits and `p` for punctuation (defaults to `ludp`).'\n , default='ludp')\n", (6338, 6535), False, 'import click\n'), ((6549, 6643), 'click.option', 'click.option', (['"""-l"""', '"""--length"""'], {'help': '"""Length of the digest (defaults to 32)."""', 'default': '(32)'}), "('-l', '--length', help=\n 'Length of the digest (defaults to 32).', default=32)\n", (6561, 6643), False, 'import click\n'), ((6648, 6753), 'click.option', 'click.option', (['"""-f"""', '"""--filename"""'], {'help': '"""YAML file to read the above information from."""', 'default': 'None'}), "('-f', '--filename', 
help=\n 'YAML file to read the above information from.', default=None)\n", (6660, 6753), False, 'import click\n'), ((6761, 6864), 'click.option', 'click.option', (['"""-v"""', '"""--version"""'], {'help': '"""YAML document starting at zero (defaults to 0)."""', 'default': '(0)'}), "('-v', '--version', help=\n 'YAML document starting at zero (defaults to 0).', default=0)\n", (6773, 6864), False, 'import click\n'), ((6872, 6990), 'click.option', 'click.option', (['"""-n"""', '"""--no-clipboard"""'], {'is_flag': '(True)', 'help': '"""Disable clipboard copy, password is printed to stdout."""'}), "('-n', '--no-clipboard', is_flag=True, help=\n 'Disable clipboard copy, password is printed to stdout.')\n", (6884, 6990), False, 'import click\n'), ((11237, 11315), 'click.option', 'click.option', (['"""-d"""', '"""--dry-run"""'], {'is_flag': '(True)', 'help': '"""Do not perform anything."""'}), "('-d', '--dry-run', is_flag=True, help='Do not perform anything.')\n", (11249, 11315), False, 'import click\n'), ((11317, 11426), 'click.option', 'click.option', (['"""-f"""', '"""--force"""'], {'is_flag': '(True)', 'help': '"""Force renaming, possibly overwriting existing files."""'}), "('-f', '--force', is_flag=True, help=\n 'Force renaming, possibly overwriting existing files.')\n", (11329, 11426), False, 'import click\n'), ((11430, 11464), 'click.argument', 'click.argument', (['"""pattern"""'], {'nargs': '(1)'}), "('pattern', nargs=1)\n", (11444, 11464), False, 'import click\n'), ((11466, 11499), 'click.argument', 'click.argument', (['"""paths"""'], {'nargs': '(-1)'}), "('paths', nargs=-1)\n", (11480, 11499), False, 'import click\n'), ((13117, 13228), 'click.option', 'click.option', (['"""-d"""', '"""--dry-run"""'], {'is_flag': '(True)', 'help': '"""Do not perform anything (ie.: not file mode fixing)."""'}), "('-d', '--dry-run', is_flag=True, help=\n 'Do not perform anything (ie.: not file mode fixing).')\n", (13129, 13228), False, 'import click\n'), ((13236, 13269), 
'click.argument', 'click.argument', (['"""paths"""'], {'nargs': '(-1)'}), "('paths', nargs=-1)\n", (13250, 13269), False, 'import click\n'), ((14041, 14131), 'click.option', 'click.option', (['"""-p"""', '"""--preview"""'], {'is_flag': '(True)', 'help': '"""Automatically preview document."""'}), "('-p', '--preview', is_flag=True, help=\n 'Automatically preview document.')\n", (14053, 14131), False, 'import click\n'), ((14139, 14214), 'click.option', 'click.option', (['"""-t"""', '"""--timestamp"""'], {'is_flag': '(True)', 'help': '"""Timestamp PDF file."""'}), "('-t', '--timestamp', is_flag=True, help='Timestamp PDF file.')\n", (14151, 14214), False, 'import click\n'), ((14216, 14293), 'click.option', 'click.option', (['"""-s"""', '"""--stylesheet"""'], {'help': '"""CSS stylesheet."""', 'default': '"""default"""'}), "('-s', '--stylesheet', help='CSS stylesheet.', default='default')\n", (14228, 14293), False, 'import click\n'), ((14295, 14328), 'click.argument', 'click.argument', (['"""paths"""'], {'nargs': '(-1)'}), "('paths', nargs=-1)\n", (14309, 14328), False, 'import click\n'), ((591, 624), 'click.echo', 'click.echo', (["('Error: %s' % message)"], {}), "('Error: %s' % message)\n", (601, 624), False, 'import click\n'), ((629, 640), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (637, 640), False, 'import sys\n'), ((777, 796), 'click.echo', 'click.echo', (['message'], {}), '(message)\n', (787, 796), False, 'import click\n'), ((2027, 2063), 're.sub', 're.sub', (['"""^ """', '""""""', 'doc'], {'flags': 're.M'}), "('^ ', '', doc, flags=re.M)\n", (2033, 2063), False, 'import re\n'), ((2075, 2116), 're.sub', 're.sub', (['""" \x08\n"""', '""""""', 'doc'], {'flags': 're.M'}), "(' \\x08\\n', '', doc, flags=re.M)\n", (2081, 2116), False, 'import re\n'), ((12563, 12582), 're.compile', 're.compile', (['pattern'], {}), '(pattern)\n', (12573, 12582), False, 'import re\n'), ((5807, 5833), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (5823, 
5833), False, 'import os\n'), ((15728, 15754), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (15744, 15754), False, 'import os\n'), ((4754, 4773), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (4763, 4773), False, 'import os\n'), ((5940, 5965), 'os.remove', 'os.remove', (['(filename + ext)'], {}), '(filename + ext)\n', (5949, 5965), False, 'import os\n'), ((13047, 13082), 'os.rename', 'os.rename', (["file['src']", "file['dst']"], {}), "(file['src'], file['dst'])\n", (13056, 13082), False, 'import os\n'), ((15796, 15819), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (15817, 15819), False, 'import datetime\n'), ((15843, 15868), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (15858, 15868), False, 'import os\n'), ((15892, 15918), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (15908, 15918), False, 'import os\n'), ((16308, 16343), 'subprocess.call', 'subprocess.call', (["['open', filename]"], {}), "(['open', filename])\n", (16323, 16343), False, 'import subprocess\n'), ((12384, 12409), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (12399, 12409), False, 'import os\n'), ((12430, 12456), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (12446, 12456), False, 'import os\n'), ((16161, 16178), 'weasyprint.HTML', 'HTML', ([], {'string': 'html'}), '(string=html)\n', (16165, 16178), False, 'from weasyprint import HTML, CSS\n'), ((12939, 12966), 'os.path.isfile', 'os.path.isfile', (["file['dst']"], {}), "(file['dst'])\n", (12953, 12966), False, 'import os\n'), ((16244, 16259), 'weasyprint.CSS', 'CSS', (['stylesheet'], {}), '(stylesheet)\n', (16247, 16259), False, 'from weasyprint import HTML, CSS\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-12-17 19:55
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: removes the `phone_number` field from the
    # `user` model in the App application.

    dependencies = [
        ('App', '0006_goods_user'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='user',
            name='phone_number',
        ),
    ]
| [
"django.db.migrations.RemoveField"
] | [((280, 342), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""user"""', 'name': '"""phone_number"""'}), "(model_name='user', name='phone_number')\n", (302, 342), False, 'from django.db import migrations\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sublime
import sublime_plugin
import LiveReload
import webbrowser
import os
class LiveReloadTest(sublime_plugin.ApplicationCommand):
    """Open the bundled LiveReload test page in the default browser."""

    def run(self):
        web_dir = os.path.join(sublime.packages_path(), 'LiveReload', 'web')
        test_page = os.path.join(web_dir, 'test.html')
        webbrowser.open_new_tab("file://" + test_page)
class LiveReloadHelp(sublime_plugin.ApplicationCommand):
    """Open the LiveReload usage documentation in the default browser."""

    def run(self):
        docs_url = 'https://github.com/alepez/LiveReload-sublimetext3#using'
        webbrowser.open_new_tab(docs_url)
class LiveReloadEnablePluginCommand(sublime_plugin.ApplicationCommand):
    """Show a quick panel listing LiveReload plugins; toggle the selection."""

    def on_done(self, index):
        """Quick-panel callback; `index` is -1 when the panel was cancelled."""
        # Bug fix: the original used `not index is -1`, an identity comparison
        # against an int literal that only works because of CPython small-int
        # interning. Compare by value instead.
        if index != -1:
            LiveReload.Plugin.togglePlugin(index)

    def run(self):
        sublime.active_window().show_quick_panel(LiveReload.Plugin.listPlugins(),
                self.on_done)
| [
"sublime.active_window",
"LiveReload.Plugin.togglePlugin",
"os.path.join",
"LiveReload.Plugin.listPlugins",
"webbrowser.open_new_tab",
"sublime.packages_path"
] | [((299, 330), 'os.path.join', 'os.path.join', (['path', '"""test.html"""'], {}), "(path, 'test.html')\n", (311, 330), False, 'import os\n'), ((339, 385), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (["('file://' + file_name)"], {}), "('file://' + file_name)\n", (362, 385), False, 'import webbrowser\n'), ((471, 558), 'webbrowser.open_new_tab', 'webbrowser.open_new_tab', (['"""https://github.com/alepez/LiveReload-sublimetext3#using"""'], {}), "(\n 'https://github.com/alepez/LiveReload-sublimetext3#using')\n", (494, 558), False, 'import webbrowser\n'), ((233, 256), 'sublime.packages_path', 'sublime.packages_path', ([], {}), '()\n', (254, 256), False, 'import sublime\n'), ((732, 769), 'LiveReload.Plugin.togglePlugin', 'LiveReload.Plugin.togglePlugin', (['index'], {}), '(index)\n', (762, 769), False, 'import LiveReload\n'), ((839, 870), 'LiveReload.Plugin.listPlugins', 'LiveReload.Plugin.listPlugins', ([], {}), '()\n', (868, 870), False, 'import LiveReload\n'), ((798, 821), 'sublime.active_window', 'sublime.active_window', ([], {}), '()\n', (819, 821), False, 'import sublime\n')] |
# Advent of Code 2019, Day 6
# (c) blu3r4y
import networkx as nx
from aocd.models import Puzzle
from funcy import print_calls
@print_calls
def part1(graph):
    """Total number of direct and indirect orbits.

    This equals the sum of distances from COM to every node, so one BFS
    (single_source_shortest_path_length) replaces the original per-node
    shortest-path queries, dropping O(V*(V+E)) to O(V+E).
    Note: nodes unreachable from COM are skipped instead of raising; for
    valid puzzle input every object orbits COM transitively.
    """
    distances = nx.single_source_shortest_path_length(graph, "COM")
    return sum(distances.values())
@print_calls
def part2(graph):
    """Minimum number of orbital transfers to move YOU next to SAN.

    Path length is measured on the undirected orbit graph; subtracting 2
    discounts the YOU and SAN endpoints themselves.
    """
    undirected = graph.to_undirected()
    hops = nx.shortest_path_length(undirected, "YOU", "SAN")
    return hops - 2
def load(data):
    """Parse whitespace-separated 'A)B' orbit pairs into a directed graph A -> B."""
    edges = (line.split(")") for line in data.split())
    return nx.DiGraph(edges)
if __name__ == "__main__":
    # Fetch the day-6 input through the aocd helper (requires a session token).
    puzzle = Puzzle(year=2019, day=6)
    ans1 = part1(load(puzzle.input_data))
    # Submission left commented out to avoid re-posting an answer.
    # puzzle.answer_a = ans1
    ans2 = part2(load(puzzle.input_data))
    # puzzle.answer_b = ans2
| [
"networkx.shortest_path_length",
"aocd.models.Puzzle"
] | [((530, 554), 'aocd.models.Puzzle', 'Puzzle', ([], {'year': '(2019)', 'day': '(6)'}), '(year=2019, day=6)\n', (536, 554), False, 'from aocd.models import Puzzle\n'), ((229, 274), 'networkx.shortest_path_length', 'nx.shortest_path_length', (['graph', '"""COM"""', 'target'], {}), "(graph, 'COM', target)\n", (252, 274), True, 'import networkx as nx\n')] |
from decimal import Decimal
import json
import logging
from django.shortcuts import render
from django.core.serializers.json import DjangoJSONEncoder
from django.http import (HttpResponse)
from django.views.generic import View
from rest_framework import status
from rest_framework.views import APIView
from api.models import (User,Spots,Images,Tags,TypesUserAction,UserActions,
SpotTags)
from api.api import (SpotsViewSet)
import boto3
from botocore.exceptions import NoCredentialsError
import requests
from core.settings import (API_KEY,FONT_AWESOME_KEY,defaultLat,defaultLng,
max_distance)
class IndexView(View):
    """Landing page: loads the current user's spots plus map configuration."""

    def __init__(self, *args, **kwargs):
        # Per-instance response payload; 'code' mirrors the API status code.
        self.response_data = {'error': [], 'data': {}, 'code': status.HTTP_200_OK}

    def get(self, request, *args, **kwargs):
        """Build the template context from the spots API and settings keys."""
        try:
            spot_instance = SpotsViewSet()
            # NOTE(review): user id is hard-coded to '1' — confirm against auth.
            spot_instance.user_places(request, user='1')
            if spot_instance.code == 200:
                self.response_data['data']['spots'] = spot_instance.response_data['data'][0]['spots']
                self.response_data['data']['api_key'] = API_KEY
                # FONT_AWESOME_KEY is optional; fall back to an empty string.
                self.response_data['data']['fontawesome_key'] = FONT_AWESOME_KEY if FONT_AWESOME_KEY else ''
                self.response_data['data']['defaultLat'] = defaultLat
                self.response_data['data']['defaultLng'] = defaultLng
            else:
                self.response_data = self.response_data['data']
                self.response_data['code'] = spot_instance.code
        except Exception as e:
            self.response_data['data'] = {'name': 'Not found information'}
            # Bug fix: the original assigned a dead `self.code` attribute here;
            # the template reads the status from response_data['code'].
            self.response_data['code'] = status.HTTP_500_INTERNAL_SERVER_ERROR
            self.response_data['error'].append("[IndexView] - Error: " + str(e))
        return render(request, template_name='frontend/index.html', context=self.response_data)
class SpotView(APIView):
    def __init__(self,*args, **kwargs):
        # Per-instance response payload; 'code' mirrors the HTTP status code
        # that the handlers below fill in.
        self.response_data = {'error': [], 'data': {}, 'code': status.HTTP_200_OK}
    def post(self, request, *args, **kwargs):
        """AJAX dispatcher keyed on POST['action']: place lookup, nearby
        search, spot creation, or fetching a spot for edition. Always returns
        the accumulated self.response_data as JSON."""
        # NOTE(review): `data` is never used in this method.
        data = {}
        try:
            # action == 'get_spot_modal': fetch details for a clicked location.
            if request.is_ajax() == True and request.POST['action'] == 'get_spot_modal':
                try:
                    spot_instance = SpotsViewSet()
                    spot_instance.place_information(request,
                        latitude=request.POST['lat'],
                        longitude=request.POST['lng'])
                    if spot_instance.code == 200:
                        self.response_data['data']['place_information'] = spot_instance.response_data['data'][0]['place_information']
                    else:
                        self.response_data = self.response_data['data']
                        self.response_data['code'] = spot_instance.code
                except Exception as e:
                    logging.getLogger('error_logger').error("Error in get_spot_modal: " + str(e))
                    # NOTE(review): `self.code` looks like a dead assignment —
                    # elsewhere the status is stored in response_data['code'].
                    self.code = status.HTTP_500_INTERNAL_SERVER_ERROR
                    self.response_data['error'].append("[SpotsView] - Error: " + str(e))
            # action == 'get_nearby_places': spots within max_distance of (lat, lng).
            elif request.is_ajax() == True and request.POST['action'] == 'get_nearby_places':
                current_latitude = Decimal(request.POST['lat'])
                current_longitude = Decimal(request.POST['lng'])
                try:
                    spot_instance = SpotsViewSet()
                    # NOTE(review): user is hard-coded to '1' — confirm against auth.
                    spot_instance.nearby_places(request,
                        latitude=current_latitude,
                        longitude=current_longitude,
                        max_distance=max_distance,user='1'
                    )
                    if spot_instance.code == 200:
                        self.response_data['data']['nearby'] = spot_instance.response_data['data'][0]['nearby']
                    else:
                        self.response_data = self.response_data['data']
                        self.response_data['code'] = spot_instance.code
                except Exception as e:
                    logging.getLogger('error_logger').error("Error in get_nearby_places: " + str(e))
                    self.code = status.HTTP_500_INTERNAL_SERVER_ERROR
                    self.response_data['error'].append("[SpotsView] - Error: " + str(e))
            # action == 'create_spot': persist a new spot with its tags.
            elif request.is_ajax() == True and request.POST['action'] == 'create_spot':
                try:
                    spot_instance = SpotsViewSet()
                    # An empty 'tagList' splits to [''], which means no tags.
                    if (request.POST['tagList'].split(',')[0]==''):
                        tagList=[]
                    else:
                        tagList=request.POST['tagList'].split(',')
                    spot_instance.create_spot(request,
                        country=request.POST['country'],
                        country_code=request.POST['countryCode'],
                        state=request.POST['state_name'],
                        city=request.POST['city'],
                        postal_code=request.POST['postalCode'],
                        full_address=request.POST['fullAddress'],
                        lat=request.POST['latitude'],
                        lng=request.POST['length'],
                        name=request.POST['placeName'],
                        tag_list=tagList,user=1
                    )
                    if spot_instance.code == 200:
                        self.response_data['data']['spots'] = spot_instance.response_data['data'][0]
                        self.response_data['code'] = spot_instance.code
                    else:
                        self.response_data = self.response_data['data']
                        self.response_data['code'] = spot_instance.code
                except Exception as e:
                    logging.getLogger('error_logger').error("Error in create_spot: " + str(e))
                    self.code = status.HTTP_500_INTERNAL_SERVER_ERROR
                    self.response_data['error'].append("[SpotsView] - Error: " + str(e))
            # action == 'edit_spot_modal': fetch a spot's details before editing.
            elif request.is_ajax() == True and request.POST['action'] == "edit_spot_modal":
                try:
                    spot_instance = SpotsViewSet()
                    spot_instance.spot_details(request,spot_id=request.POST['spot_id'])
                    if spot_instance.code == 200:
                        self.response_data['data'] = spot_instance.response_data['data'][0]
                    else:
                        self.response_data = self.response_data['data']
                except Exception as e:
                    logging.getLogger('error_logger').error("Error in edit_spot_modal: " + str(e))
                    self.code = status.HTTP_500_INTERNAL_SERVER_ERROR
                    self.response_data['error'].append("[SpotsView] - Error: " + str(e))
            # Non-AJAX request or unknown action.
            else:
                self.response_data = self.response_data['data']
                self.response_data['code'] = status.HTTP_400_BAD_REQUEST
        except Exception as e:
            logging.getLogger('error_logger').error("Error Creating a new spot: " + str(e))
            self.code = status.HTTP_500_INTERNAL_SERVER_ERROR
            self.response_data['error'].append("[SpotsView] - Error: " + str(e))
        return HttpResponse(json.dumps(self.response_data, cls=DjangoJSONEncoder), content_type='application/json')
    def put(self, request, *args, **kwargs):
        """Update a spot's name and tags from an AJAX PUT; returns JSON."""
        # Only honoured for AJAX requests with the PUT verb.
        if request.is_ajax() == True and request.method == 'PUT':
            try:
                spot_instance = SpotsViewSet()
                # An empty 'tags' value splits to [''], which means no tags.
                if (request.POST['tags'].split(',')[0]==''):
                    tagList=[]
                else:
                    tagList=request.POST['tags'].split(',')
                spot_instance.edit_spot(request,
                    spot_id=request.POST['spotId'],
                    name=request.POST['name'],
                    tags=tagList
                )
                if spot_instance.code == 200:
                    self.response_data['data'] = spot_instance.response_data['data']
                else:
                    self.response_data = self.response_data['data']
                    self.response_data['code'] = spot_instance.code
            except Exception as e:
                logging.getLogger('error_logger').error("Error Editing a spot: " + str(e))
                # NOTE(review): `self.code` looks like a dead assignment — the
                # serialized status lives in response_data['code'].
                self.code = status.HTTP_500_INTERNAL_SERVER_ERROR
                self.response_data['error'].append("[SpotsView] - Error: " + str(e))
        else:
            self.response_data = self.response_data['data']
            self.response_data['code'] = status.HTTP_400_BAD_REQUEST
        return HttpResponse(json.dumps(self.response_data, cls=DjangoJSONEncoder), content_type='application/json')
    def delete(self, request, *args, **kwargs):
        """Remove a spot identified by request.data['spot_id']; returns JSON."""
        # NOTE(review): `data` is never used in this method.
        data = {}
        if request.method == 'DELETE':
            try:
                _delete_spot = SpotsViewSet()
                _delete_spot.destroy_spot(request,spot_id=request.data['spot_id'])
                if _delete_spot.code == 200:
                    # Echo back the removed spot's name for the UI toast.
                    self.response_data['data']['placeName'] = _delete_spot.response_data['data'][0]['placeName']
                else:
                    # NOTE(review): self-assignment of ['data'] — the sibling
                    # handlers assign self.response_data = self.response_data['data']
                    # here instead; confirm which form was intended.
                    self.response_data['data'] = self.response_data['data']
                    self.response_data['code'] = _delete_spot.code
            except Exception as e:
                # Unlike the other handlers, failures here are only logged and
                # not appended to response_data['error'].
                logging.getLogger('error_logger').error("Error Deleting a spot: " + str(e))
        else:
            self.response_data = self.response_data['data']
            self.response_data['code'] = status.HTTP_400_BAD_REQUEST
return HttpResponse(json.dumps(self.response_data, cls=DjangoJSONEncoder), content_type='application/json') | [
"django.shortcuts.render",
"logging.getLogger",
"json.dumps",
"api.api.SpotsViewSet",
"decimal.Decimal"
] | [((1887, 1972), 'django.shortcuts.render', 'render', (['request'], {'template_name': '"""frontend/index.html"""', 'context': 'self.response_data'}), "(request, template_name='frontend/index.html', context=self.response_data\n )\n", (1893, 1972), False, 'from django.shortcuts import render\n'), ((837, 851), 'api.api.SpotsViewSet', 'SpotsViewSet', ([], {}), '()\n', (849, 851), False, 'from api.api import SpotsViewSet\n'), ((7598, 7651), 'json.dumps', 'json.dumps', (['self.response_data'], {'cls': 'DjangoJSONEncoder'}), '(self.response_data, cls=DjangoJSONEncoder)\n', (7608, 7651), False, 'import json\n'), ((9027, 9080), 'json.dumps', 'json.dumps', (['self.response_data'], {'cls': 'DjangoJSONEncoder'}), '(self.response_data, cls=DjangoJSONEncoder)\n', (9037, 9080), False, 'import json\n'), ((10050, 10103), 'json.dumps', 'json.dumps', (['self.response_data'], {'cls': 'DjangoJSONEncoder'}), '(self.response_data, cls=DjangoJSONEncoder)\n', (10060, 10103), False, 'import json\n'), ((7895, 7909), 'api.api.SpotsViewSet', 'SpotsViewSet', ([], {}), '()\n', (7907, 7909), False, 'from api.api import SpotsViewSet\n'), ((9326, 9340), 'api.api.SpotsViewSet', 'SpotsViewSet', ([], {}), '()\n', (9338, 9340), False, 'from api.api import SpotsViewSet\n'), ((2447, 2461), 'api.api.SpotsViewSet', 'SpotsViewSet', ([], {}), '()\n', (2459, 2461), False, 'from api.api import SpotsViewSet\n'), ((3463, 3491), 'decimal.Decimal', 'Decimal', (["request.POST['lat']"], {}), "(request.POST['lat'])\n", (3470, 3491), False, 'from decimal import Decimal\n'), ((3528, 3556), 'decimal.Decimal', 'Decimal', (["request.POST['lng']"], {}), "(request.POST['lng'])\n", (3535, 3556), False, 'from decimal import Decimal\n'), ((3615, 3629), 'api.api.SpotsViewSet', 'SpotsViewSet', ([], {}), '()\n', (3627, 3629), False, 'from api.api import SpotsViewSet\n'), ((7346, 7379), 'logging.getLogger', 'logging.getLogger', (['"""error_logger"""'], {}), "('error_logger')\n", (7363, 7379), False, 'import logging\n'), ((4697, 
4711), 'api.api.SpotsViewSet', 'SpotsViewSet', ([], {}), '()\n', (4709, 4711), False, 'from api.api import SpotsViewSet\n'), ((8628, 8661), 'logging.getLogger', 'logging.getLogger', (['"""error_logger"""'], {}), "('error_logger')\n", (8645, 8661), False, 'import logging\n'), ((9801, 9834), 'logging.getLogger', 'logging.getLogger', (['"""error_logger"""'], {}), "('error_logger')\n", (9818, 9834), False, 'import logging\n'), ((3049, 3082), 'logging.getLogger', 'logging.getLogger', (['"""error_logger"""'], {}), "('error_logger')\n", (3066, 3082), False, 'import logging\n'), ((6502, 6516), 'api.api.SpotsViewSet', 'SpotsViewSet', ([], {}), '()\n', (6514, 6516), False, 'from api.api import SpotsViewSet\n'), ((4267, 4300), 'logging.getLogger', 'logging.getLogger', (['"""error_logger"""'], {}), "('error_logger')\n", (4284, 4300), False, 'import logging\n'), ((6033, 6066), 'logging.getLogger', 'logging.getLogger', (['"""error_logger"""'], {}), "('error_logger')\n", (6050, 6066), False, 'import logging\n'), ((6908, 6941), 'logging.getLogger', 'logging.getLogger', (['"""error_logger"""'], {}), "('error_logger')\n", (6925, 6941), False, 'import logging\n')] |
# -*- coding: utf-8 -*-
"""
.. module:: openzwave.group
This file is part of **python-openzwave** project https://github.com/OpenZWave/python-openzwave.
:platform: Unix, Windows, MacOS X
:sinopsis: openzwave API
.. moduleauthor: bibi21000 aka <NAME> <<EMAIL>>
License : GPL(v3)
**python-openzwave** is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
**python-openzwave** is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with python-openzwave. If not, see http://www.gnu.org/licenses.
"""
from openzwave.object import ZWaveObject
# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+ ships logging.NullHandler in the stdlib
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        """Backport of logging.NullHandler for Python 2.6."""
        def emit(self, record):
            # Intentionally discard every record.
            pass
# Module-level logger; the NullHandler suppresses "No handler found"
# warnings when the embedding application configures no logging.
logger = logging.getLogger('openzwave')
logger.addHandler(NullHandler())
class ZWaveGroup(ZWaveObject):
    """
    An association group of a Z-Wave node.

    Wraps the manager's association API for one (node_id, group_index)
    pair: label, capacity, and member management.
    """
    def __init__(self, group_index, network=None, node_id=None):
        """
        Initialize the group object.

        :param group_index: index of the group
        :type group_index: int
        :param network: The network object to access the manager
        :type network: ZWaveNetwork
        :param node_id: ID of the node this group belongs to
        :type node_id: int
        """
        ZWaveObject.__init__(self, group_index, network)
        # Identifies the owning node; used in every manager call below.
        self._node_id = node_id
        # Group index within that node's association groups.
        self._index = group_index
def __str__(self):
"""
The string representation of the group.
:rtype: str
"""
return 'index: [%s] label: [%s]' % (self.index, self.label)
    @property
    def index(self):
        """
        The index of the group (as passed at construction time).
        :rtype: int
        """
        return self._index
    @property
    def label(self):
        """
        The human-readable label of the group, as reported by the manager.
        :rtype: str
        """
        return self._network.manager.getGroupLabel(self.home_id, self._node_id, self.index)
@property
def max_associations(self):
"""
The number of associations.
:rtype: int
"""
return self._network.manager.getMaxAssociations(self.home_id, self._node_id, self.index)
@property
def associations(self):
"""
The members of associations.
:rtype: set()
"""
return self._network.manager.getAssociations(self.home_id, self._node_id, self.index)
@property
def associations_instances(self):
"""
The members of associations with theirs instances.
Nodes that does not support multi-instances have an instanceid equal to 0.
:rtype: set() of tuples (nodeid,instanceid)
"""
return self._network.manager.getAssociationsInstances(self.home_id, self._node_id, self.index)
def add_association(self, target_node_id, instance=0x00):
"""
Adds a node to an association group.
Due to the possibility of a device being asleep, the command is assumed to
complete with success, and the association data held in this class is updated directly. This
will be reverted by a future Association message from the device if the Z-Wave
message actually failed to get through. Notification callbacks will be sent in
both cases.
:param target_node_id: Identifier for the node that will be added to the association group.
:type target_node_id: int
:param instance: The instance that will be added to the association group.
:type instance: int
"""
self._network.manager.addAssociation(self.home_id, self._node_id, self.index, target_node_id, instance)
def remove_association(self, target_node_id, instance=0x00):
"""
Removes a node from an association group.
Due to the possibility of a device being asleep, the command is assumed to
succeed, and the association data held in this class is updated directly. This
will be reverted by a future Association message from the device if the Z-Wave
message actually failed to get through. Notification callbacks will be sent
in both cases.
:param target_node_id: Identifier for the node that will be removed from the association group.
:type target_node_id: int
:param instance: The instance that will be added to the association group.
:type instance: int
"""
self._network.manager.removeAssociation(self._network.home_id, self._node_id, self.index, target_node_id, instance)
def to_dict(self, extras=['all']):
"""
Return a dict representation of the group.
:param extras: The extra inforamtions to add
:type extras: []
:returns: A dict
:rtype: dict()
"""
if 'all' in extras:
extras = ['associations']
ret = {}
ret['label'] = self.label
if 'associations' in extras:
ret['associations'] = dict.fromkeys(self.associations, 0)
return ret
| [
"logging.getLogger",
"openzwave.object.ZWaveObject.__init__",
"logging.NullHandler"
] | [((1268, 1298), 'logging.getLogger', 'logging.getLogger', (['"""openzwave"""'], {}), "('openzwave')\n", (1285, 1298), False, 'import logging\n'), ((1317, 1330), 'logging.NullHandler', 'NullHandler', ([], {}), '()\n', (1328, 1330), False, 'from logging import NullHandler\n'), ((1871, 1919), 'openzwave.object.ZWaveObject.__init__', 'ZWaveObject.__init__', (['self', 'group_index', 'network'], {}), '(self, group_index, network)\n', (1891, 1919), False, 'from openzwave.object import ZWaveObject\n')] |
"""
(C) Copyright 2021 IBM Corp.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Created on June 30, 2021
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from fuse.losses.loss_base import FuseLossBase
from fuse.utils.utils_hierarchical_dict import FuseUtilsHierarchicalDict
from typing import Callable, Dict, Optional
def make_one_hot(input, num_classes, device='cuda'):
    """Convert class index tensor to one hot encoding tensor.

    Args:
        input: A tensor of shape [N, 1, *] holding integer class indices
        num_classes: An int of number of class
        device: torch device the one-hot tensor is allocated on (default 'cuda')

    Returns:
        A tensor of shape [N, num_classes, *]
    """
    # The output keeps every dimension of the input except the channel
    # dimension, which is widened to num_classes.
    out_shape = list(input.shape)
    out_shape[1] = num_classes
    one_hot = torch.zeros(tuple(out_shape), device=device)
    # Write a 1 at each position indexed by the class id along dim 1.
    return one_hot.scatter_(1, input, 1)
class BinaryDiceLoss(nn.Module):
    def __init__(self, power: int=1, eps: float =1., reduction: str = 'mean'):
        r'''
        Soft binary Dice loss.

        :param power: Denominator value: \sum{x^p} + \sum{y^p}, default: 1
        :param eps: A float number to smooth loss, and avoid NaN error, default: 1
        :param reduction: Reduction method to apply, return mean over batch if 'mean',
            return sum if 'sum', return a tensor of shape [N,] if 'none'

        Returns: Loss tensor according to arg reduction
        Raise: Exception if unexpected reduction
        '''
        super().__init__()
        self.p = power
        self.reduction = reduction
        self.eps = eps

    def __call__(self, predict, target):
        assert predict.shape[0] == target.shape[0], "predict & target batch size don't match"
        # Flatten everything but the batch dimension.
        flat_pred = predict.contiguous().view(predict.shape[0], -1)
        flat_tgt = target.contiguous().view(target.shape[0], -1)
        # Integer labels are cast to float so elementwise ops are valid.
        if flat_tgt.dtype == torch.int64:
            flat_tgt = flat_tgt.type(torch.float32).to(flat_tgt.device)
        # Smoothed Dice: (2*|x∩y| + eps) / (|x|^p + |y|^p + eps), per sample.
        numerator = 2 * torch.sum(torch.mul(flat_pred, flat_tgt), dim=1) + self.eps
        denominator = torch.sum(flat_pred.pow(self.p) + flat_tgt.pow(self.p), dim=1) + self.eps
        per_sample = 1 - numerator / denominator

        if self.reduction == 'mean':
            return per_sample.mean()
        if self.reduction == 'sum':
            return per_sample.sum()
        if self.reduction == 'none':
            return per_sample
        raise Exception('Unexpected reduction {}'.format(self.reduction))
class FuseDiceLoss(FuseLossBase):
    def __init__(self, pred_name,
                 target_name,
                 filter_func: Optional[Callable] = None,
                 class_weights=None,
                 ignore_cls_index_list=None,
                 resize_mode: str = 'maxpool',
                 **kwargs):
        '''
        Multi-class Dice loss: weighted sum of per-class binary Dice losses.

        :param pred_name: batch_dict key for predicted output (e.g., class probabilities after softmax).
                        Expected Tensor shape = [batch, num_classes, height, width]
        :param target_name: batch_dict key for target (e.g., ground truth label). Expected Tensor shape = [batch, height, width]
        :param filter_func: function that filters batch_dict. The function gets as input batch_dict and returns filtered batch_dict
        :param class_weights: An array of shape [num_classes,]
        :param ignore_cls_index_list: class indices to ignore (list); None means no class is ignored
        :param resize_mode: Resize mode- either using a max pooling kernel(default), or using PyTorch
                        interpolation ('interpolate'/'maxpool')
        :param kwargs: args pass to BinaryDiceLoss
        '''
        super().__init__(pred_name, target_name, class_weights)
        self.class_weights = class_weights
        self.filter_func = filter_func
        self.kwargs = kwargs
        # Bug fix: the original stored None when no ignore list was supplied,
        # which made `cls_index not in self.ignore_cls_index_list` raise
        # TypeError in __call__. Normalize to an empty list instead.
        self.ignore_cls_index_list = ignore_cls_index_list if ignore_cls_index_list is not None else []
        self.resize_mode = resize_mode
        self.dice = BinaryDiceLoss(**self.kwargs)

    def __call__(self, batch_dict):
        """Compute the (weighted) Dice loss for one batch.

        :param batch_dict: hierarchical batch dict holding prediction and target tensors
        :returns: scalar loss tensor, scaled by self.weight from the base class
        """
        if self.filter_func is not None:
            batch_dict = self.filter_func(batch_dict)
        predict = FuseUtilsHierarchicalDict.get(batch_dict, self.pred_name).float()
        target = FuseUtilsHierarchicalDict.get(batch_dict, self.target_name).long()
        n, c, h, w = predict.shape
        tar_shape = target.shape
        if len(tar_shape) < 4:
            # Add a channel dim: [N, H, W] -> [N, 1, H, W]
            target = target.unsqueeze(1)
        nt, ct, ht, wt = target.shape
        if h != ht or w != wt:  # target resolution differs: downsample the target
            if self.resize_mode == 'maxpool':
                block_height = int(ht / h)
                block_width = int(wt / w)
                # Crop a symmetric border so the pooling blocks tile exactly.
                residual_h = int((ht - (block_height * h)) / 2)
                residual_w = int((wt - (block_width * w)) / 2)
                target = torch.nn.functional.max_pool2d(target[:, :, residual_h:ht - residual_h, residual_w:wt - residual_w],
                                                      kernel_size=(block_height, block_width))
            elif self.resize_mode == 'interpolate':
                target = torch.nn.functional.interpolate(target, size=(h, w))
            else:
                # ValueError is a subclass of Exception, so existing callers
                # catching Exception still work; the message now says why.
                raise ValueError('Unsupported resize_mode: {}'.format(self.resize_mode))
        total_loss = 0
        n_classes = predict.shape[1]

        # Convert target to one hot encoding
        if n_classes > 1 and target.shape[1] != n_classes:
            target = make_one_hot(target, n_classes)

        assert predict.shape == target.shape, 'predict & target shape do not match'
        total_class_weights = sum(self.class_weights) if self.class_weights is not None else n_classes
        for cls_index in range(n_classes):
            if cls_index not in self.ignore_cls_index_list:
                dice_loss = self.dice(predict[:, cls_index, :, :], target[:, cls_index, :, :])
                if self.class_weights is not None:
                    assert self.class_weights.shape[0] == n_classes, \
                        'Expect weight shape [{}], got[{}]'.format(n_classes, self.class_weights.shape[0])
                    dice_loss *= self.class_weights[cls_index]
                total_loss += dice_loss
        # NOTE(review): the normalizer also counts ignored classes (and their
        # weights) -- presumably intentional; confirm when combining
        # ignore_cls_index_list with class_weights.
        total_loss /= total_class_weights
        return self.weight*total_loss
| [
"torch.mul",
"numpy.array",
"fuse.utils.utils_hierarchical_dict.FuseUtilsHierarchicalDict.get",
"torch.nn.functional.interpolate",
"torch.nn.functional.max_pool2d",
"torch.zeros"
] | [((1137, 1158), 'numpy.array', 'np.array', (['input.shape'], {}), '(input.shape)\n', (1145, 1158), True, 'import numpy as np\n'), ((1224, 1257), 'torch.zeros', 'torch.zeros', (['shape'], {'device': 'device'}), '(shape, device=device)\n', (1235, 1257), False, 'import torch\n'), ((4650, 4707), 'fuse.utils.utils_hierarchical_dict.FuseUtilsHierarchicalDict.get', 'FuseUtilsHierarchicalDict.get', (['batch_dict', 'self.pred_name'], {}), '(batch_dict, self.pred_name)\n', (4679, 4707), False, 'from fuse.utils.utils_hierarchical_dict import FuseUtilsHierarchicalDict\n'), ((4733, 4792), 'fuse.utils.utils_hierarchical_dict.FuseUtilsHierarchicalDict.get', 'FuseUtilsHierarchicalDict.get', (['batch_dict', 'self.target_name'], {}), '(batch_dict, self.target_name)\n', (4762, 4792), False, 'from fuse.utils.utils_hierarchical_dict import FuseUtilsHierarchicalDict\n'), ((5307, 5452), 'torch.nn.functional.max_pool2d', 'torch.nn.functional.max_pool2d', (['target[:, :, residual_h:ht - residual_h, residual_w:wt - residual_w]'], {'kernel_size': '(block_height, block_width)'}), '(target[:, :, residual_h:ht - residual_h,\n residual_w:wt - residual_w], kernel_size=(block_height, block_width))\n', (5337, 5452), False, 'import torch\n'), ((2444, 2470), 'torch.mul', 'torch.mul', (['predict', 'target'], {}), '(predict, target)\n', (2453, 2470), False, 'import torch\n'), ((5583, 5635), 'torch.nn.functional.interpolate', 'torch.nn.functional.interpolate', (['target'], {'size': '(h, w)'}), '(target, size=(h, w))\n', (5614, 5635), False, 'import torch\n')] |
from base import assert_case
def test_script_pipeline():
    """Smoke-test the 'scm_pipeline' case via the shared assert_case helper."""
    assert_case('scm_pipeline')
| [
"base.assert_case"
] | [((63, 90), 'base.assert_case', 'assert_case', (['"""scm_pipeline"""'], {}), "('scm_pipeline')\n", (74, 90), False, 'from base import assert_case\n')] |
from typing import Optional
import tensorflow as tf
import tensorflow.keras.backend as K
from tensorflow.keras import Model
from tensorflow.keras.layers import Layer
import numpy as np
import rinokeras as rk
from rinokeras.layers import WeightNormDense as Dense
from rinokeras.layers import LayerNorm, Stack
class RandomReplaceMask(Layer):
    """ Copied from rinokeras because we're going to potentially have
    different replace masks.

    Replaces some percentage of the input with a mask token. Used for
    implementing style models. This is actually slightly more complex - it
    does one of three things

    Based on https://arxiv.org/abs/1810.04805.

    Args:
        percentage (float): Percentage of input tokens to mask
        mask_token (int): Token to replace masked input with
        n_symbols (Optional[int]): vocabulary size used when substituting a
            random symbol for a masked position
    """

    def __init__(self,
                 percentage: float,
                 mask_token: int,
                 n_symbols: Optional[int] = None,
                 *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

        if not 0 <= percentage < 1:
            raise ValueError("Masking percentage must be in [0, 1).\
                Received {}".format(percentage))
        self.percentage = percentage
        self.mask_token = mask_token
        self.n_symbols = n_symbols

    def _generate_bert_mask(self, inputs):
        # Each position is selected for masking independently with
        # probability `percentage`.
        mask_shape = K.shape(inputs)
        bert_mask = K.random_uniform(mask_shape) < self.percentage
        return bert_mask

    def call(self,
             inputs: tf.Tensor,
             mask: Optional[tf.Tensor] = None):
        """
        Args:
            inputs (tf.Tensor[ndims=2, int]): Tensor of values to mask
            mask (Optional[tf.Tensor[bool]]): Locations in the inputs to that are valid
                (i.e. not padding, start tokens, etc.)
        Returns:
            masked_inputs (tf.Tensor[ndims=2, int]): Tensor of masked values
            bert_mask: Locations in the input that were masked
        """
        bert_mask = self._generate_bert_mask(inputs)

        if mask is not None:
            bert_mask &= mask

        # Zero out the selected positions before re-filling them below.
        masked_inputs = inputs * tf.cast(~bert_mask, inputs.dtype)

        # BERT-style split of the masked positions (cf. the thresholds below):
        #   < 0.8              -> replaced with the mask token
        #   < 0.1 of the rest  -> replaced with a random symbol in [0, n_symbols)
        #   remainder          -> keeps the original token
        token_bert_mask = K.random_uniform(K.shape(bert_mask)) < 0.8
        random_bert_mask = (K.random_uniform(
            K.shape(bert_mask)) < 0.1) & ~token_bert_mask
        true_bert_mask = ~token_bert_mask & ~random_bert_mask

        token_bert_mask = tf.cast(token_bert_mask & bert_mask, inputs.dtype)
        random_bert_mask = tf.cast(random_bert_mask & bert_mask, inputs.dtype)
        true_bert_mask = tf.cast(true_bert_mask & bert_mask, inputs.dtype)

        masked_inputs += self.mask_token * token_bert_mask  # type: ignore

        masked_inputs += K.random_uniform(
            K.shape(bert_mask), 0, self.n_symbols, dtype=inputs.dtype) * random_bert_mask

        masked_inputs += inputs * true_bert_mask

        return masked_inputs, bert_mask
class ContiguousReplaceMask(Layer):
    """ Copied from rinokeras because we're going to potentially have
    different replace masks.

    Selects *contiguous spans* (average length `avg_seq_len`) of the input
    for masking, instead of independent positions. Note: only the mask
    generation is defined here; there is no `call` override in this class --
    presumably it is intended to mirror RandomReplaceMask's usage (TODO
    confirm).

    Based on https://arxiv.org/abs/1810.04805.

    Args:
        percentage (float): Percentage of input tokens to mask
        mask_token (int): Token to replace masked input with
        n_symbols (Optional[int]): vocabulary size for random replacement
        avg_seq_len (int): average length of each masked contiguous span
    """

    def __init__(self,
                 percentage: float,
                 mask_token: int,
                 n_symbols: Optional[int] = None,
                 avg_seq_len: int = 3,
                 *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

        if not 0 <= percentage < 1:
            raise ValueError("Masking percentage must be in [0, 1).\
                Received {}".format(percentage))
        self.percentage = percentage
        self.mask_token = mask_token
        self.avg_seq_len = avg_seq_len
        self.n_symbols = n_symbols

    def _generate_bert_mask(self, inputs):
        """Build a boolean mask of contiguous spans covering roughly
        `percentage` of the positions in `inputs`."""

        def _numpy_generate_contiguous_mask(array):
            # Start a new span with probability 1/avg_seq_len, then label
            # each position with its cumulative span id.
            mask = np.random.random(array.shape) < (1 / self.avg_seq_len)
            mask = np.cumsum(mask, 1)
            seqvals = np.max(mask)
            mask_prob = self.percentage * array.shape[1] / seqvals  # increase probability because fewer sequences
            vals_to_mask = np.arange(seqvals)[np.random.random((seqvals,)) < mask_prob]
            indices_to_mask = np.isin(mask, vals_to_mask)
            mask[indices_to_mask] = 1
            mask[~indices_to_mask] = 0
            # Bug fix: `np.bool` was a deprecated alias removed in NumPy 1.24;
            # the builtin `bool` produces the identical dtype on all versions.
            return np.asarray(mask, bool)

        bert_mask = tf.py_func(_numpy_generate_contiguous_mask, [inputs], tf.bool)
        bert_mask.set_shape(inputs.shape)
        return bert_mask
class RandomSequenceMask(Model):
    """Applies a BERT-style replacement mask to a protein sequence batch,
    delegating to RandomReplaceMask or ContiguousReplaceMask based on
    `mask_type`."""

    def __init__(self,
                 n_symbols: int,
                 mask_token: int,
                 mask_percentage: float = 0.15,
                 mask_type: str = 'random'):
        super().__init__()

        # Choose the masking strategy; 'random' masks independent positions,
        # 'contiguous' masks spans.
        if mask_type == 'random':
            self.bert_mask = RandomReplaceMask(mask_percentage, mask_token, n_symbols)
        elif mask_type == 'contiguous':
            self.bert_mask = ContiguousReplaceMask(mask_percentage, mask_token, n_symbols)
        else:
            raise ValueError("Unrecognized mask_type: {}".format(mask_type))

    def call(self, inputs):
        """
        Args:
            sequence: tf.Tensor[int32] - Amino acid sequence,
                a padded tensor with shape [batch_size, MAX_PROTEIN_LENGTH]
            protein_length: tf.Tensor[int32] - Length of each protein in the sequence, a tensor with shape [batch_size]

        Output:
            amino_acid_probs: tf.Tensor[float32] - Probability of each type of amino acid,
                a tensor with shape [batch_size, MAX_PROTEIN_LENGTH, n_symbols]
        """
        sequence = inputs['primary']
        protein_length = inputs['protein_length']

        # Valid (non-padding) positions; only these may be masked.
        sequence_mask = rk.utils.convert_sequence_length_to_sequence_mask(
            sequence, protein_length)
        masked_sequence, bert_mask = self.bert_mask(sequence, sequence_mask)

        # Keep the unmasked sequence around so the loss can be computed
        # against the original tokens.
        inputs['original_sequence'] = sequence
        inputs['primary'] = masked_sequence
        inputs['bert_mask'] = bert_mask

        return inputs
| [
"tensorflow.keras.backend.shape",
"numpy.random.random",
"numpy.asarray",
"numpy.isin",
"numpy.max",
"rinokeras.utils.convert_sequence_length_to_sequence_mask",
"numpy.cumsum",
"tensorflow.keras.backend.random_uniform",
"tensorflow.py_func",
"tensorflow.cast",
"numpy.arange"
] | [((1368, 1383), 'tensorflow.keras.backend.shape', 'K.shape', (['inputs'], {}), '(inputs)\n', (1375, 1383), True, 'import tensorflow.keras.backend as K\n'), ((2467, 2517), 'tensorflow.cast', 'tf.cast', (['(token_bert_mask & bert_mask)', 'inputs.dtype'], {}), '(token_bert_mask & bert_mask, inputs.dtype)\n', (2474, 2517), True, 'import tensorflow as tf\n'), ((2545, 2596), 'tensorflow.cast', 'tf.cast', (['(random_bert_mask & bert_mask)', 'inputs.dtype'], {}), '(random_bert_mask & bert_mask, inputs.dtype)\n', (2552, 2596), True, 'import tensorflow as tf\n'), ((2622, 2671), 'tensorflow.cast', 'tf.cast', (['(true_bert_mask & bert_mask)', 'inputs.dtype'], {}), '(true_bert_mask & bert_mask, inputs.dtype)\n', (2629, 2671), True, 'import tensorflow as tf\n'), ((4697, 4759), 'tensorflow.py_func', 'tf.py_func', (['_numpy_generate_contiguous_mask', '[inputs]', 'tf.bool'], {}), '(_numpy_generate_contiguous_mask, [inputs], tf.bool)\n', (4707, 4759), True, 'import tensorflow as tf\n'), ((6044, 6119), 'rinokeras.utils.convert_sequence_length_to_sequence_mask', 'rk.utils.convert_sequence_length_to_sequence_mask', (['sequence', 'protein_length'], {}), '(sequence, protein_length)\n', (6093, 6119), True, 'import rinokeras as rk\n'), ((1404, 1432), 'tensorflow.keras.backend.random_uniform', 'K.random_uniform', (['mask_shape'], {}), '(mask_shape)\n', (1420, 1432), True, 'import tensorflow.keras.backend as K\n'), ((2170, 2203), 'tensorflow.cast', 'tf.cast', (['(~bert_mask)', 'inputs.dtype'], {}), '(~bert_mask, inputs.dtype)\n', (2177, 2203), True, 'import tensorflow as tf\n'), ((4238, 4256), 'numpy.cumsum', 'np.cumsum', (['mask', '(1)'], {}), '(mask, 1)\n', (4247, 4256), True, 'import numpy as np\n'), ((4279, 4291), 'numpy.max', 'np.max', (['mask'], {}), '(mask)\n', (4285, 4291), True, 'import numpy as np\n'), ((4525, 4552), 'numpy.isin', 'np.isin', (['mask', 'vals_to_mask'], {}), '(mask, vals_to_mask)\n', (4532, 4552), True, 'import numpy as np\n'), ((4650, 4675), 'numpy.asarray', 
'np.asarray', (['mask', 'np.bool'], {}), '(mask, np.bool)\n', (4660, 4675), True, 'import numpy as np\n'), ((2248, 2266), 'tensorflow.keras.backend.shape', 'K.shape', (['bert_mask'], {}), '(bert_mask)\n', (2255, 2266), True, 'import tensorflow.keras.backend as K\n'), ((2804, 2822), 'tensorflow.keras.backend.shape', 'K.shape', (['bert_mask'], {}), '(bert_mask)\n', (2811, 2822), True, 'import tensorflow.keras.backend as K\n'), ((4164, 4193), 'numpy.random.random', 'np.random.random', (['array.shape'], {}), '(array.shape)\n', (4180, 4193), True, 'import numpy as np\n'), ((4434, 4452), 'numpy.arange', 'np.arange', (['seqvals'], {}), '(seqvals)\n', (4443, 4452), True, 'import numpy as np\n'), ((2332, 2350), 'tensorflow.keras.backend.shape', 'K.shape', (['bert_mask'], {}), '(bert_mask)\n', (2339, 2350), True, 'import tensorflow.keras.backend as K\n'), ((4453, 4481), 'numpy.random.random', 'np.random.random', (['(seqvals,)'], {}), '((seqvals,))\n', (4469, 4481), True, 'import numpy as np\n')] |
from django.shortcuts import render, redirect
from django.http import HttpResponse
from .models import BoardMember
# Post 추가
from django.core import serializers
from rest_framework.decorators import api_view, permission_classes, authentication_classes
from rest_framework.permissions import IsAuthenticated # 로그인여부 확인
from rest_framework_jwt.authentication import JSONWebTokenAuthentication # JWT인증 확인
from .models import Post
def register(request):
    """Handle user registration.

    GET: render the registration form.
    POST: validate the submitted fields; on success persist a new
    BoardMember and redirect to the index page, otherwise re-render the
    form with an error message.
    """
    if request.method == 'GET':
        return render(request, 'register.html')
    elif request.method == 'POST':
        username = request.POST.get('username', None)
        # Bug fix: use .get for every field -- request.POST['password']
        # raised MultiValueDictKeyError when the field was absent.
        password = request.POST.get('password', None)
        email = request.POST.get('email', None)
        res_data = {}
        # Bug fix: the password was never validated, and the error message
        # was built but never shown (the view redirected regardless).
        if not (username and password and email):
            res_data['error'] = '모든 값을 입력해주세요.'
            return render(request, 'register.html', res_data)
        member = BoardMember(
            # NOTE(review): the password is stored in plaintext -- hash it
            # (e.g. django.contrib.auth.hashers) before production use.
            username = username,
            password = password,
            email = email,
        )
        member.save()  # persist to the database
        # Security fix: never log credentials; the original printed the raw
        # password to the console.
        print("#####회원가입#####\nid: ", member.username, "\nemail: ", member.email)
        return redirect('/')  # back to the index page
@api_view(['GET'])  # only GET requests are accepted; the response is JSON
@permission_classes((IsAuthenticated, ))  # permission check (only verifies the user is logged in)
@authentication_classes((JSONWebTokenAuthentication,))  # validates the JWT; returns a JSON error if the token is bad
def posts(request):
    """Return all published posts, newest first, serialized as JSON."""
    posts = Post.objects.filter(published_at__isnull=False).order_by('-published_at')
    post_list = serializers.serialize('json', posts)
    return HttpResponse(post_list, content_type="text/json-comment-filtered")
# 다음일정 (대기)
# https://velog.io/@teddybearjung/Django-%EB%A1%9C-%EA%B2%8C%EC%8B%9C%ED%8C%90-%EB%A7%8C%EB%93%A4%EA%B8%B010.-Login-%ED%99%94%EB%A9%B4-templates-%EB%A7%8C%EB%93%A4%EA%B8%B0-login-%ED%95%A8%EC%88%98-%EC%9E%91%EC%84%B1 | [
"django.shortcuts.render",
"rest_framework.decorators.permission_classes",
"django.http.HttpResponse",
"rest_framework.decorators.authentication_classes",
"django.shortcuts.redirect",
"django.core.serializers.serialize",
"rest_framework.decorators.api_view"
] | [((1197, 1214), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (1205, 1214), False, 'from rest_framework.decorators import api_view, permission_classes, authentication_classes\n'), ((1246, 1284), 'rest_framework.decorators.permission_classes', 'permission_classes', (['(IsAuthenticated,)'], {}), '((IsAuthenticated,))\n', (1264, 1284), False, 'from rest_framework.decorators import api_view, permission_classes, authentication_classes\n'), ((1312, 1365), 'rest_framework.decorators.authentication_classes', 'authentication_classes', (['(JSONWebTokenAuthentication,)'], {}), '((JSONWebTokenAuthentication,))\n', (1334, 1365), False, 'from rest_framework.decorators import api_view, permission_classes, authentication_classes\n'), ((1522, 1558), 'django.core.serializers.serialize', 'serializers.serialize', (['"""json"""', 'posts'], {}), "('json', posts)\n", (1543, 1558), False, 'from django.core import serializers\n'), ((1570, 1636), 'django.http.HttpResponse', 'HttpResponse', (['post_list'], {'content_type': '"""text/json-comment-filtered"""'}), "(post_list, content_type='text/json-comment-filtered')\n", (1582, 1636), False, 'from django.http import HttpResponse\n'), ((499, 531), 'django.shortcuts.render', 'render', (['request', '"""register.html"""'], {}), "(request, 'register.html')\n", (505, 531), False, 'from django.shortcuts import render, redirect\n'), ((1168, 1181), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (1176, 1181), False, 'from django.shortcuts import render, redirect\n')] |
#!/usr/bin/python2.6
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittests for the ibmperf module.
If the IBM Performance Inspector tools are installed at "C:\ibmperf\bin" it
will run some tests using the actual tools. However, if the tools are not
installed it still runs a suite of tests using mocked versions of the tools.
"""
__author__ = "<EMAIL> (<NAME>)"
import ibmperf
import logging
import os
import random
import unittest
class MockPopen(object):
  """A mock subprocess.Popen object.

  Implements "returncode" and "communicate", the only attributes/routines
  used by the ibmperf module.

  Attributes:
    returncode: The return code of the mocked sub-process.
  """
  def __init__(self, stdout="", stderr="", returncode=0,
               raise_on_init=None, raise_on_communicate=None):
    """Initializes this mock Popen object with the given output and returncode.

    Args:
      stdout: The data to return for stdout in "communicate".
      stderr: The data to return for stderr in "communicate".
      returncode: The return code to expose via the "returncode" attribute.
      raise_on_init: If this is not None, will cause the constructor to raise
          an error. Expected to be a 2-tuple, containing (type, args), and will
          call "raise type(args)".
      raise_on_communicate: Similar to raise_on_init, but will cause the error
          to be raised on calls to "communicate".
    """
    if raise_on_init:
      raise raise_on_init[0](*raise_on_init[1])
    self._stdout = stdout
    self._stderr = stderr
    self.returncode = returncode
    self._raise_on_communicate = raise_on_communicate

  def communicate(self):
    """Simulates running the command, returning its stdout and stderr.

    Raises an exception if raise_on_communicate was specified in the
    constructor.
    """
    if self._raise_on_communicate:
      # Bug fix: the original "return"ed the constructed exception object
      # instead of raising it, so raise_on_communicate never actually raised
      # as the docstring promises.
      raise self._raise_on_communicate[0](*self._raise_on_communicate[1])
    return (self._stdout, self._stderr)
class MockHardwarePerformanceCounter(ibmperf.HardwarePerformanceCounter):
  """A mocked ibmperf.HardwarePerformanceCounter object.

  Replaces the _Popen member function with one that returns canned results.
  """
  def __init__(self, popen_results, *args, **kwargs):
    """Initializes the mock HardwarePerformanceCounter object.

    Passes args and kwargs directly through to the
    ibmperf.HardwarePerformanceCounter initializer.

    Args:
      popen_results: A list of (type, args, kwargs) 3-tuples that will be
          returned from calls to _Popen, in order.
    """
    # Copy the sequence so pops below never mutate the caller's list.
    self._popen_results = [result for result in popen_results]
    super(MockHardwarePerformanceCounter, self).__init__(*args, **kwargs)

  def AddPopenResult(self, result_tuple):
    """Adds the given result tuple to the queue of results to return.

    Args:
      result_tuple: A (type, args, kwargs) triplet.
    """
    self._popen_results.append(result_tuple)

  def _Popen(self, dummy_command_line):
    """Overrides _Popen from ibmperf.HardwarePerformanceCounter.

    Returns the mocked object at the head of the results queue.
    """
    mock_type, mock_args, mock_kwargs = self._popen_results.pop(0)
    return mock_type(*mock_args, **mock_kwargs)
# A few specific metric names.
_CYCLES = "CYCLES"
_UOPS = "UOPS"

# A list of metrics that we will simulate supporting.
_METRICS = {
    _CYCLES: None,
    "NONHALTED_CYCLES": ("Number of cycles during which the processor is not "
                         "halted (and not in Thermal Trip on Pentium Ms)"),
    "INSTR": "Number of instructions retired",
    _UOPS: "Number of uOps retired",
    "BRANCH": "Number of branch instruction retired",
    "MISPRED_BRANCH": "Number of mispredicted branch instructions retired"}

# A generic command that is successful outputs nothing and returns the default
# error code of 0.
_GENERIC_SUCCESS = (MockPopen, [], {})

# Simulates a successful run of "ddq", indicating that the toolkit is
# installed.
_DDQ_INSTALLED = _GENERIC_SUCCESS

# The simulated output of a successful call to "ptt".
_PTT_OUTPUT = "\n".join([" - %s" % _metric for _metric in _METRICS])
_PTT_SUCCESS = (MockPopen, [], {"stdout": _PTT_OUTPUT})

# The simulated output of a successful call to "mpevt -ld".
# NOTE(review): success is simulated with returncode -1 -- presumably
# ibmperf does not treat mpevt's exit code as fatal; confirm in ibmperf.py.
_MPEVT_OUTPUT = "Id Name Description\n-- ---- -----------"
for i, _metric in enumerate(_METRICS):
  desc = _METRICS[_metric]
  if desc:
    _MPEVT_OUTPUT += "\n%d %s %s" % (100 + i, _metric, desc)
_MPEVT_SUCCESS = (MockPopen, [], {"stdout": _MPEVT_OUTPUT, "returncode": -1})

# This is a set of MockPopen results that imitates a successful initialization
# of the toolkit.
_SUCCESSFUL_INIT = [_DDQ_INSTALLED, _PTT_SUCCESS, _MPEVT_SUCCESS]
def _CreateQueryResults(metrics):
  """Returns a set of made up results for the given metrics.

  Args:
    metrics: An iterable collection of metric names.
  """
  pids = [1015, 1016]
  # Map each metric to a dict of per-PID random counter values.
  return dict(
      (metric, dict((pid, random.randint(100000, 1000000)) for pid in pids))
      for metric in metrics)
def _CreateQueryStdout(results):
  """Returns a "ptt dump" stdout for the given dict of results.

  See ibmperf.py for a full listing of sample output.

  Args:
    results: A dict of results as returned by
        ibmperf.HardwarePerformanceCounters.Query. Must be non-empty.
  """
  stdout = "***** ptt v2.0.8 for x86 ***** pid=1944/0x798 *****\n"
  stdout += "\n"
  # Portability fix: the original used results.keys()[0], which only works
  # on Python 2 where keys() returns a list. next(iter(...)) is equivalent
  # on both Python 2.6+ and Python 3.
  pids = list(results[next(iter(results))].keys())
  for pid in pids:
    stdout += " PID %d is foo\n" % pid
  stdout += "\n"
  stdout += "PTT Facility Per-Thread Information\n"
  stdout += "-----------------------------------\n"
  stdout += "\n"
  stdout += " PID TID Disp Intr"
  for metric in results:
    stdout += " %s" % metric
  stdout += "\n"
  stdout += " --- --- ---- ----"
  for metric in results:
    stdout += " %s" % ("-" * len(metric))
  stdout += "\n"
  for pid in pids:
    # TID/dispatch/interrupt counts are arbitrary filler for the mock.
    tid = random.randint(100, 1000)
    disp = random.randint(1, 10000)
    intr = random.randint(1, 10000)
    metric_values = ""
    for metric in results:
      metric_values += " %d" % results[metric][pid]
    stdout += " %d %d %d %d%s\n" % (pid, tid, disp, intr, metric_values)
  stdout += " "
  stdout += "-".join("%s" % ("-" * len(metric)) for metric in results)
  stdout += "\n"
  stdout += " "
  # NOTE: reuses the last PID's metric_values as the "totals" line; the
  # parser under test does not check these values.
  stdout += metric_values
  stdout += "\n\n"
  stdout += "Execution ended: 1 iterations.\n"
  return stdout
class TestHardwarePerformanceCounter(unittest.TestCase):
  """Unittests for ibmperf.HardwarePerformanceCounter."""
  def setUp(self):
    # By default we create a mock HardwarePerformanceCounter object that
    # successfully initializes the toolkit.
    self._hpc = MockHardwarePerformanceCounter(
        _SUCCESSFUL_INIT)
  def _TestStart(self, metrics):
    """Utility function for starting data collection.

    Args:
      metrics: Iterable collection of metrics to be started.
    """
    self._hpc.AddPopenResult(_GENERIC_SUCCESS) # ptt term
    self._hpc.AddPopenResult(_GENERIC_SUCCESS) # ptt noautoterm
    self._hpc.AddPopenResult(_GENERIC_SUCCESS) # ptt init
    self._hpc.Start(metrics)
  def _TestStop(self):
    """Utility function for stopping data collection."""
    self._hpc.AddPopenResult(_GENERIC_SUCCESS) # ptt term
    self._hpc.Stop()
  # Pylint complains that this need not be a member function, but the
  # unittest machinery requires this.
  # pylint: disable=R0201
  def testInstallsIfNotInstalled(self):
    # A failed "ddq" means "not installed"; a successful "tinstall" follows.
    MockHardwarePerformanceCounter(
        [(MockPopen, [], {"returncode": -1}), # ddq failure.
         (MockPopen, [], {"returncode": 0}), # tinstall success.
         _PTT_SUCCESS, _MPEVT_SUCCESS])
  def testFailedInstall(self):
    self.assertRaises(ibmperf.ExecutionFailed,
                      MockHardwarePerformanceCounter,
                      [(MockPopen, [], {"returncode": -1}), # ddq failure.
                       (MockPopen, [], {"returncode": -1})]) # tinstall failure.
  def testHaveMetrics(self):
    self.assertEqual(set(_METRICS.keys()), set(self._hpc.metrics.keys()))
  def testQueryFailsWhenNotRunning(self):
    self.assertRaises(ibmperf.NotRunning, self._hpc.Query, "foo")
  def testStopFailsWhenNotRunning(self):
    self.assertRaises(ibmperf.NotRunning, self._hpc.Stop)
  def testStartFailsOnInvalidMetric(self):
    self.assertRaises(ibmperf.InvalidMetric,
                      self._TestStart,
                      ["INVALID_METRIC_NAME"])
  def testAllMetricsCanBeStartedIndividually(self):
    for name in self._hpc.metrics:
      self._TestStart([name])
      self._TestStop()
  def testDumpFails(self):
    self._TestStart([_CYCLES])
    # ptt returns 210 when it fails.
    self._hpc.AddPopenResult((MockPopen, [], {"returncode": 210}))
    self.assertRaises(ibmperf.ExecutionFailed,
                      MockHardwarePerformanceCounter.Query,
                      self._hpc,
                      "foo")
  def testUnexpectedDumpOutput(self):
    self._TestStart([_CYCLES])
    stdout = "This is garbage, and is not parsable."
    self._hpc.AddPopenResult((MockPopen, [], {"stdout": stdout}))
    self.assertRaises(ibmperf.UnexpectedOutput,
                      MockHardwarePerformanceCounter.Query,
                      self._hpc,
                      "foo")
  def testWrongMetricsDumped(self):
    # Start CYCLES but fake a dump reporting UOPS; Query must reject it.
    self._TestStart([_CYCLES])
    results = _CreateQueryResults([_UOPS])
    stdout = _CreateQueryStdout(results)
    self._hpc.AddPopenResult((MockPopen, [], {"stdout": stdout}))
    self.assertRaises(ibmperf.UnexpectedOutput,
                      MockHardwarePerformanceCounter.Query,
                      self._hpc,
                      "foo")
  def _TestMetricsFully(self, metrics):
    """Collects the provided metrics for an imaginary process 'foo'.

    This helper function starts the metrics, sleeps for 2 seconds, queries them
    and finally stops them. It ensures that the reported metrics match those
    that were requested to be collected.

    Args:
      metrics: Iterable collection of metrics to be started.
    """
    self._TestStart(metrics)
    expected_results = _CreateQueryResults(metrics)
    query_stdout = _CreateQueryStdout(expected_results)
    self._hpc.AddPopenResult((MockPopen, [], {"stdout": query_stdout}))
    results = self._hpc.Query("foo")
    self.assertTrue(isinstance(results, dict))
    self.assertEqual(expected_results, results)
    self._TestStop()
  def testOneMetricFully(self):
    name = self._hpc.metrics.keys()[0]
    self._TestMetricsFully([name])
  def _GetMaximalMetrics(self):
    """Helper function that returns a set of maximal metrics.

    This returns all free metrics, plus max_counters non-free metrics.
    """
    metrics = list(self._hpc.free_metrics)
    metrics += list(self._hpc.non_free_metrics)[0:self._hpc.max_counters]
    return metrics
  def testMaximalMetricsFully(self):
    metrics = self._GetMaximalMetrics()
    self._TestMetricsFully(metrics)
  def testMaximalMetricsFullyForReal(self):
    # Only run this test if the toolkit is actually present at the
    # default path.
    if (not os.path.isdir(ibmperf.DEFAULT_DIR) or
        not os.path.exists(os.path.join(ibmperf.DEFAULT_DIR, 'ddq.exe'))):
      return
    self._hpc = ibmperf.HardwarePerformanceCounter()
    metrics = self._GetMaximalMetrics()
    self._hpc.Start(metrics)
    try:
      results = self._hpc.Query("python")
      self.assertTrue(isinstance(results, dict))
      self.assertEqual(set(metrics), set(results))
    except ibmperf.ExecutionFailed:
      # We swallow this error, as it can happen if the local machine doesn't
      # actually support per-thread metrics. Some versions of Windows don't.
      pass
    self._hpc.Stop()
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
unittest.main()
| [
"logging.basicConfig",
"os.path.join",
"unittest.main",
"os.path.isdir",
"ibmperf.HardwarePerformanceCounter",
"random.randint"
] | [((12278, 12318), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (12297, 12318), False, 'import logging\n'), ((12321, 12336), 'unittest.main', 'unittest.main', ([], {}), '()\n', (12334, 12336), False, 'import unittest\n'), ((6431, 6456), 'random.randint', 'random.randint', (['(100)', '(1000)'], {}), '(100, 1000)\n', (6445, 6456), False, 'import random\n'), ((6468, 6492), 'random.randint', 'random.randint', (['(1)', '(10000)'], {}), '(1, 10000)\n', (6482, 6492), False, 'import random\n'), ((6504, 6528), 'random.randint', 'random.randint', (['(1)', '(10000)'], {}), '(1, 10000)\n', (6518, 6528), False, 'import random\n'), ((11766, 11802), 'ibmperf.HardwarePerformanceCounter', 'ibmperf.HardwarePerformanceCounter', ([], {}), '()\n', (11800, 11802), False, 'import ibmperf\n'), ((5504, 5535), 'random.randint', 'random.randint', (['(100000)', '(1000000)'], {}), '(100000, 1000000)\n', (5518, 5535), False, 'import random\n'), ((11623, 11657), 'os.path.isdir', 'os.path.isdir', (['ibmperf.DEFAULT_DIR'], {}), '(ibmperf.DEFAULT_DIR)\n', (11636, 11657), False, 'import os\n'), ((11688, 11732), 'os.path.join', 'os.path.join', (['ibmperf.DEFAULT_DIR', '"""ddq.exe"""'], {}), "(ibmperf.DEFAULT_DIR, 'ddq.exe')\n", (11700, 11732), False, 'import os\n')] |
#!/usr/bin/env python3
from dev_aberto import hello
from babel.dates import format_datetime
from datetime import datetime
import gettext
gettext.install('hello', localedir='locale')
if __name__ == '__main__':
date, name = hello()
date = format_datetime(datetime.strptime(date, '%Y-%m-%dT%H:%M:%SZ'))
print(_('Ultimo commit feito em: '), date, _(' por '), name) | [
"datetime.datetime.strptime",
"gettext.install",
"dev_aberto.hello"
] | [((138, 182), 'gettext.install', 'gettext.install', (['"""hello"""'], {'localedir': '"""locale"""'}), "('hello', localedir='locale')\n", (153, 182), False, 'import gettext\n'), ((228, 235), 'dev_aberto.hello', 'hello', ([], {}), '()\n', (233, 235), False, 'from dev_aberto import hello\n'), ((263, 308), 'datetime.datetime.strptime', 'datetime.strptime', (['date', '"""%Y-%m-%dT%H:%M:%SZ"""'], {}), "(date, '%Y-%m-%dT%H:%M:%SZ')\n", (280, 308), False, 'from datetime import datetime\n')] |
# from __future__ import absolute_import
# from __future__ import division
# from __future__ import print_function
# import _init_paths
# from opts import opts
import os
import json
import cv2
import collections
def xychange(a, w, h):
ans = [(a[0]+a[2]/2)/w, (a[1]+a[3]/2)/h, a[2]/w, a[3]/h]
return ans
prefix = '/home/yix/track_traindata/'
with open(prefix + 'coco2017/annotations/person_keypoints_train2017.json', 'r') as load_f:
f = json.load(load_f)
length = len(f['annotations'])
# test = len(f['categories'])
# print(length, test)
dic = collections.defaultdict(list)
dic1 = collections.defaultdict(list)
for i in range(length):
# print(f['annotations'][i])
# print(f['categories'][i])
# break
dic['{0:012d}.jpg'.format(f['annotations'][i]['image_id'])].append(f['annotations'][i]['bbox'])
dic1['{0:012d}.jpg'.format(f['annotations'][i]['image_id'])].append(f['annotations'][i]['keypoints'])
# dic1['{0:012d}.jpg'.format(f['annotations'][i]['image_id'])].append(f['categories'][i]['skeleton'])
# print(f['annotations'][i]['image_id'], f['annotations'][i]['bbox'], f['annotations'][i]['category_id'])
# if f['annotations'][i]['image_id'] == 455677:
# print(f['annotations'][i]['keypoints'])
# break
# print(dic1)
# break
load_f.close()
t = 'key'
if t == 'bbox':
with open("/mnt/track_traindata/coco2017/coco17.train",'w') as f:
for item in dic:
path = os.path.join(prefix, 'coco2017/images/', item)
print(path)
img = cv2.imread(path)
h, w = img.shape[0], img.shape[1]
f.write(os.path.join('coco2017/images/', item) +'\n')
with open("/mnt/track_traindata/coco2017/labels_with_ids/{}.txt".format(item[:-4]),'w') as anno:
for l in dic[item]:
anno.write('0 -1 ')
l = xychange(l, w, h)
for i in range(4):
anno.write(str(l[i]))
if i == 3:
anno.write('\n')
else:
anno.write(' ')
anno.close()
f.close()
else:
for item in dic1:
# path = os.path.join(prefix, 'coco2017/images/', item)
# print(path)
# img = cv2.imread(path)
# h, w = img.shape[0], img.shape[1]
with open(prefix + "coco2017/keypoints/{}.txt".format(item[:-4]),'w') as anno:
for l in dic1[item]:
# print(dic[item])
for i in range(len(l)):
anno.write(str(l[i]))
if (i+1)%51 == 0:
anno.write('\n')
else:
anno.write(' ')
anno.close()
# with open("C:\\downloads\\dataset\\gt.txt",'w') as f:
# for item in dic:
# f.write('0 -1 ')
# for i in range(len(dic[item])):
# f.write(str(dic[item][i]))
# if i == len(dic[item])-1:
# f.write('\n')
# else:
# f.write(' ')
# f.close() | [
"json.load",
"collections.defaultdict",
"os.path.join",
"cv2.imread"
] | [((590, 619), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (613, 619), False, 'import collections\n'), ((628, 657), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (651, 657), False, 'import collections\n'), ((474, 491), 'json.load', 'json.load', (['load_f'], {}), '(load_f)\n', (483, 491), False, 'import json\n'), ((1511, 1557), 'os.path.join', 'os.path.join', (['prefix', '"""coco2017/images/"""', 'item'], {}), "(prefix, 'coco2017/images/', item)\n", (1523, 1557), False, 'import os\n'), ((1602, 1618), 'cv2.imread', 'cv2.imread', (['path'], {}), '(path)\n', (1612, 1618), False, 'import cv2\n'), ((1687, 1725), 'os.path.join', 'os.path.join', (['"""coco2017/images/"""', 'item'], {}), "('coco2017/images/', item)\n", (1699, 1725), False, 'import os\n')] |
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="iam-permissions-guardrails", # Replace with your own username
version="0.0.3",
author="<NAME>",
author_email="<EMAIL>",
description="IAM Permissions Guardrails module",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://code.amazon.com/packages/IAM-Permissions-Guardrails/trees/mainline",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
],
python_requires=">=3.8",
install_requires=["aws-cdk.core>=1.74.0", "aws-cdk.custom-resources>=1.74.0","aws-cdk.aws_lambda>=1.74.0","aws-cdk.aws_iam>=1.74.0"],
)
| [
"setuptools.find_packages"
] | [((491, 517), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (515, 517), False, 'import setuptools\n')] |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import os
import sys
from scrapy.cmdline import execute
if __name__ == '__main__':
sys.path.append(os.path.abspath(__file__))
# execute(['scrapy', 'crawl', 'XiLa'])
execute(['scrapy', 'crawl', 'Kuai'])
| [
"os.path.abspath",
"scrapy.cmdline.execute"
] | [((228, 264), 'scrapy.cmdline.execute', 'execute', (["['scrapy', 'crawl', 'Kuai']"], {}), "(['scrapy', 'crawl', 'Kuai'])\n", (235, 264), False, 'from scrapy.cmdline import execute\n'), ((154, 179), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (169, 179), False, 'import os\n')] |
from django.utils.translation import ugettext_lazy as _
from django.contrib import messages
from django.contrib.auth import logout as logout_user
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponse, JsonResponse
from django.core.mail import send_mail
from django.template.loader import get_template, render_to_string
from django.conf import settings
from events.models.profiles import Organization, Team, UserProfile, Member
from events.models.events import Event, CommonEvent, Place, Attendee
from events.forms import OrganizationForm, NewCommonEventForm
from events import location
from events.utils import slugify
from accounts.models import EmailRecord
import datetime
import simplejson
# Create your views here.
def show_org(request, org_slug):
org = get_object_or_404(Organization, slug=org_slug)
upcoming_events = CommonEvent.objects.filter(organization=org, end_time__gt=datetime.datetime.now()).order_by('start_time')
recent_events = CommonEvent.objects.filter(organization=org, end_time__lte=datetime.datetime.now()).order_by('-start_time')[:5]
context = {
'org': org,
'upcoming_events': upcoming_events,
'recent_events': recent_events,
'member_list': Team.objects.filter(organization=org).order_by('name'),
'can_create_event': request.user.profile.can_create_common_event(org),
'can_edit_org': request.user.profile.can_edit_org(org),
}
return render(request, 'get_together/orgs/show_org.html', context)
@login_required
def edit_org(request, org_slug):
org = get_object_or_404(Organization, slug=org_slug)
if not request.user.profile.can_edit_org(org):
messages.add_message(request, messages.WARNING, message=_('You can not make changes to this organization.'))
return redirect('show-org', org_slug=org.slug)
if request.method == 'GET':
form = OrganizationForm(instance=org)
context = {
'org': org,
'org_form': form,
}
return render(request, 'get_together/orgs/edit_org.html', context)
elif request.method == 'POST':
form = OrganizationForm(request.POST, request.FILES, instance=org)
if form.is_valid():
form.save()
return redirect('show-org', org_slug=org.slug)
else:
context = {
'org': org,
'org_form': form,
}
return render(request, 'get_together/orgs/edit_org.html', context)
else:
return redirect('home')
def show_common_event(request, event_id, event_slug):
event = get_object_or_404(CommonEvent, id=event_id)
context = {
'org': event.organization,
'common_event': event,
'participating_events': event.participating_events.all().order_by('start_time'),
'can_edit_event': False,
}
return render(request, 'get_together/orgs/show_common_event.html', context)
@login_required
def create_common_event(request, org_slug):
org = get_object_or_404(Organization, slug=org_slug)
if not request.user.profile.can_create_common_event(org):
messages.add_message(request, messages.WARNING, message=_('You can not create events for this org.'))
return redirect('show-org', org_id=org.pk)
new_event = CommonEvent(organization=org, created_by=request.user.profile)
if request.method == 'GET':
form = NewCommonEventForm(instance=new_event)
context = {
'org': org,
'event_form': form,
}
return render(request, 'get_together/orgs/create_common_event.html', context)
elif request.method == 'POST':
form = NewCommonEventForm(request.POST, instance=new_event)
if form.is_valid:
new_event = form.save()
return redirect('show-common-event', new_event.id, new_event.slug)
else:
context = {
'org': org,
'event_form': form,
}
return render(request, 'get_together/orgs/create_common_event.html', context)
else:
return redirect('home')
@login_required
def create_common_event_team_select(request, event_id):
teams = request.user.profile.moderating
if len(teams) == 1:
return redirect(reverse('create-event', kwargs={'team_id':teams[0].id}) + '?common=%s'%event_id)
context = {
'common_event_id': event_id,
'teams': teams
}
return render(request, 'get_together/orgs/create_common_event_team_select.html', context)
| [
"django.shortcuts.render",
"django.utils.translation.ugettext_lazy",
"events.models.profiles.Team.objects.filter",
"events.forms.OrganizationForm",
"django.shortcuts.get_object_or_404",
"datetime.datetime.now",
"django.shortcuts.redirect",
"events.forms.NewCommonEventForm",
"events.models.events.Com... | [((919, 965), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Organization'], {'slug': 'org_slug'}), '(Organization, slug=org_slug)\n', (936, 965), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1585, 1644), 'django.shortcuts.render', 'render', (['request', '"""get_together/orgs/show_org.html"""', 'context'], {}), "(request, 'get_together/orgs/show_org.html', context)\n", (1591, 1644), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1706, 1752), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Organization'], {'slug': 'org_slug'}), '(Organization, slug=org_slug)\n', (1723, 1752), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2735, 2778), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['CommonEvent'], {'id': 'event_id'}), '(CommonEvent, id=event_id)\n', (2752, 2778), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3000, 3068), 'django.shortcuts.render', 'render', (['request', '"""get_together/orgs/show_common_event.html"""', 'context'], {}), "(request, 'get_together/orgs/show_common_event.html', context)\n", (3006, 3068), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3140, 3186), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Organization'], {'slug': 'org_slug'}), '(Organization, slug=org_slug)\n', (3157, 3186), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3427, 3489), 'events.models.events.CommonEvent', 'CommonEvent', ([], {'organization': 'org', 'created_by': 'request.user.profile'}), '(organization=org, created_by=request.user.profile)\n', (3438, 3489), False, 'from events.models.events import Event, CommonEvent, Place, Attendee\n'), ((4577, 4663), 'django.shortcuts.render', 'render', (['request', '"""get_together/orgs/create_common_event_team_select.html"""', 'context'], {}), 
"(request, 'get_together/orgs/create_common_event_team_select.html',\n context)\n", (4583, 4663), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1936, 1975), 'django.shortcuts.redirect', 'redirect', (['"""show-org"""'], {'org_slug': 'org.slug'}), "('show-org', org_slug=org.slug)\n", (1944, 1975), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2024, 2054), 'events.forms.OrganizationForm', 'OrganizationForm', ([], {'instance': 'org'}), '(instance=org)\n', (2040, 2054), False, 'from events.forms import OrganizationForm, NewCommonEventForm\n'), ((2155, 2214), 'django.shortcuts.render', 'render', (['request', '"""get_together/orgs/edit_org.html"""', 'context'], {}), "(request, 'get_together/orgs/edit_org.html', context)\n", (2161, 2214), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3374, 3409), 'django.shortcuts.redirect', 'redirect', (['"""show-org"""'], {'org_id': 'org.pk'}), "('show-org', org_id=org.pk)\n", (3382, 3409), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3537, 3575), 'events.forms.NewCommonEventForm', 'NewCommonEventForm', ([], {'instance': 'new_event'}), '(instance=new_event)\n', (3555, 3575), False, 'from events.forms import OrganizationForm, NewCommonEventForm\n'), ((3678, 3748), 'django.shortcuts.render', 'render', (['request', '"""get_together/orgs/create_common_event.html"""', 'context'], {}), "(request, 'get_together/orgs/create_common_event.html', context)\n", (3684, 3748), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2265, 2324), 'events.forms.OrganizationForm', 'OrganizationForm', (['request.POST', 'request.FILES'], {'instance': 'org'}), '(request.POST, request.FILES, instance=org)\n', (2281, 2324), False, 'from events.forms import OrganizationForm, NewCommonEventForm\n'), ((2651, 2667), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (2659, 2667), 
False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3799, 3851), 'events.forms.NewCommonEventForm', 'NewCommonEventForm', (['request.POST'], {'instance': 'new_event'}), '(request.POST, instance=new_event)\n', (3817, 3851), False, 'from events.forms import OrganizationForm, NewCommonEventForm\n'), ((4221, 4237), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (4229, 4237), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1369, 1406), 'events.models.profiles.Team.objects.filter', 'Team.objects.filter', ([], {'organization': 'org'}), '(organization=org)\n', (1388, 1406), False, 'from events.models.profiles import Organization, Team, UserProfile, Member\n'), ((1868, 1919), 'django.utils.translation.ugettext_lazy', '_', (['"""You can not make changes to this organization."""'], {}), "('You can not make changes to this organization.')\n", (1869, 1919), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2396, 2435), 'django.shortcuts.redirect', 'redirect', (['"""show-org"""'], {'org_slug': 'org.slug'}), "('show-org', org_slug=org.slug)\n", (2404, 2435), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2569, 2628), 'django.shortcuts.render', 'render', (['request', '"""get_together/orgs/edit_org.html"""', 'context'], {}), "(request, 'get_together/orgs/edit_org.html', context)\n", (2575, 2628), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3313, 3357), 'django.utils.translation.ugettext_lazy', '_', (['"""You can not create events for this org."""'], {}), "('You can not create events for this org.')\n", (3314, 3357), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3933, 3992), 'django.shortcuts.redirect', 'redirect', (['"""show-common-event"""', 'new_event.id', 'new_event.slug'], {}), "('show-common-event', new_event.id, new_event.slug)\n", (3941, 3992), False, 'from django.shortcuts 
import render, redirect, get_object_or_404\n'), ((4128, 4198), 'django.shortcuts.render', 'render', (['request', '"""get_together/orgs/create_common_event.html"""', 'context'], {}), "(request, 'get_together/orgs/create_common_event.html', context)\n", (4134, 4198), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1046, 1069), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1067, 1069), False, 'import datetime\n'), ((1173, 1196), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1194, 1196), False, 'import datetime\n')] |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import glob as glb
import os
import cv2
import pickle
#################################################################################################################
def create_new_folder (new_dir):
if not os.path.exists(new_dir):
os.makedirs(new_dir)
return
#################################################################################################################
def save_transform_matrix(img_filename, saveas_dir):
img = np.copy(mpimg.imread(img_filename))
img_size = img.shape[1::-1]
# prepare source and destination points to calculate the transform matrix
dim_x = img_size[0]
pt1 = (219, 720)
pt2 = (1110, 720)
pt3 = (675, 442)
pt4 = (602, 442)
pts = (pt1, pt2, pt3, pt4)
src = np.float32(pts).reshape(-1, 2)
dst = np.copy(src)
dst[0][0] = 400
dst[1][0] = dim_x - 400
dst[3][0] = 400
dst[2][0] = dim_x - 400
dst[3][1] = 0
dst[2][1] = 0
# calculate transform matrix
M = cv2.getPerspectiveTransform(src, dst)
# calculate inverse transform matrix
Minv = cv2.getPerspectiveTransform(dst, src)
# save M and Minv in binary format
db_file = open(saveas_dir + 'mtx_transform', 'wb')
db = {}
db['TM'] = M
db['TMinv'] = Minv
pickle.dump(db, db_file)
db_file.close()
return
#get images from folder
images=glb.glob('./*.jpg')
#create folder to save the new img in
new_dir = './transform_matrix/'
create_new_folder (new_dir)
#caluculate transform Matrix (using first image)
test=images[0]
save_transform_matrix(test, new_dir)
| [
"numpy.copy",
"os.path.exists",
"pickle.dump",
"os.makedirs",
"cv2.getPerspectiveTransform",
"matplotlib.image.imread",
"numpy.float32",
"glob.glob"
] | [((1438, 1457), 'glob.glob', 'glb.glob', (['"""./*.jpg"""'], {}), "('./*.jpg')\n", (1446, 1457), True, 'import glob as glb\n'), ((878, 890), 'numpy.copy', 'np.copy', (['src'], {}), '(src)\n', (885, 890), True, 'import numpy as np\n'), ((1066, 1103), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['src', 'dst'], {}), '(src, dst)\n', (1093, 1103), False, 'import cv2\n'), ((1157, 1194), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['dst', 'src'], {}), '(dst, src)\n', (1184, 1194), False, 'import cv2\n'), ((1347, 1371), 'pickle.dump', 'pickle.dump', (['db', 'db_file'], {}), '(db, db_file)\n', (1358, 1371), False, 'import pickle\n'), ((298, 321), 'os.path.exists', 'os.path.exists', (['new_dir'], {}), '(new_dir)\n', (312, 321), False, 'import os\n'), ((331, 351), 'os.makedirs', 'os.makedirs', (['new_dir'], {}), '(new_dir)\n', (342, 351), False, 'import os\n'), ((549, 575), 'matplotlib.image.imread', 'mpimg.imread', (['img_filename'], {}), '(img_filename)\n', (561, 575), True, 'import matplotlib.image as mpimg\n'), ((837, 852), 'numpy.float32', 'np.float32', (['pts'], {}), '(pts)\n', (847, 852), True, 'import numpy as np\n')] |
from django.db.models import Avg, Count
from prof_education.regions.models import Region
from prof_education.students.models import Student
from ..column_sets import ColumnFormats, ColumnSet, ColumnSetItem
from ..columns import AggrColumn, IntegerAggrColumn
from ..filters import ModelChoicesFilter, NumberFilter
from .base import ColumnGroup, DataSource
from .utils import count_true, count_value
class StudentsDataSource(DataSource):
name = "students"
label = "Студенты"
model = Student
def get_filters(self):
return [
NumberFilter("student_age", "age", "Возраст студентов"),
]
def get_columns(self):
return [
AggrColumn("student_count", "Всего", Count("*"), self),
IntegerAggrColumn("student_age_avg", "Средний возраст", Avg("age"), self),
IntegerAggrColumn(
"student_in_residence",
"Проживают в обжещитии",
count_true("in_residence"),
self,
),
IntegerAggrColumn(
"student_is_orphan", "Сироты", count_true("is_orphan"), self
),
IntegerAggrColumn(
"student_is_handicapped", "Инвалиды", count_true("is_handicapped"), self
),
IntegerAggrColumn(
"student_male",
"Мужчины",
count_value("sex", Student.Sexes.male),
self,
),
IntegerAggrColumn(
"student_female",
"Женщины",
count_value("sex", Student.Sexes.female),
self,
),
AggrColumn("excluded_count", "Всего", Count("*"), self),
IntegerAggrColumn("excluded_age_avg", "Средний возраст", Avg("age"), self),
IntegerAggrColumn(
"excluded_in_residence",
"Проживали в обжещитии",
count_true("in_residence"),
self,
),
IntegerAggrColumn(
"excluded_is_orphan", "Сироты", count_true("is_orphan"), self
),
IntegerAggrColumn(
"excluded_is_handicapped",
"Инвалиды",
count_true("is_handicapped"),
self,
),
IntegerAggrColumn(
"excluded_male",
"Мужчины",
count_value("sex", Student.Sexes.male),
self,
),
IntegerAggrColumn(
"excluded_female",
"Женщины",
count_value("sex", Student.Sexes.female),
self,
),
]
def get_columnsets(self):
return [
ColumnSet(
"students",
"Обучающиеся",
[
ColumnSetItem("student_count", [ColumnFormats.emphasis]),
ColumnSetItem("student_age_avg"),
ColumnSetItem("student_in_residence"),
ColumnSetItem("student_is_orphan"),
ColumnSetItem("student_is_handicapped"),
ColumnSetItem("student_male"),
ColumnSetItem("student_female"),
],
),
ColumnSet(
"excluded",
"Выбыло",
[
ColumnSetItem("excluded_count", [ColumnFormats.emphasis]),
ColumnSetItem("excluded_age_avg"),
ColumnSetItem("excluded_in_residence"),
ColumnSetItem("excluded_is_orphan"),
ColumnSetItem("excluded_is_handicapped"),
ColumnSetItem("excluded_male"),
ColumnSetItem("excluded_female"),
],
),
]
def get_columns_query_groups(self):
students_query = Student.objects.with_age().with_status()
learns_query = students_query.filter(status="learns")
excluded_query = students_query.filter(status="is_excluded")
return [
ColumnGroup(
"learns_query",
learns_query,
[
"student_count",
"student_age_avg",
"student_in_residence",
"student_is_orphan",
"student_is_handicapped",
"student_male",
"student_female",
],
),
ColumnGroup(
"excluded_query",
excluded_query,
[
"excluded_count",
"excluded_age_avg",
"excluded_in_residence",
"excluded_is_orphan",
"excluded_is_handicapped",
"excluded_male",
"excluded_female",
],
),
]
| [
"django.db.models.Count",
"prof_education.students.models.Student.objects.with_age",
"django.db.models.Avg"
] | [((728, 738), 'django.db.models.Count', 'Count', (['"""*"""'], {}), "('*')\n", (733, 738), False, 'from django.db.models import Avg, Count\n'), ((824, 834), 'django.db.models.Avg', 'Avg', (['"""age"""'], {}), "('age')\n", (827, 834), False, 'from django.db.models import Avg, Count\n'), ((1705, 1715), 'django.db.models.Count', 'Count', (['"""*"""'], {}), "('*')\n", (1710, 1715), False, 'from django.db.models import Avg, Count\n'), ((1802, 1812), 'django.db.models.Avg', 'Avg', (['"""age"""'], {}), "('age')\n", (1805, 1812), False, 'from django.db.models import Avg, Count\n'), ((3910, 3936), 'prof_education.students.models.Student.objects.with_age', 'Student.objects.with_age', ([], {}), '()\n', (3934, 3936), False, 'from prof_education.students.models import Student\n')] |
#!/usr/bin/env python3
"""Application for managing the PyLith build environment.
"""
import sys
import os
import argparse
import subprocess
import configparser
class Package():
"""Base class for software package.
"""
NAME = None
CLONE_RECURSIVE = False
def __init__(self, config):
if not "base" in config:
raise ValueError(f"Configure missing base settings.")
self.base = config["base"]
if not self.NAME in config:
raise ValueError(f"Configure missing settings for '{self.NAME}'.")
self.config = config[self.NAME]
self.base["debug"] = self.base["debug"] == "True"
self.base["build_threads"] = int(self.base["build_threads"])
self.config["upstream"] = self.config["upstream"] if self.config["upstream"] != "False" else False
@staticmethod
def _display(lines):
print("\n".join(lines))
def _get_src_dir(self, top=False):
return os.path.join(self.base["src_dir"], self.NAME) if not top else self.base["src_dir"]
def _get_build_dir(self):
build_arch = "debug" if self.base["debug"] else "opt"
return os.path.join(self.base["build_dir"], build_arch, self.NAME)
def _get_install_dir(self):
build_arch = "debug" if self.base["debug"] else "opt"
return os.path.join(self.base["install_dir"], build_arch)
def _git_current_branch(self):
"""Get current git branch.
"""
src_dir = self.base["src_dir"]
if not os.path.exists(src_dir):
return None
proc = subprocess.run("git branch", cwd=self.base["src_dir"])
lines = proc.stdout.splitlines()
for line in lines:
if line.startswith("* "):
branch = line[2:].strip()
return branch
return None
def configure(self):
lines = [f"# Configure '{self.NAME}''."]
lines += ["# Generate the configure script using autoreconf."]
lines += ["cd " + self._get_src_dir()]
lines += ["autoreconf -if"]
lines += [""]
lines += ["# Run configure."]
lines += ["cd " + self._get_build_dir()]
configure_fullpath = os.path.join(self._get_src_dir(), "configure")
lines += [f"{configure_fullpath} {self._configure_args()}"]
lines += ["# After running configure, use --build to see the command for building."]
self._display(lines)
def build(self):
branch = self._git_current_branch()
lines = [f"# Build branch '{branch}' for '{self.NAME}'."]
lines += ["cd " + self._get_build_dir()]
lines += [f"make install -j{self.base['build_threads']}"]
lines += ["# After building the software, use --test to see the command for testing."]
self._display(lines)
def test(self):
lines = [f"# Test '{self.NAME}'."]
lines += ["cd " + self._get_build_dir()]
lines += [f"make check -j{self.base['build_threads']}"]
self._display(lines)
def git_clone(self):
lines = [f"# Clone '{self.NAME}'."]
lines += ["cd " + self._get_src_dir(top=True)]
cmd = "git clone "
if self.CLONE_RECURSIVE:
cmd += " --recursive"
if self.config["branch"] != "main":
cmd += f" --branch {self.config['branch']}"
cmd += " " + self.config["repo_url"]
lines += [cmd]
lines += [""]
lines += ["# When you clone a forked repository, you need to fix the cloning of the m4 submodules."]
lines += ["git config submodule.m4.url https://github.com/geodynamics/autoconf_cig.git"]
lines += ["# Repeat for any other m4 submodules, for example `templates/friction/m4`"]
lines += ["git submodule update"]
lines += [""]
lines += ["# After running git clone, use --configure to see how to configure."]
self._display(lines)
def git_set_upstream(self):
if not self.config["upstream"]:
lines = [f"# No upstream repository for '{self.NAME}'."]
else:
lines = [f"# Set upstream repository for '{self.NAME}.'"]
lines += ["cd " + self._get_src_dir()]
lines += ["git remote -v # Show current remote repositories."]
lines += [f"git remote add upstream {self.config['upstream']}"]
lines += ["git remote -v # Verify new upstream"]
self._display(lines)
def git_sync_fork(self, branch="main"):
if not self.config["upstream"]:
lines = [f"# No upstream repository for '{self.NAME}'."]
else:
lines = [f"# Synchronize local branch {branch} for '{self.NAME}.'"]
lines += ["# NOTE: You must have set the upstream repository. See --git-set-upstream."]
lines += ["cd " + self._get_src_dir()]
lines += ["git fetch upstream"]
lines += [f"git checkout {branch}"]
lines += [f"git merge upstream/{branch}"]
self._display(lines)
def git_fetch(self):
lines = [f"# Update local clone for '{self.NAME}'."]
lines += ["cd " + self._get_src_dir()]
lines += [f"git fetch -p"]
self._display(lines)
return
def git_set_branch(self, branch):
current_branch = self._git_current_branch()
lines = [f"# Change from branch '{current_branch}' to branch '{branch}' for '{self.NAME}'."]
lines += ["cd " + self._get_src_dir()]
lines += [f"git checkout {branch}"]
self._display(lines)
return
def git_new_branch(self, branch):
current_branch = self._git_current_branch()
lines = [f"# Create new branch '{branch}' from branch '{current_branch}' for '{self.NAME}'."]
lines += ["cd " + self._get_src_dir()]
lines += [f"git checkout -b {branch}"]
self._display(lines)
return
def git_delete_branch(self, branch):
lines = [f"# Delete local branch '{branch}' from '{self.NAME}'."]
lines += ["cd " + self._get_src_dir()]
lines += [f"git branch -D {branch}"]
self._display(lines)
return
    def git_rebase(self, ref_branch):
        """Display a safety warning and the command for an interactive rebase
        of the current branch onto ``ref_branch``.

        Args:
            ref_branch: Branch (or other git ref) to rebase onto.
        """
        # The block below is cautionary text shown to the user verbatim.
        lines = [
            "DANGER: Rebasing can lead to irreversible changes to your repository!!!",
            "        Only rebase if you are sure you know what you are doing.",
            "",
            "TIP: You should always test your code _after_ rebasing and _before_ pushing to verify",
            "     your changes.",
            "",
            "After successfully rebasing, DO NOT run 'git pull' as git will suggest.",
            "Instead, run 'git push --force'",
            "",
            "If you encounter problems while rebasing, you can abort by running 'git rebase --abort'.",
            "This will return the repository to the state it was in before rebasing.",
            "",
        ]
        lines += [f"# Interactive rebase for '{self.NAME}'."]
        lines += [f"git rebase -i {ref_branch}"]
        self._display(lines)
def git_replace_branch(self, branch):
lines = [f"Delete and checkout branch '{branch}' for '{self.NAME}'."]
lines += [f"git checkout main && git delete -D {branch} && git checkout {branch}"]
self._display(lines)
return
    def _configure_args(self):
        """Return the configure arguments; base implementation returns None.
        Subclasses override this to supply their configure command line.
        """
        return
class Pythia(Package):
    """Build helper for the Pythia package."""
    NAME = "pythia"
    CLONE_RECURSIVE = True
    def _configure_args(self):
        """Assemble the command-line arguments passed to Pythia's configure."""
        flags = [
            f"--prefix={self._get_install_dir()}",
            "--enable-testing",
            "CC=mpicc",
            "CXX=mpicxx",
        ]
        if self.base["debug"]:
            flags.extend(["CFLAGS='-g -Wall'", "CXXFLAGS='-g -Wall'"])
        else:
            flags.extend(["CFLAGS='-g -O3 -DNDEBUG'", "CXXFLAGS='-g -O3 -DNDEBUG'"])
        return " ".join(flags)
class Spatialdata(Package):
    """Build helper for the spatialdata package."""
    NAME = "spatialdata"
    CLONE_RECURSIVE = True
    def _configure_args(self):
        """Assemble the command-line arguments passed to spatialdata's configure."""
        install_dir = self._get_install_dir()
        deps_dir = self.base['deps_dir']
        flags = [
            f"--prefix={install_dir}",
            "--enable-swig",
            "--enable-testing",
            "CC=mpicc",
            "CXX=mpicxx",
            f"CPPFLAGS=\"-I{deps_dir}/include -I{install_dir}/include\"",
            f"LDFLAGS=\"-L{deps_dir}/lib -L{install_dir}/lib\"",
        ]
        if self.base["debug"]:
            flags.extend(["CFLAGS='-g -O0 -Wall'", "CXXFLAGS='-g -O0 -Wall'"])
        else:
            flags.extend(["CFLAGS='-g -O3 -DNDEBUG'", "CXXFLAGS='-g -O3 -DNDEBUG'"])
        return " ".join(flags)
class Petsc(Package):
    """Petsc package.
    PETSc is built in place (PETSC_DIR/PETSC_ARCH) rather than installed to a prefix.
    """
    NAME = "petsc"
    CLONE_RECURSIVE = False
    def __init__(self, config):
        """Constructor.
        Args:
            config: Per-package configuration dictionary (see base Package).
        """
        super().__init__(config)
        # PETSC_ARCH selects the build tree: one arch for debug, one for optimized builds.
        self.petsc_arch = "arch-pylith-debug" if self.base["debug"] else "arch-pylith-opt"
        self.petsc_dir = os.path.join(self.base["src_dir"], "petsc")
    def configure(self):
        """Display the commands for configuring PETSc (uses PETSc's own python3 configure)."""
        # NOTE(review): the message below contains a stray second quote after the name.
        lines = [f"# Configure '{self.NAME}''."]
        lines += ["# Run configure."]
        lines += ["cd " + self._get_src_dir()]
        lines += [f"python3 ./configure {self._configure_args()}"]
        self._display(lines)
    def build(self):
        """Display the commands for building PETSc."""
        lines = [f"# Build '{self.NAME}'."]
        lines += ["cd " + self._get_src_dir()]
        lines += [f"make -j{self.base['build_threads']} PETSC_DIR={self.petsc_dir} PETSC_ARCH={self.petsc_arch}"]
        self._display(lines)
    def test(self):
        """Display the commands for running the PETSc test suite."""
        lines = [f"# Test '{self.NAME}'."]
        lines += ["cd " + self._get_src_dir()]
        lines += [f"make check -j{self.base['build_threads']} PETSC_DIR={self.petsc_dir} PETSC_ARCH={self.petsc_arch}"]
        self._display(lines)
    def _configure_args(self):
        """Return the PETSc configure arguments as a single space-separated string."""
        install_dir = self._get_install_dir()
        args = [
            "--with-c2html=0",
            "--with-lgrind=0",
            "--with-fc=0",
            "--with-x=0",
            "--with-clanguage=C",
            "--with-mpicompilers=1",
            "--with-shared-libraries=1",
            "--with-64-bit-points=1",
            "--with-large-file-io=1",
            "--with-hdf5=1",
            "--download-chaco=1",
            "--download-ml=1",
            "--download-f2cblaslapack=1",
        ]
        if self.base["debug"]:
            args += [
                "--with-debugging=1",
                "CFLAGS='-g -O0 -Wall'",
            ]
        else:
            args += [
                "--with-debugging=0",
                "CFLAGS='-g -O3 -DNDEBUG'",
            ]
        # $HDF5_INCDIR / $HDF5_LIBDIR are expanded by the user's shell when the command is run.
        args += [
            f"CPPFLAGS=\"-I$HDF5_INCDIR -I{self.base['deps_dir']}/include -I{install_dir}/include\"",
            f"LDFLAGS=\"-L$HDF5_LIBDIR -L{self.base['deps_dir']}/lib -L{install_dir}/lib\"",
            f"PETSC_DIR={self.petsc_dir}",
            f"PETSC_ARCH={self.petsc_arch}",
        ]
        return " ".join(args)
class PyLith(Package):
    """Build helper for the PyLith package."""
    NAME = "pylith"
    CLONE_RECURSIVE = True
    def _configure_args(self):
        """Assemble the command-line arguments passed to PyLith's configure."""
        install_dir = self._get_install_dir()
        deps_dir = self.base['deps_dir']
        flags = [
            f"--prefix={install_dir}",
            "--enable-cubit",
            "--enable-hdf5",
            "--enable-swig",
            "--enable-testing",
            "CC=mpicc",
            "CXX=mpicxx",
            f"CPPFLAGS=\"-I$HDF5_INCDIR -I{deps_dir}/include -I{install_dir}/include\"",
            f"LDFLAGS=\"-L$HDF5_LIBDIR -L{deps_dir}/lib -L{install_dir}/lib\"",
        ]
        if self.base["debug"]:
            flags.extend(["CFLAGS='-g -O0 -Wall'", "CXXFLAGS='-g -O0 -Wall'"])
        else:
            flags.extend(["CFLAGS='-g -O3 -DNDEBUG'", "CXXFLAGS='-g -O3 -DNDEBUG'"])
        return " ".join(flags)
def create_package(name, config):
    """Factory: instantiate the Package subclass matching ``name``.

    Args:
        name: One of "pythia", "spatialdata", "petsc", "pylith".
        config: Configuration dictionary forwarded to the package constructor.

    Raises:
        ValueError: If ``name`` is not a known package.
    """
    # Lambdas keep the class lookups lazy, matching the original if/elif chain.
    factories = {
        "pythia": lambda: Pythia(config),
        "spatialdata": lambda: Spatialdata(config),
        "petsc": lambda: Petsc(config),
        "pylith": lambda: PyLith(config),
    }
    if name not in factories:
        raise ValueError(f"Unknown package {name}.")
    return factories[name]()
class App():
    """Main application for managing the PyLith development environment."""
    def __init__(self):
        """Constructor."""
        self.config = None
    def main(self):
        """Main entry point.

        Parses the command line, loads the configuration, and dispatches the
        requested git/build actions to the selected package.
        """
        args = self._parse_command_line()
        self.initialize(args.config)
        if args.show_config:
            self.show_config()
        package = create_package(args.package, self.config)
        if args.git_clone:
            package.git_clone()
        if args.git_set_upstream:
            package.git_set_upstream()
        if args.git_sync_fork:
            package.git_sync_fork()
        if args.git_fetch:
            package.git_fetch()
        if args.git_set_branch:
            package.git_set_branch(args.git_set_branch)
        if args.git_new_branch:
            package.git_new_branch(args.git_new_branch)
        if args.git_delete_branch:
            package.git_delete_branch(args.git_delete_branch)
        if args.git_rebase:
            package.git_rebase(args.git_rebase)
        if args.git_replace_branch:
            package.git_replace_branch(args.git_replace_branch)
        if args.configure:
            package.configure()
        if args.build:
            package.build()
        if args.test:
            package.test()
    def initialize(self, filename, keep_case=True, verbose=False):
        """Set parameters from config file.

        Stores the parsed configuration in ``self.config`` as a dictionary of
        dictionaries (one per section).

        Args:
            filename (str)
                Path to the .cfg file to read.
            keep_case (bool)
                Accepted for backward compatibility; currently unused.
                TODO(review): wire this up (configparser.optionxform) or remove it.
            verbose (bool)
                If True, print out progress.

        Raises:
            IOError: If ``filename`` does not exist.
        """
        if not os.path.isfile(filename):
            # Include the offending path in the message (the former f-string had no placeholder).
            raise IOError(f"Could not find configuration file '{filename}'.")
        if verbose:
            print(f"Fetching parameters from {filename}...")
        config = configparser.ConfigParser()
        config.read(filename)
        self.config = {s: dict(config.items(s)) for s in config.sections()}
    def show_config(self):
        """Write the current configuration to stdout in .cfg format."""
        parser = configparser.ConfigParser()
        parser.read_dict(self.config)
        parser.write(sys.stdout)
    def _parse_command_line(self):
        """Parse command line arguments.

        Returns:
            argparse.Namespace with the parsed arguments.
        """
        DESCRIPTION = (
            "Application for managing the PyLith development environment. "
            "Once you become familiar with the build environment you will not need this utility."
        )
        PACKAGES = ["pythia", "spatialdata", "petsc", "pylith"]
        parser = argparse.ArgumentParser(description=DESCRIPTION)
        parser.add_argument("--config", action="store", dest="config", required=True)
        parser.add_argument("--package", action="store", dest="package", choices=PACKAGES, required=True)
        parser.add_argument("--git-clone", action="store_true", dest="git_clone")
        parser.add_argument("--git-set-upstream", action="store_true", dest="git_set_upstream")
        parser.add_argument("--git-sync-fork", action="store_true", dest="git_sync_fork")
        parser.add_argument("--git-set-branch", action="store", dest="git_set_branch", metavar="BRANCH")
        parser.add_argument("--git-new-branch", action="store", dest="git_new_branch", metavar="BRANCH")
        parser.add_argument("--git-delete-branch", action="store", dest="git_delete_branch", metavar="BRANCH")
        parser.add_argument("--git-fetch", action="store_true", dest="git_fetch")
        parser.add_argument("--git-rebase", action="store", dest="git_rebase", metavar="REF_BRANCH")
        parser.add_argument("--git-replace-branch", action="store", dest="git_replace_branch", metavar="BRANCH")
        parser.add_argument("--configure", action="store_true", dest="configure")
        parser.add_argument("--build", action="store_true", dest="build")
        parser.add_argument("--test", action="store_true", dest="test")
        parser.add_argument("--show-config", action="store_true", dest="show_config")
        args = parser.parse_args()
        return args
if __name__ == "__main__":
    # Script entry point: run the command-line application.
    App().main()
# End of file
| [
"os.path.exists",
"configparser.ConfigParser",
"argparse.ArgumentParser",
"subprocess.run",
"os.path.join",
"os.path.isfile"
] | [((1152, 1211), 'os.path.join', 'os.path.join', (["self.base['build_dir']", 'build_arch', 'self.NAME'], {}), "(self.base['build_dir'], build_arch, self.NAME)\n", (1164, 1211), False, 'import os\n'), ((1322, 1372), 'os.path.join', 'os.path.join', (["self.base['install_dir']", 'build_arch'], {}), "(self.base['install_dir'], build_arch)\n", (1334, 1372), False, 'import os\n'), ((1574, 1628), 'subprocess.run', 'subprocess.run', (['"""git branch"""'], {'cwd': "self.base['src_dir']"}), "('git branch', cwd=self.base['src_dir'])\n", (1588, 1628), False, 'import subprocess\n'), ((9130, 9173), 'os.path.join', 'os.path.join', (["self.base['src_dir']", '"""petsc"""'], {}), "(self.base['src_dir'], 'petsc')\n", (9142, 9173), False, 'import os\n'), ((14339, 14366), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (14364, 14366), False, 'import configparser\n'), ((14572, 14599), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (14597, 14599), False, 'import configparser\n'), ((15050, 15098), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'DESCRIPTION'}), '(description=DESCRIPTION)\n', (15073, 15098), False, 'import argparse\n'), ((961, 1006), 'os.path.join', 'os.path.join', (["self.base['src_dir']", 'self.NAME'], {}), "(self.base['src_dir'], self.NAME)\n", (973, 1006), False, 'import os\n'), ((1510, 1533), 'os.path.exists', 'os.path.exists', (['src_dir'], {}), '(src_dir)\n', (1524, 1533), False, 'import os\n'), ((14137, 14161), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (14151, 14161), False, 'import os\n')] |
from django import forms
from django.forms import ModelForm, CharField
from ckeditor.widgets import CKEditorWidget
from .models import URP, Application
class URPCreateForm(ModelForm):
    """Form for URP creation"""
    # Rich-text editor for the description (overrides the model field's default widget).
    description = CharField(widget=CKEditorWidget())
    class Meta:
        model = URP
        fields = ['title', 'summary', 'description']
class URPUpdateForm(ModelForm):
    """Form for updating/editing URPs"""
    # Rich-text editor for the description; title is intentionally not editable here.
    description = CharField(widget=CKEditorWidget())
    class Meta:
        model = URP
        fields = ['summary', 'description']
class ApplicationCreateForm(ModelForm):
    """Form for Application creation"""
    # Plain-text description; declared before Meta for consistency with the URP forms.
    description = CharField()
    class Meta:
        model = Application
        fields = ['description']
class ApplicationManageForm(forms.Form):
    """Form for updating application status: Accept or Reject"""
    # (stored code, human-readable label) pairs for the status decision.
    ACTIONS = (
        ('A', "Accept"),
        ('R', "Reject"),
    )
    action = forms.ChoiceField(widget=forms.Select, choices=ACTIONS)
| [
"django.forms.ChoiceField",
"ckeditor.widgets.CKEditorWidget",
"django.forms.CharField"
] | [((748, 759), 'django.forms.CharField', 'CharField', ([], {}), '()\n', (757, 759), False, 'from django.forms import ModelForm, CharField\n'), ((954, 1009), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'widget': 'forms.Select', 'choices': 'ACTIONS'}), '(widget=forms.Select, choices=ACTIONS)\n', (971, 1009), False, 'from django import forms\n'), ((254, 270), 'ckeditor.widgets.CKEditorWidget', 'CKEditorWidget', ([], {}), '()\n', (268, 270), False, 'from ckeditor.widgets import CKEditorWidget\n'), ((472, 488), 'ckeditor.widgets.CKEditorWidget', 'CKEditorWidget', ([], {}), '()\n', (486, 488), False, 'from ckeditor.widgets import CKEditorWidget\n')] |
import logging
from collections import OrderedDict
from copy import deepcopy
from datetime import datetime, timedelta
from pathlib import Path
from time import sleep
import cv2
import numpy as np
import pandas as pd
from PyQt5.QtCore import Qt, QTimer, pyqtSlot
from PyQt5.QtGui import QColor, QImage, QPixmap
from PyQt5.QtWidgets import QMessageBox, QStyle, QWidget
from .view import VideoAppViewer
class VideoApp(VideoAppViewer):
    """Video labeling application.

    Plays a video, lets the user draw/delete per-frame bounding-box records
    with the mouse, and exports the collected records to a CSV file.
    """
    def __init__(self, videopath: str, outpath: str, **config):
        """Constructor.

        Arguments:
            videopath {str} -- path of the video to label
            outpath {str} -- path of the CSV file to export records to

        Keyword arguments (config): 'title', 'draw', 'select', 'label',
        'limit_nlabel' -- optional appearance/behavior overrides.
        """
        self.videopath = videopath
        self.outpath = outpath
        self.config = config
        self.title = self.config.get('title', 'PyQt5 video labeling viewer')
        super().__init__(title=self.title)
        # draw config: pen used while the user is drawing a new box
        if self.config.get('draw') and isinstance(self.config['draw'], dict):
            draw_config = self.config['draw']
            self.label_frame.draw_color = draw_config.get('color', QColor(0, 0, 0))
            self.label_frame.draw_thickness = draw_config.get('thickness', 2)
            self.label_frame.draw_style = draw_config.get('style', Qt.SolidLine)
        # select config: pen used to highlight the record closest to the cursor
        if self.config.get('select') and isinstance(self.config['select'], dict):
            select_config = self.config['select']
            self.label_frame.select_color = select_config.get('color', QColor(0, 0, 0))
            self.label_frame.select_thickness = select_config.get('thickness', 3)
            self.label_frame.select_style = select_config.get('style', Qt.SolidLine)
        # record config: pen used when re-drawing saved records on a frame
        check_label = self.config.get('label')
        label_color = self.config['label'].get('color', (0, 0, 0)) if check_label else None
        label_thickness = self.config['label'].get('thickness', 2) if check_label else None
        self.label_color = label_color
        self.label_thickness = label_thickness
        self.limit_nlabel = self.config.get('limit_nlabel', None)
        self.records = []
        # read video
        self.cap = cv2.VideoCapture(self.videopath)
        self.target_frame_idx = 0       # frame index to render next
        self.render_frame_idx = None    # frame index currently rendered
        self.scale_height = self.scale_width = None
        self.is_playing_video = False
        self.is_force_update = False
        self._update_video_info()
        self._update_frame()
        # widget binding
        self.slider_video.setRange(0, self.frame_count-1)
        self.slider_video.sliderMoved.connect(self.on_slider_moved)
        self.slider_video.sliderReleased.connect(self.on_slider_released)
        self.btn_play_video.clicked.connect(self.on_play_video_clicked)
        self.label_frame.mousePressEvent = self.event_frame_mouse_press
        self.label_frame.mouseMoveEvent = self.event_frame_mouse_move
        self.label_frame.mouseReleaseEvent = self.event_frame_mouse_release
        self.btn_previous_record.clicked.connect(self._goto_previous_record)
        self.btn_next_record.clicked.connect(self._goto_next_record)
        self.btn_export_records.clicked.connect(self.save_file)
        self.table_preview_records.doubleClicked.connect(self.event_preview_double_clicked)
        self.show()
    @property
    def frame_count(self):
        """Total number of frames in the video (None when no capture)."""
        return int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT)) if self.cap else None
    @property
    def frame_height(self):
        """Frame height in pixels (None when no capture)."""
        return int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT)) if self.cap else None
    @property
    def frame_width(self):
        """Frame width in pixels (None when no capture)."""
        return int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH)) if self.cap else None
    @property
    def video_fps(self):
        """Video frame rate as an int (None when no capture)."""
        return int(self.cap.get(cv2.CAP_PROP_FPS)) if self.cap else None
    def _ndarray_to_qimage(self, image: np.ndarray):
        """convert cv2 image to pyqt5 image
        Arguments:
            image {np.ndarray} -- original RGB image
        Returns:
            {QImage} -- pyqt5 image format
        """
        return QImage(image, image.shape[1], image.shape[0], QImage.Format_RGB888)
    def _frame_idx_to_hmsf(self, frame_idx: int):
        """convert to hmsf timestamp by given frame idx and fps"""
        assert self.video_fps
        base = datetime.strptime('00:00:00.000000', '%H:%M:%S.%f')
        delta = timedelta(seconds=frame_idx/self.video_fps)
        return (base + delta).strftime('%H:%M:%S.%f')
    def _frame_idx_to_hms(self, frame_idx: int):
        """convert to hms timestamp by given frame idx and fps"""
        assert self.video_fps
        base = datetime.strptime('00:00:00', '%H:%M:%S')
        delta = timedelta(seconds=frame_idx//self.video_fps)
        return (base + delta).strftime('%H:%M:%S')
    def _read_frame(self, frame_idx: int):
        """check frame idx and read frame status than return frame
        Arguments:
            frame_idx {int} -- frame index
        Returns:
            {np.ndarray} -- RGB image in (h, w, c)
        """
        if frame_idx >= self.frame_count:
            self.logger.exception('frame index %d should be less than %d', frame_idx, self.frame_count)
        else:
            self.target_frame_idx = frame_idx
            # seek by frame index (named constant instead of the magic number 1)
            self.cap.set(cv2.CAP_PROP_POS_FRAMES, frame_idx)
            read_success, frame = self.cap.read()
            if read_success:
                frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
                return frame
            self.logger.exception('read #%d frame failed', frame_idx)
    def _play_video(self):
        """play video when button clicked"""
        if self.is_playing_video and self.video_fps:
            frame_idx = min(self.render_frame_idx+1, self.frame_count)
            if frame_idx == self.frame_count:
                self.on_play_video_clicked()
            else:
                self.target_frame_idx = frame_idx
            # BUG FIX: QTimer.singleShot expects milliseconds; the former
            # 1/self.video_fps (a seconds fraction) fired effectively at 0 ms.
            QTimer.singleShot(1000/self.video_fps, self._play_video)
    def _check_coor_in_frame(self, coor_x: int, coor_y: int):
        """check the coordinate in mouse event"""
        return 0 < coor_x < self.scale_width and 0 < coor_y < self.scale_height
    def _update_video_info(self):
        """Refresh the path/shape/fps labels from the current capture."""
        shape = str((self.frame_width, self.frame_height))
        self.label_video_path.setText(self.videopath)
        self.label_video_shape.setText(shape)
        self.label_video_fps.setText(str(self.video_fps))
    def _update_frame(self):
        """read and update image to label"""
        if self.target_frame_idx != self.render_frame_idx or self.is_force_update:
            self.is_force_update = False
            frame = self._read_frame(self.target_frame_idx)
            if frame is not None:
                # draw, convert, resize pixmap
                frame = self.draw_rects(self.target_frame_idx, frame)
                pixmap = QPixmap(self._ndarray_to_qimage(frame))
                self.scale_width = int(min(pixmap.width(), self.screen.width()*0.8))
                self.scale_height = int(pixmap.height() * (self.scale_width / pixmap.width()))
                pixmap = pixmap.scaled(self.scale_width, self.scale_height, Qt.KeepAspectRatio)
                self.label_frame.setPixmap(pixmap)
                self.label_frame.resize(self.scale_width, self.scale_height)
                # sync, update related information
                self._update_frame_status(self.target_frame_idx)
                self.render_frame_idx = self.target_frame_idx
                self.slider_video.setValue(self.render_frame_idx)
        QTimer.singleShot(1000/self.video_fps, self._update_frame)
    def _update_frame_status(self, frame_idx: int, err: str = ''):
        """update frame status
        Arguments:
            frame_idx {int} -- frame index
        Keyword Arguments:
            err {str} -- show status when exception (default: '')
        """
        msg = '#frame ({}/{})'.format(frame_idx, self.frame_count-1)
        if err:
            msg += '\n{}'.format(err)
        self.label_video_status.setText(msg)
    def _get_records_by_frame_idx(self, frame_idx=None):
        """return specfic records by frame index (default: current frame)"""
        frame_idx = frame_idx or self.render_frame_idx
        return list(filter(lambda x: x['frame_idx'] == frame_idx, self.records))
    def _get_nrecord_in_current_frame(self):
        """get the number of records in current frame"""
        current_records = self._get_records_by_frame_idx()
        return len(current_records) if current_records else None
    def _get_closest_record_in_current_frame(self, coor_x: int, coor_y: int):
        """get the closest record by given coor in current frame
        Arguments:
            coor_x {int} -- cooridinate x
            coor_y {int} -- cooridinate y
        Returns:
            {OrderedDict} -- the closest record
        """
        current_records = deepcopy(self._get_records_by_frame_idx())
        for rid, record in enumerate(current_records):
            pt1, pt2 = (record['x1'], record['y1']), (record['x2'], record['y2'])
            if pt1[0] < coor_x < pt2[0] and pt1[1] < coor_y < pt2[1]:
                # distance from the cursor to the record's center
                center = np.array(((pt2[0]+pt1[0])/2, (pt2[1]+pt1[1])/2))
                dist = np.linalg.norm(center - np.array((coor_x, coor_y)))
                current_records[rid]['dist'] = dist
        current_records = list(filter(lambda x: 'dist' in x, current_records))
        if current_records:
            return sorted(current_records, key=lambda x: x['dist'])[0]
    def _remove_record(self, frame_idx: int, pt1: tuple, pt2: tuple):
        """remove record by given value
        Arguments:
            frame_idx {int} -- record frame index
            pt1 {tuple} -- record (x1, y1)
            pt2 {tuple} -- record (x2, y2)
        """
        current_records = self._get_records_by_frame_idx(frame_idx)
        target_record = None
        for record in current_records:
            src_pt1, src_pt2 = (record['x1'], record['y1']), (record['x2'], record['y2'])
            if src_pt1 == pt1 and src_pt2 == pt2:
                target_record = record
        if target_record:
            target_row_idx = self.records.index(target_record)
            self.records.remove(target_record)
            self.remove_record_from_preview(target_row_idx)
    @pyqtSlot()
    def _goto_previous_record(self):
        """Jump to the latest record before the current frame, if any."""
        rest_records = list(filter(lambda x: x['frame_idx'] < self.render_frame_idx, self.records))
        if not rest_records:
            QMessageBox.information(self, 'Info', 'no previous record', QMessageBox.Ok)
        else:
            self.target_frame_idx = rest_records[-1]['frame_idx']
    @pyqtSlot()
    def _goto_next_record(self):
        """Jump to the earliest record after the current frame, if any."""
        rest_records = list(filter(lambda x: x['frame_idx'] > self.render_frame_idx, self.records))
        if not rest_records:
            QMessageBox.information(self, 'Info', 'no next record', QMessageBox.Ok)
        else:
            self.target_frame_idx = rest_records[0]['frame_idx']
    @pyqtSlot()
    def on_slider_released(self):
        """update frame and frame status when the slider released"""
        self.target_frame_idx = self.slider_video.value()
    @pyqtSlot()
    def on_slider_moved(self):
        """update frame status only when the slider moved"""
        self._update_frame_status(frame_idx=self.slider_video.value())
    @pyqtSlot()
    def on_play_video_clicked(self):
        """control to play or pause the video"""
        self.is_playing_video = not self.is_playing_video
        if self.is_playing_video:
            self.btn_play_video.setIcon(self.style().standardIcon(QStyle.SP_MediaPause))
            self._play_video()
        else:
            self.btn_play_video.setIcon(self.style().standardIcon(QStyle.SP_MediaPlay))
    @pyqtSlot()
    def event_frame_mouse_press(self, event):
        """label frame press mouse event
        - Qt.LeftButton: drawing
        - Qt.RightButton: select to delete
        Arguments:
            event {PyQt5.QtGui.QMouseEvent} -- event object
        """
        if self._check_coor_in_frame(event.x(), event.y()) and not self.is_playing_video:
            if event.button() == Qt.LeftButton:
                nrecords = self._get_nrecord_in_current_frame()
                if self.limit_nlabel and nrecords and self.limit_nlabel <= nrecords:
                    self.logger.warning('not available to add a new record (exist=%d, limit=%d)', \
                        nrecords, self.limit_nlabel)
                else:
                    self.label_frame.is_drawing = True
                    self.label_frame.is_selecting = False
                    self.logger.debug('press mouse at (%d, %d)', event.x(), event.y())
                    self.label_frame.pt1 = (event.x(), event.y())
            elif event.button() == Qt.RightButton:
                closest_record = self._get_closest_record_in_current_frame(event.x(), event.y())
                if closest_record:
                    pt1 = (closest_record['x1'], closest_record['y1'])
                    pt2 = (closest_record['x2'], closest_record['y2'])
                    message = '<b>Do you want to delete the record ?</b><br/><br/> \
                    frame index -\t{} <br/> position -\t{} {}'.format(
                        closest_record['frame_idx'], str(pt1), str(pt2))
                    reply = QMessageBox.question(self, 'Delete Record', message, \
                        QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
                    if reply == QMessageBox.Yes:
                        self._remove_record(closest_record['frame_idx'], pt1, pt2)
                        self.is_force_update = True
                        self.update()
    @pyqtSlot()
    def event_frame_mouse_move(self, event):
        """Update the rubber-band box while drawing, or highlight the closest record."""
        if self.label_frame.is_drawing and self._check_coor_in_frame(event.x(), event.y()):
            self.logger.debug('move mouse at (%d, %d)', event.x(), event.y())
            self.label_frame.pt2 = (event.x(), event.y())
            self.update()
        elif not self.label_frame.is_drawing and not self.is_playing_video:
            closest_record = self._get_closest_record_in_current_frame(event.x(), event.y())
            if closest_record:
                self.label_frame.is_selecting = True
                self.label_frame.select_pt1 = (closest_record['x1'], closest_record['y1'])
                self.label_frame.select_pt2 = (closest_record['x2'], closest_record['y2'])
            else:
                self.label_frame.is_selecting = False
                self.label_frame.select_pt1 = self.label_frame.select_pt2 = None
            self.update()
    @pyqtSlot()
    def event_frame_mouse_release(self, event):
        """Finish drawing: create a record for the drawn box and add it to the preview."""
        if self.label_frame.is_drawing:
            self.label_frame.is_drawing = False
            self.logger.debug('release mouse at (%d, %d)', event.x(), event.y())
            if self._check_coor_in_frame(event.x(), event.y()):
                self.label_frame.pt2 = (event.x(), event.y())
            pt1, pt2 = self.label_frame.revise_coor(self.label_frame.pt1, self.label_frame.pt2)
            record = OrderedDict([
                ('timestamp_hms', self._frame_idx_to_hms(self.render_frame_idx)),
                ('timestamp_hmsf', self._frame_idx_to_hmsf(self.render_frame_idx)),
                ('frame_idx', self.render_frame_idx), ('fps', self.video_fps),
                ('frame_height', self.frame_height), ('frame_width', self.frame_width),
                ('scale_height', self.scale_height), ('scale_width', self.scale_width),
                ('x1', pt1[0]), ('y1', pt1[1]), ('x2', pt2[0]), ('y2', pt2[1]),
                ('center_x', (pt1[0]+pt2[0])//2), ('center_y', (pt1[1]+pt2[1])//2)
            ])
            self.records.append(record)
            self.records = sorted(self.records, key=lambda x: x['frame_idx'])
            self.add_record_to_preview(record['timestamp_hms'], \
                                       record['frame_idx'], \
                                       (record['x1'], record['y1']), \
                                       (record['x2'], record['y2']))
            self.label_frame.pt1 = self.label_frame.pt2 = None
            self.is_force_update = True
            self.update()
    @pyqtSlot()
    def event_preview_double_clicked(self):
        """Jump to the frame of the record double-clicked in the preview table."""
        row = self.table_preview_records.currentRow()
        frame_idx = int(self.table_preview_records.item(row, 1).text())
        self.target_frame_idx = frame_idx
    def draw_rects(self, frame_idx: int, frame: np.ndarray):
        """Draw the saved record rectangles for ``frame_idx`` onto ``frame`` (in place)."""
        rest_records = list(filter(lambda x: x['frame_idx'] == frame_idx, self.records))
        if not rest_records:
            return frame
        for record in rest_records:
            pt1, pt2 = (record['x1'], record['y1']), (record['x2'], record['y2'])
            cv2.rectangle(frame, pt1, pt2, self.label_color, self.label_thickness)
        return frame
    def save_file(self):
        """export records to default paths
        - click ok only close message box
        - click close to close PyQt program
        """
        exist_msg = 'File <b>{}</b> exist.<br/><br/>\
                     Do you want to replace?'.format(self.outpath)
        info_msg = 'Save at <b>{}</b><br/>\
                    total records: {}'.format(self.outpath, len(self.records))
        # check the file existense (computed once instead of twice)
        file_exists = Path(self.outpath).exists()
        exist_reply = QMessageBox.No
        if file_exists:
            exist_reply = QMessageBox.question(self, 'File Exist', exist_msg, \
                QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
        if not file_exists or exist_reply == QMessageBox.Yes:
            # classmethod call; the original built a throwaway empty DataFrame first
            df_labels = pd.DataFrame.from_records(self.records)
            df_labels.to_csv(self.outpath, index=False)
        # check if the application is going to close
        reply = QMessageBox.about(self, 'Info', info_msg)
        self.close()
    def keyPressEvent(self, event):
        """global keyboard event"""
        if event.key() in [Qt.Key_Space, Qt.Key_P]:
            self.on_play_video_clicked()
        elif event.key() in [Qt.Key_Right, Qt.Key_D]:
            self.target_frame_idx = min(self.target_frame_idx+self.video_fps, self.frame_count-1)
        elif event.key() in [Qt.Key_Left, Qt.Key_A]:
            self.target_frame_idx = max(0, self.target_frame_idx-self.video_fps)
        else:
            self.logger.debug('clicked %s but no related binding event', str(event.key()))
| [
"cv2.rectangle",
"PyQt5.QtCore.QTimer.singleShot",
"datetime.datetime.strptime",
"pathlib.Path",
"PyQt5.QtGui.QColor",
"PyQt5.QtCore.pyqtSlot",
"PyQt5.QtGui.QImage",
"PyQt5.QtWidgets.QMessageBox.information",
"numpy.array",
"PyQt5.QtWidgets.QMessageBox.question",
"PyQt5.QtWidgets.QMessageBox.abo... | [((10100, 10110), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (10108, 10110), False, 'from PyQt5.QtCore import Qt, QTimer, pyqtSlot\n'), ((10451, 10461), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (10459, 10461), False, 'from PyQt5.QtCore import Qt, QTimer, pyqtSlot\n'), ((10793, 10803), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (10801, 10803), False, 'from PyQt5.QtCore import Qt, QTimer, pyqtSlot\n'), ((10971, 10981), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (10979, 10981), False, 'from PyQt5.QtCore import Qt, QTimer, pyqtSlot\n'), ((11151, 11161), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (11159, 11161), False, 'from PyQt5.QtCore import Qt, QTimer, pyqtSlot\n'), ((11568, 11578), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (11576, 11578), False, 'from PyQt5.QtCore import Qt, QTimer, pyqtSlot\n'), ((13541, 13551), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (13549, 13551), False, 'from PyQt5.QtCore import Qt, QTimer, pyqtSlot\n'), ((14471, 14481), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (14479, 14481), False, 'from PyQt5.QtCore import Qt, QTimer, pyqtSlot\n'), ((16076, 16086), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (16084, 16086), False, 'from PyQt5.QtCore import Qt, QTimer, pyqtSlot\n'), ((1967, 1999), 'cv2.VideoCapture', 'cv2.VideoCapture', (['self.videopath'], {}), '(self.videopath)\n', (1983, 1999), False, 'import cv2\n'), ((3879, 3946), 'PyQt5.QtGui.QImage', 'QImage', (['image', 'image.shape[1]', 'image.shape[0]', 'QImage.Format_RGB888'], {}), '(image, image.shape[1], image.shape[0], QImage.Format_RGB888)\n', (3885, 3946), False, 'from PyQt5.QtGui import QColor, QImage, QPixmap\n'), ((4110, 4161), 'datetime.datetime.strptime', 'datetime.strptime', (['"""00:00:00.000000"""', '"""%H:%M:%S.%f"""'], {}), "('00:00:00.000000', '%H:%M:%S.%f')\n", (4127, 4161), False, 'from datetime import datetime, timedelta\n'), 
((4178, 4223), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(frame_idx / self.video_fps)'}), '(seconds=frame_idx / self.video_fps)\n', (4187, 4223), False, 'from datetime import datetime, timedelta\n'), ((4437, 4478), 'datetime.datetime.strptime', 'datetime.strptime', (['"""00:00:00"""', '"""%H:%M:%S"""'], {}), "('00:00:00', '%H:%M:%S')\n", (4454, 4478), False, 'from datetime import datetime, timedelta\n'), ((4495, 4541), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(frame_idx // self.video_fps)'}), '(seconds=frame_idx // self.video_fps)\n', (4504, 4541), False, 'from datetime import datetime, timedelta\n'), ((5695, 5750), 'PyQt5.QtCore.QTimer.singleShot', 'QTimer.singleShot', (['(1 / self.video_fps)', 'self._play_video'], {}), '(1 / self.video_fps, self._play_video)\n', (5712, 5750), False, 'from PyQt5.QtCore import Qt, QTimer, pyqtSlot\n'), ((7327, 7387), 'PyQt5.QtCore.QTimer.singleShot', 'QTimer.singleShot', (['(1000 / self.video_fps)', 'self._update_frame'], {}), '(1000 / self.video_fps, self._update_frame)\n', (7344, 7387), False, 'from PyQt5.QtCore import Qt, QTimer, pyqtSlot\n'), ((17701, 17742), 'PyQt5.QtWidgets.QMessageBox.about', 'QMessageBox.about', (['self', '"""Info"""', 'info_msg'], {}), "(self, 'Info', info_msg)\n", (17718, 17742), False, 'from PyQt5.QtWidgets import QMessageBox, QStyle, QWidget\n'), ((10289, 10364), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Info"""', '"""no previous record"""', 'QMessageBox.Ok'], {}), "(self, 'Info', 'no previous record', QMessageBox.Ok)\n", (10312, 10364), False, 'from PyQt5.QtWidgets import QMessageBox, QStyle, QWidget\n'), ((10636, 10707), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Info"""', '"""no next record"""', 'QMessageBox.Ok'], {}), "(self, 'Info', 'no next record', QMessageBox.Ok)\n", (10659, 10707), False, 'from PyQt5.QtWidgets import QMessageBox, QStyle, QWidget\n'), ((16634, 16704), 'cv2.rectangle', 
'cv2.rectangle', (['frame', 'pt1', 'pt2', 'self.label_color', 'self.label_thickness'], {}), '(frame, pt1, pt2, self.label_color, self.label_thickness)\n', (16647, 16704), False, 'import cv2\n'), ((17280, 17385), 'PyQt5.QtWidgets.QMessageBox.question', 'QMessageBox.question', (['self', '"""File Exist"""', 'exist_msg', '(QMessageBox.Yes | QMessageBox.No)', 'QMessageBox.No'], {}), "(self, 'File Exist', exist_msg, QMessageBox.Yes |\n QMessageBox.No, QMessageBox.No)\n", (17300, 17385), False, 'from PyQt5.QtWidgets import QMessageBox, QStyle, QWidget\n'), ((929, 944), 'PyQt5.QtGui.QColor', 'QColor', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (935, 944), False, 'from PyQt5.QtGui import QColor, QImage, QPixmap\n'), ((1308, 1323), 'PyQt5.QtGui.QColor', 'QColor', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (1314, 1323), False, 'from PyQt5.QtGui import QColor, QImage, QPixmap\n'), ((5193, 5231), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (5205, 5231), False, 'import cv2\n'), ((8947, 9003), 'numpy.array', 'np.array', (['((pt2[0] + pt1[0]) / 2, (pt2[1] + pt1[1]) / 2)'], {}), '(((pt2[0] + pt1[0]) / 2, (pt2[1] + pt1[1]) / 2))\n', (8955, 9003), True, 'import numpy as np\n'), ((17225, 17243), 'pathlib.Path', 'Path', (['self.outpath'], {}), '(self.outpath)\n', (17229, 17243), False, 'from pathlib import Path\n'), ((17533, 17547), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (17545, 17547), True, 'import pandas as pd\n'), ((9043, 9069), 'numpy.array', 'np.array', (['(coor_x, coor_y)'], {}), '((coor_x, coor_y))\n', (9051, 9069), True, 'import numpy as np\n'), ((13159, 13265), 'PyQt5.QtWidgets.QMessageBox.question', 'QMessageBox.question', (['self', '"""Delete Record"""', 'message', '(QMessageBox.Yes | QMessageBox.No)', 'QMessageBox.No'], {}), "(self, 'Delete Record', message, QMessageBox.Yes |\n QMessageBox.No, QMessageBox.No)\n", (13179, 13265), False, 'from PyQt5.QtWidgets import QMessageBox, QStyle, QWidget\n'), 
((17446, 17464), 'pathlib.Path', 'Path', (['self.outpath'], {}), '(self.outpath)\n', (17450, 17464), False, 'from pathlib import Path\n')] |
from math import floor
from math import ceil

hours_needed = int(input())
days_for_work = int(input())
overtime_workers = int(input())

SHIFT_HOURS = 8

# 10% of each day is lost to training, so only 90% of the days are productive.
productive_days = days_for_work - days_for_work * 10 / 100
regular_hours = productive_days * SHIFT_HOURS
# Every overtime worker contributes 2 extra hours per day of the job.
extra_hours = overtime_workers * (2 * days_for_work)
total_hours = regular_hours + extra_hours

if hours_needed <= total_hours:
    surplus = hours_needed - floor(total_hours)
    print(f"Yes!{abs(surplus)} hours left.")
else:
    shortage = total_hours - hours_needed
    print(f"Not enough time!{ceil(abs(shortage))} hours needed.")
"math.floor"
] | [((398, 413), 'math.floor', 'floor', (['all_time'], {}), '(all_time)\n', (403, 413), False, 'from math import floor\n')] |
"""Test the :class:`~polymatheia.data.reader.LocalReader`."""
import os
import pytest
from lxml.etree import XMLSyntaxError
from polymatheia.data.reader import XMLReader
def test_xml_reader():
    """Verify records are read correctly from the local XML fixture directory."""
    records = list(XMLReader('tests/fixtures/xml_reader_test'))
    assert len(records) == 3
    for record in records:
        assert record
        assert record.id
        assert record.name.first
        assert record.name.last
        assert record.age
        if record.id == '1':
            # Record 1 carries two first names and a special-tags element.
            assert len(record.name.first) == 2
            assert record['special-tags']
def test_xml_reader_removed_file():
    """Verify the XML reader aborts when a listed file disappears before iteration."""
    temp_path = 'tests/fixtures/xml_reader_test/temp.xml'
    # Create an empty file so the reader picks it up, then delete it.
    open(temp_path, 'w').close()
    reader = XMLReader('tests/fixtures/xml_reader_test')
    os.unlink(temp_path)
    with pytest.raises(FileNotFoundError):
        for record in reader:
            assert record
def test_xml_reader_invalid_file():
    """Verify the XML reader aborts on files that are not well-formed XML."""
    with pytest.raises(XMLSyntaxError):
        records = XMLReader('tests/fixtures/xml_reader_invalid_test')
        for record in records:
            assert record
| [
"polymatheia.data.reader.XMLReader",
"pytest.raises",
"os.unlink"
] | [((278, 321), 'polymatheia.data.reader.XMLReader', 'XMLReader', (['"""tests/fixtures/xml_reader_test"""'], {}), "('tests/fixtures/xml_reader_test')\n", (287, 321), False, 'from polymatheia.data.reader import XMLReader\n'), ((819, 862), 'polymatheia.data.reader.XMLReader', 'XMLReader', (['"""tests/fixtures/xml_reader_test"""'], {}), "('tests/fixtures/xml_reader_test')\n", (828, 862), False, 'from polymatheia.data.reader import XMLReader\n'), ((867, 919), 'os.unlink', 'os.unlink', (['"""tests/fixtures/xml_reader_test/temp.xml"""'], {}), "('tests/fixtures/xml_reader_test/temp.xml')\n", (876, 919), False, 'import os\n'), ((929, 961), 'pytest.raises', 'pytest.raises', (['FileNotFoundError'], {}), '(FileNotFoundError)\n', (942, 961), False, 'import pytest\n'), ((1126, 1155), 'pytest.raises', 'pytest.raises', (['XMLSyntaxError'], {}), '(XMLSyntaxError)\n', (1139, 1155), False, 'import pytest\n'), ((1179, 1230), 'polymatheia.data.reader.XMLReader', 'XMLReader', (['"""tests/fixtures/xml_reader_invalid_test"""'], {}), "('tests/fixtures/xml_reader_invalid_test')\n", (1188, 1230), False, 'from polymatheia.data.reader import XMLReader\n')] |
from app.infrastructure.repositories.camera.capture import CameraCapture
def main():
    """Start the camera capture loop."""
    capture = CameraCapture()
    capture.run()
| [
"app.infrastructure.repositories.camera.capture.CameraCapture"
] | [((91, 106), 'app.infrastructure.repositories.camera.capture.CameraCapture', 'CameraCapture', ([], {}), '()\n', (104, 106), False, 'from app.infrastructure.repositories.camera.capture import CameraCapture\n')] |
from fipu_face import retina_face as rf
import binascii
import time
from fipu_face.utils import *
from fipu_face.img_utils import *
from exceptions.image_exception import *
from fipu_face.img_config import *
from fipu_face.segmentation.bg_segmentation import get_non_white_bg_pct
# from fipu_face.facial_landmarks.emotion import *
# from fipu_face.facial_landmarks.glasses import has_glasses
# Maximum difference in percentage in elevation between both eyes (y axis)
# Sometimes the detections are not as accurate so that should be taken into consideration
MAX_EYES_Y_DIFF_PCT = 3
# Maximum percentage difference between left eye-nose : right eye-nose
# This helps to detect if the person is looking to the side
MAX_NOSE_EYES_DIST_DIFF_PCT = 9
# When testing, when true draw bounding box and landmarks
DRAW_MARKS = False
# Crops the face based on the image configuration
def crop_img(frame, f, imc, err):
    """Crop ``frame`` around the detected face ``f`` so the head fills the
    proportions required by the image configuration ``imc``.

    Args:
        frame: source image (numpy array) containing the detected face.
        f: detection result with a ``bbox`` attribute (x1, y1, x2, y2).
        imc: image configuration (target w/h, head width/height ranges, dpi).
        err: error collector; invoked when the crop rectangle would fall
            outside the frame.

    Returns:
        The cropped frame, or the original frame when cropping failed.
    """
    # Just add a little bit space around the bbox
    left = f.bbox[0] * 0.99
    top = f.bbox[1] * 0.99
    right = f.bbox[2] * 1.01
    bottom = f.bbox[3] * 1.01
    y = top
    h = bottom - top
    x = left
    w = right - left
    # Calculate the percentage that the head should have in the image
    # Eg. head should fill 80% of height and 60% of width
    # Here we take the minimum of width and height ranges because the detections never overestimate head size
    imw_pct = np.min(imc.hw_range) / imc.w
    imh_pct = np.min(imc.hh_range) / imc.h
    # Calculate the maximum percentage of head in the image
    max_i_h_pct = np.max(imc.hh_range) / imc.h
    # Calculate the percentage from the top of the head to the top of the image
    total_h_diff_pct = max_i_h_pct - imh_pct + (1 - max_i_h_pct)
    # Calculate pixels between top of the head and top of the image
    total_h_diff = total_h_diff_pct / imh_pct * h
    # Calculate the pixels between top of the head and maximum head range
    head_to_max = (max_i_h_pct - imh_pct) / (1 - imh_pct) * total_h_diff
    # Calculate bottom and top padding (they should be the same)
    pad_tb = (total_h_diff - head_to_max) / 2
    # Cropping top (y) is above the max head range + padding
    y_start = top - head_to_max - pad_tb
    # Cropping bottom (y) is below the face by adding padding
    y_end = bottom + pad_tb
    # Calculate the image height to which the image will be cropped
    i_h = y_end - y_start
    # Image width is proportional to that height based on the image config dimensions
    i_w = i_h * (imc.w / imc.h)
    # Padding to left and right is calculated as (image width - face width) / 2
    pad_lr = abs(w - i_w) / 2
    # Add the calculated padding to the left and right side of the face
    x_start = x - pad_lr
    x_end = right + pad_lr
    # Testing purposes
    """
    print(imc.hh_range[0] / imc.h, h / i_h, imc.hh_range[1] / imc.h)
    print(imc.hw_range[0] / imc.w, w / i_w, imc.hw_range[1] / imc.w)
    print(imh_pct, h / i_h, imc.hh_range[0] / imc.h <= h / i_h <= imc.hh_range[1] / imc.h)
    print(imw_pct, w / (x_end - x_start), imc.hw_range[0] / imc.w <= w / (x_end - x_start) <= imc.hw_range[1] / imc.w)
    print(imc.w / imc.h, i_w / i_h)
    print(pad_tb / i_h, (1 - np.max(imc.hh_range) / imc.h) / 2)
    print("Ratio: ", imc.w / imc.h, i_w / i_h)
    """
    return __do_crop(frame, x_start, x_end, y_start, y_end, err)
# Does the cropping and raises the exception if the cropping points are not in the frame
def __do_crop(frame, x_start, x_end, y_start, y_end, err):
    """Crop ``frame`` to the given rectangle, reporting a "pictured too
    close" error instead when any rectangle edge falls outside the frame."""
    frame_h, frame_w = frame.shape[:2]
    if x_start < 0 or y_start < 0 or x_end > frame_w or y_end > frame_h:
        # Work out which side(s) overflow so the error message can name them.
        margins = [x_start, y_start, frame_w - x_end, frame_h - y_end]
        overflow = [margin < 0 for margin in margins]
        side_names = [SIDES_STR[i] for i, flagged in enumerate(overflow) if flagged]
        err(PICTURED_TO_CLOSE_EXCEPTION, [', '.join(side_names)], overflow)
        return frame
    return frame[int(y_start):int(y_end), int(x_start):int(x_end)]
def check_face_alignment(frame, f, err):
    """Validate that the detected face ``f`` is facing the camera straight on.

    Reports through ``err``:
      - NO_LANDMARKS_EXCEPTION when fewer than 5 landmarks were detected,
      - TILTED_HEAD_EXCEPTION when the eyes are not level within
        MAX_EYES_Y_DIFF_PCT percent of the face height,
      - TURNED_HEAD_EXCEPTION when the nose lies outside the eye span or is
        offset from the eyes by more than MAX_NOSE_EYES_DIST_PCT of face width.

    Args:
        frame: source image (unused; kept for a uniform check signature).
        f: detection result with ``landmark`` (5x2 array) and ``bbox``.
        err: error collector; called with the exception constant and, for
            head errors, a boolean payload indicating the tilt/turn direction.
    """
    # ``np.int`` was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin ``int`` is the drop-in equivalent for ``astype``.
    l = f.landmark.astype(int)
    # Should never happen, but just to make sure
    if len(l) < 5:
        err(NO_LANDMARKS_EXCEPTION)
        return
    # Save the references to the landmarks
    left_eye = l[0]
    right_eye = l[1]
    nose = l[2]
    # Face width and height
    f_w = f.bbox[2] - f.bbox[0]
    f_h = f.bbox[3] - f.bbox[1]
    # Eyes should be leveled - at least within the MAX_EYES_Y_DIFF_PCT percentage
    eyes_tilt = abs(left_eye[1] - right_eye[1]) / f_h * 100
    if eyes_tilt > MAX_EYES_Y_DIFF_PCT:
        err(TILTED_HEAD_EXCEPTION, payload=left_eye[1] < right_eye[1])
    # Is the nose looking left or right?
    # Calculate the difference between the (left_eye-nose) and (right_eye-nose)
    l_n_e = abs(nose[0] - left_eye[0])
    r_n_e = abs(nose[0] - right_eye[0])
    # The percentage in differences between eyes and the nose should be less than MAX_NOSE_EYES_DIST_DIFF_PCT
    nose_tilt = abs(l_n_e - r_n_e) / f_w * 100
    # If the nose x position is smaller than the left eye x position or greater than the right eye x
    # position then the person is looking to the side, otherwise it still may be a slight head tilt
    turns = [nose[0] < left_eye[0], nose[0] > right_eye[0], nose_tilt > MAX_NOSE_EYES_DIST_DIFF_PCT]
    if any(turns):
        is_left = turns[0]
        # If only the relative-distance check fired, guess the turn direction
        # from which eye the nose is closer to (error display only).
        if not any(turns[:2]) and turns[2]:
            is_left = l_n_e < r_n_e
        err(TURNED_HEAD_EXCEPTION, payload=is_left)
# Checks whether the image is blurry
# The blur value also depends on the image resolution,
# so each image configuration should have its own threshold
# Old method which is not very stable
def check_blur(frame, imc, err):
    """Report a blur error when the Laplacian variance of the whole frame
    falls below the configuration's threshold (legacy, unstable method)."""
    grayscale = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    sharpness = cv2.Laplacian(grayscale, cv2.CV_64F).var()
    if sharpness < imc.blur_threshold:
        err(BLURRY_IMAGE_EXCEPTION)
def check_face_blur(frame, f, imc, err):
    """Report a blur error based only on the face region of ``frame``.

    The face crop is resized to roughly the size it will have in the final
    output (using the configuration's head-height range and dpi) so the
    blur threshold behaves consistently across input resolutions.
    """
    left = f.bbox[0]
    top = f.bbox[1]
    right = f.bbox[2]
    bottom = f.bbox[3]
    h, w = frame.shape[:2]
    # Since the face can be outside the frame, the image would be empty with negative numbers
    # Also is it necessary to check for blur when the whole face is not in frame?
    # NOTE(review): the x slice uses min(0, left):max(w, right), which always
    # spans the full frame width; presumably max(0, left):min(right, w) was
    # intended (as done for the y axis) — confirm before changing, the blur
    # threshold was tuned against this behavior.
    face = frame[max(0, int(top)):min(int(bottom), h), min(0, int(left)):max(w, int(right))]
    h, w = face.shape[:2]
    # print(h, w)
    # Scale the head to the approx size to what it would be in the final crop
    # This way we get somewhat consistent results
    scale_y = (imc.hh_range[1] / INCH * imc.dpi) / h
    scale_x = w / h * scale_y * h / w
    img = cv2.resize(face, None, None, fx=scale_x, fy=scale_y)
    img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    blur = np.max(cv2.convertScaleAbs(cv2.Laplacian(img, cv2.CV_64F)))
    # print('Blur: {}'.format(round(blur, 3)))
    if blur < imc.blur_threshold:
        err(BLURRY_IMAGE_EXCEPTION)
# Ensures that only one faces is detected
def check_num_faces(faces, err):
    """Fail fast unless exactly one face was detected.

    On zero or multiple detections the error is recorded, drawn onto the
    image, and the collector exception is raised immediately.
    """
    count = len(faces)
    if count == 1:
        return
    if count == 0:
        err(NO_FACES_EXCEPTION)
    else:
        err(TOO_MANY_FACES_EXCEPTION, [count])
    draw_errors(err, None)
    raise err
# Ensures that the background is white
def check_white_bg(frame, imc, err):
    """Report an error when too much of the segmented background is non-white.

    Segmentation failures are logged and ignored so a broken optional
    dependency never blocks the pipeline.
    """
    try:
        non_white = get_non_white_bg_pct(frame)
        if non_white > imc.max_non_white_bg_pct:
            err(NON_WHITE_BG_EXCEPTION)
    except Exception as e:
        # only occurs when the required tensorflow version is not installed
        print("Error while trying to detect background: ", e)
# Ensures that the background is white
def is_not_white_bg(frame, imc):
    """Return True when the segmented background exceeds the configured
    non-white tolerance; False otherwise or when segmentation fails."""
    try:
        if get_non_white_bg_pct(frame) > imc.max_non_white_bg_pct:
            return True
    except Exception as e:
        # only occurs when the required tensorflow version is not installed
        print("Error while trying to detect background: ", e)
    return False
def detect_face(frame, err):
    """Detect and return the single face in ``frame``.

    The frame is scaled toward the detector's training resolution to speed
    up detection; aborts via ``check_num_faces`` unless exactly one face
    was found.
    """
    detections = rf.detect_faces(frame, scale=calc_scale(frame))
    check_num_faces(detections, err)
    return detections[0]
def pre_process_check(frame):
    """Run the resolution-independent checks: alpha flattening, single-face
    detection and face-alignment validation.

    Returns:
        (frame, face, err) — the flattened frame, the detection result and
        the error collector accumulating any failures.
    """
    frame = alpha_to_white(frame)
    err = ImageException(frame)
    face = detect_face(frame, err)
    if DRAW_MARKS:
        # Testing aid: draw bbox/landmarks before cropping, while the
        # landmark coordinates still refer to the original frame.
        draw_marks(frame, face)
    check_face_alignment(frame, face, err)
    return frame, face, err
# Detects and crop's the image if all checks are successful
def detect(frame, imcs=None):
    """Run the full detection/validation/crop pipeline on ``frame``.

    Args:
        frame: input image (numpy array).
        imcs: list of image configurations to produce; defaults to [ImgX].

    Returns:
        Cropped frame for a single configuration, otherwise a dict of
        configuration name -> cropped frame.

    Raises:
        ImageException: when any check failed; the collected errors are
            drawn onto the image before raising.
    """
    imcs = imcs or [ImgX] # Default value
    # Do the pre checks which are irrelevant of the image size
    frame, f, err = pre_process_check(frame)
    frames = {}
    white_checks = []
    for imc in imcs:
        # Need to check before resizing and cropping
        # otherwise we would need to perform another detection
        check_face_blur(frame, f, imc, err)
        __frame = crop_img(frame, f, imc, err)
        __frame = scale_img(__frame, imc)
        # Check background of the final image
        # check_white_bg(__frame, imc, err)
        white_checks.append(is_not_white_bg(__frame, imc))
        # Testing: ellipse around the head
        if DRAW_MARKS:
            draw_ellipses(__frame, imc)
        frames[imc.name] = __frame
    # Flag the background only when every produced crop is non-white.
    if all(white_checks):
        err(NON_WHITE_BG_EXCEPTION)
    if err.has_errors():
        draw_errors(err, f)
        raise err
    return frames if len(imcs) > 1 else frames[imcs[0].name]
# Shortcut method to crop the image and covert it back to the given format
def __do_detect(frame, img_formats, encoding):
    """Crop ``frame`` for every requested format name and encode each result."""
    configs = [get_format(fmt) for fmt in img_formats]
    result = detect(frame, configs)
    if type(result) != dict:
        # ``detect`` returns a bare frame when only one format was requested.
        result = {img_formats[0]: result}
    return {fmt: convert_img(img, encoding) for fmt, img in result.items()}
# API method called when the file is uploaded using standard file upload
def get_from_file(file, img_formats, encoding):
    """API entry: crop an image supplied via a standard multipart file upload."""
    raw_bytes = file.read()
    return __do_detect(cv2_read_img(raw_bytes), img_formats, encoding)
# API method called when the file is uploaded as a field in base64 format
def get_from_base64(uri, img_formats, encoding):
    """API entry: crop an image supplied as a (possibly prefixed) base64 URI."""
    # Strip an optional ``data:...;base64,`` prefix before decoding.
    payload = uri.split('base64,')[-1]
    try:
        image = cv2_read_img(base64.b64decode(payload))
        return __do_detect(image, img_formats, encoding)
    except binascii.Error:
        raise_error(INVALID_IMAGE_FORMAT)
# API method called when the file is uploaded as a field in bytes format
def get_from_bytes(img_bytes, img_formats, encoding):
    """API entry: crop an image supplied as raw bytes."""
    image = cv2_read_img(img_bytes)
    return __do_detect(image, img_formats, encoding)
def draw_errors(err, f):
    """Visualize every collected error directly on ``err.image`` so the user
    can see what went wrong: blur overlay, face markers, direction arrows
    for head tilt/turn, and background/crop hints.

    Args:
        err: the error collector (carries the image and recorded errors).
        f: the face detection result, or None when no single face was found.
    """
    thickness = 2 * calc_thickness_scale(err.image)
    if err.has_error(BLURRY_IMAGE_EXCEPTION):
        err.image = add_blur(err.image)
    if err.has_error(NO_FACES_EXCEPTION) or err.has_error(TOO_MANY_FACES_EXCEPTION):
        draw_no_face(err.image)
    if err.has_error(PICTURED_TO_CLOSE_EXCEPTION):
        draw_no_space(err.image, f, err.get_payload(PICTURED_TO_CLOSE_EXCEPTION), COLOR_RED, thickness)
    if err.has_error(TURNED_HEAD_EXCEPTION):
        # Draw arrow to opposite direction to where the person face is rotated
        draw_head_turn(err.image, f, not err.get_payload(TURNED_HEAD_EXCEPTION), COLOR_RED, thickness)
    if err.has_error(TILTED_HEAD_EXCEPTION):
        # Draw arrow to opposite direction to where the person is looking
        draw_head_tilt(err.image, f, not err.get_payload(TILTED_HEAD_EXCEPTION), COLOR_RED, thickness)
    if err.has_error(NON_WHITE_BG_EXCEPTION):
        draw_non_white_bg(err.image, f, COLOR_WHITE, thickness * 2)
"""
def check_face_emotion(frame, f, imc):
emotion = detect_emotion(frame, f)
if emotion not in imc.allowed_emotions:
if emotion == EMOTION_NONE:
raise ImageException(
"Nemoguće očitati emociju. Maknite sve predmete koji sakrivaju lice (maska, ruke itd.)")
else:
raise ImageException(
"Nedozvoljena emocija {}. Dozvoljene emocije: {}".format(emotion, imc.allowed_emotions))
def check_face_obstacles(frame, f, imc):
if not imc.glasses and has_glasses(frame, f):
raise ImageException("Nočale nisu dozvoljene.")
"""
| [
"fipu_face.segmentation.bg_segmentation.get_non_white_bg_pct"
] | [((8062, 8089), 'fipu_face.segmentation.bg_segmentation.get_non_white_bg_pct', 'get_non_white_bg_pct', (['frame'], {}), '(frame)\n', (8082, 8089), False, 'from fipu_face.segmentation.bg_segmentation import get_non_white_bg_pct\n'), ((8524, 8551), 'fipu_face.segmentation.bg_segmentation.get_non_white_bg_pct', 'get_non_white_bg_pct', (['frame'], {}), '(frame)\n', (8544, 8551), False, 'from fipu_face.segmentation.bg_segmentation import get_non_white_bg_pct\n')] |
import numpy as np
from paz.abstract import Processor
from paz.backend.keypoints import project_points3D
from paz.backend.keypoints import build_cube_points3D
from paz.backend.image import draw_cube
from paz.processors import DrawBoxes3D
class DrawBoxes3D(Processor):
    def __init__(self, camera, class_to_dimensions, thickness=1):
        """Draw 3D bounding boxes of multiple objects.

        # Arguments
            camera: Instance of ``paz.backend.camera.Camera``.
            class_to_dimensions: Dictionary mapping class names to a list
                ``[model_width, model_height, model_depth]`` (matches the
                unpack order in ``_build_points``).
            thickness: Int. Thickness of the drawn 3D box lines.
        """
        super(DrawBoxes3D, self).__init__()
        self.camera = camera
        self.class_to_dimensions = class_to_dimensions
        # Pre-compute cube corners once; removed a leftover debug print here.
        self.class_to_points = self._build_points(self.class_to_dimensions)
        self.thickness = thickness

    def _build_points(self, class_to_dimensions):
        """Pre-compute the 3D cube corner points for every known class."""
        class_to_cube3D = {}
        for class_name, dimensions in class_to_dimensions.items():
            width, height, depth = dimensions
            class_to_cube3D[class_name] = build_cube_points3D(width, height, depth)
        return class_to_cube3D

    def call(self, image, pose6D):
        """Project the cube of ``pose6D``'s class into ``image`` and draw it."""
        points3D = self.class_to_points[pose6D.class_name]
        points2D = project_points3D(points3D, pose6D, self.camera)
        points2D = points2D.astype(np.int32)
        draw_cube(image, points2D, thickness=self.thickness)
        return image
| [
"paz.backend.keypoints.project_points3D",
"paz.backend.keypoints.build_cube_points3D",
"paz.backend.image.draw_cube"
] | [((1404, 1451), 'paz.backend.keypoints.project_points3D', 'project_points3D', (['points3D', 'pose6D', 'self.camera'], {}), '(points3D, pose6D, self.camera)\n', (1420, 1451), False, 'from paz.backend.keypoints import project_points3D\n'), ((1505, 1557), 'paz.backend.image.draw_cube', 'draw_cube', (['image', 'points2D'], {'thickness': 'self.thickness'}), '(image, points2D, thickness=self.thickness)\n', (1514, 1557), False, 'from paz.backend.image import draw_cube\n'), ((1161, 1202), 'paz.backend.keypoints.build_cube_points3D', 'build_cube_points3D', (['width', 'height', 'depth'], {}), '(width, height, depth)\n', (1180, 1202), False, 'from paz.backend.keypoints import build_cube_points3D\n')] |
import scrapy
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from emails.items import ImageItem
class EmailTemplateSpire(CrawlSpider):
    """Crawl reallygoodemails.com 'welcome' listings and save each email
    template page into the ``output/`` directory."""

    name = "email"

    start_urls = [
        'https://reallygoodemails.com/category/inaugural/welcome/',
    ]

    rules = [
        # Follow category listing pages and their pagination.
        Rule(LinkExtractor(
            allow=(
                r'inaugural/welcome/[a-z-]+/',
                # Pagination segments are numeric: the original pattern used
                # \D (non-digit), which can never match a page number.
                r'inaugural/welcome/page/\d+/'
            ),
            unique=True,
            # Must be a tuple/list of domains; the original bare parenthesized
            # string only worked because LinkExtractor also accepts a str.
            allow_domains=('reallygoodemails.com',),
            restrict_css=('.entry_hover', '.page-numbers', )),
            follow=True),
        # Follow the individual template pages and download them.
        Rule(LinkExtractor(allow=(r'.*\.html', ),
            unique=True,
            allow_domains=('reallygoodemails.com',),
            restrict_css=('.entry_copy', )),
            callback='download_email',
            follow=True),
    ]

    def download_email(self, response):
        """Persist the raw HTML of a template page as output/<last URL segment>."""
        page = response.url.split("/")[-1]
        with open(f'output/{page}', 'wb') as f:
            f.write(response.body)
        self.log('Saved file %s' % page)
| [
"scrapy.linkextractors.LinkExtractor"
] | [((324, 527), 'scrapy.linkextractors.LinkExtractor', 'LinkExtractor', ([], {'allow': "('inaugural\\\\/welcome\\\\/[a-z-]+\\\\/', 'inaugural\\\\/welcome\\\\/page/\\\\D+\\\\/')", 'unique': '(True)', 'allow_domains': '"""reallygoodemails.com"""', 'restrict_css': "('.entry_hover', '.page-numbers')"}), "(allow=('inaugural\\\\/welcome\\\\/[a-z-]+\\\\/',\n 'inaugural\\\\/welcome\\\\/page/\\\\D+\\\\/'), unique=True, allow_domains=\n 'reallygoodemails.com', restrict_css=('.entry_hover', '.page-numbers'))\n", (337, 527), False, 'from scrapy.linkextractors import LinkExtractor\n'), ((651, 773), 'scrapy.linkextractors.LinkExtractor', 'LinkExtractor', ([], {'allow': "('.*\\\\.html',)", 'unique': '(True)', 'allow_domains': '"""reallygoodemails.com"""', 'restrict_css': "('.entry_copy',)"}), "(allow=('.*\\\\.html',), unique=True, allow_domains=\n 'reallygoodemails.com', restrict_css=('.entry_copy',))\n", (664, 773), False, 'from scrapy.linkextractors import LinkExtractor\n')] |
from matplotlib import pyplot as plt
from collections import deque
from threading import Lock, Thread
import myo
import numpy as np
import serial
import csv
import re
import time
from datetime import datetime
## serial port and rate for reading F/T sensor data
SERIAL_PORT = 'COM1'
SERIAL_RATE = 115200
## initializing Myo armband
class EmgCollector(myo.DeviceListener):
    """Myo device listener that buffers the most recent ``n`` EMG samples.

    The ``on_*`` callbacks are invoked from the Myo SDK's background
    thread, so the sample buffer is guarded by a lock.
    """

    def __init__(self, n):
        # Buffer capacity; deque(maxlen=n) discards the oldest sample.
        self.n = n
        self.lock = Lock()
        self.emg_data_queue = deque(maxlen=n)

    def get_emg_data(self):
        # Return a thread-safe snapshot of the buffered (timestamp, emg) pairs.
        with self.lock:
            return list(self.emg_data_queue)

    def on_connected(self, event):
        # Ask the armband to stream raw EMG as soon as it connects.
        event.device.stream_emg(True)

    def on_emg(self, event):
        # Called from the SDK thread; append under the lock.
        with self.lock:
            self.emg_data_queue.append((event.timestamp, event.emg))
def main():
    """Record ~50 s of synchronized Myo EMG and Z-axis force samples to 01.csv."""
    myo.init(sdk_path='E:\\sensors\\myo-sdk-win-0.9.0\\') ## type the path to myo-sdk
    hub = myo.Hub()
    listener = EmgCollector(1)
    ser = serial.Serial(SERIAL_PORT, SERIAL_RATE)
    ser.write(b'CV 4\r')  # device-specific setup command; see the F/T sensor protocol manual
    with open('01.csv', 'w', newline='') as csvfile:
        fieldnames = ['Fz', 'emg0', 'emg1', 'emg2', 'emg3', 'emg4', 'emg5', 'emg6', 'emg7']
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        with hub.run_in_background(listener.on_event):
            timeout = time.time() + 50 ## setting the recording time for 50 seconds
            starttime = datetime.now()
            while True:
                if listener.get_emg_data() != []:
                    ser.write(b'QR\r') ## this command asks to receive Z-axis force data
                    a = ser.inWaiting()  # number of bytes waiting in the serial input buffer
                    time.sleep(0.003)  # give the sensor a moment to answer
                    r = ser.read(a).decode('ascii')
                    # Strip protocol echo/control characters, keeping only the Fz digits.
                    p = re.sub(r'[-\n,QR, CV, >, , \r]', "", r)
                    if p != "":
                        # Pair the force reading with the latest EMG sample.
                        emg = listener.get_emg_data()[0][1]
                        writer.writerow({'Fz': p, 'emg0': emg[0],'emg1': emg[1],'emg2': emg[2],'emg3': emg[3],'emg4': emg[4],'emg5': emg[5],'emg6': emg[6], 'emg7': emg[7]})
                if time.time() > timeout:
                    break
            endtime = datetime.now()
            print('test time: {}'.format(endtime - starttime))
# Script entry point: run the synchronized EMG + force recording session.
if __name__ == '__main__':
    main()
| [
"csv.DictWriter",
"collections.deque",
"threading.Lock",
"time.sleep",
"datetime.datetime.now",
"myo.Hub",
"serial.Serial",
"myo.init",
"re.sub",
"time.time"
] | [((804, 857), 'myo.init', 'myo.init', ([], {'sdk_path': '"""E:\\\\sensors\\\\myo-sdk-win-0.9.0\\\\"""'}), "(sdk_path='E:\\\\sensors\\\\myo-sdk-win-0.9.0\\\\')\n", (812, 857), False, 'import myo\n'), ((899, 908), 'myo.Hub', 'myo.Hub', ([], {}), '()\n', (906, 908), False, 'import myo\n'), ((952, 991), 'serial.Serial', 'serial.Serial', (['SERIAL_PORT', 'SERIAL_RATE'], {}), '(SERIAL_PORT, SERIAL_RATE)\n', (965, 991), False, 'import serial\n'), ((456, 462), 'threading.Lock', 'Lock', ([], {}), '()\n', (460, 462), False, 'from threading import Lock, Thread\n'), ((490, 505), 'collections.deque', 'deque', ([], {'maxlen': 'n'}), '(maxlen=n)\n', (495, 505), False, 'from collections import deque\n'), ((1183, 1229), 'csv.DictWriter', 'csv.DictWriter', (['csvfile'], {'fieldnames': 'fieldnames'}), '(csvfile, fieldnames=fieldnames)\n', (1197, 1229), False, 'import csv\n'), ((1448, 1462), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1460, 1462), False, 'from datetime import datetime\n'), ((2245, 2259), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2257, 2259), False, 'from datetime import datetime\n'), ((1343, 1354), 'time.time', 'time.time', ([], {}), '()\n', (1352, 1354), False, 'import time\n'), ((1733, 1750), 'time.sleep', 'time.sleep', (['(0.003)'], {}), '(0.003)\n', (1743, 1750), False, 'import time\n'), ((1829, 1873), 're.sub', 're.sub', (['"""[-\\\\n,QR, CV, >, \x06, \\\\r]"""', '""""""', 'r'], {}), "('[-\\\\n,QR, CV, >, \\x06, \\\\r]', '', r)\n", (1835, 1873), False, 'import re\n'), ((2160, 2171), 'time.time', 'time.time', ([], {}), '()\n', (2169, 2171), False, 'import time\n')] |
"""JUnit metric collector."""
from datetime import datetime
from dateutil.parser import parse
from base_collectors import SourceUpToDatenessCollector, XMLFileSourceCollector
from collector_utilities.functions import parse_source_response_xml
from collector_utilities.type import Response
class JUnitSourceUpToDateness(XMLFileSourceCollector, SourceUpToDatenessCollector):
    """Collector to collect the Junit report age."""

    async def _parse_source_response_date_time(self, response: Response) -> datetime:
        """Override to parse the timestamp from the response."""
        tree = await parse_source_response_xml(response)
        if tree.tag == "testsuite":
            test_suites = [tree]
        else:
            test_suites = tree.findall("testsuite")
        # Only the run date matters (not the exact time), so the first
        # <testsuite>'s timestamp is good enough. When there is no
        # <testsuite> at all, return datetime.now() rather than datetime.min:
        # other Junit XML files in the same measurement may carry a
        # timestamp, and the base collector keeps the oldest value it sees.
        if not test_suites:
            return datetime.now()
        return parse(test_suites[0].get("timestamp", ""))
| [
"datetime.datetime.now",
"collector_utilities.functions.parse_source_response_xml"
] | [((603, 638), 'collector_utilities.functions.parse_source_response_xml', 'parse_source_response_xml', (['response'], {}), '(response)\n', (628, 638), False, 'from collector_utilities.functions import parse_source_response_xml\n'), ((1476, 1490), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1488, 1490), False, 'from datetime import datetime\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from erpnext.accounts.doctype.sales_taxes_and_charges_template.sales_taxes_and_charges_template \
import valdiate_taxes_and_charges_template
class PurchaseTaxesandChargesTemplate(Document):
	def validate(self):
		# Reuse the validation shared with the sales-side tax template.
		valdiate_taxes_and_charges_template(self)

	def autoname(self):
		# Name the template "<title> - <company abbreviation>".
		if self.company and self.title:
			company_abbr = frappe.db.get_value('Company', self.company, 'abbr')
			self.name = '{0} - {1}'.format(self.title, company_abbr)
| [
"frappe.db.get_value",
"erpnext.accounts.doctype.sales_taxes_and_charges_template.sales_taxes_and_charges_template.valdiate_taxes_and_charges_template"
] | [((464, 505), 'erpnext.accounts.doctype.sales_taxes_and_charges_template.sales_taxes_and_charges_template.valdiate_taxes_and_charges_template', 'valdiate_taxes_and_charges_template', (['self'], {}), '(self)\n', (499, 505), False, 'from erpnext.accounts.doctype.sales_taxes_and_charges_template.sales_taxes_and_charges_template import valdiate_taxes_and_charges_template\n'), ((572, 624), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Company"""', 'self.company', '"""abbr"""'], {}), "('Company', self.company, 'abbr')\n", (591, 624), False, 'import frappe\n')] |
from django.db import models
from django.utils.translation import gettext_lazy as _
from wagtail.admin.edit_handlers import FieldPanel, TabbedInterface, ObjectList
from .state import StateCampaignPageBase
from areas.widgets.place_widgets import PlaceChooser
from offices.widgets import OfficeTypeChooser, LocalOfficeChooser
class LocalCampaignPageBase(StateCampaignPageBase):
    # Geographic place this local campaign targets.  PROTECT: a Place with
    # attached campaigns cannot be deleted.
    place_ref = models.ForeignKey(
        'areas.Place',
        on_delete=models.PROTECT,
        related_name='local_campaigns',
    )

    office_panels = StateCampaignPageBase.office_panels + [
        # Place chooser whose options are filtered by the state selected on
        # the base page's form (linked via the state_ref field's DOM id).
        FieldPanel('place_ref', widget=PlaceChooser(linked_fields={
            'state_ref': {'id': 'id_state_ref'}
        })),
    ]
class LocalCampaignPage(LocalCampaignPageBase):
    class Meta:
        verbose_name = "Local Campaign"

    # The local office the campaign is run for; nullable because a campaign
    # may be created before an office is chosen.
    local_office_ref = models.ForeignKey(
        'offices.LocalOffice',
        verbose_name=_('office'),
        on_delete=models.PROTECT,
        related_name='local_campaigns',
        null=True,
    )

    office_panels = LocalCampaignPageBase.office_panels + [
        # Choosers cascade: office type filtered by state, local office
        # filtered by state + place + office type (linked via DOM ids).
        FieldPanel('office_type_ref', widget=OfficeTypeChooser(linked_fields={
            'state_ref': {'id': 'id_state_ref'} # TODO: Unused but keep. Filter by area?
        })),
        FieldPanel('local_office_ref', widget=LocalOfficeChooser(linked_fields={
            'state_ref': {'id': 'id_state_ref'},
            'place_ref': {'id': 'id_place_ref'},
            'office_type_ref': {'id': 'id_office_type_ref'},
        })),
    ]

    edit_handler = TabbedInterface([
        ObjectList(StateCampaignPageBase.content_panels, heading='Content'),
        ObjectList(office_panels, heading='Office'),
        ObjectList(StateCampaignPageBase.promote_panels, heading='Promote'),
        ObjectList(StateCampaignPageBase.settings_panels, heading='Settings', classname="settings"),
    ])

    template = 'campaigns/campaign_page.html'
    parent_page_types = ['campaigns.YearPage']
    subpage_types = []

    def save(self, *args, **kwargs):
        # Mirror the concrete refs into the generic base-class fields so
        # base-class queries keep working.
        self.area_ref = self.place_ref
        self.office_ref = self.local_office_ref
        super().save(*args, **kwargs)
| [
"offices.widgets.OfficeTypeChooser",
"django.db.models.ForeignKey",
"django.utils.translation.gettext_lazy",
"offices.widgets.LocalOfficeChooser",
"areas.widgets.place_widgets.PlaceChooser",
"wagtail.admin.edit_handlers.ObjectList"
] | [((397, 492), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""areas.Place"""'], {'on_delete': 'models.PROTECT', 'related_name': '"""local_campaigns"""'}), "('areas.Place', on_delete=models.PROTECT, related_name=\n 'local_campaigns')\n", (414, 492), False, 'from django.db import models\n'), ((921, 932), 'django.utils.translation.gettext_lazy', '_', (['"""office"""'], {}), "('office')\n", (922, 932), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1582, 1649), 'wagtail.admin.edit_handlers.ObjectList', 'ObjectList', (['StateCampaignPageBase.content_panels'], {'heading': '"""Content"""'}), "(StateCampaignPageBase.content_panels, heading='Content')\n", (1592, 1649), False, 'from wagtail.admin.edit_handlers import FieldPanel, TabbedInterface, ObjectList\n'), ((1659, 1702), 'wagtail.admin.edit_handlers.ObjectList', 'ObjectList', (['office_panels'], {'heading': '"""Office"""'}), "(office_panels, heading='Office')\n", (1669, 1702), False, 'from wagtail.admin.edit_handlers import FieldPanel, TabbedInterface, ObjectList\n'), ((1712, 1779), 'wagtail.admin.edit_handlers.ObjectList', 'ObjectList', (['StateCampaignPageBase.promote_panels'], {'heading': '"""Promote"""'}), "(StateCampaignPageBase.promote_panels, heading='Promote')\n", (1722, 1779), False, 'from wagtail.admin.edit_handlers import FieldPanel, TabbedInterface, ObjectList\n'), ((1789, 1884), 'wagtail.admin.edit_handlers.ObjectList', 'ObjectList', (['StateCampaignPageBase.settings_panels'], {'heading': '"""Settings"""', 'classname': '"""settings"""'}), "(StateCampaignPageBase.settings_panels, heading='Settings',\n classname='settings')\n", (1799, 1884), False, 'from wagtail.admin.edit_handlers import FieldPanel, TabbedInterface, ObjectList\n'), ((619, 684), 'areas.widgets.place_widgets.PlaceChooser', 'PlaceChooser', ([], {'linked_fields': "{'state_ref': {'id': 'id_state_ref'}}"}), "(linked_fields={'state_ref': {'id': 'id_state_ref'}})\n", (631, 684), False, 'from 
areas.widgets.place_widgets import PlaceChooser\n'), ((1139, 1209), 'offices.widgets.OfficeTypeChooser', 'OfficeTypeChooser', ([], {'linked_fields': "{'state_ref': {'id': 'id_state_ref'}}"}), "(linked_fields={'state_ref': {'id': 'id_state_ref'}})\n", (1156, 1209), False, 'from offices.widgets import OfficeTypeChooser, LocalOfficeChooser\n'), ((1323, 1488), 'offices.widgets.LocalOfficeChooser', 'LocalOfficeChooser', ([], {'linked_fields': "{'state_ref': {'id': 'id_state_ref'}, 'place_ref': {'id': 'id_place_ref'},\n 'office_type_ref': {'id': 'id_office_type_ref'}}"}), "(linked_fields={'state_ref': {'id': 'id_state_ref'},\n 'place_ref': {'id': 'id_place_ref'}, 'office_type_ref': {'id':\n 'id_office_type_ref'}})\n", (1341, 1488), False, 'from offices.widgets import OfficeTypeChooser, LocalOfficeChooser\n')] |
import RPi.GPIO as GPIO
import time
from PololuDriver import PololuDriver
# Step angle of the motor in degrees (1.8 deg = 200 full steps/revolution).
res = 1.8
# Microstepping multiplier applied when converting degrees to steps.
step_amount = 1
class Stepper():
    """Thin convenience wrapper around a PololuDriver-driven stepper motor."""

    def __init__(self):
        self.Motor1 = PololuDriver(dir_pin=26, step_pin=20, enable_pin=21)

    def step_num(self, num, dir='forward'):
        """Advance the motor by ``num`` raw steps in direction ``dir``."""
        self.Motor1.TurnStep(Dir=dir, steps=num, stepdelay=0.001)

    def step_deg(self, deg, dir='forward'):
        """Advance the motor by ``deg`` degrees, converted via the module's
        step angle (``res``) and microstep multiplier (``step_amount``)."""
        raw_steps = int(deg / res * step_amount)
        self.Motor1.TurnStep(Dir=dir, steps=raw_steps, stepdelay=0.001)

    def step(self, dir='forward'):
        """Advance the motor by a single step."""
        self.Motor1.TurnStep(Dir=dir, steps=1, stepdelay=0.001)
if __name__=="__main__":
stepper = Stepper()
while True:
inp = raw_input()
if(inp=="f"):
stepper.step("forward")
else:
stepper.step("backward");
# Motor1 = DRV8825(dir_pin=13, step_pin=19, enable_pin=12, mode_pins=(16, 17, 20))
# Motor1.SetMicroStep("hardware","") #set with DIP switches
#
# Motor1.TurnStep(Dir='forward', steps=8, stepdelay = 0.005)
# time.sleep(0.5)
# Motor1.TurnStep(Dir='backward', steps=8, stepdelay = 0.005)
# Motor1.Stop()
| [
"PololuDriver.PololuDriver"
] | [((165, 217), 'PololuDriver.PololuDriver', 'PololuDriver', ([], {'dir_pin': '(26)', 'step_pin': '(20)', 'enable_pin': '(21)'}), '(dir_pin=26, step_pin=20, enable_pin=21)\n', (177, 217), False, 'from PololuDriver import PololuDriver\n')] |
import hyperopt
import csv
import json
import traceback
import os.path
from pprint import pprint
import datetime
import time
import numpy.random
import threading
import queue
import copy
import tempfile
import random
import subprocess
import concurrent.futures
import tempfile
import functools
import math
import atexit
import jsonschema
import pkg_resources
from hypermax.execution import Execution
from hypermax.hyperparameter import Hyperparameter
from hypermax.results_analyzer import ResultsAnalyzer
from hypermax.algorithms.atpe_optimizer import ATPEOptimizer
from hypermax.algorithms.human_guided_optimizer_wrapper import HumanGuidedOptimizerWrapper
from hypermax.algorithms.tpe_optimizer import TPEOptimizer
from hypermax.algorithms.random_search_optimizer import RandomSearchOptimizer
from hypermax.algorithms.adaptive_bayesian_hyperband_optimizer import AdaptiveBayesianHyperband
from hypermax.configuration import Configuration
class Optimizer:
resultInformationKeys = [
'trial',
'status',
'loss',
'time',
'log',
'error'
]
def __init__(self, configuration):
self.config = Configuration(configuration)
self.searchConfig = configuration.get('search', {})
# jsonschema.validate(self.searchConfig, self.configurationSchema())
self.space = self.config.createHyperparameterSpace()
self.threadExecutor = concurrent.futures.ThreadPoolExecutor()
self.resultsAnalyzer = ResultsAnalyzer(configuration)
self.results = []
self.resultFutures = []
self.best = None
self.bestLoss = None
self.thread = threading.Thread(target=lambda: self.optimizationThread(), daemon=True if configuration.get("ui", {}).get("enabled", True) else False)
self.totalTrials = self.searchConfig.get("iterations")
self.trialsSinceResultsUpload = None
self.resultsExportFuture = None
self.currentTrials = []
self.allWorkers = set(range(self.config.data['function'].get('parallel', 1)))
self.occupiedWorkers = set()
self.trialNumber = 0
self.lastATPEParameters = None
self.lastLockedParameters = None
self.atpeParamDetails = None
self.tpeOptimizer = TPEOptimizer()
self.atpeOptimizer = ATPEOptimizer()
self.abhOptimizer = AdaptiveBayesianHyperband(self.atpeOptimizer, self.searchConfig.get("min_budget", 1), self.searchConfig.get("max_budget", 100), self.searchConfig.get("eta", 3))
self.humanGuidedATPEOptimizer = HumanGuidedOptimizerWrapper(self.atpeOptimizer)
self.randomSearchOptimizer = RandomSearchOptimizer()
def __del__(self):
if self.threadExecutor:
self.threadExecutor.shutdown(wait=True)
@classmethod
def configurationSchema(self):
""" This method returns the configuration schema for the optimization module. The schema
is a standard JSON-schema object."""
return {
"type": "object",
"properties": {
"method": {"type": "string", "enum": ['atpe', 'tpe', 'random']},
"iterations": {"type": "number"},
"budget": {"type": "number"}
},
"required": ['method', 'iterations']
}
def completed(self):
return len(self.results)
def sampleNext(self):
if self.searchConfig['method'] == 'tpe':
return self.tpeOptimizer.recommendNextParameters(self.config.data['hyperparameters'], self.results, self.currentTrials)
elif self.searchConfig['method'] == 'random':
return self.randomSearchOptimizer.recommendNextParameters(self.config.data['hyperparameters'], self.results, self.currentTrials)
elif self.searchConfig['method'] == 'atpe':
params = self.humanGuidedATPEOptimizer.recommendNextParameters(self.config.data['hyperparameters'], self.results, self.currentTrials)
self.lastATPEParameters = self.atpeOptimizer.lastATPEParameters
self.lastLockedParameters = self.atpeOptimizer.lastLockedParameters
self.atpeParamDetails = self.atpeOptimizer.atpeParamDetails
return params
elif self.searchConfig['method'] == 'abh':
params = self.abhOptimizer.recommendNextParameters(self.config.data['hyperparameters'], self.results, self.currentTrials)
self.lastATPEParameters = self.atpeOptimizer.lastATPEParameters
self.lastLockedParameters = self.atpeOptimizer.lastLockedParameters
self.atpeParamDetails = self.atpeOptimizer.atpeParamDetails
return params
def computeCurrentBest(self):
best = None
bestLoss = None
for result in self.results:
if (best is None and result['loss'] is not None ) or (result['loss'] is not None and result['loss'] < bestLoss):
best = result
bestLoss = result['loss']
self.best = best
self.bestLoss = bestLoss
def startOptmizationJob(self):
availableWorkers = list(sorted(self.allWorkers.difference(self.occupiedWorkers)))
sampleWorker = availableWorkers[0]
sample = None
while sample is None:
# Hedge against any exceptions in the atpe optimizer.
try:
sample = self.sampleNext()
except Exception:
traceback.print_exc()
pass
def testSample(params, trial, worker):
currentTrial = {
"start": datetime.datetime.now(),
"trial": trial,
"worker": worker,
"params": copy.deepcopy(params)
}
self.currentTrials.append(currentTrial)
start = datetime.datetime.now()
execution = Execution(self.config.data['function'], parameters=params, worker_n=worker)
modelResult = execution.run()
end = datetime.datetime.now()
result = Hyperparameter(self.config.data['hyperparameters']).convertToFlatValues(params)
for key in params.keys():
if key.startswith("$"):
result[key] = params[key]
result['trial'] = trial
self.resultsAnalyzer.makeDirs(os.path.join(self.resultsAnalyzer.directory, "logs"))
if 'loss' in modelResult:
result['loss'] = modelResult['loss']
elif 'accuracy' in modelResult:
result['loss'] = modelResult['accuracy']
if 'status' in modelResult:
result['status'] = modelResult['status']
else:
result['status'] = 'ok'
if 'log' in modelResult:
fileName = os.path.join(self.resultsAnalyzer.directory, "logs", "trial_" + str(trial) + ".txt")
with open(fileName, "wt") as file:
file.write(modelResult['log'])
result['log'] = fileName
else:
result['log'] = ''
if 'error' in modelResult:
result['error'] = modelResult['error']
else:
result['error'] = ''
if 'time' in modelResult:
result['time'] = modelResult['time']
else:
result['time'] = (end-start).total_seconds()
self.currentTrials.remove(currentTrial)
return result
def onCompletion(worker, future):
self.occupiedWorkers.remove(worker)
self.results.append(future.result())
self.computeCurrentBest()
if not self.config.data.get("ui", {}).get("enabled", True):
pprint(future.result())
if self.resultsExportFuture is None or (self.resultsExportFuture.done() and len(self.results) > 5):
self.resultsExportFuture = self.threadExecutor.submit(
lambda: self.outputResultsWithBackup(self.config.data.get("results", {}).get("graphs", True)))
else:
self.outputResultsWithBackup(False)
if 'hypermax_results' in self.config.data:
if self.trialsSinceResultsUpload is None or self.trialsSinceResultsUpload >= self.config.data['hypermax_results']['upload_frequency']:
self.saveResultsToHypermaxResultsRepository()
self.trialsSinceResultsUpload = 1
else:
self.trialsSinceResultsUpload += 1
self.occupiedWorkers.add(sampleWorker)
sampleFuture = self.threadExecutor.submit(testSample, sample, self.trialNumber, sampleWorker)
sampleFuture.add_done_callback(functools.partial(onCompletion, sampleWorker))
self.trialNumber += 1
return sampleFuture
def runOptimizationThread(self):
self.thread.start()
def outputResultsWithBackup(self, graphs, workers=1):
self.resultsAnalyzer.outputResultsFolder(self, graphs, workers=workers)
directory_head, directory_tail = os.path.split(self.resultsAnalyzer.directory)
backup_directory = os.path.join(directory_head, ".backup_" + directory_tail + "~")
self.resultsAnalyzer.outputResultsFolder(self, graphs, directory=backup_directory, workers=workers)
def optimizationThread(self):
# Make sure we output basic results if the process is killed for some reason.
atexit.register(lambda: self.outputResultsWithBackup(False))
futures = []
for worker in range(min(len(self.allWorkers), self.totalTrials - len(self.results))):
futures.append(self.startOptmizationJob())
time.sleep(1.0)
while (len(self.results) + len(self.currentTrials)) < self.totalTrials:
completedFuture = list(concurrent.futures.wait(futures, return_when=concurrent.futures.FIRST_COMPLETED)[0])[0]
futures.remove(completedFuture)
time.sleep(0.05)
futures.append(self.startOptmizationJob())
concurrent.futures.wait(futures)
# We are completed, so we can allocate a full contingent of workers
self.outputResultsWithBackup(True, workers=4)
def exportGuidanceJSON(self, fileName):
with open(fileName, 'wt') as file:
json.dump(self.humanGuidedATPEOptimizer.guidanceOptions, file, indent=4, sort_keys=True)
def importGuidanceJSON(self, fileName):
with open(fileName, 'rt') as file:
self.humanGuidedATPEOptimizer.guidanceOptions = json.load(file)
def exportResultsCSV(self, fileName):
allKeys = set()
for result in self.results:
for key in result:
allKeys.add(key)
fieldNames = self.resultInformationKeys + sorted(allKeys.difference(set(self.resultInformationKeys))) # Make sure we keep the order of the field names consistent when writing the csv
with open(fileName, 'wt') as file:
writer = csv.DictWriter(file, fieldnames=fieldNames if len(self.results) > 0 else [], dialect='unix')
writer.writeheader()
writer.writerows(self.results)
def importResultsCSV(self, fileName):
with open(fileName) as file:
reader = csv.DictReader(file)
results = list(reader)
newResults = []
for result in results:
newResult = {}
for key,value in result.items():
if value is not None and value != "":
try:
if '.' in value or 'e' in value:
newResult[key] = float(value)
else:
newResult[key] = int(value)
except ValueError:
newResult[key] = value
elif key == 'loss':
newResult[key] = None
elif key == 'log':
newResult[key] = ''
else:
newResult[key] = None
newResults.append(newResult)
self.results = newResults
self.computeCurrentBest()
self.trialNumber = len(self.results)
def saveResultsToHypermaxResultsRepository(self):
try:
hypermaxResultsConfig = self.config.data['hypermax_results']
with tempfile.TemporaryDirectory() as directory:
process = subprocess.run(['git', 'clone', '<EMAIL>:electricbrainio/hypermax-results.git'], cwd=directory, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
hypermaxResultsDirectory = os.path.join(directory, 'hypermax-results', hypermaxResultsConfig['name'])
self.resultsAnalyzer.outputResultsFolder(self, detailed=False, directory=hypermaxResultsDirectory)
with open(os.path.join(hypermaxResultsDirectory, "metadata.json"), 'wt') as file:
json.dump(self.config.data['hypermax_results'], file, indent=4)
process = subprocess.run(['git', 'add', hypermaxResultsDirectory], cwd=os.path.join(directory, 'hypermax-results'))
process = subprocess.run(['git', 'commit', '-m', 'Hypermax automatically storing results for model ' + hypermaxResultsConfig['name'] + ' with ' + str(len(self.results)) + " trials."], cwd=os.path.join(directory, 'hypermax-results'), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
process = subprocess.run(['git push'], cwd=os.path.join(directory, 'hypermax-results'), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
except Exception as e:
print(e)
| [
"tempfile.TemporaryDirectory",
"hypermax.algorithms.atpe_optimizer.ATPEOptimizer",
"hypermax.algorithms.tpe_optimizer.TPEOptimizer",
"csv.DictReader",
"hypermax.hyperparameter.Hyperparameter",
"subprocess.run",
"hypermax.algorithms.random_search_optimizer.RandomSearchOptimizer",
"time.sleep",
"datet... | [((1155, 1183), 'hypermax.configuration.Configuration', 'Configuration', (['configuration'], {}), '(configuration)\n', (1168, 1183), False, 'from hypermax.configuration import Configuration\n'), ((1487, 1517), 'hypermax.results_analyzer.ResultsAnalyzer', 'ResultsAnalyzer', (['configuration'], {}), '(configuration)\n', (1502, 1517), False, 'from hypermax.results_analyzer import ResultsAnalyzer\n'), ((2271, 2285), 'hypermax.algorithms.tpe_optimizer.TPEOptimizer', 'TPEOptimizer', ([], {}), '()\n', (2283, 2285), False, 'from hypermax.algorithms.tpe_optimizer import TPEOptimizer\n'), ((2315, 2330), 'hypermax.algorithms.atpe_optimizer.ATPEOptimizer', 'ATPEOptimizer', ([], {}), '()\n', (2328, 2330), False, 'from hypermax.algorithms.atpe_optimizer import ATPEOptimizer\n'), ((2560, 2607), 'hypermax.algorithms.human_guided_optimizer_wrapper.HumanGuidedOptimizerWrapper', 'HumanGuidedOptimizerWrapper', (['self.atpeOptimizer'], {}), '(self.atpeOptimizer)\n', (2587, 2607), False, 'from hypermax.algorithms.human_guided_optimizer_wrapper import HumanGuidedOptimizerWrapper\n'), ((2645, 2668), 'hypermax.algorithms.random_search_optimizer.RandomSearchOptimizer', 'RandomSearchOptimizer', ([], {}), '()\n', (2666, 2668), False, 'from hypermax.algorithms.random_search_optimizer import RandomSearchOptimizer\n'), ((5790, 5813), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5811, 5813), False, 'import datetime\n'), ((5838, 5913), 'hypermax.execution.Execution', 'Execution', (["self.config.data['function']"], {'parameters': 'params', 'worker_n': 'worker'}), "(self.config.data['function'], parameters=params, worker_n=worker)\n", (5847, 5913), False, 'from hypermax.execution import Execution\n'), ((5974, 5997), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5995, 5997), False, 'import datetime\n'), ((8709, 8754), 'functools.partial', 'functools.partial', (['onCompletion', 'sampleWorker'], {}), '(onCompletion, sampleWorker)\n', (8726, 
8754), False, 'import functools\n'), ((9678, 9693), 'time.sleep', 'time.sleep', (['(1.0)'], {}), '(1.0)\n', (9688, 9693), False, 'import time\n'), ((9954, 9970), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (9964, 9970), False, 'import time\n'), ((10299, 10391), 'json.dump', 'json.dump', (['self.humanGuidedATPEOptimizer.guidanceOptions', 'file'], {'indent': '(4)', 'sort_keys': '(True)'}), '(self.humanGuidedATPEOptimizer.guidanceOptions, file, indent=4,\n sort_keys=True)\n', (10308, 10391), False, 'import json\n'), ((10536, 10551), 'json.load', 'json.load', (['file'], {}), '(file)\n', (10545, 10551), False, 'import json\n'), ((11245, 11265), 'csv.DictReader', 'csv.DictReader', (['file'], {}), '(file)\n', (11259, 11265), False, 'import csv\n'), ((5565, 5588), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5586, 5588), False, 'import datetime\n'), ((5682, 5703), 'copy.deepcopy', 'copy.deepcopy', (['params'], {}), '(params)\n', (5695, 5703), False, 'import copy\n'), ((12403, 12432), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (12430, 12432), False, 'import tempfile\n'), ((12473, 12625), 'subprocess.run', 'subprocess.run', (["['git', 'clone', '<EMAIL>:electricbrainio/hypermax-results.git']"], {'cwd': 'directory', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['git', 'clone',\n '<EMAIL>:electricbrainio/hypermax-results.git'], cwd=directory, stdout=\n subprocess.PIPE, stderr=subprocess.PIPE)\n", (12487, 12625), False, 'import subprocess\n'), ((5420, 5441), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (5439, 5441), False, 'import traceback\n'), ((6020, 6071), 'hypermax.hyperparameter.Hyperparameter', 'Hyperparameter', (["self.config.data['hyperparameters']"], {}), "(self.config.data['hyperparameters'])\n", (6034, 6071), False, 'from hypermax.hyperparameter import Hyperparameter\n'), ((12968, 13031), 'json.dump', 'json.dump', (["self.config.data['hypermax_results']", 
'file'], {'indent': '(4)'}), "(self.config.data['hypermax_results'], file, indent=4)\n", (12977, 13031), False, 'import json\n')] |
import doddq.helpers as helpers
import numpy as np
from qiskit import QuantumCircuit, QuantumRegister
from qiskit.circuit.library import HGate, PhaseGate, QFT
from typing import Callable
class GeneralizedQPEInfo:
def __init__(self, qr_eval: QuantumRegister, qr_state: QuantumRegister, circuit: QuantumCircuit) -> None:
self.qr_eval = qr_eval
self.qr_state = qr_state
self.circuit = circuit
class GeneralizedQPE(QuantumCircuit):
def __init__(
self,
num_eval_qubits: int,
num_state_qubits: int,
qc_eval_begin: QuantumCircuit,
qc_eval_end: QuantumCircuit,
qc_state_evolution: Callable[[GeneralizedQPEInfo, int], None],
name: str = 'GeneralizedQPE'
) -> None:
qr_eval = QuantumRegister(num_eval_qubits, 'eval')
qr_state = QuantumRegister(num_state_qubits, 'state')
circuit = QuantumCircuit(qr_eval, qr_state, name=name)
info = GeneralizedQPEInfo(qr_eval, qr_state, circuit)
circuit.compose(qc_eval_begin, qubits=qr_eval[:], inplace=True)
for i in range(num_eval_qubits):
qc_state_evolution(info, i)
circuit.compose(qc_eval_end, qubits=qr_eval[:], inplace=True)
super().__init__(*circuit.qregs, name=circuit.name)
self.compose(circuit.to_gate(), qubits=self.qubits, inplace=True)
def standard_qpe(
num_eval_qubits: int,
num_state_qubits: int,
unitary: QuantumCircuit,
name: str = 'StandardQPE'
) -> GeneralizedQPE:
def state_evolution(info: GeneralizedQPEInfo, index: int) -> None:
controlled_unitary_pow = unitary.power(2 ** index).control()
info.circuit.compose(controlled_unitary_pow, qubits=[info.qr_eval[index]] + info.qr_state[:], inplace=True)
qc_eval_begin = QuantumCircuit(num_eval_qubits)
qc_eval_begin.append(HGate(), [qc_eval_begin.qubits])
qc_eval_end = QFT(num_eval_qubits, inverse=True, do_swaps=False).reverse_bits()
return GeneralizedQPE(num_eval_qubits, num_state_qubits, qc_eval_begin, qc_eval_end, state_evolution, name)
def rodeo_qpe(
num_eval_qubits: int,
num_state_qubits: int,
hamiltonian: np.ndarray,
target_energy: float,
time_arr: np.ndarray,
name: str = 'RodeoQPE'
) -> GeneralizedQPE:
def unitary(index: int) -> QuantumCircuit:
return helpers.time_evolution_circuit(hamiltonian, time_arr[index])
def phase(index: int) -> PhaseGate:
return PhaseGate(target_energy * time_arr[index])
def state_evolution(info: GeneralizedQPEInfo, index: int) -> None:
info.circuit.compose(unitary(index).control(), qubits=[info.qr_eval[index]] + info.qr_state[:], inplace=True)
info.circuit.compose(phase(index), qubits=[index], inplace=True)
qc_eval_begin = QuantumCircuit(num_eval_qubits)
qc_eval_begin.append(HGate(), [qc_eval_begin.qubits])
qc_eval_end = QuantumCircuit(num_eval_qubits)
qc_eval_end.append(HGate(), [qc_eval_begin.qubits])
return GeneralizedQPE(num_eval_qubits, num_state_qubits, qc_eval_begin, qc_eval_end, state_evolution, name)
def rodeo_qpe_gaussian(
num_eval_qubits: int,
num_state_qubits: int,
hamiltonian: np.ndarray,
target_energy: float,
time_stddev: float,
name: str = 'RodeoQPE'
) -> GeneralizedQPE:
time_arr = helpers.rand_gaussian_array(num_eval_qubits, time_stddev)
return rodeo_qpe(num_eval_qubits, num_state_qubits, hamiltonian, target_energy, time_arr, name)
| [
"qiskit.circuit.library.PhaseGate",
"qiskit.circuit.library.QFT",
"qiskit.circuit.library.HGate",
"doddq.helpers.rand_gaussian_array",
"doddq.helpers.time_evolution_circuit",
"qiskit.QuantumRegister",
"qiskit.QuantumCircuit"
] | [((1836, 1867), 'qiskit.QuantumCircuit', 'QuantumCircuit', (['num_eval_qubits'], {}), '(num_eval_qubits)\n', (1850, 1867), False, 'from qiskit import QuantumCircuit, QuantumRegister\n'), ((2853, 2884), 'qiskit.QuantumCircuit', 'QuantumCircuit', (['num_eval_qubits'], {}), '(num_eval_qubits)\n', (2867, 2884), False, 'from qiskit import QuantumCircuit, QuantumRegister\n'), ((2962, 2993), 'qiskit.QuantumCircuit', 'QuantumCircuit', (['num_eval_qubits'], {}), '(num_eval_qubits)\n', (2976, 2993), False, 'from qiskit import QuantumCircuit, QuantumRegister\n'), ((3408, 3465), 'doddq.helpers.rand_gaussian_array', 'helpers.rand_gaussian_array', (['num_eval_qubits', 'time_stddev'], {}), '(num_eval_qubits, time_stddev)\n', (3435, 3465), True, 'import doddq.helpers as helpers\n'), ((802, 842), 'qiskit.QuantumRegister', 'QuantumRegister', (['num_eval_qubits', '"""eval"""'], {}), "(num_eval_qubits, 'eval')\n", (817, 842), False, 'from qiskit import QuantumCircuit, QuantumRegister\n'), ((862, 904), 'qiskit.QuantumRegister', 'QuantumRegister', (['num_state_qubits', '"""state"""'], {}), "(num_state_qubits, 'state')\n", (877, 904), False, 'from qiskit import QuantumCircuit, QuantumRegister\n'), ((923, 967), 'qiskit.QuantumCircuit', 'QuantumCircuit', (['qr_eval', 'qr_state'], {'name': 'name'}), '(qr_eval, qr_state, name=name)\n', (937, 967), False, 'from qiskit import QuantumCircuit, QuantumRegister\n'), ((1893, 1900), 'qiskit.circuit.library.HGate', 'HGate', ([], {}), '()\n', (1898, 1900), False, 'from qiskit.circuit.library import HGate, PhaseGate, QFT\n'), ((2409, 2469), 'doddq.helpers.time_evolution_circuit', 'helpers.time_evolution_circuit', (['hamiltonian', 'time_arr[index]'], {}), '(hamiltonian, time_arr[index])\n', (2439, 2469), True, 'import doddq.helpers as helpers\n'), ((2526, 2568), 'qiskit.circuit.library.PhaseGate', 'PhaseGate', (['(target_energy * time_arr[index])'], {}), '(target_energy * time_arr[index])\n', (2535, 2568), False, 'from qiskit.circuit.library import 
HGate, PhaseGate, QFT\n'), ((2910, 2917), 'qiskit.circuit.library.HGate', 'HGate', ([], {}), '()\n', (2915, 2917), False, 'from qiskit.circuit.library import HGate, PhaseGate, QFT\n'), ((3017, 3024), 'qiskit.circuit.library.HGate', 'HGate', ([], {}), '()\n', (3022, 3024), False, 'from qiskit.circuit.library import HGate, PhaseGate, QFT\n'), ((1945, 1995), 'qiskit.circuit.library.QFT', 'QFT', (['num_eval_qubits'], {'inverse': '(True)', 'do_swaps': '(False)'}), '(num_eval_qubits, inverse=True, do_swaps=False)\n', (1948, 1995), False, 'from qiskit.circuit.library import HGate, PhaseGate, QFT\n')] |
import numpy as np
import pandas as pd
from .scm import SCM
class DataGenerator:
def generate(self, scm: SCM, n_samples: int, seed: int):
pass
class SimpleDataGenerator(DataGenerator):
def generate(self, scm: SCM, n_samples: int, seed: int):
"""
Generates date according to the given Structural Causal Model
This Generator assumes that variables are normally distributed
The noise is distributed according to standard normal distribution
:param scm: instance of SCM
:param n_samples: number of samples to generate
:param seed: random seed
:return:
"""
np.random.seed(seed)
data = {}
for equation in scm.equations:
data[equation["output_variable"].name] = np.zeros(n_samples)
for input_variable, coeff in equation["input_variables"].items():
if input_variable.name not in data:
raise AttributeError(
f"No data generated for dependent variable {input_variable.name}"
)
data[equation["output_variable"].name] += (
data[input_variable.name] * coeff
)
mean = 0
std = 1.0
if isinstance(equation["output_variable"].config, dict):
mean = equation["output_variable"].config.get("mean", 0)
std = equation["output_variable"].config.get("std", 1.0)
data[equation["output_variable"].name] += np.random.normal(
loc=mean, scale=std, size=n_samples
)
if (
isinstance(equation["output_variable"].config, dict)
and "mask" in equation["output_variable"].config
):
out_val = data[equation["output_variable"].name]
out_val[out_val < equation["output_variable"].config["mask"]] = 0
out_val[out_val > 0] = 1
data[equation["output_variable"].name] = out_val
return pd.DataFrame.from_dict(data)
| [
"numpy.random.normal",
"numpy.zeros",
"numpy.random.seed",
"pandas.DataFrame.from_dict"
] | [((653, 673), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (667, 673), True, 'import numpy as np\n'), ((2057, 2085), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['data'], {}), '(data)\n', (2079, 2085), True, 'import pandas as pd\n'), ((786, 805), 'numpy.zeros', 'np.zeros', (['n_samples'], {}), '(n_samples)\n', (794, 805), True, 'import numpy as np\n'), ((1537, 1590), 'numpy.random.normal', 'np.random.normal', ([], {'loc': 'mean', 'scale': 'std', 'size': 'n_samples'}), '(loc=mean, scale=std, size=n_samples)\n', (1553, 1590), True, 'import numpy as np\n')] |
import cv2
image = cv2.imread("database-tea-cake/1018-1-a.jpg")
cv2.imshow("palm",image) #to view the palm in python
cv2.waitKey(0)
gray = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)
edges = cv2.Canny(gray,60,65,apertureSize = 3)
cv2.imshow("edges",edges)
cv2.waitKey(0)
edges = cv2.bitwise_not(edges)
cv2.imshow("change black and white",edges)
cv2.waitKey(0)
cv2.imwrite("palmlines.jpg", edges)
palmlines = cv2.imread("palmlines.jpg")
img = cv2.addWeighted(palmlines, 0.3, image, 0.7, 0)
cv2.imshow("lines in palm", img)
cv2.waitKey(0) | [
"cv2.imwrite",
"cv2.imshow",
"cv2.addWeighted",
"cv2.cvtColor",
"cv2.bitwise_not",
"cv2.Canny",
"cv2.waitKey",
"cv2.imread"
] | [((20, 64), 'cv2.imread', 'cv2.imread', (['"""database-tea-cake/1018-1-a.jpg"""'], {}), "('database-tea-cake/1018-1-a.jpg')\n", (30, 64), False, 'import cv2\n'), ((65, 90), 'cv2.imshow', 'cv2.imshow', (['"""palm"""', 'image'], {}), "('palm', image)\n", (75, 90), False, 'import cv2\n'), ((118, 132), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (129, 132), False, 'import cv2\n'), ((141, 180), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (153, 180), False, 'import cv2\n'), ((189, 228), 'cv2.Canny', 'cv2.Canny', (['gray', '(60)', '(65)'], {'apertureSize': '(3)'}), '(gray, 60, 65, apertureSize=3)\n', (198, 228), False, 'import cv2\n'), ((228, 254), 'cv2.imshow', 'cv2.imshow', (['"""edges"""', 'edges'], {}), "('edges', edges)\n", (238, 254), False, 'import cv2\n'), ((254, 268), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (265, 268), False, 'import cv2\n'), ((278, 300), 'cv2.bitwise_not', 'cv2.bitwise_not', (['edges'], {}), '(edges)\n', (293, 300), False, 'import cv2\n'), ((301, 344), 'cv2.imshow', 'cv2.imshow', (['"""change black and white"""', 'edges'], {}), "('change black and white', edges)\n", (311, 344), False, 'import cv2\n'), ((344, 358), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (355, 358), False, 'import cv2\n'), ((360, 395), 'cv2.imwrite', 'cv2.imwrite', (['"""palmlines.jpg"""', 'edges'], {}), "('palmlines.jpg', edges)\n", (371, 395), False, 'import cv2\n'), ((408, 435), 'cv2.imread', 'cv2.imread', (['"""palmlines.jpg"""'], {}), "('palmlines.jpg')\n", (418, 435), False, 'import cv2\n'), ((442, 488), 'cv2.addWeighted', 'cv2.addWeighted', (['palmlines', '(0.3)', 'image', '(0.7)', '(0)'], {}), '(palmlines, 0.3, image, 0.7, 0)\n', (457, 488), False, 'import cv2\n'), ((489, 521), 'cv2.imshow', 'cv2.imshow', (['"""lines in palm"""', 'img'], {}), "('lines in palm', img)\n", (499, 521), False, 'import cv2\n'), ((522, 536), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', 
(533, 536), False, 'import cv2\n')] |
#!/usr/bin/env python3
import numpy
from rl.agents.policy.policy_agent import PolicyAgent
class Random(PolicyAgent):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def act(self, state: numpy.ndarray, available_actions: numpy.ndarray):
"""
Uses a uniform random distribution to determine it's action given a state
TODO: Act according to different distributions
:param state: The state of the environment
:param available_actions: A list of available possible actions (positions on the board to mark)
:return: a random action
"""
return numpy.random.choice(available_actions)
| [
"numpy.random.choice"
] | [((644, 682), 'numpy.random.choice', 'numpy.random.choice', (['available_actions'], {}), '(available_actions)\n', (663, 682), False, 'import numpy\n')] |
import PP_Calculator
from pppredict.predict import predict
from util import str_to_dict, mod_convert, add_spaces
from re import findall
from lang_init import Initialization
from get_local import get_sample
import users
import maps
class Commands:
def __init__(self):
pass
''' +====-----------==--------=----------- ------------ ------- -- --- -
|
| PP Calculation
|
| Use /np before command*
|
| Syntax: {prefix}pp mods='nomod' acc=[95, 100] miss=[0, 0] combo='max'
|
| If force_miss num are not equals to force_acc,
| for every force_acc with index > len(force_miss) will use force_miss[-1]
|
| *NOTE: kwargs must to be after args
|
| Returns message to send in format: For {title} [{dif_name}] ({OD}, {AR}, {CS}, {Stars}, {Len_in_minutes}) you'll get *[{i[PP]} for {i[ACC]}]
|
\==---=--------------------------- ------- ---- - -'''
@staticmethod
def calc(*args) -> str:
nick: str = args[-1]
try:
Action: str = args[1][0][args[2]]
except:
return get_sample("ERROR_NP_NEED", nick)
PPs: str = ''
args: list = args[0]
beatmap_ID: str = findall(r'\d+', Action[Action.find('/b/')+3:])[0]
# If +MOD_NAME in Action, collect them to mods parameter
if '+' in Action:
mods = mod_convert(Action)
else:
mods = 'nomod'
# if there are args
if args:
acc = [0.95, 1]
miss = [0, 0]
combo = 'max'
arg_list = {'mods': [],
'acc': acc,
'miss': miss,
'combo': combo}
# if there are keyword args
if any('=' in i for i in args):
kwargs: list = list()
for i in range(len(args)):
[(kwargs.append(i), args.remove(i)) if '=' in i else '' for i in args]
for i in kwargs:
if 'mods=' in i:
ind = kwargs.index(i)
kwargs[ind] = kwargs[ind][:5] + '"' + kwargs[ind][5:] + '"'
# bruh
kwargs = eval('str_to_dict({})'.format(', '.join(kwargs)))
# some ifs, maybe better solution soon
if 'mods' in kwargs:
arg_list['mods'] = [i.lower() for i in [kwargs['mods'][i:i+2] for i in range(0, len(kwargs['mods']), 2)]]
if 'acc' in kwargs:
arg_list['acc'].insert(0, kwargs['acc']/100)
if 'miss' in kwargs:
arg_list['miss'].insert(0, kwargs['miss'])
if 'combo' in kwargs:
arg_list['combo'] = kwargs['combo']
# if there are non-keyword args
if args:
# all non-keyword args type is str. For acc and miss it should be int
for i in range(len(args)):
if type(arg_list[list(arg_list.keys())[i]]) == 'str':
arg_list[list(arg_list.keys())[i]] = args[i]
elif list(arg_list.keys())[i] == 'mods':
arg_list[list(arg_list.keys())[i]] = [x.lower() for x in [args[i][x:x+2] for x in range(0, len(args[i]), 2)]]
elif list(arg_list.keys())[i] == 'acc':
arg_list[list(arg_list.keys())[i]].insert(0, float(args[i])/100)
else:
arg_list[list(arg_list.keys())[i]].insert(0, int(args[i]))
# If user sets acc but not misses or vise versa we should equalize arrays
if len(arg_list['acc']) != len(arg_list['miss']):
dif = abs(len(arg_list['acc']) - len(arg_list['miss']))
if len(arg_list['acc']) > len(arg_list['miss']):
for i in range(dif):
arg_list['miss'].append(arg_list['miss'][-1])
else:
for i in range(dif):
arg_list['acc'].append(arg_list['acc'][-1])
if arg_list['mods'] is False:
arg_list['mods'] = mods
else:
arg_list = {
'mods': mods,
'acc': [0.95, 1],
'miss': [0, 0],
'combo': 'max'
}
res: list = PP_Calculator.PP_Calculator(arg_list['combo'],
beatmap_ID,
arg_list ['mods'],
1,
arg_list['acc'],
arg_list['miss'])
for i in range(len(res[1])):
PPs += get_sample("PP_FOR", nick).format(res[1][i], arg_list['acc'][i]*100)
message = get_sample("PP", nick).format(
beatmap_ID, # Beatmap ID
res[2][0], # Title
res[2][1], # Diff_name
' +{}'.format(''.join(arg_list['mods']).upper()) if arg_list['mods'] != 'nomod' else '', # If mods used
*[round(i, 2) for i in res[0]], # AR and etc
*[int(i) for i in divmod(int(res[2][2]), 60)], # True Seconds
PPs, # PPs
'({}x)'.format(arg_list['combo']) if arg_list['combo'] != 'max' else '') # If combo param used
return message
# Actually a copy of calc function but with prediction.
@staticmethod
def calcPred(*args) -> str:
nick: str = args[-1]
try:
Action: str = args[1][0][args[2]]
except:
return get_sample("ERROR_NP_NEED", nick)
PPs: str = ''
args: list = args[0]
beatmap_ID: str = findall(r'\d+', Action[Action.find('/b/')+3:])[0]
# If +MOD_NAME in Action, collect them to mods parameter
if '+' in Action:
mods = mod_convert(Action)
else:
mods = 'nomod'
# if there are args
if args:
acc = [0.95, 1]
miss = [0, 0]
combo = 'max'
arg_list = {'mods': [],
'acc': acc,
'miss': miss,
'combo': combo}
# if there are keyword args
if any('=' in i for i in args):
kwargs: list = list()
for i in range(len(args)):
[(kwargs.append(i), args.remove(i)) if '=' in i else '' for i in args]
for i in kwargs:
if 'mods=' in i:
ind = kwargs.index(i)
kwargs[ind] = kwargs[ind][:5] + '"' + kwargs[ind][5:] + '"'
# bruh
kwargs = eval('str_to_dict({})'.format(', '.join(kwargs)))
# some ifs, maybe better solution soon
if 'mods' in kwargs:
arg_list['mods'] = [i.lower() for i in [kwargs['mods'][i:i+2] for i in range(0, len(kwargs['mods']), 2)]]
if 'acc' in kwargs:
arg_list['acc'].insert(0, kwargs['acc']/100)
if 'miss' in kwargs:
arg_list['miss'].insert(0, kwargs['miss'])
if 'combo' in kwargs:
arg_list['combo'] = kwargs['combo']
# if there are non-keyword args
if args:
# all non-keyword args type is str. For acc and miss it should be int
for i in range(len(args)):
if type(arg_list[list(arg_list.keys())[i]]) == 'str':
arg_list[list(arg_list.keys())[i]] = args[i]
elif list(arg_list.keys())[i] == 'mods':
arg_list[list(arg_list.keys())[i]] = [x.lower() for x in [args[i][x:x+2] for x in range(0, len(args[i]), 2)]]
elif list(arg_list.keys())[i] == 'acc':
arg_list[list(arg_list.keys())[i]].insert(0, float(args[i])/100)
else:
arg_list[list(arg_list.keys())[i]].insert(0, int(args[i]))
# If user sets acc but not misses or vise versa we should equalize arrays
if len(arg_list['acc']) != len(arg_list['miss']):
dif = abs(len(arg_list['acc']) - len(arg_list['miss']))
if len(arg_list['acc']) > len(arg_list['miss']):
for i in range(dif):
arg_list['miss'].append(arg_list['miss'][-1])
else:
for i in range(dif):
arg_list['acc'].append(arg_list['acc'][-1])
if arg_list['mods'] is False:
arg_list['mods'] = mods
else:
arg_list = {
'mods': mods,
'acc': [0.95, 1],
'miss': [0, 0],
'combo': 'max'
}
res: list = PP_Calculator.PP_Calculator(arg_list['combo'],
beatmap_ID,
arg_list ['mods'],
1,
arg_list['acc'],
arg_list['miss'])
for i in range(len(res[1])):
PPs += get_sample("PP_FOR", nick).format(res[1][i], arg_list['acc'][i]*100)
Pred: predict.Prediction = predict.Prediction()
Pred.predict(nick, float(res[0][3]))
if Pred.predicted == 'Impossible':
PP_Pred = get_sample("PP_PRED_IMPOSSIBLE", nick)
else:
PP_Pred: str = get_sample("PP_PRED_FUTURE",
nick).format(PP_Calculator.PP_Calculator('max',
beatmap_ID,
arg_list['mods'],
1,
(Pred.predicted * 0.01,),
(0, ))[1][0])
message = get_sample("PP_PRED", nick).format(beatmap_ID, # Beatmap ID
res[2][0], # Title
res[2][1], # Diff_name
' +{}'.format(''.join(arg_list['mods']).upper()) if arg_list['mods'] != 'nomod' else '', # If mods used
*[round(i, 2) for i in res[0]], # AR and etc
*[int(i) for i in divmod(int(res[2][2]), 60)], # True Seconds
PPs, # PPs
'({}x)'.format(arg_list['combo']) if arg_list['combo'] != 'max' else '', # If combo param used
PP_Pred) # Predicted pp
return message
# INFO
@staticmethod
def info(*args) -> str:
nick = args[-1]
mess = get_sample("INFO", nick)
return mess
# Set language
staticmethod
def setLang(*args) -> str:
# Converts language to full name
lang_dict = {
'ru': 'Russian',
'en': 'English',
'de': 'Deutsch'
}
nick: str = args[-1]
language: str = args[0][0]
if language in lang_dict:
language = lang_dict[language]
else:
return get_sample("ERROR_NO_LANGUAGE", nick)
init = Initialization()
init.set(nick, language)
return get_sample("LANG_CHANGED", nick)
# Maps - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Push map
@staticmethod
def map_push(*args) -> str:
nick: str = args[-1]
try:
Action: str = args[1][0][args[2]]
except:
return get_sample("ERROR_NP_NEED", nick)
beatmap_ID: str = int(findall(r'\d+', Action[Action.find('/b/') + 3:])[0])
if users.Users.isPushMap(nick, beatmap_ID):
return get_sample("ERROR_MAP_PUSHED_ALREADY", nick)
else:
maps.Maps.addMap(beatmap_ID)
users.Users.addMapToPushed(nick, beatmap_ID)
return get_sample("MAP_SUCCESS_PUSH", nick)
# Drop map
@staticmethod
def map_drop(*args) -> str:
nick: str = args[-1]
try:
Action: str = args[1][0][args[2]]
except:
return get_sample("ERROR_NP_NEED", nick)
beatmap_ID: int = int(findall(r'\d+', Action[Action.find('/b/') + 3:])[0])
if users.Users.isPushMap(nick, beatmap_ID):
return get_sample("ERROR_MAP_PUSHED_ALREADY", nick)
else:
maps.Maps.dropMap(beatmap_ID)
users.Users.addMapToPushed(nick, beatmap_ID)
return get_sample("MAP_SUCCESS_DROP", nick)
# Map top
@staticmethod
def map_top(*args) -> str:
nick: str = args[-1]
args_l: list = args[0]
if not(args):
args: str = 'user'
else:
args: str = args_l[0]
top: list = maps.Maps.getTop(args, limit=5)
message: str = ''
for map in top:
PPs = get_sample("PP_FOR", nick).format(eval(map[2])[3], 100)
message += get_sample("MAP_TOP", nick).format(map[0],
map[1],
eval(map[4])[3],
PPs,
map[3])
message = add_spaces(message)
return message
# Get last map in /np
@staticmethod
def map_recent(*args) -> str:
nick: str = args[-1]
recent: list = maps.Maps.getLastNP()
accs: list = [0.95, 0.98, 0.99, 1]
PPs: str = ''
for i in range(len(accs)):
PPs += get_sample("PP_FOR", nick).format(eval(recent[2])[i], accs[i] * 100)
message: str = get_sample("MAP_RECENT", nick).format(recent[0], recent[1], eval(recent[4])[3], PPs)
return message
# Maps end - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Users - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Get target's stats. If no target, shows user's stats
@staticmethod
def user_getStat(*args) -> str:
nick: str = args[-1]
args: list = args[0]
if args:
to_see = args[0]
else:
to_see = nick
stats = users.Users.getStat(to_see)
message = get_sample("USER_STAT_FOR", nick).format(stats[0])
message = add_spaces(message)
message += get_sample("USER_STAT_ACC", nick).format(round(stats[1], 2))
message = add_spaces(message)
message += get_sample("USER_STAT_PP", nick).format(round(stats[2]))
message = add_spaces(message)
message += get_sample("USER_STAT_STARAVG", nick).format(round(stats[3], 2))
return message
# List of commands and functions
cmd_list = {'pp': (Commands.calc, True),
'pp_pred': (Commands.calcPred, True),
'info': (Commands.info, False),
'lang': (Commands.setLang, False),
'push': (Commands.map_push, True),
'drop': (Commands.map_drop, True),
'top': (Commands.map_top, False),
'recent': (Commands.map_recent, False),
'stats': (Commands.user_getStat, False)}
| [
"lang_init.Initialization",
"util.add_spaces",
"users.Users.getStat",
"maps.Maps.addMap",
"get_local.get_sample",
"maps.Maps.getLastNP",
"users.Users.addMapToPushed",
"users.Users.isPushMap",
"maps.Maps.dropMap",
"PP_Calculator.PP_Calculator",
"maps.Maps.getTop",
"pppredict.predict.predict.Pre... | [((4395, 4513), 'PP_Calculator.PP_Calculator', 'PP_Calculator.PP_Calculator', (["arg_list['combo']", 'beatmap_ID', "arg_list['mods']", '(1)', "arg_list['acc']", "arg_list['miss']"], {}), "(arg_list['combo'], beatmap_ID, arg_list['mods'],\n 1, arg_list['acc'], arg_list['miss'])\n", (4422, 4513), False, 'import PP_Calculator\n'), ((9278, 9396), 'PP_Calculator.PP_Calculator', 'PP_Calculator.PP_Calculator', (["arg_list['combo']", 'beatmap_ID', "arg_list['mods']", '(1)', "arg_list['acc']", "arg_list['miss']"], {}), "(arg_list['combo'], beatmap_ID, arg_list['mods'],\n 1, arg_list['acc'], arg_list['miss'])\n", (9305, 9396), False, 'import PP_Calculator\n'), ((9811, 9831), 'pppredict.predict.predict.Prediction', 'predict.Prediction', ([], {}), '()\n', (9829, 9831), False, 'from pppredict.predict import predict\n'), ((11620, 11644), 'get_local.get_sample', 'get_sample', (['"""INFO"""', 'nick'], {}), "('INFO', nick)\n", (11630, 11644), False, 'from get_local import get_sample\n'), ((12123, 12139), 'lang_init.Initialization', 'Initialization', ([], {}), '()\n', (12137, 12139), False, 'from lang_init import Initialization\n'), ((12189, 12221), 'get_local.get_sample', 'get_sample', (['"""LANG_CHANGED"""', 'nick'], {}), "('LANG_CHANGED', nick)\n", (12199, 12221), False, 'from get_local import get_sample\n'), ((12630, 12669), 'users.Users.isPushMap', 'users.Users.isPushMap', (['nick', 'beatmap_ID'], {}), '(nick, beatmap_ID)\n', (12651, 12669), False, 'import users\n'), ((12863, 12899), 'get_local.get_sample', 'get_sample', (['"""MAP_SUCCESS_PUSH"""', 'nick'], {}), "('MAP_SUCCESS_PUSH', nick)\n", (12873, 12899), False, 'from get_local import get_sample\n'), ((13220, 13259), 'users.Users.isPushMap', 'users.Users.isPushMap', (['nick', 'beatmap_ID'], {}), '(nick, beatmap_ID)\n', (13241, 13259), False, 'import users\n'), ((13454, 13490), 'get_local.get_sample', 'get_sample', (['"""MAP_SUCCESS_DROP"""', 'nick'], {}), "('MAP_SUCCESS_DROP', nick)\n", 
(13464, 13490), False, 'from get_local import get_sample\n'), ((13738, 13769), 'maps.Maps.getTop', 'maps.Maps.getTop', (['args'], {'limit': '(5)'}), '(args, limit=5)\n', (13754, 13769), False, 'import maps\n'), ((14427, 14448), 'maps.Maps.getLastNP', 'maps.Maps.getLastNP', ([], {}), '()\n', (14446, 14448), False, 'import maps\n'), ((15222, 15249), 'users.Users.getStat', 'users.Users.getStat', (['to_see'], {}), '(to_see)\n', (15241, 15249), False, 'import users\n'), ((15338, 15357), 'util.add_spaces', 'add_spaces', (['message'], {}), '(message)\n', (15348, 15357), False, 'from util import str_to_dict, mod_convert, add_spaces\n'), ((15457, 15476), 'util.add_spaces', 'add_spaces', (['message'], {}), '(message)\n', (15467, 15476), False, 'from util import str_to_dict, mod_convert, add_spaces\n'), ((15572, 15591), 'util.add_spaces', 'add_spaces', (['message'], {}), '(message)\n', (15582, 15591), False, 'from util import str_to_dict, mod_convert, add_spaces\n'), ((1368, 1387), 'util.mod_convert', 'mod_convert', (['Action'], {}), '(Action)\n', (1379, 1387), False, 'from util import str_to_dict, mod_convert, add_spaces\n'), ((6252, 6271), 'util.mod_convert', 'mod_convert', (['Action'], {}), '(Action)\n', (6263, 6271), False, 'from util import str_to_dict, mod_convert, add_spaces\n'), ((9943, 9981), 'get_local.get_sample', 'get_sample', (['"""PP_PRED_IMPOSSIBLE"""', 'nick'], {}), "('PP_PRED_IMPOSSIBLE', nick)\n", (9953, 9981), False, 'from get_local import get_sample\n'), ((12069, 12106), 'get_local.get_sample', 'get_sample', (['"""ERROR_NO_LANGUAGE"""', 'nick'], {}), "('ERROR_NO_LANGUAGE', nick)\n", (12079, 12106), False, 'from get_local import get_sample\n'), ((12690, 12734), 'get_local.get_sample', 'get_sample', (['"""ERROR_MAP_PUSHED_ALREADY"""', 'nick'], {}), "('ERROR_MAP_PUSHED_ALREADY', nick)\n", (12700, 12734), False, 'from get_local import get_sample\n'), ((12761, 12789), 'maps.Maps.addMap', 'maps.Maps.addMap', (['beatmap_ID'], {}), '(beatmap_ID)\n', (12777, 
12789), False, 'import maps\n'), ((12802, 12846), 'users.Users.addMapToPushed', 'users.Users.addMapToPushed', (['nick', 'beatmap_ID'], {}), '(nick, beatmap_ID)\n', (12828, 12846), False, 'import users\n'), ((13280, 13324), 'get_local.get_sample', 'get_sample', (['"""ERROR_MAP_PUSHED_ALREADY"""', 'nick'], {}), "('ERROR_MAP_PUSHED_ALREADY', nick)\n", (13290, 13324), False, 'from get_local import get_sample\n'), ((13351, 13380), 'maps.Maps.dropMap', 'maps.Maps.dropMap', (['beatmap_ID'], {}), '(beatmap_ID)\n', (13368, 13380), False, 'import maps\n'), ((13393, 13437), 'users.Users.addMapToPushed', 'users.Users.addMapToPushed', (['nick', 'beatmap_ID'], {}), '(nick, beatmap_ID)\n', (13419, 13437), False, 'import users\n'), ((14253, 14272), 'util.add_spaces', 'add_spaces', (['message'], {}), '(message)\n', (14263, 14272), False, 'from util import str_to_dict, mod_convert, add_spaces\n'), ((1095, 1128), 'get_local.get_sample', 'get_sample', (['"""ERROR_NP_NEED"""', 'nick'], {}), "('ERROR_NP_NEED', nick)\n", (1105, 1128), False, 'from get_local import get_sample\n'), ((4911, 4933), 'get_local.get_sample', 'get_sample', (['"""PP"""', 'nick'], {}), "('PP', nick)\n", (4921, 4933), False, 'from get_local import get_sample\n'), ((5979, 6012), 'get_local.get_sample', 'get_sample', (['"""ERROR_NP_NEED"""', 'nick'], {}), "('ERROR_NP_NEED', nick)\n", (5989, 6012), False, 'from get_local import get_sample\n'), ((10625, 10652), 'get_local.get_sample', 'get_sample', (['"""PP_PRED"""', 'nick'], {}), "('PP_PRED', nick)\n", (10635, 10652), False, 'from get_local import get_sample\n'), ((12500, 12533), 'get_local.get_sample', 'get_sample', (['"""ERROR_NP_NEED"""', 'nick'], {}), "('ERROR_NP_NEED', nick)\n", (12510, 12533), False, 'from get_local import get_sample\n'), ((13090, 13123), 'get_local.get_sample', 'get_sample', (['"""ERROR_NP_NEED"""', 'nick'], {}), "('ERROR_NP_NEED', nick)\n", (13100, 13123), False, 'from get_local import get_sample\n'), ((14663, 14693), 'get_local.get_sample', 
'get_sample', (['"""MAP_RECENT"""', 'nick'], {}), "('MAP_RECENT', nick)\n", (14673, 14693), False, 'from get_local import get_sample\n'), ((15269, 15302), 'get_local.get_sample', 'get_sample', (['"""USER_STAT_FOR"""', 'nick'], {}), "('USER_STAT_FOR', nick)\n", (15279, 15302), False, 'from get_local import get_sample\n'), ((15378, 15411), 'get_local.get_sample', 'get_sample', (['"""USER_STAT_ACC"""', 'nick'], {}), "('USER_STAT_ACC', nick)\n", (15388, 15411), False, 'from get_local import get_sample\n'), ((15497, 15529), 'get_local.get_sample', 'get_sample', (['"""USER_STAT_PP"""', 'nick'], {}), "('USER_STAT_PP', nick)\n", (15507, 15529), False, 'from get_local import get_sample\n'), ((15612, 15649), 'get_local.get_sample', 'get_sample', (['"""USER_STAT_STARAVG"""', 'nick'], {}), "('USER_STAT_STARAVG', nick)\n", (15622, 15649), False, 'from get_local import get_sample\n'), ((4823, 4849), 'get_local.get_sample', 'get_sample', (['"""PP_FOR"""', 'nick'], {}), "('PP_FOR', nick)\n", (4833, 4849), False, 'from get_local import get_sample\n'), ((9706, 9732), 'get_local.get_sample', 'get_sample', (['"""PP_FOR"""', 'nick'], {}), "('PP_FOR', nick)\n", (9716, 9732), False, 'from get_local import get_sample\n'), ((10023, 10057), 'get_local.get_sample', 'get_sample', (['"""PP_PRED_FUTURE"""', 'nick'], {}), "('PP_PRED_FUTURE', nick)\n", (10033, 10057), False, 'from get_local import get_sample\n'), ((13839, 13865), 'get_local.get_sample', 'get_sample', (['"""PP_FOR"""', 'nick'], {}), "('PP_FOR', nick)\n", (13849, 13865), False, 'from get_local import get_sample\n'), ((13918, 13945), 'get_local.get_sample', 'get_sample', (['"""MAP_TOP"""', 'nick'], {}), "('MAP_TOP', nick)\n", (13928, 13945), False, 'from get_local import get_sample\n'), ((14570, 14596), 'get_local.get_sample', 'get_sample', (['"""PP_FOR"""', 'nick'], {}), "('PP_FOR', nick)\n", (14580, 14596), False, 'from get_local import get_sample\n'), ((10103, 10207), 'PP_Calculator.PP_Calculator', 'PP_Calculator.PP_Calculator', 
(['"""max"""', 'beatmap_ID', "arg_list['mods']", '(1)', '(Pred.predicted * 0.01,)', '(0,)'], {}), "('max', beatmap_ID, arg_list['mods'], 1, (Pred.\n predicted * 0.01,), (0,))\n", (10130, 10207), False, 'import PP_Calculator\n')] |
# ---------------------------------------------------
# Intermediate Python - Loops
# 22 set 2020
# VNTBJR
# ---------------------------------------------------
#
# Load packages
library(reticulate)
# while loop -------------------------------------------
# Basic while loop
# Initialize offset
offset = 8
# Code the while loop
while offset != 0 :
print("correcting...")
offset = offset - 1
print(offset)
quit()
# Add conditionals
# Initialize offset
offset = -6
# Code the while loop
while offset != 0 :
print("correcting...")
if offset > 0 :
offset = offset - 1
else :
offset = offset + 1
print(offset)
quit()
######################################################################
# for loop -------------------------------------------
# Loop over a list
# areas list
areas = [11.25, 18.0, 20.0, 10.75, 9.50]
# Code the for loop
for area in areas :
print(area)
quit()
# Indexes and values
# areas list
areas = [11.25, 18.0, 20.0, 10.75, 9.50]
# Change for loop to use enumerate() and update print()
for index, a in enumerate(areas) :
print("room " + str(index) + ": " + str(a))
quit()
# Indexes and values (2)
# areas list
areas = [11.25, 18.0, 20.0, 10.75, 9.50]
# Code the for loop
for index, area in enumerate(areas) :
print("room " + str(index + 1) + ": " + str(area))
quit()
# Loop over list of lists
# house list of lists
house = [["hallway", 11.25],
["kitchen", 18.0],
["living room", 20.0],
["bedroom", 10.75],
["bathroom", 9.50]]
# Build a for loop from scratch
for room, area in house :
print("the " + str(room) + " is " + str(area) + " sqm")
quit()
######################################################################
# Loop Data Structures Part 1 -------------------------------------------
# Loop over dictionary
# Definition of dictionary
europe = {'spain':'madrid', 'france':'paris', 'germany':'berlin',
'norway':'oslo', 'italy':'rome', 'poland':'warsaw', 'austria':'vienna' }
# Iterate over europe
for key, value in europe.items() :
print("the capital of " + str(key) + " is " + str(value))
quit()
# Loop over Numpy array
# Import numpy as np
import numpy as np
import pandas as pd
# Load data
mlb = pd.read_csv("Datasets/MLB.csv", sep = ",")
# Create data for the exercise
np_height = np.array(mlb[["Height"]])
np_weight = np.array(mlb[["Weight"]])
np_baseball = []
for height in np.nditer(np_height) :
for weight in np.nditer(np_weight) :
np_baseball.append([height, weight])
quit()
np_baseball = np.array(np_baseball)
type(np_baseball)
# For loop over np_height
for height in np.nditer(np_height) :
print(str(height) + " inches")
quit()
# For loop over np_baseball
for hw in np.nditer(np_baseball) :
print(hw)
quit()
######################################################################
# Loop Data Structures Part 2 -------------------------------------------
# Loop over DataFrame (1)
# Import cars data
import pandas as pd
cars = pd.read_csv('Datasets/Cars.csv', index_col = 0)
# Iterate over rows of cars
for lab, row in cars.iterrows() :
print(lab)
print(row)
quit()
# Loop over DataFrame (2)
# Adapt for loop
for lab, row in cars.iterrows() :
print(str(lab) + ": " + str(row["cars_per_cap"]))
quit()
# Add column (1)
# Code for loop that adds COUNTRY column
for lab, row in cars.iterrows() :
cars.loc[lab, "COUNTRY"] = row["country"].upper()
quit()
# Print cars
print(cars)
# Add column (2)
# Import cars data
import pandas as pd
cars = pd.read_csv('Datasets/Cars.csv', index_col = 0)
# Use .apply(str.upper)
cars["COUNTRY"] = cars["country"].apply(str.upper)
print(cars)
######################################################################
| [
"numpy.array",
"numpy.nditer",
"pandas.read_csv"
] | [((2262, 2302), 'pandas.read_csv', 'pd.read_csv', (['"""Datasets/MLB.csv"""'], {'sep': '""","""'}), "('Datasets/MLB.csv', sep=',')\n", (2273, 2302), True, 'import pandas as pd\n'), ((2349, 2374), 'numpy.array', 'np.array', (["mlb[['Height']]"], {}), "(mlb[['Height']])\n", (2357, 2374), True, 'import numpy as np\n'), ((2387, 2412), 'numpy.array', 'np.array', (["mlb[['Weight']]"], {}), "(mlb[['Weight']])\n", (2395, 2412), True, 'import numpy as np\n'), ((2445, 2465), 'numpy.nditer', 'np.nditer', (['np_height'], {}), '(np_height)\n', (2454, 2465), True, 'import numpy as np\n'), ((2569, 2590), 'numpy.array', 'np.array', (['np_baseball'], {}), '(np_baseball)\n', (2577, 2590), True, 'import numpy as np\n'), ((2650, 2670), 'numpy.nditer', 'np.nditer', (['np_height'], {}), '(np_height)\n', (2659, 2670), True, 'import numpy as np\n'), ((2753, 2775), 'numpy.nditer', 'np.nditer', (['np_baseball'], {}), '(np_baseball)\n', (2762, 2775), True, 'import numpy as np\n'), ((3017, 3062), 'pandas.read_csv', 'pd.read_csv', (['"""Datasets/Cars.csv"""'], {'index_col': '(0)'}), "('Datasets/Cars.csv', index_col=0)\n", (3028, 3062), True, 'import pandas as pd\n'), ((3542, 3587), 'pandas.read_csv', 'pd.read_csv', (['"""Datasets/Cars.csv"""'], {'index_col': '(0)'}), "('Datasets/Cars.csv', index_col=0)\n", (3553, 3587), True, 'import pandas as pd\n'), ((2484, 2504), 'numpy.nditer', 'np.nditer', (['np_weight'], {}), '(np_weight)\n', (2493, 2504), True, 'import numpy as np\n')] |
from unittest import TestCase
import sys
sys.path.append("./AerialNavigation/rocket_powered_landing/")
from AerialNavigation.rocket_powered_landing import rocket_powered_landing as m
print(__file__)
class Test(TestCase):
def test1(self):
m.show_animation = False
m.main()
| [
"AerialNavigation.rocket_powered_landing.rocket_powered_landing.main",
"sys.path.append"
] | [((42, 103), 'sys.path.append', 'sys.path.append', (['"""./AerialNavigation/rocket_powered_landing/"""'], {}), "('./AerialNavigation/rocket_powered_landing/')\n", (57, 103), False, 'import sys\n'), ((288, 296), 'AerialNavigation.rocket_powered_landing.rocket_powered_landing.main', 'm.main', ([], {}), '()\n', (294, 296), True, 'from AerialNavigation.rocket_powered_landing import rocket_powered_landing as m\n')] |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import http, _
from odoo.http import request
from odoo.addons.website_sale.controllers.main import WebsiteSale
from odoo.exceptions import UserError
class WebsiteSaleDelivery(WebsiteSale):
@http.route(['/shop/payment'], type='http', auth="public", website=True)
def payment(self, **post):
order = request.website.sale_get_order()
carrier_id = post.get('carrier_id')
if carrier_id:
carrier_id = int(carrier_id)
if order:
order._check_carrier_quotation(force_carrier_id=carrier_id)
if carrier_id:
return request.redirect("/shop/payment")
return super(WebsiteSaleDelivery, self).payment(**post)
@http.route(['/shop/update_carrier'], type='json', auth='public', methods=['POST'], website=True, csrf=False)
def update_eshop_carrier(self, **post):
order = request.website.sale_get_order()
carrier_id = int(post['carrier_id'])
if order:
order._check_carrier_quotation(force_carrier_id=carrier_id)
return self._update_website_sale_delivery_return(order, **post)
@http.route(['/shop/carrier_rate_shipment'], type='json', auth='public', methods=['POST'], website=True)
def cart_carrier_rate_shipment(self, carrier_id, **kw):
order = request.website.sale_get_order(force_create=True)
if not int(carrier_id) in order._get_delivery_methods().ids:
raise UserError(_('It seems that a delivery method is not compatible with your address. Please refresh the page and try again.'))
Monetary = request.env['ir.qweb.field.monetary']
res = {'carrier_id': carrier_id}
carrier = request.env['delivery.carrier'].sudo().browse(int(carrier_id))
rate = carrier.rate_shipment(order)
if rate.get('success'):
tax_ids = carrier.product_id.taxes_id.filtered(lambda t: t.company_id == order.company_id)
if tax_ids:
fpos = order.fiscal_position_id
tax_ids = fpos.map_tax(tax_ids, carrier.product_id, order.partner_shipping_id)
taxes = tax_ids.compute_all(
rate['price'],
currency=order.currency_id,
quantity=1.0,
product=carrier.product_id,
partner=order.partner_shipping_id,
)
if request.env.user.has_group('account.group_show_line_subtotals_tax_excluded'):
rate['price'] = taxes['total_excluded']
else:
rate['price'] = taxes['total_included']
res['status'] = True
res['new_amount_delivery'] = Monetary.value_to_html(rate['price'], {'display_currency': order.currency_id})
res['is_free_delivery'] = not bool(rate['price'])
res['error_message'] = rate['warning_message']
else:
res['status'] = False
res['new_amount_delivery'] = Monetary.value_to_html(0.0, {'display_currency': order.currency_id})
res['error_message'] = rate['error_message']
return res
def order_lines_2_google_api(self, order_lines):
""" Transforms a list of order lines into a dict for google analytics """
order_lines_not_delivery = order_lines.filtered(lambda line: not line.is_delivery)
return super(WebsiteSaleDelivery, self).order_lines_2_google_api(order_lines_not_delivery)
def order_2_return_dict(self, order):
""" Returns the tracking_cart dict of the order for Google analytics """
ret = super(WebsiteSaleDelivery, self).order_2_return_dict(order)
for line in order.order_line:
if line.is_delivery:
ret['transaction']['shipping'] = line.price_unit
return ret
def _get_shop_payment_values(self, order, **kwargs):
values = super(WebsiteSaleDelivery, self)._get_shop_payment_values(order, **kwargs)
has_storable_products = any(line.product_id.type in ['consu', 'product'] for line in order.order_line)
if not order._get_delivery_methods() and has_storable_products:
values['errors'].append(
(_('Sorry, we are unable to ship your order'),
_('No shipping method is available for your current order and shipping address. '
'Please contact us for more information.')))
if has_storable_products:
if order.carrier_id and not order.delivery_rating_success:
order._remove_delivery_line()
delivery_carriers = order._get_delivery_methods()
values['deliveries'] = delivery_carriers.sudo()
values['delivery_has_storable'] = has_storable_products
values['delivery_action_id'] = request.env.ref('delivery.action_delivery_carrier_form').id
return values
def _update_website_sale_delivery_return(self, order, **post):
Monetary = request.env['ir.qweb.field.monetary']
carrier_id = int(post['carrier_id'])
currency = order.currency_id
if order:
return {
'status': order.delivery_rating_success,
'error_message': order.delivery_message,
'carrier_id': carrier_id,
'is_free_delivery': not bool(order.amount_delivery),
'new_amount_delivery': Monetary.value_to_html(order.amount_delivery, {'display_currency': currency}),
'new_amount_untaxed': Monetary.value_to_html(order.amount_untaxed, {'display_currency': currency}),
'new_amount_tax': Monetary.value_to_html(order.amount_tax, {'display_currency': currency}),
'new_amount_total': Monetary.value_to_html(order.amount_total, {'display_currency': currency}),
}
return {}
| [
"odoo._",
"odoo.http.request.website.sale_get_order",
"odoo.http.route",
"odoo.http.request.env.user.has_group",
"odoo.http.request.redirect",
"odoo.http.request.env.ref"
] | [((307, 378), 'odoo.http.route', 'http.route', (["['/shop/payment']"], {'type': '"""http"""', 'auth': '"""public"""', 'website': '(True)'}), "(['/shop/payment'], type='http', auth='public', website=True)\n", (317, 378), False, 'from odoo import http, _\n'), ((812, 925), 'odoo.http.route', 'http.route', (["['/shop/update_carrier']"], {'type': '"""json"""', 'auth': '"""public"""', 'methods': "['POST']", 'website': '(True)', 'csrf': '(False)'}), "(['/shop/update_carrier'], type='json', auth='public', methods=[\n 'POST'], website=True, csrf=False)\n", (822, 925), False, 'from odoo import http, _\n'), ((1227, 1334), 'odoo.http.route', 'http.route', (["['/shop/carrier_rate_shipment']"], {'type': '"""json"""', 'auth': '"""public"""', 'methods': "['POST']", 'website': '(True)'}), "(['/shop/carrier_rate_shipment'], type='json', auth='public',\n methods=['POST'], website=True)\n", (1237, 1334), False, 'from odoo import http, _\n'), ((426, 458), 'odoo.http.request.website.sale_get_order', 'request.website.sale_get_order', ([], {}), '()\n', (456, 458), False, 'from odoo.http import request\n'), ((981, 1013), 'odoo.http.request.website.sale_get_order', 'request.website.sale_get_order', ([], {}), '()\n', (1011, 1013), False, 'from odoo.http import request\n'), ((1407, 1456), 'odoo.http.request.website.sale_get_order', 'request.website.sale_get_order', ([], {'force_create': '(True)'}), '(force_create=True)\n', (1437, 1456), False, 'from odoo.http import request\n'), ((4882, 4938), 'odoo.http.request.env.ref', 'request.env.ref', (['"""delivery.action_delivery_carrier_form"""'], {}), "('delivery.action_delivery_carrier_form')\n", (4897, 4938), False, 'from odoo.http import request\n'), ((707, 740), 'odoo.http.request.redirect', 'request.redirect', (['"""/shop/payment"""'], {}), "('/shop/payment')\n", (723, 740), False, 'from odoo.http import request\n'), ((1555, 1672), 'odoo._', '_', (['"""It seems that a delivery method is not compatible with your address. 
Please refresh the page and try again."""'], {}), "('It seems that a delivery method is not compatible with your address. Please refresh the page and try again.'\n )\n", (1556, 1672), False, 'from odoo import http, _\n'), ((2498, 2574), 'odoo.http.request.env.user.has_group', 'request.env.user.has_group', (['"""account.group_show_line_subtotals_tax_excluded"""'], {}), "('account.group_show_line_subtotals_tax_excluded')\n", (2524, 2574), False, 'from odoo.http import request\n'), ((4294, 4338), 'odoo._', '_', (['"""Sorry, we are unable to ship your order"""'], {}), "('Sorry, we are unable to ship your order')\n", (4295, 4338), False, 'from odoo import http, _\n'), ((4357, 4483), 'odoo._', '_', (['"""No shipping method is available for your current order and shipping address. Please contact us for more information."""'], {}), "('No shipping method is available for your current order and shipping address. Please contact us for more information.'\n )\n", (4358, 4483), False, 'from odoo import http, _\n')] |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import functools
import gzip
import hashlib
import json
import logging
import os
import random
import warnings
from collections import defaultdict
from dataclasses import dataclass, field
from itertools import islice
from pathlib import Path
from typing import (
ClassVar,
Dict,
List,
Optional,
Sequence,
Tuple,
Type,
TypedDict,
Union,
)
import numpy as np
import torch
from iopath.common.file_io import PathManager
from PIL import Image
from pytorch3d.io import IO
from pytorch3d.renderer.cameras import PerspectiveCameras
from pytorch3d.structures.pointclouds import Pointclouds
from . import types
from .dataset_base import DatasetBase, FrameData
logger = logging.getLogger(__name__)
class FrameAnnotsEntry(TypedDict):
subset: Optional[str]
frame_annotation: types.FrameAnnotation
@dataclass(eq=False)
class JsonIndexDataset(DatasetBase):
"""
A dataset with annotations in json files like the Common Objects in 3D
(CO3D) dataset.
Args:
frame_annotations_file: A zipped json file containing metadata of the
frames in the dataset, serialized List[types.FrameAnnotation].
sequence_annotations_file: A zipped json file containing metadata of the
sequences in the dataset, serialized List[types.SequenceAnnotation].
subset_lists_file: A json file containing the lists of frames corresponding
corresponding to different subsets (e.g. train/val/test) of the dataset;
format: {subset: (sequence_name, frame_id, file_path)}.
subsets: Restrict frames/sequences only to the given list of subsets
as defined in subset_lists_file (see above).
limit_to: Limit the dataset to the first #limit_to frames (after other
filters have been applied).
limit_sequences_to: Limit the dataset to the first
#limit_sequences_to sequences (after other sequence filters have been
applied but before frame-based filters).
pick_sequence: A list of sequence names to restrict the dataset to.
exclude_sequence: A list of the names of the sequences to exclude.
limit_category_to: Restrict the dataset to the given list of categories.
dataset_root: The root folder of the dataset; all the paths in jsons are
specified relative to this root (but not json paths themselves).
load_images: Enable loading the frame RGB data.
load_depths: Enable loading the frame depth maps.
load_depth_masks: Enable loading the frame depth map masks denoting the
depth values used for evaluation (the points consistent across views).
load_masks: Enable loading frame foreground masks.
load_point_clouds: Enable loading sequence-level point clouds.
max_points: Cap on the number of loaded points in the point cloud;
if reached, they are randomly sampled without replacement.
mask_images: Whether to mask the images with the loaded foreground masks;
0 value is used for background.
mask_depths: Whether to mask the depth maps with the loaded foreground
masks; 0 value is used for background.
image_height: The height of the returned images, masks, and depth maps;
aspect ratio is preserved during cropping/resizing.
image_width: The width of the returned images, masks, and depth maps;
aspect ratio is preserved during cropping/resizing.
box_crop: Enable cropping of the image around the bounding box inferred
from the foreground region of the loaded segmentation mask; masks
and depth maps are cropped accordingly; cameras are corrected.
box_crop_mask_thr: The threshold used to separate pixels into foreground
and background based on the foreground_probability mask; if no value
is greater than this threshold, the loader lowers it and repeats.
box_crop_context: The amount of additional padding added to each
dimension of the cropping bounding box, relative to box size.
remove_empty_masks: Removes the frames with no active foreground pixels
in the segmentation mask after thresholding (see box_crop_mask_thr).
n_frames_per_sequence: If > 0, randomly samples #n_frames_per_sequence
frames in each sequences uniformly without replacement if it has
more frames than that; applied before other frame-level filters.
seed: The seed of the random generator sampling #n_frames_per_sequence
random frames per sequence.
sort_frames: Enable frame annotations sorting to group frames from the
same sequences together and order them by timestamps
eval_batches: A list of batches that form the evaluation set;
list of batch-sized lists of indices corresponding to __getitem__
of this class, thus it can be used directly as a batch sampler.
"""
frame_annotations_type: ClassVar[
Type[types.FrameAnnotation]
] = types.FrameAnnotation
path_manager: Optional[PathManager] = None
frame_annotations_file: str = ""
sequence_annotations_file: str = ""
subset_lists_file: str = ""
subsets: Optional[List[str]] = None
limit_to: int = 0
limit_sequences_to: int = 0
pick_sequence: Sequence[str] = ()
exclude_sequence: Sequence[str] = ()
limit_category_to: Sequence[int] = ()
dataset_root: str = ""
load_images: bool = True
load_depths: bool = True
load_depth_masks: bool = True
load_masks: bool = True
load_point_clouds: bool = False
max_points: int = 0
mask_images: bool = False
mask_depths: bool = False
image_height: Optional[int] = 256
image_width: Optional[int] = 256
box_crop: bool = False
box_crop_mask_thr: float = 0.4
box_crop_context: float = 1.0
remove_empty_masks: bool = False
n_frames_per_sequence: int = -1
seed: int = 0
sort_frames: bool = False
eval_batches: Optional[List[List[int]]] = None
frame_annots: List[FrameAnnotsEntry] = field(init=False)
seq_annots: Dict[str, types.SequenceAnnotation] = field(init=False)
    def __post_init__(self) -> None:
        """Load the annotation files and build the frame/sequence indices.

        The order of the calls matters: frames and sequences are loaded first,
        then optionally sorted, then subsets are attached, and finally the
        configured filters are applied (which also computes the per-sequence
        frame indices).
        """
        # pyre-fixme[16]: `JsonIndexDataset` has no attribute `subset_to_image_path`.
        self.subset_to_image_path = None
        self._load_frames()
        self._load_sequences()
        if self.sort_frames:
            self._sort_frames()
        self._load_subset_lists()
        self._filter_db()  # also computes sequence indices
        logger.info(str(self))
    def seq_frame_index_to_dataset_index(
        self,
        seq_frame_index: Union[
            List[List[Union[Tuple[str, int, str], Tuple[str, int]]]],
        ],
    ) -> List[List[int]]:
        """
        Obtain indices into the dataset object given a list of frames specified as
        `seq_frame_index = List[List[Tuple[sequence_name:str, frame_number:int]]]`.

        Each tuple may optionally carry a third element (the image path), which
        is then checked for consistency against the stored frame annotation.
        """
        # TODO: check the frame numbers are unique
        # Build a nested lookup: sequence name -> {frame_number: dataset index},
        # derived from the precomputed per-sequence index lists.
        _dataset_seq_frame_n_index = {
            seq: {
                self.frame_annots[idx]["frame_annotation"].frame_number: idx
                for idx in seq_idx
            }
            for seq, seq_idx in self._seq_to_idx.items()
        }
        def _get_batch_idx(seq_name, frame_no, path=None) -> int:
            # Resolve one (sequence, frame) pair to a dataset index.
            idx = _dataset_seq_frame_n_index[seq_name][frame_no]
            if path is not None:
                # Check that the loaded frame path is consistent
                # with the one stored in self.frame_annots.
                assert os.path.normpath(
                    self.frame_annots[idx]["frame_annotation"].image.path
                ) == os.path.normpath(
                    path
                ), f"Inconsistent batch {seq_name, frame_no, path}."
            return idx
        batches_idx = [[_get_batch_idx(*b) for b in batch] for batch in seq_frame_index]
        return batches_idx
def __str__(self) -> str:
return f"JsonIndexDataset #frames={len(self.frame_annots)}"
def __len__(self) -> int:
return len(self.frame_annots)
def _get_frame_type(self, entry: FrameAnnotsEntry) -> Optional[str]:
return entry["subset"]
    def __getitem__(self, index) -> FrameData:
        """
        Load the frame at position `index` of the (filtered) dataset.

        Metadata fields are always populated; images, depth maps, masks and
        point clouds are loaded according to the corresponding `load_*` flags,
        with cropping/resizing/masking applied as configured.
        """
        if index >= len(self.frame_annots):
            raise IndexError(f"index {index} out of range {len(self.frame_annots)}")
        entry = self.frame_annots[index]["frame_annotation"]
        point_cloud = self.seq_annots[entry.sequence_name].point_cloud
        frame_data = FrameData(
            frame_number=_safe_as_tensor(entry.frame_number, torch.long),
            frame_timestamp=_safe_as_tensor(entry.frame_timestamp, torch.float),
            sequence_name=entry.sequence_name,
            sequence_category=self.seq_annots[entry.sequence_name].category,
            camera_quality_score=_safe_as_tensor(
                self.seq_annots[entry.sequence_name].viewpoint_quality_score,
                torch.float,
            ),
            point_cloud_quality_score=_safe_as_tensor(
                point_cloud.quality_score, torch.float
            )
            if point_cloud is not None
            else None,
        )
        # The rest of the fields are optional
        frame_data.frame_type = self._get_frame_type(self.frame_annots[index])
        # Load the foreground mask and, when box_crop is on, the crop box;
        # the same box is reused below to crop images and depths consistently.
        (
            frame_data.fg_probability,
            frame_data.mask_path,
            frame_data.bbox_xywh,
            clamp_bbox_xyxy,
        ) = self._load_crop_fg_probability(entry)
        scale = 1.0
        if self.load_images and entry.image is not None:
            # original image size
            frame_data.image_size_hw = _safe_as_tensor(entry.image.size, torch.long)
            (
                frame_data.image_rgb,
                frame_data.image_path,
                frame_data.mask_crop,
                scale,
            ) = self._load_crop_images(
                entry, frame_data.fg_probability, clamp_bbox_xyxy
            )
        if self.load_depths and entry.depth is not None:
            (
                frame_data.depth_map,
                frame_data.depth_path,
                frame_data.depth_mask,
            ) = self._load_mask_depth(entry, clamp_bbox_xyxy, frame_data.fg_probability)
        if entry.viewpoint is not None:
            # The camera intrinsics are corrected for the crop and the resize.
            frame_data.camera = self._get_pytorch3d_camera(
                entry,
                scale,
                clamp_bbox_xyxy,
            )
        if self.load_point_clouds and point_cloud is not None:
            frame_data.sequence_point_cloud_path = pcl_path = os.path.join(
                self.dataset_root, point_cloud.path
            )
            frame_data.sequence_point_cloud = _load_pointcloud(
                self._local_path(pcl_path), max_points=self.max_points
            )
        return frame_data
    def _load_crop_fg_probability(
        self, entry: types.FrameAnnotation
    ) -> Tuple[
        Optional[torch.Tensor],
        Optional[str],
        Optional[torch.Tensor],
        Optional[torch.Tensor],
    ]:
        """
        Load the foreground mask of `entry` and compute its bounding box.

        Returns (resized foreground probability, mask path, bounding box in
        xywh, clamped crop box in xyxy); all four are None when masks are
        neither loaded nor required for cropping.
        """
        fg_probability, full_path, bbox_xywh, clamp_bbox_xyxy = (
            None,
            None,
            None,
            None,
        )
        if (self.load_masks or self.box_crop) and entry.mask is not None:
            full_path = os.path.join(self.dataset_root, entry.mask.path)
            mask = _load_mask(self._local_path(full_path))
            if mask.shape[-2:] != entry.image.size:
                raise ValueError(
                    f"bad mask size: {mask.shape[-2:]} vs {entry.image.size}!"
                )
            # Bounding box (xywh) of the thresholded foreground region.
            bbox_xywh = torch.tensor(_get_bbox_from_mask(mask, self.box_crop_mask_thr))
            if self.box_crop:
                clamp_bbox_xyxy = _get_clamp_bbox(bbox_xywh, self.box_crop_context)
                mask = _crop_around_box(mask, clamp_bbox_xyxy, full_path)
            # nearest-neighbor resize keeps mask values unblended
            fg_probability, _, _ = self._resize_image(mask, mode="nearest")
        return fg_probability, full_path, bbox_xywh, clamp_bbox_xyxy
    def _load_crop_images(
        self,
        entry: types.FrameAnnotation,
        fg_probability: Optional[torch.Tensor],
        clamp_bbox_xyxy: Optional[torch.Tensor],
    ) -> Tuple[torch.Tensor, str, torch.Tensor, float]:
        """
        Load the RGB image of `entry`, optionally crop, resize and mask it.

        Returns (image_rgb, image_path, mask_crop, scale) where `mask_crop`
        marks the valid (non-padded) region of the resized image and `scale`
        is the resize factor (needed later for the camera correction).
        """
        assert self.dataset_root is not None and entry.image is not None
        path = os.path.join(self.dataset_root, entry.image.path)
        image_rgb = _load_image(self._local_path(path))
        if image_rgb.shape[-2:] != entry.image.size:
            raise ValueError(
                f"bad image size: {image_rgb.shape[-2:]} vs {entry.image.size}!"
            )
        if self.box_crop:
            assert clamp_bbox_xyxy is not None
            image_rgb = _crop_around_box(image_rgb, clamp_bbox_xyxy, path)
        image_rgb, scale, mask_crop = self._resize_image(image_rgb)
        if self.mask_images:
            # zero out the background pixels
            assert fg_probability is not None
            image_rgb *= fg_probability
        return image_rgb, path, mask_crop, scale
    def _load_mask_depth(
        self,
        entry: types.FrameAnnotation,
        clamp_bbox_xyxy: Optional[torch.Tensor],
        fg_probability: Optional[torch.Tensor],
    ) -> Tuple[torch.Tensor, str, torch.Tensor]:
        """
        Load the depth map of `entry`, cropped/resized/masked consistently
        with the RGB image, together with the depth evaluation mask.

        Returns (depth_map, depth_path, depth_mask); when depth masks are
        not loaded, `depth_mask` is all-ones.
        """
        entry_depth = entry.depth
        assert entry_depth is not None
        path = os.path.join(self.dataset_root, entry_depth.path)
        depth_map = _load_depth(self._local_path(path), entry_depth.scale_adjustment)
        if self.box_crop:
            assert clamp_bbox_xyxy is not None
            # The depth map may be stored at a different resolution than the
            # image, so the crop box has to be rescaled accordingly.
            depth_bbox_xyxy = _rescale_bbox(
                clamp_bbox_xyxy, entry.image.size, depth_map.shape[-2:]
            )
            depth_map = _crop_around_box(depth_map, depth_bbox_xyxy, path)
        depth_map, _, _ = self._resize_image(depth_map, mode="nearest")
        if self.mask_depths:
            # zero out the background pixels
            assert fg_probability is not None
            depth_map *= fg_probability
        if self.load_depth_masks:
            assert entry_depth.mask_path is not None
            mask_path = os.path.join(self.dataset_root, entry_depth.mask_path)
            depth_mask = _load_depth_mask(self._local_path(mask_path))
            if self.box_crop:
                assert clamp_bbox_xyxy is not None
                depth_mask_bbox_xyxy = _rescale_bbox(
                    clamp_bbox_xyxy, entry.image.size, depth_mask.shape[-2:]
                )
                depth_mask = _crop_around_box(
                    depth_mask, depth_mask_bbox_xyxy, mask_path
                )
            depth_mask, _, _ = self._resize_image(depth_mask, mode="nearest")
        else:
            depth_mask = torch.ones_like(depth_map)
        return depth_map, path, depth_mask
    def _get_pytorch3d_camera(
        self,
        entry: types.FrameAnnotation,
        scale: float,
        clamp_bbox_xyxy: Optional[torch.Tensor],
    ) -> PerspectiveCameras:
        """
        Build a PerspectiveCameras object from the viewpoint annotation of
        `entry`, correcting the intrinsics for the crop (`clamp_bbox_xyxy`)
        and the resize factor (`scale`) applied to the loaded images.
        """
        entry_viewpoint = entry.viewpoint
        assert entry_viewpoint is not None
        # principal point and focal length
        principal_point = torch.tensor(
            entry_viewpoint.principal_point, dtype=torch.float
        )
        focal_length = torch.tensor(entry_viewpoint.focal_length, dtype=torch.float)
        half_image_size_wh_orig = (
            torch.tensor(list(reversed(entry.image.size)), dtype=torch.float) / 2.0
        )
        # first, we convert from the dataset's NDC convention to pixels
        format = entry_viewpoint.intrinsics_format
        if format.lower() == "ndc_norm_image_bounds":
            # this is e.g. currently used in CO3D for storing intrinsics
            rescale = half_image_size_wh_orig
        elif format.lower() == "ndc_isotropic":
            rescale = half_image_size_wh_orig.min()
        else:
            raise ValueError(f"Unknown intrinsics format: {format}")
        # principal point and focal length in pixels
        principal_point_px = half_image_size_wh_orig - principal_point * rescale
        focal_length_px = focal_length * rescale
        if self.box_crop:
            assert clamp_bbox_xyxy is not None
            # shift the principal point by the crop offset
            principal_point_px -= clamp_bbox_xyxy[:2]
        # now, convert from pixels to PyTorch3D v0.5+ NDC convention
        if self.image_height is None or self.image_width is None:
            out_size = list(reversed(entry.image.size))
        else:
            out_size = [self.image_width, self.image_height]
        half_image_size_output = torch.tensor(out_size, dtype=torch.float) / 2.0
        half_min_image_size_output = half_image_size_output.min()
        # rescaled principal point and focal length in ndc
        principal_point = (
            half_image_size_output - principal_point_px * scale
        ) / half_min_image_size_output
        focal_length = focal_length_px * scale / half_min_image_size_output
        return PerspectiveCameras(
            focal_length=focal_length[None],
            principal_point=principal_point[None],
            R=torch.tensor(entry_viewpoint.R, dtype=torch.float)[None],
            T=torch.tensor(entry_viewpoint.T, dtype=torch.float)[None],
        )
def _load_frames(self) -> None:
logger.info(f"Loading Co3D frames from {self.frame_annotations_file}.")
local_file = self._local_path(self.frame_annotations_file)
with gzip.open(local_file, "rt", encoding="utf8") as zipfile:
frame_annots_list = types.load_dataclass(
zipfile, List[self.frame_annotations_type]
)
if not frame_annots_list:
raise ValueError("Empty dataset!")
self.frame_annots = [
FrameAnnotsEntry(frame_annotation=a, subset=None) for a in frame_annots_list
]
def _load_sequences(self) -> None:
logger.info(f"Loading Co3D sequences from {self.sequence_annotations_file}.")
local_file = self._local_path(self.sequence_annotations_file)
with gzip.open(local_file, "rt", encoding="utf8") as zipfile:
seq_annots = types.load_dataclass(zipfile, List[types.SequenceAnnotation])
if not seq_annots:
raise ValueError("Empty sequences file!")
self.seq_annots = {entry.sequence_name: entry for entry in seq_annots}
def _load_subset_lists(self) -> None:
logger.info(f"Loading Co3D subset lists from {self.subset_lists_file}.")
if not self.subset_lists_file:
return
with open(self._local_path(self.subset_lists_file), "r") as f:
subset_to_seq_frame = json.load(f)
frame_path_to_subset = {
path: subset
for subset, frames in subset_to_seq_frame.items()
for _, _, path in frames
}
for frame in self.frame_annots:
frame["subset"] = frame_path_to_subset.get(
frame["frame_annotation"].image.path, None
)
if frame["subset"] is None:
warnings.warn(
"Subset lists are given but don't include "
+ frame["frame_annotation"].image.path
)
def _sort_frames(self) -> None:
# Sort frames to have them grouped by sequence, ordered by timestamp
self.frame_annots = sorted(
self.frame_annots,
key=lambda f: (
f["frame_annotation"].sequence_name,
f["frame_annotation"].frame_timestamp or 0,
),
)
    def _filter_db(self) -> None:
        """
        Apply all configured dataset filters in place, in this order:
        empty-mask removal, subset restriction, category restriction,
        sequence pick/exclude, sequence-count limit, per-sequence frame
        sampling, and the global frame limit. Rebuilds the sequence indices
        whenever the frame list changes.
        """
        if self.remove_empty_masks:
            logger.info("Removing images with empty masks.")
            old_len = len(self.frame_annots)
            msg = "remove_empty_masks needs every MaskAnnotation.mass to be set."
            def positive_mass(frame_annot: types.FrameAnnotation) -> bool:
                # A frame is kept only if its mask has more than one active pixel.
                mask = frame_annot.mask
                if mask is None:
                    return False
                if mask.mass is None:
                    raise ValueError(msg)
                return mask.mass > 1
            self.frame_annots = [
                frame
                for frame in self.frame_annots
                if positive_mass(frame["frame_annotation"])
            ]
            logger.info("... filtered %d -> %d" % (old_len, len(self.frame_annots)))
        # this has to be called after joining with categories!!
        subsets = self.subsets
        if subsets:
            if not self.subset_lists_file:
                raise ValueError(
                    "Subset filter is on but subset_lists_file was not given"
                )
            logger.info(f"Limiting Co3D dataset to the '{subsets}' subsets.")
            # truncate the list of subsets to the valid one
            self.frame_annots = [
                entry for entry in self.frame_annots if entry["subset"] in subsets
            ]
            if len(self.frame_annots) == 0:
                raise ValueError(f"There are no frames in the '{subsets}' subsets!")
            self._invalidate_indexes(filter_seq_annots=True)
        if len(self.limit_category_to) > 0:
            logger.info(f"Limiting dataset to categories: {self.limit_category_to}")
            self.seq_annots = {
                name: entry
                for name, entry in self.seq_annots.items()
                if entry.category in self.limit_category_to
            }
        # sequence filters
        for prefix in ("pick", "exclude"):
            orig_len = len(self.seq_annots)
            attr = f"{prefix}_sequence"
            arr = getattr(self, attr)
            if len(arr) > 0:
                logger.info(f"{attr}: {str(arr)}")
                # keep (for "pick") or drop (for "exclude") the listed sequences
                self.seq_annots = {
                    name: entry
                    for name, entry in self.seq_annots.items()
                    if (name in arr) == (prefix == "pick")
                }
                logger.info("... filtered %d -> %d" % (orig_len, len(self.seq_annots)))
        if self.limit_sequences_to > 0:
            self.seq_annots = dict(
                islice(self.seq_annots.items(), self.limit_sequences_to)
            )
        # retain only frames from retained sequences
        self.frame_annots = [
            f
            for f in self.frame_annots
            if f["frame_annotation"].sequence_name in self.seq_annots
        ]
        self._invalidate_indexes()
        if self.n_frames_per_sequence > 0:
            logger.info(f"Taking max {self.n_frames_per_sequence} per sequence.")
            keep_idx = []
            for seq, seq_indices in self._seq_to_idx.items():
                # infer the seed from the sequence name, this is reproducible
                # and makes the selection differ for different sequences
                seed = _seq_name_to_seed(seq) + self.seed
                seq_idx_shuffled = random.Random(seed).sample(
                    sorted(seq_indices), len(seq_indices)
                )
                keep_idx.extend(seq_idx_shuffled[: self.n_frames_per_sequence])
            logger.info(
                "... filtered %d -> %d" % (len(self.frame_annots), len(keep_idx))
            )
            self.frame_annots = [self.frame_annots[i] for i in keep_idx]
            self._invalidate_indexes(filter_seq_annots=False)
            # sequences are not decimated, so self.seq_annots is valid
        if self.limit_to > 0 and self.limit_to < len(self.frame_annots):
            logger.info(
                "limit_to: filtered %d -> %d" % (len(self.frame_annots), self.limit_to)
            )
            self.frame_annots = self.frame_annots[: self.limit_to]
            self._invalidate_indexes(filter_seq_annots=True)
def _invalidate_indexes(self, filter_seq_annots: bool = False) -> None:
# update _seq_to_idx and filter seq_meta according to frame_annots change
# if filter_seq_annots, also uldates seq_annots based on the changed _seq_to_idx
self._invalidate_seq_to_idx()
if filter_seq_annots:
self.seq_annots = {
k: v for k, v in self.seq_annots.items() if k in self._seq_to_idx
}
def _invalidate_seq_to_idx(self) -> None:
seq_to_idx = defaultdict(list)
for idx, entry in enumerate(self.frame_annots):
seq_to_idx[entry["frame_annotation"].sequence_name].append(idx)
self._seq_to_idx = seq_to_idx
    def _resize_image(
        self, image, mode="bilinear"
    ) -> Tuple[torch.Tensor, float, torch.Tensor]:
        """
        Resize a CHW numpy image to (image_height, image_width), preserving
        the aspect ratio and zero-padding the remainder.

        Returns (resized image tensor, scale factor applied, 1xHxW mask of
        the valid un-padded region). If the target size is unset, the image
        is returned unscaled with an all-ones mask.
        """
        image_height, image_width = self.image_height, self.image_width
        if image_height is None or image_width is None:
            # skip the resizing
            imre_ = torch.from_numpy(image)
            return imre_, 1.0, torch.ones_like(imre_[:1])
        # takes numpy array, returns pytorch tensor
        minscale = min(
            image_height / image.shape[-2],
            image_width / image.shape[-1],
        )
        imre = torch.nn.functional.interpolate(
            torch.from_numpy(image)[None],
            # pyre-ignore[6]
            scale_factor=minscale,
            mode=mode,
            align_corners=False if mode == "bilinear" else None,
            recompute_scale_factor=True,
        )[0]
        # paste the rescaled image into the top-left corner of a zero canvas
        imre_ = torch.zeros(image.shape[0], self.image_height, self.image_width)
        imre_[:, 0 : imre.shape[1], 0 : imre.shape[2]] = imre
        mask = torch.zeros(1, self.image_height, self.image_width)
        # NOTE(review): the valid-region mask stops one pixel short of the
        # resized extent (shape - 1) -- presumably to exclude boundary
        # interpolation artifacts; confirm before relying on exact mask sizes.
        mask[:, 0 : imre.shape[1] - 1, 0 : imre.shape[2] - 1] = 1.0
        return imre_, minscale, mask
def _local_path(self, path: str) -> str:
if self.path_manager is None:
return path
return self.path_manager.get_local_path(path)
def get_frame_numbers_and_timestamps(
self, idxs: Sequence[int]
) -> List[Tuple[int, float]]:
out: List[Tuple[int, float]] = []
for idx in idxs:
frame_annotation = self.frame_annots[idx]["frame_annotation"]
out.append(
(frame_annotation.frame_number, frame_annotation.frame_timestamp)
)
return out
def get_eval_batches(self) -> Optional[List[List[int]]]:
return self.eval_batches
def _seq_name_to_seed(seq_name) -> int:
return int(hashlib.sha1(seq_name.encode("utf-8")).hexdigest(), 16)
def _load_image(path) -> np.ndarray:
    """Load an RGB image as a float32 CHW array with values scaled to [0, 1]."""
    with Image.open(path) as pil_im:
        rgb_hwc = np.array(pil_im.convert("RGB"))
    rgb_chw = rgb_hwc.transpose((2, 0, 1))
    return rgb_chw.astype(np.float32) / 255.0
def _load_16big_png_depth(depth_png) -> np.ndarray:
    """Load a 16-bit png depth map as a float32 HxW array."""
    with Image.open(depth_png) as depth_pil:
        # the image is stored with 16-bit depth but PIL reads it as I (32 bit).
        # we cast it to uint16, then reinterpret as float16, then cast to float32
        depth = (
            np.frombuffer(np.array(depth_pil, dtype=np.uint16), dtype=np.float16)
            .astype(np.float32)
            .reshape((depth_pil.size[1], depth_pil.size[0]))
        )
    return depth
def _load_1bit_png_mask(file: str) -> np.ndarray:
    """Load a binary png mask as a float32 HxW array of {0.0, 1.0}."""
    with Image.open(file) as pil_im:
        gray = np.array(pil_im.convert("L"))
    return (gray > 0.0).astype(np.float32)
def _load_depth_mask(path) -> np.ndarray:
if not path.lower().endswith(".png"):
raise ValueError('unsupported depth mask file name "%s"' % path)
m = _load_1bit_png_mask(path)
return m[None] # fake feature channel
def _load_depth(path, scale_adjustment) -> np.ndarray:
if not path.lower().endswith(".png"):
raise ValueError('unsupported depth file name "%s"' % path)
d = _load_16big_png_depth(path) * scale_adjustment
d[~np.isfinite(d)] = 0.0
return d[None] # fake feature channel
def _load_mask(path) -> np.ndarray:
    """Load a foreground-probability mask as a 1xHxW float32 array in [0, 1]."""
    with Image.open(path) as pil_im:
        raw = np.array(pil_im)
    probability = raw.astype(np.float32) / 255.0
    return probability[None]  # fake feature channel
def _get_1d_bounds(arr) -> Tuple[int, int]:
nz = np.flatnonzero(arr)
return nz[0], nz[-1]
def _get_bbox_from_mask(
    mask, thr, decrease_quant: float = 0.05
) -> Tuple[int, int, int, int]:
    """Compute the xywh bounding box of the pixels of `mask` above `thr`.

    If thresholding leaves at most one active pixel, `thr` is repeatedly
    lowered by `decrease_quant` (with a warning once it reaches zero, at
    which point the whole image passes the threshold).
    """
    binarized = np.zeros_like(mask)
    while binarized.sum() <= 1.0:
        binarized = (mask > thr).astype(np.float32)
        thr -= decrease_quant
        if thr <= 0.0:
            warnings.warn(f"Empty masks_for_bbox (thr={thr}) => using full image.")
    x0, x1 = _get_1d_bounds(binarized.sum(axis=-2))
    y0, y1 = _get_1d_bounds(binarized.sum(axis=-1))
    return x0, y0, x1 - x0, y1 - y0
def _get_clamp_bbox(
bbox: torch.Tensor, box_crop_context: float = 0.0, impath: str = ""
) -> torch.Tensor:
# box_crop_context: rate of expansion for bbox
# returns possibly expanded bbox xyxy as float
# increase box size
if box_crop_context > 0.0:
c = box_crop_context
bbox = bbox.float()
bbox[0] -= bbox[2] * c / 2
bbox[1] -= bbox[3] * c / 2
bbox[2] += bbox[2] * c
bbox[3] += bbox[3] * c
if (bbox[2:] <= 1.0).any():
raise ValueError(
f"squashed image {impath}!! The bounding box contains no pixels."
)
bbox[2:] = torch.clamp(bbox[2:], 2)
bbox[2:] += bbox[0:2] + 1 # convert to [xmin, ymin, xmax, ymax]
# +1 because upper bound is not inclusive
return bbox
def _crop_around_box(tensor, bbox, impath: str = ""):
# bbox is xyxy, where the upper bound is corrected with +1
bbox[[0, 2]] = torch.clamp(bbox[[0, 2]], 0.0, tensor.shape[-1])
bbox[[1, 3]] = torch.clamp(bbox[[1, 3]], 0.0, tensor.shape[-2])
bbox = bbox.round().long()
tensor = tensor[..., bbox[1] : bbox[3], bbox[0] : bbox[2]]
assert all(c > 0 for c in tensor.shape), f"squashed image {impath}"
return tensor
def _rescale_bbox(bbox: torch.Tensor, orig_res, new_res) -> torch.Tensor:
assert bbox is not None
assert np.prod(orig_res) > 1e-8
# average ratio of dimensions
rel_size = (new_res[0] / orig_res[0] + new_res[1] / orig_res[1]) / 2.0
return bbox * rel_size
def _safe_as_tensor(data, dtype):
if data is None:
return None
return torch.tensor(data, dtype=dtype)
# NOTE this cache is per-worker; they are implemented as processes.
# each batch is loaded and collated by a single worker;
# since sequences tend to co-occur within batches, caching the
# sequence-level point cloud avoids repeated disk reads.
@functools.lru_cache(maxsize=256)
def _load_pointcloud(pcl_path: Union[str, Path], max_points: int = 0) -> Pointclouds:
    """Load a pointcloud file, subsampled to at most `max_points` points."""
    pcl = IO().load_pointcloud(pcl_path)
    return pcl.subsample(max_points) if max_points > 0 else pcl
| [
"logging.getLogger",
"numpy.prod",
"gzip.open",
"dataclasses.dataclass",
"torch.from_numpy",
"numpy.array",
"numpy.isfinite",
"random.Random",
"numpy.flatnonzero",
"os.path.normpath",
"pytorch3d.io.IO",
"warnings.warn",
"dataclasses.field",
"torch.ones_like",
"torch.clamp",
"PIL.Image.... | [((908, 935), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (925, 935), False, 'import logging\n'), ((1046, 1065), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)'}), '(eq=False)\n', (1055, 1065), False, 'from dataclasses import dataclass, field\n'), ((30535, 30567), 'functools.lru_cache', 'functools.lru_cache', ([], {'maxsize': '(256)'}), '(maxsize=256)\n', (30554, 30567), False, 'import functools\n'), ((6340, 6357), 'dataclasses.field', 'field', ([], {'init': '(False)'}), '(init=False)\n', (6345, 6357), False, 'from dataclasses import dataclass, field\n'), ((6412, 6429), 'dataclasses.field', 'field', ([], {'init': '(False)'}), '(init=False)\n', (6417, 6429), False, 'from dataclasses import dataclass, field\n'), ((28144, 28163), 'numpy.flatnonzero', 'np.flatnonzero', (['arr'], {}), '(arr)\n', (28158, 28163), True, 'import numpy as np\n'), ((28331, 28350), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (28344, 28350), True, 'import numpy as np\n'), ((29348, 29372), 'torch.clamp', 'torch.clamp', (['bbox[2:]', '(2)'], {}), '(bbox[2:], 2)\n', (29359, 29372), False, 'import torch\n'), ((29643, 29691), 'torch.clamp', 'torch.clamp', (['bbox[[0, 2]]', '(0.0)', 'tensor.shape[-1]'], {}), '(bbox[[0, 2]], 0.0, tensor.shape[-1])\n', (29654, 29691), False, 'import torch\n'), ((29711, 29759), 'torch.clamp', 'torch.clamp', (['bbox[[1, 3]]', '(0.0)', 'tensor.shape[-2]'], {}), '(bbox[[1, 3]], 0.0, tensor.shape[-2])\n', (29722, 29759), False, 'import torch\n'), ((30309, 30340), 'torch.tensor', 'torch.tensor', (['data'], {'dtype': 'dtype'}), '(data, dtype=dtype)\n', (30321, 30340), False, 'import torch\n'), ((12619, 12668), 'os.path.join', 'os.path.join', (['self.dataset_root', 'entry.image.path'], {}), '(self.dataset_root, entry.image.path)\n', (12631, 12668), False, 'import os\n'), ((13601, 13650), 'os.path.join', 'os.path.join', (['self.dataset_root', 'entry_depth.path'], {}), '(self.dataset_root, 
entry_depth.path)\n', (13613, 13650), False, 'import os\n'), ((15331, 15395), 'torch.tensor', 'torch.tensor', (['entry_viewpoint.principal_point'], {'dtype': 'torch.float'}), '(entry_viewpoint.principal_point, dtype=torch.float)\n', (15343, 15395), False, 'import torch\n'), ((15441, 15502), 'torch.tensor', 'torch.tensor', (['entry_viewpoint.focal_length'], {'dtype': 'torch.float'}), '(entry_viewpoint.focal_length, dtype=torch.float)\n', (15453, 15502), False, 'import torch\n'), ((24389, 24406), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (24400, 24406), False, 'from collections import defaultdict\n'), ((25437, 25501), 'torch.zeros', 'torch.zeros', (['image.shape[0]', 'self.image_height', 'self.image_width'], {}), '(image.shape[0], self.image_height, self.image_width)\n', (25448, 25501), False, 'import torch\n'), ((25579, 25630), 'torch.zeros', 'torch.zeros', (['(1)', 'self.image_height', 'self.image_width'], {}), '(1, self.image_height, self.image_width)\n', (25590, 25630), False, 'import torch\n'), ((26545, 26561), 'PIL.Image.open', 'Image.open', (['path'], {}), '(path)\n', (26555, 26561), False, 'from PIL import Image\n'), ((26767, 26788), 'PIL.Image.open', 'Image.open', (['depth_png'], {}), '(depth_png)\n', (26777, 26788), False, 'from PIL import Image\n'), ((27246, 27262), 'PIL.Image.open', 'Image.open', (['file'], {}), '(file)\n', (27256, 27262), False, 'from PIL import Image\n'), ((27940, 27956), 'PIL.Image.open', 'Image.open', (['path'], {}), '(path)\n', (27950, 27956), False, 'from PIL import Image\n'), ((27983, 27999), 'numpy.array', 'np.array', (['pil_im'], {}), '(pil_im)\n', (27991, 27999), True, 'import numpy as np\n'), ((28502, 28573), 'warnings.warn', 'warnings.warn', (['f"""Empty masks_for_bbox (thr={thr}) => using full image."""'], {}), "(f'Empty masks_for_bbox (thr={thr}) => using full image.')\n", (28515, 28573), False, 'import warnings\n'), ((30060, 30077), 'numpy.prod', 'np.prod', (['orig_res'], {}), '(orig_res)\n', 
(30067, 30077), True, 'import numpy as np\n'), ((10859, 10908), 'os.path.join', 'os.path.join', (['self.dataset_root', 'point_cloud.path'], {}), '(self.dataset_root, point_cloud.path)\n', (10871, 10908), False, 'import os\n'), ((11582, 11630), 'os.path.join', 'os.path.join', (['self.dataset_root', 'entry.mask.path'], {}), '(self.dataset_root, entry.mask.path)\n', (11594, 11630), False, 'import os\n'), ((14318, 14372), 'os.path.join', 'os.path.join', (['self.dataset_root', 'entry_depth.mask_path'], {}), '(self.dataset_root, entry_depth.mask_path)\n', (14330, 14372), False, 'import os\n'), ((14922, 14948), 'torch.ones_like', 'torch.ones_like', (['depth_map'], {}), '(depth_map)\n', (14937, 14948), False, 'import torch\n'), ((16726, 16767), 'torch.tensor', 'torch.tensor', (['out_size'], {'dtype': 'torch.float'}), '(out_size, dtype=torch.float)\n', (16738, 16767), False, 'import torch\n'), ((17590, 17634), 'gzip.open', 'gzip.open', (['local_file', '"""rt"""'], {'encoding': '"""utf8"""'}), "(local_file, 'rt', encoding='utf8')\n", (17599, 17634), False, 'import gzip\n'), ((18193, 18237), 'gzip.open', 'gzip.open', (['local_file', '"""rt"""'], {'encoding': '"""utf8"""'}), "(local_file, 'rt', encoding='utf8')\n", (18202, 18237), False, 'import gzip\n'), ((18785, 18797), 'json.load', 'json.load', (['f'], {}), '(f)\n', (18794, 18797), False, 'import json\n'), ((24869, 24892), 'torch.from_numpy', 'torch.from_numpy', (['image'], {}), '(image)\n', (24885, 24892), False, 'import torch\n'), ((27828, 27842), 'numpy.isfinite', 'np.isfinite', (['d'], {}), '(d)\n', (27839, 27842), True, 'import numpy as np\n'), ((30664, 30668), 'pytorch3d.io.IO', 'IO', ([], {}), '()\n', (30666, 30668), False, 'from pytorch3d.io import IO\n'), ((19192, 19294), 'warnings.warn', 'warnings.warn', (['("Subset lists are given but don\'t include " + frame[\'frame_annotation\'].\n image.path)'], {}), '("Subset lists are given but don\'t include " + frame[\n \'frame_annotation\'].image.path)\n', (19205, 19294), 
False, 'import warnings\n'), ((24924, 24950), 'torch.ones_like', 'torch.ones_like', (['imre_[:1]'], {}), '(imre_[:1])\n', (24939, 24950), False, 'import torch\n'), ((7842, 7913), 'os.path.normpath', 'os.path.normpath', (["self.frame_annots[idx]['frame_annotation'].image.path"], {}), "(self.frame_annots[idx]['frame_annotation'].image.path)\n", (7858, 7913), False, 'import os\n'), ((7955, 7977), 'os.path.normpath', 'os.path.normpath', (['path'], {}), '(path)\n', (7971, 7977), False, 'import os\n'), ((17253, 17303), 'torch.tensor', 'torch.tensor', (['entry_viewpoint.R'], {'dtype': 'torch.float'}), '(entry_viewpoint.R, dtype=torch.float)\n', (17265, 17303), False, 'import torch\n'), ((17325, 17375), 'torch.tensor', 'torch.tensor', (['entry_viewpoint.T'], {'dtype': 'torch.float'}), '(entry_viewpoint.T, dtype=torch.float)\n', (17337, 17375), False, 'import torch\n'), ((25184, 25207), 'torch.from_numpy', 'torch.from_numpy', (['image'], {}), '(image)\n', (25200, 25207), False, 'import torch\n'), ((23035, 23054), 'random.Random', 'random.Random', (['seed'], {}), '(seed)\n', (23048, 23054), False, 'import random\n'), ((27009, 27045), 'numpy.array', 'np.array', (['depth_pil'], {'dtype': 'np.uint16'}), '(depth_pil, dtype=np.uint16)\n', (27017, 27045), True, 'import numpy as np\n')] |