code
stringlengths
22
1.05M
apis
listlengths
1
3.31k
extract_api
stringlengths
75
3.25M
from django.contrib.auth.models import User from rest_framework import viewsets from rest_framework import permissions from rest_framework.response import Response from blog.models import Post from blog.serializers import PostSerializer from blog.serializers import UserSerializer from blog.permissions import IsAuthorOrReadOnly class UserViewSet(viewsets.ReadOnlyModelViewSet): """This viewset automatically provides `list` and `detail` actions.""" queryset = User.objects.all() serializer_class = UserSerializer class PostViewSet(viewsets.ModelViewSet): """This viewset automatically provides `list`, `create`, `retrieve`, `update` and `destroy` actions. """ queryset = Post.objects.all() serializer_class = PostSerializer permission_classes = [ permissions.IsAuthenticatedOrReadOnly, IsAuthorOrReadOnly, ] def perform_create(self, serializer): serializer.save(author=self.request.user)
[ "django.contrib.auth.models.User.objects.all", "blog.models.Post.objects.all" ]
[((473, 491), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (489, 491), False, 'from django.contrib.auth.models import User\n'), ((706, 724), 'blog.models.Post.objects.all', 'Post.objects.all', ([], {}), '()\n', (722, 724), False, 'from blog.models import Post\n')]
# -*- coding: utf-8 -*- # # Copyright 2017 - Swiss Data Science Center (SDSC) # A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Pytest configuration.""" import json import os import shutil import sys import tempfile import time import types import pytest import responses from click.testing import CliRunner @pytest.fixture(scope='module') def renku_path(tmpdir_factory): """Temporary instance path.""" path = str(tmpdir_factory.mktemp('renku')) yield path shutil.rmtree(path) @pytest.fixture() def instance_path(renku_path, monkeypatch): """Temporary instance path.""" orig_pwd = os.getcwd() with monkeypatch.context() as m: m.chdir(renku_path) yield renku_path @pytest.fixture() def runner(monkeypatch): """Create a runner on isolated filesystem.""" from renku.cli._config import RENKU_HOME monkeypatch.setenv('RENKU_CONFIG', RENKU_HOME) return CliRunner() @pytest.fixture() def run(runner, capsys): """Return a callable runner.""" import contextlib from renku import cli @contextlib.contextmanager def chdir(path): """Change the current working directory.""" cwd = os.getcwd() os.chdir(path) try: yield finally: os.chdir(cwd) class redirect_stdin(contextlib.ContextDecorator): """Implement missing redirect stdin based on ``contextlib.py``.""" _stream = 'stdin' def __init__(self, new_target): """Keep the original stream.""" self._new_target = new_target # We use a list 
of old targets to make this CM re-entrant self._old_targets = [] def __enter__(self): """Change the stream value.""" self._old_targets.append(getattr(sys, self._stream)) setattr(sys, self._stream, self._new_target) return self._new_target def __exit__(self, exctype, excinst, exctb): """Restore the stream value.""" setattr(sys, self._stream, self._old_targets.pop()) managers = { 'stdout': lambda path: contextlib.redirect_stdout(path.open('wb')), 'stderr': lambda path: contextlib.redirect_stderr(path.open('wb')), 'stdin': lambda path: redirect_stdin( path.open('rb') if not hasattr(path, 'read') else path ), } def generate(args=('update', ), cwd=None, **streams): """Generate an output.""" with capsys.disabled(), contextlib.ExitStack() as stack: for name, stream in streams.items(): stack.enter_context(managers[name](stream)) if cwd is not None: stack.enter_context(chdir(str(cwd))) try: cli.cli.main( args=args, prog_name=runner.get_default_prog_name(cli.cli), ) except SystemExit as e: return 0 if e.code is None else e.code except Exception: raise return generate @pytest.fixture() def isolated_runner(monkeypatch): """Create a runner on isolated filesystem.""" from renku.cli._config import RENKU_HOME monkeypatch.setenv('RENKU_CONFIG', RENKU_HOME) runner_ = CliRunner() with runner_.isolated_filesystem(): yield runner_ @pytest.fixture() def data_file(tmpdir): """Create a sample data file.""" p = tmpdir.mkdir('data').join('file') p.write('1234') return p @pytest.fixture(scope='module') def repository(): """Yield a Renku repository.""" from renku import cli from renku.api import LocalClient runner = CliRunner() with runner.isolated_filesystem() as project_path: result = runner.invoke(cli.cli, ['init', '.'], catch_exceptions=False) assert result.exit_code == 0 yield project_path @pytest.fixture def project(repository): """Create a test project.""" from git import Repo repo = Repo(repository) commit = repo.head.commit os.chdir(repository) yield repository os.chdir(repository) 
repo.head.reset(commit, index=True, working_tree=True) # remove any extra non-tracked files (.pyc, etc) repo.git.clean('-xdff') @pytest.fixture() def client(repository): """Return a Renku repository.""" from git import Repo from renku.api import LocalClient repo = Repo(repository) commit = repo.head.commit os.chdir(repository) yield LocalClient(path=repository) os.chdir(repository) repo.head.reset(commit, index=True, working_tree=True) # remove any extra non-tracked files (.pyc, etc) repo.git.clean('-xdff') @pytest.fixture() def dataset(client): """Create a dataset.""" with client.with_dataset(name='dataset') as dataset: dataset.authors = { 'name': 'me', 'email': '<EMAIL>', } return dataset @pytest.fixture() def dataset_responses(): """Authentication responses.""" with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps: def request_callback(request): return (200, {'Content-Type': 'application/text'}, '1234') rsps.add_callback( responses.GET, 'http://example.com/file', callback=request_callback ) rsps.add_callback( responses.GET, 'https://example.com/file', callback=request_callback ) yield rsps @pytest.fixture(scope='module') def directory_tree(tmpdir_factory): """Create a test directory tree.""" # initialize p = tmpdir_factory.mktemp('directory_tree') p.join('file').write('1234') p.join('dir2').mkdir() p.join('dir2/file2').write('5678') return p @pytest.fixture(scope='module') def data_repository(directory_tree): """Create a test repo.""" from git import Repo, Actor # initialize repo = Repo.init(directory_tree.strpath) # add a file repo.index.add([directory_tree.join('file').strpath]) repo.index.commit('test commit', author=Actor('me', '<EMAIL>')) # commit changes to the same file with a different user directory_tree.join('file').write('5678') repo.index.add([directory_tree.join('file').strpath]) repo.index.commit('test commit', author=Actor('me2', '<EMAIL>')) # commit a second file repo.index.add([directory_tree.join('dir2/file2').strpath]) 
repo.index.commit('test commit', author=Actor('me', '<EMAIL>')) # return the repo return repo @pytest.fixture(autouse=True) def add_client(doctest_namespace): """Add Renku client to doctest namespace.""" from renku.api import LocalClient doctest_namespace['client'] = LocalClient(path=tempfile.mkdtemp())
[ "git.Repo.init", "responses.RequestsMock", "click.testing.CliRunner", "os.getcwd", "shutil.rmtree", "os.chdir", "renku.api.LocalClient", "git.Actor", "tempfile.mkdtemp", "contextlib.ExitStack", "git.Repo", "pytest.fixture" ]
[((942, 972), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (956, 972), False, 'import pytest\n'), ((1129, 1145), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1143, 1145), False, 'import pytest\n'), ((1346, 1362), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1360, 1362), False, 'import pytest\n'), ((1560, 1576), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1574, 1576), False, 'import pytest\n'), ((3705, 3721), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (3719, 3721), False, 'import pytest\n'), ((3993, 4009), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (4007, 4009), False, 'import pytest\n'), ((4148, 4178), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (4162, 4178), False, 'import pytest\n'), ((4897, 4913), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (4911, 4913), False, 'import pytest\n'), ((5331, 5347), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (5345, 5347), False, 'import pytest\n'), ((5572, 5588), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (5586, 5588), False, 'import pytest\n'), ((6145, 6175), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (6159, 6175), False, 'import pytest\n'), ((6432, 6462), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (6446, 6462), False, 'import pytest\n'), ((7204, 7232), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (7218, 7232), False, 'import pytest\n'), ((1106, 1125), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (1119, 1125), False, 'import shutil\n'), ((1240, 1251), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1249, 1251), False, 'import os\n'), ((1545, 1556), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (1554, 1556), False, 'from click.testing import CliRunner\n'), ((3916, 3927), 'click.testing.CliRunner', 
'CliRunner', ([], {}), '()\n', (3925, 3927), False, 'from click.testing import CliRunner\n'), ((4310, 4321), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (4319, 4321), False, 'from click.testing import CliRunner\n'), ((4635, 4651), 'git.Repo', 'Repo', (['repository'], {}), '(repository)\n', (4639, 4651), False, 'from git import Repo, Actor\n'), ((4687, 4707), 'os.chdir', 'os.chdir', (['repository'], {}), '(repository)\n', (4695, 4707), False, 'import os\n'), ((4733, 4753), 'os.chdir', 'os.chdir', (['repository'], {}), '(repository)\n', (4741, 4753), False, 'import os\n'), ((5051, 5067), 'git.Repo', 'Repo', (['repository'], {}), '(repository)\n', (5055, 5067), False, 'from git import Repo, Actor\n'), ((5103, 5123), 'os.chdir', 'os.chdir', (['repository'], {}), '(repository)\n', (5111, 5123), False, 'import os\n'), ((5167, 5187), 'os.chdir', 'os.chdir', (['repository'], {}), '(repository)\n', (5175, 5187), False, 'import os\n'), ((6590, 6623), 'git.Repo.init', 'Repo.init', (['directory_tree.strpath'], {}), '(directory_tree.strpath)\n', (6599, 6623), False, 'from git import Repo, Actor\n'), ((1806, 1817), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1815, 1817), False, 'import os\n'), ((1826, 1840), 'os.chdir', 'os.chdir', (['path'], {}), '(path)\n', (1834, 1840), False, 'import os\n'), ((5134, 5162), 'renku.api.LocalClient', 'LocalClient', ([], {'path': 'repository'}), '(path=repository)\n', (5145, 5162), False, 'from renku.api import LocalClient\n'), ((5659, 5718), 'responses.RequestsMock', 'responses.RequestsMock', ([], {'assert_all_requests_are_fired': '(False)'}), '(assert_all_requests_are_fired=False)\n', (5681, 5718), False, 'import responses\n'), ((1901, 1914), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (1909, 1914), False, 'import os\n'), ((3144, 3166), 'contextlib.ExitStack', 'contextlib.ExitStack', ([], {}), '()\n', (3164, 3166), False, 'import contextlib\n'), ((6744, 6766), 'git.Actor', 'Actor', (['"""me"""', '"""<EMAIL>"""'], {}), 
"('me', '<EMAIL>')\n", (6749, 6766), False, 'from git import Repo, Actor\n'), ((6977, 7000), 'git.Actor', 'Actor', (['"""me2"""', '"""<EMAIL>"""'], {}), "('me2', '<EMAIL>')\n", (6982, 7000), False, 'from git import Repo, Actor\n'), ((7138, 7160), 'git.Actor', 'Actor', (['"""me"""', '"""<EMAIL>"""'], {}), "('me', '<EMAIL>')\n", (7143, 7160), False, 'from git import Repo, Actor\n'), ((7406, 7424), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (7422, 7424), False, 'import tempfile\n')]
# -*- coding: utf-8 -*- """ --- Description --- Module: Logger.py Abstract: A module for logging Modified: threemeninaboat3247 2018/04/30 --- End --- """ # Standard library imports import logging logger = logging.getLogger('Kuchinawa Log') # ログレベルの設定 logger.setLevel(10) # ログのファイル出力先を設定 fh = logging.FileHandler('kuchinawa.log') logger.addHandler(fh) # ログのコンソール出力の設定 sh = logging.StreamHandler() logger.addHandler(sh) # ログの出力形式の設定 formatter = logging.Formatter('%(asctime)s:%(lineno)d:%(levelname)s:%(message)s') fh.setFormatter(formatter) sh.setFormatter(formatter)
[ "logging.getLogger", "logging.Formatter", "logging.StreamHandler", "logging.FileHandler" ]
[((255, 289), 'logging.getLogger', 'logging.getLogger', (['"""Kuchinawa Log"""'], {}), "('Kuchinawa Log')\n", (272, 289), False, 'import logging\n'), ((346, 382), 'logging.FileHandler', 'logging.FileHandler', (['"""kuchinawa.log"""'], {}), "('kuchinawa.log')\n", (365, 382), False, 'import logging\n'), ((428, 451), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (449, 451), False, 'import logging\n'), ((501, 570), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s:%(lineno)d:%(levelname)s:%(message)s"""'], {}), "('%(asctime)s:%(lineno)d:%(levelname)s:%(message)s')\n", (518, 570), False, 'import logging\n')]
""" Copyright 2012, 2013 UW Information Technology, University of Washington Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from django.test import TestCase from django.conf import settings from django.test.client import Client from spotseeker_server.models import Spot, SpotExtendedInfo, SpotType import simplejson as json from django.test.utils import override_settings from mock import patch from django.core import cache from spotseeker_server import models @override_settings(SPOTSEEKER_AUTH_MODULE='spotseeker_server.auth.all_ok') class SpotSearchCapacityTest(TestCase): def test_capacity(self): dummy_cache = cache.get_cache('django.core.cache.backends.dummy.DummyCache') with patch.object(models, 'cache', dummy_cache): spot1 = Spot.objects.create(name="capacity: 1", capacity=1) spot1.save() spot2 = Spot.objects.create(name="capacity: 2", capacity=2) spot2.save() spot3 = Spot.objects.create(name="capacity: 3", capacity=3) spot3.save() spot4 = Spot.objects.create(name="capacity: 4", capacity=4) spot4.save() spot5 = Spot.objects.create(name="capacity: 50", capacity=50) spot5.save() c = Client() response = c.get("/api/v1/spot", {'capacity': '', 'name': 'capacity'}) self.assertEquals(response["Content-Type"], "application/json", "Has the json header") spots = json.loads(response.content) has_1 = False has_2 = False has_3 = False has_4 = False has_5 = False for spot in spots: if spot['id'] == spot1.pk: has_1 = True if spot['id'] == spot2.pk: has_2 = True if spot['id'] == spot3.pk: has_3 = True if 
spot['id'] == spot4.pk: has_4 = True if spot['id'] == spot5.pk: has_5 = True self.assertEquals(has_1, True) self.assertEquals(has_2, True) self.assertEquals(has_3, True) self.assertEquals(has_4, True) self.assertEquals(has_5, True) response = c.get("/api/v1/spot", {'capacity': '1'}) self.assertEquals(response["Content-Type"], "application/json", "Has the json header") spots = json.loads(response.content) has_1 = False has_2 = False has_3 = False has_4 = False has_5 = False for spot in spots: if spot['id'] == spot1.pk: has_1 = True if spot['id'] == spot2.pk: has_2 = True if spot['id'] == spot3.pk: has_3 = True if spot['id'] == spot4.pk: has_4 = True if spot['id'] == spot5.pk: has_5 = True self.assertEquals(has_1, True) self.assertEquals(has_2, True) self.assertEquals(has_3, True) self.assertEquals(has_4, True) self.assertEquals(has_5, True) response = c.get("/api/v1/spot", {'capacity': '49'}) self.assertEquals(response["Content-Type"], "application/json", "Has the json header") spots = json.loads(response.content) has_1 = False has_2 = False has_3 = False has_4 = False has_5 = False for spot in spots: if spot['id'] == spot1.pk: has_1 = True if spot['id'] == spot2.pk: has_2 = True if spot['id'] == spot3.pk: has_3 = True if spot['id'] == spot4.pk: has_4 = True if spot['id'] == spot5.pk: has_5 = True self.assertEquals(has_1, False) self.assertEquals(has_2, False) self.assertEquals(has_3, False) self.assertEquals(has_4, False) self.assertEquals(has_5, True) response = c.get("/api/v1/spot", {'capacity': '501'}) self.assertEquals(response["Content-Type"], "application/json", "Has the json header") spots = json.loads(response.content) has_1 = False has_2 = False has_3 = False has_4 = False has_5 = False for spot in spots: if spot['id'] == spot1.pk: has_1 = True if spot['id'] == spot2.pk: has_2 = True if spot['id'] == spot3.pk: has_3 = True if spot['id'] == spot4.pk: has_4 = True if spot['id'] == spot5.pk: has_5 = True self.assertEquals(has_1, False) 
self.assertEquals(has_2, False) self.assertEquals(has_3, False) self.assertEquals(has_4, False) self.assertEquals(has_5, False) response = c.get("/api/v1/spot", {'capacity': '1', 'distance': '100', 'limit': '4'}) #testing sorting by distance, which is impossible given no center self.assertEquals(response.status_code, 400)
[ "django.core.cache.get_cache", "django.test.client.Client", "spotseeker_server.models.Spot.objects.create", "mock.patch.object", "django.test.utils.override_settings", "simplejson.loads" ]
[((980, 1053), 'django.test.utils.override_settings', 'override_settings', ([], {'SPOTSEEKER_AUTH_MODULE': '"""spotseeker_server.auth.all_ok"""'}), "(SPOTSEEKER_AUTH_MODULE='spotseeker_server.auth.all_ok')\n", (997, 1053), False, 'from django.test.utils import override_settings\n'), ((1145, 1207), 'django.core.cache.get_cache', 'cache.get_cache', (['"""django.core.cache.backends.dummy.DummyCache"""'], {}), "('django.core.cache.backends.dummy.DummyCache')\n", (1160, 1207), False, 'from django.core import cache\n'), ((1221, 1263), 'mock.patch.object', 'patch.object', (['models', '"""cache"""', 'dummy_cache'], {}), "(models, 'cache', dummy_cache)\n", (1233, 1263), False, 'from mock import patch\n'), ((1285, 1336), 'spotseeker_server.models.Spot.objects.create', 'Spot.objects.create', ([], {'name': '"""capacity: 1"""', 'capacity': '(1)'}), "(name='capacity: 1', capacity=1)\n", (1304, 1336), False, 'from spotseeker_server.models import Spot, SpotExtendedInfo, SpotType\n'), ((1383, 1434), 'spotseeker_server.models.Spot.objects.create', 'Spot.objects.create', ([], {'name': '"""capacity: 2"""', 'capacity': '(2)'}), "(name='capacity: 2', capacity=2)\n", (1402, 1434), False, 'from spotseeker_server.models import Spot, SpotExtendedInfo, SpotType\n'), ((1481, 1532), 'spotseeker_server.models.Spot.objects.create', 'Spot.objects.create', ([], {'name': '"""capacity: 3"""', 'capacity': '(3)'}), "(name='capacity: 3', capacity=3)\n", (1500, 1532), False, 'from spotseeker_server.models import Spot, SpotExtendedInfo, SpotType\n'), ((1579, 1630), 'spotseeker_server.models.Spot.objects.create', 'Spot.objects.create', ([], {'name': '"""capacity: 4"""', 'capacity': '(4)'}), "(name='capacity: 4', capacity=4)\n", (1598, 1630), False, 'from spotseeker_server.models import Spot, SpotExtendedInfo, SpotType\n'), ((1677, 1730), 'spotseeker_server.models.Spot.objects.create', 'Spot.objects.create', ([], {'name': '"""capacity: 50"""', 'capacity': '(50)'}), "(name='capacity: 50', capacity=50)\n", 
(1696, 1730), False, 'from spotseeker_server.models import Spot, SpotExtendedInfo, SpotType\n'), ((1773, 1781), 'django.test.client.Client', 'Client', ([], {}), '()\n', (1779, 1781), False, 'from django.test.client import Client\n'), ((1984, 2012), 'simplejson.loads', 'json.loads', (['response.content'], {}), '(response.content)\n', (1994, 2012), True, 'import simplejson as json\n'), ((2956, 2984), 'simplejson.loads', 'json.loads', (['response.content'], {}), '(response.content)\n', (2966, 2984), True, 'import simplejson as json\n'), ((3929, 3957), 'simplejson.loads', 'json.loads', (['response.content'], {}), '(response.content)\n', (3939, 3957), True, 'import simplejson as json\n'), ((4907, 4935), 'simplejson.loads', 'json.loads', (['response.content'], {}), '(response.content)\n', (4917, 4935), True, 'import simplejson as json\n')]
import pandas as pd import numpy as np from model.helper_functions import build_playlist_features print('Reading data into memory') pid_list = np.genfromtxt('../data/train_pids.csv', skip_header=1, dtype=int) playlistfile = '../data/playlists.csv' playlist_df = pd.read_csv(playlistfile) trackfile = '../data/songs_100000_feat_cleaned.csv' track_df = pd.read_csv(trackfile, index_col='track_uri') print('Finding playlist features') playlist_features = build_playlist_features(pid_list, playlist_df, track_df) playlist_features.to_csv('../data/playlist_features_train.csv') print('Finding top artists') # Find the top artists who dominate playlists top_playlist_defining_artists = playlist_features.artist_uri_top.value_counts(normalize=False) top_playlist_defining_artists.to_csv('../data/top_playlist_defining_artists_train_all.csv', header=True) top_playlist_defining_artists = playlist_features.artist_uri_top.value_counts().index.values[:50] np.savetxt('../data/top_playlist_defining_artists_train.csv', top_playlist_defining_artists, delimiter=',', fmt="%s") # Keep only those artists who dominate playlists and one hot encode artists_to_keep = playlist_features.artist_uri_top.isin(top_playlist_defining_artists) playlist_features.artist_uri_top = playlist_features.artist_uri_top[artists_to_keep] playlist_features.artist_uri_freq = playlist_features.artist_uri_freq[artists_to_keep] playlist_features.artist_uri_freq.fillna(0, inplace=True) top_artist_dummies = pd.get_dummies(playlist_features.artist_uri_top) playlist_features = pd.concat([playlist_features, top_artist_dummies], axis=1) playlist_features.drop(['artist_uri_top'], axis=1, inplace=True) playlist_features.to_csv('../data/playlist_features_with_artists_train.csv')
[ "pandas.read_csv", "model.helper_functions.build_playlist_features", "pandas.concat", "numpy.savetxt", "pandas.get_dummies", "numpy.genfromtxt" ]
[((144, 209), 'numpy.genfromtxt', 'np.genfromtxt', (['"""../data/train_pids.csv"""'], {'skip_header': '(1)', 'dtype': 'int'}), "('../data/train_pids.csv', skip_header=1, dtype=int)\n", (157, 209), True, 'import numpy as np\n'), ((263, 288), 'pandas.read_csv', 'pd.read_csv', (['playlistfile'], {}), '(playlistfile)\n', (274, 288), True, 'import pandas as pd\n'), ((352, 397), 'pandas.read_csv', 'pd.read_csv', (['trackfile'], {'index_col': '"""track_uri"""'}), "(trackfile, index_col='track_uri')\n", (363, 397), True, 'import pandas as pd\n'), ((454, 510), 'model.helper_functions.build_playlist_features', 'build_playlist_features', (['pid_list', 'playlist_df', 'track_df'], {}), '(pid_list, playlist_df, track_df)\n', (477, 510), False, 'from model.helper_functions import build_playlist_features\n'), ((949, 1070), 'numpy.savetxt', 'np.savetxt', (['"""../data/top_playlist_defining_artists_train.csv"""', 'top_playlist_defining_artists'], {'delimiter': '""","""', 'fmt': '"""%s"""'}), "('../data/top_playlist_defining_artists_train.csv',\n top_playlist_defining_artists, delimiter=',', fmt='%s')\n", (959, 1070), True, 'import numpy as np\n'), ((1475, 1523), 'pandas.get_dummies', 'pd.get_dummies', (['playlist_features.artist_uri_top'], {}), '(playlist_features.artist_uri_top)\n', (1489, 1523), True, 'import pandas as pd\n'), ((1544, 1602), 'pandas.concat', 'pd.concat', (['[playlist_features, top_artist_dummies]'], {'axis': '(1)'}), '([playlist_features, top_artist_dummies], axis=1)\n', (1553, 1602), True, 'import pandas as pd\n')]
"""Defines an Table Status Message.""" # System imports from enum import IntEnum # Local source tree imports from pyof.foundation.base import GenericMessage, GenericStruct from pyof.foundation.basic_types import BinaryData, FixedTypeList, UBInt16, UBInt8, UBInt32, UBInt64, Pad from pyof.v0x05.common.header import Header, Type from pyof.v0x05.controller2switch.multipart_reply import TableDesc # Third-party imports __all__ = ('TableStatus', 'TableReason') # Enums class TableReason(IntEnum): """What changed about the table.""" #: Vacancy down threshold event OFPTR_VACANCY_DOWN = 3 #: Vacancy up threshold event OFPTR_VACANCY_UP = 4 # Classes class TableStatus(GenericMessage): """OpenFlow TableStatus Message OFPT_TABLE_STATUS. A table config has changed in the datapath """ #: :class:`~pyof.v0x05.common.action.ActionHeader`: OpenFlow Header header = Header(message_type=Type.OFPT_TABLE_STATUS) #: One of OFPTR_.* reason = UBInt8(enum_ref=TableReason) #: Pad to 64 bits pad = Pad(7) #: New table config table = TableDesc() def __init__(self, xid=None, reason=None, table=None): """Create a message with the optional parameters below. Args: xid (int): xid to be used on the message header. elements: List of elements - 0 or more """ super().__init__(xid) self.reason = reason self.table = table
[ "pyof.foundation.basic_types.Pad", "pyof.v0x05.controller2switch.multipart_reply.TableDesc", "pyof.foundation.basic_types.UBInt8", "pyof.v0x05.common.header.Header" ]
[((911, 954), 'pyof.v0x05.common.header.Header', 'Header', ([], {'message_type': 'Type.OFPT_TABLE_STATUS'}), '(message_type=Type.OFPT_TABLE_STATUS)\n', (917, 954), False, 'from pyof.v0x05.common.header import Header, Type\n'), ((991, 1019), 'pyof.foundation.basic_types.UBInt8', 'UBInt8', ([], {'enum_ref': 'TableReason'}), '(enum_ref=TableReason)\n', (997, 1019), False, 'from pyof.foundation.basic_types import BinaryData, FixedTypeList, UBInt16, UBInt8, UBInt32, UBInt64, Pad\n'), ((1052, 1058), 'pyof.foundation.basic_types.Pad', 'Pad', (['(7)'], {}), '(7)\n', (1055, 1058), False, 'from pyof.foundation.basic_types import BinaryData, FixedTypeList, UBInt16, UBInt8, UBInt32, UBInt64, Pad\n'), ((1095, 1106), 'pyof.v0x05.controller2switch.multipart_reply.TableDesc', 'TableDesc', ([], {}), '()\n', (1104, 1106), False, 'from pyof.v0x05.controller2switch.multipart_reply import TableDesc\n')]
import torch from torch.autograd import Function class Identity(Function): @staticmethod def forward(ctx, x, name): ctx.name = name return x.clone() def backward(ctx, grad): import pydevd pydevd.settrace(suspend=False, trace_only_current_thread=True) grad_temp = grad.clone() return grad_temp, None
[ "pydevd.settrace" ]
[((235, 297), 'pydevd.settrace', 'pydevd.settrace', ([], {'suspend': '(False)', 'trace_only_current_thread': '(True)'}), '(suspend=False, trace_only_current_thread=True)\n', (250, 297), False, 'import pydevd\n')]
# Copyright 2022 Quantapix Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= import collections import os import unicodedata from ...tokens.utils import PreTrainedTokenizer, _is_control, _is_punctuation, _is_whitespace from ...tokens.base import BatchEncoding from ...utils import PaddingStrategy VOCAB_FS = {"vocab_file": "vocab.txt"} VOCAB_MAP = { "vocab_file": { "google/realm-cc-news-pretrained-embedder": "https://huggingface.co/google/realm-cc-news-pretrained-embedder/resolve/main/vocab.txt", "google/realm-cc-news-pretrained-encoder": "https://huggingface.co/google/realm-cc-news-pretrained-encoder/resolve/main/vocab.txt", "google/realm-cc-news-pretrained-scorer": "https://huggingface.co/google/realm-cc-news-pretrained-scorer/resolve/main/vocab.txt", "google/realm-cc-news-pretrained-openqa": "https://huggingface.co/google/realm-cc-news-pretrained-openqa/aresolve/main/vocab.txt", "google/realm-orqa-nq-openqa": "https://huggingface.co/google/realm-orqa-nq-openqa/resolve/main/vocab.txt", "google/realm-orqa-nq-reader": "https://huggingface.co/google/realm-orqa-nq-reader/resolve/main/vocab.txt", "google/realm-orqa-wq-openqa": "https://huggingface.co/google/realm-orqa-wq-openqa/resolve/main/vocab.txt", "google/realm-orqa-wq-reader": "https://huggingface.co/google/realm-orqa-wq-reader/resolve/main/vocab.txt", } } INPUT_CAPS = { "google/realm-cc-news-pretrained-embedder": 512, 
"google/realm-cc-news-pretrained-encoder": 512, "google/realm-cc-news-pretrained-scorer": 512, "google/realm-cc-news-pretrained-openqa": 512, "google/realm-orqa-nq-openqa": 512, "google/realm-orqa-nq-reader": 512, "google/realm-orqa-wq-openqa": 512, "google/realm-orqa-wq-reader": 512, } PRETRAINED_INIT_CONFIGURATION = { "google/realm-cc-news-pretrained-embedder": {"do_lower_case": True}, "google/realm-cc-news-pretrained-encoder": {"do_lower_case": True}, "google/realm-cc-news-pretrained-scorer": {"do_lower_case": True}, "google/realm-cc-news-pretrained-openqa": {"do_lower_case": True}, "google/realm-orqa-nq-openqa": {"do_lower_case": True}, "google/realm-orqa-nq-reader": {"do_lower_case": True}, "google/realm-orqa-wq-openqa": {"do_lower_case": True}, "google/realm-orqa-wq-reader": {"do_lower_case": True}, } def load_vocab(vocab_file): vocab = collections.OrderedDict() with open(vocab_file, "r", encoding="utf-8") as reader: tokens = reader.readlines() for index, token in enumerate(tokens): token = token.rstrip("\n") vocab[token] = index return vocab def whitespace_tokenize(text): text = text.strip() if not text: return [] tokens = text.split() return tokens class Tokenizer(PreTrainedTokenizer): vocab_fs = VOCAB_FS vocab_map = VOCAB_MAP pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION input_caps = INPUT_CAPS def __init__( self, vocab_file, do_lower_case=True, do_basic_tokenize=True, never_split=None, unk="[UNK]", sep="[SEP]", pad="[PAD]", cls="[CLS]", msk="[MASK]", tokenize_chinese_chars=True, strip_accents=None, **kw, ): super().__init__( do_lower_case=do_lower_case, do_basic_tokenize=do_basic_tokenize, never_split=never_split, unk=unk, sep=sep, pad=pad, cls=cls, msk=msk, tokenize_chinese_chars=tokenize_chinese_chars, strip_accents=strip_accents, **kw, ) if not os.path.isfile(vocab_file): raise ValueError( f"Can't find a vocabulary file at path '{vocab_file}'. 
To load the vocabulary from a Google pretrained " "model use `tokenizer = RealmTokenizer.from_pretrained(PRETRAINED_MODEL_NAME)`" ) self.vocab = load_vocab(vocab_file) self.ids_to_tokens = collections.OrderedDict( [(ids, tok) for tok, ids in self.vocab.items()] ) self.do_basic_tokenize = do_basic_tokenize if do_basic_tokenize: self.basic_tokenizer = BasicTokenizer( do_lower_case=do_lower_case, never_split=never_split, tokenize_chinese_chars=tokenize_chinese_chars, strip_accents=strip_accents, ) self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab, unk=self.unk) @property def do_lower_case(self): return self.basic_tokenizer.do_lower_case @property def s_vocab(self): return len(self.vocab) def get_vocab(self): return dict(self.vocab, **self.added_tokens_encoder) def _tokenize(self, text): split_tokens = [] if self.do_basic_tokenize: for token in self.basic_tokenizer.tokenize(text, never_split=self.all_special_tokens): # If the token is part of the never_split set if token in self.basic_tokenizer.never_split: split_tokens.append(token) else: split_tokens += self.wordpiece_tokenizer.tokenize(token) else: split_tokens = self.wordpiece_tokenizer.tokenize(text) return split_tokens def _convert_token_to_id(self, token): """Converts a token (str) in an id using the vocab.""" return self.vocab.get(token, self.vocab.get(self.unk)) def _convert_id_to_token(self, index): """Converts an index (integer) in a token (str) using the vocab.""" return self.ids_to_tokens.get(index, self.unk) def convert_tokens_to_string(self, tokens): """Converts a sequence of tokens (string) in a single string.""" out_string = " ".join(tokens).replace(" ##", "").strip() return out_string def batch_encode_candidates(self, text, **kw): kw["padding"] = PaddingStrategy.MAX_LENGTH batch_text = text batch_text_pair = kw.pop("text_pair", None) return_tensors = kw.pop("return_tensors", None) output_data = { "input_ids": [], "attention_mask": [], "token_type_ids": [], } for idx, candidate_text in 
enumerate(batch_text): if batch_text_pair is not None: candidate_text_pair = batch_text_pair[idx] else: candidate_text_pair = None encoded_candidates = super().__call__( candidate_text, candidate_text_pair, return_tensors=None, **kw ) encoded_input_ids = encoded_candidates.get("input_ids") encoded_attention_mask = encoded_candidates.get("attention_mask") encoded_token_type_ids = encoded_candidates.get("token_type_ids") if encoded_input_ids is not None: output_data["input_ids"].append(encoded_input_ids) if encoded_attention_mask is not None: output_data["attention_mask"].append(encoded_attention_mask) if encoded_token_type_ids is not None: output_data["token_type_ids"].append(encoded_token_type_ids) output_data = dict((key, item) for key, item in output_data.items() if len(item) != 0) return BatchEncoding(output_data, tensor_type=return_tensors) def build_inputs_with_special_tokens(self, toks_0, toks_1=None): if toks_1 is None: return [self.cls_token_id] + toks_0 + [self.sep_token_id] cls = [self.cls_token_id] sep = [self.sep_token_id] return cls + toks_0 + sep + toks_1 + sep def get_special_tokens_mask( self, toks_0, toks_1=None, has_specials=False, ): if has_specials: return super().get_special_tokens_mask(toks_0=toks_0, toks_1=toks_1, has_specials=True) if toks_1 is not None: return [1] + ([0] * len(toks_0)) + [1] + ([0] * len(toks_1)) + [1] return [1] + ([0] * len(toks_0)) + [1] def create_token_type_ids_from_sequences(self, toks_0, toks_1=None): sep = [self.sep_token_id] cls = [self.cls_token_id] if toks_1 is None: return len(cls + toks_0 + sep) * [0] return len(cls + toks_0 + sep) * [0] + len(toks_1 + sep) * [1] def save_vocabulary(self, dir, pre=None): index = 0 if os.path.isdir(dir): vocab_file = os.path.join( dir, (pre + "-" if pre else "") + VOCAB_FS["vocab_file"], ) else: vocab_file = (pre + "-" if pre else "") + dir with open(vocab_file, "w", encoding="utf-8") as writer: for token, token_index in sorted(self.vocab.items(), key=lambda kv: kv[1]): if index 
!= token_index: logger.warning( f"Saving vocabulary to {vocab_file}: vocabulary indices are not consecutive." " Please check that the vocabulary is not corrupted!" ) index = token_index writer.write(token + "\n") index += 1 return (vocab_file,) class BasicTokenizer(object): def __init__( self, do_lower_case=True, never_split=None, tokenize_chinese_chars=True, strip_accents=None ): if never_split is None: never_split = [] self.do_lower_case = do_lower_case self.never_split = set(never_split) self.tokenize_chinese_chars = tokenize_chinese_chars self.strip_accents = strip_accents def tokenize(self, text, never_split=None): never_split = self.never_split.union(set(never_split)) if never_split else self.never_split text = self._clean_text(text) if self.tokenize_chinese_chars: text = self._tokenize_chinese_chars(text) orig_tokens = whitespace_tokenize(text) split_tokens = [] for token in orig_tokens: if token not in never_split: if self.do_lower_case: token = token.lower() if self.strip_accents is not False: token = self._run_strip_accents(token) elif self.strip_accents: token = self._run_strip_accents(token) split_tokens.extend(self._run_split_on_punc(token, never_split)) output_tokens = whitespace_tokenize(" ".join(split_tokens)) return output_tokens def _run_strip_accents(self, text): text = unicodedata.normalize("NFD", text) output = [] for char in text: cat = unicodedata.category(char) if cat == "Mn": continue output.append(char) return "".join(output) def _run_split_on_punc(self, text, never_split=None): if never_split is not None and text in never_split: return [text] chars = list(text) i = 0 start_new_word = True output = [] while i < len(chars): char = chars[i] if _is_punctuation(char): output.append([char]) start_new_word = True else: if start_new_word: output.append([]) start_new_word = False output[-1].append(char) i += 1 return ["".join(x) for x in output] def _tokenize_chinese_chars(self, text): output = [] for char in text: cp = ord(char) if 
self._is_chinese_char(cp): output.append(" ") output.append(char) output.append(" ") else: output.append(char) return "".join(output) def _is_chinese_char(self, cp): if ( (cp >= 0x4E00 and cp <= 0x9FFF) or (cp >= 0x3400 and cp <= 0x4DBF) # or (cp >= 0x20000 and cp <= 0x2A6DF) # or (cp >= 0x2A700 and cp <= 0x2B73F) # or (cp >= 0x2B740 and cp <= 0x2B81F) # or (cp >= 0x2B820 and cp <= 0x2CEAF) # or (cp >= 0xF900 and cp <= 0xFAFF) or (cp >= 0x2F800 and cp <= 0x2FA1F) # ): # return True return False def _clean_text(self, text): output = [] for char in text: cp = ord(char) if cp == 0 or cp == 0xFFFD or _is_control(char): continue if _is_whitespace(char): output.append(" ") else: output.append(char) return "".join(output) class WordpieceTokenizer(object): def __init__(self, vocab, unk, max_input_chars_per_word=100): self.vocab = vocab self.unk = unk self.max_input_chars_per_word = max_input_chars_per_word def tokenize(self, text): output_tokens = [] for token in whitespace_tokenize(text): chars = list(token) if len(chars) > self.max_input_chars_per_word: output_tokens.append(self.unk) continue is_bad = False start = 0 sub_tokens = [] while start < len(chars): end = len(chars) cur_substr = None while start < end: substr = "".join(chars[start:end]) if start > 0: substr = "##" + substr if substr in self.vocab: cur_substr = substr break end -= 1 if cur_substr is None: is_bad = True break sub_tokens.append(cur_substr) start = end if is_bad: output_tokens.append(self.unk) else: output_tokens.extend(sub_tokens) return output_tokens
[ "collections.OrderedDict", "os.path.join", "os.path.isfile", "unicodedata.category", "os.path.isdir", "unicodedata.normalize" ]
[((3002, 3027), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (3025, 3027), False, 'import collections\n'), ((9109, 9127), 'os.path.isdir', 'os.path.isdir', (['dir'], {}), '(dir)\n', (9122, 9127), False, 'import os\n'), ((11284, 11318), 'unicodedata.normalize', 'unicodedata.normalize', (['"""NFD"""', 'text'], {}), "('NFD', text)\n", (11305, 11318), False, 'import unicodedata\n'), ((4301, 4327), 'os.path.isfile', 'os.path.isfile', (['vocab_file'], {}), '(vocab_file)\n', (4315, 4327), False, 'import os\n'), ((9154, 9224), 'os.path.join', 'os.path.join', (['dir', "((pre + '-' if pre else '') + VOCAB_FS['vocab_file'])"], {}), "(dir, (pre + '-' if pre else '') + VOCAB_FS['vocab_file'])\n", (9166, 9224), False, 'import os\n'), ((11383, 11409), 'unicodedata.category', 'unicodedata.category', (['char'], {}), '(char)\n', (11403, 11409), False, 'import unicodedata\n')]
import datetime, os, base64 from flask import Flask, jsonify, request, Blueprint from dbstore import dbconf import json from bson import json_util # process kill # lsof -i tcp:3000 file_upload = Blueprint('uploadAPI', __name__) app = Flask(__name__) def song_upload(val): try: # content = request.get_json() curs = dbconf.file_store.find().sort( [("_id", -1)] ).limit(1) if curs.count() > 0: for rec in curs: id_val = rec["audioFileMetadata"]["id"] id = id_val + 1 else: id = 1 audio_file_id = int(val["audio_file_id"]) cursor_file_id = dbconf.file_store.find({'audioFileMetadata.audio_file_id': audio_file_id}) if cursor_file_id.count() == 0: song_name = str(val['song_name']) duration_sec = int(val['duration_sec']) upload_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") if len(song_name) != 0 and len(song_name) <= 100: if duration_sec >= 0: msg = "Successful" response = { "status": 200, "msg": msg, "id": id, "song_name": song_name, "duration_sec": duration_sec, "upload_time": upload_time, "audio_file_id": audio_file_id } else: msg = "Duration should be positive integer number" response = { "status": 400, "msg": msg, "upload_time": upload_time } else: msg = "Song name should be between 0 to 100 characters" response = { "status": 400, "msg": msg, "upload_time": upload_time } else: msg = "Duplicate audio id found." response = { "status": 400, "msg": msg } return response except Exception as e: print(str(e)) response = { "status": 500, "msg": "Something went wrong." 
} return response def podcast_upload(val): try: curs = dbconf.file_store.find().sort( [("_id", -1)] ).limit(1) if curs.count() > 0: for rec in curs: id_val = rec["audioFileMetadata"]["id"] id = id_val + 1 else: id = 1 audio_file_id = int(val["audio_file_id"]) cursor_file_id = dbconf.file_store.find({'audioFileMetadata.audio_file_id': audio_file_id}) if cursor_file_id.count() == 0: podcast_name = str(val['podcast_name']) duration_sec = int(val['duration_sec']) upload_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") host = str(val['host']) participant = val['participant'] # print(id, podcast_name, duration_sec, upload_time, host, participant) if len(podcast_name) <= 100: if duration_sec >= 0: exceed_leng = [ x for x in participant if len(x) >= 100] if len(participant) <= 10 and len(exceed_leng) == 0: if len(host) <= 100: msg = "sucessful" response = { "status": 200, "msg": msg, "id": id, "podcast_name": podcast_name, "duration_sec": duration_sec, "upload_time": upload_time, "host": host, "participant": participant, "audio_file_id": audio_file_id } else: msg = "Host cannot be larger than 100 characters." response = { "status": 400, "msg": msg, "upload_time": upload_time } else: msg = "Each string cannot be larger than 100 characters, maximum of 10 participants possible" response = { "status": 400, "msg": msg, "upload_time": upload_time } else: msg = "Duration should be positive integer number" response = { "status": 400, "msg": msg, "upload_time": upload_time } else: msg = "Name of the podcast cannot be larger than 100 characters." response = { "status": 400, "msg": msg, "upload_time": upload_time } else: msg = "Duplicate audio id found." response = { "status": 400, "msg": msg } return response except Exception as e: print(str(e)) response = { "status": 500, "msg": "Something went wrong." 
} return response def audiobook_upload(val): try: # content = request.get_json() curs = dbconf.file_store.find().sort( [("_id", -1)]).limit(1) if curs.count() > 0: for rec in curs: id_val = rec["audioFileMetadata"]["id"] id = id_val + 1 else: id = 1 audio_file_id = int(val["audio_file_id"]) cursor_file_id = dbconf.file_store.find({'audioFileMetadata.audio_file_id': audio_file_id}) if cursor_file_id.count() == 0: audiobook_title = str(val['audiobook_title']) author_title = str(val['author_title']) narrator = str(val['narrator']) duration_sec = int(val['duration_sec']) upload_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") if len(audiobook_title) <= 100 and len(audiobook_title) != 0: if len(author_title) <= 100 and len(author_title) != 0: if len(narrator) <=100 and len(narrator) != 0: if duration_sec >= 0: msg = "sucessful" response = { "status": 200, "msg": msg, "id": id, "audiobook_title": audiobook_title, "author_title": author_title, "narrator": narrator, "duration_sec": duration_sec, "upload_time": upload_time, "audio_file_id": audio_file_id } else: msg = "Duration should be positive integer number" response = { "status": 400, "msg": msg, "upload_time": upload_time } else: msg = "Narrator should be between 0 to 100 characters." response = { "status": 400, "msg": msg, "upload_time": upload_time } else: msg = "Author title should be between 0 to 100 characters." response = { "status": 400, "msg": msg, "upload_time": upload_time } else: msg = "Audiobook should be between 0 to 100 characters." response = { "status": 400, "msg": msg, "upload_time": upload_time } else: msg = "Duplicate audio id found." response = { "status": 400, "msg": msg } return response except Exception as e: print(str(e)) msg = "Something went wrong." 
response = { "status": 500, "msg": msg } return response @file_upload.route('api/_create', methods= ['POST']) def create(): try: if request.method == "POST": #getting all the parameters content = request.get_json() # print(content) audioFileType = content['audioFileType'] #for song type if audioFileType.lower() == 'song': audioFileMetadata = "song" #calling the song-upload method for song type func_call = song_upload(content['audioFileMetadata']) if func_call["status"] == 200: audioFileMetadata = { "duration_sec": func_call["duration_sec"], "id": func_call["id"], "song_name": func_call['song_name'], "upload_time": func_call['upload_time'], "audio_file_id": func_call['audio_file_id'] } rec = { "audioFileType": audioFileType.lower(), "audioFileMetadata": audioFileMetadata } # insert the data into collection data = json.loads(json_util.dumps(rec)) dbconf.file_store.insert(rec) response = { "status": func_call["status"], "msg": func_call["msg"], "record": data } # print(response) elif func_call["status"] == 400: response = { "status": func_call["status"], "msg": func_call["msg"] } elif func_call["status"] == 500: response = { "status": func_call["status"], "msg": func_call["msg"] } #for podcast type elif audioFileType.lower() == 'podcast': audioFileMetadata = "podcast" func_call = podcast_upload(content['audioFileMetadata']) if func_call["status"] == 200: audioFileMetadata = { "podcast_name": func_call["podcast_name"], "id": func_call["id"], "duration_sec": func_call["duration_sec"], "host": func_call['host'], "upload_time": func_call['upload_time'], "participant": func_call["participant"], "audio_file_id": func_call['audio_file_id'] } rec = { "audioFileType": audioFileType.lower(), "audioFileMetadata": audioFileMetadata } data = json.loads(json_util.dumps(rec)) dbconf.file_store.insert(rec) response = { "status": func_call["status"], "msg": func_call["msg"], "record": data } # print(response) elif func_call["status"] == 400: response = { "status": func_call["status"], 
"msg": func_call["msg"] } elif func_call["status"] == 500: response = { "status": func_call["status"], "msg": func_call["msg"] } #for audiobook type elif audioFileType.lower() == 'audiobook': audioFileMetadata = "audiobook" func_call = audiobook_upload(content['audioFileMetadata']) if func_call["status"] == 200: audioFileMetadata = { "audiobook_title": func_call["audiobook_title"], "id": func_call["id"], "duration_sec": func_call["duration_sec"], "author_title": func_call['author_title'], "upload_time": func_call['upload_time'], "narrator": func_call["narrator"], "audio_file_id": func_call['audio_file_id'] } rec = { "audioFileType": audioFileType.lower(), "audioFileMetadata": audioFileMetadata } data = json.loads(json_util.dumps(rec)) dbconf.file_store.insert(rec) response = { "status": func_call["status"], "msg": func_call["msg"], "record": data } # print(response) elif func_call["status"] == 400: response = { "status": func_call["status"], "msg": func_call["msg"] } elif func_call["status"] == 500: response = { "status": func_call["status"], "msg": func_call["msg"] } # print(response) else: response = { "status": 400, "msg": "Bad request." } else: response = { "status": 400, "msg": "Bad request." } return jsonify(response) except Exception as e: print(str(e)) response = { "status": 500, "msg": "Something went wrong." 
} return jsonify(response) @file_upload.route('api/_delete/<string:audioFileType>/<int:audioFileID>', methods= ['DELETE']) def delete_(audioFileType, audioFileID): try: if request.method == "DELETE": cursor = dbconf.file_store.find({"audioFileType": audioFileType.lower(), 'audioFileMetadata.audio_file_id': audioFileID}) if cursor.count() != 0: dbconf.file_store.remove({"audioFileType": audioFileType.lower(), 'audioFileMetadata.audio_file_id': audioFileID}) response = { "status": 200, "msg": "Sucessfull.", "audioFileType": audioFileType, "audioFileID": audioFileID } else: response = { "status": 400, "msg": "audio file ID is not found.", "audioFileType": audioFileType, "audioFileID": audioFileID } else: response = { "status": 400, "msg": "Bad request." } return jsonify(response) except Exception as e: print(str(e)) response = { "status": 500, "msg": "Something went wrong." } return jsonify(response) @file_upload.route('api/_update/<string:audioFileType>/<int:audioFileID>', methods= ['PUT']) def update(audioFileType, audioFileID): try: if request.method == "PUT": content = request.json cursor = dbconf.file_store.find({"audioFileType": audioFileType.lower(), 'audioFileMetadata.audio_file_id': audioFileID}) if cursor.count() != 0: #song type if audioFileType.lower() == 'song': song_name = content["audioFileMetadata"]["song_name"] duration_sec = content["audioFileMetadata"]["duration_sec"] if len(song_name) != 0 and len(song_name) <= 100: if duration_sec >= 0: myquery = {"audioFileType": audioFileType.lower(), 'audioFileMetadata.audio_file_id': audioFileID} newvalues = { "$set": { "audioFileMetadata.duration_sec": duration_sec, "audioFileMetadata.song_name": song_name, "audioFileMetadata.upload_time": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") }} dbconf.file_store.update_one(myquery, newvalues) response = { "status": 200, "msg": "Sucessfull.", "audioFileType": audioFileType, "audioFileID": audioFileID } #for duration else: response = { "status": 400, "msg": 
"Duration should be positive integer number", "audioFileType": audioFileType, "audioFileID": audioFileID } #for song name else: response = { "status": 400, "msg": "Song name should be between 0 to 100 characters", "audioFileType": audioFileType, "audioFileID": audioFileID } #podcast type elif audioFileType.lower() == 'podcast': podcast_name = content["audioFileMetadata"]["podcast_name"] duration_sec = content["audioFileMetadata"]["duration_sec"] host = content["audioFileMetadata"]["host"] participant = content["audioFileMetadata"]["participant"] if len(podcast_name) != 0 and len(podcast_name) <= 100: if duration_sec >= 0: exceed_leng = [ x for x in participant if len(x) >= 100] if len(participant) <= 10 and len(exceed_leng) == 0: if len(host) != 0 and len(host) <= 100: myquery = {"audioFileType": audioFileType.lower(), 'audioFileMetadata.audio_file_id': audioFileID} newvalues = { "$set": { "audioFileMetadata.podcast_name": podcast_name, "audioFileMetadata.duration_sec": duration_sec, "audioFileMetadata.host": host, "audioFileMetadata.participant": participant, "audioFileMetadata.upload_time": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") }} dbconf.file_store.update_one(myquery, newvalues) response = { "status": 200, "msg": "Sucessfull.", "audioFileType": audioFileType, "audioFileID": audioFileID } #for host else: response = { "status": 400, "msg": "Host should be between 0 to 100 characters", "audioFileType": audioFileType, "audioFileID": audioFileID } #participant else: response = { "status": 400, "msg": "Each string cannot be larger than 100 characters, maximum of 10 participants possible", "audioFileType": audioFileType, "audioFileID": audioFileID } #duration else: response = { "status": 400, "msg": "Duration should be positive integer number", "audioFileType": audioFileType, "audioFileID": audioFileID } #podcast_name else: response = { "status": 400, "msg": "Name of the podcast should be between 0 to 100 characters", "audioFileType": audioFileType, 
"audioFileID": audioFileID } #audiobook type elif audioFileType.lower() == 'audiobook': audiobook_title = content["audioFileMetadata"]["audiobook_title"] duration_sec = content["audioFileMetadata"]["duration_sec"] author_title = content["audioFileMetadata"]["author_title"] narrator = content["audioFileMetadata"]["narrator"] if len(audiobook_title) != 0 and len(audiobook_title) <= 100: if len(author_title) != 0 and len(author_title) <= 100: if len(narrator) != 0 and len(narrator) <=100: if duration_sec >= 0: myquery = {"audioFileType": audioFileType.lower(), 'audioFileMetadata.audio_file_id': audioFileID} newvalues = { "$set": { "audioFileMetadata.audiobook_title": audiobook_title, "audioFileMetadata.duration_sec": duration_sec, "audioFileMetadata.author_title": author_title, "audioFileMetadata.narrator": narrator, "audioFileMetadata.upload_time": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") }} dbconf.file_store.update_one(myquery, newvalues) response = { "status": 200, "msg": "Sucessfull.", "audioFileType": audioFileType, "audioFileID": audioFileID } else: response = { "status": 400, "msg": "Duration should be positive integer number", "audioFileType": audioFileType, "audioFileID": audioFileID } else: response = { "status": 400, "msg": "Narrator should be between 0 to 100 characters.", "audioFileType": audioFileType, "audioFileID": audioFileID } else: response = { "status": 400, "msg": "Author title should be between 0 to 100 characters.", "audioFileType": audioFileType, "audioFileID": audioFileID } else: response = { "status": 400, "msg": "Audiobook should be between 0 to 100 characters.", "audioFileType": audioFileType, "audioFileID": audioFileID } else: response = { "status": 400, "msg": "audio file ID is not found.", "audioFileType": audioFileType, "audioFileID": audioFileID } else: response = { "status": 400, "msg": "Bad request." } return jsonify(response) except Exception as e: print(str(e)) response = { "status": 500, "msg": "Something went wrong." 
} return jsonify(response) @file_upload.route("api/_getapi/<audioFileType>", methods=["GET"], defaults={"audioFileID": None}) @file_upload.route('api/_getapi/<string:audioFileType>/<int:audioFileID>', methods= ['GET']) def getapi(audioFileType, audioFileID): try: if request.method == 'GET': if audioFileID is not None: cursor = dbconf.file_store.find({"audioFileType": audioFileType.lower(), 'audioFileMetadata.audio_file_id': audioFileID}) if cursor.count() != 0: for rec in cursor: if rec["audioFileType"] == 'song': audio_file = rec["audioFileMetadata"]["song_name"] if rec["audioFileType"] == 'podcast': audio_file= rec["audioFileMetadata"]["podcast_name"] if rec["audioFileType"] == 'audiobook': audio_file= rec["audioFileMetadata"]["audiobook_title"] response = { "status": 200, "msg": "Sucessfull.", "audioFileType": audioFileType, "audio_file": audio_file } else: response = { "status": 400, "msg": "audio file ID is not found.", "audioFileType": audioFileType, "audioFileID": audioFileID } else: cursor = dbconf.file_store.find({"audioFileType": str(audioFileType.lower())}) if cursor.count() != 0: audio_list = [] for rec in cursor: if rec["audioFileType"] == 'song': audio_list.append(rec["audioFileMetadata"]["song_name"]) if rec["audioFileType"] == 'podcast': audio_list.append(rec["audioFileMetadata"]["podcast_name"]) if rec["audioFileType"] == 'audiobook': audio_list.append(rec["audioFileMetadata"]["audiobook_title"]) response = { "status": 200, "msg": "Sucessfull.", "audioFileType": audioFileType, "audio_list": audio_list } else: response = { "status": 400, "msg": "Audio files not found.", "audioFileType": audioFileType } else: response = { "status": 400, "msg": "Bad request." } return jsonify(response) except Exception as e: print(str(e)) response = { "status": 500, "msg": "Something went wrong." } return jsonify(response)
[ "flask.Flask", "dbstore.dbconf.file_store.update_one", "bson.json_util.dumps", "dbstore.dbconf.file_store.insert", "datetime.datetime.now", "flask.request.get_json", "dbstore.dbconf.file_store.find", "flask.Blueprint", "flask.jsonify" ]
[((199, 231), 'flask.Blueprint', 'Blueprint', (['"""uploadAPI"""', '__name__'], {}), "('uploadAPI', __name__)\n", (208, 231), False, 'from flask import Flask, jsonify, request, Blueprint\n'), ((239, 254), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (244, 254), False, 'from flask import Flask, jsonify, request, Blueprint\n'), ((651, 725), 'dbstore.dbconf.file_store.find', 'dbconf.file_store.find', (["{'audioFileMetadata.audio_file_id': audio_file_id}"], {}), "({'audioFileMetadata.audio_file_id': audio_file_id})\n", (673, 725), False, 'from dbstore import dbconf\n'), ((2749, 2823), 'dbstore.dbconf.file_store.find', 'dbconf.file_store.find', (["{'audioFileMetadata.audio_file_id': audio_file_id}"], {}), "({'audioFileMetadata.audio_file_id': audio_file_id})\n", (2771, 2823), False, 'from dbstore import dbconf\n'), ((6125, 6199), 'dbstore.dbconf.file_store.find', 'dbconf.file_store.find', (["{'audioFileMetadata.audio_file_id': audio_file_id}"], {}), "({'audioFileMetadata.audio_file_id': audio_file_id})\n", (6147, 6199), False, 'from dbstore import dbconf\n'), ((14750, 14767), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (14757, 14767), False, 'from flask import Flask, jsonify, request, Blueprint\n'), ((16108, 16125), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (16115, 16125), False, 'from flask import Flask, jsonify, request, Blueprint\n'), ((26209, 26226), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (26216, 26226), False, 'from flask import Flask, jsonify, request, Blueprint\n'), ((29281, 29298), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (29288, 29298), False, 'from flask import Flask, jsonify, request, Blueprint\n'), ((9272, 9290), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (9288, 9290), False, 'from flask import Flask, jsonify, request, Blueprint\n'), ((14946, 14963), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (14953, 14963), 
False, 'from flask import Flask, jsonify, request, Blueprint\n'), ((16303, 16320), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (16310, 16320), False, 'from flask import Flask, jsonify, request, Blueprint\n'), ((26404, 26421), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (26411, 26421), False, 'from flask import Flask, jsonify, request, Blueprint\n'), ((29476, 29493), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (29483, 29493), False, 'from flask import Flask, jsonify, request, Blueprint\n'), ((890, 913), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (911, 913), False, 'import datetime, os, base64\n'), ((2994, 3017), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3015, 3017), False, 'import datetime, os, base64\n'), ((6485, 6508), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6506, 6508), False, 'import datetime, os, base64\n'), ((10354, 10383), 'dbstore.dbconf.file_store.insert', 'dbconf.file_store.insert', (['rec'], {}), '(rec)\n', (10378, 10383), False, 'from dbstore import dbconf\n'), ((341, 365), 'dbstore.dbconf.file_store.find', 'dbconf.file_store.find', ([], {}), '()\n', (363, 365), False, 'from dbstore import dbconf\n'), ((2439, 2463), 'dbstore.dbconf.file_store.find', 'dbconf.file_store.find', ([], {}), '()\n', (2461, 2463), False, 'from dbstore import dbconf\n'), ((5816, 5840), 'dbstore.dbconf.file_store.find', 'dbconf.file_store.find', ([], {}), '()\n', (5838, 5840), False, 'from dbstore import dbconf\n'), ((10311, 10331), 'bson.json_util.dumps', 'json_util.dumps', (['rec'], {}), '(rec)\n', (10326, 10331), False, 'from bson import json_util\n'), ((12052, 12081), 'dbstore.dbconf.file_store.insert', 'dbconf.file_store.insert', (['rec'], {}), '(rec)\n', (12076, 12081), False, 'from dbstore import dbconf\n'), ((12010, 12030), 'bson.json_util.dumps', 'json_util.dumps', (['rec'], {}), '(rec)\n', (12025, 12030), False, 'from bson import 
json_util\n'), ((13757, 13786), 'dbstore.dbconf.file_store.insert', 'dbconf.file_store.insert', (['rec'], {}), '(rec)\n', (13781, 13786), False, 'from dbstore import dbconf\n'), ((17566, 17614), 'dbstore.dbconf.file_store.update_one', 'dbconf.file_store.update_one', (['myquery', 'newvalues'], {}), '(myquery, newvalues)\n', (17594, 17614), False, 'from dbstore import dbconf\n'), ((13715, 13735), 'bson.json_util.dumps', 'json_util.dumps', (['rec'], {}), '(rec)\n', (13730, 13735), False, 'from bson import json_util\n'), ((20190, 20238), 'dbstore.dbconf.file_store.update_one', 'dbconf.file_store.update_one', (['myquery', 'newvalues'], {}), '(myquery, newvalues)\n', (20218, 20238), False, 'from dbstore import dbconf\n'), ((17453, 17476), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (17474, 17476), False, 'import datetime, os, base64\n'), ((23893, 23941), 'dbstore.dbconf.file_store.update_one', 'dbconf.file_store.update_one', (['myquery', 'newvalues'], {}), '(myquery, newvalues)\n', (23921, 23941), False, 'from dbstore import dbconf\n'), ((20061, 20084), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (20082, 20084), False, 'import datetime, os, base64\n'), ((23756, 23779), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (23777, 23779), False, 'import datetime, os, base64\n')]
# Generated by Django 3.2.2 on 2021-07-10 03:16 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('blweb', '0011_vehiclecolor'), ] operations = [ migrations.AddField( model_name='vehicleconfig', name='color', field=models.ForeignKey(blank=True, default=None, help_text='The chosen color for this config', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='color', to='blweb.vehiclecolor'), ), ]
[ "django.db.models.ForeignKey" ]
[((365, 567), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'default': 'None', 'help_text': '"""The chosen color for this config"""', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""color"""', 'to': '"""blweb.vehiclecolor"""'}), "(blank=True, default=None, help_text=\n 'The chosen color for this config', null=True, on_delete=django.db.\n models.deletion.CASCADE, related_name='color', to='blweb.vehiclecolor')\n", (382, 567), False, 'from django.db import migrations, models\n')]
import numpy as np import sys import scipy.interpolate as interpolate import asdf from .function import * from .basic_func import Basic class Func: ''' The list of (possible) `Func` attributes is given below: Attributes ---------- ''' def __init__(self, MB, dust_model=0): ''' Parameters ---------- dust_model : int 0 for Calzetti. ''' self.ID = MB.ID self.ZZ = MB.Zall self.age = MB.age self.AA = MB.nage self.tau0 = MB.tau0 self.MB = MB self.dust_model = dust_model self.DIR_TMP = MB.DIR_TMP if MB.f_dust: self.Temp = MB.Temp try: self.filts = MB.filts self.DIR_FIL = MB.DIR_FILT except: pass # Already Read or not; self.f_af = False self.f_af0 = False def demo(self): ZZ = self.ZZ AA = self.AA return ZZ, AA ############################# # Load template in obs range. ############################# def open_spec_fits(self, fall=0, orig=False): ''' ''' ID0 = self.MB.ID tau0= self.MB.tau0 #[0.01,0.02,0.03] from astropy.io import fits ZZ = self.ZZ AA = self.AA bfnc = self.MB.bfnc #Basic(ZZ) # ASDF; if fall == 0: app = '' hdu0 = self.MB.af['spec'] elif fall == 1: app = 'all_' hdu0 = self.MB.af['spec_full'] DIR_TMP = self.DIR_TMP for pp in range(len(tau0)): for zz in range(len(ZZ)): Z = ZZ[zz] NZ = bfnc.Z2NZ(Z) if zz == 0 and pp == 0: nr = hdu0['colnum'] xx = hdu0['wavelength'] lib = np.zeros((len(nr), 2+len(AA)*len(ZZ)*len(tau0)), dtype='float') lib[:,0] = nr[:] lib[:,1] = xx[:] for aa in range(len(AA)): coln = int(2 + aa) if orig: colname = 'fspec_orig_' + str(zz) + '_' + str(aa) + '_' + str(pp) else: colname = 'fspec_' + str(zz) + '_' + str(aa) + '_' + str(pp) colnall = int(2 + pp*len(ZZ)*len(AA) + zz*len(AA) + aa) # 2 takes account of wavelength and AV columns. lib[:,colnall] = hdu0[colname] return lib def open_spec_dust_fits(self, fall=0): ''' Loads dust template in obs range. 
''' ID0 = self.MB.ID tau0= self.MB.tau0 #[0.01,0.02,0.03] from astropy.io import fits ZZ = self.ZZ AA = self.AA bfnc = self.MB.bfnc #Basic(ZZ) self.MB.af = asdf.open(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf') self.MB.af0 = asdf.open(self.DIR_TMP + 'spec_all.asdf') if fall == 0: app = '' hdu0 = self.MB.af['spec_dust'] elif fall == 1: app = 'all_' hdu0 = self.MB.af['spec_dust_full'] DIR_TMP = self.DIR_TMP nr = hdu0['colnum'] xx = hdu0['wavelength'] lib = np.zeros((len(nr), 2+len(self.Temp)), dtype='float') lib[:,0] = nr[:] lib[:,1] = xx[:] for aa in range(len(self.Temp)): coln = int(2 + aa) colname = 'fspec_' + str(aa) colnall = int(2 + aa) # 2 takes account of wavelength and AV columns. lib[:,colnall] = hdu0[colname] if fall==1 and False: import matplotlib.pyplot as plt plt.close() plt.plot(lib[:,1],lib[:,coln],linestyle='-') plt.show() return lib def open_spec_fits_dir(self, nage, nz, kk, Av00, zgal, A00): ''' Load template in obs range. But for weird template. ''' from astropy.io import fits tau0= self.tau0 #[0.01,0.02,0.03] ZZ = self.ZZ AA = self.AA bfnc = self.MB.bfnc #Basic(ZZ) self.MB.af = asdf.open(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf') self.MB.af0 = asdf.open(self.DIR_TMP + 'spec_all.asdf') app = 'all' hdu0 = self.MB.af['spec_full'] DIR_TMP = self.DIR_TMP #'./templates/' pp = 0 zz = nz # Luminosity mshdu = self.MB.af0['ML'] Ls = mshdu['Ls_%d'%nz] xx = hdu0['wavelength'] # at RF; nr = np.arange(0,len(xx),1) #hdu0.data['colnum'] lib = np.zeros((len(nr), 2+1), dtype='float') lib[:,0] = nr[:] lib[:,1] = xx[:] aa = nage coln = int(2 + aa) colname = 'fspec_' + str(zz) + '_' + str(aa) + '_' + str(pp) yy0 = hdu0[colname]/Ls[aa] yy = flamtonu(xx, yy0) lib[:,2] = yy[:] if self.dust_model == 0: # Calzetti yyd, xxd, nrd = dust_calz(xx, yy, Av00, nr) elif self.dust_model == 1: # MW yyd, xxd, nrd = dust_mw(xx, yy, Av00, nr) elif self.dust_model == 2: # LMC yyd, xxd, nrd = dust_gen(xx, yy, Av00, nr, Rv=4.05, gamma=-0.06, Eb=2.8) elif self.dust_model == 3: # 
SMC yyd, xxd, nrd = dust_gen(xx, yy, Av00, nr, Rv=4.05, gamma=-0.42, Eb=0.0) elif self.dust_model == 4: # Kriek&Conroy with gamma=-0.2 yyd, xxd, nrd = dust_kc(xx, yy, Av00, nr, Rv=4.05, gamma=-0.2) else: print('No entry. Dust model is set to Calzetti') yyd, xxd, nrd = dust_calz(xx, yy, Av00, nr) xxd *= (1.+zgal) nrd_yyd = np.zeros((len(nrd),3), dtype='float') nrd_yyd[:,0] = nrd[:] nrd_yyd[:,1] = yyd[:] nrd_yyd[:,2] = xxd[:] b = nrd_yyd nrd_yyd_sort = b[np.lexsort(([-1,1]*b[:,[1,0]]).T)] yyd_sort = nrd_yyd_sort[:,1] xxd_sort = nrd_yyd_sort[:,2] return A00 * yyd_sort, xxd_sort def get_template(self, lib, Amp=1.0, T=1.0, Av=0.0, Z=0.0, zgal=1.0, f_bb=False): ''' Gets an element template given a set of parameters. Not necessarily the most efficient way, but easy to use. Parameters: ----------- lib : dict library dictionary. Amp : float Amplitude of the target template. Note that each template has Lbol = 1e10Lsun. T : float Age, in Gyr. Av : float Dust attenuation, in mag. Z : float Metallicity, in log(Z/Zsun). zgal : float Redshift. f_bb: bool If calculate bb photometry for the spectrum requested. Returns flux : float array. Flux in Fnu. wavelength : float array. Wave in AA. lcen, lflux : , if f_bb==True. ''' bfnc = self.MB.bfnc DIR_TMP = self.MB.DIR_TMP NZ = bfnc.Z2NZ(Z) pp0 = np.random.uniform(low=0, high=len(self.tau0), size=(1,)) pp = int(pp0[0]) if pp>=len(self.tau0): pp += -1 nmodel = np.argmin(np.abs(T-self.age[:])) if T - self.age[nmodel] != 0: print('T=%.2f is not found in age library. 
T=%.2f is used.'%(T,self.age[nmodel])) coln= int(2 + pp*len(self.ZZ)*len(self.AA) + NZ*len(self.AA) + nmodel) nr = lib[:, 0] xx = lib[:, 1] # This is OBSERVED wavelength range at z=zgal yy = lib[:, coln] if self.dust_model == 0: yyd, xxd, nrd = dust_calz(xx/(1.+zgal), yy, Av, nr) elif self.dust_model == 1: yyd, xxd, nrd = dust_mw(xx/(1.+zgal), yy, Av, nr) elif self.dust_model == 2: # LMC yyd, xxd, nrd = dust_gen(xx/(1.+zgal), yy, Av, nr, Rv=4.05, gamma=-0.06, Eb=2.8) elif self.dust_model == 3: # SMC yyd, xxd, nrd = dust_gen(xx/(1.+zgal), yy, Av, nr, Rv=4.05, gamma=-0.42, Eb=0.0) elif self.dust_model == 4: # Kriek&Conroy with gamma=-0.2 yyd, xxd, nrd = dust_kc(xx/(1.+zgal), yy, Av, nr, Rv=4.05, gamma=-0.2) else: yyd, xxd, nrd = dust_calz(xx/(1.+zgal), yy, Av, nr) xxd *= (1.+zgal) nrd_yyd = np.zeros((len(nrd),3), dtype='float') nrd_yyd[:,0] = nrd[:] nrd_yyd[:,1] = yyd[:] nrd_yyd[:,2] = xxd[:] b = nrd_yyd nrd_yyd_sort = b[np.lexsort(([-1,1]*b[:,[1,0]]).T)] yyd_sort = nrd_yyd_sort[:,1] xxd_sort = nrd_yyd_sort[:,2] if f_bb: #fil_cen, fil_flux = filconv(self.filts, xxd_sort, Amp * yyd_sort, self.DIR_FIL) fil_cen, fil_flux = filconv_fast(self.MB, xxd_sort, Amp * yyd_sort) return Amp * yyd_sort, xxd_sort, fil_flux, fil_cen else: return Amp * yyd_sort, xxd_sort def tmp03(self, A00, Av00, nmodel, Z, zgal, lib): ''' ''' tau0= self.tau0 #[0.01,0.02,0.03] ZZ = self.ZZ AA = self.AA bfnc = self.MB.bfnc #Basic(ZZ) DIR_TMP = self.MB.DIR_TMP #'./templates/' NZ = bfnc.Z2NZ(Z) pp0 = np.random.uniform(low=0, high=len(tau0), size=(1,)) pp = int(pp0[0]) if pp>=len(tau0): pp += -1 coln= int(2 + pp*len(ZZ)*len(AA) + NZ*len(AA) + nmodel) nr = lib[:,0] xx = lib[:,1] # This is OBSERVED wavelength range at z=zgal yy = lib[:,coln] if self.dust_model == 0: yyd, xxd, nrd = dust_calz(xx/(1.+zgal), yy, Av00, nr) elif self.dust_model == 1: yyd, xxd, nrd = dust_mw(xx/(1.+zgal), yy, Av00, nr) elif self.dust_model == 2: # LMC yyd, xxd, nrd = dust_gen(xx/(1.+zgal), yy, Av00, nr, Rv=4.05, gamma=-0.06, 
Eb=2.8) elif self.dust_model == 3: # SMC yyd, xxd, nrd = dust_gen(xx/(1.+zgal), yy, Av00, nr, Rv=4.05, gamma=-0.42, Eb=0.0) elif self.dust_model == 4: # Kriek&Conroy with gamma=-0.2 yyd, xxd, nrd = dust_kc(xx/(1.+zgal), yy, Av00, nr, Rv=4.05, gamma=-0.2) else: yyd, xxd, nrd = dust_calz(xx/(1.+zgal), yy, Av00, nr) xxd *= (1.+zgal) nrd_yyd = np.zeros((len(nrd),3), dtype='float') nrd_yyd[:,0] = nrd[:] nrd_yyd[:,1] = yyd[:] nrd_yyd[:,2] = xxd[:] b = nrd_yyd nrd_yyd_sort = b[np.lexsort(([-1,1]*b[:,[1,0]]).T)] yyd_sort = nrd_yyd_sort[:,1] xxd_sort = nrd_yyd_sort[:,2] return A00 * yyd_sort, xxd_sort def tmp04(self, par, f_Alog=True, nprec=1, f_val=False, lib_all=False, f_nrd=False): ''' Makes model template with a given param set. Also dust attenuation. Parameters ---------- nprec : int Precision when redshift is refined. ''' ZZ = self.ZZ AA = self.AA bfnc = self.MB.bfnc Mtot = 0 if f_val: par = par.params if self.MB.fzmc == 1: try: zmc = par['zmc'].value except: zmc = self.MB.zgal else: zmc = self.MB.zgal pp = 0 # AV limit; if par['Av'] < self.MB.Avmin: par['Av'] = self.MB.Avmin if par['Av'] > self.MB.Avmax: par['Av'] = self.MB.Avmax Av00 = par['Av'] for aa in range(len(AA)): if self.MB.ZEVOL==1 or aa == 0: Z = par['Z'+str(aa)] NZ = bfnc.Z2NZ(Z) else: pass # Check limit; if par['A'+str(aa)] < self.MB.Amin: par['A'+str(aa)] = self.MB.Amin if par['A'+str(aa)] > self.MB.Amax: par['A'+str(aa)] = self.MB.Amax # Z limit: if aa == 0 or self.MB.ZEVOL == 1: if par['Z%d'%aa] < self.MB.Zmin: par['Z%d'%aa] = self.MB.Zmin if par['Z%d'%aa] > self.MB.Zmax: par['Z%d'%aa] = self.MB.Zmax # Is A in logspace? 
if f_Alog: A00 = 10**par['A'+str(aa)] else: A00 = par['A'+str(aa)] coln = int(2 + pp*len(ZZ)*len(AA) + NZ*len(AA) + aa) sedpar = self.MB.af['ML'] # For M/L mslist = sedpar['ML_'+str(NZ)][aa] Mtot += 10**(par['A%d'%aa] + np.log10(mslist)) if lib_all: if aa == 0: nr = self.MB.lib_all[:, 0] xx = self.MB.lib_all[:, 1] # This is OBSERVED wavelength range at z=zgal yy = A00 * self.MB.lib_all[:, coln] else: yy += A00 * self.MB.lib_all[:, coln] else: if aa == 0: nr = self.MB.lib[:, 0] xx = self.MB.lib[:, 1] # This is OBSERVED wavelength range at z=zgal yy = A00 * self.MB.lib[:, coln] else: yy += A00 * self.MB.lib[:, coln] self.MB.logMtmp = np.log10(Mtot) if round(zmc,nprec) != round(self.MB.zgal,nprec): xx_s = xx / (1+self.MB.zgal) * (1+zmc) fint = interpolate.interp1d(xx, yy, kind='nearest', fill_value="extrapolate") yy_s = fint(xx_s) else: xx_s = xx yy_s = yy xx = xx_s yy = yy_s if self.dust_model == 0: yyd, xxd, nrd = dust_calz(xx/(1.+zmc), yy, Av00, nr) elif self.dust_model == 1: yyd, xxd, nrd = dust_mw(xx/(1.+zmc), yy, Av00, nr) elif self.dust_model == 2: # LMC yyd, xxd, nrd = dust_gen(xx/(1.+zmc), yy, Av00, nr, Rv=4.05, gamma=-0.06, Eb=2.8) elif self.dust_model == 3: # SMC yyd, xxd, nrd = dust_gen(xx/(1.+zmc), yy, Av00, nr, Rv=4.05, gamma=-0.42, Eb=0.0) elif self.dust_model == 4: # Kriek&Conroy with gamma=-0.2 yyd, xxd, nrd = dust_kc(xx/(1.+zmc), yy, Av00, nr, Rv=4.05, gamma=-0.2) else: yyd, xxd, nrd = dust_calz(xx/(1.+zmc), yy, Av00, nr) xxd *= (1.+zmc) nrd_yyd = np.zeros((len(nrd),3), dtype='float') nrd_yyd[:,0] = nrd[:] nrd_yyd[:,1] = yyd[:] nrd_yyd[:,2] = xxd[:] nrd_yyd_sort = nrd_yyd[nrd_yyd[:,0].argsort()] if not f_nrd: return nrd_yyd_sort[:,1],nrd_yyd_sort[:,2] else: return nrd_yyd_sort[:,0],nrd_yyd_sort[:,1],nrd_yyd_sort[:,2] def tmp04_dust(self, par, nprec=1): ''' Makes model template with a given param setself. Also dust attenuation. 
''' tau0= self.tau0 ZZ = self.ZZ AA = self.AA bfnc = self.MB.bfnc DIR_TMP = self.MB.DIR_TMP try: m_dust = par['MDUST'] t_dust = par['TDUST'] except: # This is exception for initial minimizing; m_dust = -99 t_dust = 0 nr = self.MB.lib_dust[:,0] xx = self.MB.lib_dust[:,1] # This is OBSERVED wavelength range at z=zgal coln= 2+int(t_dust+0.5) yy = 10**m_dust * self.MB.lib_dust[:,coln] if self.MB.fzmc == 1: zmc = par.params['zmc'].value else: zmc = self.MB.zgal # How much does this cost in time? if round(zmc,nprec) != round(self.MB.zgal,nprec): xx_s = xx / (1+self.MB.zgal) * (1+zmc) fint = interpolate.interp1d(xx, yy, kind='nearest', fill_value="extrapolate") yy_s = fint(xx_s) else: xx_s = xx yy_s = yy return yy_s, xx_s class Func_tau: ''' ''' def __init__(self, MB, dust_model=0): ''' Parameters: ----------- dust_model : int 0 for Calzetti. 1 for MW. 4 for Kriek Conroy ''' self.MB = MB self.ID = MB.ID self.ZZ = MB.Zall self.AA = MB.nage self.tau = MB.tau self.dust_model = dust_model self.DIR_TMP = MB.DIR_TMP if MB.f_dust: self.Temp = MB.Temp try: self.filts = MB.filts self.DIR_FIL = MB.DIR_FILT except: pass # Already Read or not; self.f_af = False self.f_af0 = False def demo(self): ZZ = self.ZZ AA = self.AA return ZZ, AA def open_spec_fits(self, fall=0, orig=False): ''' Loads template in obs range. 
''' ID0 = self.MB.ID from astropy.io import fits ZZ = self.ZZ AA = self.AA bfnc = self.MB.bfnc # ASDF; if fall == 0: app = '' hdu0 = self.MB.af['spec'] elif fall == 1: app = 'all_' hdu0 = self.MB.af['spec_full'] DIR_TMP = self.DIR_TMP NZ = len(ZZ) NT = self.MB.ntau NA = self.MB.nage for zz,Z in enumerate(ZZ): for tt,TT in enumerate(self.MB.tau): for ss,TA in enumerate(self.MB.ageparam): if zz == 0 and tt == 0 and ss == 0: nr = hdu0['colnum'] xx = hdu0['wavelength'] coln = int(2 + NZ * NT * NA) # + self.MB.ntau * self.MB.nage + NA) lib = np.zeros((len(nr), coln), dtype='float') lib[:,0] = nr[:] lib[:,1] = xx[:] if orig: colname = 'fspec_orig_' + str(zz) + '_' + str(tt) + '_' + str(ss) else: colname = 'fspec_' + str(zz) + '_' + str(tt) + '_' + str(ss) colnall = int(2 + zz * NT * NA + tt * NA + ss) # 2 takes account of wavelength and AV columns. lib[:,colnall] = hdu0[colname] return lib def open_spec_dust_fits(self, fall=0): ''' Load dust template in obs range. ''' ID0 = self.MB.ID tau0= self.MB.tau0 #[0.01,0.02,0.03] from astropy.io import fits ZZ = self.ZZ AA = self.AA bfnc = self.MB.bfnc #Basic(ZZ) self.MB.af = asdf.open(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf') self.MB.af0 = asdf.open(self.DIR_TMP + 'spec_all.asdf') if fall == 0: app = '' hdu0 = self.MB.af['spec_dust'] elif fall == 1: app = 'all_' hdu0 = self.MB.af['spec_dust_full'] DIR_TMP = self.DIR_TMP nr = hdu0['colnum'] xx = hdu0['wavelength'] lib = np.zeros((len(nr), 2+len(self.Temp)), dtype='float') lib[:,0] = nr[:] lib[:,1] = xx[:] for aa in range(len(self.Temp)): coln = int(2 + aa) colname = 'fspec_' + str(aa) colnall = int(2 + aa) # 2 takes account of wavelength and AV columns. lib[:,colnall] = hdu0[colname] if fall==1 and False: import matplotlib.pyplot as plt plt.close() plt.plot(lib[:,1],lib[:,coln],linestyle='-') plt.show() return lib def open_spec_fits_dir(self, nage, nz, kk, Av00, zgal, A00): ''' Loads template in obs range. But for weird template. 
''' from astropy.io import fits tau0= self.tau0 #[0.01,0.02,0.03] ZZ = self.ZZ AA = self.AA bfnc = self.MB.bfnc #Basic(ZZ) self.MB.af = asdf.open(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf') self.MB.af0 = asdf.open(self.DIR_TMP + 'spec_all.asdf') app = 'all' hdu0 = self.MB.af['spec_full'] DIR_TMP = self.DIR_TMP #'./templates/' pp = 0 zz = nz # Luminosity mshdu = self.MB.af0['ML'] Ls = mshdu['Ls_%d'%nz] xx = hdu0['wavelength'] # at RF; nr = np.arange(0,len(xx),1) #hdu0.data['colnum'] lib = np.zeros((len(nr), 2+1), dtype='float') lib[:,0] = nr[:] lib[:,1] = xx[:] aa = nage coln = int(2 + aa) colname = 'fspec_' + str(zz) + '_' + str(aa) + '_' + str(pp) yy0 = hdu0[colname]/Ls[aa] yy = flamtonu(xx, yy0) lib[:,2] = yy[:] if self.dust_model == 0: # Calzetti yyd, xxd, nrd = dust_calz(xx, yy, Av00, nr) elif self.dust_model == 1: # MW yyd, xxd, nrd = dust_mw(xx, yy, Av00, nr) elif self.dust_model == 2: # LMC yyd, xxd, nrd = dust_gen(xx, yy, Av00, nr, Rv=4.05, gamma=-0.06, Eb=2.8) elif self.dust_model == 3: # SMC yyd, xxd, nrd = dust_gen(xx, yy, Av00, nr, Rv=4.05, gamma=-0.42, Eb=0.0) elif self.dust_model == 4: # Kriek&Conroy with gamma=-0.2 yyd, xxd, nrd = dust_kc(xx, yy, Av00, nr, Rv=4.05, gamma=-0.2) else: print('No entry. Dust model is set to Calzetti') yyd, xxd, nrd = dust_calz(xx, yy, Av00, nr) xxd *= (1.+zgal) nrd_yyd = np.zeros((len(nrd),3), dtype='float') nrd_yyd[:,0] = nrd[:] nrd_yyd[:,1] = yyd[:] nrd_yyd[:,2] = xxd[:] b = nrd_yyd nrd_yyd_sort = b[np.lexsort(([-1,1]*b[:,[1,0]]).T)] yyd_sort = nrd_yyd_sort[:,1] xxd_sort = nrd_yyd_sort[:,2] return A00 * yyd_sort, xxd_sort def tmp04(self, par, f_Alog=True, nprec=1, f_val=False, check_bound=False, lib_all=False, f_nrd=False): ''' Makes model template with a given param set. Also dust attenuation. Parameters: ----------- nprec : int Precision when redshift is refined. 
''' ZZ = self.ZZ AA = self.AA bfnc = self.MB.bfnc Mtot = 0 pp = 0 if f_val: par = par.params if self.MB.fzmc == 1: try: zmc = par['zmc'].value except: zmc = self.MB.zgal else: zmc = self.MB.zgal if check_bound: # AV limit; if par['Av'] < self.MB.Avmin: par['Av'] = self.MB.Avmin if par['Av'] > self.MB.Avmax: par['Av'] = self.MB.Avmax Av00 = par['Av'] for aa in range(self.MB.npeak): if self.MB.ZEVOL==1 or aa == 0: if check_bound: # Z limit: if par['Z%d'%aa] < self.MB.Zmin: par['Z%d'%aa] = self.MB.Zmin if par['Z%d'%aa] > self.MB.Zmax: par['Z%d'%aa] = self.MB.Zmax Z = par['Z%d'%aa] else: pass if check_bound: # A if par['A'+str(aa)] < self.MB.Amin: par['A'+str(aa)] = self.MB.Amin if par['A'+str(aa)] > self.MB.Amax: par['A'+str(aa)] = self.MB.Amax if par['TAU'+str(aa)] < self.MB.taumin: par['TAU'+str(aa)] = self.MB.taumin if par['TAU'+str(aa)] > self.MB.taumax: par['TAU'+str(aa)] = self.MB.taumax if par['AGE'+str(aa)] < self.MB.agemin: par['AGE'+str(aa)] = self.MB.agemin if par['AGE'+str(aa)] > self.MB.agemax: par['AGE'+str(aa)] = self.MB.agemax # Is A in logspace? if f_Alog: A00 = 10**par['A'+str(aa)] else: A00 = par['A'+str(aa)] tau,age = par['TAU%d'%aa],par['AGE%d'%aa] NZ, NT, NA = bfnc.Z2NZ(Z,tau,age) coln = int(2 + NZ*self.MB.ntau*self.MB.nage + NT*self.MB.nage + NA) mslist = self.MB.af['ML']['ML_'+str(NZ)+'_'+str(NT)][NA] Mtot += 10**(par['A%d'%aa] + np.log10(mslist)) if lib_all: if aa == 0: nr = self.MB.lib_all[:, 0] xx = self.MB.lib_all[:, 1] # This is OBSERVED wavelength range at z=zgal yy = A00 * self.MB.lib_all[:, coln] else: yy += A00 * self.MB.lib_all[:, coln] else: if aa == 0: nr = self.MB.lib[:, 0] xx = self.MB.lib[:, 1] # This is OBSERVED wavelength range at z=zgal yy = A00 * self.MB.lib[:, coln] else: yy += A00 * self.MB.lib[:, coln] # Keep logM self.MB.logMtmp = np.log10(Mtot) # Redshift refinement; if round(zmc,nprec) != round(self.MB.zgal,nprec): # Not sure how much this costs in time. 
xx_s = xx / (1+self.MB.zgal) * (1+zmc) fint = interpolate.interp1d(xx, yy, kind='nearest', fill_value="extrapolate") yy_s = fint(xx_s) else: xx_s = xx yy_s = yy xx = xx_s yy = yy_s if self.dust_model == 0: yyd, xxd, nrd = dust_calz(xx/(1.+zmc), yy, Av00, nr) elif self.dust_model == 1: yyd, xxd, nrd = dust_mw(xx/(1.+zmc), yy, Av00, nr) elif self.dust_model == 2: # LMC yyd, xxd, nrd = dust_gen(xx/(1.+zmc), yy, Av00, nr, Rv=4.05, gamma=-0.06, Eb=2.8) elif self.dust_model == 3: # SMC yyd, xxd, nrd = dust_gen(xx/(1.+zmc), yy, Av00, nr, Rv=4.05, gamma=-0.42, Eb=0.0) elif self.dust_model == 4: # Kriek&Conroy with gamma=-0.2 yyd, xxd, nrd = dust_kc(xx/(1.+zmc), yy, Av00, nr, Rv=4.05, gamma=-0.2) else: yyd, xxd, nrd = dust_calz(xx/(1.+zmc), yy, Av00, nr) xxd *= (1.+zmc) if self.dust_model == 0: if not f_nrd: return yyd,xxd else: return nrd,yyd,xxd else: nrd_yyd = np.zeros((len(nrd),3), dtype='float') nrd_yyd[:,0] = nrd[:] nrd_yyd[:,1] = yyd[:] nrd_yyd[:,2] = xxd[:] nrd_yyd_sort = nrd_yyd[nrd_yyd[:,0].argsort()] if not f_nrd: return nrd_yyd_sort[:,1],nrd_yyd_sort[:,2] else: return nrd_yyd_sort[:,0],nrd_yyd_sort[:,1],nrd_yyd_sort[:,2] def tmp04_dust(self, par, nprec=1): ''' Makes model template with a given param setself. Also dust attenuation. ''' bfnc = self.MB.bfnc #Basic(ZZ) DIR_TMP = self.MB.DIR_TMP #'./templates/' try: m_dust = par['MDUST'] t_dust = par['TDUST'] except: # This is exception for initial minimizing; m_dust = -99 t_dust = 0 nr = self.MB.lib_dust[:,0] xx = self.MB.lib_dust[:,1] # This is OBSERVED wavelength range at z=zgal coln= 2+int(t_dust+0.5) yy = 10**m_dust * self.MB.lib_dust[:,coln] if self.MB.fzmc == 1: zmc = par.params['zmc'].value else: zmc = self.MB.zgal # How much does this cost in time? if round(zmc,nprec) != round(self.MB.zgal,nprec): xx_s = xx / (1+self.MB.zgal) * (1+zmc) fint = interpolate.interp1d(xx, yy, kind='nearest', fill_value="extrapolate") yy_s = fint(xx_s) else: xx_s = xx yy_s = yy return yy_s, xx_s
[ "numpy.abs", "numpy.log10", "matplotlib.pyplot.plot", "scipy.interpolate.interp1d", "matplotlib.pyplot.close", "numpy.lexsort", "asdf.open", "matplotlib.pyplot.show" ]
[((2825, 2882), 'asdf.open', 'asdf.open', (["(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf')"], {}), "(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf')\n", (2834, 2882), False, 'import asdf\n'), ((2905, 2946), 'asdf.open', 'asdf.open', (["(self.DIR_TMP + 'spec_all.asdf')"], {}), "(self.DIR_TMP + 'spec_all.asdf')\n", (2914, 2946), False, 'import asdf\n'), ((4146, 4203), 'asdf.open', 'asdf.open', (["(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf')"], {}), "(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf')\n", (4155, 4203), False, 'import asdf\n'), ((4226, 4267), 'asdf.open', 'asdf.open', (["(self.DIR_TMP + 'spec_all.asdf')"], {}), "(self.DIR_TMP + 'spec_all.asdf')\n", (4235, 4267), False, 'import asdf\n'), ((13083, 13097), 'numpy.log10', 'np.log10', (['Mtot'], {}), '(Mtot)\n', (13091, 13097), True, 'import numpy as np\n'), ((18228, 18285), 'asdf.open', 'asdf.open', (["(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf')"], {}), "(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf')\n", (18237, 18285), False, 'import asdf\n'), ((18308, 18349), 'asdf.open', 'asdf.open', (["(self.DIR_TMP + 'spec_all.asdf')"], {}), "(self.DIR_TMP + 'spec_all.asdf')\n", (18317, 18349), False, 'import asdf\n'), ((19549, 19606), 'asdf.open', 'asdf.open', (["(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf')"], {}), "(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf')\n", (19558, 19606), False, 'import asdf\n'), ((19629, 19670), 'asdf.open', 'asdf.open', (["(self.DIR_TMP + 'spec_all.asdf')"], {}), "(self.DIR_TMP + 'spec_all.asdf')\n", (19638, 19670), False, 'import asdf\n'), ((24598, 24612), 'numpy.log10', 'np.log10', (['Mtot'], {}), '(Mtot)\n', (24606, 24612), True, 'import numpy as np\n'), ((5848, 5886), 'numpy.lexsort', 'np.lexsort', (['([-1, 1] * b[:, [1, 0]]).T'], {}), '(([-1, 1] * b[:, [1, 0]]).T)\n', (5858, 5886), True, 'import numpy as np\n'), ((7152, 7175), 'numpy.abs', 'np.abs', (['(T - self.age[:])'], {}), '(T - self.age[:])\n', (7158, 7175), True, 'import numpy as np\n'), ((8417, 8455), 
'numpy.lexsort', 'np.lexsort', (['([-1, 1] * b[:, [1, 0]]).T'], {}), '(([-1, 1] * b[:, [1, 0]]).T)\n', (8427, 8455), True, 'import numpy as np\n'), ((10371, 10409), 'numpy.lexsort', 'np.lexsort', (['([-1, 1] * b[:, [1, 0]]).T'], {}), '(([-1, 1] * b[:, [1, 0]]).T)\n', (10381, 10409), True, 'import numpy as np\n'), ((13227, 13297), 'scipy.interpolate.interp1d', 'interpolate.interp1d', (['xx', 'yy'], {'kind': '"""nearest"""', 'fill_value': '"""extrapolate"""'}), "(xx, yy, kind='nearest', fill_value='extrapolate')\n", (13247, 13297), True, 'import scipy.interpolate as interpolate\n'), ((15470, 15540), 'scipy.interpolate.interp1d', 'interpolate.interp1d', (['xx', 'yy'], {'kind': '"""nearest"""', 'fill_value': '"""extrapolate"""'}), "(xx, yy, kind='nearest', fill_value='extrapolate')\n", (15490, 15540), True, 'import scipy.interpolate as interpolate\n'), ((21246, 21284), 'numpy.lexsort', 'np.lexsort', (['([-1, 1] * b[:, [1, 0]]).T'], {}), '(([-1, 1] * b[:, [1, 0]]).T)\n', (21256, 21284), True, 'import numpy as np\n'), ((24813, 24883), 'scipy.interpolate.interp1d', 'interpolate.interp1d', (['xx', 'yy'], {'kind': '"""nearest"""', 'fill_value': '"""extrapolate"""'}), "(xx, yy, kind='nearest', fill_value='extrapolate')\n", (24833, 24883), True, 'import scipy.interpolate as interpolate\n'), ((27211, 27281), 'scipy.interpolate.interp1d', 'interpolate.interp1d', (['xx', 'yy'], {'kind': '"""nearest"""', 'fill_value': '"""extrapolate"""'}), "(xx, yy, kind='nearest', fill_value='extrapolate')\n", (27231, 27281), True, 'import scipy.interpolate as interpolate\n'), ((3687, 3698), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (3696, 3698), True, 'import matplotlib.pyplot as plt\n'), ((3715, 3763), 'matplotlib.pyplot.plot', 'plt.plot', (['lib[:, 1]', 'lib[:, coln]'], {'linestyle': '"""-"""'}), "(lib[:, 1], lib[:, coln], linestyle='-')\n", (3723, 3763), True, 'import matplotlib.pyplot as plt\n'), ((3776, 3786), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3784, 
3786), True, 'import matplotlib.pyplot as plt\n'), ((19090, 19101), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (19099, 19101), True, 'import matplotlib.pyplot as plt\n'), ((19118, 19166), 'matplotlib.pyplot.plot', 'plt.plot', (['lib[:, 1]', 'lib[:, coln]'], {'linestyle': '"""-"""'}), "(lib[:, 1], lib[:, coln], linestyle='-')\n", (19126, 19166), True, 'import matplotlib.pyplot as plt\n'), ((19179, 19189), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (19187, 19189), True, 'import matplotlib.pyplot as plt\n'), ((12397, 12413), 'numpy.log10', 'np.log10', (['mslist'], {}), '(mslist)\n', (12405, 12413), True, 'import numpy as np\n'), ((23900, 23916), 'numpy.log10', 'np.log10', (['mslist'], {}), '(mslist)\n', (23908, 23916), True, 'import numpy as np\n')]
import os import sys import json from manager import dbmanager def loadJson(filename): print("loadJson: " + filename) memoList = [] try: if os.path.isfile(filename): file = open(filename, 'r', encoding="UTF-8") lines = file.readlines() file.close() idx = 0 for line in lines[1:]: memo = json.loads(line) idx += 1 item = {} item['id'] = memo["id"] item['memo'] = memo["memo"] item['index'] = str(idx) memoList.append(item) print("Success to load " + filename) return memoList except: print("Loading failed:" + filename) return [] def save2DB(data, db_name): db = dbmanager.DBManager(db_name) print(len(data)) # for item in data: # db.insert([item['id'], item['memo']]) # db.printDB() def main(filenames): json_file = filenames[0] db_file = filenames[1] data = loadJson(json_file) save2DB(data, db_file) if __name__ == '__main__': if len(sys.argv) < 3: print("Usage: json2db json_file db_file") else: main(sys.argv[1:])
[ "os.path.isfile", "json.loads", "manager.dbmanager.DBManager" ]
[((800, 828), 'manager.dbmanager.DBManager', 'dbmanager.DBManager', (['db_name'], {}), '(db_name)\n', (819, 828), False, 'from manager import dbmanager\n'), ((162, 186), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (176, 186), False, 'import os\n'), ((386, 402), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (396, 402), False, 'import json\n')]
import FWCore.ParameterSet.Config as cms # This modifier sets replaces the default pattern recognition with mkFit for tobTecStep trackingMkFitTobTecStep = cms.Modifier()
[ "FWCore.ParameterSet.Config.Modifier" ]
[((156, 170), 'FWCore.ParameterSet.Config.Modifier', 'cms.Modifier', ([], {}), '()\n', (168, 170), True, 'import FWCore.ParameterSet.Config as cms\n')]
# -*- coding: utf-8 -*- from __future__ import absolute_import,unicode_literals from pythainlp.tools import get_path_db,get_path_data from tinydb import TinyDB,Query from future.moves.urllib.request import urlopen from tqdm import tqdm import requests import os import math import requests from nltk.corpus import names #__all__ = ["thaipos", "thaiword","alphabet","tone","country","wordnet"] path_db_=get_path_db() def get_file(name): db=TinyDB(path_db_) temp = Query() if len(db.search(temp.name==name))>0: path= get_path_data(db.search(temp.name==name)[0]['file']) db.close() if not os.path.exists(path): download(name) return path def download_(url, dst): """ @param: url to download file @param: dst place to put the file """ file_size = int(urlopen(url).info().get('Content-Length', -1)) if os.path.exists(dst): first_byte = os.path.getsize(dst) else: first_byte = 0 if first_byte >= file_size: return file_size header = {"Range": "bytes=%s-%s" % (first_byte, file_size)} pbar = tqdm( total=file_size, initial=first_byte, unit='B', unit_scale=True, desc=url.split('/')[-1]) req = requests.get(url, headers=header, stream=True) with(open(get_path_data(dst), 'wb')) as f: for chunk in req.iter_content(chunk_size=1024): if chunk: f.write(chunk) pbar.update(1024) pbar.close() #return file_size def download(name,force=False): db=TinyDB(path_db_) temp = Query() data=requests.get("https://raw.githubusercontent.com/PyThaiNLP/pythainlp-corpus/master/db.json") data_json=data.json() if name in list(data_json.keys()): temp_name=data_json[name] print("Download : "+name) if len(db.search(temp.name==name))==0: print(name+" "+temp_name['version']) download_(temp_name['download'],temp_name['file_name']) db.insert({'name': name, 'version': temp_name['version'],'file':temp_name['file_name']}) else: if len(db.search(temp.name==name and temp.version==temp_name['version']))==0: print("have update") print("from "+name+" "+db.search(temp.name==name)[0]['version']+" update to "+name+" 
"+temp_name['version']) yes_no="y" if force==False: yes_no=str(input("y or n : ")).lower() if "y"==yes_no: download_(temp_name['download'],temp_name['file_name']) db.update({'version':temp_name['version']},temp.name==name) else: print("re-download") print("from "+name+" "+db.search(temp.name==name)[0]['version']+" update to "+name+" "+temp_name['version']) yes_no="y" if force==False: yes_no=str(input("y or n : ")).lower() if "y"==yes_no: download_(temp_name['download'],temp_name['file_name']) db.update({'version':temp_name['version']},temp.name==name) db.close()
[ "os.path.exists", "os.path.getsize", "tinydb.TinyDB", "tinydb.Query", "requests.get", "pythainlp.tools.get_path_data", "future.moves.urllib.request.urlopen", "pythainlp.tools.get_path_db" ]
[((402, 415), 'pythainlp.tools.get_path_db', 'get_path_db', ([], {}), '()\n', (413, 415), False, 'from pythainlp.tools import get_path_db, get_path_data\n'), ((443, 459), 'tinydb.TinyDB', 'TinyDB', (['path_db_'], {}), '(path_db_)\n', (449, 459), False, 'from tinydb import TinyDB, Query\n'), ((471, 478), 'tinydb.Query', 'Query', ([], {}), '()\n', (476, 478), False, 'from tinydb import TinyDB, Query\n'), ((877, 896), 'os.path.exists', 'os.path.exists', (['dst'], {}), '(dst)\n', (891, 896), False, 'import os\n'), ((1226, 1272), 'requests.get', 'requests.get', (['url'], {'headers': 'header', 'stream': '(True)'}), '(url, headers=header, stream=True)\n', (1238, 1272), False, 'import requests\n'), ((1541, 1557), 'tinydb.TinyDB', 'TinyDB', (['path_db_'], {}), '(path_db_)\n', (1547, 1557), False, 'from tinydb import TinyDB, Query\n'), ((1569, 1576), 'tinydb.Query', 'Query', ([], {}), '()\n', (1574, 1576), False, 'from tinydb import TinyDB, Query\n'), ((1586, 1687), 'requests.get', 'requests.get', (['"""https://raw.githubusercontent.com/PyThaiNLP/pythainlp-corpus/master/db.json"""'], {}), "(\n 'https://raw.githubusercontent.com/PyThaiNLP/pythainlp-corpus/master/db.json'\n )\n", (1598, 1687), False, 'import requests\n'), ((919, 939), 'os.path.getsize', 'os.path.getsize', (['dst'], {}), '(dst)\n', (934, 939), False, 'import os\n'), ((622, 642), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (636, 642), False, 'import os\n'), ((1287, 1305), 'pythainlp.tools.get_path_data', 'get_path_data', (['dst'], {}), '(dst)\n', (1300, 1305), False, 'from pythainlp.tools import get_path_db, get_path_data\n'), ((823, 835), 'future.moves.urllib.request.urlopen', 'urlopen', (['url'], {}), '(url)\n', (830, 835), False, 'from future.moves.urllib.request import urlopen\n')]
__all__ = [ 'Terminated', 'Unavailable', 'client', 'server', ] import logging import time import curio import nanomsg as nn from garage import asyncs from garage.assertions import ASSERT from garage.asyncs import futures from garage.asyncs import queues LOG = logging.getLogger(__name__) class Terminated(Exception): """Client agent is terminated.""" class Unavailable(Exception): """Service is unavailable.""" def _transform_error(exc): if isinstance(exc, curio.TaskTimeout): new_exc = Unavailable() new_exc.__cause__ = exc return new_exc elif isinstance(exc, (nn.EBADF, queues.Closed)): new_exc = Terminated() new_exc.__cause__ = exc return new_exc else: return exc async def client(graceful_exit, sockets, request_queue, timeout=None): """Act as client-side in the reqrep protocol. NOTE: Because we want end-to-end functionality (non-raw sockets), a socket can only handle one request at a time; to overcome this, we use a pool of sockets. In additional to handling requests, this waits for the graceful exit event and then clean up itself. When cleaning up, it: * Close socket so that pump_requests will not send any further requests. * Close the queue so that upstream will not enqueue any further requests. The requests still in the queue will be "processed", with their result being set to EBADF, since the socket is closed. This signals and unblocks all blocked upstream tasks. 
""" for socket in sockets: ASSERT.equal(socket.options.nn_domain, nn.AF_SP) ASSERT.equal(socket.options.nn_protocol, nn.NN_REQ) async def pump_requests(socket): LOG.info('client: start sending requests to: %s', socket) while True: try: request, response_promise = await request_queue.get() except queues.Closed: break if not response_promise.set_running_or_notify_cancel(): LOG.debug('client: drop request: %r', request) continue try: async with curio.timeout_after(timeout): await socket.send(request) with await socket.recv() as message: response = bytes(message.as_memoryview()) except Exception as exc: if response_promise.cancelled(): LOG.exception( 'client: err but request is cancelled: %r', request, ) else: response_promise.set_exception(_transform_error(exc)) else: response_promise.set_result(response) LOG.info('client: stop sending requests to: %s', socket) async with asyncs.TaskStack() as stack: for socket in sockets: await stack.spawn(pump_requests(socket)) stack.sync_callback(request_queue.close) for socket in sockets: stack.sync_callback(socket.close) await stack.spawn(graceful_exit.wait()) await (await stack.wait_any()).join() async def server( graceful_exit, socket, request_queue, timeout=None, error_handler=None): """Act as server-side in the reqrep protocol. NOTE: error_handler is not asynchronous because you should probably send back error messages without being blocked indefinitely. In additional to handling requests, this waits for the graceful exit event and then clean up itself. When cleaning up, it: * Close socket so that the pump_requests will not recv new requests and will exit. * Close the queue so that downstream will not dequeue any request. The requests still in the queue will be dropped (since socket is closed, their response cannot be sent back to the client). 
""" ASSERT.equal(socket.options.nn_domain, nn.AF_SP_RAW) ASSERT.equal(socket.options.nn_protocol, nn.NN_REP) if error_handler is None: error_handler = lambda *_: None async def pump_requests(handlers): LOG.info('server: start receiving requests from: %s', socket) while True: try: message = await socket.recvmsg() except nn.EBADF: break with message: response_message = nn.Message() # NOTE: It is important to set control header in the # response message from the request so that response can # be correctly routed back to the right sender. response_message.adopt_control(*message.disown_control()) request = bytes(message.as_memoryview()) # Enqueue request here rather than in handle_request so that # pump_requests may apply back pressure to socket. begin_time = time.perf_counter() try: response_future = futures.Future() async with curio.timeout_after(timeout): await request_queue.put(( request, response_future.promise(), )) except Exception as exc: await on_error(exc, request, response_message) continue await handlers.spawn(handle_request( begin_time, request, response_future, response_message, )) LOG.info('server: stop receiving requests from: %s', socket) async def handle_request( begin_time, request, response_future, response_message): if timeout is not None: remaining_time = timeout - (time.perf_counter() - begin_time) if remaining_time <= 0: response_future.cancel() await on_error( Unavailable(), request, response_message, exc_info=False, ) return else: remaining_time = None try: async with curio.timeout_after(remaining_time), response_future: response = await response_future.result() except Exception as exc: await on_error(exc, request, response_message) else: await send_response(request, response, response_message) async def on_error(exc, request, response_message, *, exc_info=True): if isinstance(exc, curio.TaskTimeout): # Timeout is very common is distributed system; whether it # is an error should be decided at application level, and we # will just log a warning 
here. log = LOG.warning else: log = LOG.error log( 'server: err when processing request: %r', request, exc_info=exc_info, ) error_response = error_handler(request, _transform_error(exc)) if error_response is not None: await send_response(request, error_response, response_message) async def send_response(request, response, response_message): response_message.adopt_message(response, len(response), False) try: await socket.sendmsg(response_message) except nn.EBADF: LOG.debug('server: drop response: %r, %r', request, response) async def join_handlers(handlers): async for handler in handlers: if handler.exception: LOG.error( 'server: err in request handler', exc_info=handler.exception, ) def close_queue(): num_dropped = len(request_queue.close(graceful=False)) if num_dropped: LOG.info('server: drop %d requests', num_dropped) async with asyncs.TaskSet() as handlers, asyncs.TaskStack() as stack: await stack.spawn(join_handlers(handlers)) await stack.spawn(pump_requests(handlers)) stack.sync_callback(close_queue) stack.sync_callback(socket.close) await stack.spawn(graceful_exit.wait()) await (await stack.wait_any()).join()
[ "logging.getLogger", "garage.asyncs.futures.Future", "time.perf_counter", "nanomsg.Message", "garage.asyncs.TaskSet", "curio.timeout_after", "garage.asyncs.TaskStack", "garage.assertions.ASSERT.equal" ]
[((282, 309), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (299, 309), False, 'import logging\n'), ((3978, 4030), 'garage.assertions.ASSERT.equal', 'ASSERT.equal', (['socket.options.nn_domain', 'nn.AF_SP_RAW'], {}), '(socket.options.nn_domain, nn.AF_SP_RAW)\n', (3990, 4030), False, 'from garage.assertions import ASSERT\n'), ((4035, 4086), 'garage.assertions.ASSERT.equal', 'ASSERT.equal', (['socket.options.nn_protocol', 'nn.NN_REP'], {}), '(socket.options.nn_protocol, nn.NN_REP)\n', (4047, 4086), False, 'from garage.assertions import ASSERT\n'), ((1601, 1649), 'garage.assertions.ASSERT.equal', 'ASSERT.equal', (['socket.options.nn_domain', 'nn.AF_SP'], {}), '(socket.options.nn_domain, nn.AF_SP)\n', (1613, 1649), False, 'from garage.assertions import ASSERT\n'), ((1658, 1709), 'garage.assertions.ASSERT.equal', 'ASSERT.equal', (['socket.options.nn_protocol', 'nn.NN_REQ'], {}), '(socket.options.nn_protocol, nn.NN_REQ)\n', (1670, 1709), False, 'from garage.assertions import ASSERT\n'), ((2876, 2894), 'garage.asyncs.TaskStack', 'asyncs.TaskStack', ([], {}), '()\n', (2892, 2894), False, 'from garage import asyncs\n'), ((7919, 7935), 'garage.asyncs.TaskSet', 'asyncs.TaskSet', ([], {}), '()\n', (7933, 7935), False, 'from garage import asyncs\n'), ((7949, 7967), 'garage.asyncs.TaskStack', 'asyncs.TaskStack', ([], {}), '()\n', (7965, 7967), False, 'from garage import asyncs\n'), ((4979, 4998), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (4996, 4998), False, 'import time\n'), ((4467, 4479), 'nanomsg.Message', 'nn.Message', ([], {}), '()\n', (4477, 4479), True, 'import nanomsg as nn\n'), ((5050, 5066), 'garage.asyncs.futures.Future', 'futures.Future', ([], {}), '()\n', (5064, 5066), False, 'from garage.asyncs import futures\n'), ((6197, 6232), 'curio.timeout_after', 'curio.timeout_after', (['remaining_time'], {}), '(remaining_time)\n', (6216, 6232), False, 'import curio\n'), ((2180, 2208), 'curio.timeout_after', 
'curio.timeout_after', (['timeout'], {}), '(timeout)\n', (2199, 2208), False, 'import curio\n'), ((5094, 5122), 'curio.timeout_after', 'curio.timeout_after', (['timeout'], {}), '(timeout)\n', (5113, 5122), False, 'import curio\n'), ((5830, 5849), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (5847, 5849), False, 'import time\n')]
#! /usr/bin/env python3.5 # vim:ts=4:sw=4:ai:et:si:sts=4 import argparse import json import re import os import uuid import shutil import sys import requests filterRe = re.compile(r'(?P<block>^%=(?P<mode>.)?\s+(?P<label>.*?)\s+(?P<value>[^\s\n$]+)(?:\s*.*?)?^(?P<section>.*?)^=%.*?$)', re.M | re.S) subItemRe = re.compile(r'@_@') def convertConfig(config): keys = list(config.keys()) regexes = list(map(lambda x: re.compile(r"@%s@" % x, re.I), keys)) values = list(config.values()) subst = zip(keys, regexes, values) subst = {key: {'regex': regex, 'value': value} for (key, regex, value) in subst} return subst def substituteFile(infile, outfile, subst): if infile == "stdin": text = sys.stdin.read() else: with open(infile, "r") as f: text = f.read() print("Subtituting from %s to %s" % (infile, outfile)) for item in subst.values(): regex = item.get('regex', None) repl = item.get('value', None) if regex is None or repl is None: continue text = regex.sub(str(repl), text) blocks = filterRe.findall(text) for (block, mode, label, value, section) in blocks: subvalue = subst.get(label.lower(), {}).get('value', None) print(mode, label, value, subvalue) if mode == '+' or mode == '': if subvalue is not None and str(subvalue) != value: section = "" elif mode == '-': if subvalue is None or str(subvalue) != value: section = "" elif mode == '?': if subvalue is None: section = "" elif mode == '!': if subvalue is not None: section = "" sections = '' if not isinstance(subvalue, list): subvalue = [subvalue] for subval in subvalue: sections += subItemRe.sub(str(subval), section) text = text.replace(block, sections) with open(outfile, "w") as f: f.write(text) def copyfile(coin, infile, outfile=None): if not os.path.exists(infile): return if not outfile: outfile = infile outfile = os.path.join("build", coin, outfile) print("Copying %s to %s" % (infile, outfile)) shutil.copyfile(infile, outfile) parser = argparse.ArgumentParser(description="Substitute in variables") parser.add_argument('--coin', 
'-c', required=True, help="Which coin") parser.add_argument('--nodaemon', '-D', action="store_false", dest="daemon", help="Don't copy daemon") parser.add_argument('--pool', '-p', action="store_true", help="Grab pool wallet") parser.add_argument('--explorer', '-e', action="store_true", help="Use explorer") args = parser.parse_args() buildDir = os.path.join("build", args.coin) # First read the config file with open("config/%s.json" % args.coin, "r") as f: config = json.load(f) config = {key.lower(): value for (key, value) in config.items()} if args.pool: config["poolnode"] = 1 config.pop("grabwallet", None) if args.explorer: config['useexplorer'] = 1 else: config['useexplorer'] = 0 subst = convertConfig(config) if args.coin == 'coiniumserv' or args.coin == 'yiimp': result = requests.get("http://169.254.169.254/latest/meta-data/local-ipv4") subst.update(convertConfig({"hostip": result.text})) else: # Create a config file outconfig = { "daemon": 1, "dns": 1, "server": 1, "listen": 1, "rpcport": config['rpcport'], "rpcuser": "%srpc" % config['coinname'], } if not args.pool: rpcallowip = "127.0.0.1" rpcpassword = str(uuid.uuid4()) else: rpcallowip = ["127.0.0.1", "172.17.0.*"] rpcpassword = "<PASSWORD>-%s" % args.coin outconfig["rpcallowip"] = rpcallowip outconfig["rpcpassword"] = rpcpassword addnodes = config.get('addnodes', []) if not isinstance(addnodes, list): addnodes = [addnodes] if addnodes: outconfig['addnode'] = addnodes # Add the config setting to the mapping subst.update(convertConfig(outconfig)) conffile = os.path.join(buildDir, "%s.conf" % config['coinname']) with open(conffile, "w") as f: for (key, values) in sorted(outconfig.items()): if not isinstance(values, list): values = [values] for value in values: f.write("%s=%s\n" % (key, value)) # Create the Dockerfile if args.coin == 'coiniumserv': infile = "Dockerfile.coiniumserv.in" elif args.coin == 'yiimp': infile = "Dockerfile.yiimp.in" else: infile = "Dockerfile.in" outfile = os.path.join(buildDir, 
"Dockerfile") substituteFile(infile, outfile, subst) # Create the node run Dockerfile infile = "Dockerfile.node.in" if args.pool: outfile = os.path.join(buildDir, "Dockerfile.pool") elif args.explorer: outfile = os.path.join(buildDir, "Dockerfile.explorer") else: outfile = os.path.join(buildDir, "Dockerfile.node") substituteFile(infile, outfile, subst) # Create the startup script if args.coin == 'coiniumserv': infile = "startup.sh-coiniumserv.in" elif args.coin == 'yiimp': infile = "startup.sh-yiimp.in" else: infile = "startup.sh.in" if args.pool: suffix = "-pool.sh" else: suffix = "-node.sh" outfile = os.path.join(buildDir, "startup%s" % suffix) substituteFile(infile, outfile, subst) # Create the ports file ports = [] port = config.get('p2pport', None) if port: ports.append(port) port = config.get('explorerport', None) useexplorer = config.get('useexplorer', None) if port and useexplorer: ports.append(port) port = config.get('p2poolport', None) usep2pool = config.get('usep2pool', None) if port and usep2pool: ports.append(port) port = config.get('poolport', None) if port: ports.append(port) if args.pool: port = config.get("rpcport", None) if port: ports.append(port) poolports = config.get('stratumports', None) if poolports: if not isinstance(poolports, list): poolports = [poolports] ports.extend(poolports) ports = list(map(lambda x: "-p %s:%s" % (x, x), ports)) links = config.get('links', None) if links: links = list(map(lambda x: "--link %s" % x, links)) ports.extend(links) ports = " ".join(ports) outfile = os.path.join(buildDir, "ports.txt") with open(outfile, "w") as f: f.write(ports) # Copy over the daemon if args.daemon and args.coin != 'coiniumserv' and args.coin != 'yiimp': infile = os.path.join("..", "build", "artifacts", config["coinname"], "linux", config['daemonname']) copyfile(args.coin, infile, config['daemonname']) if config.get('installexplorer', False): # Create the Explorer settings file infile = "explorer-settings.json.in" outfile = 
os.path.join(buildDir, "explorer-settings.json") substituteFile(infile, outfile, subst) # Create the Explorer layout template infile = "explorer-layout.jade.in" outfile = os.path.join(buildDir, "explorer-layout.jade") substituteFile(infile, outfile, subst) # Copy over the mongo init script and the crontab for explorer copyfile(args.coin, "explorer.mongo") copyfile(args.coin, "explorer-crontab") ## Copy the nodejs archive copyfile(args.coin, "build/cache/node-v8.7.0-linux-x64.tar.xz", "node-v8.7.0-linux-x64.tar.xz") # Copy the sudoers.d file copyfile(args.coin, "sudoers-coinnode") # Copy the coin-cli script copyfile(args.coin, "coin-cli") if config.get('copyawscreds', False): copyfile(args.coin, os.path.expanduser("~/.aws/credentials"), "aws-credentials")
[ "os.path.exists", "argparse.ArgumentParser", "re.compile", "os.path.join", "requests.get", "uuid.uuid4", "shutil.copyfile", "json.load", "sys.stdin.read", "os.path.expanduser" ]
[((172, 315), 're.compile', 're.compile', (['"""(?P<block>^%=(?P<mode>.)?\\\\s+(?P<label>.*?)\\\\s+(?P<value>[^\\\\s\\\\n$]+)(?:\\\\s*.*?)?^(?P<section>.*?)^=%.*?$)"""', '(re.M | re.S)'], {}), "(\n '(?P<block>^%=(?P<mode>.)?\\\\s+(?P<label>.*?)\\\\s+(?P<value>[^\\\\s\\\\n$]+)(?:\\\\s*.*?)?^(?P<section>.*?)^=%.*?$)'\n , re.M | re.S)\n", (182, 315), False, 'import re\n'), ((314, 331), 're.compile', 're.compile', (['"""@_@"""'], {}), "('@_@')\n", (324, 331), False, 'import re\n'), ((2319, 2381), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Substitute in variables"""'}), "(description='Substitute in variables')\n", (2342, 2381), False, 'import argparse\n'), ((2818, 2850), 'os.path.join', 'os.path.join', (['"""build"""', 'args.coin'], {}), "('build', args.coin)\n", (2830, 2850), False, 'import os\n'), ((4710, 4746), 'os.path.join', 'os.path.join', (['buildDir', '"""Dockerfile"""'], {}), "(buildDir, 'Dockerfile')\n", (4722, 4746), False, 'import os\n'), ((5379, 5423), 'os.path.join', 'os.path.join', (['buildDir', "('startup%s' % suffix)"], {}), "(buildDir, 'startup%s' % suffix)\n", (5391, 5423), False, 'import os\n'), ((6368, 6403), 'os.path.join', 'os.path.join', (['buildDir', '"""ports.txt"""'], {}), "(buildDir, 'ports.txt')\n", (6380, 6403), False, 'import os\n'), ((2184, 2220), 'os.path.join', 'os.path.join', (['"""build"""', 'coin', 'outfile'], {}), "('build', coin, outfile)\n", (2196, 2220), False, 'import os\n'), ((2275, 2307), 'shutil.copyfile', 'shutil.copyfile', (['infile', 'outfile'], {}), '(infile, outfile)\n', (2290, 2307), False, 'import shutil\n'), ((2945, 2957), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2954, 2957), False, 'import json\n'), ((3285, 3351), 'requests.get', 'requests.get', (['"""http://169.254.169.254/latest/meta-data/local-ipv4"""'], {}), "('http://169.254.169.254/latest/meta-data/local-ipv4')\n", (3297, 3351), False, 'import requests\n'), ((4198, 4252), 'os.path.join', 'os.path.join', (['buildDir', 
"('%s.conf' % config['coinname'])"], {}), "(buildDir, '%s.conf' % config['coinname'])\n", (4210, 4252), False, 'import os\n'), ((4878, 4919), 'os.path.join', 'os.path.join', (['buildDir', '"""Dockerfile.pool"""'], {}), "(buildDir, 'Dockerfile.pool')\n", (4890, 4919), False, 'import os\n'), ((6562, 6657), 'os.path.join', 'os.path.join', (['""".."""', '"""build"""', '"""artifacts"""', "config['coinname']", '"""linux"""', "config['daemonname']"], {}), "('..', 'build', 'artifacts', config['coinname'], 'linux',\n config['daemonname'])\n", (6574, 6657), False, 'import os\n'), ((6871, 6919), 'os.path.join', 'os.path.join', (['buildDir', '"""explorer-settings.json"""'], {}), "(buildDir, 'explorer-settings.json')\n", (6883, 6919), False, 'import os\n'), ((7059, 7105), 'os.path.join', 'os.path.join', (['buildDir', '"""explorer-layout.jade"""'], {}), "(buildDir, 'explorer-layout.jade')\n", (7071, 7105), False, 'import os\n'), ((740, 756), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (754, 756), False, 'import sys\n'), ((2086, 2108), 'os.path.exists', 'os.path.exists', (['infile'], {}), '(infile)\n', (2100, 2108), False, 'import os\n'), ((4954, 4999), 'os.path.join', 'os.path.join', (['buildDir', '"""Dockerfile.explorer"""'], {}), "(buildDir, 'Dockerfile.explorer')\n", (4966, 4999), False, 'import os\n'), ((5020, 5061), 'os.path.join', 'os.path.join', (['buildDir', '"""Dockerfile.node"""'], {}), "(buildDir, 'Dockerfile.node')\n", (5032, 5061), False, 'import os\n'), ((7638, 7678), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.aws/credentials"""'], {}), "('~/.aws/credentials')\n", (7656, 7678), False, 'import os\n'), ((3716, 3728), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (3726, 3728), False, 'import uuid\n'), ((426, 454), 're.compile', 're.compile', (["('@%s@' % x)", 're.I'], {}), "('@%s@' % x, re.I)\n", (436, 454), False, 'import re\n')]
import numpy as np import torch from torch import optim from torch.nn import functional import torch.nn as nn import torch.utils.data from torch.nn import BatchNorm1d, Dropout, LeakyReLU, Linear, Module, ReLU, Sequential,Sigmoid from torch.nn import functional as F from opendp.smartnoise.synthesizers.base import SDGYMBaseSynthesizer import ctgan from ctgan.transformer import DataTransformer from ctgan.conditional import ConditionalGenerator from ctgan.models import Generator from ctgan.sampler import Sampler from ctgan import CTGANSynthesizer import opacus from opacus import autograd_grad_sample from opacus import PrivacyEngine, utils class Discriminator(Module): def calc_gradient_penalty(self, real_data, fake_data, device='cpu', pac=10, lambda_=10): alpha = torch.rand(real_data.size(0) // pac, 1, 1, device=device) alpha = alpha.repeat(1, pac, real_data.size(1)) alpha = alpha.view(-1, real_data.size(1)) interpolates = alpha * real_data + ((1 - alpha) * fake_data) disc_interpolates = self(interpolates) gradients = torch.autograd.grad( outputs=disc_interpolates, inputs=interpolates, grad_outputs=torch.ones(disc_interpolates.size(), device=device), create_graph=True, retain_graph=True, only_inputs=True )[0] gradient_penalty = (( gradients.view(-1, pac * real_data.size(1)).norm(2, dim=1) - 1 ) ** 2).mean() * lambda_ return gradient_penalty def __init__(self, input_dim, dis_dims, loss, pack): super(Discriminator, self).__init__() torch.cuda.manual_seed(0) torch.manual_seed(0) dim = input_dim * pack # print ('now dim is {}'.format(dim)) self.pack = pack self.packdim = dim seq = [] for item in list(dis_dims): seq += [Linear(dim, item), LeakyReLU(0.2), Dropout(0.5)] dim = item seq += [Linear(dim, 1)] if loss == 'cross_entropy': seq += [Sigmoid()] self.seq = Sequential(*seq) def forward(self, input): assert input.size()[0] % self.pack == 0 return self.seq(input.view(-1, self.packdim)) # custom for calcuate grad_sample for multiple loss.backward() def 
_custom_create_or_extend_grad_sample( param: torch.Tensor, grad_sample: torch.Tensor, batch_dim: int ) -> None: """ Create a 'grad_sample' attribute in the given parameter, or accumulate it if the 'grad_sample' attribute already exists. This custom code will not work when using optimizer.virtual_step() """ #print ("now this happen") if hasattr(param, "grad_sample"): param.grad_sample = param.grad_sample + grad_sample #param.grad_sample = torch.cat((param.grad_sample, grad_sample), batch_dim) else: param.grad_sample = grad_sample class DPCTGAN(CTGANSynthesizer): """Differential Private Conditional Table GAN Synthesizer This code adds Differential Privacy to CTGANSynthesizer from https://github.com/sdv-dev/CTGAN """ def __init__(self, embedding_dim=128, gen_dim=(256, 256), dis_dim=(256, 256), l2scale=1e-6, batch_size=500, epochs=300, pack=1, log_frequency=True, disabled_dp=False, target_delta=None, sigma = 5, max_per_sample_grad_norm=1.0, epsilon = 1, verbose=True, loss = 'cross_entropy'): # CTGAN model specific parameters self.embedding_dim = embedding_dim self.gen_dim = gen_dim self.dis_dim = dis_dim self.l2scale = l2scale self.batch_size = batch_size self.epochs = epochs self.pack=pack self.log_frequency = log_frequency self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") # opacus parameters self.sigma = sigma self.disabled_dp = disabled_dp self.target_delta = target_delta self.max_per_sample_grad_norm = max_per_sample_grad_norm self.epsilon = epsilon self.epsilon_list = [] self.alpha_list = [] self.loss_d_list = [] self.loss_g_list = [] self.verbose=verbose self.loss=loss if self.loss != "cross_entropy": # Monkeypatches the _create_or_extend_grad_sample function when calling opacus opacus.supported_layers_grad_samplers._create_or_extend_grad_sample = _custom_create_or_extend_grad_sample def train(self, data, categorical_columns=None, ordinal_columns=None, update_epsilon=None): if update_epsilon: self.epsilon = update_epsilon 
self.transformer = DataTransformer() self.transformer.fit(data, discrete_columns=categorical_columns) train_data = self.transformer.transform(data) data_sampler = Sampler(train_data, self.transformer.output_info) data_dim = self.transformer.output_dimensions self.cond_generator = ConditionalGenerator(train_data, self.transformer.output_info, self.log_frequency) self.generator = Generator( self.embedding_dim + self.cond_generator.n_opt, self.gen_dim, data_dim).to(self.device) discriminator = Discriminator( data_dim + self.cond_generator.n_opt, self.dis_dim, self.loss, self.pack).to(self.device) optimizerG = optim.Adam( self.generator.parameters(), lr=2e-4, betas=(0.5, 0.9), weight_decay=self.l2scale) optimizerD = optim.Adam(discriminator.parameters(), lr=2e-4, betas=(0.5, 0.9)) privacy_engine = opacus.PrivacyEngine( discriminator, batch_size=self.batch_size, sample_size=train_data.shape[0], alphas=[1 + x / 10.0 for x in range(1, 100)] + list(range(12, 64)), noise_multiplier=self.sigma, max_grad_norm=self.max_per_sample_grad_norm, clip_per_layer=True ) if not self.disabled_dp: privacy_engine.attach(optimizerD) one = torch.tensor(1, dtype=torch.float).to(self.device) mone = one * -1 REAL_LABEL = 1 FAKE_LABEL = 0 criterion = nn.BCELoss() assert self.batch_size % 2 == 0 mean = torch.zeros(self.batch_size, self.embedding_dim, device=self.device) std = mean + 1 steps_per_epoch = len(train_data) // self.batch_size for i in range(self.epochs): for id_ in range(steps_per_epoch): fakez = torch.normal(mean=mean, std=std) condvec = self.cond_generator.sample(self.batch_size) if condvec is None: c1, m1, col, opt = None, None, None, None real = data_sampler.sample(self.batch_size, col, opt) else: c1, m1, col, opt = condvec c1 = torch.from_numpy(c1).to(self.device) m1 = torch.from_numpy(m1).to(self.device) fakez = torch.cat([fakez, c1], dim=1) perm = np.arange(self.batch_size) np.random.shuffle(perm) real = data_sampler.sample(self.batch_size, col[perm], opt[perm]) c2 = c1[perm] 
fake = self.generator(fakez) fakeact = self._apply_activate(fake) real = torch.from_numpy(real.astype('float32')).to(self.device) if c1 is not None: fake_cat = torch.cat([fakeact, c1], dim=1) real_cat = torch.cat([real, c2], dim=1) else: real_cat = real fake_cat = fake optimizerD.zero_grad() if self.loss == 'cross_entropy': y_fake = discriminator(fake_cat) # print ('y_fake is {}'.format(y_fake)) label_fake = torch.full((int(self.batch_size/self.pack),), FAKE_LABEL, dtype=torch.float, device=self.device) # print ('label_fake is {}'.format(label_fake)) errD_fake = criterion(y_fake, label_fake) errD_fake.backward() optimizerD.step() # train with real label_true = torch.full((int(self.batch_size/self.pack),), REAL_LABEL, dtype=torch.float, device=self.device) y_real = discriminator(real_cat) errD_real = criterion(y_real, label_true) errD_real.backward() optimizerD.step() loss_d = errD_real + errD_fake else: y_fake = discriminator(fake_cat) mean_fake = torch.mean(y_fake) mean_fake.backward(one) y_real = discriminator(real_cat) mean_real = torch.mean(y_real) mean_real.backward(mone) optimizerD.step() loss_d = -(mean_real - mean_fake) max_grad_norm = [] for p in discriminator.parameters(): param_norm = p.grad.data.norm(2).item() max_grad_norm.append(param_norm) #pen = calc_gradient_penalty(discriminator, real_cat, fake_cat, self.device) #pen.backward(retain_graph=True) #loss_d.backward() #optimizerD.step() fakez = torch.normal(mean=mean, std=std) condvec = self.cond_generator.sample(self.batch_size) if condvec is None: c1, m1, col, opt = None, None, None, None else: c1, m1, col, opt = condvec c1 = torch.from_numpy(c1).to(self.device) m1 = torch.from_numpy(m1).to(self.device) fakez = torch.cat([fakez, c1], dim=1) fake = self.generator(fakez) fakeact = self._apply_activate(fake) if c1 is not None: y_fake = discriminator(torch.cat([fakeact, c1], dim=1)) else: y_fake = discriminator(fakeact) #if condvec is None: cross_entropy = 0 #else: # cross_entropy = self._cond_loss(fake, 
c1, m1) if self.loss=='cross_entropy': label_g = torch.full((int(self.batch_size/self.pack),), REAL_LABEL, dtype=torch.float, device=self.device) #label_g = torch.full(int(self.batch_size/self.pack,),1,device=self.device) loss_g = criterion(y_fake, label_g) loss_g = loss_g + cross_entropy else: loss_g = -torch.mean(y_fake) + cross_entropy optimizerG.zero_grad() loss_g.backward() optimizerG.step() if not self.disabled_dp: #if self.loss == 'cross_entropy': # autograd_grad_sample.clear_backprops(discriminator) #else: for p in discriminator.parameters(): if hasattr(p, "grad_sample"): del p.grad_sample if self.target_delta is None: self.target_delta = 1/train_data.shape[0] epsilon, best_alpha = optimizerD.privacy_engine.get_privacy_spent(self.target_delta) self.epsilon_list.append(epsilon) self.alpha_list.append(best_alpha) #if self.verbose: if not self.disabled_dp: if self.epsilon < epsilon: break self.loss_d_list.append(loss_d) self.loss_g_list.append(loss_g) if self.verbose: print("Epoch %d, Loss G: %.4f, Loss D: %.4f" % (i + 1, loss_g.detach().cpu(), loss_d.detach().cpu()), flush=True) print ('epsilon is {e}, alpha is {a}'.format(e=epsilon, a = best_alpha)) return self.loss_d_list, self.loss_g_list, self.epsilon_list, self.alpha_list def generate(self, n): self.generator.eval() #output_info = self.transformer.output_info steps = n // self.batch_size + 1 data = [] for i in range(steps): mean = torch.zeros(self.batch_size, self.embedding_dim) std = mean + 1 fakez = torch.normal(mean=mean, std=std).to(self.device) condvec = self.cond_generator.sample_zero(self.batch_size) if condvec is None: pass else: c1 = condvec c1 = torch.from_numpy(c1).to(self.device) fakez = torch.cat([fakez, c1], dim=1) fake = self.generator(fakez) fakeact = self._apply_activate(fake) data.append(fakeact.detach().cpu().numpy()) data = np.concatenate(data, axis=0) data = data[:n] return self.transformer.inverse_transform(data, None)
[ "ctgan.models.Generator", "torch.nn.Dropout", "torch.nn.Sequential", "ctgan.sampler.Sampler", "torch.from_numpy", "ctgan.conditional.ConditionalGenerator", "torch.cuda.is_available", "torch.normal", "numpy.arange", "torch.nn.Sigmoid", "torch.mean", "numpy.concatenate", "torch.nn.LeakyReLU", ...
[((1613, 1638), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['(0)'], {}), '(0)\n', (1635, 1638), False, 'import torch\n'), ((1647, 1667), 'torch.manual_seed', 'torch.manual_seed', (['(0)'], {}), '(0)\n', (1664, 1667), False, 'import torch\n'), ((2061, 2077), 'torch.nn.Sequential', 'Sequential', (['*seq'], {}), '(*seq)\n', (2071, 2077), False, 'from torch.nn import BatchNorm1d, Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Sigmoid\n'), ((4866, 4883), 'ctgan.transformer.DataTransformer', 'DataTransformer', ([], {}), '()\n', (4881, 4883), False, 'from ctgan.transformer import DataTransformer\n'), ((5035, 5084), 'ctgan.sampler.Sampler', 'Sampler', (['train_data', 'self.transformer.output_info'], {}), '(train_data, self.transformer.output_info)\n', (5042, 5084), False, 'from ctgan.sampler import Sampler\n'), ((5170, 5257), 'ctgan.conditional.ConditionalGenerator', 'ConditionalGenerator', (['train_data', 'self.transformer.output_info', 'self.log_frequency'], {}), '(train_data, self.transformer.output_info, self.\n log_frequency)\n', (5190, 5257), False, 'from ctgan.conditional import ConditionalGenerator\n'), ((6425, 6437), 'torch.nn.BCELoss', 'nn.BCELoss', ([], {}), '()\n', (6435, 6437), True, 'import torch.nn as nn\n'), ((6494, 6562), 'torch.zeros', 'torch.zeros', (['self.batch_size', 'self.embedding_dim'], {'device': 'self.device'}), '(self.batch_size, self.embedding_dim, device=self.device)\n', (6505, 6562), False, 'import torch\n'), ((13355, 13383), 'numpy.concatenate', 'np.concatenate', (['data'], {'axis': '(0)'}), '(data, axis=0)\n', (13369, 13383), True, 'import numpy as np\n'), ((1959, 1973), 'torch.nn.Linear', 'Linear', (['dim', '(1)'], {}), '(dim, 1)\n', (1965, 1973), False, 'from torch.nn import BatchNorm1d, Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Sigmoid\n'), ((12763, 12811), 'torch.zeros', 'torch.zeros', (['self.batch_size', 'self.embedding_dim'], {}), '(self.batch_size, self.embedding_dim)\n', (12774, 12811), False, 'import 
torch\n'), ((1870, 1887), 'torch.nn.Linear', 'Linear', (['dim', 'item'], {}), '(dim, item)\n', (1876, 1887), False, 'from torch.nn import BatchNorm1d, Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Sigmoid\n'), ((1889, 1903), 'torch.nn.LeakyReLU', 'LeakyReLU', (['(0.2)'], {}), '(0.2)\n', (1898, 1903), False, 'from torch.nn import BatchNorm1d, Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Sigmoid\n'), ((1905, 1917), 'torch.nn.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (1912, 1917), False, 'from torch.nn import BatchNorm1d, Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Sigmoid\n'), ((2031, 2040), 'torch.nn.Sigmoid', 'Sigmoid', ([], {}), '()\n', (2038, 2040), False, 'from torch.nn import BatchNorm1d, Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Sigmoid\n'), ((3978, 4003), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4001, 4003), False, 'import torch\n'), ((5279, 5364), 'ctgan.models.Generator', 'Generator', (['(self.embedding_dim + self.cond_generator.n_opt)', 'self.gen_dim', 'data_dim'], {}), '(self.embedding_dim + self.cond_generator.n_opt, self.gen_dim,\n data_dim)\n', (5288, 5364), False, 'from ctgan.models import Generator\n'), ((6283, 6317), 'torch.tensor', 'torch.tensor', (['(1)'], {'dtype': 'torch.float'}), '(1, dtype=torch.float)\n', (6295, 6317), False, 'import torch\n'), ((6756, 6788), 'torch.normal', 'torch.normal', ([], {'mean': 'mean', 'std': 'std'}), '(mean=mean, std=std)\n', (6768, 6788), False, 'import torch\n'), ((9753, 9785), 'torch.normal', 'torch.normal', ([], {'mean': 'mean', 'std': 'std'}), '(mean=mean, std=std)\n', (9765, 9785), False, 'import torch\n'), ((13162, 13191), 'torch.cat', 'torch.cat', (['[fakez, c1]'], {'dim': '(1)'}), '([fakez, c1], dim=1)\n', (13171, 13191), False, 'import torch\n'), ((7253, 7282), 'torch.cat', 'torch.cat', (['[fakez, c1]'], {'dim': '(1)'}), '([fakez, c1], dim=1)\n', (7262, 7282), False, 'import torch\n'), ((7311, 7337), 'numpy.arange', 'np.arange', 
(['self.batch_size'], {}), '(self.batch_size)\n', (7320, 7337), True, 'import numpy as np\n'), ((7358, 7381), 'numpy.random.shuffle', 'np.random.shuffle', (['perm'], {}), '(perm)\n', (7375, 7381), True, 'import numpy as np\n'), ((7749, 7780), 'torch.cat', 'torch.cat', (['[fakeact, c1]'], {'dim': '(1)'}), '([fakeact, c1], dim=1)\n', (7758, 7780), False, 'import torch\n'), ((7812, 7840), 'torch.cat', 'torch.cat', (['[real, c2]'], {'dim': '(1)'}), '([real, c2], dim=1)\n', (7821, 7840), False, 'import torch\n'), ((9003, 9021), 'torch.mean', 'torch.mean', (['y_fake'], {}), '(y_fake)\n', (9013, 9021), False, 'import torch\n'), ((9153, 9171), 'torch.mean', 'torch.mean', (['y_real'], {}), '(y_real)\n', (9163, 9171), False, 'import torch\n'), ((10176, 10205), 'torch.cat', 'torch.cat', (['[fakez, c1]'], {'dim': '(1)'}), '([fakez, c1], dim=1)\n', (10185, 10205), False, 'import torch\n'), ((12859, 12891), 'torch.normal', 'torch.normal', ([], {'mean': 'mean', 'std': 'std'}), '(mean=mean, std=std)\n', (12871, 12891), False, 'import torch\n'), ((10384, 10415), 'torch.cat', 'torch.cat', (['[fakeact, c1]'], {'dim': '(1)'}), '([fakeact, c1], dim=1)\n', (10393, 10415), False, 'import torch\n'), ((13101, 13121), 'torch.from_numpy', 'torch.from_numpy', (['c1'], {}), '(c1)\n', (13117, 13121), False, 'import torch\n'), ((7126, 7146), 'torch.from_numpy', 'torch.from_numpy', (['c1'], {}), '(c1)\n', (7142, 7146), False, 'import torch\n'), ((7188, 7208), 'torch.from_numpy', 'torch.from_numpy', (['m1'], {}), '(m1)\n', (7204, 7208), False, 'import torch\n'), ((10049, 10069), 'torch.from_numpy', 'torch.from_numpy', (['c1'], {}), '(c1)\n', (10065, 10069), False, 'import torch\n'), ((10111, 10131), 'torch.from_numpy', 'torch.from_numpy', (['m1'], {}), '(m1)\n', (10127, 10131), False, 'import torch\n'), ((11104, 11122), 'torch.mean', 'torch.mean', (['y_fake'], {}), '(y_fake)\n', (11114, 11122), False, 'import torch\n')]
import socket import threading import time # Create constants HEADER = 64 PORT = 5050 FORMAT = 'utf-8' DC_MSG = "!DISCONNECT" SERVER = "localhost" ADDR = (SERVER, PORT) # Set up client var and connect to the server client = socket.socket(socket.AF_INET, socket.SOCK_STREAM) client.connect(ADDR) erase = '\x1b[1A\x1b[K' # Handles sending a message to the server def send(sendMsg): # Encode and create header message = sendMsg.encode(FORMAT) msg_len = len(message) send_len = str(msg_len).encode(FORMAT) send_len += b' ' * (HEADER - len(send_len)) client.send(send_len) # Send the actual text client.send(message) # A thread to handle receiving messages broadcast from the server def recvThread(): try: # Wait for a message from the server and then decode and print it, while keeping the prompt on the same line while True: msg_len = client.recv(HEADER).decode(FORMAT) if msg_len: msg_len = int(msg_len) recvMsg = client.recv(msg_len).decode(FORMAT) print(f"\n{erase}{recvMsg}\n[{uname}]: ", end="") except Exception as e: return e # Main thread try: # Send initial message to set up username uname = input("Enter a username: ") send(uname) # Start handling received messages RECVTHREAD = threading.Thread(target=recvThread) RECVTHREAD.start() # Handle the prompt and sending messages while True: msg = input(f"[{uname}]: ") send(msg) print("\x1b[A\x1b[K", end="") if msg == DC_MSG: break # Close everything if ctrl+c is pressed finally: send(DC_MSG) time.sleep(0.5) client.close() print("\ngoodbye") exit()
[ "threading.Thread", "time.sleep", "socket.socket" ]
[((239, 288), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (252, 288), False, 'import socket\n'), ((1401, 1436), 'threading.Thread', 'threading.Thread', ([], {'target': 'recvThread'}), '(target=recvThread)\n', (1417, 1436), False, 'import threading\n'), ((1741, 1756), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (1751, 1756), False, 'import time\n')]
from __future__ import annotations import disnake from disnake.ext import commands from typing import TYPE_CHECKING from src.utils.utils import EmbedFactory, ExitButton, SaveButton, add_lines, get_info if TYPE_CHECKING: from src.utils import File def clear_codeblock(content: str): content.strip("\n") if content.startswith("```"): content = "\n".join(content.splitlines()[1:]) if content.endswith("```"): content = content[:-3] if "`" in content: content.replace("`", "\u200b") return content def update_buttons(cls: EditView): if cls.page == 1: cls.previous_button.disabled = True else: cls.previous_button.disabled = False if cls.page == len(cls.pages) - 2: cls.next_button.disabled = True else: cls.next_button.disabled = False class EditView(disnake.ui.View): async def interaction_check(self, interaction: disnake.MessageInteraction) -> bool: return ( interaction.author == self.ctx.author and interaction.channel == self.ctx.channel ) def __init__( self, ctx, file_: "File", bot_message=None, file_view=None, lines: list[str] = None, ): super().__init__() self.ctx = ctx self.bot = ctx.bot self.file = file_ self.content = file_.content self.bot_message = bot_message self.file_view = file_view self.undo = self.file_view.file.undo self.redo = self.file_view.file.redo self.pages = [lines[x : x + 50] for x in range(0, len(lines), 50)] self.page = 0 self.SUDO = self.ctx.me.guild_permissions.manage_messages self.add_item(ExitButton(ctx, bot_message, row=3)) self.add_item(SaveButton(ctx, bot_message, file_, row=2)) async def edit(self, inter): await inter.response.defer() await self.bot_message.edit( embed=EmbedFactory.code_embed( self.ctx, "".join(add_lines(self.file_view.file.content)), self.file.filename, ), ) @disnake.ui.button(label="Write", style=disnake.ButtonStyle.gray) async def write_button( self, button: disnake.ui.Button, interaction: disnake.MessageInteraction ): ... 
@disnake.ui.button(label="Replace", style=disnake.ButtonStyle.gray) async def replace_button( self, button: disnake.ui.Button, interaction: disnake.MessageInteraction ): await interaction.response.send_message( "**Format:**\n[line number]\n```py\n<code>\n```**Example:**" "\n12-25\n```py\nfor i in range(10):\n\tprint('foo')\n```" "\n`[Click save to see the result]`", ephemeral=True, ) content: str = ( await self.ctx.bot.wait_for( "message", check=lambda m: m.author == interaction.author and m.channel == interaction.channel, ) ).content if content[0].isdigit(): line_no = content.splitlines()[0] if "-" in line_no: from_, to = ( int(line_no.split("-")[0]) - 1, int(line_no.split("-")[1]) - 1, ) else: from_, to = int(line_no) - 1, int(line_no) - 1 code = clear_codeblock("\n".join(content.splitlines()[1:])) else: from_, to = 0, len(self.file_view.file.content) - 1 code = clear_codeblock(content) self.undo.append(self.content) sliced = self.file_view.file.content.splitlines() del sliced[from_ : to + 1] sliced.insert(from_, code) self.file_view.file.content = "\n".join(sliced) @disnake.ui.button(label="Append", style=disnake.ButtonStyle.gray) async def append_button( self, button: disnake.ui.Button, interaction: disnake.MessageInteraction ): await interaction.response.send_message( "Type something... 
(This will append your code with a new line) `[Click save to see the result]`", ephemeral=True, ) self.undo.append(self.file_view.file.content) self.file_view.file.content += "\n" + clear_codeblock( ( await self.ctx.bot.wait_for( "message", check=lambda m: m.author == interaction.author and m.channel == interaction.channel, ) ).content ) @disnake.ui.button(label="Rename", style=disnake.ButtonStyle.grey) async def rename_button( self, button: disnake.ui.Button, interaction: disnake.MessageInteraction ): await interaction.response.send_message( "What would you like the filename to be?", ephemeral=True ) filename = await self.bot.wait_for( "message", check=lambda m: self.ctx.author == m.author and m.channel == self.ctx.channel, ) if len(filename.content) > 12: if self.SUDO: await filename.delete() return await interaction.channel.send( "That filename is too long! The maximum limit is 12 character" ) file_ = File(filename=filename, content=self.file.content, bot=self.bot) description = await get_info(file_) self.file = file_ self.extension = file_.filename.split(".")[-1] embed = EmbedFactory.ide_embed(self.ctx, description) await self.bot_message.edit(embed=embed) @disnake.ui.button(label="Prev", style=disnake.ButtonStyle.blurple, row=2) async def previous_button( self, button: disnake.ui.Button, interaction: disnake.MessageInteraction ): await interaction.response.defer() update_buttons(self) self.page -= 1 embed = ( disnake.Embed( description=f"```py\n{''.join(self.pages[self.page])}\n```\nPage: {self.page + 1}/{len(self.pages)}", timestamp=self.ctx.message.created_at, ) .set_author( name=f"{self.ctx.author.name}'s automated paginator for {self.file.filename}", icon_url=self.ctx.author.avatar.url, ) .set_footer(text="The official jarvide text editor and ide") ) await self.bot_message.edit(embed=embed, view=self) @disnake.ui.button(label="Next", style=disnake.ButtonStyle.blurple, row=2) async def next_button( self, button: disnake.ui.Button, interaction: 
disnake.MessageInteraction ): await interaction.response.defer() update_buttons(self) self.page += 1 embed = ( disnake.Embed( description=f"```py\n{''.join(self.pages[self.page])}\n```\nPage: {self.page + 1}/{len(self.pages)}", timestamp=self.ctx.message.created_at, ) .set_author( name=f"{self.ctx.author.name}'s automated paginator for {self.file.filename}", icon_url=self.ctx.author.avatar.url, ) .set_footer(text="The official jarvide text editor and ide") ) await self.bot_message.edit(embed=embed, view=self) @disnake.ui.button(label="Undo", style=disnake.ButtonStyle.blurple, row=2) async def undo_button( self, button: disnake.ui.Button, interaction: disnake.MessageInteraction ): if not self.undo: return await interaction.response.send_message( "You have made no changes and have nothing to undo!", ephemeral=True ) self.redo.append(self.file_view.file.content) self.file_view.file.content = self.undo.pop(-1) await self.edit(interaction) @disnake.ui.button(label="Redo", style=disnake.ButtonStyle.blurple, row=2) async def redo_button( self, button: disnake.ui.Button, interaction: disnake.MessageInteraction ): if not self.redo: return await interaction.response.send_message( "You have made no changes and have nothing to undo!", ephemeral=True ) self.undo.append(self.file_view.file.content) self.file_view.file.content = self.redo.pop(-1) await self.edit(interaction) @disnake.ui.button(label="Clear", style=disnake.ButtonStyle.danger, row=3) async def clear_button( self, button: disnake.ui.Button, interaction: disnake.MessageInteraction ): self.undo.append(self.file_view.file.content) self.file_view.file.content = "" await self.edit(interaction) @disnake.ui.button(label="Back", style=disnake.ButtonStyle.danger, row=3) async def settings_button( self, button: disnake.ui.Button, interaction: disnake.MessageInteraction ): embed = EmbedFactory.ide_embed(self.ctx, await get_info(self.file)) self.undo = [] self.redo = [] await self.bot_message.edit(embed=embed, 
view=self.file_view) def setup(bot: commands.Bot): pass
[ "src.utils.utils.get_info", "src.utils.utils.EmbedFactory.ide_embed", "src.utils.utils.ExitButton", "src.utils.File", "src.utils.utils.SaveButton", "disnake.ui.button", "src.utils.utils.add_lines" ]
[((2156, 2220), 'disnake.ui.button', 'disnake.ui.button', ([], {'label': '"""Write"""', 'style': 'disnake.ButtonStyle.gray'}), "(label='Write', style=disnake.ButtonStyle.gray)\n", (2173, 2220), False, 'import disnake\n'), ((2355, 2421), 'disnake.ui.button', 'disnake.ui.button', ([], {'label': '"""Replace"""', 'style': 'disnake.ButtonStyle.gray'}), "(label='Replace', style=disnake.ButtonStyle.gray)\n", (2372, 2421), False, 'import disnake\n'), ((3829, 3894), 'disnake.ui.button', 'disnake.ui.button', ([], {'label': '"""Append"""', 'style': 'disnake.ButtonStyle.gray'}), "(label='Append', style=disnake.ButtonStyle.gray)\n", (3846, 3894), False, 'import disnake\n'), ((4598, 4663), 'disnake.ui.button', 'disnake.ui.button', ([], {'label': '"""Rename"""', 'style': 'disnake.ButtonStyle.grey'}), "(label='Rename', style=disnake.ButtonStyle.grey)\n", (4615, 4663), False, 'import disnake\n'), ((5665, 5738), 'disnake.ui.button', 'disnake.ui.button', ([], {'label': '"""Prev"""', 'style': 'disnake.ButtonStyle.blurple', 'row': '(2)'}), "(label='Prev', style=disnake.ButtonStyle.blurple, row=2)\n", (5682, 5738), False, 'import disnake\n'), ((6521, 6594), 'disnake.ui.button', 'disnake.ui.button', ([], {'label': '"""Next"""', 'style': 'disnake.ButtonStyle.blurple', 'row': '(2)'}), "(label='Next', style=disnake.ButtonStyle.blurple, row=2)\n", (6538, 6594), False, 'import disnake\n'), ((7373, 7446), 'disnake.ui.button', 'disnake.ui.button', ([], {'label': '"""Undo"""', 'style': 'disnake.ButtonStyle.blurple', 'row': '(2)'}), "(label='Undo', style=disnake.ButtonStyle.blurple, row=2)\n", (7390, 7446), False, 'import disnake\n'), ((7901, 7974), 'disnake.ui.button', 'disnake.ui.button', ([], {'label': '"""Redo"""', 'style': 'disnake.ButtonStyle.blurple', 'row': '(2)'}), "(label='Redo', style=disnake.ButtonStyle.blurple, row=2)\n", (7918, 7974), False, 'import disnake\n'), ((8429, 8502), 'disnake.ui.button', 'disnake.ui.button', ([], {'label': '"""Clear"""', 'style': 
'disnake.ButtonStyle.danger', 'row': '(3)'}), "(label='Clear', style=disnake.ButtonStyle.danger, row=3)\n", (8446, 8502), False, 'import disnake\n'), ((8758, 8830), 'disnake.ui.button', 'disnake.ui.button', ([], {'label': '"""Back"""', 'style': 'disnake.ButtonStyle.danger', 'row': '(3)'}), "(label='Back', style=disnake.ButtonStyle.danger, row=3)\n", (8775, 8830), False, 'import disnake\n'), ((5356, 5420), 'src.utils.File', 'File', ([], {'filename': 'filename', 'content': 'self.file.content', 'bot': 'self.bot'}), '(filename=filename, content=self.file.content, bot=self.bot)\n', (5360, 5420), False, 'from src.utils import File\n'), ((5564, 5609), 'src.utils.utils.EmbedFactory.ide_embed', 'EmbedFactory.ide_embed', (['self.ctx', 'description'], {}), '(self.ctx, description)\n', (5586, 5609), False, 'from src.utils.utils import EmbedFactory, ExitButton, SaveButton, add_lines, get_info\n'), ((1743, 1778), 'src.utils.utils.ExitButton', 'ExitButton', (['ctx', 'bot_message'], {'row': '(3)'}), '(ctx, bot_message, row=3)\n', (1753, 1778), False, 'from src.utils.utils import EmbedFactory, ExitButton, SaveButton, add_lines, get_info\n'), ((1802, 1844), 'src.utils.utils.SaveButton', 'SaveButton', (['ctx', 'bot_message', 'file_'], {'row': '(2)'}), '(ctx, bot_message, file_, row=2)\n', (1812, 1844), False, 'from src.utils.utils import EmbedFactory, ExitButton, SaveButton, add_lines, get_info\n'), ((5449, 5464), 'src.utils.utils.get_info', 'get_info', (['file_'], {}), '(file_)\n', (5457, 5464), False, 'from src.utils.utils import EmbedFactory, ExitButton, SaveButton, add_lines, get_info\n'), ((9005, 9024), 'src.utils.utils.get_info', 'get_info', (['self.file'], {}), '(self.file)\n', (9013, 9024), False, 'from src.utils.utils import EmbedFactory, ExitButton, SaveButton, add_lines, get_info\n'), ((2048, 2086), 'src.utils.utils.add_lines', 'add_lines', (['self.file_view.file.content'], {}), '(self.file_view.file.content)\n', (2057, 2086), False, 'from src.utils.utils import 
EmbedFactory, ExitButton, SaveButton, add_lines, get_info\n')]
import discord, sqlite3, asyncio, utils, re from discord.ext import commands from datetime import datetime TIME_REGEX = re.compile("(?:(\d{1,5})\s?(h|hours|hrs|hour|hr|s|seconds|secs|sec|second|m|mins|minutes|minute|min|d|days|day))+?") TIME_DICT = {"h": 3600, "s": 1, "m": 60, "d": 86400} class TimeConverter(commands.Converter): async def convert(self, argument): if argument is None: return 0 args = argument.lower() matches = re.findall(TIME_REGEX, args) time = 0 for v, k in matches: try: for key in ("h", "s", "m", "d"): if k.startswith(key): k = key break time += TIME_DICT[k]*float(v) except KeyError: raise commands.BadArgument("{} is an invalid time-key! h/m/s/d are valid!".format(k)) except ValueError: raise commands.BadArgument("{} is not a number!".format(v)) return time class AdministratorCommands(commands.Cog): def __init__(self, bot): self.bot = bot @commands.command(usage="poll <ping> <question> | <answer 1> | <answer2...>") @utils.guild_only() @utils.is_admin() @commands.bot_has_permissions(manage_roles=True) @commands.cooldown(1, 60, commands.BucketType.guild) async def poll(self, ctx, ping_member, *, args): """Creates a poll with up to 5 answers.""" ping = ping_member.lower() if ping not in ("yes", "no", "true", "false", "y", "n", "t", "f"): return await utils.embed(ctx, discord.Embed(title="Poll Failed", description=f"Sorry, the `ping_member` argument should be \"Yes\" or \"No\". 
Please use `{self.bot.config.prefix}help poll` for more information."), error=True) if ping in ("yes", "y", "true", "t"): ping = True if ping in ("no", "n", "no", "n"): ping = False ques_ans = args.split(" | ") if len(ques_ans) <= 2: return await utils.embed(ctx, discord.Embed(title="Poll Failed", description=f"Sorry, the `args` argument should be follow this syntax: `question | answer 1 | answer 2...`."), error=True) question = ques_ans[0] answers = ques_ans[1:6] channel_id = self.bot.config.channels.announcements channel = self.bot.get_channel(channel_id) if channel is None: return await utils.embed(ctx, discord.Embed(title="Poll Failed", description=f"Sorry, the `announcements` channel hasn't been configured."), error=True) reactions = [] text = "" i = 1 for answer in answers: react = {1: "1\u20e3", 2: "2\u20e3", 3: "3\u20e3", 4: "4\u20e3", 5: "5\u20e3"}[i] reactions.append(react) text += f"{react} {answers[i-1]}\n\n" i += 1 embed = await utils.embed(ctx, discord.Embed(timestamp=datetime.utcnow(), title="Server Poll", description=f"**{question}**\n\n{text}").set_footer(text=f"Poll by {ctx.author}"), send=False) if ping: ping_role = utils.get_ping_role(ctx) if ping_role != ctx.guild.default_role: if not ping_role.mentionable: edited = False try: await ping_role.edit(mentionable=True) edited = True except discord.Forbidden: return await utils.embed(ctx, discord.Embed(title="Poll Failed", description=f"I do not have permission to **edit** {ping_role.mention}."), error=True) try: message = await channel.send(ping_role.mention, embed=embed) await utils.embed(ctx, discord.Embed(title="Poll Created", description=f"Your poll was successfully posted in {channel.mention}."), error=True) for r in reactions: await message.add_reaction(r) except: if channel.permissions_for(ctx.guild.me).add_reactions is False: issue = f"I do not have permission to **add reactions** in <#{channel.mention}>." 
if channel.permissions_for(ctx.guild.me).send_messages is False: issue = f"I do not have permission to **send messages** in <#{channel.mention}>." return await utils.embed(ctx, discord.Embed(title="Poll Failed", description=issue), error=True) if edited: await ping_role.edit(mentionable=False) return try: message = await channel.send(content="@everyone" if ping else None, embed=embed) await utils.embed(ctx, discord.Embed(title="Poll Created", description=f"Your poll was successfully posted in {channel.mention}."), error=True) for r in reactions: await message.add_reaction(r) except: if channel.permissions_for(ctx.guild.me).add_reactions is False: issue = f"I do not have permission to **add reactions** in <#{channel.mention}>." if channel.permissions_for(ctx.guild.me).send_messages is False: issue = f"I do not have permission to **send messages** in <#{channel.mention}>." await utils.embed(ctx, discord.Embed(title="Poll Failed", description=issue), error=True) @commands.command(usage="announce <ping> <announcement>") @utils.guild_only() @utils.is_admin() async def announce(self, ctx, ping_member, *, announcement): """Creates an announcement.""" ping = ping_member.lower() if ping not in ("yes", "no", "true", "false", "y", "n", "t", "f"): return await utils.embed(ctx, discord.Embed(title="Announcement Failed", description=f"Sorry, the `ping_member` argument should be \"Yes\" or \"No\". 
Please use `{self.bot.config.prefix}help announce` for more information."), error=True) if ping in ("yes", "y", "true", "t"): ping = True if ping in ("no", "n", "no", "n"): ping = False channel_id = self.bot.config.channels.announcements channel = self.bot.get_channel(channel_id) if channel is None: return await utils.embed(ctx, discord.Embed(title="Announcement Failed", description=f"Sorry, the `announcements` channel hasn't been configured."), error=True) if ping: ping_role = utils.get_ping_role(ctx) if ping_role != ctx.guild.default_role: if not ping_role.mentionable: edited = False try: await ping_role.edit(mentionable=True) edited = True except discord.Forbidden: return await utils.embed(ctx, discord.Embed(title="Announcement Failed", description=f"I do not have permission to **edit** {ping_role.mention}."), error=True) try: await channel.send(f"{ping_role.mention}\n{announcement}") await utils.embed(ctx, discord.Embed(title="Announcement Sent", description=f"Your announcement was successfully posted in {channel.mention}."), error=True) except: if channel.permissions_for(ctx.guild.me).send_messages is False: issue = f"I do not have permission to **send messages** in <#{channel.mention}>." return await utils.embed(ctx, discord.Embed(title="Announcement Failed", description=issue), error=True) if edited: await ping_role.edit(mentionable=False) return try: await channel.send("@everyone\n" if ping else "" + announcement) await utils.embed(ctx, discord.Embed(title="Announcement Sent", description=f"Your announcement was successfully posted in {channel.mention}."), error=True) except: if channel.permissions_for(ctx.guild.me).send_messages is False: issue = f"I do not have permission to **send messages** in <#{channel.mention}>." 
await utils.embed(ctx, discord.Embed(title="Poll Failed", description=issue), error=True) @commands.command(aliases=["resetcase"], usage="resetid") @utils.guild_only() @utils.is_admin() async def resetid(self, ctx): """Resets the case ID.""" with sqlite3.connect(self.bot.config.database) as db: db.cursor().execute("UPDATE Settings SET Case_ID='0'") db.cursor().execute("DELETE FROM Cases") db.commit() await utils.embed(ctx, discord.Embed(timestamp=datetime.utcnow(), title="Data Wiped", description="All case data has been successfully cleared.")) @commands.command(aliases=["reloadconfig"], usage="reload") @utils.guild_only() @utils.is_admin() async def reload(self, ctx): """Reloads the config file.""" del self.bot.config self.bot.config = utils.Config() await utils.embed(ctx, discord.Embed(timestamp=datetime.utcnow(), title="Config Reloaded", description="All config data has been successfully reloaded.")) @commands.command(usage="lockdown [time]") @utils.guild_only() @commands.bot_has_permissions(manage_channels=True) @utils.is_admin() async def lockdown(self, ctx, *, time=None): """Locks or unlocks a channel for a specified amount of time.""" member_role = utils.get_member_role(ctx) ows = ctx.channel.overwrites_for(member_role) if ows.read_messages is False: return await utils.embed(ctx, discord.Embed(timestamp=datetime.utcnow(), title="Lockdown Failed", description=f"Sorry, I can only lock channels that can be seen by {member_role.mention if member_role != ctx.guild.default_role else member_role}."), error=True) if ows.send_messages is False: await ctx.channel.set_permissions(member_role, send_messages=None) await ctx.channel.set_permissions(ctx.guild.me, send_messages=None) return await utils.embed(ctx, discord.Embed(timestamp=datetime.utcnow(), title="Lockdown Deactivated", description=f"Lockdown has been lifted by **{ctx.author}**.")) if ows.send_messages in (True, None): seconds = await TimeConverter().convert(time) await 
ctx.channel.set_permissions(member_role, send_messages=False) await ctx.channel.set_permissions(ctx.guild.me, send_messages=True) if seconds < 1: return await utils.embed(ctx, discord.Embed(timestamp=datetime.utcnow(), title="Lockdown Activated", description=f"Lockdown has been activated by **{ctx.author}**.")) await utils.embed(ctx, discord.Embed(timestamp=datetime.utcnow(), title="Lockdown Activated", description=f"Lockdown has been activated by **{ctx.author}** for {utils.display_time(round(seconds), 4)}.")) await asyncio.sleep(seconds) ows = ctx.channel.overwrites_for(member_role) if ows.send_messages is False: await ctx.channel.set_permissions(member_role, send_messages=None) await ctx.channel.set_permissions(ctx.guild.me, send_messages=None) return await utils.embed(ctx, discord.Embed(timestamp=datetime.utcnow(), title="Lockdown Deactivated", description=f"Lockdown has been lifted.")) def setup(bot): bot.add_cog(AdministratorCommands(bot))
[ "utils.Config", "sqlite3.connect", "utils.get_member_role", "re.compile", "asyncio.sleep", "datetime.datetime.utcnow", "utils.get_ping_role", "discord.ext.commands.bot_has_permissions", "utils.guild_only", "discord.ext.commands.cooldown", "re.findall", "discord.Embed", "discord.ext.commands....
[((127, 255), 're.compile', 're.compile', (['"""(?:(\\\\d{1,5})\\\\s?(h|hours|hrs|hour|hr|s|seconds|secs|sec|second|m|mins|minutes|minute|min|d|days|day))+?"""'], {}), "(\n '(?:(\\\\d{1,5})\\\\s?(h|hours|hrs|hour|hr|s|seconds|secs|sec|second|m|mins|minutes|minute|min|d|days|day))+?'\n )\n", (137, 255), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((1200, 1276), 'discord.ext.commands.command', 'commands.command', ([], {'usage': '"""poll <ping> <question> | <answer 1> | <answer2...>"""'}), "(usage='poll <ping> <question> | <answer 1> | <answer2...>')\n", (1216, 1276), False, 'from discord.ext import commands\n'), ((1283, 1301), 'utils.guild_only', 'utils.guild_only', ([], {}), '()\n', (1299, 1301), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((1308, 1324), 'utils.is_admin', 'utils.is_admin', ([], {}), '()\n', (1322, 1324), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((1331, 1378), 'discord.ext.commands.bot_has_permissions', 'commands.bot_has_permissions', ([], {'manage_roles': '(True)'}), '(manage_roles=True)\n', (1359, 1378), False, 'from discord.ext import commands\n'), ((1385, 1436), 'discord.ext.commands.cooldown', 'commands.cooldown', (['(1)', '(60)', 'commands.BucketType.guild'], {}), '(1, 60, commands.BucketType.guild)\n', (1402, 1436), False, 'from discord.ext import commands\n'), ((5625, 5681), 'discord.ext.commands.command', 'commands.command', ([], {'usage': '"""announce <ping> <announcement>"""'}), "(usage='announce <ping> <announcement>')\n", (5641, 5681), False, 'from discord.ext import commands\n'), ((5688, 5706), 'utils.guild_only', 'utils.guild_only', ([], {}), '()\n', (5704, 5706), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((5713, 5729), 'utils.is_admin', 'utils.is_admin', ([], {}), '()\n', (5727, 5729), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((8560, 8616), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['resetcase']", 'usage': '"""resetid"""'}), 
"(aliases=['resetcase'], usage='resetid')\n", (8576, 8616), False, 'from discord.ext import commands\n'), ((8623, 8641), 'utils.guild_only', 'utils.guild_only', ([], {}), '()\n', (8639, 8641), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((8648, 8664), 'utils.is_admin', 'utils.is_admin', ([], {}), '()\n', (8662, 8664), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((9113, 9171), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['reloadconfig']", 'usage': '"""reload"""'}), "(aliases=['reloadconfig'], usage='reload')\n", (9129, 9171), False, 'from discord.ext import commands\n'), ((9178, 9196), 'utils.guild_only', 'utils.guild_only', ([], {}), '()\n', (9194, 9196), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((9203, 9219), 'utils.is_admin', 'utils.is_admin', ([], {}), '()\n', (9217, 9219), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((9543, 9584), 'discord.ext.commands.command', 'commands.command', ([], {'usage': '"""lockdown [time]"""'}), "(usage='lockdown [time]')\n", (9559, 9584), False, 'from discord.ext import commands\n'), ((9591, 9609), 'utils.guild_only', 'utils.guild_only', ([], {}), '()\n', (9607, 9609), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((9616, 9666), 'discord.ext.commands.bot_has_permissions', 'commands.bot_has_permissions', ([], {'manage_channels': '(True)'}), '(manage_channels=True)\n', (9644, 9666), False, 'from discord.ext import commands\n'), ((9673, 9689), 'utils.is_admin', 'utils.is_admin', ([], {}), '()\n', (9687, 9689), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((488, 516), 're.findall', 're.findall', (['TIME_REGEX', 'args'], {}), '(TIME_REGEX, args)\n', (498, 516), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((9354, 9368), 'utils.Config', 'utils.Config', ([], {}), '()\n', (9366, 9368), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((9839, 9865), 'utils.get_member_role', 'utils.get_member_role', (['ctx'], {}), 
'(ctx)\n', (9860, 9865), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((3249, 3273), 'utils.get_ping_role', 'utils.get_ping_role', (['ctx'], {}), '(ctx)\n', (3268, 3273), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((6714, 6738), 'utils.get_ping_role', 'utils.get_ping_role', (['ctx'], {}), '(ctx)\n', (6733, 6738), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((8751, 8792), 'sqlite3.connect', 'sqlite3.connect', (['self.bot.config.database'], {}), '(self.bot.config.database)\n', (8766, 8792), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((11355, 11377), 'asyncio.sleep', 'asyncio.sleep', (['seconds'], {}), '(seconds)\n', (11368, 11377), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((1702, 1890), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Poll Failed"""', 'description': 'f"""Sorry, the `ping_member` argument should be "Yes" or "No". Please use `{self.bot.config.prefix}help poll` for more information."""'}), '(title=\'Poll Failed\', description=\n f\'Sorry, the `ping_member` argument should be "Yes" or "No". 
Please use `{self.bot.config.prefix}help poll` for more information.\'\n )\n', (1715, 1890), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((2169, 2323), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Poll Failed"""', 'description': 'f"""Sorry, the `args` argument should be follow this syntax: `question | answer 1 | answer 2...`."""'}), "(title='Poll Failed', description=\n f'Sorry, the `args` argument should be follow this syntax: `question | answer 1 | answer 2...`.'\n )\n", (2182, 2323), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((2583, 2697), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Poll Failed"""', 'description': 'f"""Sorry, the `announcements` channel hasn\'t been configured."""'}), '(title=\'Poll Failed\', description=\n f"Sorry, the `announcements` channel hasn\'t been configured.")\n', (2596, 2697), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((4934, 5046), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Poll Created"""', 'description': 'f"""Your poll was successfully posted in {channel.mention}."""'}), "(title='Poll Created', description=\n f'Your poll was successfully posted in {channel.mention}.')\n", (4947, 5046), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((5995, 6195), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Announcement Failed"""', 'description': 'f"""Sorry, the `ping_member` argument should be "Yes" or "No". Please use `{self.bot.config.prefix}help announce` for more information."""'}), '(title=\'Announcement Failed\', description=\n f\'Sorry, the `ping_member` argument should be "Yes" or "No". 
Please use `{self.bot.config.prefix}help announce` for more information.\'\n )\n', (6008, 6195), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((6538, 6660), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Announcement Failed"""', 'description': 'f"""Sorry, the `announcements` channel hasn\'t been configured."""'}), '(title=\'Announcement Failed\', description=\n f"Sorry, the `announcements` channel hasn\'t been configured.")\n', (6551, 6660), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((8117, 8242), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Announcement Sent"""', 'description': 'f"""Your announcement was successfully posted in {channel.mention}."""'}), "(title='Announcement Sent', description=\n f'Your announcement was successfully posted in {channel.mention}.')\n", (8130, 8242), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((5550, 5603), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Poll Failed"""', 'description': 'issue'}), "(title='Poll Failed', description=issue)\n", (5563, 5603), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((8485, 8538), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Poll Failed"""', 'description': 'issue'}), "(title='Poll Failed', description=issue)\n", (8498, 8538), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((9005, 9022), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (9020, 9022), False, 'from datetime import datetime\n'), ((9427, 9444), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (9442, 9444), False, 'from datetime import datetime\n'), ((3917, 4029), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Poll Created"""', 'description': 'f"""Your poll was successfully posted in {channel.mention}."""'}), "(title='Poll Created', description=\n f'Your poll was successfully posted in {channel.mention}.')\n", (3930, 4029), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((7388, 7513), 'discord.Embed', 
'discord.Embed', ([], {'title': '"""Announcement Sent"""', 'description': 'f"""Your announcement was successfully posted in {channel.mention}."""'}), "(title='Announcement Sent', description=\n f'Your announcement was successfully posted in {channel.mention}.')\n", (7401, 7513), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((10030, 10047), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (10045, 10047), False, 'from datetime import datetime\n'), ((10506, 10523), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (10521, 10523), False, 'from datetime import datetime\n'), ((11179, 11196), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (11194, 11196), False, 'from datetime import datetime\n'), ((3069, 3086), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3084, 3086), False, 'from datetime import datetime\n'), ((4604, 4657), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Poll Failed"""', 'description': 'issue'}), "(title='Poll Failed', description=issue)\n", (4617, 4657), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((7795, 7856), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Announcement Failed"""', 'description': 'issue'}), "(title='Announcement Failed', description=issue)\n", (7808, 7856), False, 'import discord, sqlite3, asyncio, utils, re\n'), ((11004, 11021), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (11019, 11021), False, 'from datetime import datetime\n'), ((11723, 11740), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (11738, 11740), False, 'from datetime import datetime\n'), ((3645, 3758), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Poll Failed"""', 'description': 'f"""I do not have permission to **edit** {ping_role.mention}."""'}), "(title='Poll Failed', description=\n f'I do not have permission to **edit** {ping_role.mention}.')\n", (3658, 3758), False, 'import discord, sqlite3, asyncio, utils, re\n'), 
((7110, 7231), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Announcement Failed"""', 'description': 'f"""I do not have permission to **edit** {ping_role.mention}."""'}), "(title='Announcement Failed', description=\n f'I do not have permission to **edit** {ping_role.mention}.')\n", (7123, 7231), False, 'import discord, sqlite3, asyncio, utils, re\n')]
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ ## Created by: <NAME> ## Email: <EMAIL> ## Copyright (c) 2020 ## ## LICENSE file in the root directory of this source tree ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ """ResNet variants""" import os import math import torch import numpy as np import torch.nn as nn from torch.nn import Parameter import torch.nn.functional as F from models.attention_map import SEModule, SpatialCGNL, SAModule from models.feature_extraction.splat import SplAtConv2d from models.utils import gen_adj_num, gen_adj from models.common import conv1x1 _url_format = 'https://hangzh.s3.amazonaws.com/encoding/models/{}-{}.pth' _model_sha256 = {name: checksum for checksum, name in [('528c19ca', 'resnest50'), ('22405ba7', 'resnest101'), ('75117900', 'resnest200'), ('0cc87c48', 'resnest269'), ]} def short_hash(name): if name not in _model_sha256: raise ValueError('Pretrained model for {name} is not available.'.format(name=name)) return _model_sha256[name][:8] resnest_model_urls = {name: _url_format.format(name, short_hash(name)) for name in _model_sha256.keys()} __all__ = ['ResNet', 'Bottleneck'] class DropBlock2D(object): def __init__(self, *args, **kwargs): raise NotImplementedError class Bottleneck(nn.Module): """ResNet Bottleneck """ # pylint: disable=unused-argument expansion = 4 def __init__(self, inplanes, planes, stride=1, downsample=None, radix=1, cardinality=1, bottleneck_width=64, avd=False, avd_first=False, dilation=1, is_first=False, rectified_conv=False, rectify_avg=False, norm_layer=None, dropblock_prob=0.0, last_gamma=False, use_se=False): super(Bottleneck, self).__init__() group_width = int(planes * (bottleneck_width / 64.)) * cardinality self.conv1 = nn.Conv2d(inplanes, group_width, kernel_size=1, bias=False) self.bn1 = norm_layer(group_width) self.dropblock_prob = dropblock_prob self.radix = radix self.avd = avd and (stride > 1 or is_first) self.avd_first = avd_first if 
self.avd: self.avd_layer = nn.AvgPool2d(3, stride, padding=1) stride = 1 if dropblock_prob > 0.0: self.dropblock1 = DropBlock2D(dropblock_prob, 3) if radix == 1: self.dropblock2 = DropBlock2D(dropblock_prob, 3) self.dropblock3 = DropBlock2D(dropblock_prob, 3) if radix >= 1: self.conv2 = SplAtConv2d(group_width, group_width, kernel_size=3, stride=stride, padding=dilation, dilation=dilation, groups=cardinality, bias=False, radix=radix, rectify=rectified_conv, rectify_avg=rectify_avg, norm_layer=norm_layer, dropblock_prob=dropblock_prob) elif rectified_conv: from rfconv import RFConv2d self.conv2 = RFConv2d(group_width, group_width, kernel_size=3, stride=stride, padding=dilation, dilation=dilation, groups=cardinality, bias=False, average_mode=rectify_avg) self.bn2 = norm_layer(group_width) else: self.conv2 = nn.Conv2d(group_width, group_width, kernel_size=3, stride=stride, padding=dilation, dilation=dilation, groups=cardinality, bias=False) self.bn2 = norm_layer(group_width) self.conv3 = nn.Conv2d(group_width, planes * 4, kernel_size=1, bias=False) self.bn3 = norm_layer(planes * 4) if last_gamma: from torch.nn.init import zeros_ zeros_(self.bn3.weight) self.relu = nn.ReLU(inplace=True) self.downsample = downsample self.dilation = dilation self.stride = stride self.use_se = use_se if use_se: self.se = SEModule(planes * 4) def forward(self, x): residual = x out = self.conv1(x) out = self.bn1(out) if self.dropblock_prob > 0.0: out = self.dropblock1(out) out = self.relu(out) if self.avd and self.avd_first: out = self.avd_layer(out) out = self.conv2(out) if self.radix == 0: out = self.bn2(out) if self.dropblock_prob > 0.0: out = self.dropblock2(out) out = self.relu(out) if self.avd and not self.avd_first: out = self.avd_layer(out) out = self.conv3(out) out = self.bn3(out) if self.dropblock_prob > 0.0: out = self.dropblock3(out) if self.downsample is not None: residual = self.downsample(x) if self.use_se: out = self.se(out) + residual else: out += residual out = 
self.relu(out) return out class ResNet(nn.Module): """ResNet Variants Parameters ---------- block : Block Class for the residual block. Options are BasicBlockV1, BottleneckV1. layers : list of int Numbers of layers in each block classes : int, default 1000 Number of classification classes. dilated : bool, default False Applying dilation strategy to pretrained ResNet yielding a stride-8 model, typically used in Semantic Segmentation. norm_layer : object Normalization layer used in backbone network (default: :class:`mxnet.gluon.nn.BatchNorm`; for Synchronized Cross-GPU BachNormalization). Reference: - <NAME>, et al. "Deep residual learning for image recognition." Proceedings of the IEEE conference on computer vision and pattern recognition. 2016. - <NAME>, and <NAME>. "Multi-scale context aggregation by dilated convolutions." """ # pylint: disable=unused-variable def __init__(self, block, layers, radix=1, groups=1, bottleneck_width=64, num_classes=1000, dilated=False, dilation=1, deep_stem=False, stem_width=64, avg_down=False, rectified_conv=False, rectify_avg=False, avd=False, avd_first=False, final_drop=0.0, dropblock_prob=0, last_gamma=False, use_se=False, in_channels=300, word_file='/workspace/Projects/cxr/models/feature_extraction/diseases_embeddings.npy', # word_file='diseases_embeddings.npy', # word_file='/home/hoangvu/Projects/cxr/models/feature_extraction/diseases_embeddings.npy', extract_fields='0,1,2,3,4,5', agree_rate=0.5, csv_path='', norm_layer=nn.BatchNorm2d): self.cardinality = groups self.bottleneck_width = bottleneck_width # ResNet-D params self.inplanes = stem_width * 2 if deep_stem else 64 self.avg_down = avg_down self.last_gamma = last_gamma # ResNeSt params self.radix = radix self.avd = avd self.avd_first = avd_first self.use_se = use_se super(ResNet, self).__init__() self.rectified_conv = rectified_conv self.rectify_avg = rectify_avg if rectified_conv: from rfconv import RFConv2d conv_layer = RFConv2d else: conv_layer = nn.Conv2d conv_kwargs = 
{'average_mode': rectify_avg} if rectified_conv else {} if deep_stem: self.conv1 = nn.Sequential( conv_layer(3, stem_width, kernel_size=3, stride=2, padding=1, bias=False, **conv_kwargs), norm_layer(stem_width), nn.ReLU(inplace=True), conv_layer(stem_width, stem_width, kernel_size=3, stride=1, padding=1, bias=False, **conv_kwargs), norm_layer(stem_width), nn.ReLU(inplace=True), conv_layer(stem_width, stem_width * 2, kernel_size=3, stride=1, padding=1, bias=False, **conv_kwargs), ) else: self.conv1 = conv_layer(3, 64, kernel_size=7, stride=2, padding=3, bias=False, **conv_kwargs) self.bn1 = norm_layer(self.inplanes) self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = self._make_layer(block, 64, layers[0], norm_layer=norm_layer, is_first=False) self.layer2 = self._make_layer(block, 128, layers[1], stride=2, norm_layer=norm_layer) if dilated or dilation == 4: self.layer3 = self._make_layer(block, 256, layers[2], stride=1, dilation=2, norm_layer=norm_layer, dropblock_prob=dropblock_prob) self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=4, norm_layer=norm_layer, dropblock_prob=dropblock_prob) elif dilation == 2: self.layer3 = self._make_layer(block, 256, layers[2], stride=2, dilation=1, norm_layer=norm_layer, dropblock_prob=dropblock_prob) self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=2, norm_layer=norm_layer, dropblock_prob=dropblock_prob) else: self.layer3 = self._make_layer(block, 256, layers[2], stride=2, norm_layer=norm_layer, dropblock_prob=dropblock_prob) self.layer4 = self._make_layer(block, 512, layers[3], stride=2, norm_layer=norm_layer, dropblock_prob=dropblock_prob) self.global_pool = nn.AdaptiveAvgPool2d((1, 1)) self.drop = nn.Dropout(final_drop) if final_drop > 0.0 else None # self.fc = nn.Linear(512 * block.expansion, num_classes) for m in self.modules(): if isinstance(m, nn.Conv2d): n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels 
m.weight.data.normal_(0, math.sqrt(2. / n)) elif isinstance(m, norm_layer): m.weight.data.fill_(1) m.bias.data.zero_() num_classes = len(extract_fields.split(',')) _adj = gen_adj_num(labels=extract_fields, agree_rate=agree_rate, csv_path=csv_path) self.adj = Parameter(torch.from_numpy(_adj).float()) if not os.path.exists(word_file): word = np.random.randn(num_classes, 300) print('graph input: random') else: with open(word_file, 'rb') as point: word = np.load(point) print('graph input: loaded from {}'.format(word_file)) self.word = Parameter(torch.from_numpy(word).float()) self.gc0 = GraphConvolution(in_channels, 128, bias=True) self.gc1 = GraphConvolution(128, 256, bias=True) self.gc2 = GraphConvolution(256, 512, bias=True) self.gc3 = GraphConvolution(512, 1024, bias=True) self.gc4 = GraphConvolution(1024, 2048, bias=True) self.gc_relu = nn.LeakyReLU(0.2) self.gc_tanh = nn.Tanh() self.merge_conv0 = nn.Conv2d(num_classes, 128, kernel_size=1, stride=1, bias=False) self.merge_conv1 = nn.Conv2d(num_classes, 256, kernel_size=1, stride=1, bias=False) self.merge_conv2 = nn.Conv2d(num_classes, 512, kernel_size=1, stride=1, bias=False) self.merge_conv3 = nn.Conv2d(num_classes, 1024, kernel_size=1, stride=1, bias=False) self.conv1x1 = conv1x1(in_channels=2048, out_channels=num_classes, bias=True) # self.spatial_attention = SAModule(2048) # self.spatial_attention = SpatialCGNL(2048, 1024) def _make_layer(self, block, planes, blocks, stride=1, dilation=1, norm_layer=None, dropblock_prob=0.0, is_first=True): downsample = None if stride != 1 or self.inplanes != planes * block.expansion: down_layers = [] if self.avg_down: if dilation == 1: down_layers.append( nn.AvgPool2d(kernel_size=stride, stride=stride, ceil_mode=True, count_include_pad=False)) else: down_layers.append(nn.AvgPool2d(kernel_size=1, stride=1, ceil_mode=True, count_include_pad=False)) down_layers.append( nn.Conv2d(self.inplanes, planes * block.expansion, kernel_size=1, stride=1, bias=False)) else: 
down_layers.append( nn.Conv2d(self.inplanes, planes * block.expansion, kernel_size=1, stride=stride, bias=False)) down_layers.append(norm_layer(planes * block.expansion)) downsample = nn.Sequential(*down_layers) layers = [] if dilation == 1 or dilation == 2: layers.append( block(self.inplanes, planes, stride, downsample=downsample, radix=self.radix, cardinality=self.cardinality, bottleneck_width=self.bottleneck_width, avd=self.avd, avd_first=self.avd_first, dilation=1, is_first=is_first, rectified_conv=self.rectified_conv, rectify_avg=self.rectify_avg, norm_layer=norm_layer, dropblock_prob=dropblock_prob, last_gamma=self.last_gamma, use_se=self.use_se)) elif dilation == 4: layers.append( block(self.inplanes, planes, stride, downsample=downsample, radix=self.radix, cardinality=self.cardinality, bottleneck_width=self.bottleneck_width, avd=self.avd, avd_first=self.avd_first, dilation=2, is_first=is_first, rectified_conv=self.rectified_conv, rectify_avg=self.rectify_avg, norm_layer=norm_layer, dropblock_prob=dropblock_prob, last_gamma=self.last_gamma, use_se=self.use_se)) else: raise RuntimeError("=> unknown dilation size: {}".format(dilation)) self.inplanes = planes * block.expansion for i in range(1, blocks): layers.append( block(self.inplanes, planes, radix=self.radix, cardinality=self.cardinality, bottleneck_width=self.bottleneck_width, avd=self.avd, avd_first=self.avd_first, dilation=dilation, rectified_conv=self.rectified_conv, rectify_avg=self.rectify_avg, norm_layer=norm_layer, dropblock_prob=dropblock_prob, last_gamma=self.last_gamma, use_se=self.use_se)) return nn.Sequential(*layers) def forward(self, feature): adj = gen_adj(self.adj).detach() word = self.word.detach() feature = self.conv1(feature) feature = self.bn1(feature) feature = self.relu(feature) feature = self.maxpool(feature) x_raw = self.gc0(word, adj) x = self.gc_tanh(x_raw) feature = merge_gcn_residual(feature, x, self.merge_conv0) feature = self.layer1(feature) x = self.gc_relu(x_raw) x_raw = 
self.gc1(x, adj) x = self.gc_tanh(x_raw) feature = merge_gcn_residual(feature, x, self.merge_conv1) feature = self.layer2(feature) x = self.gc_relu(x_raw) x_raw = self.gc2(x, adj) x = self.gc_tanh(x_raw) feature = merge_gcn_residual(feature, x, self.merge_conv2) feature = self.layer3(feature) x = self.gc_relu(x_raw) x_raw = self.gc3(x, adj) x = self.gc_tanh(x_raw) feature = merge_gcn_residual(feature, x, self.merge_conv3) feature = self.layer4(feature) # feature = self.spatial_attention(feature) feature_raw = self.global_pool(feature) if self.drop is not None: feature_raw = self.drop(feature_raw) feature = feature_raw.view(feature_raw.size(0), -1) x = self.gc_relu(x_raw) x = self.gc4(x, adj) x = self.gc_tanh(x) x = x.transpose(0, 1) x = torch.matmul(feature, x) y = self.conv1x1(feature_raw) y = y.view(y.size(0), -1) x = x + y return x def gcn_resnest200(cfg=None, **kwargs): model = ResNet(Bottleneck, [3, 24, 36, 3], radix=2, groups=1, bottleneck_width=64, deep_stem=True, stem_width=64, avg_down=True, avd=True, avd_first=False, use_se=cfg.use_se, extract_fields=cfg.extract_fields, agree_rate=cfg.agree_rate, csv_path=cfg.csv_path, **kwargs) # model = ResNet(Bottleneck, [3, 24, 36, 3], radix=2, groups=1, bottleneck_width=64, # deep_stem=True, stem_width=64, avg_down=True, avd=True, avd_first=False, # use_se=False, extract_fields='0,1,2,3,4,5', agree_rate=0.5, # csv_path='D:/Dataset/Vinmec/Noise/train_sss.csv', **kwargs) if cfg.pretrained: model.load_state_dict( torch.hub.load_state_dict_from_url(resnest_model_urls['resnest200'], progress=True), strict=False) return model def gcn_resnest101(cfg=None, **kwargs): model = ResNet(Bottleneck, [3, 4, 23, 3], radix=2, groups=1, bottleneck_width=64, deep_stem=True, stem_width=64, avg_down=True, avd=True, avd_first=False, use_se=cfg.use_se, extract_fields=cfg.extract_fields, agree_rate=cfg.agree_rate, csv_path=cfg.csv_path, **kwargs) if cfg.pretrained: model.load_state_dict( 
torch.hub.load_state_dict_from_url(resnest_model_urls['resnest101'], progress=True), strict=False) return model def gcn_resnest50(cfg=None, **kwargs): model = ResNet(Bottleneck, [3, 4, 6, 3], radix=2, groups=1, bottleneck_width=64, deep_stem=True, stem_width=32, avg_down=True, avd=True, avd_first=False, use_se=cfg.use_se, extract_fields=cfg.extract_fields, agree_rate=cfg.agree_rate, csv_path=cfg.csv_path, **kwargs) if cfg.pretrained: model.load_state_dict( torch.hub.load_state_dict_from_url(resnest_model_urls['resnest50'], progress=True), strict=False) return model class GraphConvolution(nn.Module): """ Simple GCN layer, similar to https://arxiv.org/abs/1609.02907 """ def __init__(self, in_features, out_features, bias=False): super(GraphConvolution, self).__init__() self.in_features = in_features self.out_features = out_features middle_features = max(32, (in_features + out_features) // 16) self.weight1 = Parameter(torch.Tensor(in_features, middle_features)) self.weight2 = Parameter(torch.Tensor(middle_features, out_features)) if bias: self.bias = Parameter(torch.Tensor(1, out_features)) else: self.register_parameter('bias', None) self.reset_parameters() def reset_parameters(self): stdv = 1. / math.sqrt(self.weight1.size(1)) self.weight1.data.uniform_(-stdv, stdv) stdv = 1. 
/ math.sqrt(self.weight2.size(1)) self.weight2.data.uniform_(-stdv, stdv) if self.bias is not None: self.bias.data.uniform_(-stdv, stdv) def forward(self, input, adj): support = torch.matmul(input, self.weight1) support = torch.matmul(support, self.weight2) output = torch.matmul(adj, support) if self.bias is not None: output = output + self.bias return output def __repr__(self): return self.__class__.__name__ + ' (' + str(self.in_features) + ' -> ' + str( self.out_features) + ')' class GraphAttentionLayer(nn.Module): """ Simple GAT layer, similar to https://arxiv.org/abs/1710.10903 """ def __init__(self, in_features, out_features, dropout=0, alpha=0.2, concat=True, bias=False): super(GraphAttentionLayer, self).__init__() self.dropout = dropout self.in_features = in_features self.out_features = out_features self.alpha = alpha self.concat = concat self.W = nn.Parameter(torch.zeros(size=(in_features, out_features))) nn.init.xavier_uniform_(self.W.data, gain=1.414) self.a = nn.Parameter(torch.zeros(size=(2 * out_features, 1))) nn.init.xavier_uniform_(self.a.data, gain=1.414) if bias: self.bias = Parameter(torch.Tensor(1, out_features)) else: self.register_parameter('bias', None) self.leakyrelu = nn.LeakyReLU(self.alpha) self.reset_parameters() def reset_parameters(self): stdv = 1. / math.sqrt(self.W.size(1)) self.W.data.uniform_(-stdv, stdv) stdv = 1. 
/ math.sqrt(self.a.size(1)) self.a.data.uniform_(-stdv, stdv) if self.bias is not None: self.bias.data.uniform_(-stdv, stdv) def forward(self, input, adj): h = torch.mm(input, self.W) N = h.size()[0] a_input = torch.cat([h.repeat(1, N).view(N * N, -1), h.repeat(N, 1)], dim=1).view(N, -1, 2 * self.out_features) e = self.leakyrelu(torch.matmul(a_input, self.a).squeeze(2)) zero_vec = -9e15 * torch.ones_like(e) attention = torch.where(adj > 0, e, zero_vec) attention = F.softmax(attention, dim=1) attention = F.dropout(attention, self.dropout, training=self.training) h_prime = torch.matmul(attention, h) if self.bias is not None: h_prime = h_prime + self.bias if self.concat: return F.elu(h_prime) else: return h_prime def __repr__(self): return self.__class__.__name__ + ' (' + str(self.in_features) + ' -> ' + str( self.out_features) + ')' def merge_gcn_residual(feature, x, merge_conv): feature_raw = feature feature = feature_raw.transpose(1, 2) feature = feature.transpose(2, 3).contiguous() feature = feature.view(-1, feature.shape[-1]) reshape_x = x.transpose(0, 1) feature = torch.matmul(feature, reshape_x) feature = feature.view(feature_raw.shape[0], feature_raw.shape[2], feature_raw.shape[3], -1) feature = feature.transpose(2, 3) feature = feature.transpose(1, 2) feature = merge_conv(feature) return feature_raw + feature if __name__ == "__main__": import torchsummary x = torch.randn([2, 3, 224, 224]) model = gcn_resnest200(num_classes=6, word_file='diseases_embeddings.npy') logits = model(x) # print(torchsummary.summary(model, input_size=(3, 512, 512), device='cpu')) print(logits) # x = torch.randn([2, 2048, 7, 7]) # word = torch.randn([6, 300]) # adj = torch.randn([6, 6]) # # # gcn = GraphConvolution(in_features=300, out_features=256, bias=True) # gcn = GraphAttentionLayer(in_features=300, out_features=256, bias=True) # output = gcn(word, adj) # print(output) # feature = torch.randn([2, 128, 56, 56]) # x = torch.randn([11, 128]) # merge_conv = nn.Conv2d(11, 128, 
kernel_size=1, stride=1, bias=False) # # output = merge_gcn_residual(feature, x, merge_conv) # print(output.size())
[ "models.attention_map.SEModule", "torch.nn.ReLU", "torch.nn.Dropout", "torch.nn.Tanh", "torch.nn.Sequential", "rfconv.RFConv2d", "math.sqrt", "models.utils.gen_adj", "torch.from_numpy", "torch.nn.AvgPool2d", "torch.nn.functional.softmax", "os.path.exists", "torch.nn.init.xavier_uniform_", ...
[((22501, 22533), 'torch.matmul', 'torch.matmul', (['feature', 'reshape_x'], {}), '(feature, reshape_x)\n', (22513, 22533), False, 'import torch\n'), ((22837, 22866), 'torch.randn', 'torch.randn', (['[2, 3, 224, 224]'], {}), '([2, 3, 224, 224])\n', (22848, 22866), False, 'import torch\n'), ((1961, 2020), 'torch.nn.Conv2d', 'nn.Conv2d', (['inplanes', 'group_width'], {'kernel_size': '(1)', 'bias': '(False)'}), '(inplanes, group_width, kernel_size=1, bias=False)\n', (1970, 2020), True, 'import torch.nn as nn\n'), ((3712, 3773), 'torch.nn.Conv2d', 'nn.Conv2d', (['group_width', '(planes * 4)'], {'kernel_size': '(1)', 'bias': '(False)'}), '(group_width, planes * 4, kernel_size=1, bias=False)\n', (3721, 3773), True, 'import torch.nn as nn\n'), ((3941, 3962), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3948, 3962), True, 'import torch.nn as nn\n'), ((8378, 8399), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (8385, 8399), True, 'import torch.nn as nn\n'), ((8423, 8471), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(2)', 'padding': '(1)'}), '(kernel_size=3, stride=2, padding=1)\n', (8435, 8471), True, 'import torch.nn as nn\n'), ((9859, 9887), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1, 1)'], {}), '((1, 1))\n', (9879, 9887), True, 'import torch.nn as nn\n'), ((10423, 10499), 'models.utils.gen_adj_num', 'gen_adj_num', ([], {'labels': 'extract_fields', 'agree_rate': 'agree_rate', 'csv_path': 'csv_path'}), '(labels=extract_fields, agree_rate=agree_rate, csv_path=csv_path)\n', (10434, 10499), False, 'from models.utils import gen_adj_num, gen_adj\n'), ((11252, 11269), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {}), '(0.2)\n', (11264, 11269), True, 'import torch.nn as nn\n'), ((11293, 11302), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (11300, 11302), True, 'import torch.nn as nn\n'), ((11330, 11394), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_classes', '(128)'], 
{'kernel_size': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(num_classes, 128, kernel_size=1, stride=1, bias=False)\n', (11339, 11394), True, 'import torch.nn as nn\n'), ((11422, 11486), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_classes', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(num_classes, 256, kernel_size=1, stride=1, bias=False)\n', (11431, 11486), True, 'import torch.nn as nn\n'), ((11514, 11578), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_classes', '(512)'], {'kernel_size': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(num_classes, 512, kernel_size=1, stride=1, bias=False)\n', (11523, 11578), True, 'import torch.nn as nn\n'), ((11606, 11671), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_classes', '(1024)'], {'kernel_size': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(num_classes, 1024, kernel_size=1, stride=1, bias=False)\n', (11615, 11671), True, 'import torch.nn as nn\n'), ((11695, 11757), 'models.common.conv1x1', 'conv1x1', ([], {'in_channels': '(2048)', 'out_channels': 'num_classes', 'bias': '(True)'}), '(in_channels=2048, out_channels=num_classes, bias=True)\n', (11702, 11757), False, 'from models.common import conv1x1\n'), ((14946, 14968), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (14959, 14968), True, 'import torch.nn as nn\n'), ((16393, 16417), 'torch.matmul', 'torch.matmul', (['feature', 'x'], {}), '(feature, x)\n', (16405, 16417), False, 'import torch\n'), ((19654, 19687), 'torch.matmul', 'torch.matmul', (['input', 'self.weight1'], {}), '(input, self.weight1)\n', (19666, 19687), False, 'import torch\n'), ((19706, 19741), 'torch.matmul', 'torch.matmul', (['support', 'self.weight2'], {}), '(support, self.weight2)\n', (19718, 19741), False, 'import torch\n'), ((19759, 19785), 'torch.matmul', 'torch.matmul', (['adj', 'support'], {}), '(adj, support)\n', (19771, 19785), False, 'import torch\n'), ((20558, 20606), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['self.W.data'], {'gain': 
'(1.414)'}), '(self.W.data, gain=1.414)\n', (20581, 20606), True, 'import torch.nn as nn\n'), ((20686, 20734), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['self.a.data'], {'gain': '(1.414)'}), '(self.a.data, gain=1.414)\n', (20709, 20734), True, 'import torch.nn as nn\n'), ((20908, 20932), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['self.alpha'], {}), '(self.alpha)\n', (20920, 20932), True, 'import torch.nn as nn\n'), ((21308, 21331), 'torch.mm', 'torch.mm', (['input', 'self.W'], {}), '(input, self.W)\n', (21316, 21331), False, 'import torch\n'), ((21703, 21736), 'torch.where', 'torch.where', (['(adj > 0)', 'e', 'zero_vec'], {}), '(adj > 0, e, zero_vec)\n', (21714, 21736), False, 'import torch\n'), ((21757, 21784), 'torch.nn.functional.softmax', 'F.softmax', (['attention'], {'dim': '(1)'}), '(attention, dim=1)\n', (21766, 21784), True, 'import torch.nn.functional as F\n'), ((21805, 21863), 'torch.nn.functional.dropout', 'F.dropout', (['attention', 'self.dropout'], {'training': 'self.training'}), '(attention, self.dropout, training=self.training)\n', (21814, 21863), True, 'import torch.nn.functional as F\n'), ((21882, 21908), 'torch.matmul', 'torch.matmul', (['attention', 'h'], {}), '(attention, h)\n', (21894, 21908), False, 'import torch\n'), ((2274, 2308), 'torch.nn.AvgPool2d', 'nn.AvgPool2d', (['(3)', 'stride'], {'padding': '(1)'}), '(3, stride, padding=1)\n', (2286, 2308), True, 'import torch.nn as nn\n'), ((2629, 2896), 'models.feature_extraction.splat.SplAtConv2d', 'SplAtConv2d', (['group_width', 'group_width'], {'kernel_size': '(3)', 'stride': 'stride', 'padding': 'dilation', 'dilation': 'dilation', 'groups': 'cardinality', 'bias': '(False)', 'radix': 'radix', 'rectify': 'rectified_conv', 'rectify_avg': 'rectify_avg', 'norm_layer': 'norm_layer', 'dropblock_prob': 'dropblock_prob'}), '(group_width, group_width, kernel_size=3, stride=stride, padding\n =dilation, dilation=dilation, groups=cardinality, bias=False, radix=\n radix, 
rectify=rectified_conv, rectify_avg=rectify_avg, norm_layer=\n norm_layer, dropblock_prob=dropblock_prob)\n', (2640, 2896), False, 'from models.feature_extraction.splat import SplAtConv2d\n'), ((3897, 3920), 'torch.nn.init.zeros_', 'zeros_', (['self.bn3.weight'], {}), '(self.bn3.weight)\n', (3903, 3920), False, 'from torch.nn.init import zeros_\n'), ((4133, 4153), 'models.attention_map.SEModule', 'SEModule', (['(planes * 4)'], {}), '(planes * 4)\n', (4141, 4153), False, 'from models.attention_map import SEModule, SpatialCGNL, SAModule\n'), ((9908, 9930), 'torch.nn.Dropout', 'nn.Dropout', (['final_drop'], {}), '(final_drop)\n', (9918, 9930), True, 'import torch.nn as nn\n'), ((10577, 10602), 'os.path.exists', 'os.path.exists', (['word_file'], {}), '(word_file)\n', (10591, 10602), False, 'import os\n'), ((10623, 10656), 'numpy.random.randn', 'np.random.randn', (['num_classes', '(300)'], {}), '(num_classes, 300)\n', (10638, 10656), True, 'import numpy as np\n'), ((13051, 13078), 'torch.nn.Sequential', 'nn.Sequential', (['*down_layers'], {}), '(*down_layers)\n', (13064, 13078), True, 'import torch.nn as nn\n'), ((17310, 17397), 'torch.hub.load_state_dict_from_url', 'torch.hub.load_state_dict_from_url', (["resnest_model_urls['resnest200']"], {'progress': '(True)'}), "(resnest_model_urls['resnest200'],\n progress=True)\n", (17344, 17397), False, 'import torch\n'), ((17876, 17963), 'torch.hub.load_state_dict_from_url', 'torch.hub.load_state_dict_from_url', (["resnest_model_urls['resnest101']"], {'progress': '(True)'}), "(resnest_model_urls['resnest101'],\n progress=True)\n", (17910, 17963), False, 'import torch\n'), ((18440, 18526), 'torch.hub.load_state_dict_from_url', 'torch.hub.load_state_dict_from_url', (["resnest_model_urls['resnest50']"], {'progress': '(True)'}), "(resnest_model_urls['resnest50'],\n progress=True)\n", (18474, 18526), False, 'import torch\n'), ((18982, 19024), 'torch.Tensor', 'torch.Tensor', (['in_features', 'middle_features'], {}), '(in_features, 
middle_features)\n', (18994, 19024), False, 'import torch\n'), ((19059, 19102), 'torch.Tensor', 'torch.Tensor', (['middle_features', 'out_features'], {}), '(middle_features, out_features)\n', (19071, 19102), False, 'import torch\n'), ((20503, 20548), 'torch.zeros', 'torch.zeros', ([], {'size': '(in_features, out_features)'}), '(size=(in_features, out_features))\n', (20514, 20548), False, 'import torch\n'), ((20637, 20676), 'torch.zeros', 'torch.zeros', ([], {'size': '(2 * out_features, 1)'}), '(size=(2 * out_features, 1))\n', (20648, 20676), False, 'import torch\n'), ((21664, 21682), 'torch.ones_like', 'torch.ones_like', (['e'], {}), '(e)\n', (21679, 21682), False, 'import torch\n'), ((22030, 22044), 'torch.nn.functional.elu', 'F.elu', (['h_prime'], {}), '(h_prime)\n', (22035, 22044), True, 'import torch.nn.functional as F\n'), ((3124, 3292), 'rfconv.RFConv2d', 'RFConv2d', (['group_width', 'group_width'], {'kernel_size': '(3)', 'stride': 'stride', 'padding': 'dilation', 'dilation': 'dilation', 'groups': 'cardinality', 'bias': '(False)', 'average_mode': 'rectify_avg'}), '(group_width, group_width, kernel_size=3, stride=stride, padding=\n dilation, dilation=dilation, groups=cardinality, bias=False,\n average_mode=rectify_avg)\n', (3132, 3292), False, 'from rfconv import RFConv2d\n'), ((3438, 3577), 'torch.nn.Conv2d', 'nn.Conv2d', (['group_width', 'group_width'], {'kernel_size': '(3)', 'stride': 'stride', 'padding': 'dilation', 'dilation': 'dilation', 'groups': 'cardinality', 'bias': '(False)'}), '(group_width, group_width, kernel_size=3, stride=stride, padding=\n dilation, dilation=dilation, groups=cardinality, bias=False)\n', (3447, 3577), True, 'import torch.nn as nn\n'), ((7797, 7818), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (7804, 7818), True, 'import torch.nn as nn\n'), ((7986, 8007), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (7993, 8007), True, 'import torch.nn as nn\n'), ((10784, 10798), 
'numpy.load', 'np.load', (['point'], {}), '(point)\n', (10791, 10798), True, 'import numpy as np\n'), ((15016, 15033), 'models.utils.gen_adj', 'gen_adj', (['self.adj'], {}), '(self.adj)\n', (15023, 15033), False, 'from models.utils import gen_adj_num, gen_adj\n'), ((19155, 19184), 'torch.Tensor', 'torch.Tensor', (['(1)', 'out_features'], {}), '(1, out_features)\n', (19167, 19184), False, 'import torch\n'), ((20787, 20816), 'torch.Tensor', 'torch.Tensor', (['(1)', 'out_features'], {}), '(1, out_features)\n', (20799, 20816), False, 'import torch\n'), ((10216, 10234), 'math.sqrt', 'math.sqrt', (['(2.0 / n)'], {}), '(2.0 / n)\n', (10225, 10234), False, 'import math\n'), ((10529, 10551), 'torch.from_numpy', 'torch.from_numpy', (['_adj'], {}), '(_adj)\n', (10545, 10551), False, 'import torch\n'), ((10900, 10922), 'torch.from_numpy', 'torch.from_numpy', (['word'], {}), '(word)\n', (10916, 10922), False, 'import torch\n'), ((12640, 12731), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.inplanes', '(planes * block.expansion)'], {'kernel_size': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(self.inplanes, planes * block.expansion, kernel_size=1, stride=1,\n bias=False)\n', (12649, 12731), True, 'import torch.nn as nn\n'), ((12833, 12930), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.inplanes', '(planes * block.expansion)'], {'kernel_size': '(1)', 'stride': 'stride', 'bias': '(False)'}), '(self.inplanes, planes * block.expansion, kernel_size=1, stride=\n stride, bias=False)\n', (12842, 12930), True, 'import torch.nn as nn\n'), ((21594, 21623), 'torch.matmul', 'torch.matmul', (['a_input', 'self.a'], {}), '(a_input, self.a)\n', (21606, 21623), False, 'import torch\n'), ((12264, 12356), 'torch.nn.AvgPool2d', 'nn.AvgPool2d', ([], {'kernel_size': 'stride', 'stride': 'stride', 'ceil_mode': '(True)', 'count_include_pad': '(False)'}), '(kernel_size=stride, stride=stride, ceil_mode=True,\n count_include_pad=False)\n', (12276, 12356), True, 'import torch.nn as nn\n'), ((12452, 12530), 
'torch.nn.AvgPool2d', 'nn.AvgPool2d', ([], {'kernel_size': '(1)', 'stride': '(1)', 'ceil_mode': '(True)', 'count_include_pad': '(False)'}), '(kernel_size=1, stride=1, ceil_mode=True, count_include_pad=False)\n', (12464, 12530), True, 'import torch.nn as nn\n')]
# AUTOGENERATED! DO NOT EDIT! File to edit: 02_inference.ipynb (unless otherwise specified). __all__ = ['device', 'pad_output', 'get_activ_offsets_mns'] # Cell #from fastai.vision.all import * from fastai import * from typing import * from torch import tensor, Tensor import torch import torchvision # Needed to invoke torchvision.ops.mns function # Cell # Automatically sets for GPU or CPU environments device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # Cell # Pad tensors so that they have uniform dimentions: (batch size, no of items in a batch, 4) and (batch size, no of items in a batch, 21) def pad_output(l_bb:List, l_scr:List, l_idx:List, no_classes:int): '''Pad tensors so that they have uniform dimentions: (batch size, no of items in a batch, 4) and (batch size, no of items in a batch, 21) Inputs: l_bb - list of tensors containing individual non-uniform sized bounding boxes l_scr - list of tensors containing class index values (i.e. 1 - airplane) l_idx - list of tensors containing class index values (i.e. 1 - airplane) no_classes - Number of classes, Integer Outputs: Uniform-sized tensors: bounding box tensor and score tensor with dims: (batch size, no of items in a batch, 4) and (batch size, no of items in a batch, 21)''' if len([len(img_bb) for img_bb in l_bb]) == 0.: print(F'Image did not pass the scoring threshold') return mx_len = max([len(img_bb) for img_bb in l_bb]) # Calculate maximun lenght of the boxes in the batch l_b, l_c, l_x, l_cat = [], [], [], [] # Create Bounding Box tensors # zeroed tensor accumulators for i, ntr in enumerate(zip(l_bb, l_scr, l_idx)): bbox, cls, idx = ntr[0], ntr[1], ntr[2] # Unpack variables tsr_len = mx_len - bbox.shape[0] # Calculate the number of zero-based rows to add m = nn.ConstantPad2d((0, 0, 0, tsr_len), 0.) 
# Prepare to pad the box tensor with zero entries l_b.append(m(bbox)) # Add appropriate zero-based box rows and add to list # Create Category tensors cat_base = torch.zeros(mx_len-bbox.shape[0], dtype=torch.int32) img_cat = torch.cat((idx, cat_base), dim=0) l_cat.append(img_cat) # Create Score tensors img_cls = [] # List to construct class vectors for ix in range(idx.shape[0]): # Construct class vectors of dim(no of classes) cls_base = torch.zeros(no_classes).to(device) # Base zero-based class vector cls_base[idx[ix]] = cls[ix] # Add the score in the nth position img_cls.append(cls_base) img_stack = torch.stack(img_cls) # Create single tensor per image img_stack_out = m(img_stack) l_c.append( img_stack_out ) # Add appropriate zero-based class rows and add to list return (TensorBBox(torch.stack(l_b,0)), TensorMultiCategory(torch.stack(l_c,0)), TensorMultiCategory(torch.stack(l_cat,0)) ) # Cell def get_activ_offsets_mns(anchrs:Tensor, activs:Tensor, no_classes:int, threshold:float=0.5): ''' Takes in activations and calculates corresponding anchor box offsets. It then filters the resulting boxes through MNS Inputs: anchrs - Anchors as Tensor activs - Activations as Tensor no_classes - Number of classes (categories) threshold - Coarse filtering. Default = 0.5 Output: one_batch_boxes, one_batch_scores as Tuple''' p_bboxes, p_classes = activs # Read p_bboxes: [32, 189,4] Torch.Tensor and p_classes: [32, 189, 21] Torch.Tensor from self.learn.pred #scores = torch.sigmoid(p_classes) # Calculate the confidence levels, scores, for class predictions [0, 1] scores = torch.softmax(p_classes, -1) # Calculate the confidence levels, scores, for class predictions [0, 1] - Probabilistic offset_boxes = activ_decode(p_bboxes, anchrs) # Return anchors + anchor offsets wiith format (batch, No Items in Batch, 4) # For each item in batch, and for each class in the item, filter the image by passing it through NMS. 
Keep preds with IOU > thresshold one_batch_boxes = []; one_batch_scores = []; one_batch_cls_pred = [] # Agregators at the bath level for i in range(p_classes.shape[0]): # For each image in batch ... batch_p_boxes = offset_boxes[i] # box preds for the current batch batch_scores = scores[i] # Keep scores for the current batch max_scores, cls_idx = torch.max(batch_scores, 1 ) # Keep batch class indexes bch_th_mask = max_scores > threshold # Threshold mask for batch bch_keep_boxes = batch_p_boxes[bch_th_mask] # " bch_keep_scores = batch_scores[bch_th_mask] # " bch_keep_cls_idx = cls_idx[bch_th_mask] # Agregators per image in a batch img_boxes = [] # Bounding boxes per image img_scores = [] # Scores per image img_cls_pred = [] # Class predictons per image for c in range (1,no_classes): # Loop through each class cls_mask = bch_keep_cls_idx==c # Keep masks for the current class if cls_mask.sum() == 0: continue # Weed out images with no positive class masks cls_boxes = bch_keep_boxes[cls_mask] # Keep boxes per image cls_scores = bch_keep_scores[cls_mask].max(dim=1)[0] # Keep class scores for the current image nms_keep_idx = torchvision.ops.nms(cls_boxes, cls_scores, iou_threshold=0.5) # Filter images by passing them through NMS img_boxes += [*cls_boxes[nms_keep_idx]] # Agregate cls_boxes into tensors for all classes box_stack = torch.stack(img_boxes,0) # Transform individual tensors into a single box tensor img_scores += [*cls_scores[nms_keep_idx]] # Agregate cls_scores into tensors for all classes score_stack = torch.stack(img_scores, 0) # Transform individual tensors into a single score tensor img_cls_pred += [*tensor([c]*len(nms_keep_idx))] cls_pred_stack = torch.stack(img_cls_pred, 0) batch_mask = score_stack > threshold # filter final lists tto be greater than threshold box_stack = box_stack[batch_mask] # " score_stack = score_stack[batch_mask] # " cls_pred_stack = cls_pred_stack[batch_mask] # " if 'box_stack' not in locals(): continue # Failed to find any valid 
classes one_batch_boxes.append(box_stack) # Agregate bounding boxes for the batch one_batch_scores.append(score_stack) # Agregate scores for the batch one_batch_cls_pred.append(cls_pred_stack) # Pad individual box and score tensors into uniform-sized box and score tensors of shapes: (batch, no 0f items in batch, 4) and (batch, no 0f items in batch, 21) one_batch_boxes, one_batch_scores, one_batch_cats = pad_output(one_batch_boxes, one_batch_scores, one_batch_cls_pred, no_classes) return (one_batch_boxes, one_batch_cats)
[ "torch.stack", "torch.max", "torchvision.ops.nms", "torch.softmax", "torch.cuda.is_available", "torch.zeros", "torch.cat" ]
[((3994, 4022), 'torch.softmax', 'torch.softmax', (['p_classes', '(-1)'], {}), '(p_classes, -1)\n', (4007, 4022), False, 'import torch\n'), ((444, 469), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (467, 469), False, 'import torch\n'), ((2238, 2292), 'torch.zeros', 'torch.zeros', (['(mx_len - bbox.shape[0])'], {'dtype': 'torch.int32'}), '(mx_len - bbox.shape[0], dtype=torch.int32)\n', (2249, 2292), False, 'import torch\n'), ((2305, 2338), 'torch.cat', 'torch.cat', (['(idx, cat_base)'], {'dim': '(0)'}), '((idx, cat_base), dim=0)\n', (2314, 2338), False, 'import torch\n'), ((2847, 2867), 'torch.stack', 'torch.stack', (['img_cls'], {}), '(img_cls)\n', (2858, 2867), False, 'import torch\n'), ((4793, 4819), 'torch.max', 'torch.max', (['batch_scores', '(1)'], {}), '(batch_scores, 1)\n', (4802, 4819), False, 'import torch\n'), ((3104, 3123), 'torch.stack', 'torch.stack', (['l_b', '(0)'], {}), '(l_b, 0)\n', (3115, 3123), False, 'import torch\n'), ((3145, 3164), 'torch.stack', 'torch.stack', (['l_c', '(0)'], {}), '(l_c, 0)\n', (3156, 3164), False, 'import torch\n'), ((3186, 3207), 'torch.stack', 'torch.stack', (['l_cat', '(0)'], {}), '(l_cat, 0)\n', (3197, 3207), False, 'import torch\n'), ((5860, 5921), 'torchvision.ops.nms', 'torchvision.ops.nms', (['cls_boxes', 'cls_scores'], {'iou_threshold': '(0.5)'}), '(cls_boxes, cls_scores, iou_threshold=0.5)\n', (5879, 5921), False, 'import torchvision\n'), ((6093, 6118), 'torch.stack', 'torch.stack', (['img_boxes', '(0)'], {}), '(img_boxes, 0)\n', (6104, 6118), False, 'import torch\n'), ((6314, 6340), 'torch.stack', 'torch.stack', (['img_scores', '(0)'], {}), '(img_scores, 0)\n', (6325, 6340), False, 'import torch\n'), ((6487, 6515), 'torch.stack', 'torch.stack', (['img_cls_pred', '(0)'], {}), '(img_cls_pred, 0)\n', (6498, 6515), False, 'import torch\n'), ((2622, 2645), 'torch.zeros', 'torch.zeros', (['no_classes'], {}), '(no_classes)\n', (2633, 2645), False, 'import torch\n')]
# The MIT License (MIT) # # Copyright (c) 2014 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. """ Redis backend. """ import redis from flagon import errors from flagon.backends import Backend class RedisBackend(Backend): def __init__(self, host, port, db): """ Creates an instance of the RedisBackend. :rtype: RedisBackend """ # https://pypi.python.org/pypi/redis/2.10.1 pool = redis.ConnectionPool(host=host, port=port, db=db) self._server = redis.Redis( connection_pool=pool, charset='utf-8', errors='strict', decode_responses=False) def set(self, name, key, value): """ Sets a value for a feature. This is a proposed name only!!! :param name: name of the feature. :rtype: bool """ self._server.hset(name, key, value) def exists(self, name, key): """ Checks if a feature exists. :param name: name of the feature. :rtype: bool """ return self._server.hexists(name, key) def is_active(self, name, key): """ Checks if a feature is on. :param name: name of the feature. 
:rtype: bool :raises: UnknownFeatureError """ if not self._server.hexists(name, key): raise errors.UnknownFeatureError('Unknown feature: %s' % name) if self._server.hget(name, key) == 'True': return True return False def _turn(self, name, key, value): """ Turns a feature off. :param name: name of the feature. :param value: Value to turn name to. :raises: UnknownFeatureError """ # TODO: Copy paste --- :-( if not self._server.hexists(name, key): raise errors.UnknownFeatureError('Unknown feature: %s %s' % ( name, key)) self._server.hset(name, key, value) turn_on = lambda s, name: s._turn(name, 'active', True) turn_off = lambda s, name: s._turn(name, 'active', False)
[ "redis.ConnectionPool", "flagon.errors.UnknownFeatureError", "redis.Redis" ]
[((1452, 1501), 'redis.ConnectionPool', 'redis.ConnectionPool', ([], {'host': 'host', 'port': 'port', 'db': 'db'}), '(host=host, port=port, db=db)\n', (1472, 1501), False, 'import redis\n'), ((1525, 1620), 'redis.Redis', 'redis.Redis', ([], {'connection_pool': 'pool', 'charset': '"""utf-8"""', 'errors': '"""strict"""', 'decode_responses': '(False)'}), "(connection_pool=pool, charset='utf-8', errors='strict',\n decode_responses=False)\n", (1536, 1620), False, 'import redis\n'), ((2372, 2428), 'flagon.errors.UnknownFeatureError', 'errors.UnknownFeatureError', (["('Unknown feature: %s' % name)"], {}), "('Unknown feature: %s' % name)\n", (2398, 2428), False, 'from flagon import errors\n'), ((2844, 2910), 'flagon.errors.UnknownFeatureError', 'errors.UnknownFeatureError', (["('Unknown feature: %s %s' % (name, key))"], {}), "('Unknown feature: %s %s' % (name, key))\n", (2870, 2910), False, 'from flagon import errors\n')]
from math import sin, cos, pi f = lambda x: 9*pi*cos(x) + 7*sin(x) + 4*x - 5*x*cos(x) # Analytical Solution df = lambda x: -9*pi*sin(x) + 7*cos(x) + 4 - 5*(cos(x)-x*sin(x)) dy = lambda x,y,u: u # 1st Derivative, y' = u du = lambda x,y,u: 4*x + 10*sin(x) - y # 2nd Derivative, u' = 4x+10sin(x)-y x = pi # Lower limit, [π xn = 2*pi # Upper limit, 2π] y = 0 # Initial condition, y(π) = 0 u = 2 # Initial condition, u(π) = 2 h = 0.5 # Width of each division, step size # h = 0.1 # Smaller step size gives less error n = int((xn-x)/h) # Number of divisions of the domain print('x \t\ty(RK4) \t\ty\'(RK4) \ty(Exact) \ty\'(Exact)') # Header of Output print('%f \t%f \t%f \t%f \t%f' % (x, y, u, f(x), df(x))) # Initial x and y for i in range(n): L1 = h * du(x,y,u) K1 = h * dy(x,y,u) L2 = h * du(x + h/2, y + K1/2, u + L1/2) K2 = h * dy(x + h/2, y + K1/2, u + L1/2) L3 = h * du(x + h/2, y + K2/2, u + L2/2) K3 = h * dy(x + h/2, y + K2/2, u + L2/2) L4 = h * du(x + h, y + K3, u + L3) K4 = h * dy(x + h, y + K3, u + L3) u += 1/6*(L1 + 2*L2 + 2*L3 + L4) # u(x+h) = u(x) + 1/6(L1+2L2+2L3+L4) y += 1/6*(K1 + 2*K2 + 2*K3 + K4) # y(x+h) = y(x) + 1/6(K1+2K2+2K3+K4) x += h # x for next step, x = x + h print('%f \t%f \t%f \t%f \t%f' % (x, y, u, f(x), df(x))) """ 2nd order ODE y'' = f(x,y,y') should be divided into two first order ODE's y' = u and u' = f(x,y,u) The two equations are solved simultaneously using RK4 L1 = h u'(x,y,u) K1 = h y'(x,y,u) L2 = h u'(x + h/2, y + K1/2, u + L1/2) K2 = h y'(x + h/2, y + K1/2, u + L1/2) L3 = h u'(x + h/2, y + K2/2, u + L2/2) K3 = h y'(x + h/2, y + K2/2, u + L2/2) L4 = h u'(x + h, y + K3, u + L3) K4 = h y'(x + h, y + K3, u + L3) u(x+h) = u(x) + 1/6 (L1 + 2 L2 + 2 L3 + L4) y(x+h) = y(x) + 1/6 (K1 + 2 K2 + 2 K3 + K4) The initial condition is the value of y(x) at initial domain x Find the numerical solution of the following differential equation over the domain [π,2π]: y''+y = 4x+10sin(x), y(π) = 0, y'(π) = 2 y' = u, y(π) = 0 u' = 4x+10sin(x)-y, u(π) = 2 Analytical 
Solution: y = 9π cos(x) + 7sin(x) + 4x - 5x cos(x) """
[ "math.cos", "math.sin" ]
[((79, 85), 'math.cos', 'cos', (['x'], {}), '(x)\n', (82, 85), False, 'from math import sin, cos, pi\n'), ((156, 162), 'math.cos', 'cos', (['x'], {}), '(x)\n', (159, 162), False, 'from math import sin, cos, pi\n'), ((267, 273), 'math.sin', 'sin', (['x'], {}), '(x)\n', (270, 273), False, 'from math import sin, cos, pi\n'), ((49, 55), 'math.cos', 'cos', (['x'], {}), '(x)\n', (52, 55), False, 'from math import sin, cos, pi\n'), ((60, 66), 'math.sin', 'sin', (['x'], {}), '(x)\n', (63, 66), False, 'from math import sin, cos, pi\n'), ((129, 135), 'math.sin', 'sin', (['x'], {}), '(x)\n', (132, 135), False, 'from math import sin, cos, pi\n'), ((140, 146), 'math.cos', 'cos', (['x'], {}), '(x)\n', (143, 146), False, 'from math import sin, cos, pi\n'), ((165, 171), 'math.sin', 'sin', (['x'], {}), '(x)\n', (168, 171), False, 'from math import sin, cos, pi\n')]
from records_mover.utils import quiet_remove from records_mover.records.delimited import cant_handle_hint, ValidatedRecordsHints from typing import Set, Tuple, Optional from .types import DateOrderStyle, DateOutputStyle def determine_date_output_style(unhandled_hints: Set[str], hints: ValidatedRecordsHints, fail_if_cant_handle_hint: bool) -> \ Tuple[DateOutputStyle, Optional[DateOrderStyle]]: # see docs in the types module dateformat = hints.dateformat timeonlyformat = hints.timeonlyformat datetimeformattz = hints.datetimeformattz datetimeformat = hints.datetimeformat date_order_style: Optional[DateOrderStyle] = None if (dateformat == 'YYYY-MM-DD' and timeonlyformat == 'HH24:MI:SS' and datetimeformattz in ['YYYY-MM-DD HH:MI:SSOF', 'YYYY-MM-DD HH24:MI:SSOF'] and datetimeformat == 'YYYY-MM-DD HH24:MI:SS'): date_output_style: DateOutputStyle = 'ISO' # date_order_style doesn't really matter, as ISO is not ambiguous else: # 'SQL', 'Postgres' and 'German' all support only alphabetic # timezone indicators, which aren't yet supported in the # records spec cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints) quiet_remove(unhandled_hints, 'dateformat') quiet_remove(unhandled_hints, 'timeonlyformat') quiet_remove(unhandled_hints, 'datetimeformattz') quiet_remove(unhandled_hints, 'datetimeformat') return (date_output_style, date_order_style)
[ "records_mover.records.delimited.cant_handle_hint", "records_mover.utils.quiet_remove" ]
[((1346, 1389), 'records_mover.utils.quiet_remove', 'quiet_remove', (['unhandled_hints', '"""dateformat"""'], {}), "(unhandled_hints, 'dateformat')\n", (1358, 1389), False, 'from records_mover.utils import quiet_remove\n'), ((1394, 1441), 'records_mover.utils.quiet_remove', 'quiet_remove', (['unhandled_hints', '"""timeonlyformat"""'], {}), "(unhandled_hints, 'timeonlyformat')\n", (1406, 1441), False, 'from records_mover.utils import quiet_remove\n'), ((1446, 1495), 'records_mover.utils.quiet_remove', 'quiet_remove', (['unhandled_hints', '"""datetimeformattz"""'], {}), "(unhandled_hints, 'datetimeformattz')\n", (1458, 1495), False, 'from records_mover.utils import quiet_remove\n'), ((1500, 1547), 'records_mover.utils.quiet_remove', 'quiet_remove', (['unhandled_hints', '"""datetimeformat"""'], {}), "(unhandled_hints, 'datetimeformat')\n", (1512, 1547), False, 'from records_mover.utils import quiet_remove\n'), ((1271, 1340), 'records_mover.records.delimited.cant_handle_hint', 'cant_handle_hint', (['fail_if_cant_handle_hint', '"""datetimeformattz"""', 'hints'], {}), "(fail_if_cant_handle_hint, 'datetimeformattz', hints)\n", (1287, 1340), False, 'from records_mover.records.delimited import cant_handle_hint, ValidatedRecordsHints\n')]
from typing import Any, Dict, List, Type, TypeVar, Union, cast import attr from ..types import UNSET, Unset T = TypeVar("T", bound="Cluster") @attr.s(auto_attribs=True) class Cluster: """ """ addr: Union[Unset, str] = UNSET auth_timeout: Union[Unset, int] = UNSET cluster_port: Union[Unset, int] = UNSET name: Union[Unset, str] = UNSET tls_timeout: Union[Unset, int] = UNSET urls: Union[Unset, List[str]] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: addr = self.addr auth_timeout = self.auth_timeout cluster_port = self.cluster_port name = self.name tls_timeout = self.tls_timeout urls: Union[Unset, List[str]] = UNSET if not isinstance(self.urls, Unset): urls = self.urls field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if addr is not UNSET: field_dict['addr'] = addr if auth_timeout is not UNSET: field_dict['auth_timeout'] = auth_timeout if cluster_port is not UNSET: field_dict['cluster_port'] = cluster_port if name is not UNSET: field_dict['name'] = name if tls_timeout is not UNSET: field_dict['tls_timeout'] = tls_timeout if urls is not UNSET: field_dict['urls'] = urls return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() addr = d.pop("addr", UNSET) auth_timeout = d.pop("auth_timeout", UNSET) cluster_port = d.pop("cluster_port", UNSET) name = d.pop("name", UNSET) tls_timeout = d.pop("tls_timeout", UNSET) urls = cast(List[str], d.pop("urls", UNSET)) cluster = cls( addr=addr, auth_timeout=auth_timeout, cluster_port=cluster_port, name=name, tls_timeout=tls_timeout, urls=urls, ) cluster.additional_properties = d return cluster @property def additional_keys(self) -> List[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: return self.additional_properties[key] def __setitem__(self, key: str, value: Any) -> None: self.additional_properties[key] = value def 
__delitem__(self, key: str) -> None: del self.additional_properties[key] def __contains__(self, key: str) -> bool: return key in self.additional_properties
[ "attr.s", "attr.ib", "typing.TypeVar" ]
[((115, 144), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {'bound': '"""Cluster"""'}), "('T', bound='Cluster')\n", (122, 144), False, 'from typing import Any, Dict, List, Type, TypeVar, Union, cast\n'), ((148, 173), 'attr.s', 'attr.s', ([], {'auto_attribs': '(True)'}), '(auto_attribs=True)\n', (154, 173), False, 'import attr\n'), ((491, 524), 'attr.ib', 'attr.ib', ([], {'init': '(False)', 'factory': 'dict'}), '(init=False, factory=dict)\n', (498, 524), False, 'import attr\n')]
# import tkinter module from tkinter import * # import other necessery modules import random # Vigenère cipher for encryption and decryption import base64 # creating root object root = Tk() # defining size of window root.geometry("1200x4000") # setting up the title of window root.title("Message Encrypter and Decrypter") Tops = Frame(root, width=1600, relief=SUNKEN) Tops.pack(side=TOP) f1 = Frame(root, width=800, relief=SUNKEN) f1.pack(side=LEFT) # ============================================== lblInfo = Label(Tops, font=('helvetica', 40, 'bold', 'underline'), text="SECRET MESSAGING", fg="Black", bd=10, anchor='w') lblInfo.grid(row=0, column=0) # Initializing variables Msg = StringVar() key = StringVar() mode = StringVar() Result = StringVar() # labels for the message lblMsg = Label(f1, font=('arial', 16, 'bold'), text="MESSAGE", bd=16, anchor="w") lblMsg.grid(row=1, column=0) # Entry box for the message txtMsg = Entry(f1, font=('arial', 16, 'bold'), textvariable=Msg, bd=10, insertwidth=4, bg="powder blue", justify='right') txtMsg.grid(row=1, column=1) # labels for the key lblkey = Label(f1, font=('arial', 16, 'bold'), text="KEY (Only Integer)", bd=16, anchor="w") lblkey.grid(row=2, column=0) # Entry box for the key txtkey = Entry(f1, font=('arial', 16, 'bold'), textvariable=key, bd=10, insertwidth=4, bg="powder blue", justify='right') txtkey.grid(row=2, column=1) # labels for the mode lblmode = Label(f1, font=('arial', 16, 'bold'), text="MODE(e for encrypt, d for decrypt)", bd=16, anchor="w") lblmode.grid(row=3, column=0) # Entry box for the mode txtmode = Entry(f1, font=('arial', 16, 'bold'), textvariable=mode, bd=10, insertwidth=4, bg="powder blue", justify='right') txtmode.grid(row=3, column=1) # labels for the result lblResult = Label(f1, font=('arial', 16, 'bold'), text="The Result-", bd=16, anchor="w") lblResult.grid(row=2, column=2) # Entry box for the result txtResult = Entry(f1, font=('arial', 16, 'bold'), textvariable=Result, bd=10, insertwidth=4, 
bg="powder blue", justify='right') txtResult.grid(row=2, column=3) # Vigenère cipher # Function to encode def encode(key, msg): enc = [] for i in range(len(msg)): key_c = key[i % len(key)] enc_c = chr((ord(msg[i]) + ord(key_c)) % 256) enc.append(enc_c) print("enc:", enc) return base64.urlsafe_b64encode("".join(enc).encode()).decode() # Function to decode def decode(key, enc): dec = [] enc = base64.urlsafe_b64decode(enc).decode() for i in range(len(enc)): key_c = key[i % len(key)] dec_c = chr((256 + ord(enc[i]) - ord(key_c)) % 256) dec.append(dec_c) print("dec:", dec) return "".join(dec) def Results(): # print("Message= ", (Msg.get())) msg = Msg.get() k = key.get() m = mode.get() if (m == 'e'): Result.set(encode(k, msg)) else: Result.set(decode(k, msg)) # exit function def qExit(): root.destroy() # Function to reset the window def Reset(): Msg.set("") key.set("") mode.set("") Result.set("") # Show message button btnTotal = Button(f1, padx=16, pady=8, bd=16, fg="black", font=('arial', 16, 'bold'), width=10, text="Show Message", bg="yellow", command=Results).grid(row=7, column=1) # Reset button btnReset = Button(f1, padx=16, pady=8, bd=16, fg="black", font=('arial', 16, 'bold'), width=10, text="Reset", bg="green", command=Reset).grid(row=7, column=2) # Exit button btnExit = Button(f1, padx=16, pady=8, bd=16, fg="black", font=('arial', 16, 'bold'), width=10, text="Exit", bg="red", command=qExit).grid(row=7, column=3) # keeps window alive root.mainloop()
[ "base64.urlsafe_b64decode" ]
[((2850, 2879), 'base64.urlsafe_b64decode', 'base64.urlsafe_b64decode', (['enc'], {}), '(enc)\n', (2874, 2879), False, 'import base64\n')]
from selenium import webdriver from fixture.session import SessionHelper from fixture.group import GroupHelper from fixture.contact import ContactHelper class Application: def __init__(self, browser, base_url): if browser == "firefox": self.wd = webdriver.Firefox() elif browser == "Chrome": self.wd = webdriver.Chrome() elif browser == "ie": self.wd = webdriver.Ie() else: raise ValueError("Unrecoznized browse %s" % browser) self.wd.implicitly_wait(5) self.session = SessionHelper(self) self.group = GroupHelper(self) self.contact = ContactHelper(self) self.base_url = base_url def is_valid(self): try: self.wd.current_url return True except: return False def open_home_page(self): wd = self.wd wd.get(self.base_url) def destroy(self): self.wd.quit() def get_serial_number_element_by_id(self, list_elements, id): i = 0 for index_element in list_elements: if index_element.id == id: break else: i = i + 1 return i
[ "fixture.group.GroupHelper", "selenium.webdriver.Chrome", "selenium.webdriver.Firefox", "fixture.session.SessionHelper", "fixture.contact.ContactHelper", "selenium.webdriver.Ie" ]
[((572, 591), 'fixture.session.SessionHelper', 'SessionHelper', (['self'], {}), '(self)\n', (585, 591), False, 'from fixture.session import SessionHelper\n'), ((613, 630), 'fixture.group.GroupHelper', 'GroupHelper', (['self'], {}), '(self)\n', (624, 630), False, 'from fixture.group import GroupHelper\n'), ((654, 673), 'fixture.contact.ContactHelper', 'ContactHelper', (['self'], {}), '(self)\n', (667, 673), False, 'from fixture.contact import ContactHelper\n'), ((273, 292), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (290, 292), False, 'from selenium import webdriver\n'), ((349, 367), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (365, 367), False, 'from selenium import webdriver\n'), ((420, 434), 'selenium.webdriver.Ie', 'webdriver.Ie', ([], {}), '()\n', (432, 434), False, 'from selenium import webdriver\n')]
# https://stackoverflow.com/questions/47295473/how-to-plot-using-matplotlib-python-colahs-deformed-grid """ 这个形状仍然不对。靠近坐标轴的地方变化太大。不管是横轴还是纵轴。应该是以原点为圆心,各个网格均匀分担才对 而不管是否靠近坐标轴 变形的目标,是在某处给定一个球体或者立方体,整个坐标中的网格,靠近这个物体的,受到变形影响,距离越远,影响 越小,直到可以忽略不计 但有个要求是靠近物体的网格,是均匀的受到影响,不能有的多,有的少 或许用极坐标是更好的选择?但是也不行。极坐标如何体现原有的坐标系呢? 极坐标没有平直的地方,到处都不均匀。 """ import numpy as np import matplotlib.pyplot as plt from matplotlib.collections import LineCollection EDGE = 5 STEP = 2 * EDGE + 1 def plot_grid(x, y, ax=None, **kwargs): ax = ax or plt.gca() segs1 = np.stack((x, y), axis=2) segs2 = segs1.transpose(1, 0, 2) ax.add_collection(LineCollection(segs1, **kwargs)) ax.add_collection(LineCollection(segs2, **kwargs)) ax.autoscale() def sig(i): # return 1 return -1 if (i < 0) else 1 def f1(x: np.array, y: np.array): u = [] v = [] for i in range(0, len(x)): ui = [] vi = [] for j in range(0, len(x[i])): # 这样取到的是网格中每个点的坐标,逐行取,从左到右。 xx = x[i][j] yy = y[i][j] print("x=", xx, "y=", yy) expn = - 0.2 * (xx ** 2 + yy ** 2) # 坐标越远离中心,delta越小。当x=+-1或者y=+-1, delta = np.exp(expn) print(expn) uu = xx if xx == 0 else xx + sig(xx) * delta vv = yy if yy == 0 else yy + sig(yy) * delta print("uu=", uu, "vv=", vv) ui.append(uu) vi.append(vv) # vi.append(yy) # ui.append(xx) u.append(ui) v.append(vi) return u, v fig, ax = plt.subplots() ax.set_aspect('equal') grid_x, grid_y = np.meshgrid(np.linspace(-EDGE, EDGE, STEP), np.linspace(-EDGE, EDGE, STEP)) plot_grid(grid_x, grid_y, ax=ax, color="lightgrey") distx, disty = f1(grid_x, grid_y) plot_grid(distx, disty, ax=ax, color="C0") plt.show()
[ "matplotlib.pyplot.gca", "matplotlib.collections.LineCollection", "numpy.exp", "numpy.stack", "numpy.linspace", "matplotlib.pyplot.subplots", "matplotlib.pyplot.show" ]
[((1565, 1579), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1577, 1579), True, 'import matplotlib.pyplot as plt\n'), ((1827, 1837), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1835, 1837), True, 'import matplotlib.pyplot as plt\n'), ((544, 568), 'numpy.stack', 'np.stack', (['(x, y)'], {'axis': '(2)'}), '((x, y), axis=2)\n', (552, 568), True, 'import numpy as np\n'), ((1632, 1662), 'numpy.linspace', 'np.linspace', (['(-EDGE)', 'EDGE', 'STEP'], {}), '(-EDGE, EDGE, STEP)\n', (1643, 1662), True, 'import numpy as np\n'), ((1664, 1694), 'numpy.linspace', 'np.linspace', (['(-EDGE)', 'EDGE', 'STEP'], {}), '(-EDGE, EDGE, STEP)\n', (1675, 1694), True, 'import numpy as np\n'), ((522, 531), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (529, 531), True, 'import matplotlib.pyplot as plt\n'), ((628, 659), 'matplotlib.collections.LineCollection', 'LineCollection', (['segs1'], {}), '(segs1, **kwargs)\n', (642, 659), False, 'from matplotlib.collections import LineCollection\n'), ((683, 714), 'matplotlib.collections.LineCollection', 'LineCollection', (['segs2'], {}), '(segs2, **kwargs)\n', (697, 714), False, 'from matplotlib.collections import LineCollection\n'), ((1195, 1207), 'numpy.exp', 'np.exp', (['expn'], {}), '(expn)\n', (1201, 1207), True, 'import numpy as np\n')]
# Python script uses flask and SQL alchemy to create API requests for weather data from Hawaii. # Import dependencies. import numpy as np import datetime as dt import sqlalchemy from sqlalchemy.ext.automap import automap_base from sqlalchemy.orm import Session from sqlalchemy import create_engine, func from flask import Flask, jsonify #---------------------------------------------------------------------------------------------------------------------- # Rubric - API SQLite Connection & Landing Page # The Flask Application does all of the following: # ✓ Correctly generates the engine to the correct sqlite file # ✓ Uses ​automap_base()​ and reflects the database schema # ✓ Correctly saves references to the tables in the sqlite file (measurement and station) # ✓ Correctly creates and binds the session between the python app and database #---------------------------------------------------------------------------------------------------------------------- # Database Setup engine = create_engine("sqlite:///Resources/hawaii.sqlite") # Reflect an existing database into a new model Base = automap_base() # Reflect the tables Base.prepare(engine, reflect=True) # Save references to tables Measurement = Base.classes.measurement Station = Base.classes.station # Create our session (link) from Python to the DB session = Session(engine) # Flask Setup app = Flask(__name__) # Flask Routes @app.route("/") def home(): """List all available api routes.""" return ( f"Available Routes:<br/>" f"/api/v1.0/precipitation<br/>" f"/api/v1.0/stations<br/>" f"/api/v1.0/mostactivetobs<br/>" f"/api/v1.0/start/<start><br/>" f"/api/v1.0/start_date/end_date/<start_date>/<end_date>" ) #---------------------------------------------------------------------------------------------------------------------- # Rubric - API Static Routes # The static routes do all of the following: # Precipitation route # ✓ Returns the jsonified precipitation data for the last year in the database. 
# ✓ Returns json with the date as the key and the value as the precipitation Stations route. # ✓ Returns jsonified data of all of the stations in the database Tobs route. # ✓ Returns jsonified data for the most active station (USC00519281) for the last year of data. #---------------------------------------------------------------------------------------------------------------------- @app.route("/api/v1.0/precipitation") def precipitation(): """Return JSON where (Key: date / Value: precipitation)""" print("Precipitation API request received.") # Create our session (link) from Python to the DB session = Session(engine) # Query the most recent date in dataset. # Convert to datetime object for calculation below. max_date = session.query(func.max(func.strftime("%Y-%m-%d", Measurement.date))).limit(1).all() max_date = max_date[0][0] max_date = dt.datetime.strptime(max_date, "%Y-%m-%d") # Calculate the date 1 year ago from the last data point in the database year_ago = max_date - dt.timedelta(days=366) # Perform a query to retrieve the last 12 months of precipitation data. precipitations = session.query(func.strftime("%Y-%m-%d", Measurement.date), Measurement.prcp).\ filter(func.strftime("%Y-%m-%d", Measurement.date) >= year_ago).all() # Iterate through precipitations to append all key/values to precipitation dictionary. # Append precipitation dictionary to list, then return jsonify. all_precipitations = [] for date, prcp in precipitations: precipitation_dict = {} precipitation_dict["date"] = date precipitation_dict["prcp"] = prcp all_precipitations.append(precipitation_dict) return jsonify(all_precipitations) @app.route("/api/v1.0/stations") def stations(): """Return JSON API for all stations in dataset.""" print("Stations API request received.") # Create our session (link) from Python to the DB session = Session(engine) # Query all stations in the dataset. 
stations = session.query(Station.id, Station.station, Station.name, Station.latitude, Station.longitude, Station.elevation).all() # Iterate through stations to append all key/values to station dictionary. # Append station dictionary to list, then return jsonify. all_stations = [] for id, station, name, latitude, longitude, elevation in stations: station_dict = {} station_dict["id"] = id station_dict["station"] = station station_dict["name"] = name station_dict["latitude"] = latitude station_dict["longitude"] = longitude station_dict["elevation"] = elevation all_stations.append(station_dict) return jsonify(all_stations) # most active station last year of data @app.route("/api/v1.0/mostactivetobs") def last_year_tobs_most_active(): print("Most Active Station API request received.") # Create our session (link) from Python to the DB session = Session(engine) # last date in the dataset and year from last date calculations last_date = session.query(Measurement.date,Measurement.prcp).order_by(Measurement.date.desc()).first()[0] last_year = str(dt.datetime.strptime(last_date,"%Y-%m-%d")-dt.timedelta(days=365)) last_year_tobs_most_active = session.query(Measurement.station,Measurement.date,Measurement.tobs).\ filter(Measurement.date >=last_year, Measurement.date <=last_date, Measurement.station == 'USC00519281').\ order_by(Measurement.date).all() # Iterate through temperatures to append all key/values to temperature dictionary. # Append temperature dictionary to list, then return jsonify. 
all_mostactivetobs = [] for station, date, tobs in last_year_tobs_most_active: last_year_tobs_most_active_dict = {} last_year_tobs_most_active_dict["station"] = station last_year_tobs_most_active_dict["date"] = date last_year_tobs_most_active_dict["tobs"] = tobs all_mostactivetobs.append(last_year_tobs_most_active_dict) return jsonify(all_mostactivetobs) #---------------------------------------------------------------------------------------------------------------------- # Rubric - API Dynamic Route # The dynamic route does all of the following: # Start route # ✓ Route accepts the start date as a parameter from the URL # Start/end route # ✓ Route accepts the start and end dates as parameters from the URL # ✓ Returns the min, max, and average temperatures calculated from the given start date to the given end date ✓ Returns the min, max, and average temperatures calculated from the given start date to the end of the dataset #---------------------------------------------------------------------------------------------------------------------- @app.route("/api/v1.0/start/<start>/") def calc_start_temps(start): """Return a JSON API of the minimum temperature, the average temperature, and the max temperature... for all dates greater than and equal to the start date.""" print("Calculate Start Temps. API request received.") # Create our session (link) from Python to the DB session = Session(engine) # Query will accept start date in the format '%Y-%m-%d' and return the minimum, average, and maximum temperatures # for all dates from that date. start_temps = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\ filter(Measurement.date >= start).all() # Iterate through start temps to append all key/values to Start (Date) Calc Temp dictionary. # Append Start (Date) Calc Temp dictionary to list, then return jsonify. 
all_start_calc_temps = [] for result in start_temps: start_calc_temp_dict = {} start_calc_temp_dict["min_temp."] = result[0] start_calc_temp_dict["avg_temp."] = result[1] start_calc_temp_dict["max_temp."] = result[2] all_start_calc_temps.append(start_calc_temp_dict) return jsonify(all_start_calc_temps) @app.route("/api/v1.0/start_date/end_date/<start_date>/<end_date>") def calc_start_end_temps(start_date, end_date): # Return a JSON API of the minimum temperature, the average temperature, and the max temperature # between the start and end date inclusive. print("Calculate Start/End Temps. API request received.") # Create our session (link) from Python to the DB session = Session(engine) # Query will accept start and end dates in the format '%Y-%m-%d' and return the minimum, average, and # maximum temperatures for all dates in that range. start_end_temps = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\ filter(Measurement.date >= start_date).filter(Measurement.date <= end_date).all() # Iterate through start temps to append all key/values to Start (Date) Calc Temp dictionary. # Append Start (Date) Calc Temp dictionary to list, then return jsonify. all_calc_start_end_temps = [] for result in start_end_temps: calc_start_end_temp_dict = {} calc_start_end_temp_dict["min_temp."] = result[0] calc_start_end_temp_dict["avg_temp."] = result[1] calc_start_end_temp_dict["max_temp."] = result[2] all_calc_start_end_temps.append(calc_start_end_temp_dict) return jsonify(all_calc_start_end_temps) if __name__ == '__main__': app.run(debug=True) session.close() #---------------------------------------------------------------------------------------------------------------------- # THE END #----------------------------------------------------------------------------------------------------------------------
[ "sqlalchemy.func.min", "flask.Flask", "datetime.datetime.strptime", "sqlalchemy.ext.automap.automap_base", "sqlalchemy.create_engine", "sqlalchemy.orm.Session", "sqlalchemy.func.max", "sqlalchemy.func.strftime", "sqlalchemy.func.avg", "datetime.timedelta", "flask.jsonify" ]
[((1064, 1114), 'sqlalchemy.create_engine', 'create_engine', (['"""sqlite:///Resources/hawaii.sqlite"""'], {}), "('sqlite:///Resources/hawaii.sqlite')\n", (1077, 1114), False, 'from sqlalchemy import create_engine, func\n'), ((1170, 1184), 'sqlalchemy.ext.automap.automap_base', 'automap_base', ([], {}), '()\n', (1182, 1184), False, 'from sqlalchemy.ext.automap import automap_base\n'), ((1399, 1414), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (1406, 1414), False, 'from sqlalchemy.orm import Session\n'), ((1436, 1451), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (1441, 1451), False, 'from flask import Flask, jsonify\n'), ((2777, 2792), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (2784, 2792), False, 'from sqlalchemy.orm import Session\n'), ((3039, 3081), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['max_date', '"""%Y-%m-%d"""'], {}), "(max_date, '%Y-%m-%d')\n", (3059, 3081), True, 'import datetime as dt\n'), ((3877, 3904), 'flask.jsonify', 'jsonify', (['all_precipitations'], {}), '(all_precipitations)\n', (3884, 3904), False, 'from flask import Flask, jsonify\n'), ((4124, 4139), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (4131, 4139), False, 'from sqlalchemy.orm import Session\n'), ((4877, 4898), 'flask.jsonify', 'jsonify', (['all_stations'], {}), '(all_stations)\n', (4884, 4898), False, 'from flask import Flask, jsonify\n'), ((5147, 5162), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (5154, 5162), False, 'from sqlalchemy.orm import Session\n'), ((6226, 6253), 'flask.jsonify', 'jsonify', (['all_mostactivetobs'], {}), '(all_mostactivetobs)\n', (6233, 6253), False, 'from flask import Flask, jsonify\n'), ((7358, 7373), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (7365, 7373), False, 'from sqlalchemy.orm import Session\n'), ((8197, 8226), 'flask.jsonify', 'jsonify', (['all_start_calc_temps'], {}), 
'(all_start_calc_temps)\n', (8204, 8226), False, 'from flask import Flask, jsonify\n'), ((8624, 8639), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (8631, 8639), False, 'from sqlalchemy.orm import Session\n'), ((9554, 9587), 'flask.jsonify', 'jsonify', (['all_calc_start_end_temps'], {}), '(all_calc_start_end_temps)\n', (9561, 9587), False, 'from flask import Flask, jsonify\n'), ((3186, 3208), 'datetime.timedelta', 'dt.timedelta', ([], {'days': '(366)'}), '(days=366)\n', (3198, 3208), True, 'import datetime as dt\n'), ((5366, 5409), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['last_date', '"""%Y-%m-%d"""'], {}), "(last_date, '%Y-%m-%d')\n", (5386, 5409), True, 'import datetime as dt\n'), ((5409, 5431), 'datetime.timedelta', 'dt.timedelta', ([], {'days': '(365)'}), '(days=365)\n', (5421, 5431), True, 'import datetime as dt\n'), ((3405, 3448), 'sqlalchemy.func.strftime', 'func.strftime', (['"""%Y-%m-%d"""', 'Measurement.date'], {}), "('%Y-%m-%d', Measurement.date)\n", (3418, 3448), False, 'from sqlalchemy import create_engine, func\n'), ((3321, 3364), 'sqlalchemy.func.strftime', 'func.strftime', (['"""%Y-%m-%d"""', 'Measurement.date'], {}), "('%Y-%m-%d', Measurement.date)\n", (3334, 3364), False, 'from sqlalchemy import create_engine, func\n'), ((7561, 7587), 'sqlalchemy.func.min', 'func.min', (['Measurement.tobs'], {}), '(Measurement.tobs)\n', (7569, 7587), False, 'from sqlalchemy import create_engine, func\n'), ((7589, 7615), 'sqlalchemy.func.avg', 'func.avg', (['Measurement.tobs'], {}), '(Measurement.tobs)\n', (7597, 7615), False, 'from sqlalchemy import create_engine, func\n'), ((7617, 7643), 'sqlalchemy.func.max', 'func.max', (['Measurement.tobs'], {}), '(Measurement.tobs)\n', (7625, 7643), False, 'from sqlalchemy import create_engine, func\n'), ((2933, 2976), 'sqlalchemy.func.strftime', 'func.strftime', (['"""%Y-%m-%d"""', 'Measurement.date'], {}), "('%Y-%m-%d', Measurement.date)\n", (2946, 2976), False, 'from sqlalchemy 
import create_engine, func\n'), ((8844, 8870), 'sqlalchemy.func.min', 'func.min', (['Measurement.tobs'], {}), '(Measurement.tobs)\n', (8852, 8870), False, 'from sqlalchemy import create_engine, func\n'), ((8872, 8898), 'sqlalchemy.func.avg', 'func.avg', (['Measurement.tobs'], {}), '(Measurement.tobs)\n', (8880, 8898), False, 'from sqlalchemy import create_engine, func\n'), ((8900, 8926), 'sqlalchemy.func.max', 'func.max', (['Measurement.tobs'], {}), '(Measurement.tobs)\n', (8908, 8926), False, 'from sqlalchemy import create_engine, func\n')]
import argparse import logging import sys import time import glob import os from . import logger from . import config from . visualization import plot from . optimization import run from . optimization import utils class optimalTAD: def __init__(self): self.log = logger.initialize_logger() self.cfg = config.get_configuration() parser = argparse.ArgumentParser(description = 'optimalTAD: Topologically Associating Domain optimal set prediction', usage = ''' optimalTAD <command> [<args>] The basic optimalTAD commands are: run Run optimization process visualize Visualize results ''') parser.add_argument('command', help='Subcommand to run') args = parser.parse_args([self.cfg.get('basic', 'mode')]) arg = sys.argv[1:2] if arg: if arg[0] in ['run', 'visualize']: args.command = arg[0] if args.command not in ['run', 'visualize']: self.log.info('Unrecognized command!') parser.print_help() sys.exit(1) chippath = self.cfg.get('run','chipseq') self.chippath = glob.glob(os.path.expanduser(chippath)) getattr(self, args.command)() def run(self): start_time = time.time() hicpath = self.cfg.get('run','hic') hicpath = glob.glob(os.path.expanduser(hicpath)) parser = argparse.ArgumentParser(description='Run optimization process') parser.add_argument('--hic', type = str, nargs='+', default = sorted(hicpath), help = 'Path to iteratively corrected Hi-C data') parser.add_argument('--chipseq', type = str, nargs = '+', default = sorted(self.chippath), help = 'Path ChIP-seq data') parser.add_argument('--np', type = int, default = int(self.cfg['run']['np']), help = 'Number of processors') parser.add_argument('--resolution', type = int, default = int(self.cfg['run']['resolution']), help = 'Resolution') parser.add_argument('--stepsize', type = float, default = float(self.cfg['run']['stepsize']), help = 'Step size to increment gamma parameter') parser.add_argument('--gamma_max', type = float, default = float(self.cfg['run']['gamma_max']), help = 'Max gamma parameter') 
parser.add_argument('--hic_format', type = str, default = self.cfg['run']['hic_format'], help = 'Hi-C matrices input format for armatus') parser.add_argument('--empty_row_imputation', action = 'store_true', help = 'Missing rows (and columns) imputation') parser.add_argument('--truncation', action = 'store_true', help = 'Value truncation of input Hi-C-matrix') parser.add_argument('--log2_hic', action = 'store_true', help = 'log2 transformation of input Hi-C matrix') parser.add_argument('--log2_chip', action = 'store_true', help = 'log2 transformation of input ChIP-Seq track') parser.add_argument('--zscore_chip', action = 'store_true', help = 'Z-score transformation of ChIP-Seq track') parser.set_defaults(empty_row_imputation = eval(self.cfg['run']['empty_row_imputation'])) parser.set_defaults(truncation = eval(self.cfg['run']['truncation'])) parser.set_defaults(log2_hic = eval(self.cfg['run']['log2_hic'])) parser.set_defaults(log2_chip = eval(self.cfg['run']['log2_chip'])) parser.set_defaults(zscore_chip = eval(self.cfg['run']['zscore_chip'])) args = parser.parse_args(sys.argv[2:]) run.main(args, self.cfg, self.log) cpu_time = round(time.time()-start_time, 2) self.log.info('Execution time: {} sec'.format(cpu_time)) def visualize(self): start_time = time.time() chipname = utils.get_chipname(self.chippath, self.cfg['visualization']['samplename']) parser = argparse.ArgumentParser(description='Visualize results') parser.add_argument('--samplename', type = str, default = self.cfg['visualization']['samplename'], help = 'Samplename of Hi-C data') parser.add_argument('--region', type = str, default = self.cfg['visualization']['region'], help = 'Chromosomal coordinates') parser.add_argument('--resolution', type = int, default = int(self.cfg['run']['resolution']), help = 'Resolution') parser.add_argument('--chipseq', type = str, default = chipname, help = 'Path to ChIP-seq data') parser.add_argument('--log2_chip', action = 'store_true', help = 'log2 transformation of an input 
ChIP-Seq track') parser.add_argument('--zscore_chip', action = 'store_true', help = 'Z-score transformation of an input ChIP-Seq track') parser.add_argument('--rnaseq', type = str, default = str(self.cfg['visualization']['rnaseq']), help = 'RNA-seq data') parser.set_defaults(log2_chip = eval(self.cfg['run']['log2_chip'])) parser.set_defaults(zscore_chip = eval(self.cfg['run']['zscore_chip'])) args = parser.parse_args(sys.argv[2:]) plot.main(args, self.cfg['visualization'], self.log) cpu_time = round(time.time()-start_time, 2) self.log.info('Execution time: {} sec'.format(cpu_time)) if __name__ == '__main__': optimalTAD()
[ "os.path.expanduser", "time.time", "argparse.ArgumentParser", "sys.exit" ]
[((376, 667), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""optimalTAD: Topologically Associating Domain optimal set prediction"""', 'usage': '""" optimalTAD <command> [<args>]\n \nThe basic optimalTAD commands are:\n run Run optimization process\n visualize Visualize results """'}), '(description=\n \'optimalTAD: Topologically Associating Domain optimal set prediction\',\n usage=\n """ optimalTAD <command> [<args>]\n \nThe basic optimalTAD commands are:\n run Run optimization process\n visualize Visualize results """\n )\n', (399, 667), False, 'import argparse\n'), ((1285, 1296), 'time.time', 'time.time', ([], {}), '()\n', (1294, 1296), False, 'import time\n'), ((1424, 1487), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run optimization process"""'}), "(description='Run optimization process')\n", (1447, 1487), False, 'import argparse\n'), ((3693, 3704), 'time.time', 'time.time', ([], {}), '()\n', (3702, 3704), False, 'import time\n'), ((3816, 3872), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Visualize results"""'}), "(description='Visualize results')\n", (3839, 3872), False, 'import argparse\n'), ((1080, 1091), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1088, 1091), False, 'import sys\n'), ((1176, 1204), 'os.path.expanduser', 'os.path.expanduser', (['chippath'], {}), '(chippath)\n', (1194, 1204), False, 'import os\n'), ((1369, 1396), 'os.path.expanduser', 'os.path.expanduser', (['hicpath'], {}), '(hicpath)\n', (1387, 1396), False, 'import os\n'), ((3550, 3561), 'time.time', 'time.time', ([], {}), '()\n', (3559, 3561), False, 'import time\n'), ((5051, 5062), 'time.time', 'time.time', ([], {}), '()\n', (5060, 5062), False, 'import time\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Sensirion SHT2x humidity sensor. Drives SHT20, SHT21 and SHT25 humidity and temperature sensors. Sensirion `SHT2x Datasheets <https://www.sensirion.com/en/environmental-sensors/humidity-sensors/humidity-temperature-sensor-sht2x-digital-i2c-accurate/>` """ from i2cmod import SHT2X def example(): with SHT2X() as sensor: print("Identification: 0x{:016X}".format(sensor.serial_number)) for adc_res, reg_value in ( ('12/14', 0x02), (' 8/10', 0x03), ('10/13', 0x82), ('11/11', 0x83)): sensor.user_register = reg_value print("-" * 79) print("Resolution: {}-bit (rh/T)".format(adc_res)) print("Temperature: {:.2f} °C".format(sensor.centigrade)) print("Temperature: {:.2f} °F".format(sensor.fahrenheit)) print("Relative Humidity: {:.2f} % ".format(sensor.humidity)) print("User Register: 0x{:02X}".format(sensor.user_register)) if __name__ == '__main__': example()
[ "i2cmod.SHT2X" ]
[((360, 367), 'i2cmod.SHT2X', 'SHT2X', ([], {}), '()\n', (365, 367), False, 'from i2cmod import SHT2X\n')]
import argparse from os import listdir, path import numpy as np def convert(main_folder, output): ret = [] for label, class_folder in listdir(main_folder): class_folder_path = path.join(main_folder, class_folder) for img_name in listdir(class_folder_path): image_path = path.join(class_folder, img_name) ret.append([image_path, str(label)]) np.savetxt(output, ret, delimiter=" ", fmt="%s %i") if __name__ == "__main__": parser = argparse.ArgumentParser( description="Folder with classes subfolders to a file to train." ) parser.add_argument("--folder", "-f", help="Folder to convert.") parser.add_argument("--output", "-o", help="Output file.") args = parser.parse_args() convert(args.folder, args.output)
[ "os.path.join", "os.listdir", "argparse.ArgumentParser", "numpy.savetxt" ]
[((146, 166), 'os.listdir', 'listdir', (['main_folder'], {}), '(main_folder)\n', (153, 166), False, 'from os import listdir, path\n'), ((399, 450), 'numpy.savetxt', 'np.savetxt', (['output', 'ret'], {'delimiter': '""" """', 'fmt': '"""%s %i"""'}), "(output, ret, delimiter=' ', fmt='%s %i')\n", (409, 450), True, 'import numpy as np\n'), ((493, 587), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Folder with classes subfolders to a file to train."""'}), "(description=\n 'Folder with classes subfolders to a file to train.')\n", (516, 587), False, 'import argparse\n'), ((196, 232), 'os.path.join', 'path.join', (['main_folder', 'class_folder'], {}), '(main_folder, class_folder)\n', (205, 232), False, 'from os import listdir, path\n'), ((258, 284), 'os.listdir', 'listdir', (['class_folder_path'], {}), '(class_folder_path)\n', (265, 284), False, 'from os import listdir, path\n'), ((311, 344), 'os.path.join', 'path.join', (['class_folder', 'img_name'], {}), '(class_folder, img_name)\n', (320, 344), False, 'from os import listdir, path\n')]
#!/usr/bin/env python from setuptools import setup, find_packages import sys, os setup( name='anglerfish', version='0.4.1', description='Anglerfish, a tool to demultiplex Illumina libraries from ONT data', author='<NAME>', author_email='<EMAIL>', url='https://github.com/remiolsen/anglerfish', license='MIT', packages = find_packages(), install_requires=[ 'python-levenshtein', 'biopython', 'numpy' ], scripts=['./anglerfish.py'], zip_safe=False, classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: Healthcare Industry", "Intended Audience :: Science/Research", "License :: OSI Approved :: MIT License", "Operating System :: POSIX :: Linux", "Programming Language :: Python", "Topic :: Scientific/Engineering :: Medical Science Apps." ] )
[ "setuptools.find_packages" ]
[((353, 368), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (366, 368), False, 'from setuptools import setup, find_packages\n')]
# Copyright 2018 <NAME> and <NAME> # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import numpy as np import scipy.sparse as sp from sklearn.base import BaseEstimator, ClusterMixin class NormalizedKMeans(BaseEstimator, ClusterMixin): def __init__(self, iterations=20, center_num=5): self.center_num = center_num self.iterations = iterations def fit(self, X, k_center): self.k_center, self.ya = self.k_means(X, self.iterations, self.center_num, k_center, X.shape[0]) return self def k_means(self, data, iterations, center_num, k_center, rows): all_one = np.matrix([1] * rows).T all_one_k = np.matrix([1] * center_num) all_one_c = np.matrix([1] * k_center.shape[0]).T if sp.issparse(data): t2 = (data.power(2)).sum(axis=1).dot(all_one_k) else: t2 = (np.power(data, 2)).sum(axis=1).reshape((-1, 1)) * all_one_k t22 = data * 2 ya = None for _ in range(iterations): dist = t2 - t22 * k_center + all_one * np.power(k_center, 2).sum(axis=0) ya = (dist == (np.amin(dist) * all_one_k)) k_center = (data.T * ya) / (all_one_c * ya.sum(axis=0)) return k_center, ya
[ "numpy.matrix", "numpy.amin", "scipy.sparse.issparse", "numpy.power" ]
[((1145, 1172), 'numpy.matrix', 'np.matrix', (['([1] * center_num)'], {}), '([1] * center_num)\n', (1154, 1172), True, 'import numpy as np\n'), ((1241, 1258), 'scipy.sparse.issparse', 'sp.issparse', (['data'], {}), '(data)\n', (1252, 1258), True, 'import scipy.sparse as sp\n'), ((1101, 1122), 'numpy.matrix', 'np.matrix', (['([1] * rows)'], {}), '([1] * rows)\n', (1110, 1122), True, 'import numpy as np\n'), ((1193, 1227), 'numpy.matrix', 'np.matrix', (['([1] * k_center.shape[0])'], {}), '([1] * k_center.shape[0])\n', (1202, 1227), True, 'import numpy as np\n'), ((1602, 1615), 'numpy.amin', 'np.amin', (['dist'], {}), '(dist)\n', (1609, 1615), True, 'import numpy as np\n'), ((1541, 1562), 'numpy.power', 'np.power', (['k_center', '(2)'], {}), '(k_center, 2)\n', (1549, 1562), True, 'import numpy as np\n'), ((1352, 1369), 'numpy.power', 'np.power', (['data', '(2)'], {}), '(data, 2)\n', (1360, 1369), True, 'import numpy as np\n')]
import os from sys import platform def say_beep(n: int): for i in range(0, n): if platform == "darwin": os.system("say beep")
[ "os.system" ]
[((130, 151), 'os.system', 'os.system', (['"""say beep"""'], {}), "('say beep')\n", (139, 151), False, 'import os\n')]
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2022 HalfMarble LLC # Copyright 2019 <NAME> <<EMAIL>> from hm_gerber_tool.cam import FileSettings import hm_gerber_tool.rs274x from hm_gerber_tool.gerber_statements import * from hm_gerber_ex.gerber_statements import AMParamStmt, AMParamStmtEx, ADParamStmtEx from hm_gerber_ex.utility import rotate import re def loads(data, filename=None): cls = hm_gerber_tool.rs274x.GerberParser cls.SF = r"(?P<param>SF)(A(?P<a>{decimal}))?(B(?P<b>{decimal}))?".format(decimal=cls.DECIMAL) cls.PARAMS = (cls.FS, cls.MO, cls.LP, cls.AD_CIRCLE, cls.AD_RECT, cls.AD_OBROUND, cls.AD_POLY, cls.AD_MACRO, cls.AM, cls.AS, cls.IF, cls.IN, cls.IP, cls.IR, cls.MI, cls.OF, cls.SF, cls.LN) cls.PARAM_STMT = [re.compile(r"%?{0}\*%?".format(p)) for p in cls.PARAMS] return cls().parse_raw(data, filename) def write_gerber_header(file, settings): file.write('%s\n%s\n%%IPPOS*%%\n' % ( MOParamStmt('MO', settings.units).to_gerber(settings), FSParamStmt('FS', settings.zero_suppression, settings.notation, settings.format).to_gerber(settings))) class GerberFile(hm_gerber_tool.rs274x.GerberFile): @classmethod def from_gerber_file(cls, gerber_file): if not isinstance(gerber_file, hm_gerber_tool.rs274x.GerberFile): raise Exception('only gerber.rs274x.GerberFile object is specified') return cls(gerber_file.statements, gerber_file.settings, gerber_file.primitives, gerber_file.apertures, gerber_file.filename) def __init__(self, statements, settings, primitives, apertures, filename=None): super(GerberFile, self).__init__(statements, settings, primitives, apertures, filename) self.context = GerberContext.from_settings(self.settings) self.aperture_macros = {} self.aperture_defs = [] self.main_statements = [] for stmt in self.statements: type, stmts = self.context.normalize_statement(stmt) if type == self.context.TYPE_AM: for mdef in stmts: self.aperture_macros[mdef.name] = mdef elif type == self.context.TYPE_AD: self.aperture_defs.extend(stmts) elif type == 
self.context.TYPE_MAIN: self.main_statements.extend(stmts) if self.context.angle != 0: self.rotate(self.context.angle) if self.context.is_negative: self.negate_polarity() self.context.notation = 'absolute' self.context.zeros = 'trailing' def write(self, filename=None): self.context.notation = 'absolute' self.context.zeros = 'trailing' self.context.format = self.format self.units = self.units filename = filename if filename is not None else self.filename with open(filename, 'w') as f: write_gerber_header(f, self.context) for macro in self.aperture_macros: f.write(self.aperture_macros[macro].to_gerber(self.context) + '\n') for aperture in self.aperture_defs: f.write(aperture.to_gerber(self.context) + '\n') for statement in self.main_statements: f.write(statement.to_gerber(self.context) + '\n') f.write('M02*\n') def to_inch(self): if self.units == 'metric': for macro in self.aperture_macros: self.aperture_macros[macro].to_inch() for aperture in self.aperture_defs: aperture.to_inch() for statement in self.statements: statement.to_inch() self.units = 'inch' self.context.units = 'inch' def to_metric(self): if self.units == 'inch': for macro in self.aperture_macros: self.aperture_macros[macro].to_metric() for aperture in self.aperture_defs: aperture.to_metric() for statement in self.statements: statement.to_metric() self.units = 'metric' self.context.units = 'metric' def offset(self, x_offset=0, y_offset=0): for statement in self.main_statements: if isinstance(statement, CoordStmt): if statement.x is not None: statement.x += x_offset if statement.y is not None: statement.y += y_offset for primitive in self.primitives: primitive.offset(x_offset, y_offset) def rotate(self, angle, center=(0, 0)): if angle % 360 == 0: return last_x = 0 last_y = 0 last_rx = 0 last_ry = 0 # TODO major workaround verify! 
# PCB houses do not like rotated AM macros, so keep them same, but rotate arguments and points instead # self._generalize_aperture() # for name in self.aperture_macros: # self.aperture_macros[name].rotate(angle, center) if angle != 0: for aperture in self.aperture_defs: aperture.flip() for statement in self.main_statements: if isinstance(statement, CoordStmt) and statement.x is not None and statement.y is not None: if statement.i is not None and statement.j is not None: cx = last_x + statement.i cy = last_y + statement.j cx, cy = rotate(cx, cy, angle, center) statement.i = cx - last_rx statement.j = cy - last_ry last_x = statement.x last_y = statement.y last_rx, last_ry = rotate(statement.x, statement.y, angle, center) statement.x = last_rx statement.y = last_ry def negate_polarity(self): for statement in self.main_statements: if isinstance(statement, LPParamStmt): statement.lp = 'dark' if statement.lp == 'clear' else 'clear' def _generalize_aperture(self): CIRCLE = 0 RECTANGLE = 1 LANDSCAPE_OBROUND = 2 PORTRATE_OBROUND = 3 POLYGON = 4 macro_defs = [ ('MACC', AMParamStmtEx.circle), ('MACR', AMParamStmtEx.rectangle), ('MACLO', AMParamStmtEx.landscape_obround), ('MACPO', AMParamStmtEx.portrate_obround), ('MACP', AMParamStmtEx.polygon) ] need_to_change = False for statement in self.aperture_defs: if isinstance(statement, ADParamStmt) and statement.shape in ['R', 'O', 'P']: need_to_change = True if need_to_change: for idx in range(0, len(macro_defs)): macro_def = macro_defs[idx] name = macro_def[0] num = 1 while name in self.aperture_macros: name = '%s_%d' % (macro_def[0], num) num += 1 self.aperture_macros[name] = macro_def[1](name, self.units) macro_defs[idx] = (name, macro_def[1]) for statement in self.aperture_defs: if isinstance(statement, ADParamStmt): if statement.shape == 'R': statement.shape = macro_defs[RECTANGLE][0] elif statement.shape == 'O': x = statement.modifiers[0][0] if len(statement.modifiers[0]) > 0 else 0 y = statement.modifiers[0][1] if 
len(statement.modifiers[0]) > 1 else 0 if x < y: statement.shape = macro_defs[PORTRATE_OBROUND][0] elif x > y: statement.shape = macro_defs[LANDSCAPE_OBROUND][0] else: statement.shape = macro_defs[CIRCLE][0] elif statement.shape == 'P': statement.shape = macro_defs[POLYGON][0] class GerberContext(FileSettings): TYPE_NONE = 'none' TYPE_AM = 'am' TYPE_AD = 'ad' TYPE_MAIN = 'main' IP_LINEAR = 'linear' IP_ARC = 'arc' DIR_CLOCKWISE = 'cw' DIR_COUNTERCLOCKWISE = 'ccw' ignored_stmt = ('FSParamStmt', 'MOParamStmt', 'ASParamStmt', 'INParamStmt', 'IPParamStmt', 'IRParamStmt', 'MIParamStmt', 'OFParamStmt', 'SFParamStmt', 'LNParamStmt', 'CommentStmt', 'EofStmt',) @classmethod def from_settings(cls, settings): return cls(settings.notation, settings.units, settings.zero_suppression, settings.format, settings.zeros, settings.angle_units) def __init__(self, notation='absolute', units='inch', zero_suppression=None, format=(2, 5), zeros=None, angle_units='degrees', name=None, mirror=(False, False), offset=(0., 0.), scale=(1., 1.), angle=0., axis='xy'): super(GerberContext, self).__init__(notation, units, zero_suppression, format, zeros, angle_units) self.name = name self.mirror = mirror self.offset = offset self.scale = scale self.angle = angle self.axis = axis self.matrix = (1, 0, 1, 0, 1, 1) self.is_negative = False self.is_first_coordinate = True self.no_polarity = True self.in_single_quadrant_mode = False self.op = None self.interpolation = self.IP_LINEAR self.direction = self.DIR_CLOCKWISE self.x = 0. self.y = 0. 
def normalize_statement(self, stmt): additional_stmts = None if isinstance(stmt, INParamStmt): self.name = stmt.name elif isinstance(stmt, MIParamStmt): self.mirror = (stmt.a, stmt.b) self._update_matrix() elif isinstance(stmt, OFParamStmt): self.offset = (stmt.a, stmt.b) self._update_matrix() elif isinstance(stmt, SFParamStmt): self.scale = (stmt.a, stmt.b) self._update_matrix() elif isinstance(stmt, ASParamStmt): self.axis = 'yx' if stmt.mode == 'AYBX' else 'xy' self._update_matrix() elif isinstance(stmt, IRParamStmt): self.angle = stmt.angle elif isinstance(stmt, AMParamStmt) and not isinstance(stmt, AMParamStmtEx): stmt = AMParamStmtEx.from_stmt(stmt) return (self.TYPE_AM, [stmt]) elif isinstance(stmt, ADParamStmt) and not isinstance(stmt, AMParamStmtEx): stmt = ADParamStmtEx.from_stmt(stmt) return (self.TYPE_AD, [stmt]) elif isinstance(stmt, QuadrantModeStmt): self.in_single_quadrant_mode = stmt.mode == 'single-quadrant' stmt.mode = 'multi-quadrant' elif isinstance(stmt, IPParamStmt): self.is_negative = stmt.ip == 'negative' elif isinstance(stmt, LPParamStmt): self.no_polarity = False elif isinstance(stmt, CoordStmt): self._normalize_coordinate(stmt) if self.is_first_coordinate: self.is_first_coordinate = False if self.no_polarity: additional_stmts = [LPParamStmt('LP', 'dark'), stmt] if type(stmt).__name__ in self.ignored_stmt: return (self.TYPE_NONE, None) elif additional_stmts is not None: return (self.TYPE_MAIN, additional_stmts) else: return (self.TYPE_MAIN, [stmt]) def _update_matrix(self): if self.axis == 'xy': mx = -1 if self.mirror[0] else 1 my = -1 if self.mirror[1] else 1 self.matrix = ( self.scale[0] * mx, self.offset[0], self.scale[1] * my, self.offset[1], self.scale[0] * mx, self.scale[1] * my) else: mx = -1 if self.mirror[1] else 1 my = -1 if self.mirror[0] else 1 self.matrix = ( self.scale[1] * mx, self.offset[1], self.scale[0] * my, self.offset[0], self.scale[1] * mx, self.scale[0] * my) def _normalize_coordinate(self, stmt): if stmt.function 
== 'G01' or stmt.function == 'G1': self.interpolation = self.IP_LINEAR elif stmt.function == 'G02' or stmt.function == 'G2': self.interpolation = self.IP_ARC self.direction = self.DIR_CLOCKWISE if self.mirror[0] != self.mirror[1]: stmt.function = 'G03' elif stmt.function == 'G03' or stmt.function == 'G3': self.interpolation = self.IP_ARC self.direction = self.DIR_COUNTERCLOCKWISE if self.mirror[0] != self.mirror[1]: stmt.function = 'G02' if stmt.only_function: return last_x = self.x last_y = self.y if self.notation == 'absolute': x = stmt.x if stmt.x is not None else self.x y = stmt.y if stmt.y is not None else self.y else: x = self.x + stmt.x if stmt.x is not None else 0 y = self.y + stmt.y if stmt.y is not None else 0 self.x, self.y = x, y self.op = stmt.op if stmt.op is not None else self.op stmt.op = self.op stmt.x = self.matrix[0] * x + self.matrix[1] stmt.y = self.matrix[2] * y + self.matrix[3] if stmt.op == 'D01' and self.interpolation == self.IP_ARC: qx, qy = 1, 1 if self.in_single_quadrant_mode: if self.direction == self.DIR_CLOCKWISE: qx = 1 if y > last_y else -1 qy = 1 if x < last_x else -1 else: qx = 1 if y < last_y else -1 qy = 1 if x > last_x else -1 if last_x == x and last_y == y: qx, qy = 0, 0 stmt.i = qx * self.matrix[4] * stmt.i if stmt.i is not None else 0 stmt.j = qy * self.matrix[5] * stmt.j if stmt.j is not None else 0
[ "hm_gerber_ex.gerber_statements.ADParamStmtEx.from_stmt", "hm_gerber_ex.utility.rotate", "hm_gerber_ex.gerber_statements.AMParamStmtEx.from_stmt" ]
[((5745, 5792), 'hm_gerber_ex.utility.rotate', 'rotate', (['statement.x', 'statement.y', 'angle', 'center'], {}), '(statement.x, statement.y, angle, center)\n', (5751, 5792), False, 'from hm_gerber_ex.utility import rotate\n'), ((5512, 5541), 'hm_gerber_ex.utility.rotate', 'rotate', (['cx', 'cy', 'angle', 'center'], {}), '(cx, cy, angle, center)\n', (5518, 5541), False, 'from hm_gerber_ex.utility import rotate\n'), ((10559, 10588), 'hm_gerber_ex.gerber_statements.AMParamStmtEx.from_stmt', 'AMParamStmtEx.from_stmt', (['stmt'], {}), '(stmt)\n', (10582, 10588), False, 'from hm_gerber_ex.gerber_statements import AMParamStmt, AMParamStmtEx, ADParamStmtEx\n'), ((10734, 10763), 'hm_gerber_ex.gerber_statements.ADParamStmtEx.from_stmt', 'ADParamStmtEx.from_stmt', (['stmt'], {}), '(stmt)\n', (10757, 10763), False, 'from hm_gerber_ex.gerber_statements import AMParamStmt, AMParamStmtEx, ADParamStmtEx\n')]
""" Sample view for group aware projects """ from django.conf import settings from django.core.exceptions import ObjectDoesNotExist from django.contrib.auth.decorators import login_required from django.http import Http404, HttpResponseRedirect from django.shortcuts import render_to_response, get_object_or_404 from django.template import RequestContext from uni_form.helpers import FormHelper, Submit, Reset from wall.models import Post from wall.forms import WallForm @login_required def list(request, group_slug=None, bridge=None, form_class=WallForm): # If there is a bridge then get the group if bridge is not None: try: group = bridge.get_group(group_slug) except ObjectDoesNotExist: raise Http404 else: group = None # If we have a group we fetch the wall from the group if group: posts = group.content_objects(Post) else: posts = Post.objects.all() # check on user authentication or if user is member of a group if not request.user.is_authenticated(): is_member = False else: is_member = group.user_is_member(request.user) if is_member: if request.method == "POST": if request.user.is_authenticated(): form = form_class(request.user, group, request.POST) if form.is_valid(): post = form.save(commit = False) post.creator = request.user if group: group.associate(post) post.save() if group: redirect_to = bridge.reverse("wall_list", group) else: redirect_to = reverse("wall_list") return HttpResponseRedirect(redirect_to) else: form = form_class(request.user, group) else: form = None return render_to_response("wall/list.html", { "group": group, "posts": posts, "form": form, "is_member": is_member }, context_instance=RequestContext(request)) def detail(request, slug, group_slug=None, bridge=None): # If there is a bridge then get the group if bridge is not None: try: group = bridge.get_group(group_slug) except ObjectDoesNotExist: raise Http404 else: group = None # If we have a group we fetch the post from the group #if group: # posts = group.content_objects(Post) #else: post = 
get_object_or_404(Post, slug=slug) # check on user authentication or if user is member of a group if not request.user.is_authenticated(): is_member = False else: is_member = group.user_is_member(request.user) return render_to_response("wall/detail.html", { "group": group, "post": post, "is_member": is_member }, context_instance=RequestContext(request))
[ "django.http.HttpResponseRedirect", "django.shortcuts.get_object_or_404", "django.template.RequestContext", "wall.models.Post.objects.all" ]
[((2629, 2663), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Post'], {'slug': 'slug'}), '(Post, slug=slug)\n', (2646, 2663), False, 'from django.shortcuts import render_to_response, get_object_or_404\n'), ((941, 959), 'wall.models.Post.objects.all', 'Post.objects.all', ([], {}), '()\n', (957, 959), False, 'from wall.models import Post\n'), ((2169, 2192), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (2183, 2192), False, 'from django.template import RequestContext\n'), ((3029, 3052), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (3043, 3052), False, 'from django.template import RequestContext\n'), ((1852, 1885), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['redirect_to'], {}), '(redirect_to)\n', (1872, 1885), False, 'from django.http import Http404, HttpResponseRedirect\n')]
# Helpers common to the gui from contextlib import contextmanager from PyQt5 import QtWidgets @contextmanager def safe_command(command): """ Helper to suppress AttributeErrors from commands Args: command (callable): The command to run. Any AttributeError raised by the command will be suppressed. """ try: yield command except AttributeError as err: pass def show_error(title, message, parent): """ Helper to show a modal error window Args: title (str): Title for the error window message (str): Message text to display parent (object): Parent window for the modal. This window will be disabled while the modal is visible. Defaults to the main window. """ QtWidgets.QMessageBox.warning(parent, title, message, QtWidgets.QMessageBox.Ok)
[ "PyQt5.QtWidgets.QMessageBox.warning" ]
[((777, 856), 'PyQt5.QtWidgets.QMessageBox.warning', 'QtWidgets.QMessageBox.warning', (['parent', 'title', 'message', 'QtWidgets.QMessageBox.Ok'], {}), '(parent, title, message, QtWidgets.QMessageBox.Ok)\n', (806, 856), False, 'from PyQt5 import QtWidgets\n')]
import simplejson as json from .telegram_field import TelegramField class WTelegramHeader(object): def __init__(self): # self._startField = TelegramField() self._lField = TelegramField() self._cField = TelegramField() # self._crcField = TelegramField() # self._stopField = TelegramField() self._headerLength = 2 # self._headerLengthCRCStop = 8 @property def headerLength(self): return self._headerLength # @property # def headerLengthCRCStop(self): # return self._headerLengthCRCStop @property def startField(self): return self._startField @startField.setter def startField(self, value): self._startField = TelegramField(value) @property def lField(self): return self._lField @lField.setter def lField(self, value): self._lField = TelegramField(value) @property def cField(self): return self._cField @cField.setter def cField(self, value): self._cField = TelegramField(value) @property def interpreted(self): return { 'length': hex(self.lField.parts[0]), 'c': hex(self.cField.parts[0]), } # @property # def crcField(self): # return self._crcField # @crcField.setter # def crcField(self, value): # self._crcField = TelegramField(value) # @property # def stopField(self): # return self._stopField # @stopField.setter # def stopField(self, value): # self._stopField = TelegramField(value) def load(self, hat): header = hat if isinstance(hat, str): header = list(map(ord, hat)) # self.startField = header[0] self.lField = header[0] self.cField = header[1] # self.crcField = header[-2] # self.stopField = header[-1] def to_JSON(self): return json.dumps(self.interpreted, sort_keys=False, indent=4, use_decimal=True)
[ "simplejson.dumps" ]
[((1940, 2013), 'simplejson.dumps', 'json.dumps', (['self.interpreted'], {'sort_keys': '(False)', 'indent': '(4)', 'use_decimal': '(True)'}), '(self.interpreted, sort_keys=False, indent=4, use_decimal=True)\n', (1950, 2013), True, 'import simplejson as json\n')]
# Based on spec_tests.py from # https://github.com/commonmark/commonmark-spec/blob/master/test/spec_tests.py # and # https://github.com/github/cmark-gfm/blob/master/test/spec_tests.py import sys import os import os.path import re import md4c import md4c.domparser import pytest from normalize import normalize_html extension_flags = { 'table': md4c.MD_FLAG_TABLES, 'urlautolink': md4c.MD_FLAG_PERMISSIVEURLAUTOLINKS, 'emailautolink': md4c.MD_FLAG_PERMISSIVEEMAILAUTOLINKS, 'wwwautolink': md4c.MD_FLAG_PERMISSIVEWWWAUTOLINKS, 'tasklist': md4c.MD_FLAG_TASKLISTS, 'strikethrough': md4c.MD_FLAG_STRIKETHROUGH, 'underline': md4c.MD_FLAG_UNDERLINE, 'wikilink': md4c.MD_FLAG_WIKILINKS, 'latexmath': md4c.MD_FLAG_LATEXMATHSPANS, #TODO Add test cases for the rest of the flags # (including combination flags) } def get_tests(specfile): line_number = 0 start_line = 0 end_line = 0 example_number = 0 markdown_lines = [] html_lines = [] state = 0 # 0 regular text, 1 markdown example, 2 html output extensions = [] headertext = '' tests = [] header_re = re.compile('#+ ') full_specfile = os.path.join(sys.path[0], 'spec', specfile) with open(full_specfile, 'r', encoding='utf-8', newline='\n') as specf: for line in specf: line_number = line_number + 1 l = line.strip() if l.startswith("`" * 32 + " example"): state = 1 extensions = l[32 + len(" example"):].split() elif l == "`" * 32: state = 0 example_number = example_number + 1 end_line = line_number md4c_version = None for extension in extensions: if extension.startswith('md4c-'): md4c_version = extension break if md4c_version is not None: extensions.remove(md4c_version) md4c_version = md4c_version[5:] if 'disabled' not in extensions: tests.append({ "markdown":''.join(markdown_lines).replace('→',"\t"), "html":''.join(html_lines).replace('→',"\t"), "example": example_number, "start_line": start_line, "end_line": end_line, "section": headertext, "file": specfile, "md4c_version": md4c_version, "extensions": extensions}) start_line = 0 
markdown_lines = [] html_lines = [] elif l == ".": state = 2 elif state == 1: if start_line == 0: start_line = line_number - 1 markdown_lines.append(line) elif state == 2: html_lines.append(line) elif state == 0 and re.match(header_re, line): headertext = header_re.sub('', line).strip() return tests def collect_all_tests(): all_tests = [] specfiles = os.listdir(os.path.join(sys.path[0], 'spec')) for specfile in specfiles: all_tests.extend(get_tests(specfile)) return all_tests def skip_if_older_version(running_version, test_version): """Skip the current test if the running version of MD4C is older than the version required for the test :param running_version: Running version of MD4C, e.g. "0.4.8" :type running_version: str :param test_version: Version of MD4C required for the test :type test_version: str """ if running_version is None or test_version is None: return running_version = [int(x) for x in running_version.split('.')] test_version = [int(x) for x in test_version.split('.')] for r, t in zip(running_version, test_version): if r < t: pytest.skip() for t in test_version[len(running_version):]: if t > 0: pytest.skip("Test requires newer MD4C") @pytest.fixture def md4c_version(pytestconfig): return pytestconfig.getoption('--md4c-version') @pytest.mark.parametrize( 'test_case', collect_all_tests(), ids=lambda x: f'{x["file"]}:{x["start_line"]}-{x["section"]}') def test_html_output(test_case, md4c_version): """Test HTMLRenderer with default render flags on the given example""" skip_if_older_version(md4c_version, test_case['md4c_version']) parser_flags = 0 for extension in test_case['extensions']: parser_flags |= extension_flags[extension] renderer = md4c.HTMLRenderer(parser_flags, 0) output = renderer.parse(test_case['markdown']) assert normalize_html(output) == normalize_html(test_case['html'], False) @pytest.mark.parametrize( 'test_case', collect_all_tests(), ids=lambda x: f'{x["file"]}:{x["start_line"]}-{x["section"]}') def test_domparser_html(test_case, 
md4c_version): """Test that the output for DOMParser render() matches HTMLRenderer char for char""" skip_if_older_version(md4c_version, test_case['md4c_version']) parser_flags = 0 for extension in test_case['extensions']: parser_flags |= extension_flags[extension] html_renderer = md4c.HTMLRenderer(parser_flags) html_output = html_renderer.parse(test_case['markdown']) dom_parser = md4c.domparser.DOMParser(parser_flags) dom_output = dom_parser.parse(test_case['markdown']).render() assert html_output == dom_output #TODO Test keyword arguments for flags #TODO Test HTML flags #TODO Test mixing keyword arguments and traditional flags
[ "re.compile", "md4c.domparser.DOMParser", "os.path.join", "re.match", "md4c.HTMLRenderer", "normalize.normalize_html", "pytest.skip" ]
[((1138, 1155), 're.compile', 're.compile', (['"""#+ """'], {}), "('#+ ')\n", (1148, 1155), False, 'import re\n'), ((1177, 1220), 'os.path.join', 'os.path.join', (['sys.path[0]', '"""spec"""', 'specfile'], {}), "(sys.path[0], 'spec', specfile)\n", (1189, 1220), False, 'import os\n'), ((4686, 4720), 'md4c.HTMLRenderer', 'md4c.HTMLRenderer', (['parser_flags', '(0)'], {}), '(parser_flags, 0)\n', (4703, 4720), False, 'import md4c\n'), ((5334, 5365), 'md4c.HTMLRenderer', 'md4c.HTMLRenderer', (['parser_flags'], {}), '(parser_flags)\n', (5351, 5365), False, 'import md4c\n'), ((5445, 5483), 'md4c.domparser.DOMParser', 'md4c.domparser.DOMParser', (['parser_flags'], {}), '(parser_flags)\n', (5469, 5483), False, 'import md4c\n'), ((3209, 3242), 'os.path.join', 'os.path.join', (['sys.path[0]', '"""spec"""'], {}), "(sys.path[0], 'spec')\n", (3221, 3242), False, 'import os\n'), ((4784, 4806), 'normalize.normalize_html', 'normalize_html', (['output'], {}), '(output)\n', (4798, 4806), False, 'from normalize import normalize_html\n'), ((4810, 4850), 'normalize.normalize_html', 'normalize_html', (["test_case['html']", '(False)'], {}), "(test_case['html'], False)\n", (4824, 4850), False, 'from normalize import normalize_html\n'), ((3992, 4005), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (4003, 4005), False, 'import pytest\n'), ((4086, 4125), 'pytest.skip', 'pytest.skip', (['"""Test requires newer MD4C"""'], {}), "('Test requires newer MD4C')\n", (4097, 4125), False, 'import pytest\n'), ((3031, 3056), 're.match', 're.match', (['header_re', 'line'], {}), '(header_re, line)\n', (3039, 3056), False, 'import re\n')]
""" Methods for user login """ from cgi import escape from google.appengine.ext import ndb def login_fields_complete(post_data): """ validates that both login fields were filled in :param post_data: :return: """ try: user_id = escape(post_data['user_id'], quote=True) except KeyError: user_id = False try: password = escape(post_data['password'], quote=True) except KeyError: password = False if user_id and password: return {'complete': True, 'user_id': user_id, 'password': password} else: return {'complete': False} def valid_user_id_check(user_id): """ checks that user exists :param user_id: :return: """ user_key = ndb.Key('User', user_id) user = user_key.get() if user: return True else: return False
[ "cgi.escape", "google.appengine.ext.ndb.Key" ]
[((746, 770), 'google.appengine.ext.ndb.Key', 'ndb.Key', (['"""User"""', 'user_id'], {}), "('User', user_id)\n", (753, 770), False, 'from google.appengine.ext import ndb\n'), ((262, 302), 'cgi.escape', 'escape', (["post_data['user_id']"], {'quote': '(True)'}), "(post_data['user_id'], quote=True)\n", (268, 302), False, 'from cgi import escape\n'), ((377, 418), 'cgi.escape', 'escape', (["post_data['password']"], {'quote': '(True)'}), "(post_data['password'], quote=True)\n", (383, 418), False, 'from cgi import escape\n')]
# -*- coding: utf-8 -*- """ Created on Fri Nov 16 13:20:51 2018 lstm encoder decoder for hands @author: Γιώργος """ import torch import torch.nn as nn from utils.file_utils import print_and_save class LSTM_Hands_encdec(nn.Module): # source: https://github.com/yunjey/pytorch-tutorial/blob/master/tutorials/02-intermediate/bidirectional_recurrent_neural_network/main.py def __init__(self, input_size, row_hidden, time_hidden, num_layers, num_classes, dropout, log_file=None): super(LSTM_Hands_encdec, self).__init__() self.row_hidden = row_hidden self.time_hidden = time_hidden self.num_layers = num_layers self.dropout = dropout self.log_file=log_file self.row_lstm = nn.LSTM(input_size, row_hidden, 1, bias=True, batch_first=False, dropout=dropout, bidirectional=False) self.time_lstm = nn.LSTM(row_hidden, time_hidden, num_layers, bias=True, batch_first=False, dropout=dropout, bidirectional=False) self.fc = nn.Linear(time_hidden, num_classes) def forward(self, seq_height_width, seq_lengths): # seq_batch_coords 256, x, 456 h0_row = torch.zeros(1, 1, self.row_hidden).cuda() c0_row = torch.zeros(1, 1, self.row_hidden).cuda() h0_time = torch.zeros(self.num_layers, 1, self.time_hidden).cuda() c0_time = torch.zeros(self.num_layers, 1, self.time_hidden).cuda() im_hiddens = [] for i in range(seq_height_width.size(0)): row_out, _ = self.row_lstm(seq_height_width[i].unsqueeze(1), (h0_row, c0_row)) im_hiddens.append(row_out[-1]) # can also concatenate the hiddens for an image time_input = torch.stack(im_hiddens)#.unsqueeze(1) time_out, _ = self.time_lstm(time_input, (h0_time, c0_time)) out = self.fc(time_out[-1]) return out
[ "torch.nn.LSTM", "torch.stack", "torch.zeros", "torch.nn.Linear" ]
[((746, 853), 'torch.nn.LSTM', 'nn.LSTM', (['input_size', 'row_hidden', '(1)'], {'bias': '(True)', 'batch_first': '(False)', 'dropout': 'dropout', 'bidirectional': '(False)'}), '(input_size, row_hidden, 1, bias=True, batch_first=False, dropout=\n dropout, bidirectional=False)\n', (753, 853), True, 'import torch.nn as nn\n'), ((915, 1031), 'torch.nn.LSTM', 'nn.LSTM', (['row_hidden', 'time_hidden', 'num_layers'], {'bias': '(True)', 'batch_first': '(False)', 'dropout': 'dropout', 'bidirectional': '(False)'}), '(row_hidden, time_hidden, num_layers, bias=True, batch_first=False,\n dropout=dropout, bidirectional=False)\n', (922, 1031), True, 'import torch.nn as nn\n'), ((1080, 1115), 'torch.nn.Linear', 'nn.Linear', (['time_hidden', 'num_classes'], {}), '(time_hidden, num_classes)\n', (1089, 1115), True, 'import torch.nn as nn\n'), ((1809, 1832), 'torch.stack', 'torch.stack', (['im_hiddens'], {}), '(im_hiddens)\n', (1820, 1832), False, 'import torch\n'), ((1246, 1280), 'torch.zeros', 'torch.zeros', (['(1)', '(1)', 'self.row_hidden'], {}), '(1, 1, self.row_hidden)\n', (1257, 1280), False, 'import torch\n'), ((1305, 1339), 'torch.zeros', 'torch.zeros', (['(1)', '(1)', 'self.row_hidden'], {}), '(1, 1, self.row_hidden)\n', (1316, 1339), False, 'import torch\n'), ((1374, 1423), 'torch.zeros', 'torch.zeros', (['self.num_layers', '(1)', 'self.time_hidden'], {}), '(self.num_layers, 1, self.time_hidden)\n', (1385, 1423), False, 'import torch\n'), ((1449, 1498), 'torch.zeros', 'torch.zeros', (['self.num_layers', '(1)', 'self.time_hidden'], {}), '(self.num_layers, 1, self.time_hidden)\n', (1460, 1498), False, 'import torch\n')]
import re with open("day14.txt", "r") as f: data = f.read().splitlines() def apply_mask(mask, value): binary_value = f"{value:>036b}" masked_value = "".join( value if mask_value == "X" else mask_value for value, mask_value in zip(binary_value, mask) ) return int(masked_value, 2) memory = {} for line in data: if "mask" in line: mask = line.split(" = ")[-1] else: address, value = re.findall("(\d+)", line) memory[address] = apply_mask(mask, int(value)) print(sum(memory.values()))
[ "re.findall" ]
[((446, 472), 're.findall', 're.findall', (['"""(\\\\d+)"""', 'line'], {}), "('(\\\\d+)', line)\n", (456, 472), False, 'import re\n')]
# ------------------------------------------------------------------------------ # Test Many Things Utilities # ------------------------------------------------------------------------------ import sys import datetime as dt import pytz from django.test import TestCase from django.utils import translation from ls.joyous.utils.manythings import (toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin) # ------------------------------------------------------------------------------ class Test(TestCase): def testToOrdinal(self): self.assertEqual(toOrdinal(-1), "last") self.assertEqual(toOrdinal(-2), "penultimate") self.assertEqual(toOrdinal(1), "first") self.assertEqual(toOrdinal(2), "second") self.assertEqual(toOrdinal(3), "third") self.assertEqual(toOrdinal(4), "fourth") self.assertEqual(toOrdinal(5), "fifth") def testToOrdinalNum(self): self.assertEqual(toOrdinal(6), "6th") self.assertEqual(toOrdinal(11), "11th") self.assertEqual(toOrdinal(12), "12th") self.assertEqual(toOrdinal(13), "13th") self.assertEqual(toOrdinal(21), "21st") self.assertEqual(toOrdinal(102), "102nd") self.assertEqual(toOrdinal(6543), "6543rd") def testToTheOrdinal(self): self.assertEqual(toTheOrdinal(-1), "The last") self.assertEqual(toTheOrdinal(-2, False), "the penultimate") self.assertEqual(toTheOrdinal(1), "The first") self.assertEqual(toTheOrdinal(2), "The second") self.assertEqual(toTheOrdinal(3), "The third") self.assertEqual(toTheOrdinal(4), "The fourth") self.assertEqual(toTheOrdinal(5), "The fifth") def testToTheOrdinalNum(self): self.assertEqual(toTheOrdinal(6), "The 6th") self.assertEqual(toTheOrdinal(11), "The 11th") self.assertEqual(toTheOrdinal(12), "The 12th") self.assertEqual(toTheOrdinal(13), "The 13th") self.assertEqual(toTheOrdinal(21), "The 21st") self.assertEqual(toTheOrdinal(102), "The 102nd") self.assertEqual(toTheOrdinal(6543), "The 6543rd") def testToDaysOffsetStr(self): self.assertEqual(toDaysOffsetStr(-3), "Three days before") self.assertEqual(toDaysOffsetStr(-2), "Two days 
before") self.assertEqual(toDaysOffsetStr(-1), "The day before") self.assertEqual(toDaysOffsetStr(0), "") self.assertEqual(toDaysOffsetStr(1), "The day after") self.assertEqual(toDaysOffsetStr(2), "Two days after") self.assertEqual(toDaysOffsetStr(3), "Three days after") self.assertEqual(toDaysOffsetStr(25), "Twenty-five days after") def testHumanReadableJoin(self): self.assertEqual(hrJoin([""]), "") self.assertEqual(hrJoin(["ice"]), "ice") self.assertEqual(hrJoin(["ice", "fire"]), "ice and fire") self.assertEqual(hrJoin(["wind", "ice", "fire"]), "wind, ice and fire") self.assertEqual(hrJoin(["dog", "cat", "hen", "yak", "ant"]), "dog, cat, hen, yak and ant") # ------------------------------------------------------------------------------ class TestFrançais(TestCase): def setUp(self): translation.activate('fr') def tearDown(self): translation.deactivate() def testToOrdinal(self): self.assertEqual(toOrdinal(-1), "dernier") self.assertEqual(toOrdinal(-2), "avant-dernier") self.assertEqual(toOrdinal (1), "premier") self.assertEqual(toOrdinal (2), "deuxième") self.assertEqual(toOrdinal (3), "troisième") self.assertEqual(toOrdinal (4), "quatrième") self.assertEqual(toOrdinal (5), "cinquième") def testToOrdinalNum(self): self.assertEqual(toOrdinal(6), "6me") self.assertEqual(toOrdinal(11), "11me") self.assertEqual(toOrdinal(12), "12me") self.assertEqual(toOrdinal(13), "13me") self.assertEqual(toOrdinal(21), "21me") self.assertEqual(toOrdinal(102), "102me") self.assertEqual(toOrdinal(6543), "6543me") def testToTheOrdinal(self): self.assertEqual(toTheOrdinal(-1), "Le dernier") self.assertEqual(toTheOrdinal(-2, True), "L'avant-dernier") self.assertEqual(toTheOrdinal(-2, False), "l'avant-dernier") self.assertEqual(toTheOrdinal(1), "La premier") self.assertEqual(toTheOrdinal(2, False), "la deuxième") self.assertEqual(toTheOrdinal(3), "Le troisième") self.assertEqual(toTheOrdinal(4), "Le quatrième") self.assertEqual(toTheOrdinal(5), "Le cinquième") def 
testToTheOrdinalNum(self): self.assertEqual(toTheOrdinal(6), "La 6me") self.assertEqual(toTheOrdinal(11), "La 11me") self.assertEqual(toTheOrdinal(12), "La 12me") self.assertEqual(toTheOrdinal(13), "La 13me") self.assertEqual(toTheOrdinal(21), "La 21me") self.assertEqual(toTheOrdinal(102), "La 102me") self.assertEqual(toTheOrdinal(6543), "La 6543me") def testToDaysOffsetStr(self): self.assertEqual(toDaysOffsetStr(-3), "Trois jours avant") self.assertEqual(toDaysOffsetStr(-2), "Deux jours avant") self.assertEqual(toDaysOffsetStr(-1), "Le jour précédent") self.assertEqual(toDaysOffsetStr(0), "") self.assertEqual(toDaysOffsetStr(1), "Le jour après") self.assertEqual(toDaysOffsetStr(2), "Deux jours après") self.assertEqual(toDaysOffsetStr(3), "Trois jours après") self.assertEqual(toDaysOffsetStr(25), "Vingt-cinq jours après") def testHumanReadableJoin(self): self.assertEqual(hrJoin([""]), "") self.assertEqual(hrJoin (["glace"]), "glace") self.assertEqual(hrJoin (["glace", "feu"]), "glace et feu") self.assertEqual(hrJoin (["vent", "glace", "feu"]), "vent, glace et feu") self.assertEqual(hrJoin (["chien", "chat", "poule", "yak", "fourmi"]), "chien, chat, poule, yak et fourmi") # ------------------------------------------------------------------------------ class TestΕλληνικά(TestCase): def setUp(self): translation.activate('el') def tearDown(self): translation.deactivate() def testToOrdinal(self): self.assertEqual(toOrdinal(-1), "τελευταίος") self.assertEqual(toOrdinal(-2), "προτελευταία") self.assertEqual(toOrdinal (1), "τελευταίο") self.assertEqual(toOrdinal (2), "προτελευταία") self.assertEqual(toOrdinal (3), "πρώτη") self.assertEqual(toOrdinal (4), "δεύτερη") self.assertEqual(toOrdinal (5), "τρίτη") # ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
[ "ls.joyous.utils.manythings.toOrdinal", "ls.joyous.utils.manythings.hrJoin", "django.utils.translation.activate", "ls.joyous.utils.manythings.toDaysOffsetStr", "ls.joyous.utils.manythings.toTheOrdinal", "django.utils.translation.deactivate" ]
[((3257, 3283), 'django.utils.translation.activate', 'translation.activate', (['"""fr"""'], {}), "('fr')\n", (3277, 3283), False, 'from django.utils import translation\n'), ((3317, 3341), 'django.utils.translation.deactivate', 'translation.deactivate', ([], {}), '()\n', (3339, 3341), False, 'from django.utils import translation\n'), ((6197, 6223), 'django.utils.translation.activate', 'translation.activate', (['"""el"""'], {}), "('el')\n", (6217, 6223), False, 'from django.utils import translation\n'), ((6257, 6281), 'django.utils.translation.deactivate', 'translation.deactivate', ([], {}), '()\n', (6279, 6281), False, 'from django.utils import translation\n'), ((594, 607), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(-1)'], {}), '(-1)\n', (603, 607), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((642, 655), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(-2)'], {}), '(-2)\n', (651, 655), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((697, 709), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(1)'], {}), '(1)\n', (706, 709), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((745, 757), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(2)'], {}), '(2)\n', (754, 757), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((794, 806), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(3)'], {}), '(3)\n', (803, 806), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((842, 854), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(4)'], {}), '(4)\n', (851, 854), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((891, 903), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(5)'], {}), '(5)\n', (900, 903), False, 
'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((972, 984), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(6)'], {}), '(6)\n', (981, 984), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1018, 1031), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(11)'], {}), '(11)\n', (1027, 1031), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1066, 1079), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(12)'], {}), '(12)\n', (1075, 1079), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1114, 1127), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(13)'], {}), '(13)\n', (1123, 1127), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1162, 1175), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(21)'], {}), '(21)\n', (1171, 1175), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1210, 1224), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(102)'], {}), '(102)\n', (1219, 1224), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1260, 1275), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(6543)'], {}), '(6543)\n', (1269, 1275), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1345, 1361), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(-1)'], {}), '(-1)\n', (1357, 1361), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1400, 1423), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(-2)', '(False)'], {}), '(-2, False)\n', (1412, 1423), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, 
hrJoin\n'), ((1469, 1484), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(1)'], {}), '(1)\n', (1481, 1484), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1524, 1539), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(2)'], {}), '(2)\n', (1536, 1539), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1580, 1595), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(3)'], {}), '(3)\n', (1592, 1595), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1635, 1650), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(4)'], {}), '(4)\n', (1647, 1650), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1691, 1706), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(5)'], {}), '(5)\n', (1703, 1706), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1782, 1797), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(6)'], {}), '(6)\n', (1794, 1797), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1835, 1851), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(11)'], {}), '(11)\n', (1847, 1851), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1890, 1906), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(12)'], {}), '(12)\n', (1902, 1906), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((1945, 1961), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(13)'], {}), '(13)\n', (1957, 1961), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2000, 2016), 'ls.joyous.utils.manythings.toTheOrdinal', 
'toTheOrdinal', (['(21)'], {}), '(21)\n', (2012, 2016), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2055, 2072), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(102)'], {}), '(102)\n', (2067, 2072), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2112, 2130), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(6543)'], {}), '(6543)\n', (2124, 2130), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2207, 2226), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(-3)'], {}), '(-3)\n', (2222, 2226), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2274, 2293), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(-2)'], {}), '(-2)\n', (2289, 2293), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2339, 2358), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(-1)'], {}), '(-1)\n', (2354, 2358), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2403, 2421), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(0)'], {}), '(0)\n', (2418, 2421), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2452, 2470), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(1)'], {}), '(1)\n', (2467, 2470), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2514, 2532), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(2)'], {}), '(2)\n', (2529, 2532), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2577, 2595), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', 
(['(3)'], {}), '(3)\n', (2592, 2595), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2642, 2661), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(25)'], {}), '(25)\n', (2657, 2661), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2752, 2764), 'ls.joyous.utils.manythings.hrJoin', 'hrJoin', (["['']"], {}), "([''])\n", (2758, 2764), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2795, 2810), 'ls.joyous.utils.manythings.hrJoin', 'hrJoin', (["['ice']"], {}), "(['ice'])\n", (2801, 2810), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2844, 2867), 'ls.joyous.utils.manythings.hrJoin', 'hrJoin', (["['ice', 'fire']"], {}), "(['ice', 'fire'])\n", (2850, 2867), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((2910, 2941), 'ls.joyous.utils.manythings.hrJoin', 'hrJoin', (["['wind', 'ice', 'fire']"], {}), "(['wind', 'ice', 'fire'])\n", (2916, 2941), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3015, 3058), 'ls.joyous.utils.manythings.hrJoin', 'hrJoin', (["['dog', 'cat', 'hen', 'yak', 'ant']"], {}), "(['dog', 'cat', 'hen', 'yak', 'ant'])\n", (3021, 3058), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3397, 3410), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(-1)'], {}), '(-1)\n', (3406, 3410), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3448, 3461), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(-2)'], {}), '(-2)\n', (3457, 3461), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3505, 3517), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', 
(['(1)'], {}), '(1)\n', (3514, 3517), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3556, 3568), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(2)'], {}), '(2)\n', (3565, 3568), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3608, 3620), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(3)'], {}), '(3)\n', (3617, 3620), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3661, 3673), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(4)'], {}), '(4)\n', (3670, 3673), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3714, 3726), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(5)'], {}), '(5)\n', (3723, 3726), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3800, 3812), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(6)'], {}), '(6)\n', (3809, 3812), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3846, 3859), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(11)'], {}), '(11)\n', (3855, 3859), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3894, 3907), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(12)'], {}), '(12)\n', (3903, 3907), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3942, 3955), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(13)'], {}), '(13)\n', (3951, 3955), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((3990, 4003), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(21)'], {}), '(21)\n', (3999, 4003), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, 
hrJoin\n'), ((4038, 4052), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(102)'], {}), '(102)\n', (4047, 4052), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4088, 4103), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(6543)'], {}), '(6543)\n', (4097, 4103), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4173, 4189), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(-1)'], {}), '(-1)\n', (4185, 4189), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4230, 4252), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(-2)', '(True)'], {}), '(-2, True)\n', (4242, 4252), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4298, 4321), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(-2)', '(False)'], {}), '(-2, False)\n', (4310, 4321), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4367, 4382), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(1)'], {}), '(1)\n', (4379, 4382), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4423, 4445), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(2)', '(False)'], {}), '(2, False)\n', (4435, 4445), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4487, 4502), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(3)'], {}), '(3)\n', (4499, 4502), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4545, 4560), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(4)'], {}), '(4)\n', (4557, 4560), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4603, 4618), 
'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(5)'], {}), '(5)\n', (4615, 4618), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4697, 4712), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(6)'], {}), '(6)\n', (4709, 4712), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4749, 4765), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(11)'], {}), '(11)\n', (4761, 4765), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4803, 4819), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(12)'], {}), '(12)\n', (4815, 4819), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4857, 4873), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(13)'], {}), '(13)\n', (4869, 4873), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4911, 4927), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(21)'], {}), '(21)\n', (4923, 4927), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((4965, 4982), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(102)'], {}), '(102)\n', (4977, 4982), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5021, 5039), 'ls.joyous.utils.manythings.toTheOrdinal', 'toTheOrdinal', (['(6543)'], {}), '(6543)\n', (5033, 5039), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5115, 5134), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(-3)'], {}), '(-3)\n', (5130, 5134), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5182, 5201), 'ls.joyous.utils.manythings.toDaysOffsetStr', 
'toDaysOffsetStr', (['(-2)'], {}), '(-2)\n', (5197, 5201), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5248, 5267), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(-1)'], {}), '(-1)\n', (5263, 5267), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5315, 5333), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(0)'], {}), '(0)\n', (5330, 5333), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5364, 5382), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(1)'], {}), '(1)\n', (5379, 5382), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5426, 5444), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(2)'], {}), '(2)\n', (5441, 5444), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5491, 5509), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(3)'], {}), '(3)\n', (5506, 5509), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5557, 5576), 'ls.joyous.utils.manythings.toDaysOffsetStr', 'toDaysOffsetStr', (['(25)'], {}), '(25)\n', (5572, 5576), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5667, 5679), 'ls.joyous.utils.manythings.hrJoin', 'hrJoin', (["['']"], {}), "([''])\n", (5673, 5679), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5710, 5727), 'ls.joyous.utils.manythings.hrJoin', 'hrJoin', (["['glace']"], {}), "(['glace'])\n", (5716, 5727), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5764, 5788), 'ls.joyous.utils.manythings.hrJoin', 'hrJoin', (["['glace', 'feu']"], {}), "(['glace', 
'feu'])\n", (5770, 5788), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5832, 5864), 'ls.joyous.utils.manythings.hrJoin', 'hrJoin', (["['vent', 'glace', 'feu']"], {}), "(['vent', 'glace', 'feu'])\n", (5838, 5864), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((5939, 5990), 'ls.joyous.utils.manythings.hrJoin', 'hrJoin', (["['chien', 'chat', 'poule', 'yak', 'fourmi']"], {}), "(['chien', 'chat', 'poule', 'yak', 'fourmi'])\n", (5945, 5990), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((6337, 6350), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(-1)'], {}), '(-1)\n', (6346, 6350), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((6391, 6404), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(-2)'], {}), '(-2)\n', (6400, 6404), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((6447, 6459), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(1)'], {}), '(1)\n', (6456, 6459), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((6500, 6512), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(2)'], {}), '(2)\n', (6509, 6512), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((6556, 6568), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(3)'], {}), '(3)\n', (6565, 6568), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((6605, 6617), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(4)'], {}), '(4)\n', (6614, 6617), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n'), ((6656, 6668), 'ls.joyous.utils.manythings.toOrdinal', 'toOrdinal', (['(5)'], {}), '(5)\n', (6665, 
6668), False, 'from ls.joyous.utils.manythings import toOrdinal, toTheOrdinal, toDaysOffsetStr, hrJoin\n')]
from box.parser import Parser
from box.generator import Generator
import os


class Importer:
    """Discovers and parses every ``.box`` function-graph file in a directory.

    After construction:
      * ``self.parser_generators`` is a list of ``(Parser, Generator)`` pairs,
        one per ``.box`` file found.
      * ``self.function_declarations`` maps each generator's function name to
        its ``Generator`` instance.
    """

    def __init__(self, path):
        # Absolute path to the directory containing function graphs to import.
        self.path = os.path.abspath(path)

        # { "FunctionName": <Generator>, ... } — populated by _parse_box_files().
        self.function_declarations = {}

        # List of (Parser, Generator) objects, one for each .box file.
        self.parser_generators = self._parse_box_files()
        # NOTE: removed leftover debug print of self.function_declarations.

    def _parse_box_files(self):
        """Parse all ``.box`` files in ``self.path``.

        Returns a list of ``(Parser, Generator)`` tuples and, as a side
        effect, fills ``self.function_declarations``.
        """
        result = []
        for entry in os.listdir(self.path):
            if not entry.endswith(".box"):
                continue
            box_path = os.path.join(self.path, entry)
            parser = Parser(box_path)
            generator = Generator(parser)
            # The generated code itself is not used here; the call is kept
            # because it may populate generator state (e.g. function_name)
            # — TODO confirm whether to_python() is required for that.
            generator.to_python([])
            result.append((parser, generator))
            self.function_declarations[generator.function_name] = generator
        return result
[ "os.listdir", "os.path.join", "box.parser.Parser", "os.path.abspath", "box.generator.Generator" ]
[((209, 230), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (224, 230), False, 'import os\n'), ((652, 673), 'os.listdir', 'os.listdir', (['self.path'], {}), '(self.path)\n', (662, 673), False, 'import os\n'), ((736, 765), 'os.path.join', 'os.path.join', (['self.path', 'file'], {}), '(self.path, file)\n', (748, 765), False, 'import os\n'), ((791, 803), 'box.parser.Parser', 'Parser', (['path'], {}), '(path)\n', (797, 803), False, 'from box.parser import Parser\n'), ((832, 849), 'box.generator.Generator', 'Generator', (['parser'], {}), '(parser)\n', (841, 849), False, 'from box.generator import Generator\n')]
import os
from docx import Document
from docx.shared import Inches
from docx import Document
from docx.text.paragraph import Paragraph


def Iceriyomu(dosyayol):
    """Check whether the thesis 'Introduction' chapter ends with a paragraph
    describing the thesis scope/organisation, and append the verdict to the
    Word report.

    ``dosyayol`` is the path of the ``.docx`` thesis file to inspect.
    """
    document = Document('{}'.format(dosyayol))
    headings = []
    texts = []
    para = []
    giris = ""
    for paragraph in document.paragraphs:
        if paragraph.style.name.startswith("Heading"):
            # A new heading closes the paragraph list collected for the
            # previous heading.
            if headings:
                texts.append(para)
            headings.append(paragraph.text)
            para = []
        elif paragraph.style.name == "Normal" and not paragraph.text.find(' ', 0, 1) != -1 and paragraph.text != '':
            # Keep only non-empty body paragraphs that do not start with a space.
            para.append(paragraph.text)
    if para or len(headings) > len(texts):
        # BUGFIX: original was ``texts.append(texts.append(para))`` — the inner
        # append returns None, so a spurious None entry was appended as well.
        texts.append(para)
    for h, t in zip(headings, texts):
        if h == "GİRİŞ" or h == "Giriş":
            # Last paragraph of the introduction chapter.
            giris = t[-1]
            # print(giris)
    # Heuristic substring match ('apsam' ~ kapsam, 'rganizasyon' ~ organizasyon)
    # so it works regardless of the first letter's capitalisation.
    if (giris.find('apsam') != -1 or giris.find('rganizasyon') != -1):
        sonuc = "Giris bölümünün son bölümünde tezin organizasyonu ve kapsamına yer verilmis "
        RaporaEkle(sonuc)
    else:
        sonuc = "Giris bölümünün son bölümünde tezin organizasyonu ve kapsamına yer verilmemis"
        RaporaEkle(sonuc)


def RaporaEkle(sonuc):
    """Append the verdict ``sonuc`` plus the authors' headings to
    ``WordRapor.docx`` and save it back in place."""
    # Context manager guarantees the handle is closed even if python-docx
    # raises while reading (the original used open()/close() and leaked the
    # handle on error).
    with open('WordRapor.docx', 'rb') as f:
        document = Document(f)
        document.add_paragraph(
            sonuc, style='List Number'
        )
        document.add_heading('16541504-Fatih Uludag', level=1)
        document.add_heading('175541058-Doğukan Kurnaz', level=1)
        document.add_heading('14545520-Kemal Sanlı', level=1)
        document.add_heading('175541059-<NAME>', level=1)
        document.save('WordRapor.docx')
    print("Asama uc tamamlandi...")
    print("Word Raporu Olusturuldu...")
[ "docx.Document" ]
[((1304, 1315), 'docx.Document', 'Document', (['f'], {}), '(f)\n', (1312, 1315), False, 'from docx import Document\n')]
"""Assimp-based analyzer.""" from __future__ import absolute_import import os import logging import subprocess import pyassimp from damn_at import ( mimetypes, MetaDataType, MetaDataValue, FileId, FileDescription, AssetDescription, AssetId ) from damn_at.pluginmanager import IAnalyzer from six.moves import map from io import open LOG = logging.getLogger(__name__) def get_assimp_types(): """Extract all possible formats and store their mime types""" # TODO: not exactly reliable, a lot of unknown mimetypes # for those extensions :/ try: pro = subprocess.Popen( ['assimp', 'listext'], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) out, err = pro.communicate() if pro.returncode != 0: LOG.debug("'assimp listext' failed with error code %d! " % pro.returncode, out, err ) return [] except OSError as oserror: LOG.debug("'assimp listext' failed! %s", oserror) return [] extensions = out.split(';') mimes = [] for ext in extensions: mime = mimetypes.guess_type('file.' + ext, False)[0] LOG.info('Mimetype Info:\n\tExtension: %s\n\tMime: %s', ext, mime) mimes.append(mime) return mimes class AssimpAnalyzer(IAnalyzer): """Assimp-based analyzer.""" handled_types = ['application/wavefront-obj', 'application/fbx'] def __init__(self): IAnalyzer.__init__(self) def activate(self): pass def analyze(self, an_uri): fileid = FileId(filename=os.path.abspath(an_uri)) file_descr = FileDescription(file=fileid) file_descr.assets = [] assimp_mimetype = 'application/assimp' scene = None try: scene = pyassimp.load(an_uri) textures = {} materials = {} from damn_at.analyzers.mesh.metadata import ( MetaDataAssimpTexture, MetaDataAssimpMesh ) for i, texture in enumerate(scene.textures): name = texture.name if texture.name else 'texture-'+str(i) asset_descr = AssetDescription(asset=AssetId( subname=name, mimetype=assimp_mimetype + ".texture", file=fileid )) asset_descr.metadata = MetaDataAssimpTexture.extract(texture) file_descr.assets.append(asset_descr) textures[i] = asset_descr for i, material in 
enumerate(scene.materials): properties = {} for key, value in material.properties.items(): properties[key] = value name = properties.get('name', 'material-'+str(i)) asset_descr = AssetDescription(asset=AssetId( subname=name, mimetype=assimp_mimetype + ".material", file=fileid )) asset_descr.metadata = {} for key, value in properties.items(): if key == 'name' or key == 'file': continue asset_descr.metadata[key] = MetaDataValue( type=MetaDataType.STRING, string_value=str(value) ) file_descr.assets.append(asset_descr) materials[i] = asset_descr for i, mesh in enumerate(scene.meshes): name = mesh.name if mesh.name else 'mesh-' + str(i) asset_descr = AssetDescription(asset=AssetId( subname=name, mimetype=assimp_mimetype + ".mesh", file=fileid )) asset_descr.metadata = MetaDataAssimpMesh.extract(mesh) asset_descr.dependencies = [] # Dependencies if mesh.materialindex is not None: if mesh.materialindex in materials: asset_descr.dependencies.append( materials[mesh.materialindex].asset ) file_descr.assets.append(asset_descr) finally: pyassimp.release(scene) ''' obj = Loader(an_uri) from damn_at.analyzers.mesh.metadata import ( MetaDataWaveFrontDefault, MetaDataWaveFrontGroup ) d_asset_descr = AssetDescription(asset=AssetId( subname='default', mimetype="application/wavefront-obj", file=fileid )) d_asset_descr.metadata = MetaDataWaveFrontDefault.extract(obj) file_descr.assets.append(d_asset_descr) for name, group in obj.groups.items(): if name != 'default': asset_descr = AssetDescription(asset=AssetId( subname=name, mimetype="application/wavefront-obj.group", file=fileid )) asset_descr.metadata = MetaDataWaveFrontGroup.extract(group) asset_descr.dependencies = [d_asset_descr.asset] file_descr.assets.append(asset_descr)''' return file_descr class Loader(object): def __init__(self, path): vertices = [] normals = [] texcoords = [] default = {'faces': []} current = default self.groups = {'default': default} for line in open(path, "r"): if line.startswith('#'): continue values = 
line.split() if not values: continue if values[0] == 'g': current = {'faces': []} group_name = values[1] LOG.info("Group:\n%s\n%s", group_name, values) self.groups[group_name] = current elif values[0] == 'v': vertices.append(tuple(map(float, values[1:4]))) elif values[0] == 'vn': normals.append(tuple(map(float, values[1:4]))) elif values[0] == 'vt': texcoords.append(tuple(map(float, values[1:3]))) elif values[0] == 's': current['smooth'] = bool(values[2:3]) elif values[0] == 'f': faces = current['faces'] face = [] for v in values[1:]: w = [int(x) if x else None for x in v.split('/')] w = [x-1 if x is not None and x > 0 else x for x in w] face.append(tuple(w)) faces.append(tuple(face)) else: LOG.info('Loader value not known: %s - %s' % (values[0], line)) # save result self.vertices = vertices self.normals = normals self.texcoords = texcoords
[ "logging.getLogger", "damn_at.mimetypes.guess_type", "damn_at.analyzers.mesh.metadata.MetaDataAssimpTexture.extract", "damn_at.pluginmanager.IAnalyzer.__init__", "subprocess.Popen", "pyassimp.release", "io.open", "damn_at.FileDescription", "os.path.abspath", "six.moves.map", "damn_at.analyzers.m...
[((369, 396), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (386, 396), False, 'import logging\n'), ((603, 695), 'subprocess.Popen', 'subprocess.Popen', (["['assimp', 'listext']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['assimp', 'listext'], stdout=subprocess.PIPE, stderr=\n subprocess.PIPE)\n", (619, 695), False, 'import subprocess\n'), ((1567, 1591), 'damn_at.pluginmanager.IAnalyzer.__init__', 'IAnalyzer.__init__', (['self'], {}), '(self)\n', (1585, 1591), False, 'from damn_at.pluginmanager import IAnalyzer\n'), ((1741, 1769), 'damn_at.FileDescription', 'FileDescription', ([], {'file': 'fileid'}), '(file=fileid)\n', (1756, 1769), False, 'from damn_at import mimetypes, MetaDataType, MetaDataValue, FileId, FileDescription, AssetDescription, AssetId\n'), ((5643, 5658), 'io.open', 'open', (['path', '"""r"""'], {}), "(path, 'r')\n", (5647, 5658), False, 'from io import open\n'), ((1211, 1253), 'damn_at.mimetypes.guess_type', 'mimetypes.guess_type', (["('file.' + ext)", '(False)'], {}), "('file.' 
+ ext, False)\n", (1231, 1253), False, 'from damn_at import mimetypes, MetaDataType, MetaDataValue, FileId, FileDescription, AssetDescription, AssetId\n'), ((1904, 1925), 'pyassimp.load', 'pyassimp.load', (['an_uri'], {}), '(an_uri)\n', (1917, 1925), False, 'import pyassimp\n'), ((4387, 4410), 'pyassimp.release', 'pyassimp.release', (['scene'], {}), '(scene)\n', (4403, 4410), False, 'import pyassimp\n'), ((1695, 1718), 'os.path.abspath', 'os.path.abspath', (['an_uri'], {}), '(an_uri)\n', (1710, 1718), False, 'import os\n'), ((2505, 2543), 'damn_at.analyzers.mesh.metadata.MetaDataAssimpTexture.extract', 'MetaDataAssimpTexture.extract', (['texture'], {}), '(texture)\n', (2534, 2543), False, 'from damn_at.analyzers.mesh.metadata import MetaDataAssimpTexture, MetaDataAssimpMesh\n'), ((3939, 3971), 'damn_at.analyzers.mesh.metadata.MetaDataAssimpMesh.extract', 'MetaDataAssimpMesh.extract', (['mesh'], {}), '(mesh)\n', (3965, 3971), False, 'from damn_at.analyzers.mesh.metadata import MetaDataAssimpTexture, MetaDataAssimpMesh\n'), ((2313, 2386), 'damn_at.AssetId', 'AssetId', ([], {'subname': 'name', 'mimetype': "(assimp_mimetype + '.texture')", 'file': 'fileid'}), "(subname=name, mimetype=assimp_mimetype + '.texture', file=fileid)\n", (2320, 2386), False, 'from damn_at import mimetypes, MetaDataType, MetaDataValue, FileId, FileDescription, AssetDescription, AssetId\n'), ((2958, 3032), 'damn_at.AssetId', 'AssetId', ([], {'subname': 'name', 'mimetype': "(assimp_mimetype + '.material')", 'file': 'fileid'}), "(subname=name, mimetype=assimp_mimetype + '.material', file=fileid)\n", (2965, 3032), False, 'from damn_at import mimetypes, MetaDataType, MetaDataValue, FileId, FileDescription, AssetDescription, AssetId\n'), ((3750, 3820), 'damn_at.AssetId', 'AssetId', ([], {'subname': 'name', 'mimetype': "(assimp_mimetype + '.mesh')", 'file': 'fileid'}), "(subname=name, mimetype=assimp_mimetype + '.mesh', file=fileid)\n", (3757, 3820), False, 'from damn_at import mimetypes, 
MetaDataType, MetaDataValue, FileId, FileDescription, AssetDescription, AssetId\n'), ((6106, 6129), 'six.moves.map', 'map', (['float', 'values[1:4]'], {}), '(float, values[1:4])\n', (6109, 6129), False, 'from six.moves import map\n'), ((6205, 6228), 'six.moves.map', 'map', (['float', 'values[1:4]'], {}), '(float, values[1:4])\n', (6208, 6228), False, 'from six.moves import map\n'), ((6306, 6329), 'six.moves.map', 'map', (['float', 'values[1:3]'], {}), '(float, values[1:3])\n', (6309, 6329), False, 'from six.moves import map\n')]
# Generated by Django 2.2.1 on 2019-06-30 00:31 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('students', '0009_auto_20190629_0125'), ] operations = [ migrations.AddField( model_name='institutionalemail', name='title_email', field=models.CharField(default='Assunto do email', editable=False, max_length=20), ), ]
[ "django.db.models.CharField" ]
[((352, 427), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Assunto do email"""', 'editable': '(False)', 'max_length': '(20)'}), "(default='Assunto do email', editable=False, max_length=20)\n", (368, 427), False, 'from django.db import migrations, models\n')]
""" Author: <NAME> Email: <EMAIL> Date: 12/21/2019 Description: Loads Azure Utility Tool configuration file. The configuration file is a blend of what the Microsoft Authentication Library requires and some extra directives that the Auzre Utility Tool requires. It is a JSON file that is required to be stored in ~/.aut/aut_config.json """ import json import sys import os from azure_utility_tool.exceptions import ConfigFileNotFound def get_config(config_file="~/.aut/aut_config.json"): CONFIG_PATH = os.path.expanduser(config_file) # Ensure the directory exists, if not, then throw an Exception. if not os.path.exists(CONFIG_PATH): raise ConfigFileNotFound("The configuration file for the Azure" " Utility Tool was not found in " + config_file) return json.load(open(CONFIG_PATH))
[ "os.path.exists", "azure_utility_tool.exceptions.ConfigFileNotFound", "os.path.expanduser" ]
[((528, 559), 'os.path.expanduser', 'os.path.expanduser', (['config_file'], {}), '(config_file)\n', (546, 559), False, 'import os\n'), ((639, 666), 'os.path.exists', 'os.path.exists', (['CONFIG_PATH'], {}), '(CONFIG_PATH)\n', (653, 666), False, 'import os\n'), ((682, 794), 'azure_utility_tool.exceptions.ConfigFileNotFound', 'ConfigFileNotFound', (["('The configuration file for the Azure Utility Tool was not found in ' +\n config_file)"], {}), "(\n 'The configuration file for the Azure Utility Tool was not found in ' +\n config_file)\n", (700, 794), False, 'from azure_utility_tool.exceptions import ConfigFileNotFound\n')]
import json from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, FileType from pathlib import Path def main(): parser = ArgumentParser(description='Collect markdown files, and write JSON.', formatter_class=ArgumentDefaultsHelpFormatter) project_path = Path(__file__).parent.parent.parent.parent parser.add_argument('--source', type=Path, default=project_path / 'html' / 'tutorials') parser.add_argument('--target', type=FileType('w'), default=str(project_path / 'html' / 'src' / 'tutorials.json')) args = parser.parse_args() tutorials = {} # source_file: Path for source_file in args.source.rglob('*.md'): name = str(source_file.relative_to(args.source).with_suffix('')) if name == 'README': continue source = source_file.read_text() tutorials[name] = source json.dump(tutorials, args.target) main()
[ "json.dump", "argparse.FileType", "argparse.ArgumentParser", "pathlib.Path" ]
[((141, 261), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Collect markdown files, and write JSON."""', 'formatter_class': 'ArgumentDefaultsHelpFormatter'}), "(description='Collect markdown files, and write JSON.',\n formatter_class=ArgumentDefaultsHelpFormatter)\n", (155, 261), False, 'from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, FileType\n'), ((1017, 1050), 'json.dump', 'json.dump', (['tutorials', 'args.target'], {}), '(tutorials, args.target)\n', (1026, 1050), False, 'import json\n'), ((553, 566), 'argparse.FileType', 'FileType', (['"""w"""'], {}), "('w')\n", (561, 566), False, 'from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, FileType\n'), ((305, 319), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (309, 319), False, 'from pathlib import Path\n')]
# -*- coding: utf-8 -*- from __future__ import with_statement import base64 import time import simplejson as json from flask.ext.security.utils import capture_registrations, \ capture_reset_password_requests, capture_passwordless_login_requests from flask.ext.security.forms import LoginForm, ConfirmRegisterForm, RegisterForm, \ ForgotPasswordForm, ResetPasswordForm, SendConfirmationForm, \ PasswordlessLoginForm from flask.ext.security.forms import TextField, SubmitField, valid_user_email from tests import SecurityTest class ConfiguredPasswordHashSecurityTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_PASSWORD_HASH': '<PASSWORD>', 'SECURITY_PASSWORD_SALT': '<PASSWORD>', 'USER_COUNT': 1 } def test_authenticate(self): r = self.authenticate(endpoint="/login") self.assertIn('Home Page', r.data) class ConfiguredSecurityTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_REGISTERABLE': True, 'SECURITY_LOGOUT_URL': '/custom_logout', 'SECURITY_LOGIN_URL': '/custom_login', 'SECURITY_POST_LOGIN_VIEW': '/post_login', 'SECURITY_POST_LOGOUT_VIEW': '/post_logout', 'SECURITY_POST_REGISTER_VIEW': '/post_register', 'SECURITY_UNAUTHORIZED_VIEW': '/unauthorized', 'SECURITY_DEFAULT_HTTP_AUTH_REALM': 'Custom Realm' } def test_login_view(self): r = self._get('/custom_login') self.assertIn("<h1>Login</h1>", r.data) def test_authenticate(self): r = self.authenticate(endpoint="/custom_login") self.assertIn('Post Login', r.data) def test_logout(self): self.authenticate(endpoint="/custom_login") r = self.logout(endpoint="/custom_logout") self.assertIn('Post Logout', r.data) def test_register_view(self): r = self._get('/register') self.assertIn('<h1>Register</h1>', r.data) def test_register(self): data = dict(email='<EMAIL>', password='password', password_confirm='password') r = self._post('/register', data=data, follow_redirects=True) self.assertIn('Post Register', r.data) def test_register_with_next_querystring_argument(self): data = dict(email='<EMAIL>', password='password', 
password_confirm='password') r = self._post('/register?next=/page1', data=data, follow_redirects=True) self.assertIn('Page 1', r.data) def test_register_json(self): data = '{ "email": "<EMAIL>", "password": "password", "csrf_token":"%s" }' % self.csrf_token r = self._post('/register', data=data, content_type='application/json') data = json.loads(r.data) self.assertEquals(data['meta']['code'], 200) def test_register_existing_email(self): data = dict(email='<EMAIL>', password='password', password_confirm='password') r = self._post('/register', data=data, follow_redirects=True) msg = '<EMAIL> is already associated with an account' self.assertIn(msg, r.data) def test_unauthorized(self): self.authenticate("<EMAIL>", endpoint="/custom_auth") r = self._get("/admin", follow_redirects=True) msg = 'You are not allowed to access the requested resouce' self.assertIn(msg, r.data) def test_default_http_auth_realm(self): r = self._get('/http', headers={ 'Authorization': 'Basic ' + base64.b64encode("<EMAIL>:bogus") }) self.assertIn('<h1>Unauthorized</h1>', r.data) self.assertIn('WWW-Authenticate', r.headers) self.assertEquals('Basic realm="Custom Realm"', r.headers['WWW-Authenticate']) class BadConfiguredSecurityTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_PASSWORD_HASH': '<PASSWORD>', 'USER_COUNT': 1 } def test_bad_configuration_raises_runtimer_error(self): self.assertRaises(RuntimeError, self.authenticate) class DefaultTemplatePathTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_LOGIN_USER_TEMPLATE': 'custom_security/login_user.html', } def test_login_user_template(self): r = self._get('/login') self.assertIn('CUSTOM LOGIN USER', r.data) class RegisterableTemplatePathTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_REGISTERABLE': True, 'SECURITY_REGISTER_USER_TEMPLATE': 'custom_security/register_user.html' } def test_register_user_template(self): r = self._get('/register') self.assertIn('CUSTOM REGISTER USER', r.data) class RecoverableTemplatePathTests(SecurityTest): AUTH_CONFIG = 
{ 'SECURITY_RECOVERABLE': True, 'SECURITY_FORGOT_PASSWORD_TEMPLATE': 'custom_security/forgot_password.html', 'SECURITY_RESET_PASSWORD_TEMPLATE': 'custom_security/reset_password.html', } def test_forgot_password_template(self): r = self._get('/reset') self.assertIn('CUSTOM FORGOT PASSWORD', r.data) def test_reset_password_template(self): with capture_reset_password_requests() as requests: r = self._post('/reset', data=dict(email='<EMAIL>'), follow_redirects=True) t = requests[0]['token'] r = self._get('/reset/' + t) self.assertIn('CUSTOM RESET PASSWORD', r.data) class ConfirmableTemplatePathTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_CONFIRMABLE': True, 'SECURITY_SEND_CONFIRMATION_TEMPLATE': 'custom_security/send_confirmation.html' } def test_send_confirmation_template(self): r = self._get('/confirm') self.assertIn('CUSTOM SEND CONFIRMATION', r.data) class PasswordlessTemplatePathTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_PASSWORDLESS': True, 'SECURITY_SEND_LOGIN_TEMPLATE': 'custom_security/send_login.html' } def test_send_login_template(self): r = self._get('/login') self.assertIn('CUSTOM SEND LOGIN', r.data) class RegisterableTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_REGISTERABLE': True, 'USER_COUNT': 1 } def test_register_valid_user(self): data = dict(email='<EMAIL>', password='password', password_confirm='password') self._post('/register', data=data, follow_redirects=True) r = self.authenticate('<EMAIL>') self.assertIn('Hello <EMAIL>', r.data) class ConfirmableTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_CONFIRMABLE': True, 'SECURITY_REGISTERABLE': True, 'SECURITY_EMAIL_SUBJECT_REGISTER': 'Custom welcome subject', 'USER_COUNT': 1 } def test_login_before_confirmation(self): e = '<EMAIL>' self.register(e) r = self.authenticate(email=e) self.assertIn(self.get_message('CONFIRMATION_REQUIRED'), r.data) def test_send_confirmation_of_already_confirmed_account(self): e = '<EMAIL>' with capture_registrations() as registrations: self.register(e) token = 
registrations[0]['confirm_token'] self.client.get('/confirm/' + token, follow_redirects=True) self.logout() r = self._post('/confirm', data=dict(email=e)) self.assertIn(self.get_message('ALREADY_CONFIRMED'), r.data) def test_register_sends_confirmation_email(self): e = '<EMAIL>' with self.app.extensions['mail'].record_messages() as outbox: self.register(e) self.assertEqual(len(outbox), 1) self.assertIn(e, outbox[0].html) self.assertEqual('Custom welcome subject', outbox[0].subject) def test_confirm_email(self): e = '<EMAIL>' with capture_registrations() as registrations: self.register(e) token = registrations[0]['confirm_token'] r = self.client.get('/confirm/' + token, follow_redirects=True) msg = self.app.config['SECURITY_MSG_EMAIL_CONFIRMED'][0] self.assertIn(msg, r.data) def test_invalid_token_when_confirming_email(self): r = self.client.get('/confirm/bogus', follow_redirects=True) msg = self.app.config['SECURITY_MSG_INVALID_CONFIRMATION_TOKEN'][0] self.assertIn(msg, r.data) def test_send_confirmation_json(self): r = self._post('/confirm', data='{"email": "<EMAIL>"}', content_type='application/json') self.assertEquals(r.status_code, 200) def test_send_confirmation_with_invalid_email(self): r = self._post('/confirm', data=dict(email='<EMAIL>')) msg = self.app.config['SECURITY_MSG_USER_DOES_NOT_EXIST'][0] self.assertIn(msg, r.data) def test_resend_confirmation(self): e = '<EMAIL>' self.register(e) r = self._post('/confirm', data={'email': e}) msg = self.get_message('CONFIRMATION_REQUEST', email=e) self.assertIn(msg, r.data) def test_user_deleted_before_confirmation(self): e = '<EMAIL>' with capture_registrations() as registrations: self.register(e) user = registrations[0]['user'] token = registrations[0]['confirm_token'] with self.app.app_context(): from flask_security.core import _security _security.datastore.delete(user) _security.datastore.commit() r = self.client.get('/confirm/' + token, follow_redirects=True) msg = 
self.app.config['SECURITY_MSG_INVALID_CONFIRMATION_TOKEN'][0] self.assertIn(msg, r.data) class ExpiredConfirmationTest(SecurityTest): AUTH_CONFIG = { 'SECURITY_CONFIRMABLE': True, 'SECURITY_REGISTERABLE': True, 'SECURITY_CONFIRM_EMAIL_WITHIN': '1 milliseconds', 'USER_COUNT': 1 } def test_expired_confirmation_token_sends_email(self): e = '<EMAIL>' with capture_registrations() as registrations: self.register(e) token = registrations[0]['confirm_token'] time.sleep(1.25) with self.app.extensions['mail'].record_messages() as outbox: r = self.client.get('/confirm/' + token, follow_redirects=True) self.assertEqual(len(outbox), 1) self.assertNotIn(token, outbox[0].html) expire_text = self.AUTH_CONFIG['SECURITY_CONFIRM_EMAIL_WITHIN'] msg = self.app.config['SECURITY_MSG_CONFIRMATION_EXPIRED'][0] msg = msg % dict(within=expire_text, email=e) self.assertIn(msg, r.data) class LoginWithoutImmediateConfirmTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_CONFIRMABLE': True, 'SECURITY_REGISTERABLE': True, 'SECURITY_LOGIN_WITHOUT_CONFIRMATION': True, 'USER_COUNT': 1 } def test_register_valid_user_automatically_signs_in(self): e = '<EMAIL>' p = 'password' data = dict(email=e, password=p, password_confirm=p) r = self._post('/register', data=data, follow_redirects=True) self.assertIn(e, r.data) class RecoverableTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_RECOVERABLE': True, 'SECURITY_RESET_PASSWORD_ERROR_VIEW': '/', 'SECURITY_POST_FORGOT_VIEW': '/' } def test_reset_view(self): with capture_reset_password_requests() as requests: r = self._post('/reset', data=dict(email='<EMAIL>'), follow_redirects=True) t = requests[0]['token'] r = self._get('/reset/' + t) self.assertIn('<h1>Reset password</h1>', r.data) def test_forgot_post_sends_email(self): with capture_reset_password_requests(): with self.app.extensions['mail'].record_messages() as outbox: self._post('/reset', data=dict(email='<EMAIL>')) self.assertEqual(len(outbox), 1) def test_forgot_password_json(self): r = self._post('/reset', 
data='{"email": "<EMAIL>"}', content_type="application/json") self.assertEquals(r.status_code, 200) def test_forgot_password_invalid_email(self): r = self._post('/reset', data=dict(email='<EMAIL>'), follow_redirects=True) self.assertIn("Specified user does not exist", r.data) def test_reset_password_with_valid_token(self): with capture_reset_password_requests() as requests: r = self._post('/reset', data=dict(email='<EMAIL>'), follow_redirects=True) t = requests[0]['token'] r = self._post('/reset/' + t, data={ 'password': '<PASSWORD>', 'password_confirm': '<PASSWORD>' }, follow_redirects=True) r = self.logout() r = self.authenticate('<EMAIL>', '<PASSWORD>') self.assertIn('Hello <EMAIL>', r.data) def test_reset_password_with_invalid_token(self): r = self._post('/reset/bogus', data={ 'password': '<PASSWORD>', 'password_confirm': '<PASSWORD>' }, follow_redirects=True) self.assertIn(self.get_message('INVALID_RESET_PASSWORD_TOKEN'), r.data) class ExpiredResetPasswordTest(SecurityTest): AUTH_CONFIG = { 'SECURITY_RECOVERABLE': True, 'SECURITY_RESET_PASSWORD_WITHIN': '1 milliseconds' } def test_reset_password_with_expired_token(self): with capture_reset_password_requests() as requests: r = self._post('/reset', data=dict(email='<EMAIL>'), follow_redirects=True) t = requests[0]['token'] time.sleep(1) r = self._post('/reset/' + t, data={ 'password': '<PASSWORD>', 'password_confirm': '<PASSWORD>' }, follow_redirects=True) self.assertIn('You did not reset your password within', r.data) class ChangePasswordTest(SecurityTest): AUTH_CONFIG = { 'SECURITY_RECOVERABLE': True, 'SECURITY_CHANGEABLE': True, } def test_change_password(self): self.authenticate() r = self.client.get('/change', follow_redirects=True) self.assertIn('Change password', r.data) def test_change_password_invalid(self): self.authenticate() r = self._post('/change', data={ 'password': '<PASSWORD>', 'new_password': '<PASSWORD>', 'new_password_confirm': '<PASSWORD>' }, follow_redirects=True) self.assertNotIn('You 
successfully changed your password', r.data) self.assertIn('Invalid password', r.data) def test_change_password_mismatch(self): self.authenticate() r = self._post('/change', data={ 'password': 'password', 'new_password': '<PASSWORD>', 'new_password_confirm': '<PASSWORD>' }, follow_redirects=True) self.assertNotIn('You successfully changed your password', r.data) self.assertIn('Passwords do not match', r.data) def test_change_password_bad_password(self): self.authenticate() r = self._post('/change', data={ 'password': 'password', 'new_password': 'a', 'new_password_confirm': 'a' }, follow_redirects=True) self.assertNotIn('You successfully changed your password', r.data) self.assertIn('Field must be between', r.data) def test_change_password_success(self): self.authenticate() with self.app.extensions['mail'].record_messages() as outbox: r = self._post('/change', data={ 'password': 'password', 'new_password': '<PASSWORD>', 'new_password_confirm': '<PASSWORD>' }, follow_redirects=True) self.assertIn('You successfully changed your password', r.data) self.assertIn('Home Page', r.data) self.assertEqual(len(outbox), 1) self.assertIn("Your password has been changed", outbox[0].html) self.assertIn("/reset", outbox[0].html) class ChangePasswordPostViewTest(SecurityTest): AUTH_CONFIG = { 'SECURITY_CHANGEABLE': True, 'SECURITY_POST_CHANGE_VIEW': '/profile', } def test_change_password_success(self): self.authenticate() r = self._post('/change', data={ 'password': 'password', 'new_password': '<PASSWORD>', 'new_password_confirm': '<PASSWORD>' }, follow_redirects=True) self.assertIn('Profile Page', r.data) class ChangePasswordDisabledTest(SecurityTest): AUTH_CONFIG = { 'SECURITY_CHANGEABLE': False, } def test_change_password_endpoint_is_404(self): self.authenticate() r = self.client.get('/change', follow_redirects=True) self.assertEqual(404, r.status_code) class TrackableTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_TRACKABLE': True, 'USER_COUNT': 1 } def test_did_track(self): e = 
'<EMAIL>' self.authenticate(email=e) self.logout() self.authenticate(email=e) with self.app.test_request_context('/profile'): user = self.app.security.datastore.find_user(email=e) self.assertIsNotNone(user.last_login_at) self.assertIsNotNone(user.current_login_at) self.assertEquals('untrackable', user.last_login_ip) self.assertEquals('untrackable', user.current_login_ip) self.assertEquals(2, user.login_count) class PasswordlessTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_PASSWORDLESS': True } def test_login_request_for_inactive_user(self): msg = self.app.config['SECURITY_MSG_DISABLED_ACCOUNT'][0] r = self._post('/login', data=dict(email='<EMAIL>'), follow_redirects=True) self.assertIn(msg, r.data) def test_request_login_token_with_json_and_valid_email(self): data = '{"email": "<EMAIL>", "password": "password", "csrf_token":"%s"}' % self.csrf_token r = self._post('/login', data=data, content_type='application/json') self.assertEquals(r.status_code, 200) self.assertNotIn('error', r.data) def test_request_login_token_with_json_and_invalid_email(self): data = '{"email": "<EMAIL>", "password": "password"}' r = self._post('/login', data=data, content_type='application/json') self.assertIn('errors', r.data) def test_request_login_token_sends_email_and_can_login(self): e = '<EMAIL>' r, user, token = None, None, None with capture_passwordless_login_requests() as requests: with self.app.extensions['mail'].record_messages() as outbox: r = self._post('/login', data=dict(email=e), follow_redirects=True) self.assertEqual(len(outbox), 1) self.assertEquals(1, len(requests)) self.assertIn('user', requests[0]) self.assertIn('login_token', requests[0]) user = requests[0]['user'] token = requests[0]['login_token'] msg = self.app.config['SECURITY_MSG_LOGIN_EMAIL_SENT'][0] msg = msg % dict(email=user.email) self.assertIn(msg, r.data) r = self.client.get('/login/' + token, follow_redirects=True) msg = self.get_message('PASSWORDLESS_LOGIN_SUCCESSFUL') self.assertIn(msg, r.data) r = 
self.client.get('/profile') self.assertIn('Profile Page', r.data) def test_invalid_login_token(self): msg = self.app.config['SECURITY_MSG_INVALID_LOGIN_TOKEN'][0] r = self._get('/login/bogus', follow_redirects=True) self.assertIn(msg, r.data) def test_token_login_when_already_authenticated(self): with capture_passwordless_login_requests() as requests: self._post('/login', data=dict(email='<EMAIL>'), follow_redirects=True) token = requests[0]['login_token'] r = self.client.get('/login/' + token, follow_redirects=True) msg = self.get_message('PASSWORDLESS_LOGIN_SUCCESSFUL') self.assertIn(msg, r.data) r = self.client.get('/login/' + token, follow_redirects=True) msg = self.get_message('PASSWORDLESS_LOGIN_SUCCESSFUL') self.assertNotIn(msg, r.data) def test_send_login_with_invalid_email(self): r = self._post('/login', data=dict(email='<EMAIL>')) self.assertIn('Specified user does not exist', r.data) class ExpiredLoginTokenTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_PASSWORDLESS': True, 'SECURITY_LOGIN_WITHIN': '1 milliseconds', 'USER_COUNT': 1 } def test_expired_login_token_sends_email(self): e = '<EMAIL>' with capture_passwordless_login_requests() as requests: self._post('/login', data=dict(email=e), follow_redirects=True) token = requests[0]['login_token'] time.sleep(1.25) with self.app.extensions['mail'].record_messages() as outbox: r = self.client.get('/login/' + token, follow_redirects=True) expire_text = self.AUTH_CONFIG['SECURITY_LOGIN_WITHIN'] msg = self.app.config['SECURITY_MSG_LOGIN_EXPIRED'][0] msg = msg % dict(within=expire_text, email=e) self.assertIn(msg, r.data) self.assertEqual(len(outbox), 1) self.assertIn(e, outbox[0].html) self.assertNotIn(token, outbox[0].html) class AsyncMailTaskTests(SecurityTest): AUTH_CONFIG = { 'SECURITY_RECOVERABLE': True, 'USER_COUNT': 1 } def setUp(self): super(AsyncMailTaskTests, self).setUp() self.mail_sent = False def test_send_email_task_is_called(self): @self.app.security.send_mail_task def send_email(msg): 
self.mail_sent = True self._post('/reset', data=dict(email='<EMAIL>')) self.assertTrue(self.mail_sent) class NoBlueprintTests(SecurityTest): APP_KWARGS = { 'register_blueprint': False, } AUTH_CONFIG = { 'USER_COUNT': 1 } def test_login_endpoint_is_404(self): r = self._get('/login') self.assertEqual(404, r.status_code) def test_http_auth_without_blueprint(self): auth = 'Basic ' + base64.b64encode("<EMAIL>:password") r = self._get('/http', headers={'Authorization': auth}) self.assertIn('HTTP Authentication', r.data) class ExtendFormsTest(SecurityTest): class MyLoginForm(LoginForm): email = TextField('My Login Email Address Field') class MyRegisterForm(RegisterForm): email = TextField('My Register Email Address Field') APP_KWARGS = { 'login_form': MyLoginForm, 'register_form': MyRegisterForm, } AUTH_CONFIG = { 'SECURITY_CONFIRMABLE': False, 'SECURITY_REGISTERABLE': True, } def test_login_view(self): r = self._get('/login', follow_redirects=True) self.assertIn("My Login Email Address Field", r.data) def test_register(self): r = self._get('/register', follow_redirects=True) self.assertIn("My Register Email Address Field", r.data) class RecoverableExtendFormsTest(SecurityTest): class MyForgotPasswordForm(ForgotPasswordForm): email = TextField('My Forgot Password Email Address Field', validators=[valid_user_email]) class MyResetPasswordForm(ResetPasswordForm): submit = SubmitField("My Reset Password Submit Field") APP_KWARGS = { 'forgot_password_form': MyForgotPasswordForm, 'reset_password_form': MyResetPasswordForm, } AUTH_CONFIG = { 'SECURITY_RECOVERABLE': True, } def test_forgot_password(self): r = self._get('/reset', follow_redirects=True) self.assertIn("My Forgot Password Email Address Field", r.data) def test_reset_password(self): with capture_reset_password_requests() as requests: self._post('/reset', data=dict(email='<EMAIL>'), follow_redirects=True) token = requests[0]['token'] r = self._get('/reset/' + token) self.assertIn("My Reset Password Submit Field", 
r.data) class PasswordlessExtendFormsTest(SecurityTest): class MyPasswordlessLoginForm(PasswordlessLoginForm): email = TextField('My Passwordless Login Email Address Field') APP_KWARGS = { 'passwordless_login_form': MyPasswordlessLoginForm, } AUTH_CONFIG = { 'SECURITY_PASSWORDLESS': True, } def test_passwordless_login(self): r = self._get('/login', follow_redirects=True) self.assertIn("My Passwordless Login Email Address Field", r.data) class ConfirmableExtendFormsTest(SecurityTest): class MyConfirmRegisterForm(ConfirmRegisterForm): email = TextField('My Confirm Register Email Address Field') class MySendConfirmationForm(SendConfirmationForm): email = TextField('My Send Confirmation Email Address Field') APP_KWARGS = { 'confirm_register_form': MyConfirmRegisterForm, 'send_confirmation_form': MySendConfirmationForm, } AUTH_CONFIG = { 'SECURITY_CONFIRMABLE': True, 'SECURITY_REGISTERABLE': True, } def test_register(self): r = self._get('/register', follow_redirects=True) self.assertIn("My Confirm Register Email Address Field", r.data) def test_send_confirmation(self): r = self._get('/confirm', follow_redirects=True) self.assertIn("My Send Confirmation Email Address Field", r.data)
[ "flask.ext.security.utils.capture_registrations", "flask_security.core._security.datastore.delete", "base64.b64encode", "flask.ext.security.forms.SubmitField", "time.sleep", "flask_security.core._security.datastore.commit", "flask.ext.security.utils.capture_reset_password_requests", "flask.ext.securit...
[((2724, 2742), 'simplejson.loads', 'json.loads', (['r.data'], {}), '(r.data)\n', (2734, 2742), True, 'import simplejson as json\n'), ((10141, 10157), 'time.sleep', 'time.sleep', (['(1.25)'], {}), '(1.25)\n', (10151, 10157), False, 'import time\n'), ((13827, 13840), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (13837, 13840), False, 'import time\n'), ((21256, 21272), 'time.sleep', 'time.sleep', (['(1.25)'], {}), '(1.25)\n', (21266, 21272), False, 'import time\n'), ((22869, 22910), 'flask.ext.security.forms.TextField', 'TextField', (['"""My Login Email Address Field"""'], {}), "('My Login Email Address Field')\n", (22878, 22910), False, 'from flask.ext.security.forms import TextField, SubmitField, valid_user_email\n'), ((22968, 23012), 'flask.ext.security.forms.TextField', 'TextField', (['"""My Register Email Address Field"""'], {}), "('My Register Email Address Field')\n", (22977, 23012), False, 'from flask.ext.security.forms import TextField, SubmitField, valid_user_email\n'), ((23641, 23728), 'flask.ext.security.forms.TextField', 'TextField', (['"""My Forgot Password Email Address Field"""'], {'validators': '[valid_user_email]'}), "('My Forgot Password Email Address Field', validators=[\n valid_user_email])\n", (23650, 23728), False, 'from flask.ext.security.forms import TextField, SubmitField, valid_user_email\n'), ((23818, 23863), 'flask.ext.security.forms.SubmitField', 'SubmitField', (['"""My Reset Password Submit Field"""'], {}), "('My Reset Password Submit Field')\n", (23829, 23863), False, 'from flask.ext.security.forms import TextField, SubmitField, valid_user_email\n'), ((24700, 24754), 'flask.ext.security.forms.TextField', 'TextField', (['"""My Passwordless Login Email Address Field"""'], {}), "('My Passwordless Login Email Address Field')\n", (24709, 24754), False, 'from flask.ext.security.forms import TextField, SubmitField, valid_user_email\n'), ((25198, 25250), 'flask.ext.security.forms.TextField', 'TextField', (['"""My Confirm Register Email 
Address Field"""'], {}), "('My Confirm Register Email Address Field')\n", (25207, 25250), False, 'from flask.ext.security.forms import TextField, SubmitField, valid_user_email\n'), ((25324, 25377), 'flask.ext.security.forms.TextField', 'TextField', (['"""My Send Confirmation Email Address Field"""'], {}), "('My Send Confirmation Email Address Field')\n", (25333, 25377), False, 'from flask.ext.security.forms import TextField, SubmitField, valid_user_email\n'), ((5134, 5167), 'flask.ext.security.utils.capture_reset_password_requests', 'capture_reset_password_requests', ([], {}), '()\n', (5165, 5167), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((7093, 7116), 'flask.ext.security.utils.capture_registrations', 'capture_registrations', ([], {}), '()\n', (7114, 7116), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((7844, 7867), 'flask.ext.security.utils.capture_registrations', 'capture_registrations', ([], {}), '()\n', (7865, 7867), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((9147, 9170), 'flask.ext.security.utils.capture_registrations', 'capture_registrations', ([], {}), '()\n', (9168, 9170), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((9420, 9452), 'flask_security.core._security.datastore.delete', '_security.datastore.delete', (['user'], {}), '(user)\n', (9446, 9452), False, 'from flask_security.core import _security\n'), ((9465, 9493), 'flask_security.core._security.datastore.commit', '_security.datastore.commit', ([], {}), '()\n', (9491, 9493), False, 'from flask_security.core import _security\n'), ((10007, 10030), 'flask.ext.security.utils.capture_registrations', 
'capture_registrations', ([], {}), '()\n', (10028, 10030), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((11403, 11436), 'flask.ext.security.utils.capture_reset_password_requests', 'capture_reset_password_requests', ([], {}), '()\n', (11434, 11436), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((11793, 11826), 'flask.ext.security.utils.capture_reset_password_requests', 'capture_reset_password_requests', ([], {}), '()\n', (11824, 11826), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((12550, 12583), 'flask.ext.security.utils.capture_reset_password_requests', 'capture_reset_password_requests', ([], {}), '()\n', (12581, 12583), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((13619, 13652), 'flask.ext.security.utils.capture_reset_password_requests', 'capture_reset_password_requests', ([], {}), '()\n', (13650, 13652), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((18846, 18883), 'flask.ext.security.utils.capture_passwordless_login_requests', 'capture_passwordless_login_requests', ([], {}), '()\n', (18881, 18883), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((20074, 20111), 'flask.ext.security.utils.capture_passwordless_login_requests', 'capture_passwordless_login_requests', ([], {}), '()\n', (20109, 20111), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((21073, 21110), 
'flask.ext.security.utils.capture_passwordless_login_requests', 'capture_passwordless_login_requests', ([], {}), '()\n', (21108, 21110), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((22625, 22661), 'base64.b64encode', 'base64.b64encode', (['"""<EMAIL>:password"""'], {}), "('<EMAIL>:password')\n", (22641, 22661), False, 'import base64\n'), ((24274, 24307), 'flask.ext.security.utils.capture_reset_password_requests', 'capture_reset_password_requests', ([], {}), '()\n', (24305, 24307), False, 'from flask.ext.security.utils import capture_registrations, capture_reset_password_requests, capture_passwordless_login_requests\n'), ((3515, 3548), 'base64.b64encode', 'base64.b64encode', (['"""<EMAIL>:bogus"""'], {}), "('<EMAIL>:bogus')\n", (3531, 3548), False, 'import base64\n')]
from allauth.socialaccount.helpers import complete_social_login from allauth.socialaccount.models import SocialApp, SocialToken, SocialLogin, SocialAccount from allauth.socialaccount.providers.facebook.views import fb_complete_login from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter from django.http import JsonResponse from requests import HTTPError from rest_framework.authtoken.models import Token from apps.users.models import User __author__ = 'kolyakoikelov' class SocialAuth(object): def __init__(self, provider, token_key): self.provider = provider self.token_key = token_key def login(self, request): try: original_request = request._request token = request.POST.get(self.token_key, '') google_auth_adapter = GoogleOAuth2Adapter(request=original_request) app = SocialApp.objects.get(provider=self.provider) social_auth_token = SocialToken(app=app, token=token) login = google_auth_adapter.complete_login(request=original_request, app=app, token=social_auth_token) \ if self.provider is 'google' else fb_complete_login(request=request, app=app, token=social_auth_token) extra_data = login.account.extra_data json_error_response = None if 'email' not in extra_data: json_error_response = JsonResponse(dict(message='email is not provided'), status=400) if json_error_response is not None: return json_error_response user = User.objects.filter(email=extra_data['email']).first() if user is not None: token, is_created = Token.objects.get_or_create(user=user) return JsonResponse(dict(key=token.key)) login.token = social_auth_token login.state = SocialLogin.state_from_request(original_request) complete_social_login(original_request, login) token, is_created = Token.objects.get_or_create(user=original_request.user) return JsonResponse(dict(key=token.key)) except HTTPError as e: return JsonResponse(dict(message=str(e)), status=400)
[ "allauth.socialaccount.providers.google.views.GoogleOAuth2Adapter", "allauth.socialaccount.helpers.complete_social_login", "allauth.socialaccount.models.SocialLogin.state_from_request", "allauth.socialaccount.providers.facebook.views.fb_complete_login", "allauth.socialaccount.models.SocialToken", "allauth...
[((818, 863), 'allauth.socialaccount.providers.google.views.GoogleOAuth2Adapter', 'GoogleOAuth2Adapter', ([], {'request': 'original_request'}), '(request=original_request)\n', (837, 863), False, 'from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter\n'), ((883, 928), 'allauth.socialaccount.models.SocialApp.objects.get', 'SocialApp.objects.get', ([], {'provider': 'self.provider'}), '(provider=self.provider)\n', (904, 928), False, 'from allauth.socialaccount.models import SocialApp, SocialToken, SocialLogin, SocialAccount\n'), ((961, 994), 'allauth.socialaccount.models.SocialToken', 'SocialToken', ([], {'app': 'app', 'token': 'token'}), '(app=app, token=token)\n', (972, 994), False, 'from allauth.socialaccount.models import SocialApp, SocialToken, SocialLogin, SocialAccount\n'), ((1870, 1918), 'allauth.socialaccount.models.SocialLogin.state_from_request', 'SocialLogin.state_from_request', (['original_request'], {}), '(original_request)\n', (1900, 1918), False, 'from allauth.socialaccount.models import SocialApp, SocialToken, SocialLogin, SocialAccount\n'), ((1932, 1978), 'allauth.socialaccount.helpers.complete_social_login', 'complete_social_login', (['original_request', 'login'], {}), '(original_request, login)\n', (1953, 1978), False, 'from allauth.socialaccount.helpers import complete_social_login\n'), ((2011, 2066), 'rest_framework.authtoken.models.Token.objects.get_or_create', 'Token.objects.get_or_create', ([], {'user': 'original_request.user'}), '(user=original_request.user)\n', (2038, 2066), False, 'from rest_framework.authtoken.models import Token\n'), ((1162, 1230), 'allauth.socialaccount.providers.facebook.views.fb_complete_login', 'fb_complete_login', ([], {'request': 'request', 'app': 'app', 'token': 'social_auth_token'}), '(request=request, app=app, token=social_auth_token)\n', (1179, 1230), False, 'from allauth.socialaccount.providers.facebook.views import fb_complete_login\n'), ((1703, 1741), 
'rest_framework.authtoken.models.Token.objects.get_or_create', 'Token.objects.get_or_create', ([], {'user': 'user'}), '(user=user)\n', (1730, 1741), False, 'from rest_framework.authtoken.models import Token\n'), ((1578, 1624), 'apps.users.models.User.objects.filter', 'User.objects.filter', ([], {'email': "extra_data['email']"}), "(email=extra_data['email'])\n", (1597, 1624), False, 'from apps.users.models import User\n')]
""" training a super-network and periodically evaluating its performance on bench architectures a work in this direction exists: https://arxiv.org/abs/2001.01431 """ from uninas.main import Main # default configurations, for the search process and the network design # config_files = "{path_conf_bench_tasks}/s1_fairnas_cifar.run_config, {path_conf_net_search}/bench201.run_config" config_files = "{path_conf_bench_tasks}/s1_random_cifar.run_config, {path_conf_net_search}/bench201.run_config" # these changes are applied to the default configuration in the config files changes = { "{cls_task}.is_test_run": True, "{cls_task}.save_dir": "{path_tmp}/run_bench_s1_per/", "{cls_task}.save_del_old": True, "{cls_trainer}.max_epochs": 4, "{cls_data}.dir": "{path_data}/cifar_data/", "{cls_data}.fake": False, "{cls_data}.download": False, "{cls_data}.batch_size_train": 96, # example how to mask options "{cls_method}.mask_indices": "0, 1, 4", # mask Zero, Skip, Pool "{cls_network_body}.cell_order": "n, n, r, n, n, r, n, n", # 2 normal cells, one reduction cell, ... "{cls_network_stem}.features": 16, # start with 16 channels # some augmentations "cls_augmentations": "DartsCifarAug", # default augmentations for cifar "{cls_schedulers#0}.warmup_epochs": 0, # specifying how to add weights, note that SplitWeightsMixedOp requires a SplitWeightsMixedOpCallback "{cls_network_cells_primitives#0}.mixed_cls": "MixedOp", # MixedOp, BiasD1MixedOp, ... "{cls_network_cells_primitives#1}.mixed_cls": "MixedOp", # MixedOp, BiasD1MixedOp, ... 
"cls_callbacks": "CheckpointCallback, CreateBenchCallback", "{cls_callbacks#1}.each_epochs": 1, "{cls_callbacks#1}.reset_bn": True, "{cls_callbacks#1}.benchmark_path": "{path_data}/bench/nats/nats_bench_1.1_subset_m_test.pt", # what and how to evaluate each specific network "cls_cb_objectives": "NetValueEstimator", "{cls_cb_objectives#0}.key": "acc1/valid", "{cls_cb_objectives#0}.is_constraint": False, "{cls_cb_objectives#0}.is_objective": True, "{cls_cb_objectives#0}.maximize": True, "{cls_cb_objectives#0}.load": True, "{cls_cb_objectives#0}.batches_forward": 20, "{cls_cb_objectives#0}.batches_train": 0, "{cls_cb_objectives#0}.batches_eval": -1, "{cls_cb_objectives#0}.value": "val/accuracy/1", } if __name__ == "__main__": task = Main.new_task(config_files, args_changes=changes) task.run()
[ "uninas.main.Main.new_task" ]
[((2493, 2542), 'uninas.main.Main.new_task', 'Main.new_task', (['config_files'], {'args_changes': 'changes'}), '(config_files, args_changes=changes)\n', (2506, 2542), False, 'from uninas.main import Main\n')]
import numpy as np import sqlalchemy as sa from sqlalchemy.dialects import postgresql as psql from sqlalchemy.orm import relationship from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.schema import UniqueConstraint from astropy import units as u from .core import Base from .constants import APER_KEY, APERTURE_RADIUS __all__ = ['ForcedPhotometry', 'raw_aperture_photometry', 'aperture_photometry'] class ForcedPhotometry(Base): id = sa.Column(sa.Integer, primary_key=True) __tablename__ = 'forcedphotometry' flags = sa.Column(sa.Integer) ra = sa.Column(psql.DOUBLE_PRECISION) dec = sa.Column(psql.DOUBLE_PRECISION) @property def mag(self): return -2.5 * np.log10(self.flux) + self.image.header['MAGZP'] + \ self.image.header[self.apcorkey] @property def magerr(self): return 1.08573620476 * self.fluxerr / self.flux image_id = sa.Column(sa.Integer, sa.ForeignKey('calibratedimages.id', ondelete='CASCADE'), index=True) image = relationship('CalibratedImage', back_populates='forced_photometry', cascade='all') # thumbnails = relationship('Thumbnail', cascade='all') source_id = sa.Column(sa.Text, sa.ForeignKey('sources.id', ondelete='CASCADE'), index=True) source = relationship('Source', cascade='all') apcorkey='APCOR5' flux = sa.Column(sa.Float) fluxerr = sa.Column(sa.Float) zp = sa.Column(sa.Float) filtercode = sa.Column(sa.Text) obsjd = sa.Column(sa.Float) uniq = UniqueConstraint(image_id, source_id) reverse_idx = sa.Index('source_image', source_id, image_id) @hybrid_property def snr(self): return self.flux / self.fluxerr def raw_aperture_photometry(sci_path, rms_path, mask_path, ra, dec, apply_calibration=False): import photutils from astropy.coordinates import SkyCoord from astropy.io import fits from astropy.table import vstack from astropy.wcs import WCS ra = np.atleast_1d(ra) dec = np.atleast_1d(dec) coord = SkyCoord(ra, dec, unit='deg') with fits.open(sci_path, memmap=False) as shdu: header = shdu[0].header swcs = WCS(header) scipix = shdu[0].data with 
fits.open(rms_path, memmap=False) as rhdu: rmspix = rhdu[0].data with fits.open(mask_path, memmap=False) as mhdu: maskpix = mhdu[0].data apertures = photutils.SkyCircularAperture(coord, r=APERTURE_RADIUS) phot_table = photutils.aperture_photometry(scipix, apertures, error=rmspix, wcs=swcs) pixap = apertures.to_pixel(swcs) annulus_masks = pixap.to_mask(method='center') maskpix = [annulus_mask.cutout(maskpix) for annulus_mask in annulus_masks] magzp = header['MAGZP'] apcor = header[APER_KEY] # check for invalid photometry on masked pixels phot_table['flags'] = [int(np.bitwise_or.reduce(m, axis=(0, 1))) for m in maskpix] phot_table['zp'] = magzp + apcor phot_table['obsjd'] = header['OBSJD'] phot_table['filtercode'] = 'z' + header['FILTER'][-1] # rename some columns phot_table.rename_column('aperture_sum', 'flux') phot_table.rename_column('aperture_sum_err', 'fluxerr') return phot_table def aperture_photometry(calibratable, ra, dec, apply_calibration=False, assume_background_subtracted=False, use_cutout=False, direct_load=None, survey='ZTF',apfactor=1.0,seeing=1.0): import photutils from astropy.coordinates import SkyCoord from astropy.io import fits from astropy.table import vstack from astropy.wcs import WCS ra = np.atleast_1d(ra) dec = np.atleast_1d(dec) coord = SkyCoord(ra, dec, unit='deg') if not use_cutout: wcs = calibratable.wcs if seeing*3*apfactor < 2.5: apcorkey='APCOR1' aprad=2.0 elif seeing*3*apfactor >=2.5 and seeing*3*apfactor<3.5: apcorkey='APCOR2' aprad=3.0 elif seeing*3*apfactor >=3.5 and 3*apfactor*seeing<5.0: apcorkey='APCOR3' aprad=4.0 elif seeing*3*apfactor >=5.0 and 3*apfactor*seeing<8.0: apcorkey='APCOR4' aprad=6.0 elif seeing*3*apfactor >=8.0 and 3*apfactor*seeing<12.0: apcorkey='APCOR5' aprad=10.0 elif seeing*3*apfactor >=12.0: apcorkey='APCOR6' aprad=14 aprad=aprad*u.pixel apertures = photutils.SkyCircularAperture(coord, r=aprad)#APERTURE_RADIUS*apfactor*seeing) # something that is photometerable implements mask, background, and wcs if not 
assume_background_subtracted: pixels_bkgsub = calibratable.background_subtracted_image.data else: pixels_bkgsub = calibratable.data bkgrms = calibratable.rms_image.data mask = calibratable.mask_image.data phot_table = photutils.aperture_photometry(pixels_bkgsub, apertures, error=bkgrms, wcs=wcs) if survey=='PTF': phot_table['zp'] = calibratable.header['IMAGEZPT']#['LMGAPCZP']# + calibratable.header['APCOR4'] else: phot_table['zp'] = calibratable.header['MAGZP'] + calibratable.header[apcorkey]#'APCOR4'] phot_table['obsjd'] = calibratable.header['OBSJD'] phot_table['filtercode'] = 'z' + calibratable.header['FILTER'][-1] pixap = apertures.to_pixel(wcs) annulus_masks = pixap.to_mask(method='center') maskpix = [annulus_mask.cutout(mask.data) for annulus_mask in annulus_masks] else: phot_table = [] maskpix = [] for s in coord: if direct_load is not None and 'sci' in direct_load: sci_path = direct_load['sci'] else: if assume_background_subtracted: sci_path = calibratable.local_path else: sci_path = calibratable.background_subtracted_image.local_path if direct_load is not None and 'mask' in direct_load: mask_path = direct_load['mask'] else: mask_path = calibratable.mask_image.local_path if direct_load is not None and 'rms' in direct_load: rms_path = direct_load['rms'] else: rms_path = calibratable.rms_image.local_path with fits.open( sci_path, memmap=True ) as f: wcs = WCS(f[0].header) pixcoord = wcs.all_world2pix([[s.ra.deg, s.dec.deg]], 0)[0] pixx, pixy = pixcoord nx = calibratable.header['NAXIS1'] ny = calibratable.header['NAXIS2'] xmin = max(0, pixx - 1.5 * aprad)#APERTURE_RADIUS.value * seeing * apfactor) xmax = min(nx, pixx + 1.5 * aprad)#APERTURE_RADIUS.value * seeing * apfactor) ymin = max(0, pixy - 1.5 * aprad)#APERTURE_RADIUS.value * seeing * apfactor) ymax = min(ny, pixy + 1.5 * aprad)#APERTURE_RADIUS.value * seeing * apfactor) ixmin = int(np.floor(xmin)) ixmax = int(np.ceil(xmax)) iymin = int(np.floor(ymin)) iymax = int(np.ceil(ymax)) ap = 
photutils.CircularAperture([pixx - ixmin, pixy - iymin], aprad)#APERTURE_RADIUS.value * seeing * apfactor) # something that is photometerable implements mask, background, and wcs with fits.open( sci_path, memmap=True ) as f: pixels_bkgsub = f[0].data[iymin:iymax, ixmin:ixmax] with fits.open(rms_path, memmap=True) as f: bkgrms = f[0].data[iymin:iymax, ixmin:ixmax] with fits.open(mask_path, memmap=True) as f: mask = f[0].data[iymin:iymax, ixmin:ixmax] pt = photutils.aperture_photometry(pixels_bkgsub, ap, error=bkgrms) annulus_mask = ap.to_mask(method='center') mp = annulus_mask.cutout(mask.data) maskpix.append(mp) phot_table.append(pt) phot_table = vstack(phot_table) if apply_calibration: if survey=='PTF': magzp = calibratable.header['IMAGEZPT'] #apcor = calibratable.header[APER_KEY] phot_table['mag'] = -2.5 * np.log10(phot_table['aperture_sum']) + magzp# + apcor phot_table['magerr'] = 1.0826 * phot_table['aperture_sum_err'] / phot_table['aperture_sum'] else: magzp = calibratable.header['MAGZP'] apcor = calibratable.header[apcorkey]#APER_KEY] phot_table['mag'] = -2.5 * np.log10(phot_table['aperture_sum']) + magzp + apcor phot_table['magerr'] = 1.0826 * phot_table['aperture_sum_err'] / phot_table['aperture_sum'] # check for invalid photometry on masked pixels phot_table['flags'] = [int(np.bitwise_or.reduce(m, axis=(0, 1))) for m in maskpix] # rename some columns phot_table.rename_column('aperture_sum', 'flux') phot_table.rename_column('aperture_sum_err', 'fluxerr') return phot_table
[ "sqlalchemy.orm.relationship", "numpy.ceil", "numpy.log10", "astropy.table.vstack", "sqlalchemy.ForeignKey", "astropy.coordinates.SkyCoord", "photutils.aperture_photometry", "astropy.wcs.WCS", "numpy.floor", "photutils.CircularAperture", "numpy.bitwise_or.reduce", "sqlalchemy.Index", "sqlalc...
[((458, 497), 'sqlalchemy.Column', 'sa.Column', (['sa.Integer'], {'primary_key': '(True)'}), '(sa.Integer, primary_key=True)\n', (467, 497), True, 'import sqlalchemy as sa\n'), ((550, 571), 'sqlalchemy.Column', 'sa.Column', (['sa.Integer'], {}), '(sa.Integer)\n', (559, 571), True, 'import sqlalchemy as sa\n'), ((581, 613), 'sqlalchemy.Column', 'sa.Column', (['psql.DOUBLE_PRECISION'], {}), '(psql.DOUBLE_PRECISION)\n', (590, 613), True, 'import sqlalchemy as sa\n'), ((624, 656), 'sqlalchemy.Column', 'sa.Column', (['psql.DOUBLE_PRECISION'], {}), '(psql.DOUBLE_PRECISION)\n', (633, 656), True, 'import sqlalchemy as sa\n'), ((1103, 1190), 'sqlalchemy.orm.relationship', 'relationship', (['"""CalibratedImage"""'], {'back_populates': '"""forced_photometry"""', 'cascade': '"""all"""'}), "('CalibratedImage', back_populates='forced_photometry', cascade\n ='all')\n", (1115, 1190), False, 'from sqlalchemy.orm import relationship\n'), ((1434, 1471), 'sqlalchemy.orm.relationship', 'relationship', (['"""Source"""'], {'cascade': '"""all"""'}), "('Source', cascade='all')\n", (1446, 1471), False, 'from sqlalchemy.orm import relationship\n'), ((1510, 1529), 'sqlalchemy.Column', 'sa.Column', (['sa.Float'], {}), '(sa.Float)\n', (1519, 1529), True, 'import sqlalchemy as sa\n'), ((1544, 1563), 'sqlalchemy.Column', 'sa.Column', (['sa.Float'], {}), '(sa.Float)\n', (1553, 1563), True, 'import sqlalchemy as sa\n'), ((1574, 1593), 'sqlalchemy.Column', 'sa.Column', (['sa.Float'], {}), '(sa.Float)\n', (1583, 1593), True, 'import sqlalchemy as sa\n'), ((1611, 1629), 'sqlalchemy.Column', 'sa.Column', (['sa.Text'], {}), '(sa.Text)\n', (1620, 1629), True, 'import sqlalchemy as sa\n'), ((1642, 1661), 'sqlalchemy.Column', 'sa.Column', (['sa.Float'], {}), '(sa.Float)\n', (1651, 1661), True, 'import sqlalchemy as sa\n'), ((1674, 1711), 'sqlalchemy.schema.UniqueConstraint', 'UniqueConstraint', (['image_id', 'source_id'], {}), '(image_id, source_id)\n', (1690, 1711), False, 'from sqlalchemy.schema import 
UniqueConstraint\n'), ((1730, 1775), 'sqlalchemy.Index', 'sa.Index', (['"""source_image"""', 'source_id', 'image_id'], {}), "('source_image', source_id, image_id)\n", (1738, 1775), True, 'import sqlalchemy as sa\n'), ((2160, 2177), 'numpy.atleast_1d', 'np.atleast_1d', (['ra'], {}), '(ra)\n', (2173, 2177), True, 'import numpy as np\n'), ((2188, 2206), 'numpy.atleast_1d', 'np.atleast_1d', (['dec'], {}), '(dec)\n', (2201, 2206), True, 'import numpy as np\n'), ((2219, 2248), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['ra', 'dec'], {'unit': '"""deg"""'}), "(ra, dec, unit='deg')\n", (2227, 2248), False, 'from astropy.coordinates import SkyCoord\n'), ((2577, 2632), 'photutils.SkyCircularAperture', 'photutils.SkyCircularAperture', (['coord'], {'r': 'APERTURE_RADIUS'}), '(coord, r=APERTURE_RADIUS)\n', (2606, 2632), False, 'import photutils\n'), ((2650, 2722), 'photutils.aperture_photometry', 'photutils.aperture_photometry', (['scipix', 'apertures'], {'error': 'rmspix', 'wcs': 'swcs'}), '(scipix, apertures, error=rmspix, wcs=swcs)\n', (2679, 2722), False, 'import photutils\n'), ((3925, 3942), 'numpy.atleast_1d', 'np.atleast_1d', (['ra'], {}), '(ra)\n', (3938, 3942), True, 'import numpy as np\n'), ((3953, 3971), 'numpy.atleast_1d', 'np.atleast_1d', (['dec'], {}), '(dec)\n', (3966, 3971), True, 'import numpy as np\n'), ((3984, 4013), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['ra', 'dec'], {'unit': '"""deg"""'}), "(ra, dec, unit='deg')\n", (3992, 4013), False, 'from astropy.coordinates import SkyCoord\n'), ((945, 1001), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""calibratedimages.id"""'], {'ondelete': '"""CASCADE"""'}), "('calibratedimages.id', ondelete='CASCADE')\n", (958, 1001), True, 'import sqlalchemy as sa\n'), ((1334, 1381), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""sources.id"""'], {'ondelete': '"""CASCADE"""'}), "('sources.id', ondelete='CASCADE')\n", (1347, 1381), True, 'import sqlalchemy as sa\n'), ((2259, 2292), 'astropy.io.fits.open', 'fits.open', 
(['sci_path'], {'memmap': '(False)'}), '(sci_path, memmap=False)\n', (2268, 2292), False, 'from astropy.io import fits\n'), ((2349, 2360), 'astropy.wcs.WCS', 'WCS', (['header'], {}), '(header)\n', (2352, 2360), False, 'from astropy.wcs import WCS\n'), ((2401, 2434), 'astropy.io.fits.open', 'fits.open', (['rms_path'], {'memmap': '(False)'}), '(rms_path, memmap=False)\n', (2410, 2434), False, 'from astropy.io import fits\n'), ((2484, 2518), 'astropy.io.fits.open', 'fits.open', (['mask_path'], {'memmap': '(False)'}), '(mask_path, memmap=False)\n', (2493, 2518), False, 'from astropy.io import fits\n'), ((4782, 4827), 'photutils.SkyCircularAperture', 'photutils.SkyCircularAperture', (['coord'], {'r': 'aprad'}), '(coord, r=aprad)\n', (4811, 4827), False, 'import photutils\n'), ((5234, 5312), 'photutils.aperture_photometry', 'photutils.aperture_photometry', (['pixels_bkgsub', 'apertures'], {'error': 'bkgrms', 'wcs': 'wcs'}), '(pixels_bkgsub, apertures, error=bkgrms, wcs=wcs)\n', (5263, 5312), False, 'import photutils\n'), ((8602, 8620), 'astropy.table.vstack', 'vstack', (['phot_table'], {}), '(phot_table)\n', (8608, 8620), False, 'from astropy.table import vstack\n'), ((3129, 3165), 'numpy.bitwise_or.reduce', 'np.bitwise_or.reduce', (['m'], {'axis': '(0, 1)'}), '(m, axis=(0, 1))\n', (3149, 3165), True, 'import numpy as np\n'), ((7688, 7751), 'photutils.CircularAperture', 'photutils.CircularAperture', (['[pixx - ixmin, pixy - iymin]', 'aprad'], {}), '([pixx - ixmin, pixy - iymin], aprad)\n', (7714, 7751), False, 'import photutils\n'), ((8347, 8409), 'photutils.aperture_photometry', 'photutils.aperture_photometry', (['pixels_bkgsub', 'ap'], {'error': 'bkgrms'}), '(pixels_bkgsub, ap, error=bkgrms)\n', (8376, 8409), False, 'import photutils\n'), ((9404, 9440), 'numpy.bitwise_or.reduce', 'np.bitwise_or.reduce', (['m'], {'axis': '(0, 1)'}), '(m, axis=(0, 1))\n', (9424, 9440), True, 'import numpy as np\n'), ((6824, 6856), 'astropy.io.fits.open', 'fits.open', (['sci_path'], 
{'memmap': '(True)'}), '(sci_path, memmap=True)\n', (6833, 6856), False, 'from astropy.io import fits\n'), ((6931, 6947), 'astropy.wcs.WCS', 'WCS', (['f[0].header'], {}), '(f[0].header)\n', (6934, 6947), False, 'from astropy.wcs import WCS\n'), ((7535, 7549), 'numpy.floor', 'np.floor', (['xmin'], {}), '(xmin)\n', (7543, 7549), True, 'import numpy as np\n'), ((7575, 7588), 'numpy.ceil', 'np.ceil', (['xmax'], {}), '(xmax)\n', (7582, 7588), True, 'import numpy as np\n'), ((7615, 7629), 'numpy.floor', 'np.floor', (['ymin'], {}), '(ymin)\n', (7623, 7629), True, 'import numpy as np\n'), ((7655, 7668), 'numpy.ceil', 'np.ceil', (['ymax'], {}), '(ymax)\n', (7662, 7668), True, 'import numpy as np\n'), ((7941, 7973), 'astropy.io.fits.open', 'fits.open', (['sci_path'], {'memmap': '(True)'}), '(sci_path, memmap=True)\n', (7950, 7973), False, 'from astropy.io import fits\n'), ((8112, 8144), 'astropy.io.fits.open', 'fits.open', (['rms_path'], {'memmap': '(True)'}), '(rms_path, memmap=True)\n', (8121, 8144), False, 'from astropy.io import fits\n'), ((8230, 8263), 'astropy.io.fits.open', 'fits.open', (['mask_path'], {'memmap': '(True)'}), '(mask_path, memmap=True)\n', (8239, 8263), False, 'from astropy.io import fits\n'), ((713, 732), 'numpy.log10', 'np.log10', (['self.flux'], {}), '(self.flux)\n', (721, 732), True, 'import numpy as np\n'), ((8839, 8875), 'numpy.log10', 'np.log10', (["phot_table['aperture_sum']"], {}), "(phot_table['aperture_sum'])\n", (8847, 8875), True, 'import numpy as np\n'), ((9162, 9198), 'numpy.log10', 'np.log10', (["phot_table['aperture_sum']"], {}), "(phot_table['aperture_sum'])\n", (9170, 9198), True, 'import numpy as np\n')]
import subprocess from collections import UserDict from functools import lru_cache def _parse_handle_section(lines): """ Parse a section of dmidecode output * 1st line contains address, type and size * 2nd line is title * line started with one tab is one option and its value * line started with two tabs is a member of list """ data = {"_title": next(lines).rstrip()} for line in lines: line = line.rstrip() if line.startswith("\t\t"): try: data[k].append(line.lstrip()) except AttributeError: # ignore stray <OUT OF SPEC> lines pass elif line.startswith("\t"): k, v = [i.strip() for i in line.lstrip().split(":", 1)] if v is "": data[k] = [] else: data[k] = v else: break return data class Dmidecode(UserDict): """Dmidecode parser storing parsed data as dict like object.""" TYPE = { 0: "bios", 1: "system", 2: "base board", 3: "chassis", 4: "processor", 7: "cache", 8: "port connector", 9: "system slot", 10: "on board device", 11: "OEM strings", # 13: 'bios language', 15: "system event log", 16: "physical memory array", 17: "memory device", 19: "memory array mapped address", 24: "hardware security", 25: "system power controls", 27: "cooling device", 32: "system boot", 41: "onboard device", } @classmethod def from_command(cls, args=None): args = [] if args is None else args output = subprocess.run_command(["dmidecode", *args], root=True).stdout return cls(output) def __init__(self, output): self.output = output def i_entries(self): lines = self.output.strip().splitlines() for line in lines: if line.startswith("Handle 0x"): handle_str, type_str, byte_str = line.split(",", 2) handle = handle_str.split(" ", 1)[1] typ = int(type_str.strip()[len("DMI type") :]) if typ in cls.TYPE: # parse section section = _parse_handle_section(lines) # add handle information entry = {**section, "Handle": handle} yield (cls.TYPE[typ], entry) @property @lru_cache def entries(self): return list(self.i_entries()) @property @lru_cache def categories(self): """Parse dmidecode output to dict of 
categories with subitems. """ d = {} for category, entry in self.entries: # gather entries in categories d.setdefault(category, []).append(entry) return d
[ "subprocess.run_command" ]
[((1720, 1775), 'subprocess.run_command', 'subprocess.run_command', (["['dmidecode', *args]"], {'root': '(True)'}), "(['dmidecode', *args], root=True)\n", (1742, 1775), False, 'import subprocess\n')]
import os from datetime import datetime from flask_sqlalchemy import SQLAlchemy # heroku postgresql setting app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('DATABASE_URL', None) app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True db = SQLAlchemy(app) class usermessage(db.Model): ''' user message and reply db ''' __tablename__ = 'usermessage' id = db.Column(db.String(50), primary_key=True) user_id = db.Column(db.String(50)) message = db.Column(db.Text) reply_message = db.Column(db.Text) timestamp = db.Column(db.TIMESTAMP) def __init__(self, id, user_id, message, reply_message, timestamp,): self.id = id self.user_id = user_id self.message = message self.reply_message = reply_message self.timestamp = timestamp def to_dict(self): return dict( id=self.id, user_id=self.user_id, message=self.message, reply_message=self.reply_message, timestamp=self.timestamp, ) def addToSql(event, reply, sticker=False, image=False): ''' add message data to sql ''' if sticker: msg = "stamp {} {}".format(event.message.package_id, event.message.sticker_id) elif image: msg = "IMAGE_MESSAGE" else: msg = event.message.text, add_data = usermessage( id=event.message.id, user_id=event.source.user_id, message=msg, reply_message=reply, timestamp=datetime.fromtimestamp(int(event.timestamp)/1000) ) try: db.session.add(add_data) db.session.commit() except (SQLAlchemy.exc.SQLAlchemyError, SQLAlchemy.exc.DBAPIError) as e: print("sql error happen") print(e)
[ "flask_sqlalchemy.SQLAlchemy", "os.getenv" ]
[((148, 179), 'os.getenv', 'os.getenv', (['"""DATABASE_URL"""', 'None'], {}), "('DATABASE_URL', None)\n", (157, 179), False, 'import os\n'), ((237, 252), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (247, 252), False, 'from flask_sqlalchemy import SQLAlchemy\n')]
import torch import torch.nn as nn class voxel_match_loss(nn.Module): def __init__(self): super().__init__() self.criterion=nn.MSELoss() def forward(self,output,label): positive_mask=torch.zeros(label.shape).cuda() positive_mask=torch.where(label>0.2,torch.ones_like(positive_mask), positive_mask) positive_loss=self.criterion(output*positive_mask,label*positive_mask) negative_mask=torch.zeros(label.shape).cuda() negative_mask = torch.where(label <= 0.2, torch.ones_like(negative_mask), negative_mask) negative_loss=self.criterion(output*negative_mask,label*negative_mask) loss=positive_loss+negative_loss loss=loss/2 return loss
[ "torch.nn.MSELoss", "torch.ones_like", "torch.zeros" ]
[((145, 157), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (155, 157), True, 'import torch.nn as nn\n'), ((292, 322), 'torch.ones_like', 'torch.ones_like', (['positive_mask'], {}), '(positive_mask)\n', (307, 322), False, 'import torch\n'), ((522, 552), 'torch.ones_like', 'torch.ones_like', (['negative_mask'], {}), '(negative_mask)\n', (537, 552), False, 'import torch\n'), ((216, 240), 'torch.zeros', 'torch.zeros', (['label.shape'], {}), '(label.shape)\n', (227, 240), False, 'import torch\n'), ((440, 464), 'torch.zeros', 'torch.zeros', (['label.shape'], {}), '(label.shape)\n', (451, 464), False, 'import torch\n')]
# Copyright 2019 The Johns Hopkins University Applied Physics Laboratory # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import parse from math import log1p from time import sleep, time from airflow.exceptions import AirflowException from airflow.models import BaseOperator from airflow.utils import apply_defaults from conduit.utils.datajoint_hook import DatajointHook, JobMetadata from airflow.contrib.hooks.aws_hook import AwsHook from datajoint.errors import DuplicateError class AWSBatchOperator(BaseOperator): """ Execute a job on AWS Batch Service :param job_name: the name for the job that will run on AWS Batch :type job_name: str :param job_definition: the job definition name on AWS Batch :type job_definition: str :param queue: the queue name on AWS Batch :type queue: str :param: overrides: the same parameter that boto3 will receive on containerOverrides: http://boto3.readthedocs.io/en/latest/reference/services/batch.html#submit_job :type: overrides: dict :param max_retries: exponential backoff retries while waiter is not merged :type max_retries: int :param aws_conn_id: connection id of AWS credentials / region name. If None, credential boto3 strategy will be used (http://boto3.readthedocs.io/en/latest/guide/configuration.html). :type aws_conn_id: str :param region_name: region name to use in AWS Hook. 
Override the region_name in connection (if provided) """ ui_color = "#c3dae0" client = None arn = None template_fields = ("overrides",) @apply_defaults def __init__( self, job_name, job_definition, queue, overrides, workflow_id, max_retries=288, aws_conn_id=None, region_name=None, job_parameters={}, score_format="", **kwargs ): super(AWSBatchOperator, self).__init__(**kwargs) self.job_name = job_name self.aws_conn_id = aws_conn_id self.region_name = region_name self.job_definition = job_definition self.queue = queue self.overrides = overrides self.max_retries = max_retries self.jobParameters = job_parameters self.jobId = None self.jobName = None self.dj_hook = DatajointHook() self.workflow_id = workflow_id self.jobmetadata_db = JobMetadata() self.hook = self.get_hook() self.score_format = score_format def execute(self, context): self.log.info( "Running AWS Batch Job - Job definition: %s - on queue %s", self.job_definition, self.queue, ) self.log.info("AWSBatchOperator overrides: %s", self.overrides) self.client = self.hook.get_client_type("batch", region_name=self.region_name) try: response = self.client.submit_job( jobName=self.job_name, jobQueue=self.queue, jobDefinition=self.job_definition, containerOverrides=self.overrides, parameters=self.jobParameters, ) self.log.info("AWS Batch Job started: %s", response) self.jobId = response["jobId"] self.jobName = response["jobName"] self._wait_for_task_ended() self._check_success_task() task_time, score = self._get_score() iteration = self.task_id.split(".")[1] real_task_id = self.task_id.split(".")[0] self.log.info( "Inserting {} {} {} {} {} into job metadata database".format( self.workflow_id, iteration, real_task_id, task_time, score ) ) self.dj_hook.insert1( { "iteration": iteration, "workflow_id": self.workflow_id, "job_id": real_task_id, "cost": task_time, "score": score, }, JobMetadata, ) self.log.info("AWS Batch Job has been successfully executed: %s", response) except Exception as e: self.log.info("AWS Batch Job 
has failed executed") raise AirflowException(e) def _wait_for_task_ended(self): """ Try to use a waiter from the below pull request * https://github.com/boto/botocore/pull/1307 If the waiter is not available apply a exponential backoff * docs.aws.amazon.com/general/latest/gr/api-retries.html """ # TODO improve this? Checking every 5s doesn't seem like too often... try: waiter = self.client.get_waiter("job_execution_complete") waiter.config.max_attempts = sys.maxsize # timeout is managed by airflow waiter.wait(jobs=[self.jobId]) except ValueError: # If waiter not available use expo retry = True retries = 0 while (retries < self.max_retries or self.max_retries <= 0) and retry: response = self.client.describe_jobs(jobs=[self.jobId]) if response["jobs"][-1]["status"] in ["SUCCEEDED", "FAILED"]: retry = False sleep(log1p(retries) * 30) retries += 1 def _check_success_task(self): response = self.client.describe_jobs(jobs=[self.jobId],) self.log.info("AWS Batch stopped, check status: %s", response) if len(response.get("jobs")) < 1: raise AirflowException("No job found for {}".format(response)) for job in response["jobs"]: if "attempts" in job: containers = job["attempts"] for container in containers: if ( job["status"] == "FAILED" or container["container"]["exitCode"] != 0 ): print("@@@@") raise AirflowException( "This containers encounter an error during execution {}".format( job ) ) elif job["status"] is not "SUCCEEDED": raise AirflowException( "This task is still pending {}".format(job["status"]) ) def get_hook(self): return AwsHook(aws_conn_id=self.aws_conn_id) def on_kill(self): response = self.client.terminate_job( jobId=self.jobId, reason="Task killed by the user" ) self.log.info(response) def _get_score(self): response = self.client.describe_jobs(jobs=[self.jobId]) runTime = response["jobs"][-1]["stoppedAt"] - response["jobs"][-1]["startedAt"] if self.score_format: logStream = response["jobs"][-1]["container"]["logStreamName"] self.logClient = 
self.hook.get_client_type( "logs", region_name=self.region_name ) response = self.logClient.get_log_events( logGroupName="/aws/batch/job", logStreamName=logStream, ) logEvents = response["events"] # Reads events from most recent to least recent (earliest), so the # first match is the most recent score. Perhaps change this? for logEvent in logEvents: parsed_event = parse.parse(self.score_format, logEvent["message"]) if parsed_event and "score" in parsed_event.named: return (runTime, float(parsed_event["score"])) self.log.info("Score format present but no score found in logs...") return (runTime, None)
[ "conduit.utils.datajoint_hook.DatajointHook", "parse.parse", "math.log1p", "airflow.exceptions.AirflowException", "airflow.contrib.hooks.aws_hook.AwsHook", "conduit.utils.datajoint_hook.JobMetadata" ]
[((2820, 2835), 'conduit.utils.datajoint_hook.DatajointHook', 'DatajointHook', ([], {}), '()\n', (2833, 2835), False, 'from conduit.utils.datajoint_hook import DatajointHook, JobMetadata\n'), ((2906, 2919), 'conduit.utils.datajoint_hook.JobMetadata', 'JobMetadata', ([], {}), '()\n', (2917, 2919), False, 'from conduit.utils.datajoint_hook import DatajointHook, JobMetadata\n'), ((6982, 7019), 'airflow.contrib.hooks.aws_hook.AwsHook', 'AwsHook', ([], {'aws_conn_id': 'self.aws_conn_id'}), '(aws_conn_id=self.aws_conn_id)\n', (6989, 7019), False, 'from airflow.contrib.hooks.aws_hook import AwsHook\n'), ((4785, 4804), 'airflow.exceptions.AirflowException', 'AirflowException', (['e'], {}), '(e)\n', (4801, 4804), False, 'from airflow.exceptions import AirflowException\n'), ((8008, 8059), 'parse.parse', 'parse.parse', (['self.score_format', "logEvent['message']"], {}), "(self.score_format, logEvent['message'])\n", (8019, 8059), False, 'import parse\n'), ((5821, 5835), 'math.log1p', 'log1p', (['retries'], {}), '(retries)\n', (5826, 5835), False, 'from math import log1p\n')]
from collections import OrderedDict from dataclasses import ( fields, ) from prettyprinter.prettyprinter import pretty_call, register_pretty def is_instance_of_dataclass(value): try: fields(value) except TypeError: return False else: return True def pretty_dataclass_instance(value, ctx): cls = type(value) field_defs = fields(value) kwargs = [] for field_def in field_defs: # repr is True by default, # therefore if this if False, the user # has explicitly indicated they don't want # to display the field value. if not field_def.repr: continue kwargs.append((field_def.name, getattr(value, field_def.name))) return pretty_call(ctx, cls, **OrderedDict(kwargs)) def install(): register_pretty(predicate=is_instance_of_dataclass)( pretty_dataclass_instance)
[ "collections.OrderedDict", "prettyprinter.prettyprinter.register_pretty", "dataclasses.fields" ]
[((373, 386), 'dataclasses.fields', 'fields', (['value'], {}), '(value)\n', (379, 386), False, 'from dataclasses import fields\n'), ((202, 215), 'dataclasses.fields', 'fields', (['value'], {}), '(value)\n', (208, 215), False, 'from dataclasses import fields\n'), ((810, 861), 'prettyprinter.prettyprinter.register_pretty', 'register_pretty', ([], {'predicate': 'is_instance_of_dataclass'}), '(predicate=is_instance_of_dataclass)\n', (825, 861), False, 'from prettyprinter.prettyprinter import pretty_call, register_pretty\n'), ((768, 787), 'collections.OrderedDict', 'OrderedDict', (['kwargs'], {}), '(kwargs)\n', (779, 787), False, 'from collections import OrderedDict\n')]
from __future__ import unicode_literals import codecs from django.conf import settings from rest_framework.compat import six from rest_framework.parsers import BaseParser, ParseError from rest_framework import renderers from rest_framework.settings import api_settings import ujson class UJSONParser(BaseParser): """ Parses JSON-serialized data. """ media_type = 'application/json' renderer_class = renderers.JSONRenderer strict = api_settings.STRICT_JSON def parse(self, stream, media_type=None, parser_context=None): """ Parses the incoming bytestream as JSON and returns the resulting data. """ parser_context = parser_context or {} encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET) try: decoded_stream = codecs.getreader(encoding)(stream) parse_constant = ujson.strict_constant if self.strict else None return ujson.load(decoded_stream, parse_constant=parse_constant) except ValueError as exc: raise ParseError('JSON parse error - %s' % six.text_type(exc))
[ "rest_framework.compat.six.text_type", "codecs.getreader", "ujson.load" ]
[((950, 1007), 'ujson.load', 'ujson.load', (['decoded_stream'], {'parse_constant': 'parse_constant'}), '(decoded_stream, parse_constant=parse_constant)\n', (960, 1007), False, 'import ujson\n'), ((820, 846), 'codecs.getreader', 'codecs.getreader', (['encoding'], {}), '(encoding)\n', (836, 846), False, 'import codecs\n'), ((1097, 1115), 'rest_framework.compat.six.text_type', 'six.text_type', (['exc'], {}), '(exc)\n', (1110, 1115), False, 'from rest_framework.compat import six\n')]
import datetime import re import os import requests import json import uuid import random import calendar import time import libs.SerializableDict as SerializableDict import libs.StorageObjects as StorageObjects import libs.Models as Models import libs.Loggiz as Loggiz from pytz import timezone import pytz import telegram import logging ''' Copyright (c) 2016, <NAME> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ ''' __author__ = "<NAME>" __copyright__ = "Copyright 2016" __credits__ = ["<NAME>"] __license__ = "BSD" __version__ = "1.0.0" __maintainer__ = "<NAME>" __email__ = "<EMAIL>" __status__ = "Production" class emoji(object): def __init__(self): self.used = list() random.seed(calendar.timegm(time.gmtime())) def get_randomanimal(self): animals = [ telegram.Emoji.RAT, telegram.Emoji.MOUSE, telegram.Emoji.OX, telegram.Emoji.WATER_BUFFALO, telegram.Emoji.COW, telegram.Emoji.TIGER, telegram.Emoji.LEOPARD, telegram.Emoji.RABBIT, telegram.Emoji.CAT, telegram.Emoji.DRAGON, telegram.Emoji.CROCODILE, telegram.Emoji.WHALE, telegram.Emoji.RAM, telegram.Emoji.GOAT, telegram.Emoji.ROOSTER, telegram.Emoji.DOG, telegram.Emoji.PIG] while True: foundindex = random.randrange(1, len(animals)) - 1 if foundindex not in self.used: self.used.append(foundindex) break if len(self.used) == len(animals): self.used = list() return animals[foundindex] # self.db = dbobject # self.uindex = dbobject.Get("userindex") class GeneralMessageEvent(object): def __init__(self): self.dbobject = Models.StaticModels() self.config = self.dbobject.Get("config") def run(self, bot, update, args): raise NotImplementedError() @classmethod def stripusername(self, username): if username.startswith("@"): # remove @ return username[1:] else: return username class Null(GeneralMessageEvent): def __init__(self): GeneralMessageEvent.__init__(self) def run(self, bot, update, args): pass class WebSearchDuckDuckGo(GeneralMessageEvent): def __init__(self): GeneralMessageEvent.__init__(self) def _generate_url(self, searchstring): searchstring = searchstring.replace(" ", "+") print(searchstring) return "http://api.duckduckgo.com/?q={}&format=json".format(searchstring) def run(self, bot, update, args): r = requests.get(self._generate_url(" ".join(args))) if r.status_code == 200: searchresult = r.json() resultcount = len(searchresult["RelatedTopics"]) outputstring = "{} (found: {})\n".format(searchresult["Heading"], resultcount) 
limitcounter = 0 for article in searchresult["RelatedTopics"]: outputstring += article.get("Result", "") + "\n" d = article.get("Result", "") if d == "": print(article) limitcounter += 1 if limitcounter == 3: break bot.sendMessage(update.message.chat_id, text="{}".format(outputstring), parse_mode="HTML") class Time(GeneralMessageEvent): def __init__(self): GeneralMessageEvent.__init__(self) def run(self, bot, update, args): localtime = datetime.datetime.now() home = pytz.timezone("Europe/Oslo") localtime = home.normalize(home.localize(localtime)) timezones = self.config.timezones.Get() out = "<b>Current Time</b>\n" out += "Norway: " + str(localtime.strftime('%X %x %Z')) + "\n" for tz in timezones: desc = tz[0] zonename = tz[1] currentzone = pytz.timezone(zonename) currentlocaltime = localtime.astimezone(currentzone) out += "{}: {}\n".format(desc, str(currentlocaltime.strftime('%X %x %Z'))) Loggiz.log.write.info(out) bot.sendMessage(update.message.chat_id, text="{}".format(out), parse_mode="HTML") class Configure(GeneralMessageEvent): def __init__(self): GeneralMessageEvent.__init__(self) def addignoreword(self, word): d = self.config.ignorewords.Get() if word not in d: d.append(word) self.config.ignorewords.Set(d) return True return False def delignoreword(self, word): d = self.config.ignorewords.Get() if word in d: d.remove(word) self.config.ignorewords.Set(d) return True return False def addtimezone(self, desc, tzstring): d = self.config.timezones.Get() for tz in d: if tz[0] == desc: return False d.append([desc, tzstring]) self.config.timezones.Set(d) return True def deltimezone(self, desc): pass def run(self, bot, update, args): out = None if len(args) == 0: return if update.message.from_user.username not in self.config.admins.Get() and update.message.from_user.username != "ehasting": Loggiz.log.write.error("Non admin ({}) tried to configure the bot".format(update.message.from_user.username)) bot.sendMessage(update.message.chat_id, text="{}".format("you need 
backdoor access... no grid for you!!!!"), parse_mode="HTML") return if args[0] == "help": out = "Available configuration: addignoreword, delignoreword, addtimezone" elif args[0] == "addignoreword": for word in args[1:]: out = self.addignoreword(word) Loggiz.log.write.info("{} = {}".format(word, out)) elif args[0] == "delignoreword": for word in args[1:]: out = self.delignoreword(word) Loggiz.log.write.info("{} = {}".format(word, out)) elif args[0] == "addtimezone": out = self.addtimezone(args[1], args[2]) if out is not None: Loggiz.log.write.info(out) bot.sendMessage(update.message.chat_id, text="{}".format(out), parse_mode="HTML") class Stats(GeneralMessageEvent): def __init__(self): GeneralMessageEvent.__init__(self) self.seen = self.dbobject.Get("seenlog") self.uindex = self.dbobject.Get("userindex") self.wordcounter = self.dbobject.Get("wordcounter") def run(self, bot, update, args): self.ignorewords = self.config.ignorewords.Get() users = self.seen.usercounter.Get() data = users.rawdict() output_string = "<b>Most Active User Stats (by words):</b>\n" place = 1 placeemoji = emoji() for key, user in sorted(data, key=self.sort_by_word, reverse=True): username = key if username == "": continue Loggiz.log.write.info(user) usercountobject = SerializableDict.UserObject(user) useremoji = placeemoji.get_randomanimal() output_string += "{} [{}] {}: {} (Lines: {})\n".format(useremoji, place, username, usercountobject.wordcounter, usercountobject.counter) if telegram.Emoji.DRAGON == useremoji: output_string += " - Entering the dragon......\n" place += 1 output_string += "\n<b>Most used words:</b>\n" words = self.wordcounter.words.Get() cnt = 0 for key, value in sorted(words.rawdict(), key=self.sort_by_wordusage, reverse=True): Loggiz.log.write.info(value) currentword = SerializableDict.WordStats(value) Loggiz.log.write.info(currentword.word) if currentword.word in self.ignorewords: continue output_string += "{}: {} times\n".format(currentword.word, currentword.counter) cnt 
+= 1 if cnt > 4: break Loggiz.log.write.info(output_string) bot.sendMessage(update.message.chat_id, text="{}".format(output_string), parse_mode="HTML") def sort_by_wordusage(self, worddict): d = SerializableDict.WordStats(worddict[1]) if not isinstance(d.counter, int): return 0 return d.counter def sort_by_word(self, userdict): usercountobject = SerializableDict.UserObject(userdict[1]) if not isinstance(usercountobject.wordcounter, int): return 1 Loggiz.log.write.info(usercountobject.wordcounter) return usercountobject.wordcounter class Help(GeneralMessageEvent): def __init__(self): GeneralMessageEvent.__init__(self) def run(self, bot, update, args): output_string = "<b>Available commands</b>\n" output_string += commands bot.sendMessage(update.message.chat_id, text="{}".format(output_string), parse_mode="HTML") @classmethod def sort_by_word(cls, userdict): usercountobject = SerializableDict.UserObject(userdict) if usercountobject.wordcounter == "": return 0 return usercountobject.wordcounter class AudioTips(GeneralMessageEvent): def __init__(self): GeneralMessageEvent.__init__(self) self.tipdb = self.dbobject.Get("tipdb") class Counter(GeneralMessageEvent): def __init__(self): GeneralMessageEvent.__init__(self) self.seen = self.dbobject.Get("seenlog") self.wordcounter = self.dbobject.Get("wordcounter") def run(self, bot, update): user = self.seen.usercounter.Get() usercount = user.get(update.message.from_user.username) usercountobject = SerializableDict.UserObject(usercount) words = self.wordcounter.words.Get() # Line counter if usercountobject.counter == "": usercountobject.counter = 1 else: usercountobject.counter = usercountobject.counter + 1 # Word counter currentwordcount = re.findall('\w+', update.message.text.lower()) ignorecharacterlist = [".", "!", "?", ",", ":", ";", "-", "_", "/"] for word in currentwordcount: #word = word.translate(None, ''.join(ignorecharacterlist)) current = words.get(word) current = SerializableDict.WordStats(current) if 
current.counter == "": current.counter = 0 current.word = word current.counter = int(current.counter) + 1 Loggiz.log.write.info("{}: {}".format(current.word, current.counter)) words.set(word, current.SaveObject()) self.wordcounter.words.Set(words) print("Words: {}".format(len(currentwordcount))) if usercountobject.wordcounter == "": usercountobject.wordcounter = len(currentwordcount) else: usercountobject.wordcounter = usercountobject.wordcounter + len(currentwordcount) # Last seen usercountobject.timestamp = str(datetime.datetime.now().replace(microsecond=0)) # Metadata usercountobject.firstname = update.message.from_user.first_name usercountobject.lastname = update.message.from_user.last_name usercountobject.username = update.message.from_user.username # Store object to dictionary and back to DB user.set(update.message.from_user.username, usercountobject.SaveObject()) self.seen.usercounter.Set(user) class Seen(GeneralMessageEvent): def __init__(self): GeneralMessageEvent.__init__(self) self.seendb = self.dbobject.Get("seenlog") def run(self, bot, update, args): Loggiz.log.write.info("Gettings Stats") user = self.seendb.usercounter.Get() if len(args) > 0: Loggiz.log.write.info("finding user {}".format(args[0])) username = self.stripusername(args[0]) fetchseenuser = user.get(username) userseenobject = SerializableDict.UserObject(fetchseenuser) Loggiz.log.write.info(userseenobject.timestamp) if userseenobject.timestamp != "": bot.sendMessage(update.message.chat_id, text="hey! {} was last seen {} (lines/words: {}/{})".format(username, userseenobject.timestamp, userseenobject.counter, userseenobject.wordcounter)) else: Loggiz.log.write.warn("Did not find any user info!") else: bot.sendMessage(update.message.chat_id, text="{} U ale wlong!! do like this!! 
command @<username>".format(telegram.Emoji.PILE_OF_POO)) class QuoteBase(GeneralMessageEvent): def __init__(self): GeneralMessageEvent.__init__(self) self.uindex = self.dbobject.Get("userindex") class AddQuote(QuoteBase): def __init__(self): QuoteBase.__init__(self) def run(self, bot, update, args): new_quote_index = str(uuid.uuid4()) if len(args) < 2: Loggiz.log.write.info("Argument length was {}".format(len(args))) bot.sendMessage(update.message.chat_id, text='[USAGE] <username> <quote>') else: username = self.stripusername(args[0]) if username not in self.uindex.index.Get(): tmplist = self.uindex.index.Get() tmplist.append(username) self.uindex.index.Set(tmplist) Loggiz.log.write.info("user/nick added to index") thequote = " ".join(args[1:]) if isinstance(thequote, unicode): quotetext = StorageObjects.ComnodeObject("quotestext.{}".format(new_quote_index), "unicode", desc="", hidden=False) else: quotetext = StorageObjects.ComnodeObject("quotestext.{}".format(new_quote_index), "str", desc="", hidden=False) quotetext.Set(thequote) quotemetausername = StorageObjects.ComnodeObject("quotemap.{}".format(username), "list", desc="", hidden=False) qmun = quotemetausername.Get() qmun.append(new_quote_index) quotemetausername.Set(qmun) bot.sendMessage(update.message.chat_id, text="Quote from {} added with id {}\n#quote\n/addquote {} {}".format(username, new_quote_index, username, thequote)) class Quote(QuoteBase): def __init__(self): QuoteBase.__init__(self) self.taken = list() random.seed(calendar.timegm(time.gmtime())) def get_quote(self, username): username = username.replace("<", "") username = username.replace(">", "") quotemetausername = StorageObjects.ComnodeObject("quotemap.{}".format(username), "list", desc="", hidden=False) qmun = quotemetausername.Get() if len(qmun) > 0: foundindex = random.randrange(0, len(qmun)) Loggiz.log.write.info("found: {}, total: {}".format(foundindex, len(qmun))) if len(qmun) == foundindex: foundindex = foundindex - 1 if qmun[foundindex] 
in self.taken: Loggiz.log.write.info("{} is taken".format(qmun[foundindex])) return "TAKEN" else: quotetext = StorageObjects.ComnodeObject("quotestext.{}".format(qmun[foundindex]), "str", desc="", hidden=False) self.taken.append(qmun[foundindex]) if quotetext.Get() == "": return "TAKEN" quoteoutput = quotetext.Get() quoteoutput = quoteoutput.replace("<", "") quoteoutput = quoteoutput.replace(">", "") return "<i>{}</i>: {}".format(username, quoteoutput) else: return None def findrandomuser(self): userindexlength = len(self.uindex.index.Get()) if userindexlength == 0: return luckyuser = random.randrange(0, userindexlength) if len(self.uindex.index.Get()) == luckyuser: luckyuser = luckyuser - 1 return self.uindex.index.Get()[luckyuser] def run(self, bot, update, args): emojiz = emoji() iterationcount = 0 if len(args) == 1: nums = int(args[0]) if nums > 10: nums = 10 quoteoutput = "<b>(almost) {} random Quotes</b>\n".format(nums) Loggiz.log.write.info("Args {} converted to {}".format(str(args), nums)) while True: if iterationcount > (nums * 20): Loggiz.log.write.warn("Retry exhausted") break randomuser = self.findrandomuser() currentquote = self.get_quote(randomuser) if currentquote == "TAKEN": Loggiz.log.write.info("Quote Taken or blank") iterationcount += 1 continue elif currentquote is None: Loggiz.log.write.info("Quote on {} not found".format(randomuser)) iterationcount += 1 continue quoteoutput += "{} {}\n".format(emojiz.get_randomanimal(), currentquote) if len(self.taken) >= nums: break else: quoteoutput = self.get_quote(self.findrandomuser()) if quoteoutput is not None: Loggiz.log.write.info(str(self.taken)) Loggiz.log.write.info(quoteoutput) bot.sendMessage(update.message.chat_id, text=quoteoutput, parse_mode="HTML") self.taken = list() if __name__ == '__main__': pass
[ "pytz.timezone", "libs.Models.StaticModels", "random.randrange", "libs.SerializableDict.WordStats", "libs.Loggiz.log.write.info", "libs.SerializableDict.UserObject", "datetime.datetime.now", "uuid.uuid4", "time.gmtime", "libs.Loggiz.log.write.warn" ]
[((3177, 3198), 'libs.Models.StaticModels', 'Models.StaticModels', ([], {}), '()\n', (3196, 3198), True, 'import libs.Models as Models\n'), ((4938, 4961), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4959, 4961), False, 'import datetime\n'), ((4977, 5005), 'pytz.timezone', 'pytz.timezone', (['"""Europe/Oslo"""'], {}), "('Europe/Oslo')\n", (4990, 5005), False, 'import pytz\n'), ((5517, 5543), 'libs.Loggiz.log.write.info', 'Loggiz.log.write.info', (['out'], {}), '(out)\n', (5538, 5543), True, 'import libs.Loggiz as Loggiz\n'), ((9510, 9546), 'libs.Loggiz.log.write.info', 'Loggiz.log.write.info', (['output_string'], {}), '(output_string)\n', (9531, 9546), True, 'import libs.Loggiz as Loggiz\n'), ((9703, 9742), 'libs.SerializableDict.WordStats', 'SerializableDict.WordStats', (['worddict[1]'], {}), '(worddict[1])\n', (9729, 9742), True, 'import libs.SerializableDict as SerializableDict\n'), ((9898, 9938), 'libs.SerializableDict.UserObject', 'SerializableDict.UserObject', (['userdict[1]'], {}), '(userdict[1])\n', (9925, 9938), True, 'import libs.SerializableDict as SerializableDict\n'), ((10029, 10079), 'libs.Loggiz.log.write.info', 'Loggiz.log.write.info', (['usercountobject.wordcounter'], {}), '(usercountobject.wordcounter)\n', (10050, 10079), True, 'import libs.Loggiz as Loggiz\n'), ((10533, 10570), 'libs.SerializableDict.UserObject', 'SerializableDict.UserObject', (['userdict'], {}), '(userdict)\n', (10560, 10570), True, 'import libs.SerializableDict as SerializableDict\n'), ((11217, 11255), 'libs.SerializableDict.UserObject', 'SerializableDict.UserObject', (['usercount'], {}), '(usercount)\n', (11244, 11255), True, 'import libs.SerializableDict as SerializableDict\n'), ((13194, 13233), 'libs.Loggiz.log.write.info', 'Loggiz.log.write.info', (['"""Gettings Stats"""'], {}), "('Gettings Stats')\n", (13215, 13233), True, 'import libs.Loggiz as Loggiz\n'), ((17313, 17349), 'random.randrange', 'random.randrange', (['(0)', 'userindexlength'], {}), 
'(0, userindexlength)\n', (17329, 17349), False, 'import random\n'), ((5333, 5356), 'pytz.timezone', 'pytz.timezone', (['zonename'], {}), '(zonename)\n', (5346, 5356), False, 'import pytz\n'), ((7620, 7646), 'libs.Loggiz.log.write.info', 'Loggiz.log.write.info', (['out'], {}), '(out)\n', (7641, 7646), True, 'import libs.Loggiz as Loggiz\n'), ((8465, 8492), 'libs.Loggiz.log.write.info', 'Loggiz.log.write.info', (['user'], {}), '(user)\n', (8486, 8492), True, 'import libs.Loggiz as Loggiz\n'), ((8523, 8556), 'libs.SerializableDict.UserObject', 'SerializableDict.UserObject', (['user'], {}), '(user)\n', (8550, 8556), True, 'import libs.SerializableDict as SerializableDict\n'), ((9124, 9152), 'libs.Loggiz.log.write.info', 'Loggiz.log.write.info', (['value'], {}), '(value)\n', (9145, 9152), True, 'import libs.Loggiz as Loggiz\n'), ((9179, 9212), 'libs.SerializableDict.WordStats', 'SerializableDict.WordStats', (['value'], {}), '(value)\n', (9205, 9212), True, 'import libs.SerializableDict as SerializableDict\n'), ((9225, 9264), 'libs.Loggiz.log.write.info', 'Loggiz.log.write.info', (['currentword.word'], {}), '(currentword.word)\n', (9246, 9264), True, 'import libs.Loggiz as Loggiz\n'), ((11830, 11865), 'libs.SerializableDict.WordStats', 'SerializableDict.WordStats', (['current'], {}), '(current)\n', (11856, 11865), True, 'import libs.SerializableDict as SerializableDict\n'), ((13503, 13545), 'libs.SerializableDict.UserObject', 'SerializableDict.UserObject', (['fetchseenuser'], {}), '(fetchseenuser)\n', (13530, 13545), True, 'import libs.SerializableDict as SerializableDict\n'), ((13558, 13605), 'libs.Loggiz.log.write.info', 'Loggiz.log.write.info', (['userseenobject.timestamp'], {}), '(userseenobject.timestamp)\n', (13579, 13605), True, 'import libs.Loggiz as Loggiz\n'), ((14421, 14433), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (14431, 14433), False, 'import uuid\n'), ((18837, 18871), 'libs.Loggiz.log.write.info', 'Loggiz.log.write.info', (['quoteoutput'], {}), 
'(quoteoutput)\n', (18858, 18871), True, 'import libs.Loggiz as Loggiz\n'), ((1939, 1952), 'time.gmtime', 'time.gmtime', ([], {}), '()\n', (1950, 1952), False, 'import time\n'), ((13892, 13944), 'libs.Loggiz.log.write.warn', 'Loggiz.log.write.warn', (['"""Did not find any user info!"""'], {}), "('Did not find any user info!')\n", (13913, 13944), True, 'import libs.Loggiz as Loggiz\n'), ((14902, 14951), 'libs.Loggiz.log.write.info', 'Loggiz.log.write.info', (['"""user/nick added to index"""'], {}), "('user/nick added to index')\n", (14923, 14951), True, 'import libs.Loggiz as Loggiz\n'), ((15921, 15934), 'time.gmtime', 'time.gmtime', ([], {}), '()\n', (15932, 15934), False, 'import time\n'), ((12542, 12565), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (12563, 12565), False, 'import datetime\n'), ((17948, 17988), 'libs.Loggiz.log.write.warn', 'Loggiz.log.write.warn', (['"""Retry exhausted"""'], {}), "('Retry exhausted')\n", (17969, 17988), True, 'import libs.Loggiz as Loggiz\n'), ((18188, 18233), 'libs.Loggiz.log.write.info', 'Loggiz.log.write.info', (['"""Quote Taken or blank"""'], {}), "('Quote Taken or blank')\n", (18209, 18233), True, 'import libs.Loggiz as Loggiz\n')]
""" Purpose of this file is to give examples of structured arrays This script is partially dirived from the LinkedIn learning course https://www.linkedin.com/learning/numpy-data-science-essential-training/create-arrays-from-python-structures """ import numpy as np person_data_def = [('name', 'S6'), ('height', 'f8'), ('weight', 'f8'), ('age', 'i8')] # create a structured array people_array = np.zeros(4, dtype=person_data_def) print(f'The structured array is of type {type(people_array)}\n{people_array}') # let us change some the data values # note that any int for height or weight will processed as default people_array[2] = ('Cat', 130, 56, 22) people_array[0] = ('Amy', 126, 60, 25) people_array[1] = ('Bell', 146, 60, 20) people_array[3] = ('Amy', 140, 80, 55) print(people_array) # we can print the information for name, height, weight and age ages = people_array['age'] print(f'the ages of the people are {ages}') print(f'The names of the people are {people_array["name"]}') print(f'The heights of the people are {people_array["height"]}') print(f'The weights of the people are {people_array["weight"]}') youthful = ages/2 print(f'The young ages are {youthful}') # Note that youthful does not change the original data print(f'The original ages are {ages}') print(people_array[['name', 'age']]) # Record array is a thin wrapper around structured array person_record_array = np.rec.array([('a', 100, 80, 50), ('b', 190, 189, 20)]) print(type(person_record_array[0]))
[ "numpy.zeros", "numpy.rec.array" ]
[((398, 432), 'numpy.zeros', 'np.zeros', (['(4)'], {'dtype': 'person_data_def'}), '(4, dtype=person_data_def)\n', (406, 432), True, 'import numpy as np\n'), ((1396, 1451), 'numpy.rec.array', 'np.rec.array', (["[('a', 100, 80, 50), ('b', 190, 189, 20)]"], {}), "([('a', 100, 80, 50), ('b', 190, 189, 20)])\n", (1408, 1451), True, 'import numpy as np\n')]
''' File: ebook_fix.py Created: 2021-03-06 15:46:09 Modified: 2021-03-06 15:46:14 Author: mcxiaoke (<EMAIL>) License: Apache License 2.0 ''' import sys import os from pprint import pprint from types import new_class from mobi import Mobi from ebooklib import epub import argparse from multiprocessing.dummy import Pool from functools import partial RET_OK = 0 RET_IGNORE = -1 RET_SKIP = -2 RET_PARSE_ERROR = -101 RET_OS_ERROR = -102 BOOK_FORMATS = ('.mobi', '.azw', '.azw3', '.epub') class BookParser: def __init__(self, src): self.src = src self.src_dir = os.path.dirname(src) self.src_name = os.path.basename(src) self.dst = None self.dst_name = None self.parse() def parse(self): raise('subclass must override this') def check(self): if not self.dst_name or not self.dst: return RET_PARSE_ERROR elif self.dst_name == self.src_name: return RET_IGNORE elif os.path.exists(self.dst): return RET_SKIP else: print('Name Before:\t{}'.format(self.src_name)) print('Name After:\t{}'.format(self.dst_name)) def rename(self): if not self.dst_name or not self.dst: # print('Bad Format:\t{}'.format(self.dst_name)) return RET_PARSE_ERROR elif self.dst_name == self.src_name: # print('Good Book:\t{}'.format(self.dst_name)) return RET_IGNORE elif os.path.exists(self.dst): # print('Skip Book:\t{}'.format(self.dst_name)) return RET_SKIP else: try: # print('Rename From:\t{}'.format(self.src_name)) print('Rename To:\t{}'.format(self.dst_name)) os.rename(self.src, self.dst) return RET_OK except Exception as e: print("Rename Error:\t{}".format(e)) return RET_OS_ERROR class MobiParser(BookParser): # using lib mobi-python def __init__(self, src): super().__init__(src) def parse(self): base, ext = os.path.splitext(self.src_name) ext = ext and ext.lower() try: book = Mobi(self.src) book.parse() title = book.config['mobi']['Full Name'].decode('utf8') self.dst_name = '{}{}'.format(title, ext) self.dst = os.path.join(self.src_dir, self.dst_name) # print('Mobi Title:\t{}'.format(self.dst_name)) except 
Exception as e: print("Parse Error:\t{}".format(e)) class EpubParser(BookParser): # using lib def __init__(self, src): super().__init__(src) def parse(self): base, ext = os.path.splitext(self.src_name) ext = ext and ext.lower() try: book = epub.read_epub(self.src) title = book.title self.dst_name = '{}{}'.format(title, ext) self.dst = os.path.join(self.src_dir, self.dst_name) # print('EPub Title:\t{}'.format(self.dst_name)) except Exception as e: print("Parse Error:", e) def list_files(source, recrusily=False, ext_filter=None): files = [] if not recrusily: names = os.listdir(source) if not ext_filter: files.extend([os.path.join(source, name) for name in names]) else: for name in names: _, ext = os.path.splitext(name) if ext and ext.lower() in ext_filter: files.append(os.path.join(source, name)) else: for root, dirs, names in os.walk(source): if not ext_filter: files.extend([os.path.join(root, name) for name in names]) else: for name in names: _, ext = os.path.splitext(name) if ext and ext.lower() in ext_filter: files.append(os.path.join(root, name)) return files def rename_one_book(fname, idx, total, execute=False): print('Task({}/{}):\t{}'.format(idx, total, fname)) name = os.path.basename(fname) _, ext = os.path.splitext(name) if ext in ('.mobi', '.azw', '.azw3'): book = MobiParser(fname) elif ext == '.epub': book = EpubParser(fname) else: print('Unknown Format: {}'.format(name)) book = None if book: if execute: book.rename() else: book.check() def rename_books(source, execute=False, recrusily=False): print('=== Source: {} ==='.format(source)) files = list_files(source, recrusily, BOOK_FORMATS) total = len(files) p = Pool(8) try: for idx, fname in enumerate(files): # print('Processing({}/{}):\t{}'.format(idx, total, fname)) # partial_rename_one = partial(rename_one_book, execute=execute) # rename_one_book(fname, execute) p.apply_async(rename_one_book, (fname, idx, total, execute)) p.close() p.join() except KeyboardInterrupt: print('Warning: User Ctrl-C inerrupt, 
abort.') p.terminate() # sys.exit(1) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument( 'source', help='Source folder contains ebooks') parser.add_argument('-e', '--execute', action='store_true', help='Rename all ebooks [default:False]') parser.add_argument('-r', '--recrusily', action='store_true', help='Process books in source folder recursively [default:False]') args = parser.parse_args() print(args) rename_books(args.source, args.execute, args.recrusily)
[ "os.path.exists", "ebooklib.epub.read_epub", "os.listdir", "argparse.ArgumentParser", "os.rename", "os.walk", "os.path.splitext", "os.path.join", "os.path.dirname", "os.path.basename", "mobi.Mobi", "multiprocessing.dummy.Pool" ]
[((4104, 4127), 'os.path.basename', 'os.path.basename', (['fname'], {}), '(fname)\n', (4120, 4127), False, 'import os\n'), ((4141, 4163), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (4157, 4163), False, 'import os\n'), ((4668, 4675), 'multiprocessing.dummy.Pool', 'Pool', (['(8)'], {}), '(8)\n', (4672, 4675), False, 'from multiprocessing.dummy import Pool\n'), ((5203, 5228), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (5226, 5228), False, 'import argparse\n'), ((582, 602), 'os.path.dirname', 'os.path.dirname', (['src'], {}), '(src)\n', (597, 602), False, 'import os\n'), ((627, 648), 'os.path.basename', 'os.path.basename', (['src'], {}), '(src)\n', (643, 648), False, 'import os\n'), ((2115, 2146), 'os.path.splitext', 'os.path.splitext', (['self.src_name'], {}), '(self.src_name)\n', (2131, 2146), False, 'import os\n'), ((2729, 2760), 'os.path.splitext', 'os.path.splitext', (['self.src_name'], {}), '(self.src_name)\n', (2745, 2760), False, 'import os\n'), ((3244, 3262), 'os.listdir', 'os.listdir', (['source'], {}), '(source)\n', (3254, 3262), False, 'import os\n'), ((3614, 3629), 'os.walk', 'os.walk', (['source'], {}), '(source)\n', (3621, 3629), False, 'import os\n'), ((2213, 2227), 'mobi.Mobi', 'Mobi', (['self.src'], {}), '(self.src)\n', (2217, 2227), False, 'from mobi import Mobi\n'), ((2398, 2439), 'os.path.join', 'os.path.join', (['self.src_dir', 'self.dst_name'], {}), '(self.src_dir, self.dst_name)\n', (2410, 2439), False, 'import os\n'), ((2827, 2851), 'ebooklib.epub.read_epub', 'epub.read_epub', (['self.src'], {}), '(self.src)\n', (2841, 2851), False, 'from ebooklib import epub\n'), ((2960, 3001), 'os.path.join', 'os.path.join', (['self.src_dir', 'self.dst_name'], {}), '(self.src_dir, self.dst_name)\n', (2972, 3001), False, 'import os\n'), ((981, 1005), 'os.path.exists', 'os.path.exists', (['self.dst'], {}), '(self.dst)\n', (995, 1005), False, 'import os\n'), ((1481, 1505), 'os.path.exists', 'os.path.exists', 
(['self.dst'], {}), '(self.dst)\n', (1495, 1505), False, 'import os\n'), ((3433, 3455), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (3449, 3455), False, 'import os\n'), ((3316, 3342), 'os.path.join', 'os.path.join', (['source', 'name'], {}), '(source, name)\n', (3328, 3342), False, 'import os\n'), ((3819, 3841), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (3835, 3841), False, 'import os\n'), ((1770, 1799), 'os.rename', 'os.rename', (['self.src', 'self.dst'], {}), '(self.src, self.dst)\n', (1779, 1799), False, 'import os\n'), ((3543, 3569), 'os.path.join', 'os.path.join', (['source', 'name'], {}), '(source, name)\n', (3555, 3569), False, 'import os\n'), ((3692, 3716), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (3704, 3716), False, 'import os\n'), ((3937, 3961), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (3949, 3961), False, 'import os\n')]
#!/usr/bin/env python3 # #<NAME> # # <NAME> # 7/06 # # # parsePhoneNrs.py - an example of 'grouping' - extracting parts of a match # # Python 3.5.2 # on Linux 4.4.0-36-generic x86_64 # # Demonstrates: regexp, re, search, groups # # Usage: By default, reads telNrs.txt . You may supply a different filename # # Notes: # The pattern: # Note that it is not perfect, but allows a bit of leeway in how we # write a phone #. No extensions. # Of course, only handles US-style numbers # # EDITOR: cols=120, tabstop=2 # import sys import re stderr = sys.stderr DEF_A_CODE = "None" def usage() : print( "Usage:" ) print( "\t" + sys.argv[0] + " [<file>]" ) def searchFile( fileName, pattern ) : fh = open( fileName, "r" ) for l in fh : l = l.strip() # Here's the actual search match = pattern.search( l ) if match : nr = match.groups() # Note, from the pattern, that 0 may be null, but 1 and 2 must exist if nr[0] is None : aCode = DEF_A_CODE else : aCode = nr[0] print( "area code: " + aCode + \ ", exchange: " + nr[1] + ", trunk: " + nr[2] ) else : print( "NO MATCH: " + l ) fh.close() def main() : # stick filename if len( sys.argv ) < 2 : # no file name # assume telNrs.txt fileName = "telNrs.txt" else : fileName = sys.argv[1] # for legibility, Python supplies a 'verbose' pattern # requires a special flag (re.VERBOSE) #patString = '(\d{3})*[- .)]*(\d{3})[- .]*(\d{4})' patString = r''' # don't match beginning of string (takes care of 1-) (\d{3})? # area code (3 digits) (optional) [- .)]* # optional separator (any # of space, dash, or dot, # or closing ')' ) (\d{3}) # exchange, 3 digits [- .]* # optional separator (any # of space, dash, or dot) (\d{4}) # number, 4 digits ''' # Here is what the pattern would look like as a regular pattern: #patString = r'(\d{3})\D*(\d{3})\D*(\d{4})' # Instead of creating a temporary object each time, we will compile this # regexp once, and store this object pattern = re.compile( patString, re.VERBOSE ) searchFile( fileName, pattern ) main()
[ "re.compile" ]
[((2062, 2095), 're.compile', 're.compile', (['patString', 're.VERBOSE'], {}), '(patString, re.VERBOSE)\n', (2072, 2095), False, 'import re\n')]
import functools from flask import Blueprint from flask import render_template from flask import g from flask import redirect from flask import url_for from flask import flash from mflac.vuln_app.db import get_db bp = Blueprint("admin", __name__, url_prefix="/admin") def admin_required(view): @functools.wraps(view) def wrapped_view(**kwargs): if g.user is None or not g.user['is_admin']: flash("Forbidden. You haven't enough permissions") return redirect(url_for("index.index")) return view(**kwargs) return wrapped_view def login_required(view): @functools.wraps(view) def wrapped_view(**kwargs): if g.user is None: return redirect(url_for("auth.login")) return view(**kwargs) return wrapped_view @bp.route("/users_list") @login_required @admin_required def users_list(): db = get_db() users = db.execute("SELECT id, username, is_admin FROM user").fetchall() return render_template('admin/users_list.html', users=users)
[ "flask.render_template", "mflac.vuln_app.db.get_db", "flask.flash", "functools.wraps", "flask.url_for", "flask.Blueprint" ]
[((221, 270), 'flask.Blueprint', 'Blueprint', (['"""admin"""', '__name__'], {'url_prefix': '"""/admin"""'}), "('admin', __name__, url_prefix='/admin')\n", (230, 270), False, 'from flask import Blueprint\n'), ((304, 325), 'functools.wraps', 'functools.wraps', (['view'], {}), '(view)\n', (319, 325), False, 'import functools\n'), ((613, 634), 'functools.wraps', 'functools.wraps', (['view'], {}), '(view)\n', (628, 634), False, 'import functools\n'), ((885, 893), 'mflac.vuln_app.db.get_db', 'get_db', ([], {}), '()\n', (891, 893), False, 'from mflac.vuln_app.db import get_db\n'), ((982, 1035), 'flask.render_template', 'render_template', (['"""admin/users_list.html"""'], {'users': 'users'}), "('admin/users_list.html', users=users)\n", (997, 1035), False, 'from flask import render_template\n'), ((423, 473), 'flask.flash', 'flash', (['"""Forbidden. You haven\'t enough permissions"""'], {}), '("Forbidden. You haven\'t enough permissions")\n', (428, 473), False, 'from flask import flash\n'), ((502, 524), 'flask.url_for', 'url_for', (['"""index.index"""'], {}), "('index.index')\n", (509, 524), False, 'from flask import url_for\n'), ((722, 743), 'flask.url_for', 'url_for', (['"""auth.login"""'], {}), "('auth.login')\n", (729, 743), False, 'from flask import url_for\n')]
import numpy as np import matplotlib.pyplot as plt plt.close('all') # From section 3.8.3 of wind energy explained # Prandlt tip loss calc B = 3 # number of blades R = 1 # blade length phi = np.deg2rad(10) # relative wind angle r = np.linspace(0,R,100) F = 2/np.pi * np.arccos(np.exp(-((B/2)*(1-(r/R)))/((r/R)*np.sin(phi)))) plt.figure(num='Tip loss for phi = %2.1f deg and %d blades' % (np.rad2deg(phi), B)) plt.plot(r,F) plt.xlabel('Non-Dimensional Blade Radius (r/R)') plt.ylabel('Tip Loss Factor')
[ "matplotlib.pyplot.ylabel", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.plot", "matplotlib.pyplot.close", "numpy.deg2rad", "numpy.linspace", "numpy.sin", "numpy.rad2deg" ]
[((51, 67), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (60, 67), True, 'import matplotlib.pyplot as plt\n'), ((191, 205), 'numpy.deg2rad', 'np.deg2rad', (['(10)'], {}), '(10)\n', (201, 205), True, 'import numpy as np\n'), ((232, 254), 'numpy.linspace', 'np.linspace', (['(0)', 'R', '(100)'], {}), '(0, R, 100)\n', (243, 254), True, 'import numpy as np\n'), ((410, 424), 'matplotlib.pyplot.plot', 'plt.plot', (['r', 'F'], {}), '(r, F)\n', (418, 424), True, 'import matplotlib.pyplot as plt\n'), ((424, 472), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Non-Dimensional Blade Radius (r/R)"""'], {}), "('Non-Dimensional Blade Radius (r/R)')\n", (434, 472), True, 'import matplotlib.pyplot as plt\n'), ((473, 502), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Tip Loss Factor"""'], {}), "('Tip Loss Factor')\n", (483, 502), True, 'import matplotlib.pyplot as plt\n'), ((389, 404), 'numpy.rad2deg', 'np.rad2deg', (['phi'], {}), '(phi)\n', (399, 404), True, 'import numpy as np\n'), ((310, 321), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (316, 321), True, 'import numpy as np\n')]
""" VKontakte OpenAPI and OAuth 2.0 support. This contribution adds support for VKontakte OpenAPI and OAuth 2.0 service in the form www.vkontakte.ru. Username is retrieved from the identity returned by server. """ from django.conf import settings from django.contrib.auth import authenticate from django.utils import simplejson from urllib import urlencode, unquote from urllib2 import Request, urlopen, HTTPError from hashlib import md5 from time import time from social_auth.backends import SocialAuthBackend, OAuthBackend, BaseAuth, BaseOAuth2, USERNAME VKONTAKTE_API_URL = 'https://api.vkontakte.ru/method/' VKONTAKTE_SERVER_API_URL = 'http://api.vkontakte.ru/api.php' VKONTAKTE_API_VERSION = '3.0' VKONTAKTE_OAUTH2_SCOPE = [''] # Enough for authentication EXPIRES_NAME = getattr(settings, 'SOCIAL_AUTH_EXPIRATION', 'expires') USE_APP_AUTH = getattr(settings, 'VKONTAKTE_APP_AUTH', False) LOCAL_HTML = getattr(settings, 'VKONTAKTE_LOCAL_HTML', 'vkontakte.html') class VKontakteBackend(SocialAuthBackend): """VKontakte authentication backend""" name = 'vkontakte' def get_user_id(self, details, response): """Return user unique id provided by VKontakte""" return int(response.GET['id']) def get_user_details(self, response): """Return user details from VKontakte request""" nickname = unquote(response.GET['nickname']) values = { USERNAME: response.GET['id'] if len(nickname) == 0 else nickname, 'email': '', 'fullname': '', 'first_name': unquote(response.GET['first_name']), 'last_name': unquote(response.GET['last_name'])} return values class VKontakteOAuth2Backend(OAuthBackend): """VKontakteOAuth2 authentication backend""" name = 'vkontakte-oauth2' EXTRA_DATA = [('expires_in', EXPIRES_NAME)] def get_user_id(self, details, response): """Return user unique id provided by VKontakte""" return int(response['user_id']) def get_user_details(self, response): """Return user details from VKontakte request""" values = { USERNAME: str(response['user_id']), 'email': ''} details = 
response['response'] user_name = details.get('user_name') if user_name: values['fullname'] = unquote(user_name) if ' ' in values['fullname']: values['first_name'], values['last_name'] = values['fullname'].split() else: values['first_name'] = values['fullname'] if 'last_name' in details: values['last_name'] = unquote(details['last_name']) if 'first_name' in details: values['first_name'] = unquote(details['first_name']) return values class VKontakteAuth(BaseAuth): """VKontakte OpenAPI authorization mechanism""" AUTH_BACKEND = VKontakteBackend APP_ID = settings.VKONTAKTE_APP_ID def auth_html(self): """Returns local VK authentication page, not necessary for VK to authenticate """ from django.core.urlresolvers import reverse from django.template import RequestContext, loader dict = { 'VK_APP_ID' : self.APP_ID, 'VK_COMPLETE_URL': self.redirect } vk_template = loader.get_template(LOCAL_HTML) context = RequestContext(self.request, dict) return vk_template.render(context) def auth_complete(self, *args, **kwargs): """Performs check of authentication in VKontakte, returns User if succeeded""" app_cookie = 'vk_app_' + self.APP_ID if not 'id' in self.request.GET or not app_cookie in self.request.COOKIES: raise ValueError('VKontakte authentication is not completed') cookie_dict = dict(item.split('=') for item in self.request.COOKIES[app_cookie].split('&')) check_str = ''.join([item + '=' + cookie_dict[item] for item in ['expire', 'mid', 'secret', 'sid']]) hash = md5(check_str + settings.VKONTAKTE_APP_SECRET).hexdigest() if hash != cookie_dict['sig'] or int(cookie_dict['expire']) < time() : raise ValueError('VKontakte authentication failed: invalid hash') else: kwargs.update({'response': self.request, self.AUTH_BACKEND.name: True}) return authenticate(*args, **kwargs) @property def uses_redirect(self): """VKontakte does not require visiting server url in order to do authentication, so auth_xxx methods are not needed to be called. 
Their current implementation is just an example""" return False class VKontakteOAuth2(BaseOAuth2): """VKontakte OAuth2 support""" AUTH_BACKEND = VKontakteOAuth2Backend AUTHORIZATION_URL = 'http://api.vkontakte.ru/oauth/authorize' ACCESS_TOKEN_URL = ' https://api.vkontakte.ru/oauth/access_token' SETTINGS_KEY_NAME = 'VKONTAKTE_APP_ID' SETTINGS_SECRET_NAME = 'VKONTAKTE_APP_SECRET' def get_scope(self): return VKONTAKTE_OAUTH2_SCOPE + getattr(settings, 'VKONTAKTE_OAUTH2_EXTRA_SCOPE', []) def auth_complete(self, *args, **kwargs): if USE_APP_AUTH: stop, app_auth = self.application_auth() if app_auth: return app_auth if stop: return None try: auth_result = super(VKontakteOAuth2, self).auth_complete(*args, **kwargs) except HTTPError: # VKontakte returns HTTPError 400 if cancelled raise ValueError('Authentication cancelled') return auth_result def user_data(self, access_token): """Return user data from VKontakte API""" data = {'access_token': access_token } return vkontakte_api('getUserInfoEx', data) def user_profile(self, user_id, access_token = None): data = {'uids': user_id, 'fields': 'photo'} if access_token: data['access_token'] = access_token profiles = vkontakte_api('getProfiles', data).get('response', None) return profiles[0] if profiles else None def is_app_user(self, user_id, access_token = None): """Returns app usage flag from VKontakte API""" data = {'uid': user_id} if access_token: data['access_token'] = access_token return vkontakte_api('isAppUser', data).get('response', 0) def application_auth(self): required_params = ('is_app_user', 'viewer_id', 'access_token', 'api_id', ) for param in required_params: if not param in self.request.REQUEST: return (False, None,) auth_key = self.request.REQUEST.get('auth_key') # Verify signature, if present if auth_key: check_key = md5(self.request.REQUEST.get('api_id') + '_' + self.request.REQUEST.get('viewer_id') + '_' + \ USE_APP_AUTH['key']).hexdigest() if check_key != auth_key: raise ValueError('VKontakte authentication 
failed: invalid auth key') user_check = USE_APP_AUTH.get('user_mode', 0) user_id = self.request.REQUEST.get('viewer_id') if user_check: is_user = self.request.REQUEST.get('is_app_user') if user_check == 1 else self.is_app_user(user_id) if not int(is_user): return (True, None,) data = {'response': self.user_profile(user_id), 'user_id': user_id} return (True, authenticate(**{'response': data, self.AUTH_BACKEND.name: True})) def vkontakte_api(method, data): """ Calls VKontakte OpenAPI method http://vkontakte.ru/apiclub, http://vkontakte.ru/pages.php?o=-1&p=%C2%FB%EF%EE%EB%ED%E5%ED%E8%E5%20%E7%E0%EF%F0%EE%F1%EE%E2%20%EA%20API """ # We need to perform server-side call if no access_token if not 'access_token' in data: if not 'v' in data: data['v'] = VKONTAKTE_API_VERSION if not 'api_id' in data: data['api_id'] = USE_APP_AUTH.get('id') if USE_APP_AUTH else settings.VKONTAKTE_APP_ID data['method'] = method data['format'] = 'json' url = VKONTAKTE_SERVER_API_URL secret = USE_APP_AUTH.get('key') if USE_APP_AUTH else settings.VKONTAKTE_APP_SECRET param_list = sorted(list(item + '=' + data[item] for item in data)) data['sig'] = md5(''.join(param_list) + secret).hexdigest() else: url = VKONTAKTE_API_URL + method params = urlencode(data) api_request = Request(url + '?' + params) try: return simplejson.loads(urlopen(api_request).read()) except (TypeError, KeyError, IOError, ValueError, IndexError): return None # Backend definition BACKENDS = { 'vkontakte': VKontakteAuth, 'vkontakte-oauth2': VKontakteOAuth2 }
[ "django.contrib.auth.authenticate", "urllib2.urlopen", "hashlib.md5", "urllib.unquote", "django.template.RequestContext", "urllib2.Request", "urllib.urlencode", "time.time", "django.template.loader.get_template" ]
[((8401, 8416), 'urllib.urlencode', 'urlencode', (['data'], {}), '(data)\n', (8410, 8416), False, 'from urllib import urlencode, unquote\n'), ((8435, 8462), 'urllib2.Request', 'Request', (["(url + '?' + params)"], {}), "(url + '?' + params)\n", (8442, 8462), False, 'from urllib2 import Request, urlopen, HTTPError\n'), ((1357, 1390), 'urllib.unquote', 'unquote', (["response.GET['nickname']"], {}), "(response.GET['nickname'])\n", (1364, 1390), False, 'from urllib import urlencode, unquote\n'), ((3236, 3267), 'django.template.loader.get_template', 'loader.get_template', (['LOCAL_HTML'], {}), '(LOCAL_HTML)\n', (3255, 3267), False, 'from django.template import RequestContext, loader\n'), ((3286, 3320), 'django.template.RequestContext', 'RequestContext', (['self.request', 'dict'], {}), '(self.request, dict)\n', (3300, 3320), False, 'from django.template import RequestContext, loader\n'), ((1537, 1572), 'urllib.unquote', 'unquote', (["response.GET['first_name']"], {}), "(response.GET['first_name'])\n", (1544, 1572), False, 'from urllib import urlencode, unquote\n'), ((1587, 1621), 'urllib.unquote', 'unquote', (["response.GET['last_name']"], {}), "(response.GET['last_name'])\n", (1594, 1621), False, 'from urllib import urlencode, unquote\n'), ((2272, 2290), 'urllib.unquote', 'unquote', (['user_name'], {}), '(user_name)\n', (2279, 2290), False, 'from urllib import urlencode, unquote\n'), ((2567, 2596), 'urllib.unquote', 'unquote', (["details['last_name']"], {}), "(details['last_name'])\n", (2574, 2596), False, 'from urllib import urlencode, unquote\n'), ((2669, 2699), 'urllib.unquote', 'unquote', (["details['first_name']"], {}), "(details['first_name'])\n", (2676, 2699), False, 'from urllib import urlencode, unquote\n'), ((4262, 4291), 'django.contrib.auth.authenticate', 'authenticate', (['*args'], {}), '(*args, **kwargs)\n', (4274, 4291), False, 'from django.contrib.auth import authenticate\n'), ((7390, 7454), 'django.contrib.auth.authenticate', 'authenticate', ([], {}), 
"(**{'response': data, self.AUTH_BACKEND.name: True})\n", (7402, 7454), False, 'from django.contrib.auth import authenticate\n'), ((3928, 3974), 'hashlib.md5', 'md5', (['(check_str + settings.VKONTAKTE_APP_SECRET)'], {}), '(check_str + settings.VKONTAKTE_APP_SECRET)\n', (3931, 3974), False, 'from hashlib import md5\n'), ((4058, 4064), 'time.time', 'time', ([], {}), '()\n', (4062, 4064), False, 'from time import time\n'), ((8504, 8524), 'urllib2.urlopen', 'urlopen', (['api_request'], {}), '(api_request)\n', (8511, 8524), False, 'from urllib2 import Request, urlopen, HTTPError\n')]
#----------------------------------------------------------------------------- # Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved. # # Powered by the Bokeh Development Team. # # The full license is in the file LICENSE.txt, distributed with this software. #----------------------------------------------------------------------------- ''' Provide utilities for formatting terminal output. ''' #----------------------------------------------------------------------------- # Boilerplate #----------------------------------------------------------------------------- from __future__ import absolute_import, division, print_function, unicode_literals import logging log = logging.getLogger(__name__) #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Standard library imports import sys # External imports # Bokeh imports #----------------------------------------------------------------------------- # General API #----------------------------------------------------------------------------- # provide fallbacks for highlights in case colorama is not installed try: import colorama from colorama import Fore, Style def bright(text): return "%s%s%s" % (Style.BRIGHT, text, Style.RESET_ALL) def dim(text): return "%s%s%s" % (Style.DIM, text, Style.RESET_ALL) def red(text): return "%s%s%s" % (Fore.RED, text, Style.RESET_ALL) def green(text): return "%s%s%s" % (Fore.GREEN, text, Style.RESET_ALL) def white(text): return "%s%s%s%s" % (Fore.WHITE, Style.BRIGHT, text, Style.RESET_ALL) def yellow(text): return "%s%s%s" % (Fore.YELLOW, text, Style.RESET_ALL) sys.platform == "win32" and colorama.init() except ImportError: def bright(text): return text def dim(text): return text def red(text): return text def green(text): return text def white(text): return text def yellow(text): return text def trace(*values, **kwargs): pass def write(*values, **kwargs): end = 
kwargs.get('end', '\n') print(*values, end=end) def fail(msg=None, label="FAIL"): msg = " " + msg if msg is not None else "" write("%s%s" % (red("[%s]" % label), msg)) def info(msg=None, label="INFO"): msg = " " + msg if msg is not None else "" write("%s%s" % (white("[%s]" % label), msg)) def ok(msg=None, label="OK"): msg = " " + msg if msg is not None else "" write("%s%s" % (green("[%s]" % label), msg)) def warn(msg=None, label="WARN"): msg = " " + msg if msg is not None else "" write("%s%s" % (yellow("[%s]" % label), msg)) #----------------------------------------------------------------------------- # Dev API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Private API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Code #-----------------------------------------------------------------------------
[ "logging.getLogger", "colorama.init" ]
[((684, 711), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (701, 711), False, 'import logging\n'), ((1787, 1802), 'colorama.init', 'colorama.init', ([], {}), '()\n', (1800, 1802), False, 'import colorama\n')]
# Project: SwarmAggregation # Filename: exp.py # Authors: <NAME> (<EMAIL>) and <NAME> # (<EMAIL>). """ exp: A flexible, unifying framework for defining and running experiments for swarm aggregation. """ import argparse from aggregation import aggregation, ideal from itertools import product from math import sin, cos, hypot, ceil from matplotlib.animation import FFMpegWriter, ArtistAnimation import matplotlib.cm as cm from matplotlib.collections import LineCollection, PatchCollection, PolyCollection import matplotlib.pyplot as plt from metrics import * import numpy as np import pickle from tqdm import tqdm class Experiment(object): """ A flexible, unifying framework for experiments. """ def __init__(self, id, params={}, iters=1, savehist=True, seed=None): """ Inputs: - id (str): identifier for the experiment - params (dict): the full parameter set for the simulation runs { 'N' : [int > 0] number of robots, 'R' : [float > 0] radius of rotation (m), 'r' : [float > 0] radius of a robot (m), 'm' : [float > 0] mass of a robot (kg), 'w0' : [float] rot. speed of a robot about its center (rad/s), 'w1' : [float] rot. speed of a robot in place (rad/s), 'sensor' : [0 <= float <= pi] size of the sight sensor (rad), 'noise' : [(str, float)] either ('err', p) for error probability with probability p or ('mot', f) for motion noise with maximum force f (N), 'time' : [float > 0] wall-clock duration of simulation (s), 'step' : [float > 0] wall-clock duration of a time step (s), 'stop' : [float >= 0] if not None, simulation stops if system's dispersion is within stop% of the ideal value, 'init' : ['rand', 'symm'] initialization mode } - iters (int): the number of iterated runs for each parameter setting - savehist (bool): True if a run's history should be saved - seed (int): random seed """ # Unpack singular parameters. self.id, self.iters, self.savehist, self.seed, = id, iters, savehist, seed # Unpack aggregation parameters. 
defaults = {'N' : [100], 'R' : [0.1445], 'r' : [0.037], 'm' : [0.125], \ 'w0' : [-0.75], 'w1' : [-5.02], 'sensor' : [0], \ 'noise' : [('err', 0)], 'time' : [300], 'step' : [0.005], \ 'stop' : [None], 'init' : ['rand']} plist = [params[p] if p in params else defaults[p] for p in defaults] self.params = list(product(*plist)) # Set up data and results filenames. self.fname = 'exp_{}_{}'.format(self.id, self.seed) # Instantiate a list to hold runs data. This data will have shape # A x B x [S x N x 3, 1] where A is the number of runs (i.e., unique # parameter combinations), B is the number of iterations per run, S is # the number of time steps simulated, N is the number of robots, and 3 # represents each robot's X/Y/Theta data. self.runs_data = [[] for p in self.params] def run(self): """ Run this experiment according to the input parameters. """ tqdm.write('Running Experiment ' + self.id + '...') # Set up random seeds for iterated runs. rng = np.random.default_rng(self.seed) run_seeds = rng.integers(0, 2**32, size=self.iters) # For each parameter combination, do iterated runs of aggregation. silent = len(self.params) > 1 or self.iters > 1 for i, param in enumerate(tqdm(self.params, desc='Simulating runs')): N, R, r, m, w0, w1, sensor, noise, time, step, stop, init = param for seed in tqdm(run_seeds, desc='Iterating run', \ leave=bool(i == len(self.params) - 1)): run_data = aggregation(N, R, r, m, w0, w1, sensor, noise, time,\ step, stop, init, seed, silent) if not self.savehist: # Only save the final configuration. history, final = run_data self.runs_data[i].append((np.copy(history[final-1]), final)) else: # Save the entire configuration history. self.runs_data[i].append(run_data) def save(self): """ Saves this experiment, including all parameters and run data, to a file named according to the experiment's ID and seed. 
""" tqdm.write('Saving Experiment ' + self.id + '...') with open('data/' + self.fname + '.pkl', 'wb') as f: pickle.dump(self, f) def plot_evo(self, runs, iters, metrics=['sed', 'hull', 'disp', 'clus'], \ labels=None, title='', anno=''): """ Takes indices of either (i) one run and multiple iterations or (ii) one iteration of multiple runs and plots the given metrics against time. """ tqdm.write('Plotting metrics over time...') # Sanity checks and setup. Assumes N, r, time, and step are static. assert self.savehist, 'ERROR: No history to calculate metrics per step' assert len(runs) == 1 or len(iters) == 1, 'ERROR: One run or one iter' runits = [i for i in product(runs, iters)] # Set up colors. cmap = np.vectorize(lambda x : cm.inferno(x)) c = np.array(cmap(np.linspace(0, 1, len(runits) + 2))).T # Plot metrics over time for each run/iteration. names = {'sed' : 'Smallest Enclosing Disc Circumference', \ 'hull' : 'Convex Hull Perimeter', \ 'disp' : 'Dispersion', \ 'clus' : 'Cluster Fraction'} for metric in metrics: fig, ax = plt.subplots() for i, runit in enumerate(tqdm(runits)): # Plot the given metric over time. N, r, time, step = [self.params[runit[0]][j] for j in [0,2,8,9]] configs, final = self.runs_data[runit[0]][runit[1]] x = np.arange(0, time + step, step)[:final] y = [] for config in tqdm(configs, desc='Calculating '+names[metric]): if metric == 'sed': y.append(sed_circumference(config)) elif metric == 'hull': y.append(hull_perimeter(config)) elif metric == 'disp': y.append(dispersion(config)) else: # metric == 'clus' y.append(cluster_fraction(config, r)) if labels != None: ax.plot(x, y, color=c[i+1], label=labels[i], zorder=4) else: ax.plot(x, y, color=c[i+1], zorder=4) # Plot the minimum value for this metric as a dashed line. 
if metric == 'sed': metric_min = sed_circumference(ideal(N, r)) elif metric == 'hull': metric_min = hull_perimeter(ideal(N, r)) elif metric == 'disp': metric_min = dispersion(ideal(N, r)) else: # metric == 'clus' metric_min = cluster_fraction(ideal(N, r), r) ax.plot(x, np.full(len(x), metric_min), color=c[i+1], \ linestyle='dashed', zorder=3) # Save figure. ax.set(title=title, xlabel='Time (s)', ylabel=names[metric]) ax.set_ylim(bottom=0) ax.grid() if labels != None: ax.legend(loc='upper right') plt.tight_layout() fig.savefig('figs/' + self.fname + '_' + metric + anno + '.png', \ dpi=300) plt.close() def plot_aggtime(self, N, ps, plabel, title='', anno=''): """ Plots final and average time to aggregation per parameter value per number of robots. Assumes that the only parameters that are varied are the number of robots (N) and one non-time related parameter. """ tqdm.write('Plotting average time to aggregation...') # Set up figure and colors. fig, ax = plt.subplots() cmap = np.vectorize(lambda x : cm.inferno(x)) c = np.array(cmap(np.linspace(0, 1, len(N) + 2))).T # Plot simulation time cutoff as a dashed line. time, step = self.params[0][8], self.params[0][9] ax.plot(ps, np.full(len(ps), time), color='k', linestyle='dashed') # Plot iteration times as a scatter plot and averages as lines. for i, ni in enumerate(N): xs, ys, aves = [], [], [] for j, run in enumerate(self.runs_data[i*len(ps):(i+1)*len(ps)]): agg_times = [] for iter in run: xs.append(ps[j]) agg_times.append(iter[1] * step) ys += agg_times aves.append(np.mean(agg_times)) ax.scatter(xs, ys, color=c[i+1], s=15, alpha=0.4) ax.plot(ps, aves, color=c[i+1], label='{} robots'.format(ni)) # Save figure. 
ax.set(title=title, xlabel=plabel, ylabel='Aggregation Time (s)') ax.set_ylim(bottom=0) ax.grid() ax.legend(loc='upper left') plt.tight_layout() fig.savefig('figs/' + self.fname + '_aggtime' + anno + '.png', dpi=300) plt.close() def animate(self, run, iter, frame=25, anno=''): """ Animate the robots' movement over time. """ tqdm.write('Animating robots\' movement...') # Check that a configuration history exists. assert self.savehist, 'ERROR: No history to animate' # Check that the desired frame rate is valid. assert frame > 0, 'ERROR: Frame rate must be positive value' # Get data and parameters. configs, final = self.runs_data[run][iter] N, r, sensor, time, step = [self.params[run][i] for i in [0,2,6,8,9]] # Set up plot. fig, ax = plt.subplots(figsize=(5,5), dpi=300) all_xy = configs[:,:,:2].flatten() fig_min, fig_max = np.min(all_xy) - r, np.max(all_xy) + r ax.set(xlim=[fig_min, fig_max], ylim=[fig_min, fig_max]) # Set up colors for the various robots. cmap = np.vectorize(lambda x : cm.inferno(x)) c = np.array(cmap(np.linspace(0, 0.9, N))).T # Set up frame rate to target at most 'frame' fps in real time. frame_step = 1 if step >= 1 / frame else ceil(1 / frame / step) interval = (step * frame_step) * 1000 # ms ims = [] max_dist = hypot(*np.full(2, fig_max-fig_min)) for s in tqdm(np.arange(0, min(len(configs), final), frame_step)): title = plt.text(1.0, 1.02, '{:.2f}s of {}s'.format(s*step, time), \ ha='right', va='bottom', transform=ax.transAxes) robots, lines, cones = [], [], [] for i in range(N): xy, theta = configs[s][i][:2], configs[s][i][2] sensor_xy = xy + np.array([r * cos(theta), r * sin(theta)]) # Add this robot's circle artist. robots.append(plt.Circle(xy, radius=r, linewidth=0, color=c[i])) # Add this robot's sight sensor direction artist. vec = max_dist * np.array([cos(theta), sin(theta)]) lines.append([sensor_xy, sensor_xy + vec]) # Add this robot's cone-of-sight polygon artist. 
if sensor > 0: cw, ccw = theta - sensor / 2, theta + sensor / 2 vec_cw = max_dist * np.array([cos(cw), sin(cw)]) vec_ccw = max_dist * np.array([cos(ccw), sin(ccw)]) tri_pts = [sensor_xy, sensor_xy+vec_cw, sensor_xy+vec_ccw] cones.append(plt.Polygon(tri_pts, color=c[i], alpha=0.15)) # Add this step's artists to the list of artists. robots = PatchCollection(robots, match_original=True, zorder=3) lines = LineCollection(lines, linewidths=0.5, colors=c, alpha=0.75,\ zorder=2) cones = PatchCollection(cones, match_original=True, zorder=1) ims.append([title, ax.add_collection(robots), \ ax.add_collection(lines), ax.add_collection(cones)]) # Animate. ani = ArtistAnimation(fig, ims, interval=interval, blit=True) ani.save('anis/' + self.fname + '_ani' + anno + '.mp4') plt.close() def load_exp(fname): """ Load an experiment from the specified file. """ with open(fname, 'rb') as f: exp = pickle.load(f) return exp ### DATA EXPERIMENTS ### def exp_base(seed=None): """ With default parameters, investigate aggregation over time. """ params = {} # This uses all default values. exp = Experiment('base', params, seed=seed) exp.run() exp.save() exp.plot_evo(runs=[0], iters=[0]) exp.animate(run=0, iter=0) def exp_symm(seed=None): """ With default parameters and symmetric initialization, investigate aggregation over time for a few system sizes. """ N = [3, 5, 10] params = {'N' : N, 'init' : ['symm']} exp = Experiment('symm', params, seed=seed) exp.run() exp.save() exp.plot_evo(runs=np.arange(len(exp.params)), iters=[0], metrics=['disp'], \ labels=['{} robots'.format(i) for i in N], \ title='Symmetric Initial Configuration') def exp_errprob(seed=None): """ With default parameters and a range of error probabilities, investigate average time to aggregation with a 15% stopping condition. 
""" N = [10, 25, 50, 100] errprob = np.arange(0, 0.501, 0.0125) params = {'N' : N, 'noise' : [('err', p) for p in errprob], 'stop' : [0.15]} exp = Experiment('errprob', params, iters=25, savehist=False, seed=seed) exp.run() exp.save() exp.plot_aggtime(N, errprob, 'Error Probability') def exp_motion(seed=None): """ With default parameters and a range of motion noise strengths, investigate average time to aggregation with a 15% stopping condition. """ N = [10, 25, 50, 100] fmax = np.arange(0, 40.1, 1.25) params = {'N' : N, 'noise' : [('mot', f) for f in fmax], 'stop' : [0.15]} exp = Experiment('motion', params, iters=25, savehist=False, seed=seed) exp.run() exp.save() exp.plot_aggtime(N, fmax, 'Max. Noise Force (N)') def exp_cone(seed=None): """ With default parameters and a range of sight sensor sizes, investigate average time to aggregation with a 15% stopping condition. """ N = [10, 25, 50, 100] sensor = np.arange(0, np.pi, 0.1) params = {'N' : N, 'sensor' : sensor, 'stop' : [0.15]} exp = Experiment('cone', params, iters=25, savehist=False, seed=seed) exp.run() exp.save() exp.plot_aggtime(N, sensor, 'Sight Sensor Size (rad)') ### CALIBRATION EXPERIMENTS ### def exp_step(seed=None): """ With default parameters and a range of time step durations, investigate aggregation over time. """ step = [0.0005, 0.001, 0.005, 0.01, 0.025] params = {'N' : [50], 'time' : [120], 'step' : step} exp = Experiment('step', params, seed=seed) exp.run() exp.save() exp.plot_evo(runs=np.arange(len(exp.params)), iters=[0], metrics=['disp'], \ labels=['{}s'.format(i) for i in step]) if __name__ == '__main__': # Parse command line arguments. parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('-E', '--exps', type=str, nargs='+', required=True, \ help='IDs of experiments to run') parser.add_argument('-R', '--rand_seed', type=int, default=None, \ help='Seed for random number generation') args = parser.parse_args() # Run selected experiments. 
exps = {'base' : exp_base, 'symm' : exp_symm, 'errprob' : exp_errprob, \ 'motion' : exp_motion, 'cone' : exp_cone, 'step' : exp_step} for id in args.exps: exps[id](args.rand_seed)
[ "numpy.random.default_rng", "matplotlib.pyplot.Polygon", "matplotlib.collections.LineCollection", "math.cos", "aggregation.aggregation", "aggregation.ideal", "numpy.arange", "numpy.mean", "argparse.ArgumentParser", "tqdm.tqdm.write", "itertools.product", "numpy.max", "matplotlib.pyplot.close...
[((14036, 14063), 'numpy.arange', 'np.arange', (['(0)', '(0.501)', '(0.0125)'], {}), '(0, 0.501, 0.0125)\n', (14045, 14063), True, 'import numpy as np\n'), ((14529, 14553), 'numpy.arange', 'np.arange', (['(0)', '(40.1)', '(1.25)'], {}), '(0, 40.1, 1.25)\n', (14538, 14553), True, 'import numpy as np\n'), ((15011, 15035), 'numpy.arange', 'np.arange', (['(0)', 'np.pi', '(0.1)'], {}), '(0, np.pi, 0.1)\n', (15020, 15035), True, 'import numpy as np\n'), ((15833, 15877), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (15856, 15877), False, 'import argparse\n'), ((3384, 3435), 'tqdm.tqdm.write', 'tqdm.write', (["('Running Experiment ' + self.id + '...')"], {}), "('Running Experiment ' + self.id + '...')\n", (3394, 3435), False, 'from tqdm import tqdm\n'), ((3500, 3532), 'numpy.random.default_rng', 'np.random.default_rng', (['self.seed'], {}), '(self.seed)\n', (3521, 3532), True, 'import numpy as np\n'), ((4717, 4767), 'tqdm.tqdm.write', 'tqdm.write', (["('Saving Experiment ' + self.id + '...')"], {}), "('Saving Experiment ' + self.id + '...')\n", (4727, 4767), False, 'from tqdm import tqdm\n'), ((5182, 5225), 'tqdm.tqdm.write', 'tqdm.write', (['"""Plotting metrics over time..."""'], {}), "('Plotting metrics over time...')\n", (5192, 5225), False, 'from tqdm import tqdm\n'), ((8330, 8383), 'tqdm.tqdm.write', 'tqdm.write', (['"""Plotting average time to aggregation..."""'], {}), "('Plotting average time to aggregation...')\n", (8340, 8383), False, 'from tqdm import tqdm\n'), ((8439, 8453), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (8451, 8453), True, 'import matplotlib.pyplot as plt\n'), ((9542, 9560), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (9558, 9560), True, 'import matplotlib.pyplot as plt\n'), ((9649, 9660), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (9658, 9660), True, 'import matplotlib.pyplot as plt\n'), ((9796, 9839), 
'tqdm.tqdm.write', 'tqdm.write', (['"""Animating robots\' movement..."""'], {}), '("Animating robots\' movement...")\n', (9806, 9839), False, 'from tqdm import tqdm\n'), ((10287, 10324), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(5, 5)', 'dpi': '(300)'}), '(figsize=(5, 5), dpi=300)\n', (10299, 10324), True, 'import matplotlib.pyplot as plt\n'), ((12676, 12731), 'matplotlib.animation.ArtistAnimation', 'ArtistAnimation', (['fig', 'ims'], {'interval': 'interval', 'blit': '(True)'}), '(fig, ims, interval=interval, blit=True)\n', (12691, 12731), False, 'from matplotlib.animation import FFMpegWriter, ArtistAnimation\n'), ((12804, 12815), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (12813, 12815), True, 'import matplotlib.pyplot as plt\n'), ((12950, 12964), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (12961, 12964), False, 'import pickle\n'), ((2734, 2749), 'itertools.product', 'product', (['*plist'], {}), '(*plist)\n', (2741, 2749), False, 'from itertools import product\n'), ((3759, 3800), 'tqdm.tqdm', 'tqdm', (['self.params'], {'desc': '"""Simulating runs"""'}), "(self.params, desc='Simulating runs')\n", (3763, 3800), False, 'from tqdm import tqdm\n'), ((4841, 4861), 'pickle.dump', 'pickle.dump', (['self', 'f'], {}), '(self, f)\n', (4852, 4861), False, 'import pickle\n'), ((5979, 5993), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (5991, 5993), True, 'import matplotlib.pyplot as plt\n'), ((7855, 7873), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (7871, 7873), True, 'import matplotlib.pyplot as plt\n'), ((7998, 8009), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (8007, 8009), True, 'import matplotlib.pyplot as plt\n'), ((10776, 10798), 'math.ceil', 'ceil', (['(1 / frame / step)'], {}), '(1 / frame / step)\n', (10780, 10798), False, 'from math import sin, cos, hypot, ceil\n'), ((12250, 12304), 'matplotlib.collections.PatchCollection', 'PatchCollection', (['robots'], 
{'match_original': '(True)', 'zorder': '(3)'}), '(robots, match_original=True, zorder=3)\n', (12265, 12304), False, 'from matplotlib.collections import LineCollection, PatchCollection, PolyCollection\n'), ((12325, 12394), 'matplotlib.collections.LineCollection', 'LineCollection', (['lines'], {'linewidths': '(0.5)', 'colors': 'c', 'alpha': '(0.75)', 'zorder': '(2)'}), '(lines, linewidths=0.5, colors=c, alpha=0.75, zorder=2)\n', (12339, 12394), False, 'from matplotlib.collections import LineCollection, PatchCollection, PolyCollection\n'), ((12451, 12504), 'matplotlib.collections.PatchCollection', 'PatchCollection', (['cones'], {'match_original': '(True)', 'zorder': '(1)'}), '(cones, match_original=True, zorder=1)\n', (12466, 12504), False, 'from matplotlib.collections import LineCollection, PatchCollection, PolyCollection\n'), ((4041, 4129), 'aggregation.aggregation', 'aggregation', (['N', 'R', 'r', 'm', 'w0', 'w1', 'sensor', 'noise', 'time', 'step', 'stop', 'init', 'seed', 'silent'], {}), '(N, R, r, m, w0, w1, sensor, noise, time, step, stop, init, seed,\n silent)\n', (4052, 4129), False, 'from aggregation import aggregation, ideal\n'), ((5491, 5511), 'itertools.product', 'product', (['runs', 'iters'], {}), '(runs, iters)\n', (5498, 5511), False, 'from itertools import product\n'), ((5578, 5591), 'matplotlib.cm.inferno', 'cm.inferno', (['x'], {}), '(x)\n', (5588, 5591), True, 'import matplotlib.cm as cm\n'), ((6032, 6044), 'tqdm.tqdm', 'tqdm', (['runits'], {}), '(runits)\n', (6036, 6044), False, 'from tqdm import tqdm\n'), ((6360, 6410), 'tqdm.tqdm', 'tqdm', (['configs'], {'desc': "('Calculating ' + names[metric])"}), "(configs, desc='Calculating ' + names[metric])\n", (6364, 6410), False, 'from tqdm import tqdm\n'), ((8493, 8506), 'matplotlib.cm.inferno', 'cm.inferno', (['x'], {}), '(x)\n', (8503, 8506), True, 'import matplotlib.cm as cm\n'), ((10394, 10408), 'numpy.min', 'np.min', (['all_xy'], {}), '(all_xy)\n', (10400, 10408), True, 'import numpy as np\n'), 
((10414, 10428), 'numpy.max', 'np.max', (['all_xy'], {}), '(all_xy)\n', (10420, 10428), True, 'import numpy as np\n'), ((10586, 10599), 'matplotlib.cm.inferno', 'cm.inferno', (['x'], {}), '(x)\n', (10596, 10599), True, 'import matplotlib.cm as cm\n'), ((10895, 10924), 'numpy.full', 'np.full', (['(2)', '(fig_max - fig_min)'], {}), '(2, fig_max - fig_min)\n', (10902, 10924), True, 'import numpy as np\n'), ((6267, 6298), 'numpy.arange', 'np.arange', (['(0)', '(time + step)', 'step'], {}), '(0, time + step, step)\n', (6276, 6298), True, 'import numpy as np\n'), ((9196, 9214), 'numpy.mean', 'np.mean', (['agg_times'], {}), '(agg_times)\n', (9203, 9214), True, 'import numpy as np\n'), ((10627, 10649), 'numpy.linspace', 'np.linspace', (['(0)', '(0.9)', 'N'], {}), '(0, 0.9, N)\n', (10638, 10649), True, 'import numpy as np\n'), ((11456, 11505), 'matplotlib.pyplot.Circle', 'plt.Circle', (['xy'], {'radius': 'r', 'linewidth': '(0)', 'color': 'c[i]'}), '(xy, radius=r, linewidth=0, color=c[i])\n', (11466, 11505), True, 'import matplotlib.pyplot as plt\n'), ((7167, 7178), 'aggregation.ideal', 'ideal', (['N', 'r'], {}), '(N, r)\n', (7172, 7178), False, 'from aggregation import aggregation, ideal\n'), ((12120, 12164), 'matplotlib.pyplot.Polygon', 'plt.Polygon', (['tri_pts'], {'color': 'c[i]', 'alpha': '(0.15)'}), '(tri_pts, color=c[i], alpha=0.15)\n', (12131, 12164), True, 'import matplotlib.pyplot as plt\n'), ((4353, 4380), 'numpy.copy', 'np.copy', (['history[final - 1]'], {}), '(history[final - 1])\n', (4360, 4380), True, 'import numpy as np\n'), ((7267, 7278), 'aggregation.ideal', 'ideal', (['N', 'r'], {}), '(N, r)\n', (7272, 7278), False, 'from aggregation import aggregation, ideal\n'), ((11617, 11627), 'math.cos', 'cos', (['theta'], {}), '(theta)\n', (11620, 11627), False, 'from math import sin, cos, hypot, ceil\n'), ((11629, 11639), 'math.sin', 'sin', (['theta'], {}), '(theta)\n', (11632, 11639), False, 'from math import sin, cos, hypot, ceil\n'), ((7363, 7374), 
'aggregation.ideal', 'ideal', (['N', 'r'], {}), '(N, r)\n', (7368, 7374), False, 'from aggregation import aggregation, ideal\n'), ((7468, 7479), 'aggregation.ideal', 'ideal', (['N', 'r'], {}), '(N, r)\n', (7473, 7479), False, 'from aggregation import aggregation, ideal\n'), ((11346, 11356), 'math.cos', 'cos', (['theta'], {}), '(theta)\n', (11349, 11356), False, 'from math import sin, cos, hypot, ceil\n'), ((11362, 11372), 'math.sin', 'sin', (['theta'], {}), '(theta)\n', (11365, 11372), False, 'from math import sin, cos, hypot, ceil\n'), ((11917, 11924), 'math.cos', 'cos', (['cw'], {}), '(cw)\n', (11920, 11924), False, 'from math import sin, cos, hypot, ceil\n'), ((11926, 11933), 'math.sin', 'sin', (['cw'], {}), '(cw)\n', (11929, 11933), False, 'from math import sin, cos, hypot, ceil\n'), ((11987, 11995), 'math.cos', 'cos', (['ccw'], {}), '(ccw)\n', (11990, 11995), False, 'from math import sin, cos, hypot, ceil\n'), ((11997, 12005), 'math.sin', 'sin', (['ccw'], {}), '(ccw)\n', (12000, 12005), False, 'from math import sin, cos, hypot, ceil\n')]
import rpyc from Crypto.Signature import pkcs1_15 from Crypto.Hash import SHA256 from Crypto.PublicKey import RSA ############# ## KLIJENT ## ############# def generiraj_kljuceve(): key = RSA.generate(2048) #stvaranje i spremanje privatnog ključa u datoteku file_out = open("private_key.pem", "wb") file_out.write(key.export_key()) file_out.close() #stvaranje i spremanje javnog ključa u datoteku file_out = open("public_key.pem", "wb") file_out.write(key.publickey().export_key()) file_out.close() return True flag = True try: #klijent iz prethodno stvorenih datoteka učitava svoj javni i privatni ključ prKey = RSA.import_key(open('private_key.pem').read()) puKey = RSA.import_key(open('public_key.pem').read()) except FileNotFoundError: #ukoliko datoteke s ključevima nisu pronađene, ide se u stvaranje novih print("Nije pronađena adresa pridružena klijentu!") odabir = input("Generirati novu adresu?[D/N]: ") odabir = odabir.lower() if odabir == 'd': if generiraj_kljuceve(): print("Stvaranje ključeva uspjelo") prKey = RSA.import_key(open('private_key.pem').read()) puKey = RSA.import_key(open('public_key.pem').read()) else: print('Prekid programa!') flag=False if flag: c = rpyc.connect("127.0.0.1", 25555) #nakon povezivanja sa serverom, ide se u petlju korisničnog sučelja while True: opcija = int(input( """ 1-Pošaljite transakciju na odabranu adresu 2-Provjerite stanje svoje adrese 3-Provjerite stanje tuđe adrese 4-Prijavi svoju adresu na mrežu 5-Odustani Odabir[1-5]: """)) if opcija == 1: ############################################### #implementirati unos odredišne adrese i iznosa# #-> korisnika se pita da unese ta 2 podatka # ############################################### adresa_primatelja = input('Unesite adresu primatelja: ') iznos = input('Unesite iznos transakcije: ') #message sadrži string s informacijama o transakciji u obliku: #adresa_pošiljatelja#adresa_primatelja#iznos #znak # je graničnik između pojedinih vrijednosti adresa_posiljatelja = str(puKey.n) 
################################################################## #sastaviti string koji će se poslati serveru prema gornjem opisu # #spremiti ga u varijablu message # ################################################################## message = '#'.join([adresa_primatelja, adresa_posiljatelja, iznos]) #hakirani sustav #message = '#'.join([adresa_primatelja, adresa_posiljatelja, iznos]) #prije izrade signature-a moramo regularan string pretvoriti u byte string message = message.encode() #izrađujemo hash kod poruke h = SHA256.new(message) #hash kod kriptiramo privatnim ključem klijenta i tako dobijemo signature. #server može dekriptirati signature pomoću javnog ključa klijenta i tako dobiti hash kod iz njega #server može odrediti javni ključ klijenta na temelju njegove adrese signature = pkcs1_15.new(prKey).sign(h) print(c.root.transakcija(message,signature)) #gornja linija je slanje transakcije sa dig. potpisom dok je donja bez potpisa ##print(c.root.transakcija(message)) elif opcija == 2: print('Adresa: ') print(str(puKey.n)) print('Stanje: ') #šaljemo adresu klijenta #adresa se iz javnog ključa uzima pozivom atributa n #adresa se vraća kao integer pa ga treba pretvoriti u string print(c.root.provjeri_adresu(str(puKey.n))) elif opcija == 3: add = str(input('Unesi adresu za provjeru: ')) print('Stanje: ') print(c.root.provjeri_adresu(add)) elif opcija == 4: print(c.root.registriraj_adresu(str(puKey.n))) else: break
[ "Crypto.Hash.SHA256.new", "rpyc.connect", "Crypto.PublicKey.RSA.generate", "Crypto.Signature.pkcs1_15.new" ]
[((194, 212), 'Crypto.PublicKey.RSA.generate', 'RSA.generate', (['(2048)'], {}), '(2048)\n', (206, 212), False, 'from Crypto.PublicKey import RSA\n'), ((1330, 1362), 'rpyc.connect', 'rpyc.connect', (['"""127.0.0.1"""', '(25555)'], {}), "('127.0.0.1', 25555)\n", (1342, 1362), False, 'import rpyc\n'), ((3037, 3056), 'Crypto.Hash.SHA256.new', 'SHA256.new', (['message'], {}), '(message)\n', (3047, 3056), False, 'from Crypto.Hash import SHA256\n'), ((3360, 3379), 'Crypto.Signature.pkcs1_15.new', 'pkcs1_15.new', (['prKey'], {}), '(prKey)\n', (3372, 3379), False, 'from Crypto.Signature import pkcs1_15\n')]
from importlib import import_module from django.conf import settings from django.db import models from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext from django.template import loader from django.utils.text import slugify from django.utils import timezone from reversion.models import Version from reversion import revisions as reversion from ecs.documents.models import Document from ecs.utils.viewutils import render_pdf_context from ecs.notifications.constants import SAFETY_TYPE_CHOICES from ecs.notifications.managers import NotificationManager from ecs.authorization.managers import AuthorizationManager class NotificationType(models.Model): name = models.CharField(max_length=80, unique=True) form = models.CharField(max_length=80, default='ecs.notifications.forms.NotificationForm') default_response = models.TextField(blank=True) position = models.IntegerField(default=0) includes_diff = models.BooleanField(default=False) grants_vote_extension = models.BooleanField(default=False) finishes_study = models.BooleanField(default=False) is_rejectable = models.BooleanField(default=False) @property def form_cls(self): if not hasattr(self, '_form_cls'): module, cls_name = self.form.rsplit('.', 1) self._form_cls = getattr(import_module(module), cls_name) return self._form_cls def get_template(self, pattern): template_names = [pattern % name for name in (self.form_cls.__name__, 'base')] return loader.select_template(template_names) def __str__(self): return self.name class DiffNotification(models.Model): old_submission_form = models.ForeignKey('core.SubmissionForm', related_name="old_for_notification") new_submission_form = models.ForeignKey('core.SubmissionForm', related_name="new_for_notification") class Meta: abstract = True def save(self, **kwargs): super().save() self.submission_forms = [self.old_submission_form] self.new_submission_form.is_transient = False self.new_submission_form.save(update_fields=('is_transient',)) def 
apply(self): new_sf = self.new_submission_form if not self.new_submission_form.is_current and self.old_submission_form.is_current: new_sf.acknowledge(True) new_sf.mark_current() return True else: return False def get_diff(self, plainhtml=False): from ecs.core.diff import diff_submission_forms return diff_submission_forms(self.old_submission_form, self.new_submission_form).html(plain=plainhtml) class Notification(models.Model): type = models.ForeignKey(NotificationType, null=True, related_name='notifications') submission_forms = models.ManyToManyField('core.SubmissionForm', related_name='notifications') documents = models.ManyToManyField('documents.Document', related_name='notifications') pdf_document = models.OneToOneField(Document, related_name='_notification', null=True) comments = models.TextField() timestamp = models.DateTimeField(auto_now_add=True) user = models.ForeignKey('auth.User', null=True) objects = NotificationManager() unfiltered = models.Manager() def __str__(self): return '{} für {}'.format( self.short_name, ' + '.join(str(sf.submission) for sf in self.submission_forms.all()) ) @property def short_name(self): sn = getattr(self, 'safetynotification', None) if sn: return sn.get_safety_type_display() return self.type.name @property def is_rejected(self): try: return self.answer.is_rejected except NotificationAnswer.DoesNotExist: return None def get_submission_form(self): if self.submission_forms.exists(): return self.submission_forms.all()[0] return None def get_submission(self): sf = self.get_submission_form() if sf: return sf.submission return None def get_filename(self, suffix='.pdf'): ec_num = '_'.join( str(num) for num in self.submission_forms .order_by('submission__ec_number') .distinct() .values_list('submission__ec_number', flat=True) ) base = '{}-{}'.format(slugify(ec_num), slugify(self.type.name)) return base[:(250 - len(suffix))] + suffix def render_pdf(self): tpl = self.type.get_template('notifications/pdf/%s.html') submission_forms = 
self.submission_forms.select_related('submission') return render_pdf_context(tpl, { 'notification': self, 'submission_forms': submission_forms, 'documents': self.documents.order_by('doctype__identifier', 'date', 'name'), }) def render_pdf_document(self): assert self.pdf_document is None pdfdata = self.render_pdf() self.pdf_document = Document.objects.create_from_buffer(pdfdata, doctype='notification', parent_object=self, name=str(self)[:250], original_file_name=self.get_filename()) self.save() class ReportNotification(Notification): study_started = models.BooleanField(default=True) reason_for_not_started = models.TextField(null=True, blank=True) recruited_subjects = models.PositiveIntegerField(null=True, blank=False) finished_subjects = models.PositiveIntegerField(null=True, blank=False) aborted_subjects = models.PositiveIntegerField(null=True, blank=False) SAE_count = models.PositiveIntegerField(default=0, blank=False) SUSAR_count = models.PositiveIntegerField(default=0, blank=False) class Meta: abstract = True class CompletionReportNotification(ReportNotification): study_aborted = models.BooleanField(default=False) completion_date = models.DateField() class ProgressReportNotification(ReportNotification): runs_till = models.DateField(null=True, blank=True) class AmendmentNotification(DiffNotification, Notification): is_substantial = models.BooleanField(default=False) meeting = models.ForeignKey('meetings.Meeting', null=True, related_name='amendments') needs_signature = models.BooleanField(default=False) def schedule_to_meeting(self): from ecs.meetings.models import Meeting meeting = Meeting.objects.filter(started=None).order_by('start').first() self.meeting = meeting self.save() class SafetyNotification(Notification): safety_type = models.CharField(max_length=6, db_index=True, choices=SAFETY_TYPE_CHOICES, verbose_name=_('Type')) class CenterCloseNotification(Notification): investigator = models.ForeignKey('core.Investigator', related_name="closed_by_notification") 
close_date = models.DateField() @reversion.register(fields=('text',)) class NotificationAnswer(models.Model): notification = models.OneToOneField(Notification, related_name="answer") text = models.TextField() is_valid = models.BooleanField(default=True) is_final_version = models.BooleanField(default=False, verbose_name=_('Proofread')) is_rejected = models.BooleanField(default=False, verbose_name=_('rate negative')) pdf_document = models.OneToOneField(Document, related_name='_notification_answer', null=True) signed_at = models.DateTimeField(null=True) published_at = models.DateTimeField(null=True) objects = AuthorizationManager() unfiltered = models.Manager() @property def version_number(self): return Version.objects.get_for_object(self).count() def get_render_context(self): return { 'notification': self.notification, 'documents': self.notification.documents.order_by('doctype__identifier', 'date', 'name'), 'answer': self, } def render_pdf(self): notification = self.notification tpl = notification.type.get_template('notifications/answers/pdf/%s.html') return render_pdf_context(tpl, self.get_render_context()) def render_pdf_document(self): pdfdata = self.render_pdf() self.pdf_document = Document.objects.create_from_buffer(pdfdata, doctype='notification_answer', parent_object=self, name=str(self), original_file_name=self.notification.get_filename('-answer.pdf') ) self.save() def distribute(self): from ecs.core.models.submissions import Submission self.published_at = timezone.now() self.save() if not self.is_rejected and self.notification.type.includes_diff: try: notification = AmendmentNotification.objects.get(pk=self.notification.pk) notification.apply() except AmendmentNotification.DoesNotExist: assert False, "we should never get here" extend, finish = False, False if not self.is_rejected: if self.notification.type.grants_vote_extension: extend = True if self.notification.type.finishes_study: finish = True for submission in 
Submission.objects.filter(forms__in=self.notification.submission_forms.values('pk').query): if extend: for vote in submission.votes.positive().permanent(): vote.extend() if finish: submission.finish() presenting_parties = submission.current_submission_form.get_presenting_parties() _ = ugettext presenting_parties.send_message( _('New Notification Answer'), 'notifications/answers/new_message.txt', context={ 'notification': self.notification, 'answer': self, 'ABSOLUTE_URL_PREFIX': settings.ABSOLUTE_URL_PREFIX, }, submission=submission) NOTIFICATION_MODELS = ( Notification, CompletionReportNotification, ProgressReportNotification, AmendmentNotification, SafetyNotification, CenterCloseNotification, )
[ "django.db.models.Manager", "django.db.models.DateField", "django.db.models.TextField", "django.db.models.IntegerField", "ecs.core.diff.diff_submission_forms", "django.template.loader.select_template", "reversion.revisions.register", "ecs.meetings.models.Meeting.objects.filter", "ecs.notifications.m...
[((7001, 7037), 'reversion.revisions.register', 'reversion.register', ([], {'fields': "('text',)"}), "(fields=('text',))\n", (7019, 7037), True, 'from reversion import revisions as reversion\n'), ((715, 759), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(80)', 'unique': '(True)'}), '(max_length=80, unique=True)\n', (731, 759), False, 'from django.db import models\n'), ((771, 859), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(80)', 'default': '"""ecs.notifications.forms.NotificationForm"""'}), "(max_length=80, default=\n 'ecs.notifications.forms.NotificationForm')\n", (787, 859), False, 'from django.db import models\n'), ((878, 906), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (894, 906), False, 'from django.db import models\n'), ((922, 952), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (941, 952), False, 'from django.db import models\n'), ((974, 1008), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (993, 1008), False, 'from django.db import models\n'), ((1037, 1071), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1056, 1071), False, 'from django.db import models\n'), ((1093, 1127), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1112, 1127), False, 'from django.db import models\n'), ((1148, 1182), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1167, 1182), False, 'from django.db import models\n'), ((1731, 1808), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""core.SubmissionForm"""'], {'related_name': '"""old_for_notification"""'}), "('core.SubmissionForm', related_name='old_for_notification')\n", (1748, 1808), False, 'from django.db import models\n'), ((1835, 
1912), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""core.SubmissionForm"""'], {'related_name': '"""new_for_notification"""'}), "('core.SubmissionForm', related_name='new_for_notification')\n", (1852, 1912), False, 'from django.db import models\n'), ((2758, 2834), 'django.db.models.ForeignKey', 'models.ForeignKey', (['NotificationType'], {'null': '(True)', 'related_name': '"""notifications"""'}), "(NotificationType, null=True, related_name='notifications')\n", (2775, 2834), False, 'from django.db import models\n'), ((2858, 2933), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['"""core.SubmissionForm"""'], {'related_name': '"""notifications"""'}), "('core.SubmissionForm', related_name='notifications')\n", (2880, 2933), False, 'from django.db import models\n'), ((2950, 3024), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['"""documents.Document"""'], {'related_name': '"""notifications"""'}), "('documents.Document', related_name='notifications')\n", (2972, 3024), False, 'from django.db import models\n'), ((3044, 3115), 'django.db.models.OneToOneField', 'models.OneToOneField', (['Document'], {'related_name': '"""_notification"""', 'null': '(True)'}), "(Document, related_name='_notification', null=True)\n", (3064, 3115), False, 'from django.db import models\n'), ((3132, 3150), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (3148, 3150), False, 'from django.db import models\n'), ((3167, 3206), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3187, 3206), False, 'from django.db import models\n'), ((3218, 3259), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""auth.User"""'], {'null': '(True)'}), "('auth.User', null=True)\n", (3235, 3259), False, 'from django.db import models\n'), ((3279, 3300), 'ecs.notifications.managers.NotificationManager', 'NotificationManager', ([], {}), '()\n', (3298, 3300), False, 'from 
ecs.notifications.managers import NotificationManager\n'), ((3318, 3334), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (3332, 3334), False, 'from django.db import models\n'), ((5385, 5418), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (5404, 5418), False, 'from django.db import models\n'), ((5448, 5487), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (5464, 5487), False, 'from django.db import models\n'), ((5513, 5564), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'null': '(True)', 'blank': '(False)'}), '(null=True, blank=False)\n', (5540, 5564), False, 'from django.db import models\n'), ((5589, 5640), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'null': '(True)', 'blank': '(False)'}), '(null=True, blank=False)\n', (5616, 5640), False, 'from django.db import models\n'), ((5664, 5715), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'null': '(True)', 'blank': '(False)'}), '(null=True, blank=False)\n', (5691, 5715), False, 'from django.db import models\n'), ((5732, 5783), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(0)', 'blank': '(False)'}), '(default=0, blank=False)\n', (5759, 5783), False, 'from django.db import models\n'), ((5802, 5853), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(0)', 'blank': '(False)'}), '(default=0, blank=False)\n', (5829, 5853), False, 'from django.db import models\n'), ((5981, 6015), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (6000, 6015), False, 'from django.db import models\n'), ((6038, 6056), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (6054, 6056), False, 'from django.db import models\n'), ((6129, 6168), 
'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (6145, 6168), False, 'from django.db import models\n'), ((6253, 6287), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (6272, 6287), False, 'from django.db import models\n'), ((6302, 6377), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""meetings.Meeting"""'], {'null': '(True)', 'related_name': '"""amendments"""'}), "('meetings.Meeting', null=True, related_name='amendments')\n", (6319, 6377), False, 'from django.db import models\n'), ((6408, 6442), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (6427, 6442), False, 'from django.db import models\n'), ((6884, 6961), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""core.Investigator"""'], {'related_name': '"""closed_by_notification"""'}), "('core.Investigator', related_name='closed_by_notification')\n", (6901, 6961), False, 'from django.db import models\n'), ((6979, 6997), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (6995, 6997), False, 'from django.db import models\n'), ((7097, 7154), 'django.db.models.OneToOneField', 'models.OneToOneField', (['Notification'], {'related_name': '"""answer"""'}), "(Notification, related_name='answer')\n", (7117, 7154), False, 'from django.db import models\n'), ((7166, 7184), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (7182, 7184), False, 'from django.db import models\n'), ((7200, 7233), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (7219, 7233), False, 'from django.db import models\n'), ((7426, 7504), 'django.db.models.OneToOneField', 'models.OneToOneField', (['Document'], {'related_name': '"""_notification_answer"""', 'null': '(True)'}), "(Document, related_name='_notification_answer', null=True)\n", (7446, 7504), False, 
'from django.db import models\n'), ((7521, 7552), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)'}), '(null=True)\n', (7541, 7552), False, 'from django.db import models\n'), ((7572, 7603), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)'}), '(null=True)\n', (7592, 7603), False, 'from django.db import models\n'), ((7623, 7645), 'ecs.authorization.managers.AuthorizationManager', 'AuthorizationManager', ([], {}), '()\n', (7643, 7645), False, 'from ecs.authorization.managers import AuthorizationManager\n'), ((7663, 7679), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (7677, 7679), False, 'from django.db import models\n'), ((1573, 1611), 'django.template.loader.select_template', 'loader.select_template', (['template_names'], {}), '(template_names)\n', (1595, 1611), False, 'from django.template import loader\n'), ((8701, 8715), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (8713, 8715), False, 'from django.utils import timezone\n'), ((4498, 4513), 'django.utils.text.slugify', 'slugify', (['ec_num'], {}), '(ec_num)\n', (4505, 4513), False, 'from django.utils.text import slugify\n'), ((4515, 4538), 'django.utils.text.slugify', 'slugify', (['self.type.name'], {}), '(self.type.name)\n', (4522, 4538), False, 'from django.utils.text import slugify\n'), ((6807, 6816), 'django.utils.translation.ugettext_lazy', '_', (['"""Type"""'], {}), "('Type')\n", (6808, 6816), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7305, 7319), 'django.utils.translation.ugettext_lazy', '_', (['"""Proofread"""'], {}), "('Proofread')\n", (7306, 7319), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7387, 7405), 'django.utils.translation.ugettext_lazy', '_', (['"""rate negative"""'], {}), "('rate negative')\n", (7388, 7405), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1362, 1383), 'importlib.import_module', 'import_module', 
(['module'], {}), '(module)\n', (1375, 1383), False, 'from importlib import import_module\n'), ((2615, 2688), 'ecs.core.diff.diff_submission_forms', 'diff_submission_forms', (['self.old_submission_form', 'self.new_submission_form'], {}), '(self.old_submission_form, self.new_submission_form)\n', (2636, 2688), False, 'from ecs.core.diff import diff_submission_forms\n'), ((7740, 7776), 'reversion.models.Version.objects.get_for_object', 'Version.objects.get_for_object', (['self'], {}), '(self)\n', (7770, 7776), False, 'from reversion.models import Version\n'), ((9813, 9841), 'django.utils.translation.ugettext_lazy', '_', (['"""New Notification Answer"""'], {}), "('New Notification Answer')\n", (9814, 9841), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6545, 6581), 'ecs.meetings.models.Meeting.objects.filter', 'Meeting.objects.filter', ([], {'started': 'None'}), '(started=None)\n', (6567, 6581), False, 'from ecs.meetings.models import Meeting\n')]
from abc import ABCMeta, abstractmethod import numpy as np class BaseSubspace(metaclass=ABCMeta): def __init__(self, measurements=None, A=None, k=None, rank=None, pks=[], name=''): # Check A if A is None: self.__A = np.asarray(a=1, dtype=measurements.dtype) else: # Check the type and number of dimensions of A if not (type(A) is np.ndarray): raise ValueError('A must be an array') else: if not (len(A.shape) == 2): raise ValueError("Dimensions of A must be 2") self.__A = np.asarray(A) # Shape of A m, n = A.shape self.__At = np.transpose(np.conjugate(self.__A)) # Check measurements if measurements is None: self._measurements = np.asarray(1) else: if not (type(measurements) is np.ndarray): raise ValueError('measurements must be an array') # Check the dimensions of the measurements if not (measurements.shape[0] == A.shape[0]): raise ValueError("The dimension of y is not consistent with the dimensions of A") self.__measurements = np.asarray(a=measurements, dtype=measurements.dtype) # Control of the value of k if k is None: print('WARNING: Unknown sparsity considered. 
Some of the algorithms may not be applicable.') self.__k = k else: if k > self.A.shape[1]: raise ValueError("k cannot be larger than the number of atoms") else: self.__k = k # Assign the given rank if rank is not None: if rank < 0: raise ValueError('rank must be positive.') self._rank = rank # Check the partially known support if not(type(pks) is list): self._pks = pks.tolist() else: self._pks = pks # Create the solution self.sol = np.zeros(shape=(n, measurements.shape[1]), dtype=measurements.dtype) self.support_sol = [] # Assign the name self.__name = name @abstractmethod def solve(self, threshold): pass @property def A(self): return self.__A @property def At(self): return self.__At @property def measurements(self): return self.__measurements @property def k(self): return self.__k @property def name(self): return self.__name @property def rank(self): return self._rank @property def pks(self): return self._pks def estimate_measurement_rank(self): return np.linalg.matrix_rank(M=self.measurements, tol=None, hermitian=False) def compute_covariance_matrix(self): return np.matmul(self.measurements, np.conjugate(self.measurements.T)) / self.measurements.shape[1] def estimate_signal_subspace(self, threshold=0.01): # Compute the covariance matrix gamma = self.compute_covariance_matrix() # EVD eig_vals, eig_vecs = np.linalg.eigh(gamma, UPLO='L') eig_vals = eig_vals[::-1] eig_vecs = eig_vecs[:, ::-1] # If the rank is not known - Estimate the rank if self._rank is None: # Shape of the measurements m = self.measurements.shape[0] # Estimate the dimension of the signal subspace eig_diff = np.abs(np.diff(eig_vals)) ind = np.where(eig_diff >= threshold*eig_vals[0])[0][-1] self._rank = m - ind # r dominant eigenvectors of the covariance matrix U = eig_vecs[:,:self._rank] # Projection matrix P = np.matmul(U, np.conjugate(U.T)) return P def estimate_noise_subspace(self, threshold=0.1): # Compute the covariance matrix gamma = self.compute_covariance_matrix() # EVD eig_vals, eig_vecs = 
np.linalg.eigh(gamma, UPLO='L') eig_vals = eig_vals[::-1] eig_vecs = eig_vecs[:, ::-1] # If the rank is not known - Estimate the rank if self._rank is None: # Shape of the measurements m = self.measurements.shape[0] # Estimate the dimension of the signal subspace eig_diff = np.diff(eig_vals) ind = np.where(eig_diff >= threshold*eig_vals[0])[0] self._rank = m - ind # n-r lowest eigenvectors of the covariance matrix U = eig_vecs[:,self.rank:] # Projection matrix P = np.matmul(U, np.conjugate(U.T)) return P
[ "numpy.linalg.matrix_rank", "numpy.where", "numpy.conjugate", "numpy.asarray", "numpy.diff", "numpy.zeros", "numpy.linalg.eigh" ]
[((2029, 2097), 'numpy.zeros', 'np.zeros', ([], {'shape': '(n, measurements.shape[1])', 'dtype': 'measurements.dtype'}), '(shape=(n, measurements.shape[1]), dtype=measurements.dtype)\n', (2037, 2097), True, 'import numpy as np\n'), ((2735, 2804), 'numpy.linalg.matrix_rank', 'np.linalg.matrix_rank', ([], {'M': 'self.measurements', 'tol': 'None', 'hermitian': '(False)'}), '(M=self.measurements, tol=None, hermitian=False)\n', (2756, 2804), True, 'import numpy as np\n'), ((3145, 3176), 'numpy.linalg.eigh', 'np.linalg.eigh', (['gamma'], {'UPLO': '"""L"""'}), "(gamma, UPLO='L')\n", (3159, 3176), True, 'import numpy as np\n'), ((4005, 4036), 'numpy.linalg.eigh', 'np.linalg.eigh', (['gamma'], {'UPLO': '"""L"""'}), "(gamma, UPLO='L')\n", (4019, 4036), True, 'import numpy as np\n'), ((249, 290), 'numpy.asarray', 'np.asarray', ([], {'a': '(1)', 'dtype': 'measurements.dtype'}), '(a=1, dtype=measurements.dtype)\n', (259, 290), True, 'import numpy as np\n'), ((614, 627), 'numpy.asarray', 'np.asarray', (['A'], {}), '(A)\n', (624, 627), True, 'import numpy as np\n'), ((713, 735), 'numpy.conjugate', 'np.conjugate', (['self.__A'], {}), '(self.__A)\n', (725, 735), True, 'import numpy as np\n'), ((833, 846), 'numpy.asarray', 'np.asarray', (['(1)'], {}), '(1)\n', (843, 846), True, 'import numpy as np\n'), ((1229, 1281), 'numpy.asarray', 'np.asarray', ([], {'a': 'measurements', 'dtype': 'measurements.dtype'}), '(a=measurements, dtype=measurements.dtype)\n', (1239, 1281), True, 'import numpy as np\n'), ((3780, 3797), 'numpy.conjugate', 'np.conjugate', (['U.T'], {}), '(U.T)\n', (3792, 3797), True, 'import numpy as np\n'), ((4362, 4379), 'numpy.diff', 'np.diff', (['eig_vals'], {}), '(eig_vals)\n', (4369, 4379), True, 'import numpy as np\n'), ((4627, 4644), 'numpy.conjugate', 'np.conjugate', (['U.T'], {}), '(U.T)\n', (4639, 4644), True, 'import numpy as np\n'), ((2891, 2924), 'numpy.conjugate', 'np.conjugate', (['self.measurements.T'], {}), '(self.measurements.T)\n', (2903, 2924), True, 
'import numpy as np\n'), ((3509, 3526), 'numpy.diff', 'np.diff', (['eig_vals'], {}), '(eig_vals)\n', (3516, 3526), True, 'import numpy as np\n'), ((4398, 4443), 'numpy.where', 'np.where', (['(eig_diff >= threshold * eig_vals[0])'], {}), '(eig_diff >= threshold * eig_vals[0])\n', (4406, 4443), True, 'import numpy as np\n'), ((3546, 3591), 'numpy.where', 'np.where', (['(eig_diff >= threshold * eig_vals[0])'], {}), '(eig_diff >= threshold * eig_vals[0])\n', (3554, 3591), True, 'import numpy as np\n')]
from connectors.tableau.tableau import TableauConnector from posixpath import join from typing import List, Dict, Tuple import argparse import connectors.tableau import os import utils import logging import sys import yaml logging.basicConfig(level=logging.INFO) MAIN_PATH = '/Users/tomevers/projects/airglow' CONNECTIONS_CONF_FILE = 'airglow_connections.yml' DS_FILENAME = 'data sources.yml' DS_TEMPLATE = 'templates/data_source.md' class ConnectionValidationError(Exception): pass def get_connections_config(yaml_format=True) -> dict: yaml_file = os.path.join(MAIN_PATH, CONNECTIONS_CONF_FILE) try: return utils.get_file(yaml_file, yaml_format) except FileNotFoundError: logging.exception(FileNotFoundError('Airglow connections file can not be found.')) sys.exit(1) def store_ds(events_md: str, event: dict, docs_dir: str): file_dir = os.path.join(docs_dir, 'data sources', event['category']) file_name = event['name'] + '.md' if not os.path.isdir(file_dir): os.makedirs(file_dir) with open(os.path.join(file_dir, file_name), 'w') as file: file.write(events_md) def generate_datasources_yaml(): conn_config = get_connections_config() if 'connections' not in conn_config.keys(): logging.exception('connections info not found in airglow_connections config file.') sys.exit(1) tableau_config = conn_config['connections']['tableau'] tableau_connector = TableauConnector(server=tableau_config['server'], sitename=tableau_config['sitename'], password=tableau_config['password'], username=tableau_config['username']) ds = tableau_connector.fetch_datasources() ds = [tableau_connector.generate_datasource_dag(datasource) for datasource in ds] logging.info("storing data source") with open(r'/Users/tomevers/projects/airglow/definitions/data sources.yml', 'w') as file: documents = yaml.dump(ds, file, sort_keys=False) return ds def generate_markdown(datasource): template_path = os.path.join(MAIN_PATH, DS_TEMPLATE) with open(template_path, 'r') as file: ds_md = file.read() ds_md = 
ds_md.replace('{<yaml_header>}', yaml.dump(datasource)) return ds_md def get_datasource_definitions(yaml_format=True) -> dict: """ returns the data source definition yaml file as a dict. Returns: a dict with all data sources defined in the yaml file. """ yaml_file = os.path.join(MAIN_PATH, 'definitions', DS_FILENAME) try: return utils.get_file(yaml_file, yaml_format) except FileNotFoundError: logging.exception(FileNotFoundError('Datasource definition file can not be found.')) sys.exit(1) def main(args): logging.info('Starting datasource generation script..') logging.info('****************************************') logging.info('** Step 1: Get all information') logging.info('****************************************') if args.use_local_definitions.lower() in ('true', '1', 't'): logging.info('** Retrieving data source definitions from local yaml file') datasource_defs = get_datasource_definitions() else: logging.info('** Retrieving data source definitions from Tableau') datasource_defs = generate_datasources_yaml() logging.info('****************************************') logging.info('** Step 2: Generate and store event files.') logging.info('****************************************') for datasource in datasource_defs: logging.info('generating datasource md file for {}'.format(datasource['data_source_name'])) ds_md = generate_markdown(datasource) utils.store_md(ds_md, 'data sources', datasource['data_source_project'], datasource['data_source_name'], args.docs_dir) if __name__ == "__main__": parser = argparse.ArgumentParser('Script to convert event definitions file into markdown format.') parser.add_argument('--docs_dir', type=str, help='path to the folder where the generated docs should be stored. The script will need write access to this folder. Defaults to "./docs/"') parser.add_argument('--use_local_definitions', type=str, help='path to the folder where the generated docs should be stored. The script will need write access to this folder. 
Defaults to "./docs/"') args = parser.parse_args() main(args)
[ "logging.basicConfig", "utils.store_md", "argparse.ArgumentParser", "os.makedirs", "yaml.dump", "os.path.join", "logging.exception", "os.path.isdir", "sys.exit", "connectors.tableau.tableau.TableauConnector", "logging.info", "utils.get_file" ]
[((226, 265), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (245, 265), False, 'import logging\n'), ((565, 611), 'os.path.join', 'os.path.join', (['MAIN_PATH', 'CONNECTIONS_CONF_FILE'], {}), '(MAIN_PATH, CONNECTIONS_CONF_FILE)\n', (577, 611), False, 'import os\n'), ((891, 948), 'os.path.join', 'os.path.join', (['docs_dir', '"""data sources"""', "event['category']"], {}), "(docs_dir, 'data sources', event['category'])\n", (903, 948), False, 'import os\n'), ((1467, 1637), 'connectors.tableau.tableau.TableauConnector', 'TableauConnector', ([], {'server': "tableau_config['server']", 'sitename': "tableau_config['sitename']", 'password': "tableau_config['password']", 'username': "tableau_config['username']"}), "(server=tableau_config['server'], sitename=tableau_config[\n 'sitename'], password=tableau_config['password'], username=\n tableau_config['username'])\n", (1483, 1637), False, 'from connectors.tableau.tableau import TableauConnector\n'), ((1890, 1925), 'logging.info', 'logging.info', (['"""storing data source"""'], {}), "('storing data source')\n", (1902, 1925), False, 'import logging\n'), ((2154, 2190), 'os.path.join', 'os.path.join', (['MAIN_PATH', 'DS_TEMPLATE'], {}), '(MAIN_PATH, DS_TEMPLATE)\n', (2166, 2190), False, 'import os\n'), ((2576, 2627), 'os.path.join', 'os.path.join', (['MAIN_PATH', '"""definitions"""', 'DS_FILENAME'], {}), "(MAIN_PATH, 'definitions', DS_FILENAME)\n", (2588, 2627), False, 'import os\n'), ((2856, 2911), 'logging.info', 'logging.info', (['"""Starting datasource generation script.."""'], {}), "('Starting datasource generation script..')\n", (2868, 2911), False, 'import logging\n'), ((2916, 2972), 'logging.info', 'logging.info', (['"""****************************************"""'], {}), "('****************************************')\n", (2928, 2972), False, 'import logging\n'), ((2977, 3023), 'logging.info', 'logging.info', (['"""** Step 1: Get all information"""'], {}), "('** 
Step 1: Get all information')\n", (2989, 3023), False, 'import logging\n'), ((3028, 3084), 'logging.info', 'logging.info', (['"""****************************************"""'], {}), "('****************************************')\n", (3040, 3084), False, 'import logging\n'), ((3436, 3492), 'logging.info', 'logging.info', (['"""****************************************"""'], {}), "('****************************************')\n", (3448, 3492), False, 'import logging\n'), ((3497, 3555), 'logging.info', 'logging.info', (['"""** Step 2: Generate and store event files."""'], {}), "('** Step 2: Generate and store event files.')\n", (3509, 3555), False, 'import logging\n'), ((3560, 3616), 'logging.info', 'logging.info', (['"""****************************************"""'], {}), "('****************************************')\n", (3572, 3616), False, 'import logging\n'), ((3973, 4067), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Script to convert event definitions file into markdown format."""'], {}), "(\n 'Script to convert event definitions file into markdown format.')\n", (3996, 4067), False, 'import argparse\n'), ((636, 674), 'utils.get_file', 'utils.get_file', (['yaml_file', 'yaml_format'], {}), '(yaml_file, yaml_format)\n', (650, 674), False, 'import utils\n'), ((998, 1021), 'os.path.isdir', 'os.path.isdir', (['file_dir'], {}), '(file_dir)\n', (1011, 1021), False, 'import os\n'), ((1031, 1052), 'os.makedirs', 'os.makedirs', (['file_dir'], {}), '(file_dir)\n', (1042, 1052), False, 'import os\n'), ((1280, 1368), 'logging.exception', 'logging.exception', (['"""connections info not found in airglow_connections config file."""'], {}), "(\n 'connections info not found in airglow_connections config file.')\n", (1297, 1368), False, 'import logging\n'), ((1372, 1383), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1380, 1383), False, 'import sys\n'), ((2045, 2081), 'yaml.dump', 'yaml.dump', (['ds', 'file'], {'sort_keys': '(False)'}), '(ds, file, sort_keys=False)\n', 
(2054, 2081), False, 'import yaml\n'), ((2307, 2328), 'yaml.dump', 'yaml.dump', (['datasource'], {}), '(datasource)\n', (2316, 2328), False, 'import yaml\n'), ((2652, 2690), 'utils.get_file', 'utils.get_file', (['yaml_file', 'yaml_format'], {}), '(yaml_file, yaml_format)\n', (2666, 2690), False, 'import utils\n'), ((3158, 3232), 'logging.info', 'logging.info', (['"""** Retrieving data source definitions from local yaml file"""'], {}), "('** Retrieving data source definitions from local yaml file')\n", (3170, 3232), False, 'import logging\n'), ((3306, 3372), 'logging.info', 'logging.info', (['"""** Retrieving data source definitions from Tableau"""'], {}), "('** Retrieving data source definitions from Tableau')\n", (3318, 3372), False, 'import logging\n'), ((3810, 3933), 'utils.store_md', 'utils.store_md', (['ds_md', '"""data sources"""', "datasource['data_source_project']", "datasource['data_source_name']", 'args.docs_dir'], {}), "(ds_md, 'data sources', datasource['data_source_project'],\n datasource['data_source_name'], args.docs_dir)\n", (3824, 3933), False, 'import utils\n'), ((804, 815), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (812, 815), False, 'import sys\n'), ((1067, 1100), 'os.path.join', 'os.path.join', (['file_dir', 'file_name'], {}), '(file_dir, file_name)\n', (1079, 1100), False, 'import os\n'), ((2822, 2833), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2830, 2833), False, 'import sys\n')]
import logging from itertools import starmap from funcy import join from .context import Context from .interpolate import resolve logger = logging.getLogger(__name__) STAGES = "stages" class DataResolver: def __init__(self, d): self.context = Context() self.data = d def _resolve_entry(self, name, definition): stage_d = resolve(definition, self.context) logger.trace("Resolved stage data for '%s': %s", name, stage_d) return {name: stage_d} def resolve(self): stages = self.data.get(STAGES, {}) data = join(starmap(self._resolve_entry, stages.items())) return {**self.data, STAGES: data}
[ "logging.getLogger" ]
[((142, 169), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (159, 169), False, 'import logging\n')]
from __future__ import absolute_import from __future__ import print_function import os import sys from conversion_imagenet import TestModels from conversion_imagenet import is_paddle_supported def get_test_table(): return { 'paddle' : { 'resnet50' : [ TestModels.onnx_emit, #TestModels.caffe_emit, #TestModels.cntk_emit, TestModels.coreml_emit, TestModels.keras_emit, TestModels.mxnet_emit, TestModels.pytorch_emit, TestModels.tensorflow_emit ], 'resnet101' : [ #TestModels.onnx_emit, #TestModels.caffe_emit, #TestModels.cntk_emit, TestModels.coreml_emit, TestModels.keras_emit, TestModels.mxnet_emit, TestModels.pytorch_emit, TestModels.tensorflow_emit ], 'vgg16' : [ TestModels.onnx_emit, #TestModels.caffe_emit, #TestModels.cntk_emit, #TestModels.coreml_emit, #TestModels.keras_emit, #TestModels.mxnet_emit, #TestModels.pytorch_emit, #TestModels.tensorflow_emit ], }} def test_paddle(): if not is_paddle_supported(): return # omit tensorflow lead to crash import tensorflow as tf test_table = get_test_table() tester = TestModels(test_table) tester._test_function('paddle', tester.paddle_parse) if __name__ == '__main__': test_paddle()
[ "conversion_imagenet.TestModels", "conversion_imagenet.is_paddle_supported" ]
[((1542, 1564), 'conversion_imagenet.TestModels', 'TestModels', (['test_table'], {}), '(test_table)\n', (1552, 1564), False, 'from conversion_imagenet import TestModels\n'), ((1393, 1414), 'conversion_imagenet.is_paddle_supported', 'is_paddle_supported', ([], {}), '()\n', (1412, 1414), False, 'from conversion_imagenet import is_paddle_supported\n')]
# GENERATED BY KOMAND SDK - DO NOT EDIT import komand import json class Component: DESCRIPTION = "This action resets password for Okta user and transitions user status to PASSWORD_EXPIRED, so that the user is required to change their password at their next login" class Input: TEMP_PASSWORD = "<PASSWORD>" USER_ID = "user_id" class Output: SUCCESS = "success" TEMP_PASSWORD = "<PASSWORD>" class ResetPasswordInput(komand.Input): schema = json.loads(""" { "type": "object", "title": "Variables", "properties": { "temp_password": { "type": "boolean", "title": "Okta User Temporary Password", "description": "If set to true, sets the user's password to a temporary password and returns it", "default": false, "order": 2 }, "user_id": { "type": "string", "title": "Okta User ID", "description": "User ID whose password will be reset", "order": 1 } }, "required": [ "user_id" ] } """) def __init__(self): super(self.__class__, self).__init__(self.schema) class ResetPasswordOutput(komand.Output): schema = json.loads(""" { "type": "object", "title": "Variables", "properties": { "success": { "type": "boolean", "title": "Success", "description": "Whether the reset was successful", "order": 1 }, "temp_password": { "type": "string", "title": "Okta User Temporary Password", "description": "The temporary password of the Okta user, if true was set in Temporary Password input", "order": 2 } }, "required": [ "success" ] } """) def __init__(self): super(self.__class__, self).__init__(self.schema)
[ "json.loads" ]
[((478, 1022), 'json.loads', 'json.loads', (['"""\n {\n "type": "object",\n "title": "Variables",\n "properties": {\n "temp_password": {\n "type": "boolean",\n "title": "Okta User Temporary Password",\n "description": "If set to true, sets the user\'s password to a temporary password and returns it",\n "default": false,\n "order": 2\n },\n "user_id": {\n "type": "string",\n "title": "Okta User ID",\n "description": "User ID whose password will be reset",\n "order": 1\n }\n },\n "required": [\n "user_id"\n ]\n}\n """'], {}), '(\n """\n {\n "type": "object",\n "title": "Variables",\n "properties": {\n "temp_password": {\n "type": "boolean",\n "title": "Okta User Temporary Password",\n "description": "If set to true, sets the user\'s password to a temporary password and returns it",\n "default": false,\n "order": 2\n },\n "user_id": {\n "type": "string",\n "title": "Okta User ID",\n "description": "User ID whose password will be reset",\n "order": 1\n }\n },\n "required": [\n "user_id"\n ]\n}\n """\n )\n', (488, 1022), False, 'import json\n'), ((1153, 1669), 'json.loads', 'json.loads', (['"""\n {\n "type": "object",\n "title": "Variables",\n "properties": {\n "success": {\n "type": "boolean",\n "title": "Success",\n "description": "Whether the reset was successful",\n "order": 1\n },\n "temp_password": {\n "type": "string",\n "title": "Okta User Temporary Password",\n "description": "The temporary password of the Okta user, if true was set in Temporary Password input",\n "order": 2\n }\n },\n "required": [\n "success"\n ]\n}\n """'], {}), '(\n """\n {\n "type": "object",\n "title": "Variables",\n "properties": {\n "success": {\n "type": "boolean",\n "title": "Success",\n "description": "Whether the reset was successful",\n "order": 1\n },\n "temp_password": {\n "type": "string",\n "title": "Okta User Temporary Password",\n "description": "The temporary password of the Okta user, if true was set in Temporary Password input",\n "order": 2\n }\n },\n "required": [\n 
"success"\n ]\n}\n """\n )\n', (1163, 1669), False, 'import json\n')]
from lexer.token import Token class Scanner: # Construtor da classe def __init__(self, programa): self.inicio = 0 self.atual = 0 self.linha = 1 self.tokens = [] self.programa = programa # Busca caracteres, passa para o próximo char (atual é o char a frente do que tá sendo lido) def nextChar(self): self.atual += 1 return self.programa[self.atual - 1] # # Chama o buscador de Tokens, adiciona o fim do arquivo (Token END), # chama o buscador de token de palavras reservadas def scan(self): self.scanTokens() self.scanReserved() return self.tokens # Procura tokens até chegar no Fim def scanTokens(self): while self.atual < len(self.programa): self.inicio = self.atual char = self.nextChar() if char == " " or char == "\t" or char == "\r": pass elif char == "\n": self.linha += 1 # Verificar se são tokens delimitadores ("(", ")", "{", "}") elif char == "(" or char == ")" or char == "{" or char == "}": self.tokens.append( Token( self.delimitadoresToken(char), self.programa[self.inicio : self.atual], self.linha, ) ) # Verificar se são tokens de operações aritméticas ("+", "-", "*", "/") elif char == "+" or char == "-" or char == "*" or char == "/": self.tokens.append( Token( self.opAritmeticaToken(char), self.programa[self.inicio : self.atual], self.linha, ) ) # Verificar se são tokens de operações booleanas ("=". "==", "!=", ">", "<", ">=", "<=") elif char == "=" or char == "!" 
or char == "<" or char == ">": self.tokens.append( Token( self.opBolleanaToken(char), self.programa[self.inicio : self.atual], self.linha, ) ) # Separador elif char == ",": # Virgula self.tokens.append( Token("COMMA", self.programa[self.inicio : self.atual], self.linha) ) # Demarcador de fim de bloco / expressão elif char == ";": # Ponto e virgula self.tokens.append( Token( "SEMICOLON", self.programa[self.inicio : self.atual], self.linha ) ) # Números elif char >= "0" and char <= "9": while self.lookAhead() >= "0" and self.lookAhead() <= "9": self.nextChar() self.tokens.append( Token("NUM", self.programa[self.inicio : self.atual], self.linha) ) # Letras / Identificadores / Palavras Reservadas elif char.isalpha(): while self.lookAhead().isalnum(): self.nextChar() self.tokens.append( Token("ID", self.programa[self.inicio : self.atual], self.linha) ) # Outros/Error else: print("Caractere inválido na linha ", self.linha) exit(2) def delimitadoresToken(self, char): # Delimitadores if char == "(": # Parentese esquerdo return "PLEFT" elif char == ")": # Parentese direito return "PRIGHT" elif char == "{": # Chaves esquerdo return "CLEFT" elif char == "}": # Chaves direito return "CRIGHT" def opAritmeticaToken(self, char): # Operações Aritméticas if char == "+": # Soma return "ADD" elif char == "-": # Subtração return "SUB" elif char == "*": # Multiplicação return "MULT" elif char == "/": # Divisão return "DIV" def opBolleanaToken(self, char): # Operações Booleanas if char == "=": # Igual ou Atribuição if self.lookAhead() == "=": # == (comparação) self.atual += 1 return "EQUAL" else: # = (atribuição) return "ATB" elif char == "!": # Diferente ("!=") if self.lookAhead() == "=": self.atual += 1 return "DIFF" elif char == "<": # Menor ou igual, menor if self.lookAhead() == "=": # ("<= ") self.atual += 1 return "LESSEQUAL" else: # ("<") return "LESS" elif char == ">": # Maior ou igual, Maior if self.lookAhead() == "=": # (">=") self.atual += 1 return "GREATEREQUAL" else: # 
(">") return "GREATER" def scanReserved(self): for i in self.tokens: if i.tipo == "ID": # Inicio do programa if i.lexema == "program": i.tipo = "PROGRAM" # Fim do programa elif i.lexema == "end": i.tipo = "END" # Identificador de função elif i.lexema == "func": i.tipo = "FUNC" # Identificador de procedimento elif i.lexema == "proc": i.tipo = "PROC" # Identificador de chamada para proc e func elif i.lexema == "call": i.tipo = "CALL" # Identificador de inteiros elif i.lexema == "int": i.tipo = "INT" # Tipo Booleano elif i.lexema == "bool": i.tipo = "BOOL" # Booleano Verdadeiro elif i.lexema == "True": i.tipo = "BOOLEAN" # Booleano Falso elif i.lexema == "False": i.tipo = "BOOLEAN" # Retorno da função elif i.lexema == "return": i.tipo = "RETURN" # Condicional IF elif i.lexema == "if": i.tipo = "IF" # Identificador de fim do IF elif i.lexema == "endif": i.tipo = "ENDIF" # Condicional ELSE elif i.lexema == "else": i.tipo = "ELSE" # Identificador de fim do ELSE elif i.lexema == "endelse": i.tipo = "ENDELSE" # Condicional WHILE elif i.lexema == "while": i.tipo = "WHILE" # Identificador de fim do WHILE elif i.lexema == "endwhile": i.tipo = "ENDWHILE" # Escrita na tela elif i.lexema == "print": i.tipo = "PRINT" # Incondicional BREAK elif i.lexema == "break": i.tipo = "BREAK" # Incondicional CONTINUE elif i.lexema == "continue": i.tipo = "CONTINUE" # Verifica o simbolo a frente e se está no final do programa def lookAhead(self): if self.atual < len(self.programa): return self.programa[self.atual] else: return "\0"
[ "lexer.token.Token" ]
[((2373, 2438), 'lexer.token.Token', 'Token', (['"""COMMA"""', 'self.programa[self.inicio:self.atual]', 'self.linha'], {}), "('COMMA', self.programa[self.inicio:self.atual], self.linha)\n", (2378, 2438), False, 'from lexer.token import Token\n'), ((2618, 2687), 'lexer.token.Token', 'Token', (['"""SEMICOLON"""', 'self.programa[self.inicio:self.atual]', 'self.linha'], {}), "('SEMICOLON', self.programa[self.inicio:self.atual], self.linha)\n", (2623, 2687), False, 'from lexer.token import Token\n'), ((2990, 3053), 'lexer.token.Token', 'Token', (['"""NUM"""', 'self.programa[self.inicio:self.atual]', 'self.linha'], {}), "('NUM', self.programa[self.inicio:self.atual], self.linha)\n", (2995, 3053), False, 'from lexer.token import Token\n'), ((3311, 3373), 'lexer.token.Token', 'Token', (['"""ID"""', 'self.programa[self.inicio:self.atual]', 'self.linha'], {}), "('ID', self.programa[self.inicio:self.atual], self.linha)\n", (3316, 3373), False, 'from lexer.token import Token\n')]
import os import pytest import numpy as np from laserembeddings import Laser SIMILARITY_TEST = os.getenv('SIMILARITY_TEST') def test_laser(): with open(Laser.DEFAULT_ENCODER_FILE, 'rb') as f_encoder: laser = Laser( Laser.DEFAULT_BPE_CODES_FILE, None, f_encoder, ) assert laser.embed_sentences( ['hello world!', 'i hope the tests are passing'], lang='en').shape == (2, 1024) def test_similarity(test_data): if not SIMILARITY_TEST: pytest.skip("SIMILARITY_TEST not set") if not test_data: raise FileNotFoundError( 'laserembeddings-test-data.npz is missing, run "python -m laserembeddings download-test-data" to fix that 🔧' ) report = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'report', 'comparison-with-LASER.md') laser = Laser() with open(report, 'w', encoding='utf-8') as f_report: f_report.write( '# Comparison of the embeddings computed with original LASER with the embeddings computed with this package\n' ) f_report.write( '| |language|avg. cosine similarity|min. cosine similarity|\n') f_report.write( '|-|--------|----------------------|----------------------|\n') for lang in test_data['langs']: if lang in ('cmn', 'wuu', 'yue', 'zh', 'jpn', 'ja', 'el'): # language not supported, ignoring continue sents = test_data[f'{lang}_sentences'] orig_embeddings = test_data[f'{lang}_embeddings'] embeddings = laser.embed_sentences(sents, lang) assert embeddings.shape == orig_embeddings.shape cosine_similarities = np.sum( orig_embeddings * embeddings, axis=1) / (np.linalg.norm(orig_embeddings, axis=1) * np.linalg.norm(embeddings, axis=1)) similarity_mean = np.mean(cosine_similarities) similarity_min = np.min(cosine_similarities) f_report.write( f'|{"✅" if similarity_min > 0.99999 else "⚠️" if similarity_mean > 0.99 else "❌"}|{lang}|{similarity_mean:.5f}|{similarity_min:.5f}|\n' )
[ "numpy.mean", "os.getenv", "numpy.linalg.norm", "os.path.realpath", "numpy.sum", "numpy.min", "pytest.skip", "laserembeddings.Laser" ]
[((98, 126), 'os.getenv', 'os.getenv', (['"""SIMILARITY_TEST"""'], {}), "('SIMILARITY_TEST')\n", (107, 126), False, 'import os\n'), ((912, 919), 'laserembeddings.Laser', 'Laser', ([], {}), '()\n', (917, 919), False, 'from laserembeddings import Laser\n'), ((225, 277), 'laserembeddings.Laser', 'Laser', (['Laser.DEFAULT_BPE_CODES_FILE', 'None', 'f_encoder'], {}), '(Laser.DEFAULT_BPE_CODES_FILE, None, f_encoder)\n', (230, 277), False, 'from laserembeddings import Laser\n'), ((537, 575), 'pytest.skip', 'pytest.skip', (['"""SIMILARITY_TEST not set"""'], {}), "('SIMILARITY_TEST not set')\n", (548, 575), False, 'import pytest\n'), ((806, 832), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (822, 832), False, 'import os\n'), ((2014, 2042), 'numpy.mean', 'np.mean', (['cosine_similarities'], {}), '(cosine_similarities)\n', (2021, 2042), True, 'import numpy as np\n'), ((2072, 2099), 'numpy.min', 'np.min', (['cosine_similarities'], {}), '(cosine_similarities)\n', (2078, 2099), True, 'import numpy as np\n'), ((1797, 1841), 'numpy.sum', 'np.sum', (['(orig_embeddings * embeddings)'], {'axis': '(1)'}), '(orig_embeddings * embeddings, axis=1)\n', (1803, 1841), True, 'import numpy as np\n'), ((1878, 1917), 'numpy.linalg.norm', 'np.linalg.norm', (['orig_embeddings'], {'axis': '(1)'}), '(orig_embeddings, axis=1)\n', (1892, 1917), True, 'import numpy as np\n'), ((1947, 1981), 'numpy.linalg.norm', 'np.linalg.norm', (['embeddings'], {'axis': '(1)'}), '(embeddings, axis=1)\n', (1961, 1981), True, 'import numpy as np\n')]
# -------------- # Importing header files import numpy as np # Path of the file has been stored in variable called 'path' #New record new_record=[[50, 9, 4, 1, 0, 0, 40, 0]] #Code starts here data_file='subset_1000.csv' data=np.genfromtxt(path,delimiter=",",skip_header=1) print(data) census=np.concatenate((new_record,data),axis=0) print(census) # -------------- #Code starts here age=census[:,0] max_age=np.max(age) min_age=np.min(age) age_mean=np.mean(age) age_std=np.std(age) # -------------- #Code starts here race_0=census[census[:,2]==0] race_1=census[census[:,2]==1] race_2=census[census[:,2]==2] race_3=census[census[:,2]==3] race_4=census[census[:,2]==4] len_0=len(race_0) len_1=len(race_1) len_2=len(race_2) len_3=len(race_3) len_4=len(race_4) print(len_0,len_1,len_2,len_3,len_4) minority_race=3 # -------------- #Code starts here senior_citizens=census[census[:,0]>60] working_hours_sum=senior_citizens.sum(axis=0)[6] senior_citizens_len=len(senior_citizens) avg_working_hours=working_hours_sum/senior_citizens_len print(avg_working_hours) # -------------- #Code starts here high=census[census[:,1]>10] low=census[census[:,1]<=10] avg_pay_high=round(high.mean(axis=0)[7],2) avg_pay_low=round(low.mean(axis=0)[7],2) print(avg_pay_high,avg_pay_low) a=avg_pay_high-avg_pay_low print(a)
[ "numpy.mean", "numpy.std", "numpy.max", "numpy.concatenate", "numpy.min", "numpy.genfromtxt" ]
[((244, 293), 'numpy.genfromtxt', 'np.genfromtxt', (['path'], {'delimiter': '""","""', 'skip_header': '(1)'}), "(path, delimiter=',', skip_header=1)\n", (257, 293), True, 'import numpy as np\n'), ((313, 355), 'numpy.concatenate', 'np.concatenate', (['(new_record, data)'], {'axis': '(0)'}), '((new_record, data), axis=0)\n', (327, 355), True, 'import numpy as np\n'), ((432, 443), 'numpy.max', 'np.max', (['age'], {}), '(age)\n', (438, 443), True, 'import numpy as np\n'), ((453, 464), 'numpy.min', 'np.min', (['age'], {}), '(age)\n', (459, 464), True, 'import numpy as np\n'), ((475, 487), 'numpy.mean', 'np.mean', (['age'], {}), '(age)\n', (482, 487), True, 'import numpy as np\n'), ((497, 508), 'numpy.std', 'np.std', (['age'], {}), '(age)\n', (503, 508), True, 'import numpy as np\n')]
import numpy as np from matplotlib import pyplot as plt """ https://stackoverflow.com/questions/42750910/convert-rgb-image-to-index-image/62980021#62980021 convert semantic labels from RGB coding to index coding Steps: 1. define COLORS (see below) 2. hash colors 3. run rgb2index(segmentation_rgb) see example below TODO: apparently, using cv2.LUT is much simpler (and maybe faster?) """ COLORS = np.array([[0, 0, 0], [0, 0, 255], [255, 0, 0], [0, 255, 0]]) W = np.power(255, [0, 1, 2]) HASHES = np.sum(W * COLORS, axis=-1) HASH2COLOR = {h: c for h, c in zip(HASHES, COLORS)} HASH2IDX = {h: i for i, h in enumerate(HASHES)} def rgb2index(segmentation_rgb): """ turn a 3 channel RGB color to 1 channel index color """ s_shape = segmentation_rgb.shape s_hashes = np.sum(W * segmentation_rgb, axis=-1) print(np.unique(segmentation_rgb.reshape((-1, 3)), axis=0)) func = lambda x: HASH2IDX[int(x)] # noqa segmentation_idx = np.apply_along_axis(func, 0, s_hashes.reshape((1, -1))) segmentation_idx = segmentation_idx.reshape(s_shape[:2]) return segmentation_idx segmentation = np.array([[0, 0, 0], [0, 0, 255], [255, 0, 0]] * 3).reshape((3, 3, 3)) segmentation_idx = rgb2index(segmentation) print(segmentation) print(segmentation_idx) fig, axes = plt.subplots(1, 2, figsize=(6, 3)) axes[0].imshow(segmentation) axes[0].set_title("Segmentation RGB") axes[1].imshow(segmentation_idx) axes[1].set_title("Segmentation IDX") plt.show()
[ "numpy.power", "numpy.array", "numpy.sum", "matplotlib.pyplot.subplots", "matplotlib.pyplot.show" ]
[((402, 462), 'numpy.array', 'np.array', (['[[0, 0, 0], [0, 0, 255], [255, 0, 0], [0, 255, 0]]'], {}), '([[0, 0, 0], [0, 0, 255], [255, 0, 0], [0, 255, 0]])\n', (410, 462), True, 'import numpy as np\n'), ((467, 491), 'numpy.power', 'np.power', (['(255)', '[0, 1, 2]'], {}), '(255, [0, 1, 2])\n', (475, 491), True, 'import numpy as np\n'), ((502, 529), 'numpy.sum', 'np.sum', (['(W * COLORS)'], {'axis': '(-1)'}), '(W * COLORS, axis=-1)\n', (508, 529), True, 'import numpy as np\n'), ((1294, 1328), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(6, 3)'}), '(1, 2, figsize=(6, 3))\n', (1306, 1328), True, 'from matplotlib import pyplot as plt\n'), ((1467, 1477), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1475, 1477), True, 'from matplotlib import pyplot as plt\n'), ((789, 826), 'numpy.sum', 'np.sum', (['(W * segmentation_rgb)'], {'axis': '(-1)'}), '(W * segmentation_rgb, axis=-1)\n', (795, 826), True, 'import numpy as np\n'), ((1122, 1173), 'numpy.array', 'np.array', (['([[0, 0, 0], [0, 0, 255], [255, 0, 0]] * 3)'], {}), '([[0, 0, 0], [0, 0, 255], [255, 0, 0]] * 3)\n', (1130, 1173), True, 'import numpy as np\n')]
import asyncio import shutil from functools import cached_property from pathlib import Path import aiopath from datafiles import datafile, field from furl import furl from sanic import Request from sanic.log import logger from .. import settings, utils from ..types import Dimensions from .overlay import Overlay from .text import Text @datafile("../../templates/{self.id}/config.yml", defaults=True) class Template: id: str name: str = "" source: str | None = None text: list[Text] = field( default_factory=lambda: [Text(), Text(anchor_x=0.0, anchor_y=0.8)] ) example: list[str] = field(default_factory=lambda: ["Top Line", "Bottom Line"]) overlay: list[Overlay] = field(default_factory=lambda: [Overlay()]) def __str__(self): return str(self.directory) def __lt__(self, other): return self.id < other.id @cached_property def valid(self) -> bool: if not settings.DEPLOYED: self._update_example() self.datafile.save() return ( not self.id.startswith("_") and self.image.suffix != settings.PLACEHOLDER_SUFFIX ) def _update_example(self): for line in self.example: if line and not line.isupper(): return self.example = [line.lower() for line in self.example] @cached_property def styles(self): styles = [] for path in self.directory.iterdir(): if not path.stem[0] in {".", "_"} and path.stem not in { "config", settings.DEFAULT_STYLE, }: styles.append(path.stem) if styles or self.overlay != [Overlay()]: styles.append("default") styles.sort() return styles @cached_property def directory(self) -> Path: return self.datafile.path.parent @cached_property def image(self) -> Path: return self.get_image() def get_image(self, style: str = "") -> Path: style = style or settings.DEFAULT_STYLE if utils.urls.schema(style): url = style style = utils.text.fingerprint(url) self.directory.mkdir(exist_ok=True) for path in self.directory.iterdir(): if path.stem == style and path.suffix != settings.PLACEHOLDER_SUFFIX: return path if style == settings.DEFAULT_STYLE: logger.debug(f"No default background image 
for template: {self.id}") return self.directory / ( settings.DEFAULT_STYLE + settings.PLACEHOLDER_SUFFIX ) logger.warning(f"Style {style!r} not available for template: {self.id}") return self.get_image() def jsonify(self, request: Request) -> dict: return { "id": self.id, "name": self.name, "lines": len(self.text), "overlays": len(self.overlay) if self.styles else 0, "styles": self.styles, "blank": request.app.url_for( "Memes.blank", template_id=self.id + "." + settings.DEFAULT_EXTENSION, _external=True, _scheme=settings.SCHEME, ), "example": { "text": self.example if any(self.example) else [], "url": self.build_example_url(request), }, "source": self.source, "_self": self.build_self_url(request), } def build_self_url(self, request: Request) -> str: return request.app.url_for( "Templates.detail", id=self.id, _external=True, _scheme=settings.SCHEME, ) def build_example_url( self, request: Request, *, extension: str = settings.DEFAULT_EXTENSION, external: bool = True, ) -> str: kwargs = { "template_id": self.id, "text_paths": utils.text.encode(self.example) + "." + extension, "_external": external, } if external: kwargs["_scheme"] = settings.SCHEME url = request.app.url_for("Memes.text", **kwargs) return utils.urls.clean(url) def build_custom_url( self, request: Request, text_lines: list[str], *, extension: str = settings.DEFAULT_EXTENSION, background: str = "", style: str = "", ): if extension not in settings.ALLOWED_EXTENSIONS: extension = settings.DEFAULT_EXTENSION if style == settings.DEFAULT_STYLE: style = "" url = request.app.url_for( "Memes.text", template_id="custom" if self.id == "_custom" else self.id, text_paths=utils.text.encode(text_lines) + "." 
+ extension, _external=True, _scheme=settings.SCHEME, **utils.urls.params(background=background, style=style), ) return utils.urls.clean(url) def build_path( self, text_lines: list[str], style: str, size: Dimensions, watermark: str, extension: str, ) -> Path: slug = utils.text.encode(text_lines) variant = str(self.text) + str(style) + str(size) + watermark fingerprint = utils.text.fingerprint(variant, prefix="") filename = f"{slug}.{fingerprint}.{extension}" return Path(self.id) / filename @classmethod async def create(cls, url: str, *, force=False) -> "Template": try: parsed = furl(url) except ValueError as e: logger.error(e) return cls.objects.get("_error") if parsed.netloc and "memegen.link" in parsed.netloc: logger.info(f"Handling template URL: {url}") if len(parsed.path.segments) > 1: id = Path(parsed.path.segments[1]).stem if id != "custom": return cls.objects.get_or_none(id) or cls.objects.get("_error") background = parsed.args.get("background") if not background: return cls.objects.get("_error") url = background parsed = furl(url) id = utils.text.fingerprint(url) template = cls.objects.get_or_create(id, url) suffix = Path(str(parsed.path)).suffix if not suffix or len(suffix) > 10: logger.warning(f"Unable to determine image extension: {url}") suffix = settings.PLACEHOLDER_SUFFIX filename = "default" + suffix path = aiopath.AsyncPath(template.directory) / filename if await path.exists() and not settings.DEBUG and not force: logger.info(f"Found background {url} at {path}") return template logger.info(f"Saving background {url} to {path}") if not await utils.http.download(url, path): return template try: await asyncio.to_thread(utils.images.load, Path(path)) except utils.images.EXCEPTIONS as e: logger.error(e) await path.unlink(missing_ok=True) return template async def check(self, style: str, *, force=False) -> bool: if style in {"", None, settings.DEFAULT_STYLE}: return True if style in self.styles: return True if not utils.urls.schema(style): 
logger.error(f"Invalid style for {self.id} template: {style}") return False filename = utils.text.fingerprint(style, suffix=self.image.suffix) path = aiopath.AsyncPath(self.directory) / filename if await path.exists() and not settings.DEBUG and not force: logger.info(f"Found overlay {style} at {path}") return True urls = style.split(",") logger.info(f"Embeding {len(urls)} overlay image(s) onto {path}") await asyncio.to_thread(shutil.copy, self.image, path) embedded = 0 for index, url in enumerate(urls): success = await self._embed(index, url, path, force) if success: embedded += 1 if len(urls) == 1 and not embedded: await path.unlink() return embedded == len(urls) async def _embed( self, index: int, url: str, background: aiopath.AsyncPath, force: bool ) -> bool: if url.strip() in {"", settings.DEFAULT_STYLE}: return True suffix = Path(str(furl(url).path)).suffix if not suffix: logger.warning(f"Unable to determine image extension: {url}") suffix = ".png" filename = utils.text.fingerprint(url, prefix="_embed-", suffix=suffix) foreground = aiopath.AsyncPath(self.directory) / filename if await foreground.exists() and not settings.DEBUG and not force: logger.info(f"Found overlay {url} at {foreground}") else: logger.info(f"Saving overlay {url} to {foreground}") await utils.http.download(url, foreground) try: await asyncio.to_thread( utils.images.embed, self, index, Path(foreground), Path(background) ) except utils.images.EXCEPTIONS as e: logger.error(e) await foreground.unlink(missing_ok=True) return await foreground.exists() def clean(self): for path in self.directory.iterdir(): if path.stem not in {"config", "default"}: path.unlink() def delete(self): if self.directory.exists(): shutil.rmtree(self.directory) def matches(self, query: str) -> bool: example = " ".join(line.lower() for line in self.example) return any((query in self.id, query in self.name.lower(), query in example))
[ "datafiles.field", "furl.furl", "sanic.log.logger.error", "pathlib.Path", "sanic.log.logger.warning", "aiopath.AsyncPath", "sanic.log.logger.info", "sanic.log.logger.debug", "asyncio.to_thread", "shutil.rmtree", "datafiles.datafile" ]
[((341, 404), 'datafiles.datafile', 'datafile', (['"""../../templates/{self.id}/config.yml"""'], {'defaults': '(True)'}), "('../../templates/{self.id}/config.yml', defaults=True)\n", (349, 404), False, 'from datafiles import datafile, field\n'), ((620, 679), 'datafiles.field', 'field', ([], {'default_factory': "(lambda : ['Top Line', 'Bottom Line'])"}), "(default_factory=lambda : ['Top Line', 'Bottom Line'])\n", (625, 679), False, 'from datafiles import datafile, field\n'), ((2633, 2705), 'sanic.log.logger.warning', 'logger.warning', (['f"""Style {style!r} not available for template: {self.id}"""'], {}), "(f'Style {style!r} not available for template: {self.id}')\n", (2647, 2705), False, 'from sanic.log import logger\n'), ((6838, 6887), 'sanic.log.logger.info', 'logger.info', (['f"""Saving background {url} to {path}"""'], {}), "(f'Saving background {url} to {path}')\n", (6849, 6887), False, 'from sanic.log import logger\n'), ((2434, 2502), 'sanic.log.logger.debug', 'logger.debug', (['f"""No default background image for template: {self.id}"""'], {}), "(f'No default background image for template: {self.id}')\n", (2446, 2502), False, 'from sanic.log import logger\n'), ((5442, 5455), 'pathlib.Path', 'Path', (['self.id'], {}), '(self.id)\n', (5446, 5455), False, 'from pathlib import Path\n'), ((5586, 5595), 'furl.furl', 'furl', (['url'], {}), '(url)\n', (5590, 5595), False, 'from furl import furl\n'), ((5776, 5820), 'sanic.log.logger.info', 'logger.info', (['f"""Handling template URL: {url}"""'], {}), "(f'Handling template URL: {url}')\n", (5787, 5820), False, 'from sanic.log import logger\n'), ((6456, 6517), 'sanic.log.logger.warning', 'logger.warning', (['f"""Unable to determine image extension: {url}"""'], {}), "(f'Unable to determine image extension: {url}')\n", (6470, 6517), False, 'from sanic.log import logger\n'), ((6621, 6658), 'aiopath.AsyncPath', 'aiopath.AsyncPath', (['template.directory'], {}), '(template.directory)\n', (6638, 6658), False, 'import 
aiopath\n'), ((6752, 6800), 'sanic.log.logger.info', 'logger.info', (['f"""Found background {url} at {path}"""'], {}), "(f'Found background {url} at {path}')\n", (6763, 6800), False, 'from sanic.log import logger\n'), ((7451, 7513), 'sanic.log.logger.error', 'logger.error', (['f"""Invalid style for {self.id} template: {style}"""'], {}), "(f'Invalid style for {self.id} template: {style}')\n", (7463, 7513), False, 'from sanic.log import logger\n'), ((7630, 7663), 'aiopath.AsyncPath', 'aiopath.AsyncPath', (['self.directory'], {}), '(self.directory)\n', (7647, 7663), False, 'import aiopath\n'), ((7756, 7803), 'sanic.log.logger.info', 'logger.info', (['f"""Found overlay {style} at {path}"""'], {}), "(f'Found overlay {style} at {path}')\n", (7767, 7803), False, 'from sanic.log import logger\n'), ((7949, 7997), 'asyncio.to_thread', 'asyncio.to_thread', (['shutil.copy', 'self.image', 'path'], {}), '(shutil.copy, self.image, path)\n', (7966, 7997), False, 'import asyncio\n'), ((8580, 8641), 'sanic.log.logger.warning', 'logger.warning', (['f"""Unable to determine image extension: {url}"""'], {}), "(f'Unable to determine image extension: {url}')\n", (8594, 8641), False, 'from sanic.log import logger\n'), ((8772, 8805), 'aiopath.AsyncPath', 'aiopath.AsyncPath', (['self.directory'], {}), '(self.directory)\n', (8789, 8805), False, 'import aiopath\n'), ((8905, 8956), 'sanic.log.logger.info', 'logger.info', (['f"""Found overlay {url} at {foreground}"""'], {}), "(f'Found overlay {url} at {foreground}')\n", (8916, 8956), False, 'from sanic.log import logger\n'), ((8983, 9035), 'sanic.log.logger.info', 'logger.info', (['f"""Saving overlay {url} to {foreground}"""'], {}), "(f'Saving overlay {url} to {foreground}')\n", (8994, 9035), False, 'from sanic.log import logger\n'), ((9632, 9661), 'shutil.rmtree', 'shutil.rmtree', (['self.directory'], {}), '(self.directory)\n', (9645, 9661), False, 'import shutil\n'), ((5640, 5655), 'sanic.log.logger.error', 'logger.error', (['e'], {}), 
'(e)\n', (5652, 5655), False, 'from sanic.log import logger\n'), ((6247, 6256), 'furl.furl', 'furl', (['url'], {}), '(url)\n', (6251, 6256), False, 'from furl import furl\n'), ((7107, 7122), 'sanic.log.logger.error', 'logger.error', (['e'], {}), '(e)\n', (7119, 7122), False, 'from sanic.log import logger\n'), ((9297, 9312), 'sanic.log.logger.error', 'logger.error', (['e'], {}), '(e)\n', (9309, 9312), False, 'from sanic.log import logger\n'), ((5888, 5917), 'pathlib.Path', 'Path', (['parsed.path.segments[1]'], {}), '(parsed.path.segments[1])\n', (5892, 5917), False, 'from pathlib import Path\n'), ((7038, 7048), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (7042, 7048), False, 'from pathlib import Path\n'), ((9191, 9207), 'pathlib.Path', 'Path', (['foreground'], {}), '(foreground)\n', (9195, 9207), False, 'from pathlib import Path\n'), ((9209, 9225), 'pathlib.Path', 'Path', (['background'], {}), '(background)\n', (9213, 9225), False, 'from pathlib import Path\n'), ((8521, 8530), 'furl.furl', 'furl', (['url'], {}), '(url)\n', (8525, 8530), False, 'from furl import furl\n')]
import codecs import os # Function to save a string into a file def save_string_in_file(string_text, file_name): with codecs.open(file_name, "w", "utf-8") as f: f.write(string_text) f.close() # Function to read all files in a dir with a specific extension def read_files_in_dir_ext(dir_route, extension): files = os.listdir(dir_route) files_ext = [file for file in files if file.endswith(extension)] return files_ext # Function to read a file into a string def read_file_in_string(file_name): file_in_string = "" with codecs.open(file_name, "r", "utf-8") as f: file_in_string = f.read() f.close() return file_in_string # Function to create a directory def create_directory(directory): if not os.path.exists(directory): os.makedirs(directory) return
[ "os.path.exists", "codecs.open", "os.listdir", "os.makedirs" ]
[((340, 361), 'os.listdir', 'os.listdir', (['dir_route'], {}), '(dir_route)\n', (350, 361), False, 'import os\n'), ((123, 159), 'codecs.open', 'codecs.open', (['file_name', '"""w"""', '"""utf-8"""'], {}), "(file_name, 'w', 'utf-8')\n", (134, 159), False, 'import codecs\n'), ((564, 600), 'codecs.open', 'codecs.open', (['file_name', '"""r"""', '"""utf-8"""'], {}), "(file_name, 'r', 'utf-8')\n", (575, 600), False, 'import codecs\n'), ((764, 789), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (778, 789), False, 'import os\n'), ((799, 821), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (810, 821), False, 'import os\n')]
# This file was automatically generated by SWIG (http://www.swig.org). # Version 4.0.2 # # Do not make changes to this file unless you know what you are doing--modify # the SWIG interface file instead. from sys import version_info as _swig_python_version_info if _swig_python_version_info < (2, 7, 0): raise RuntimeError("Python 2.7 or later required") # Import the low-level C/C++ module if __package__ or "." in __name__: from . import _beast else: import _beast try: import builtins as __builtin__ except ImportError: import __builtin__ def _swig_repr(self): try: strthis = "proxy of " + self.this.__repr__() except __builtin__.Exception: strthis = "" return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,) def _swig_setattr_nondynamic_instance_variable(set): def set_instance_attr(self, name, value): if name == "thisown": self.this.own(value) elif name == "this": set(self, name, value) elif hasattr(self, name) and isinstance(getattr(type(self), name), property): set(self, name, value) else: raise AttributeError("You cannot add instance attributes to %s" % self) return set_instance_attr def _swig_setattr_nondynamic_class_variable(set): def set_class_attr(cls, name, value): if hasattr(cls, name) and not isinstance(getattr(cls, name), property): set(cls, name, value) else: raise AttributeError("You cannot add class attributes to %s" % cls) return set_class_attr def _swig_add_metaclass(metaclass): """Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass""" def wrapper(cls): return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy()) return wrapper class _SwigNonDynamicMeta(type): """Meta class to enforce nondynamic attributes (no new attributes) for a class""" __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__) PI = _beast.PI TWOPI = _beast.TWOPI def load_config(filename: "char const *") -> "void": return _beast.load_config(filename) def xyz_hash(x: "float", y: 
"float", z: "float") -> "size_t": return _beast.xyz_hash(x, y, z) def xyz_hash_mask(radians: "float") -> "size_t": return _beast.xyz_hash_mask(radians) class star(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr x = property(_beast.star_x_get, _beast.star_x_set) y = property(_beast.star_y_get, _beast.star_y_set) z = property(_beast.star_z_get, _beast.star_z_set) flux = property(_beast.star_flux_get, _beast.star_flux_set) id = property(_beast.star_id_get, _beast.star_id_set) px = property(_beast.star_px_get, _beast.star_px_set) py = property(_beast.star_py_get, _beast.star_py_set) unreliable = property(_beast.star_unreliable_get, _beast.star_unreliable_set) star_idx = property(_beast.star_star_idx_get, _beast.star_star_idx_set) sigma_sq = property(_beast.star_sigma_sq_get, _beast.star_sigma_sq_set) hash_val = property(_beast.star_hash_val_get, _beast.star_hash_val_set) def __init__(self, *args): _beast.star_swiginit(self, _beast.new_star(*args)) def __eq__(self, s: "star") -> "bool": return _beast.star___eq__(self, s) def __mul__(self, s: "star") -> "float": return _beast.star___mul__(self, s) def DBG_(self, s: "char const *") -> "void": return _beast.star_DBG_(self, s) __swig_destroy__ = _beast.delete_star # Register star in _beast: _beast.star_swigregister(star) cvar = _beast.cvar def star_gt_x(s1: "star", s2: "star") -> "bool": return _beast.star_gt_x(s1, s2) def star_gt_y(s1: "star", s2: "star") -> "bool": return _beast.star_gt_y(s1, s2) def star_gt_z(s1: "star", s2: "star") -> "bool": return _beast.star_gt_z(s1, s2) def star_gt_flux(s1: "star", s2: "star") -> "bool": return _beast.star_gt_flux(s1, s2) def star_lt_x(s1: "star", s2: "star") -> "bool": return _beast.star_lt_x(s1, s2) def star_lt_y(s1: "star", s2: "star") -> "bool": return _beast.star_lt_y(s1, s2) def star_lt_z(s1: "star", s2: "star") -> "bool": return _beast.star_lt_z(s1, s2) def star_lt_flux(s1: "star", s2: "star") -> 
"bool": return _beast.star_lt_flux(s1, s2) class star_db(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr max_variance = property(_beast.star_db_max_variance_get, _beast.star_db_max_variance_set) def __init__(self): _beast.star_db_swiginit(self, _beast.new_star_db()) __swig_destroy__ = _beast.delete_star_db def size(self) -> "size_t": return _beast.star_db_size(self) def __iadd__(self, *args) -> "star_db *": return _beast.star_db___iadd__(self, *args) def __sub__(self, s: "star_db") -> "star_db *": return _beast.star_db___sub__(self, s) def __and__(self, s: "star_db") -> "star_db *": return _beast.star_db___and__(self, s) def get_star(self, idx: "int") -> "star *": return _beast.star_db_get_star(self, idx) def copy(self) -> "star_db *": return _beast.star_db_copy(self) def copy_n_brightest(self, n: "size_t") -> "star_db *": return _beast.star_db_copy_n_brightest(self, n) def load_catalog(self, catalog: "char const *", year: "float") -> "void": return _beast.star_db_load_catalog(self, catalog, year) def count(self, *args) -> "size_t": return _beast.star_db_count(self, *args) def DBG_(self, s: "char const *") -> "void": return _beast.star_db_DBG_(self, s) # Register star_db in _beast: _beast.star_db_swigregister(star_db) class star_fov(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr def get_score(self, *args) -> "float": return _beast.star_fov_get_score(self, *args) def get_id(self, px: "float", py: "float") -> "int": return _beast.star_fov_get_id(self, px, py) def __init__(self, s: "star_db", db_max_variance_: "float"): _beast.star_fov_swiginit(self, _beast.new_star_fov(s, db_max_variance_)) __swig_destroy__ = _beast.delete_star_fov # Register star_fov in _beast: _beast.star_fov_swigregister(star_fov) class star_query(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The 
membership flag") __repr__ = _swig_repr map = property(_beast.star_query_map_get, _beast.star_query_map_set) map_size = property(_beast.star_query_map_size_get, _beast.star_query_map_size_set) kdresults = property(_beast.star_query_kdresults_get, _beast.star_query_kdresults_set) def __init__(self, s: "star_db"): _beast.star_query_swiginit(self, _beast.new_star_query(s)) __swig_destroy__ = _beast.delete_star_query def is_kdsorted(self) -> "uint8_t": return _beast.star_query_is_kdsorted(self) def kdsort(self) -> "void": return _beast.star_query_kdsort(self) def sort(self) -> "void": return _beast.star_query_sort(self) def r_size(self) -> "size_t": return _beast.star_query_r_size(self) def get_kdmask(self, i: "size_t") -> "int8_t": return _beast.star_query_get_kdmask(self, i) def reset_kdmask(self) -> "void": return _beast.star_query_reset_kdmask(self) def clear_kdresults(self) -> "void": return _beast.star_query_clear_kdresults(self) def kdcheck(self, idx: "int", x: "float", y: "float", z: "float", r: "float", min_flux: "float") -> "void": return _beast.star_query_kdcheck(self, idx, x, y, z, r, min_flux) def kdsearch(self, *args) -> "void": return _beast.star_query_kdsearch(self, *args) def kdsearch_x(self, x: "float const", y: "float const", z: "float const", r: "float const", min_flux: "float", min: "int", max: "int") -> "void": return _beast.star_query_kdsearch_x(self, x, y, z, r, min_flux, min, max) def kdsearch_y(self, x: "float const", y: "float const", z: "float const", r: "float const", min_flux: "float", min: "int", max: "int") -> "void": return _beast.star_query_kdsearch_y(self, x, y, z, r, min_flux, min, max) def kdsearch_z(self, x: "float const", y: "float const", z: "float const", r: "float const", min_flux: "float", min: "int", max: "int") -> "void": return _beast.star_query_kdsearch_z(self, x, y, z, r, min_flux, min, max) def kdmask_filter_catalog(self) -> "void": return _beast.star_query_kdmask_filter_catalog(self) def kdmask_uniform_density(self, 
min_stars_per_fov: "int") -> "void": return _beast.star_query_kdmask_uniform_density(self, min_stars_per_fov) def from_kdmask(self) -> "star_db *": return _beast.star_query_from_kdmask(self) def from_kdresults(self) -> "star_db *": return _beast.star_query_from_kdresults(self) def DBG_(self, s: "char const *") -> "void": return _beast.star_query_DBG_(self, s) # Register star_query in _beast: _beast.star_query_swigregister(star_query) class constellation(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr p = property(_beast.constellation_p_get, _beast.constellation_p_set) s1 = property(_beast.constellation_s1_get, _beast.constellation_s1_set) s2 = property(_beast.constellation_s2_get, _beast.constellation_s2_set) idx = property(_beast.constellation_idx_get, _beast.constellation_idx_set) def DBG_(self, s: "char const *") -> "void": return _beast.constellation_DBG_(self, s) def __init__(self): _beast.constellation_swiginit(self, _beast.new_constellation()) __swig_destroy__ = _beast.delete_constellation # Register constellation in _beast: _beast.constellation_swigregister(constellation) class constellation_pair(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr totalscore = property(_beast.constellation_pair_totalscore_get, _beast.constellation_pair_totalscore_set) db_s1 = property(_beast.constellation_pair_db_s1_get, _beast.constellation_pair_db_s1_set) db_s2 = property(_beast.constellation_pair_db_s2_get, _beast.constellation_pair_db_s2_set) img_s1 = property(_beast.constellation_pair_img_s1_get, _beast.constellation_pair_img_s1_set) img_s2 = property(_beast.constellation_pair_img_s2_get, _beast.constellation_pair_img_s2_set) def flip(self) -> "void": return _beast.constellation_pair_flip(self) def DBG_(self, s: "char const *") -> "void": return _beast.constellation_pair_DBG_(self, s) def __init__(self): 
_beast.constellation_pair_swiginit(self, _beast.new_constellation_pair()) __swig_destroy__ = _beast.delete_constellation_pair # Register constellation_pair in _beast: _beast.constellation_pair_swigregister(constellation_pair) class constellation_lt(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr def __call__(self, c1: "constellation", c2: "constellation") -> "bool": return _beast.constellation_lt___call__(self, c1, c2) def __init__(self): _beast.constellation_lt_swiginit(self, _beast.new_constellation_lt()) __swig_destroy__ = _beast.delete_constellation_lt # Register constellation_lt in _beast: _beast.constellation_lt_swigregister(constellation_lt) def constellation_lt_s1(c1: "constellation", c2: "constellation") -> "bool": return _beast.constellation_lt_s1(c1, c2) def constellation_lt_s2(c1: "constellation", c2: "constellation") -> "bool": return _beast.constellation_lt_s2(c1, c2) def constellation_lt_p(c1: "constellation", c2: "constellation") -> "bool": return _beast.constellation_lt_p(c1, c2) class constellation_db(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr stars = property(_beast.constellation_db_stars_get, _beast.constellation_db_stars_set) results = property(_beast.constellation_db_results_get, _beast.constellation_db_results_set) map_size = property(_beast.constellation_db_map_size_get, _beast.constellation_db_map_size_set) map = property(_beast.constellation_db_map_get, _beast.constellation_db_map_set) def __init__(self, s: "star_db", stars_per_fov: "int", from_image: "int"): _beast.constellation_db_swiginit(self, _beast.new_constellation_db(s, stars_per_fov, from_image)) __swig_destroy__ = _beast.delete_constellation_db def DBG_(self, s: "char const *") -> "void": return _beast.constellation_db_DBG_(self, s) # Register constellation_db in _beast: 
_beast.constellation_db_swigregister(constellation_db) class match_result(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr R11 = property(_beast.match_result_R11_get, _beast.match_result_R11_set) R12 = property(_beast.match_result_R12_get, _beast.match_result_R12_set) R13 = property(_beast.match_result_R13_get, _beast.match_result_R13_set) R21 = property(_beast.match_result_R21_get, _beast.match_result_R21_set) R22 = property(_beast.match_result_R22_get, _beast.match_result_R22_set) R23 = property(_beast.match_result_R23_get, _beast.match_result_R23_set) R31 = property(_beast.match_result_R31_get, _beast.match_result_R31_set) R32 = property(_beast.match_result_R32_get, _beast.match_result_R32_set) R33 = property(_beast.match_result_R33_get, _beast.match_result_R33_set) match = property(_beast.match_result_match_get, _beast.match_result_match_set) dec = property(_beast.match_result_dec_get, _beast.match_result_dec_set) ra = property(_beast.match_result_ra_get, _beast.match_result_ra_set) ori = property(_beast.match_result_ori_get, _beast.match_result_ori_set) def __init__(self, db_: "constellation_db", img_: "constellation_db", img_mask_: "star_fov"): _beast.match_result_swiginit(self, _beast.new_match_result(db_, img_, img_mask_)) __swig_destroy__ = _beast.delete_match_result def size(self) -> "size_t": return _beast.match_result_size(self) def init(self, db_const_: "constellation", img_const_: "constellation") -> "void": return _beast.match_result_init(self, db_const_, img_const_) def copy_over(self, c: "match_result") -> "void": return _beast.match_result_copy_over(self, c) def related(self, m: "constellation_pair") -> "int": return _beast.match_result_related(self, m) def search(self) -> "void": return _beast.match_result_search(self) def clear_search(self) -> "void": return _beast.match_result_clear_search(self) def compute_score(self) -> "void": return 
_beast.match_result_compute_score(self) def from_match(self) -> "star_db *": return _beast.match_result_from_match(self) def weighted_triad(self) -> "void": return _beast.match_result_weighted_triad(self) def DBG_(self, s: "char const *") -> "void": return _beast.match_result_DBG_(self, s) def calc_ori(self) -> "void": return _beast.match_result_calc_ori(self) def get_dec(self) -> "double": return _beast.match_result_get_dec(self) def get_ra(self) -> "double": return _beast.match_result_get_ra(self) def get_ori(self) -> "double": return _beast.match_result_get_ori(self) # Register match_result in _beast: _beast.match_result_swigregister(match_result) class db_match(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr p_match = property(_beast.db_match_p_match_get, _beast.db_match_p_match_set) winner = property(_beast.db_match_winner_get, _beast.db_match_winner_set) def __init__(self, db: "constellation_db", img: "constellation_db"): _beast.db_match_swiginit(self, _beast.new_db_match(db, img)) __swig_destroy__ = _beast.delete_db_match # Register db_match in _beast: _beast.db_match_swigregister(db_match)
[ "_beast.new_star_db", "_beast.new_star_fov", "_beast.constellation_swigregister", "_beast.star_DBG_", "_beast.star_lt_x", "_beast.star_query_kdsearch_z", "_beast.star_query_kdsort", "_beast.match_result_size", "_beast.star_fov_get_id", "_beast.star_query_from_kdresults", "_beast.star_db___and__"...
[((3680, 3710), '_beast.star_swigregister', '_beast.star_swigregister', (['star'], {}), '(star)\n', (3704, 3710), False, 'import _beast\n'), ((5830, 5866), '_beast.star_db_swigregister', '_beast.star_db_swigregister', (['star_db'], {}), '(star_db)\n', (5857, 5866), False, 'import _beast\n'), ((6453, 6491), '_beast.star_fov_swigregister', '_beast.star_fov_swigregister', (['star_fov'], {}), '(star_fov)\n', (6481, 6491), False, 'import _beast\n'), ((9249, 9291), '_beast.star_query_swigregister', '_beast.star_query_swigregister', (['star_query'], {}), '(star_query)\n', (9279, 9291), False, 'import _beast\n'), ((10039, 10087), '_beast.constellation_swigregister', '_beast.constellation_swigregister', (['constellation'], {}), '(constellation)\n', (10072, 10087), False, 'import _beast\n'), ((11140, 11198), '_beast.constellation_pair_swigregister', '_beast.constellation_pair_swigregister', (['constellation_pair'], {}), '(constellation_pair)\n', (11178, 11198), False, 'import _beast\n'), ((11696, 11750), '_beast.constellation_lt_swigregister', '_beast.constellation_lt_swigregister', (['constellation_lt'], {}), '(constellation_lt)\n', (11732, 11750), False, 'import _beast\n'), ((13038, 13092), '_beast.constellation_db_swigregister', '_beast.constellation_db_swigregister', (['constellation_db'], {}), '(constellation_db)\n', (13074, 13092), False, 'import _beast\n'), ((15881, 15927), '_beast.match_result_swigregister', '_beast.match_result_swigregister', (['match_result'], {}), '(match_result)\n', (15913, 15927), False, 'import _beast\n'), ((16461, 16499), '_beast.db_match_swigregister', '_beast.db_match_swigregister', (['db_match'], {}), '(db_match)\n', (16489, 16499), False, 'import _beast\n'), ((2171, 2199), '_beast.load_config', '_beast.load_config', (['filename'], {}), '(filename)\n', (2189, 2199), False, 'import _beast\n'), ((2274, 2298), '_beast.xyz_hash', '_beast.xyz_hash', (['x', 'y', 'z'], {}), '(x, y, z)\n', (2289, 2298), False, 'import _beast\n'), ((2360, 2389), 
'_beast.xyz_hash_mask', '_beast.xyz_hash_mask', (['radians'], {}), '(radians)\n', (2380, 2389), False, 'import _beast\n'), ((3792, 3816), '_beast.star_gt_x', '_beast.star_gt_x', (['s1', 's2'], {}), '(s1, s2)\n', (3808, 3816), False, 'import _beast\n'), ((3878, 3902), '_beast.star_gt_y', '_beast.star_gt_y', (['s1', 's2'], {}), '(s1, s2)\n', (3894, 3902), False, 'import _beast\n'), ((3964, 3988), '_beast.star_gt_z', '_beast.star_gt_z', (['s1', 's2'], {}), '(s1, s2)\n', (3980, 3988), False, 'import _beast\n'), ((4053, 4080), '_beast.star_gt_flux', '_beast.star_gt_flux', (['s1', 's2'], {}), '(s1, s2)\n', (4072, 4080), False, 'import _beast\n'), ((4142, 4166), '_beast.star_lt_x', '_beast.star_lt_x', (['s1', 's2'], {}), '(s1, s2)\n', (4158, 4166), False, 'import _beast\n'), ((4228, 4252), '_beast.star_lt_y', '_beast.star_lt_y', (['s1', 's2'], {}), '(s1, s2)\n', (4244, 4252), False, 'import _beast\n'), ((4314, 4338), '_beast.star_lt_z', '_beast.star_lt_z', (['s1', 's2'], {}), '(s1, s2)\n', (4330, 4338), False, 'import _beast\n'), ((4403, 4430), '_beast.star_lt_flux', '_beast.star_lt_flux', (['s1', 's2'], {}), '(s1, s2)\n', (4422, 4430), False, 'import _beast\n'), ((11841, 11875), '_beast.constellation_lt_s1', '_beast.constellation_lt_s1', (['c1', 'c2'], {}), '(c1, c2)\n', (11867, 11875), False, 'import _beast\n'), ((11965, 11999), '_beast.constellation_lt_s2', '_beast.constellation_lt_s2', (['c1', 'c2'], {}), '(c1, c2)\n', (11991, 11999), False, 'import _beast\n'), ((12088, 12121), '_beast.constellation_lt_p', '_beast.constellation_lt_p', (['c1', 'c2'], {}), '(c1, c2)\n', (12113, 12121), False, 'import _beast\n'), ((3401, 3428), '_beast.star___eq__', '_beast.star___eq__', (['self', 's'], {}), '(self, s)\n', (3419, 3428), False, 'import _beast\n'), ((3490, 3518), '_beast.star___mul__', '_beast.star___mul__', (['self', 's'], {}), '(self, s)\n', (3509, 3518), False, 'import _beast\n'), ((3584, 3609), '_beast.star_DBG_', '_beast.star_DBG_', (['self', 's'], {}), '(self, s)\n', 
(3600, 3609), False, 'import _beast\n'), ((4854, 4879), '_beast.star_db_size', '_beast.star_db_size', (['self'], {}), '(self)\n', (4873, 4879), False, 'import _beast\n'), ((4942, 4978), '_beast.star_db___iadd__', '_beast.star_db___iadd__', (['self', '*args'], {}), '(self, *args)\n', (4965, 4978), False, 'import _beast\n'), ((5047, 5078), '_beast.star_db___sub__', '_beast.star_db___sub__', (['self', 's'], {}), '(self, s)\n', (5069, 5078), False, 'import _beast\n'), ((5147, 5178), '_beast.star_db___and__', '_beast.star_db___and__', (['self', 's'], {}), '(self, s)\n', (5169, 5178), False, 'import _beast\n'), ((5243, 5277), '_beast.star_db_get_star', '_beast.star_db_get_star', (['self', 'idx'], {}), '(self, idx)\n', (5266, 5277), False, 'import _beast\n'), ((5329, 5354), '_beast.star_db_copy', '_beast.star_db_copy', (['self'], {}), '(self)\n', (5348, 5354), False, 'import _beast\n'), ((5431, 5471), '_beast.star_db_copy_n_brightest', '_beast.star_db_copy_n_brightest', (['self', 'n'], {}), '(self, n)\n', (5462, 5471), False, 'import _beast\n'), ((5566, 5614), '_beast.star_db_load_catalog', '_beast.star_db_load_catalog', (['self', 'catalog', 'year'], {}), '(self, catalog, year)\n', (5593, 5614), False, 'import _beast\n'), ((5671, 5704), '_beast.star_db_count', '_beast.star_db_count', (['self', '*args'], {}), '(self, *args)\n', (5691, 5704), False, 'import _beast\n'), ((5770, 5798), '_beast.star_db_DBG_', '_beast.star_db_DBG_', (['self', 's'], {}), '(self, s)\n', (5789, 5798), False, 'import _beast\n'), ((6079, 6117), '_beast.star_fov_get_score', '_beast.star_fov_get_score', (['self', '*args'], {}), '(self, *args)\n', (6104, 6117), False, 'import _beast\n'), ((6191, 6227), '_beast.star_fov_get_id', '_beast.star_fov_get_id', (['self', 'px', 'py'], {}), '(self, px, py)\n', (6213, 6227), False, 'import _beast\n'), ((7109, 7144), '_beast.star_query_is_kdsorted', '_beast.star_query_is_kdsorted', (['self'], {}), '(self)\n', (7138, 7144), False, 'import _beast\n'), ((7193, 7223), 
'_beast.star_query_kdsort', '_beast.star_query_kdsort', (['self'], {}), '(self)\n', (7217, 7223), False, 'import _beast\n'), ((7270, 7298), '_beast.star_query_sort', '_beast.star_query_sort', (['self'], {}), '(self)\n', (7292, 7298), False, 'import _beast\n'), ((7349, 7379), '_beast.star_query_r_size', '_beast.star_query_r_size', (['self'], {}), '(self)\n', (7373, 7379), False, 'import _beast\n'), ((7447, 7484), '_beast.star_query_get_kdmask', '_beast.star_query_get_kdmask', (['self', 'i'], {}), '(self, i)\n', (7475, 7484), False, 'import _beast\n'), ((7539, 7575), '_beast.star_query_reset_kdmask', '_beast.star_query_reset_kdmask', (['self'], {}), '(self)\n', (7569, 7575), False, 'import _beast\n'), ((7633, 7672), '_beast.star_query_clear_kdresults', '_beast.star_query_clear_kdresults', (['self'], {}), '(self)\n', (7666, 7672), False, 'import _beast\n'), ((7801, 7859), '_beast.star_query_kdcheck', '_beast.star_query_kdcheck', (['self', 'idx', 'x', 'y', 'z', 'r', 'min_flux'], {}), '(self, idx, x, y, z, r, min_flux)\n', (7826, 7859), False, 'import _beast\n'), ((7917, 7956), '_beast.star_query_kdsearch', '_beast.star_query_kdsearch', (['self', '*args'], {}), '(self, *args)\n', (7943, 7956), False, 'import _beast\n'), ((8124, 8190), '_beast.star_query_kdsearch_x', '_beast.star_query_kdsearch_x', (['self', 'x', 'y', 'z', 'r', 'min_flux', 'min', 'max'], {}), '(self, x, y, z, r, min_flux, min, max)\n', (8152, 8190), False, 'import _beast\n'), ((8358, 8424), '_beast.star_query_kdsearch_y', '_beast.star_query_kdsearch_y', (['self', 'x', 'y', 'z', 'r', 'min_flux', 'min', 'max'], {}), '(self, x, y, z, r, min_flux, min, max)\n', (8386, 8424), False, 'import _beast\n'), ((8592, 8658), '_beast.star_query_kdsearch_z', '_beast.star_query_kdsearch_z', (['self', 'x', 'y', 'z', 'r', 'min_flux', 'min', 'max'], {}), '(self, x, y, z, r, min_flux, min, max)\n', (8620, 8658), False, 'import _beast\n'), ((8722, 8767), '_beast.star_query_kdmask_filter_catalog', 
'_beast.star_query_kdmask_filter_catalog', (['self'], {}), '(self)\n', (8761, 8767), False, 'import _beast\n'), ((8858, 8923), '_beast.star_query_kdmask_uniform_density', '_beast.star_query_kdmask_uniform_density', (['self', 'min_stars_per_fov'], {}), '(self, min_stars_per_fov)\n', (8898, 8923), False, 'import _beast\n'), ((8982, 9017), '_beast.star_query_from_kdmask', '_beast.star_query_from_kdmask', (['self'], {}), '(self)\n', (9011, 9017), False, 'import _beast\n'), ((9079, 9117), '_beast.star_query_from_kdresults', '_beast.star_query_from_kdresults', (['self'], {}), '(self)\n', (9111, 9117), False, 'import _beast\n'), ((9183, 9214), '_beast.star_query_DBG_', '_beast.star_query_DBG_', (['self', 's'], {}), '(self, s)\n', (9205, 9214), False, 'import _beast\n'), ((9819, 9853), '_beast.constellation_DBG_', '_beast.constellation_DBG_', (['self', 's'], {}), '(self, s)\n', (9844, 9853), False, 'import _beast\n'), ((10793, 10829), '_beast.constellation_pair_flip', '_beast.constellation_pair_flip', (['self'], {}), '(self)\n', (10823, 10829), False, 'import _beast\n'), ((10895, 10934), '_beast.constellation_pair_DBG_', '_beast.constellation_pair_DBG_', (['self', 's'], {}), '(self, s)\n', (10925, 10934), False, 'import _beast\n'), ((11452, 11498), '_beast.constellation_lt___call__', '_beast.constellation_lt___call__', (['self', 'c1', 'c2'], {}), '(self, c1, c2)\n', (11484, 11498), False, 'import _beast\n'), ((12960, 12997), '_beast.constellation_db_DBG_', '_beast.constellation_db_DBG_', (['self', 's'], {}), '(self, s)\n', (12988, 12997), False, 'import _beast\n'), ((14541, 14571), '_beast.match_result_size', '_beast.match_result_size', (['self'], {}), '(self)\n', (14565, 14571), False, 'import _beast\n'), ((14675, 14728), '_beast.match_result_init', '_beast.match_result_init', (['self', 'db_const_', 'img_const_'], {}), '(self, db_const_, img_const_)\n', (14699, 14728), False, 'import _beast\n'), ((14799, 14837), '_beast.match_result_copy_over', 
'_beast.match_result_copy_over', (['self', 'c'], {}), '(self, c)\n', (14828, 14837), False, 'import _beast\n'), ((14911, 14947), '_beast.match_result_related', '_beast.match_result_related', (['self', 'm'], {}), '(self, m)\n', (14938, 14947), False, 'import _beast\n'), ((14996, 15028), '_beast.match_result_search', '_beast.match_result_search', (['self'], {}), '(self)\n', (15022, 15028), False, 'import _beast\n'), ((15083, 15121), '_beast.match_result_clear_search', '_beast.match_result_clear_search', (['self'], {}), '(self)\n', (15115, 15121), False, 'import _beast\n'), ((15177, 15216), '_beast.match_result_compute_score', '_beast.match_result_compute_score', (['self'], {}), '(self)\n', (15210, 15216), False, 'import _beast\n'), ((15274, 15310), '_beast.match_result_from_match', '_beast.match_result_from_match', (['self'], {}), '(self)\n', (15304, 15310), False, 'import _beast\n'), ((15367, 15407), '_beast.match_result_weighted_triad', '_beast.match_result_weighted_triad', (['self'], {}), '(self)\n', (15401, 15407), False, 'import _beast\n'), ((15473, 15506), '_beast.match_result_DBG_', '_beast.match_result_DBG_', (['self', 's'], {}), '(self, s)\n', (15497, 15506), False, 'import _beast\n'), ((15557, 15591), '_beast.match_result_calc_ori', '_beast.match_result_calc_ori', (['self'], {}), '(self)\n', (15585, 15591), False, 'import _beast\n'), ((15643, 15676), '_beast.match_result_get_dec', '_beast.match_result_get_dec', (['self'], {}), '(self)\n', (15670, 15676), False, 'import _beast\n'), ((15727, 15759), '_beast.match_result_get_ra', '_beast.match_result_get_ra', (['self'], {}), '(self)\n', (15753, 15759), False, 'import _beast\n'), ((15811, 15844), '_beast.match_result_get_ori', '_beast.match_result_get_ori', (['self'], {}), '(self)\n', (15838, 15844), False, 'import _beast\n'), ((3318, 3340), '_beast.new_star', '_beast.new_star', (['*args'], {}), '(*args)\n', (3333, 3340), False, 'import _beast\n'), ((4739, 4759), '_beast.new_star_db', '_beast.new_star_db', ([], 
{}), '()\n', (4757, 4759), False, 'import _beast\n'), ((6333, 6373), '_beast.new_star_fov', '_beast.new_star_fov', (['s', 'db_max_variance_'], {}), '(s, db_max_variance_)\n', (6352, 6373), False, 'import _beast\n'), ((6979, 7003), '_beast.new_star_query', '_beast.new_star_query', (['s'], {}), '(s)\n', (7000, 7003), False, 'import _beast\n'), ((9923, 9949), '_beast.new_constellation', '_beast.new_constellation', ([], {}), '()\n', (9947, 9949), False, 'import _beast\n'), ((11009, 11040), '_beast.new_constellation_pair', '_beast.new_constellation_pair', ([], {}), '()\n', (11038, 11040), False, 'import _beast\n'), ((11571, 11600), '_beast.new_constellation_lt', '_beast.new_constellation_lt', ([], {}), '()\n', (11598, 11600), False, 'import _beast\n'), ((12782, 12839), '_beast.new_constellation_db', '_beast.new_constellation_db', (['s', 'stars_per_fov', 'from_image'], {}), '(s, stars_per_fov, from_image)\n', (12809, 12839), False, 'import _beast\n'), ((14396, 14441), '_beast.new_match_result', '_beast.new_match_result', (['db_', 'img_', 'img_mask_'], {}), '(db_, img_, img_mask_)\n', (14419, 14441), False, 'import _beast\n'), ((16353, 16381), '_beast.new_db_match', '_beast.new_db_match', (['db', 'img'], {}), '(db, img)\n', (16372, 16381), False, 'import _beast\n')]
from onegov.core.orm import Base from onegov.core.orm.mixins import ContentMixin from onegov.core.orm.mixins import TimestampMixin from onegov.core.orm.mixins import UTCPublicationMixin from onegov.core.orm.types import UUID from onegov.search import ORMSearchable from sqlalchemy import Column from sqlalchemy import ForeignKey from sqlalchemy import Integer from sqlalchemy import Text from sqlalchemy.orm import backref from sqlalchemy.orm import object_session from sqlalchemy.orm import relationship from uuid import uuid4 class AgencyMembership(Base, ContentMixin, TimestampMixin, ORMSearchable, UTCPublicationMixin): """ A membership to an agency. """ __tablename__ = 'agency_memberships' #: the type of the item, this can be used to create custom polymorphic #: subclasses of this class. See #: `<http://docs.sqlalchemy.org/en/improve_toc/\ #: orm/extensions/declarative/inheritance.html>`_. type = Column(Text, nullable=True) __mapper_args__ = { 'polymorphic_on': type, 'polymorphic_identity': None, } es_public = True es_properties = { 'title': {'type': 'text'}, } #: the unique id, part of the url id = Column(UUID, primary_key=True, default=uuid4) #: the id of the agency agency_id = Column( Integer, ForeignKey('agencies.id'), nullable=False ) #: the related agency (which may have any number of memberships) agency = relationship( 'Agency', backref=backref( 'memberships', cascade='all, delete-orphan', lazy='dynamic', order_by='AgencyMembership.order_within_agency' ) ) #: the id of the person person_id = Column(UUID, ForeignKey('people.id'), nullable=False) #: the related person (which may have any number of memberships) person = relationship( 'Person', backref=backref( 'memberships', cascade='all, delete-orphan', lazy='dynamic', ) ) #: the position of the membership within the agency order_within_agency = Column(Integer, nullable=False) #: the position of the membership within all memberships of a person order_within_person = Column(Integer, nullable=False) #: describes 
the membership title = Column(Text, nullable=False) #: when the membership started since = Column(Text, nullable=True) @property def siblings_by_agency(self): """ Returns a query that includes all siblings by agency, including the item itself ordered by `order_within_agency`. """ query = object_session(self).query(self.__class__) query = query.order_by(self.__class__.order_within_agency) query = query.filter(self.__class__.agency == self.agency) return query @property def siblings_by_person(self): """ Returns a query that includes all siblings by person, including the item itself ordered by `order_within_person`. """ query = object_session(self).query(self.__class__) query = query.order_by(self.__class__.order_within_person) query = query.filter(self.__class__.person == self.person) return query def vcard(self, exclude=None): """ Returns the person as vCard (3.0). Allows to specify the included attributes, provides a reasonable default if none are specified. Always includes the first and last name. """ if not self.person: return '' result = self.person.vcard_object(exclude, include_memberships=False) line = result.add('org') line.value = [f"{self.agency.title}, {self.title}"] line.charset_param = 'utf-8' return result.serialize()
[ "sqlalchemy.orm.object_session", "sqlalchemy.ForeignKey", "sqlalchemy.Column", "sqlalchemy.orm.backref" ]
[((962, 989), 'sqlalchemy.Column', 'Column', (['Text'], {'nullable': '(True)'}), '(Text, nullable=True)\n', (968, 989), False, 'from sqlalchemy import Column\n'), ((1224, 1269), 'sqlalchemy.Column', 'Column', (['UUID'], {'primary_key': '(True)', 'default': 'uuid4'}), '(UUID, primary_key=True, default=uuid4)\n', (1230, 1269), False, 'from sqlalchemy import Column\n'), ((2152, 2183), 'sqlalchemy.Column', 'Column', (['Integer'], {'nullable': '(False)'}), '(Integer, nullable=False)\n', (2158, 2183), False, 'from sqlalchemy import Column\n'), ((2284, 2315), 'sqlalchemy.Column', 'Column', (['Integer'], {'nullable': '(False)'}), '(Integer, nullable=False)\n', (2290, 2315), False, 'from sqlalchemy import Column\n'), ((2361, 2389), 'sqlalchemy.Column', 'Column', (['Text'], {'nullable': '(False)'}), '(Text, nullable=False)\n', (2367, 2389), False, 'from sqlalchemy import Column\n'), ((2438, 2465), 'sqlalchemy.Column', 'Column', (['Text'], {'nullable': '(True)'}), '(Text, nullable=True)\n', (2444, 2465), False, 'from sqlalchemy import Column\n'), ((1348, 1373), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""agencies.id"""'], {}), "('agencies.id')\n", (1358, 1373), False, 'from sqlalchemy import ForeignKey\n'), ((1775, 1798), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""people.id"""'], {}), "('people.id')\n", (1785, 1798), False, 'from sqlalchemy import ForeignKey\n'), ((1535, 1656), 'sqlalchemy.orm.backref', 'backref', (['"""memberships"""'], {'cascade': '"""all, delete-orphan"""', 'lazy': '"""dynamic"""', 'order_by': '"""AgencyMembership.order_within_agency"""'}), "('memberships', cascade='all, delete-orphan', lazy='dynamic',\n order_by='AgencyMembership.order_within_agency')\n", (1542, 1656), False, 'from sqlalchemy.orm import backref\n'), ((1947, 2015), 'sqlalchemy.orm.backref', 'backref', (['"""memberships"""'], {'cascade': '"""all, delete-orphan"""', 'lazy': '"""dynamic"""'}), "('memberships', cascade='all, delete-orphan', lazy='dynamic')\n", (1954, 2015), False, 'from 
sqlalchemy.orm import backref\n'), ((2677, 2697), 'sqlalchemy.orm.object_session', 'object_session', (['self'], {}), '(self)\n', (2691, 2697), False, 'from sqlalchemy.orm import object_session\n'), ((3086, 3106), 'sqlalchemy.orm.object_session', 'object_session', (['self'], {}), '(self)\n', (3100, 3106), False, 'from sqlalchemy.orm import object_session\n')]
import os try: from google.cloud import bigquery # noqa except ImportError: BIGQUERY = False else: BIGQUERY = True GOOGLE_APPLICATION_CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS" BIGQUERY_LOCATION = "BIGQUERY_LOCATION" BIGQUERY_DATASET = "BIGQUERY_DATASET" def use_bigquery(): return ( BIGQUERY and os.environ.get(GOOGLE_APPLICATION_CREDENTIALS) and os.environ.get(BIGQUERY_LOCATION) and os.environ(BIGQUERY_DATASET) )
[ "os.environ.get", "os.environ" ]
[((340, 386), 'os.environ.get', 'os.environ.get', (['GOOGLE_APPLICATION_CREDENTIALS'], {}), '(GOOGLE_APPLICATION_CREDENTIALS)\n', (354, 386), False, 'import os\n'), ((399, 432), 'os.environ.get', 'os.environ.get', (['BIGQUERY_LOCATION'], {}), '(BIGQUERY_LOCATION)\n', (413, 432), False, 'import os\n'), ((445, 473), 'os.environ', 'os.environ', (['BIGQUERY_DATASET'], {}), '(BIGQUERY_DATASET)\n', (455, 473), False, 'import os\n')]
import torch from skimage.filters import gaussian def blur_cm_plot(Cm_plot, sigma): """ Blur the keypoints/center-of-masses for better visualiztion Arguments --------- Cm_plot : tensor with the center-of-masses sigma : how much to blur Return ------ out : blurred points """ n_batch = Cm_plot.shape[0] n_reg = Cm_plot.shape[1] out = [] for n in range(n_batch): cm_plot = Cm_plot[n, :, :, :] blur_cm_plot = [] for r in range(n_reg): _blur_cm_plot = gaussian(cm_plot[r, :, :, :], sigma=sigma, mode='nearest') _blur_cm_plot = torch.from_numpy(_blur_cm_plot).float().unsqueeze(0) blur_cm_plot += [_blur_cm_plot] blur_cm_plot = torch.cat(blur_cm_plot, 0) out += [blur_cm_plot.unsqueeze(0)] return torch.cat(out, 0) def get_cm_plot(Y_cm, dim0, dim1, dim2): """ Convert the coordinate of the keypoint/center-of-mass to points in an tensor Arguments --------- Y_cm : keypoints coordinates/center-of-masses[n_bath, 3, n_reg] dim : dim of the image Return ------ out : tensor it assigns value of 1 where keypoints are located otherwise 0 """ n_batch = Y_cm.shape[0] out = [] for n in range(n_batch): Y = Y_cm[n, :, :] n_reg = Y.shape[1] axis2 = torch.linspace(-1, 1, dim2).float() axis1 = torch.linspace(-1, 1, dim1).float() axis0 = torch.linspace(-1, 1, dim0).float() index0 = [] for i in range(n_reg): index0.append(torch.argmin((axis0 - Y[2, i]) ** 2).item()) index1 = [] for i in range(n_reg): index1.append(torch.argmin((axis1 - Y[1, i]) ** 2).item()) index2 = [] for i in range(n_reg): index2.append(torch.argmin((axis2 - Y[0, i]) ** 2).item()) cm_plot = torch.zeros(n_reg, dim0, dim1, dim2) for i in range(n_reg): cm_plot[i, index0[i], index1[i], index2[i]] = 1 out += [cm_plot.unsqueeze(0)] return torch.cat(out, 0)
[ "torch.from_numpy", "torch.cat", "skimage.filters.gaussian", "torch.argmin", "torch.zeros", "torch.linspace" ]
[((918, 935), 'torch.cat', 'torch.cat', (['out', '(0)'], {}), '(out, 0)\n', (927, 935), False, 'import torch\n'), ((2160, 2177), 'torch.cat', 'torch.cat', (['out', '(0)'], {}), '(out, 0)\n', (2169, 2177), False, 'import torch\n'), ((837, 863), 'torch.cat', 'torch.cat', (['blur_cm_plot', '(0)'], {}), '(blur_cm_plot, 0)\n', (846, 863), False, 'import torch\n'), ((1981, 2017), 'torch.zeros', 'torch.zeros', (['n_reg', 'dim0', 'dim1', 'dim2'], {}), '(n_reg, dim0, dim1, dim2)\n', (1992, 2017), False, 'import torch\n'), ((555, 613), 'skimage.filters.gaussian', 'gaussian', (['cm_plot[r, :, :, :]'], {'sigma': 'sigma', 'mode': '"""nearest"""'}), "(cm_plot[r, :, :, :], sigma=sigma, mode='nearest')\n", (563, 613), False, 'from skimage.filters import gaussian\n'), ((1453, 1480), 'torch.linspace', 'torch.linspace', (['(-1)', '(1)', 'dim2'], {}), '(-1, 1, dim2)\n', (1467, 1480), False, 'import torch\n'), ((1505, 1532), 'torch.linspace', 'torch.linspace', (['(-1)', '(1)', 'dim1'], {}), '(-1, 1, dim1)\n', (1519, 1532), False, 'import torch\n'), ((1557, 1584), 'torch.linspace', 'torch.linspace', (['(-1)', '(1)', 'dim0'], {}), '(-1, 1, dim0)\n', (1571, 1584), False, 'import torch\n'), ((1671, 1707), 'torch.argmin', 'torch.argmin', (['((axis0 - Y[2, i]) ** 2)'], {}), '((axis0 - Y[2, i]) ** 2)\n', (1683, 1707), False, 'import torch\n'), ((1794, 1830), 'torch.argmin', 'torch.argmin', (['((axis1 - Y[1, i]) ** 2)'], {}), '((axis1 - Y[1, i]) ** 2)\n', (1806, 1830), False, 'import torch\n'), ((1917, 1953), 'torch.argmin', 'torch.argmin', (['((axis2 - Y[0, i]) ** 2)'], {}), '((axis2 - Y[0, i]) ** 2)\n', (1929, 1953), False, 'import torch\n'), ((716, 747), 'torch.from_numpy', 'torch.from_numpy', (['_blur_cm_plot'], {}), '(_blur_cm_plot)\n', (732, 747), False, 'import torch\n')]
import os, sys sys.path.insert(0, os.path.join(os.path.dirname(__file__),'../src'))
[ "os.path.dirname" ]
[((48, 73), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (63, 73), False, 'import os, sys\n')]
import os import wandb import torch import warnings import numpy as np import torchvision.transforms from fvcore.nn import FlopCountAnalysis from dpt.models import DPTDepthModel def get_flops(model, x, unit="G", quiet=True): _prefix = {'k': 1e3, # kilo 'M': 1e6, # mega 'G': 1e9, # giga 'T': 1e12, # tera 'P': 1e15, # peta } flops = FlopCountAnalysis(model, x) num_flops = flops.total() / _prefix[unit] if not quiet: print(f"Model FLOPs: {num_flops:.2f} {unit}FLOPs") return num_flops def get_model_size(model): torch.save(model.state_dict(), "tmp.pt") model_size = os.path.getsize("tmp.pt")/1e6 os.remove('tmp.pt') return model_size # Hyperparameters and config # Input net_w, net_h = 640, 192 h_kitti, w_kitti = 352, 1216 # Model architecture backbone = "vitb_rn50_384" # "vitb_effb0" transformer_hooks = "str:8,11" attention_variant = None # "performer" attention_heads = 12 mixed_precision = False config_dict = { "input_size": f"{net_h},{net_w}", "downsampling": "Resize image along w and h", "mixed_precision": mixed_precision, "backbone": backbone, "transformer_hooks": transformer_hooks, "attention_variant": attention_variant, "attention_heads": attention_heads, } if __name__ == "__main__": warnings.simplefilter("ignore", UserWarning) # Init wandb wandb.init(config=config_dict) config = wandb.config # Re-read config for wandb-sweep-managed inference mixed_precision = config["mixed_precision"] backbone = config["backbone"] transformer_hooks = config["transformer_hooks"] attention_variant = config["attention_variant"] if attention_variant == "None": attention_variant = None attention_heads = config["attention_heads"] input_size = config["input_size"] net_h = int(input_size.split(",")[0]) net_w = int(input_size.split(",")[1]) # Convert str hooks to list (wandb hacky solution to display hooks correctly) assert isinstance(transformer_hooks, str) and transformer_hooks[:4] == "str:", \ 'Hooks are not in the format "str:[att_hook1, att_hook2]"' conv_hooks = {"vitb_rn50_384": [0, 
1], "vitb_effb0": [1, 2]}[backbone] transformer_hooks = [int(hook) for hook in transformer_hooks.split(":")[-1].split(",")] hooks = conv_hooks + transformer_hooks # Get cpu or gpu device for training. device = "cuda" if torch.cuda.is_available() else "cpu" print("Using {} device".format(device)) torch.backends.cudnn.benchmark = True torch.backends.cudnn.enabled = True # Create model model = DPTDepthModel( path=None, scale=0.00006016, # KITTI shift=0.00579, invert=True, backbone=backbone, attention_heads=attention_heads, hooks=hooks, non_negative=True, enable_attention_hooks=False, attention_variant=attention_variant).to(device) n_inferences = 500 wandb.log({"num_inferences": n_inferences}) measures = np.zeros((n_inferences, 1)) x = torch.rand(1, 3, h_kitti, w_kitti).to(device) print(f"Kitti size: {h_kitti}, {w_kitti} | Network input size: {net_h}, {net_w}") # Cuda events t0 = torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) # Measure inference time with torch.no_grad(): with torch.cuda.amp.autocast(enabled=mixed_precision): dummy = torchvision.transforms.Resize((net_h, net_w))(x) _ = model(dummy) # Warm-up for i in range(n_inferences): t0.record() if net_h != h_kitti or net_w != w_kitti: x = torchvision.transforms.Resize((net_h, net_w))(x) y = model(x) if net_h != h_kitti or net_w != w_kitti: _ = torch.nn.functional.interpolate(y.unsqueeze(1), size=(h_kitti, w_kitti), mode="bicubic", align_corners=True) end.record() torch.cuda.synchronize() measures[i] = t0.elapsed_time(end) mean_ms = np.mean(measures) std_ms = np.std(measures) fps = 1000/measures mean_fps = np.mean(fps) std_fps = np.std(fps) GFLOPs = get_flops(model.to("cpu"), x.to("cpu")) model_MB = get_model_size(model) wandb.log({"FPS": mean_fps, "std_fps": std_fps, "ms": mean_ms, "std_ms": std_ms, "GFLOPs": GFLOPs, "MB": model_MB}) print(f"FPS: {mean_fps:.2f} +- {1/std_fps:.2f} || Inference speed (ms): {mean_ms:.4f} +- {std_ms:.4f}") print(f"GFLOPs: {GFLOPs:.3f} || Model size 
(MB): {model_MB:.2f}")
[ "torch.cuda.Event", "numpy.mean", "os.path.getsize", "wandb.log", "fvcore.nn.FlopCountAnalysis", "wandb.init", "torch.cuda.synchronize", "numpy.zeros", "torch.cuda.is_available", "torch.cuda.amp.autocast", "numpy.std", "warnings.simplefilter", "torch.no_grad", "dpt.models.DPTDepthModel", ...
[((424, 451), 'fvcore.nn.FlopCountAnalysis', 'FlopCountAnalysis', (['model', 'x'], {}), '(model, x)\n', (441, 451), False, 'from fvcore.nn import FlopCountAnalysis\n'), ((721, 740), 'os.remove', 'os.remove', (['"""tmp.pt"""'], {}), "('tmp.pt')\n", (730, 740), False, 'import os\n'), ((1369, 1413), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""', 'UserWarning'], {}), "('ignore', UserWarning)\n", (1390, 1413), False, 'import warnings\n'), ((1436, 1466), 'wandb.init', 'wandb.init', ([], {'config': 'config_dict'}), '(config=config_dict)\n', (1446, 1466), False, 'import wandb\n'), ((3111, 3154), 'wandb.log', 'wandb.log', (["{'num_inferences': n_inferences}"], {}), "({'num_inferences': n_inferences})\n", (3120, 3154), False, 'import wandb\n'), ((3170, 3197), 'numpy.zeros', 'np.zeros', (['(n_inferences, 1)'], {}), '((n_inferences, 1))\n', (3178, 3197), True, 'import numpy as np\n'), ((3366, 3402), 'torch.cuda.Event', 'torch.cuda.Event', ([], {'enable_timing': '(True)'}), '(enable_timing=True)\n', (3382, 3402), False, 'import torch\n'), ((3413, 3449), 'torch.cuda.Event', 'torch.cuda.Event', ([], {'enable_timing': '(True)'}), '(enable_timing=True)\n', (3429, 3449), False, 'import torch\n'), ((4400, 4417), 'numpy.mean', 'np.mean', (['measures'], {}), '(measures)\n', (4407, 4417), True, 'import numpy as np\n'), ((4431, 4447), 'numpy.std', 'np.std', (['measures'], {}), '(measures)\n', (4437, 4447), True, 'import numpy as np\n'), ((4487, 4499), 'numpy.mean', 'np.mean', (['fps'], {}), '(fps)\n', (4494, 4499), True, 'import numpy as np\n'), ((4514, 4525), 'numpy.std', 'np.std', (['fps'], {}), '(fps)\n', (4520, 4525), True, 'import numpy as np\n'), ((4621, 4740), 'wandb.log', 'wandb.log', (["{'FPS': mean_fps, 'std_fps': std_fps, 'ms': mean_ms, 'std_ms': std_ms,\n 'GFLOPs': GFLOPs, 'MB': model_MB}"], {}), "({'FPS': mean_fps, 'std_fps': std_fps, 'ms': mean_ms, 'std_ms':\n std_ms, 'GFLOPs': GFLOPs, 'MB': model_MB})\n", (4630, 4740), False, 'import wandb\n'), ((687, 
712), 'os.path.getsize', 'os.path.getsize', (['"""tmp.pt"""'], {}), "('tmp.pt')\n", (702, 712), False, 'import os\n'), ((2484, 2509), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2507, 2509), False, 'import torch\n'), ((3489, 3504), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3502, 3504), False, 'import torch\n'), ((2680, 2913), 'dpt.models.DPTDepthModel', 'DPTDepthModel', ([], {'path': 'None', 'scale': '(6.016e-05)', 'shift': '(0.00579)', 'invert': '(True)', 'backbone': 'backbone', 'attention_heads': 'attention_heads', 'hooks': 'hooks', 'non_negative': '(True)', 'enable_attention_hooks': '(False)', 'attention_variant': 'attention_variant'}), '(path=None, scale=6.016e-05, shift=0.00579, invert=True,\n backbone=backbone, attention_heads=attention_heads, hooks=hooks,\n non_negative=True, enable_attention_hooks=False, attention_variant=\n attention_variant)\n', (2693, 2913), False, 'from dpt.models import DPTDepthModel\n'), ((3206, 3240), 'torch.rand', 'torch.rand', (['(1)', '(3)', 'h_kitti', 'w_kitti'], {}), '(1, 3, h_kitti, w_kitti)\n', (3216, 3240), False, 'import torch\n'), ((3519, 3567), 'torch.cuda.amp.autocast', 'torch.cuda.amp.autocast', ([], {'enabled': 'mixed_precision'}), '(enabled=mixed_precision)\n', (3542, 3567), False, 'import torch\n'), ((4310, 4334), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (4332, 4334), False, 'import torch\n')]
from django.db import models import uuid, datetime from django.utils import timezone # Create your models here. class User(models.Model): user_id = models.CharField(max_length=100,default=uuid.uuid4) email = models.EmailField(max_length=100) name = models.CharField(max_length=100) password = models.CharField(max_length=250) def getUserDetails(self): return self.email class Event(models.Model): event_id = models.CharField(max_length=100,default=uuid.uuid4) event_name = models.CharField(max_length = 120) event_start = models.DateTimeField() event_end = models.DateTimeField() host_email = models.EmailField(max_length = 100) host_name = models.CharField(max_length = 100) event_description = models.CharField(max_length = 300) registration_deadline = models.DateTimeField(default=timezone.now) event_poster = models.URLField(max_length=150,default = '') def getEventDetails(self): return [self.event_name,self.event_start,self.event_end,self.host,self.event_description] class Participant(models.Model): pevent_id = models.CharField(max_length=100) participant_email = models.EmailField(max_length = 100) participant_name = models.CharField(max_length=100) participant_contactno = models.IntegerField() group_registration = models.BooleanField() no_of_members = models.IntegerField()
[ "django.db.models.EmailField", "django.db.models.IntegerField", "django.db.models.BooleanField", "django.db.models.DateTimeField", "django.db.models.URLField", "django.db.models.CharField" ]
[((154, 206), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'default': 'uuid.uuid4'}), '(max_length=100, default=uuid.uuid4)\n', (170, 206), False, 'from django.db import models\n'), ((218, 251), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (235, 251), False, 'from django.db import models\n'), ((263, 295), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (279, 295), False, 'from django.db import models\n'), ((311, 343), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)'}), '(max_length=250)\n', (327, 343), False, 'from django.db import models\n'), ((443, 495), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'default': 'uuid.uuid4'}), '(max_length=100, default=uuid.uuid4)\n', (459, 495), False, 'from django.db import models\n'), ((512, 544), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)'}), '(max_length=120)\n', (528, 544), False, 'from django.db import models\n'), ((565, 587), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (585, 587), False, 'from django.db import models\n'), ((604, 626), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (624, 626), False, 'from django.db import models\n'), ((644, 677), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (661, 677), False, 'from django.db import models\n'), ((696, 728), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (712, 728), False, 'from django.db import models\n'), ((755, 787), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (771, 787), False, 'from django.db import models\n'), ((818, 860), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 
'timezone.now'}), '(default=timezone.now)\n', (838, 860), False, 'from django.db import models\n'), ((880, 923), 'django.db.models.URLField', 'models.URLField', ([], {'max_length': '(150)', 'default': '""""""'}), "(max_length=150, default='')\n", (895, 923), False, 'from django.db import models\n'), ((1104, 1136), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1120, 1136), False, 'from django.db import models\n'), ((1161, 1194), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1178, 1194), False, 'from django.db import models\n'), ((1220, 1252), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1236, 1252), False, 'from django.db import models\n'), ((1281, 1302), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1300, 1302), False, 'from django.db import models\n'), ((1328, 1349), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (1347, 1349), False, 'from django.db import models\n'), ((1370, 1391), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1389, 1391), False, 'from django.db import models\n')]
#!/usr/bin/env python3 import os import sys import argparse from zulip_bots.finder import import_module_from_source, resolve_bot_path from zulip_bots.simple_lib import TerminalBotHandler current_dir = os.path.dirname(os.path.abspath(__file__)) def parse_args(): description = ''' This tool allows you to test a bot using the terminal (and no Zulip server). Examples: %(prog)s followup ''' parser = argparse.ArgumentParser(description=description, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('bot', action='store', help='the name or path an existing bot to run') parser.add_argument('--bot-config-file', '-b', action='store', help='optional third party config file (e.g. ~/giphy.conf)') args = parser.parse_args() return args def main(): args = parse_args() bot_path, bot_name = resolve_bot_path(args.bot) bot_dir = os.path.dirname(bot_path) sys.path.insert(0, bot_dir) try: lib_module = import_module_from_source(bot_path, bot_name) if lib_module is None: raise OSError except OSError: print("Could not find and import bot '{}'".format(bot_name)) sys.exit(1) try: message_handler = lib_module.handler_class() except AttributeError: print("This module does not appear to have a bot handler_class specified.") sys.exit(1) bot_handler = TerminalBotHandler(args.bot_config_file) if hasattr(message_handler, 'initialize') and callable(message_handler.initialize): message_handler.initialize(bot_handler) sender_email = '<EMAIL>' try: while True: content = input('Enter your message: ') message = dict( content=content, sender_email=sender_email, display_recipient=sender_email, ) message_handler.handle_message( message=message, bot_handler=bot_handler, ) except KeyboardInterrupt: print("\n\nOk, if you're happy with your terminal-based testing, try it out with a Zulip server.", "\nYou can refer to https://zulipchat.com/api/running-bots#running-a-bot.") sys.exit(1) if __name__ == '__main__': main()
[ "sys.path.insert", "argparse.ArgumentParser", "zulip_bots.finder.resolve_bot_path", "os.path.dirname", "zulip_bots.simple_lib.TerminalBotHandler", "zulip_bots.finder.import_module_from_source", "sys.exit", "os.path.abspath" ]
[((219, 244), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (234, 244), False, 'import os\n'), ((437, 544), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'description', 'formatter_class': 'argparse.RawDescriptionHelpFormatter'}), '(description=description, formatter_class=argparse.\n RawDescriptionHelpFormatter)\n', (460, 544), False, 'import argparse\n'), ((1008, 1034), 'zulip_bots.finder.resolve_bot_path', 'resolve_bot_path', (['args.bot'], {}), '(args.bot)\n', (1024, 1034), False, 'from zulip_bots.finder import import_module_from_source, resolve_bot_path\n'), ((1049, 1074), 'os.path.dirname', 'os.path.dirname', (['bot_path'], {}), '(bot_path)\n', (1064, 1074), False, 'import os\n'), ((1079, 1106), 'sys.path.insert', 'sys.path.insert', (['(0)', 'bot_dir'], {}), '(0, bot_dir)\n', (1094, 1106), False, 'import sys\n'), ((1563, 1603), 'zulip_bots.simple_lib.TerminalBotHandler', 'TerminalBotHandler', (['args.bot_config_file'], {}), '(args.bot_config_file)\n', (1581, 1603), False, 'from zulip_bots.simple_lib import TerminalBotHandler\n'), ((1138, 1183), 'zulip_bots.finder.import_module_from_source', 'import_module_from_source', (['bot_path', 'bot_name'], {}), '(bot_path, bot_name)\n', (1163, 1183), False, 'from zulip_bots.finder import import_module_from_source, resolve_bot_path\n'), ((1338, 1349), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1346, 1349), False, 'import sys\n'), ((1532, 1543), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1540, 1543), False, 'import sys\n'), ((2386, 2397), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2394, 2397), False, 'import sys\n')]
from flask import redirect from flask import request as flask_req from flask import _app_ctx_stack from ..base_client import RemoteApp class FlaskRemoteApp(RemoteApp): """Flask integrated RemoteApp of :class:`~authlib.client.OAuthClient`. It has built-in hooks for OAuthClient. The only required configuration is token model. """ def __init__(self, framework, name=None, fetch_token=None, **kwargs): fetch_request_token = kwargs.pop('fetch_request_token', None) save_request_token = kwargs.pop('save_request_token', None) super(FlaskRemoteApp, self).__init__(framework, name, fetch_token, **kwargs) self._fetch_request_token = fetch_request_token self._save_request_token = save_request_token def _on_update_token(self, token, refresh_token=None, access_token=None): self.token = token super(FlaskRemoteApp, self)._on_update_token( token, refresh_token, access_token ) @property def token(self): ctx = _app_ctx_stack.top attr = 'authlib_oauth_token_{}'.format(self.name) token = getattr(ctx, attr, None) if token: return token if self._fetch_token: token = self._fetch_token() self.token = token return token @token.setter def token(self, token): ctx = _app_ctx_stack.top attr = 'authlib_oauth_token_{}'.format(self.name) setattr(ctx, attr, token) def request(self, method, url, token=None, **kwargs): if token is None and not kwargs.get('withhold_token'): token = self.token return super(FlaskRemoteApp, self).request( method, url, token=token, **kwargs) def authorize_redirect(self, redirect_uri=None, **kwargs): """Create a HTTP Redirect for Authorization Endpoint. :param redirect_uri: Callback or redirect URI for authorization. :param kwargs: Extra parameters to include. :return: A HTTP redirect response. 
""" rv = self.create_authorization_url(redirect_uri, **kwargs) if self.request_token_url: request_token = rv.pop('request_token', None) self._save_request_token(request_token) self.save_authorize_data(flask_req, redirect_uri=redirect_uri, **rv) return redirect(rv['url']) def authorize_access_token(self, **kwargs): """Authorize access token.""" if self.request_token_url: request_token = self._fetch_request_token() else: request_token = None params = self.retrieve_access_token_params(flask_req, request_token) params.update(kwargs) token = self.fetch_access_token(**params) self.token = token return token def parse_id_token(self, token, claims_options=None): return self._parse_id_token(flask_req, token, claims_options)
[ "flask.redirect" ]
[((2349, 2368), 'flask.redirect', 'redirect', (["rv['url']"], {}), "(rv['url'])\n", (2357, 2368), False, 'from flask import redirect\n')]
from .model import KerasModel import keras from keras.models import Sequential from keras.layers import Dense, Activation, Flatten from keras.layers import BatchNormalization, Dropout, Conv2D, MaxPooling2D import kapre from kapre.utils import Normalization2D from kapre.time_frequency import Spectrogram class CNN_STFT(KerasModel): def create_model(self, input_shape, dropout=0.5, print_summary=False): # basis of the CNN_STFT is a Sequential network model = Sequential() # spectrogram creation using STFT model.add(Spectrogram(n_dft = 128, n_hop = 16, input_shape = input_shape, return_decibel_spectrogram = False, power_spectrogram = 2.0, trainable_kernel = False, name = 'static_stft')) model.add(Normalization2D(str_axis = 'freq')) # Conv Block 1 model.add(Conv2D(filters = 24, kernel_size = (12, 12), strides = (1, 1), name = 'conv1', border_mode = 'same')) model.add(BatchNormalization(axis = 1)) model.add(Activation('relu')) model.add(MaxPooling2D(pool_size = (2, 2), strides = (2,2), padding = 'valid', data_format = 'channels_last')) # Conv Block 2 model.add(Conv2D(filters = 48, kernel_size = (8, 8), name = 'conv2', border_mode = 'same')) model.add(BatchNormalization(axis = 1)) model.add(Activation('relu')) model.add(MaxPooling2D(pool_size = (2, 2), strides = (2, 2), padding = 'valid', data_format = 'channels_last')) # Conv Block 3 model.add(Conv2D(filters = 96, kernel_size = (4, 4), name = 'conv3', border_mode = 'same')) model.add(BatchNormalization(axis = 1)) model.add(Activation('relu')) model.add(MaxPooling2D(pool_size = (2, 2), strides = (2,2), padding = 'valid', data_format = 'channels_last')) model.add(Dropout(dropout)) # classificator model.add(Flatten()) model.add(Dense(2)) # two classes only model.add(Activation('softmax')) if print_summary: print(model.summary()) # compile the model model.compile(loss = 'categorical_crossentropy', optimizer = 'adam', metrics = ['accuracy']) # assign model and return self.model = model return model
[ "keras.layers.Conv2D", "keras.layers.Flatten", "keras.layers.MaxPooling2D", "keras.models.Sequential", "keras.layers.Activation", "kapre.utils.Normalization2D", "keras.layers.Dense", "keras.layers.BatchNormalization", "keras.layers.Dropout", "kapre.time_frequency.Spectrogram" ]
[((484, 496), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (494, 496), False, 'from keras.models import Sequential\n'), ((558, 724), 'kapre.time_frequency.Spectrogram', 'Spectrogram', ([], {'n_dft': '(128)', 'n_hop': '(16)', 'input_shape': 'input_shape', 'return_decibel_spectrogram': '(False)', 'power_spectrogram': '(2.0)', 'trainable_kernel': '(False)', 'name': '"""static_stft"""'}), "(n_dft=128, n_hop=16, input_shape=input_shape,\n return_decibel_spectrogram=False, power_spectrogram=2.0,\n trainable_kernel=False, name='static_stft')\n", (569, 724), False, 'from kapre.time_frequency import Spectrogram\n'), ((786, 818), 'kapre.utils.Normalization2D', 'Normalization2D', ([], {'str_axis': '"""freq"""'}), "(str_axis='freq')\n", (801, 818), False, 'from kapre.utils import Normalization2D\n'), ((864, 958), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(24)', 'kernel_size': '(12, 12)', 'strides': '(1, 1)', 'name': '"""conv1"""', 'border_mode': '"""same"""'}), "(filters=24, kernel_size=(12, 12), strides=(1, 1), name='conv1',\n border_mode='same')\n", (870, 958), False, 'from keras.layers import BatchNormalization, Dropout, Conv2D, MaxPooling2D\n'), ((1034, 1060), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(1)'}), '(axis=1)\n', (1052, 1060), False, 'from keras.layers import BatchNormalization, Dropout, Conv2D, MaxPooling2D\n'), ((1082, 1100), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (1092, 1100), False, 'from keras.layers import Dense, Activation, Flatten\n'), ((1120, 1217), 'keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)', 'strides': '(2, 2)', 'padding': '"""valid"""', 'data_format': '"""channels_last"""'}), "(pool_size=(2, 2), strides=(2, 2), padding='valid', data_format\n ='channels_last')\n", (1132, 1217), False, 'from keras.layers import BatchNormalization, Dropout, Conv2D, MaxPooling2D\n'), ((1294, 1366), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': 
'(48)', 'kernel_size': '(8, 8)', 'name': '"""conv2"""', 'border_mode': '"""same"""'}), "(filters=48, kernel_size=(8, 8), name='conv2', border_mode='same')\n", (1300, 1366), False, 'from keras.layers import BatchNormalization, Dropout, Conv2D, MaxPooling2D\n'), ((1419, 1445), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(1)'}), '(axis=1)\n', (1437, 1445), False, 'from keras.layers import BatchNormalization, Dropout, Conv2D, MaxPooling2D\n'), ((1467, 1485), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (1477, 1485), False, 'from keras.layers import Dense, Activation, Flatten\n'), ((1505, 1602), 'keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)', 'strides': '(2, 2)', 'padding': '"""valid"""', 'data_format': '"""channels_last"""'}), "(pool_size=(2, 2), strides=(2, 2), padding='valid', data_format\n ='channels_last')\n", (1517, 1602), False, 'from keras.layers import BatchNormalization, Dropout, Conv2D, MaxPooling2D\n'), ((1680, 1752), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(96)', 'kernel_size': '(4, 4)', 'name': '"""conv3"""', 'border_mode': '"""same"""'}), "(filters=96, kernel_size=(4, 4), name='conv3', border_mode='same')\n", (1686, 1752), False, 'from keras.layers import BatchNormalization, Dropout, Conv2D, MaxPooling2D\n'), ((1805, 1831), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(1)'}), '(axis=1)\n', (1823, 1831), False, 'from keras.layers import BatchNormalization, Dropout, Conv2D, MaxPooling2D\n'), ((1853, 1871), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (1863, 1871), False, 'from keras.layers import Dense, Activation, Flatten\n'), ((1891, 1988), 'keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)', 'strides': '(2, 2)', 'padding': '"""valid"""', 'data_format': '"""channels_last"""'}), "(pool_size=(2, 2), strides=(2, 2), padding='valid', data_format\n ='channels_last')\n", (1903, 1988), 
False, 'from keras.layers import BatchNormalization, Dropout, Conv2D, MaxPooling2D\n'), ((2072, 2088), 'keras.layers.Dropout', 'Dropout', (['dropout'], {}), '(dropout)\n', (2079, 2088), False, 'from keras.layers import BatchNormalization, Dropout, Conv2D, MaxPooling2D\n'), ((2133, 2142), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (2140, 2142), False, 'from keras.layers import Dense, Activation, Flatten\n'), ((2162, 2170), 'keras.layers.Dense', 'Dense', (['(2)'], {}), '(2)\n', (2167, 2170), False, 'from keras.layers import Dense, Activation, Flatten\n'), ((2210, 2231), 'keras.layers.Activation', 'Activation', (['"""softmax"""'], {}), "('softmax')\n", (2220, 2231), False, 'from keras.layers import Dense, Activation, Flatten\n')]
from datetime import datetime from django.contrib.auth.models import User from django.core.exceptions import ValidationError from django.db import models from scaffold.exceptions.exceptions import AppError # def patch_methods(model_class): # def do_patch(cls): # for k in cls.__dict__: # obj = getattr(cls, k) # if not k.startswith('_') and callable(obj): # setattr(model_class, k, obj) # # return do_patch class SortableModel(models.Model): """ 可排序模型 """ sorting = models.BigIntegerField( verbose_name='排序', default=0, help_text='用于系统进行排序的参数,可以给用户设定或者作为计算列存储组合权重', db_index=True, ) class Meta: abstract = True ordering = ['-sorting'] class StickModel(models.Model): """ 可置顶模型 """ is_sticky = models.BooleanField( verbose_name='是否置顶', default=False, db_index=True, ) class Meta: abstract = True ordering = ['-is_sticky'] class ActiveModel(models.Model): """ 可以切换可用/不可用的模型 """ is_active = models.BooleanField( verbose_name='是否可用', default=True, db_index=True, ) class Meta: abstract = True class DatedModel(models.Model): """ 记录了创建时间和修改时间的模型 """ date_created = models.DateTimeField( verbose_name='创建时间', auto_now_add=True, db_index=True, ) date_updated = models.DateTimeField( verbose_name='修改时间', auto_now=True, db_index=True, ) class Meta: abstract = True class NamedModel(models.Model): """ 有名称的模型 """ name = models.CharField( verbose_name='名称', max_length=255, blank=True, default='', ) class Meta: abstract = True def __str__(self): return self.name or '[{}]'.format(self.pk) class ContentModel(models.Model): """ 有内容的模型 """ content = models.TextField( verbose_name='内容', blank=True, default='', ) excerpt = models.CharField( verbose_name='摘要', max_length=255, blank=True, default='', ) class Meta: abstract = True class HierarchicalModel(models.Model): """ 层次模型,具备 parent 和 children 属性 """ parent = models.ForeignKey( verbose_name='上级', to='self', related_name='children', blank=True, null=True, on_delete=models.SET_NULL, ) class Meta: abstract = True def clean(self): # 环路检测 p = self.parent 
while p is not None: if p.pk == self.pk: raise ValidationError('级联结构不能出现循环引用') p = p.parent @property def parent_name(self): return self.parent and getattr(self.parent, 'name', None) class NullableUserOwnedModel(models.Model): """ 由用户拥有的模型类 包含作者字段 """ author = models.ForeignKey( verbose_name='作者', to='auth.User', related_name='%(class)ss_owned', blank=True, null=True, on_delete=models.SET_NULL, ) class Meta: abstract = True class UserOwnedModel(models.Model): """ 由用户拥有的模型类 包含作者字段,要求非空 """ author = models.ForeignKey( verbose_name='作者', to='auth.User', related_name='%(class)ss_owned', on_delete=models.CASCADE, ) class Meta: abstract = True class EntityModel(NamedModel, SortableModel, StickModel, DatedModel): """ 实体类模型 """ class Meta: abstract = True ordering = ['-date_created', '-is_sticky', '-sorting'] def __str__(self): return self.name or str(self.pk) class AbstractValidationModel(models.Model): """ 抽象验证类 1. 提交一次验证的时候,必须没有非 EXPIRED 的验证信息; 2. 提交验证之后,创建一条新的 PersonalValidationInfo 信息; 3. 新提交的验证,状态为 PENDING,记录 date_submitted; 4. 管理员权限可以进行审批,或者驳回,改变状态并记录 date_response; 5. 任何阶段,用户可以取消掉现有的验证信息,变成 EXPIRED 并记录时间; 6. 
取消掉唯一一条活动的验证信息之后,可以提交新的验证信息; """ STATUS_DRAFT = 'DRAFT' STATUS_PENDING = 'PENDING' STATUS_REJECTED = 'REJECTED' STATUS_SUCCESS = 'SUCCESS' STATUS_EXPIRED = 'EXPIRED' STATUS_CHOICES = ( (STATUS_DRAFT, '草稿'), (STATUS_PENDING, '等待审批'), (STATUS_REJECTED, '驳回'), (STATUS_SUCCESS, '成功'), (STATUS_EXPIRED, '已失效'), ) status = models.CharField( verbose_name='验证状态', max_length=20, choices=STATUS_CHOICES, default=STATUS_DRAFT, ) date_submitted = models.DateTimeField( verbose_name='提交时间', blank=True, null=True, ) date_response = models.DateTimeField( verbose_name='审批时间', blank=True, null=True, ) date_expired = models.DateTimeField( verbose_name='失效时间', blank=True, null=True, ) remark = models.CharField( verbose_name='审核不通过原因', max_length=255, blank=True, default='', ) class Meta: abstract = True def approve(self, *args, **kwargs): if self.status not in (self.STATUS_PENDING, self.STATUS_REJECTED): raise AppError('ERR091', '审批对象的状态必须为等待审批或者驳回') self.status = self.STATUS_SUCCESS self.date_response = datetime.now() self.save() def reject(self, reason, *args, **kwargs): if self.status not in (self.STATUS_PENDING,): raise AppError('ERR092', '审批对象的状态必须为等待审批') if not reason: raise AppError('ERR093', '请填写驳回理由') self.status = self.STATUS_REJECTED self.date_response = datetime.now() self.remark = reason self.save() class AbstractTransactionModel(models.Model): debit = models.ForeignKey( verbose_name='借方用户', to=User, related_name='%(class)ss_debit', null=True, blank=True, on_delete=models.PROTECT, help_text='即余额增加的账户,默认情况用户作为账户,' '如需定义其他模型作为账号,派生时覆写此字段', ) credit = models.ForeignKey( verbose_name='贷方用户', to=User, related_name='%(class)ss_credit', null=True, blank=True, on_delete=models.PROTECT, help_text='即余额减少的账户,默认情况用户作为账户,' '如需定义其他模型作为账号,派生时覆写此字段', ) amount = models.DecimalField( verbose_name='金额', max_digits=18, decimal_places=2, ) remark = models.CharField( verbose_name='备注', blank=True, default='', max_length=255, ) class Meta: abstract = True
[ "django.db.models.TextField", "django.db.models.ForeignKey", "django.core.exceptions.ValidationError", "django.db.models.BooleanField", "datetime.datetime.now", "django.db.models.BigIntegerField", "django.db.models.DateTimeField", "django.db.models.DecimalField", "scaffold.exceptions.exceptions.AppE...
[((540, 658), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {'verbose_name': '"""排序"""', 'default': '(0)', 'help_text': '"""用于系统进行排序的参数,可以给用户设定或者作为计算列存储组合权重"""', 'db_index': '(True)'}), "(verbose_name='排序', default=0, help_text=\n '用于系统进行排序的参数,可以给用户设定或者作为计算列存储组合权重', db_index=True)\n", (562, 658), False, 'from django.db import models\n'), ((838, 908), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'verbose_name': '"""是否置顶"""', 'default': '(False)', 'db_index': '(True)'}), "(verbose_name='是否置顶', default=False, db_index=True)\n", (857, 908), False, 'from django.db import models\n'), ((1096, 1165), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'verbose_name': '"""是否可用"""', 'default': '(True)', 'db_index': '(True)'}), "(verbose_name='是否可用', default=True, db_index=True)\n", (1115, 1165), False, 'from django.db import models\n'), ((1323, 1398), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""创建时间"""', 'auto_now_add': '(True)', 'db_index': '(True)'}), "(verbose_name='创建时间', auto_now_add=True, db_index=True)\n", (1343, 1398), False, 'from django.db import models\n'), ((1450, 1521), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""修改时间"""', 'auto_now': '(True)', 'db_index': '(True)'}), "(verbose_name='修改时间', auto_now=True, db_index=True)\n", (1470, 1521), False, 'from django.db import models\n'), ((1663, 1738), 'django.db.models.CharField', 'models.CharField', ([], {'verbose_name': '"""名称"""', 'max_length': '(255)', 'blank': '(True)', 'default': '""""""'}), "(verbose_name='名称', max_length=255, blank=True, default='')\n", (1679, 1738), False, 'from django.db import models\n'), ((1968, 2027), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""内容"""', 'blank': '(True)', 'default': '""""""'}), "(verbose_name='内容', blank=True, default='')\n", (1984, 2027), False, 'from django.db import models\n'), ((2074, 2149), 
'django.db.models.CharField', 'models.CharField', ([], {'verbose_name': '"""摘要"""', 'max_length': '(255)', 'blank': '(True)', 'default': '""""""'}), "(verbose_name='摘要', max_length=255, blank=True, default='')\n", (2090, 2149), False, 'from django.db import models\n'), ((2329, 2455), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'verbose_name': '"""上级"""', 'to': '"""self"""', 'related_name': '"""children"""', 'blank': '(True)', 'null': '(True)', 'on_delete': 'models.SET_NULL'}), "(verbose_name='上级', to='self', related_name='children',\n blank=True, null=True, on_delete=models.SET_NULL)\n", (2346, 2455), False, 'from django.db import models\n'), ((2954, 3094), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'verbose_name': '"""作者"""', 'to': '"""auth.User"""', 'related_name': '"""%(class)ss_owned"""', 'blank': '(True)', 'null': '(True)', 'on_delete': 'models.SET_NULL'}), "(verbose_name='作者', to='auth.User', related_name=\n '%(class)ss_owned', blank=True, null=True, on_delete=models.SET_NULL)\n", (2971, 3094), False, 'from django.db import models\n'), ((3280, 3396), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'verbose_name': '"""作者"""', 'to': '"""auth.User"""', 'related_name': '"""%(class)ss_owned"""', 'on_delete': 'models.CASCADE'}), "(verbose_name='作者', to='auth.User', related_name=\n '%(class)ss_owned', on_delete=models.CASCADE)\n", (3297, 3396), False, 'from django.db import models\n'), ((4476, 4578), 'django.db.models.CharField', 'models.CharField', ([], {'verbose_name': '"""验证状态"""', 'max_length': '(20)', 'choices': 'STATUS_CHOICES', 'default': 'STATUS_DRAFT'}), "(verbose_name='验证状态', max_length=20, choices=STATUS_CHOICES,\n default=STATUS_DRAFT)\n", (4492, 4578), False, 'from django.db import models\n'), ((4636, 4700), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""提交时间"""', 'blank': '(True)', 'null': '(True)'}), "(verbose_name='提交时间', blank=True, null=True)\n", (4656, 4700), False, 'from 
django.db import models\n'), ((4753, 4817), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""审批时间"""', 'blank': '(True)', 'null': '(True)'}), "(verbose_name='审批时间', blank=True, null=True)\n", (4773, 4817), False, 'from django.db import models\n'), ((4869, 4933), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""失效时间"""', 'blank': '(True)', 'null': '(True)'}), "(verbose_name='失效时间', blank=True, null=True)\n", (4889, 4933), False, 'from django.db import models\n'), ((4979, 5064), 'django.db.models.CharField', 'models.CharField', ([], {'verbose_name': '"""审核不通过原因"""', 'max_length': '(255)', 'blank': '(True)', 'default': '""""""'}), "(verbose_name='审核不通过原因', max_length=255, blank=True, default=''\n )\n", (4995, 5064), False, 'from django.db import models\n'), ((5845, 6038), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'verbose_name': '"""借方用户"""', 'to': 'User', 'related_name': '"""%(class)ss_debit"""', 'null': '(True)', 'blank': '(True)', 'on_delete': 'models.PROTECT', 'help_text': '"""即余额增加的账户,默认情况用户作为账户,如需定义其他模型作为账号,派生时覆写此字段"""'}), "(verbose_name='借方用户', to=User, related_name=\n '%(class)ss_debit', null=True, blank=True, on_delete=models.PROTECT,\n help_text='即余额增加的账户,默认情况用户作为账户,如需定义其他模型作为账号,派生时覆写此字段')\n", (5862, 6038), False, 'from django.db import models\n'), ((6128, 6322), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'verbose_name': '"""贷方用户"""', 'to': 'User', 'related_name': '"""%(class)ss_credit"""', 'null': '(True)', 'blank': '(True)', 'on_delete': 'models.PROTECT', 'help_text': '"""即余额减少的账户,默认情况用户作为账户,如需定义其他模型作为账号,派生时覆写此字段"""'}), "(verbose_name='贷方用户', to=User, related_name=\n '%(class)ss_credit', null=True, blank=True, on_delete=models.PROTECT,\n help_text='即余额减少的账户,默认情况用户作为账户,如需定义其他模型作为账号,派生时覆写此字段')\n", (6145, 6322), False, 'from django.db import models\n'), ((6412, 6483), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'verbose_name': '"""金额"""', 
'max_digits': '(18)', 'decimal_places': '(2)'}), "(verbose_name='金额', max_digits=18, decimal_places=2)\n", (6431, 6483), False, 'from django.db import models\n'), ((6529, 6604), 'django.db.models.CharField', 'models.CharField', ([], {'verbose_name': '"""备注"""', 'blank': '(True)', 'default': '""""""', 'max_length': '(255)'}), "(verbose_name='备注', blank=True, default='', max_length=255)\n", (6545, 6604), False, 'from django.db import models\n'), ((5386, 5400), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5398, 5400), False, 'from datetime import datetime\n'), ((5721, 5735), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5733, 5735), False, 'from datetime import datetime\n'), ((5274, 5314), 'scaffold.exceptions.exceptions.AppError', 'AppError', (['"""ERR091"""', '"""审批对象的状态必须为等待审批或者驳回"""'], {}), "('ERR091', '审批对象的状态必须为等待审批或者驳回')\n", (5282, 5314), False, 'from scaffold.exceptions.exceptions import AppError\n'), ((5541, 5577), 'scaffold.exceptions.exceptions.AppError', 'AppError', (['"""ERR092"""', '"""审批对象的状态必须为等待审批"""'], {}), "('ERR092', '审批对象的状态必须为等待审批')\n", (5549, 5577), False, 'from scaffold.exceptions.exceptions import AppError\n'), ((5619, 5648), 'scaffold.exceptions.exceptions.AppError', 'AppError', (['"""ERR093"""', '"""请填写驳回理由"""'], {}), "('ERR093', '请填写驳回理由')\n", (5627, 5648), False, 'from scaffold.exceptions.exceptions import AppError\n'), ((2692, 2723), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""级联结构不能出现循环引用"""'], {}), "('级联结构不能出现循环引用')\n", (2707, 2723), False, 'from django.core.exceptions import ValidationError\n')]
# # This file is part of softboxen software. # # Copyright (c) 2020, <NAME> <<EMAIL>> # License: https://github.com/etingof/softboxen/LICENSE.rst # import json import sys import unittest from unittest import mock from softboxen.client.resources.box import box from softboxen.client.resources.box import credentials from softboxen.client.resources.box import route class BoxTestCase(unittest.TestCase): def setUp(self): super(BoxTestCase, self).setUp() self.conn = mock.Mock() with open('tests/unit/client/resources/samples/box.json') as f: self.json_doc = json.load(f) self.conn.get.return_value.json.return_value = self.json_doc self.box = box.Box(self.conn, '/softboxen/v1/boxen/1') def test__parse_attributes(self): self.box._parse_attributes(self.json_doc) self.assertEqual('Cisco 5300', self.box.description) self.assertEqual('rt-1', self.box.hostname) self.assertEqual('10.0.0.1', self.box.mgmt_address) self.assertEqual('1', self.box.version) self.assertEqual('5300', self.box.model) self.assertEqual('cisco', self.box.vendor) self.assertEqual('123e4567-e89b-12d3-a456-426655440000', self.box.uuid) self.assertEqual('/softboxen/v1/boxen/1', self.box.path) self.assertEqual([], self.box.credentials.members_identities) self.assertEqual([], self.box.routes.members_identities) def test_credentials(self): self.conn.get.return_value.json.reset_mock() with open('tests/unit/client/resources/samples/' 'credentials_collection.json') as f: self.conn.get.return_value.json.return_value = json.load(f) expected = self.box.credentials self.assertIsInstance( expected, credentials.CredentialsCollection) self.conn.get.return_value.json.assert_called_once_with() def test_routes(self): self.conn.get.return_value.json.reset_mock() with open('tests/unit/client/resources/samples/' 'route_collection.json') as f: self.conn.get.return_value.json.return_value = json.load(f) expected = self.box.routes self.assertIsInstance( expected, route.RouteCollection) 
self.conn.get.return_value.json.assert_called_once_with() class BoxCollectionTestCase(unittest.TestCase): def setUp(self): super(BoxCollectionTestCase, self).setUp() self.conn = mock.Mock() with open('tests/unit/client/resources/samples/' 'box_collection.json') as f: self.json_doc = json.load(f) self.conn.get.return_value.json.return_value = self.json_doc self.box_col = box.BoxCollection( self.conn, '/softboxen/v1/boxen') def test__parse_attributes(self): self.box_col._parse_attributes(self.json_doc) self.assertEqual( ['/softboxen/v1/boxen/1'], self.box_col.members_identities) @mock.patch.object(box, 'Box', autospec=True) def test_get_member(self, mock_box): self.box_col.get_member('/softboxen/v1/boxen/1') mock_box.assert_called_once_with( self.box_col._conn, '/softboxen/v1/boxen/1') @mock.patch.object(box, 'Box', autospec=True) def test_get_members(self, mock_box): members = list(self.box_col) mock_box.assert_called_once_with( self.box_col._conn, '/softboxen/v1/boxen/1') self.assertIsInstance(members, list) self.assertEqual(1, len(members)) suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) if __name__ == '__main__': unittest.TextTestRunner(verbosity=2).run(suite)
[ "softboxen.client.resources.box.box.Box", "unittest.mock.Mock", "softboxen.client.resources.box.box.BoxCollection", "unittest.mock.patch.object", "json.load", "unittest.TextTestRunner", "unittest.TestLoader" ]
[((3008, 3052), 'unittest.mock.patch.object', 'mock.patch.object', (['box', '"""Box"""'], {'autospec': '(True)'}), "(box, 'Box', autospec=True)\n", (3025, 3052), False, 'from unittest import mock\n'), ((3257, 3301), 'unittest.mock.patch.object', 'mock.patch.object', (['box', '"""Box"""'], {'autospec': '(True)'}), "(box, 'Box', autospec=True)\n", (3274, 3301), False, 'from unittest import mock\n'), ((490, 501), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (499, 501), False, 'from unittest import mock\n'), ((706, 749), 'softboxen.client.resources.box.box.Box', 'box.Box', (['self.conn', '"""/softboxen/v1/boxen/1"""'], {}), "(self.conn, '/softboxen/v1/boxen/1')\n", (713, 749), False, 'from softboxen.client.resources.box import box\n'), ((2493, 2504), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (2502, 2504), False, 'from unittest import mock\n'), ((2745, 2796), 'softboxen.client.resources.box.box.BoxCollection', 'box.BoxCollection', (['self.conn', '"""/softboxen/v1/boxen"""'], {}), "(self.conn, '/softboxen/v1/boxen')\n", (2762, 2796), False, 'from softboxen.client.resources.box import box\n'), ((3579, 3600), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (3598, 3600), False, 'import unittest\n'), ((603, 615), 'json.load', 'json.load', (['f'], {}), '(f)\n', (612, 615), False, 'import json\n'), ((1699, 1711), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1708, 1711), False, 'import json\n'), ((2156, 2168), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2165, 2168), False, 'import json\n'), ((2638, 2650), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2647, 2650), False, 'import json\n'), ((3676, 3712), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (3699, 3712), False, 'import unittest\n')]
import click from flask import current_app, g from flask.cli import with_appcontext from flask_pymongo import PyMongo from werkzeug.security import check_password_hash, generate_password_hash def get_db(): if 'db' not in g: mongo = PyMongo(current_app) g.db = mongo.db g.db_client = mongo.cx return g.db def close_db(e=None): g.pop('db', None) db_client = g.pop('db_client', None) if db_client is not None: db_client.close() def init_app(app): app.teardown_appcontext(close_db)
[ "flask_pymongo.PyMongo", "flask.g.pop" ]
[((367, 384), 'flask.g.pop', 'g.pop', (['"""db"""', 'None'], {}), "('db', None)\n", (372, 384), False, 'from flask import current_app, g\n'), ((401, 425), 'flask.g.pop', 'g.pop', (['"""db_client"""', 'None'], {}), "('db_client', None)\n", (406, 425), False, 'from flask import current_app, g\n'), ((246, 266), 'flask_pymongo.PyMongo', 'PyMongo', (['current_app'], {}), '(current_app)\n', (253, 266), False, 'from flask_pymongo import PyMongo\n')]
# preferences panel to allow inputting cutom parameters for the structure of a project and its # naming conventions. # -------------------------------------------------------------------------------------------------- import hashlib import nuke from nukescripts.panels import PythonPanel import fileinput import os import smtplib import sys class Preferences(PythonPanel): def __init__(self): PythonPanel.__init__(self, 'C1 Preferences') # C1 Preferences self.email = None self.localDir = None self.projectDir = None # custom regex definitions for validation engine self.regex = {} self.projectStructure = { 'root': {} } self.scriptDir = { 'root': os.path.join(os.path.join(os.path.realpath(__file__), os.pardir), os.pardir), 'c1_tools': os.path.join(os.path.realpath(__file__), os.pardir) } # define knobs self.inp_email = nuke.String_Knob('email', 'C1 Initials: ') self.inp_localDir = nuke.String_Knob( 'localDir', 'Local Working Directory: ') self.btn_localDir = nuke.PyScript_Knob("Set Working Dir") self.loginButton = nuke.PyScript_Knob("Login") self.cancelButton = nuke.PyScript_Knob("Cancel") # Project Map Tab self.projectMapTab = nuke.Tab_Knob("Project Map") self.setProjectButton = nuke.File_Knob( 'projectDir', 'Project Location') self.inp_projectLocation = nuke.String_Knob('projectDir', '<b><font size="3" color="red">Remote Project Directory</font></b>') self.inp_projectName = nuke.String_Knob('projectName', 'Project Name') self.inp_projectNum = nuke.Int_Knob('projectNum') # self.inp_projectNum.clearFlag( nuke.STARTLINE ) self.inp_projectCode = nuke.String_Knob('projectCode', 'Project Code') self.inp_projectCode.clearFlag(nuke.STARTLINE) # add knobs self.addKnob(self.inp_localDir) self.addKnob(self.btn_localDir) self.addKnob(self.inp_email) self.addKnob(self.loginButton) self.addKnob(self.cancelButton) # Project Map Tab self.addKnob(self.projectMapTab) self.addKnob(self.setProjectButton) self.addKnob(self.inp_projectName) self.addKnob(self.inp_projectNum) 
self.addKnob(self.inp_projectCode) # retrieve previous login from login.txt self.retrieveLogin() return def validate(self): self.retrieveLogin() return # Retrieve login.txt data def retrieveLogin(self): if os.path.exists(os.path.join(self.scriptDir['c1_tools'], 'login.txt')): text = open(os.path.join(self.scriptDir[ 'c1_tools'], 'login.txt'), 'r+') lines = [] for line in text: # append each line of the found login.txt lines.append(line) text.close() self.email = lines[0] self.localDir = lines[1] else: self.prompt() print('Succsessfully logged in as: ' + self.email) return # create login.txt data def createLogin(self): try: text = open(os.path.join(self.scriptDir[ 'c1_tools'], 'login.txt'), 'w') text.write(self.inp_email.value() + '\n') text.write(self.inp_localDir.value()) text.close() except: print('Failed to save login info! ') return def prompt(self): PythonPanel.showModal(self) return def knobChanged(self, knob): if knob.name() == 'Login': self.email = self.inp_email.value() self.localDir = self.inp_localDir.value() # write login.txt self.createLogin() self.status = 'online' self.ok() elif knob.name() == 'Set Working Dir': self.inp_localDir.setValue(os.path.abspath(nuke.getFilename( 'Navigate to Local Working Directory...'))) elif knob.name() == 'Project Location': self.inp_projectLocation.setValue(os.path.abspath(nuke.getFilename( 'Navigate to Remote \'Root\' Project Directory...'))) return
[ "nuke.getFilename", "nuke.File_Knob", "nuke.String_Knob", "nukescripts.panels.PythonPanel.showModal", "os.path.join", "os.path.realpath", "nuke.PyScript_Knob", "nuke.Int_Knob", "nuke.Tab_Knob", "nukescripts.panels.PythonPanel.__init__" ]
[((408, 452), 'nukescripts.panels.PythonPanel.__init__', 'PythonPanel.__init__', (['self', '"""C1 Preferences"""'], {}), "(self, 'C1 Preferences')\n", (428, 452), False, 'from nukescripts.panels import PythonPanel\n'), ((971, 1013), 'nuke.String_Knob', 'nuke.String_Knob', (['"""email"""', '"""C1 Initials: """'], {}), "('email', 'C1 Initials: ')\n", (987, 1013), False, 'import nuke\n'), ((1042, 1099), 'nuke.String_Knob', 'nuke.String_Knob', (['"""localDir"""', '"""Local Working Directory: """'], {}), "('localDir', 'Local Working Directory: ')\n", (1058, 1099), False, 'import nuke\n'), ((1141, 1178), 'nuke.PyScript_Knob', 'nuke.PyScript_Knob', (['"""Set Working Dir"""'], {}), "('Set Working Dir')\n", (1159, 1178), False, 'import nuke\n'), ((1206, 1233), 'nuke.PyScript_Knob', 'nuke.PyScript_Knob', (['"""Login"""'], {}), "('Login')\n", (1224, 1233), False, 'import nuke\n'), ((1262, 1290), 'nuke.PyScript_Knob', 'nuke.PyScript_Knob', (['"""Cancel"""'], {}), "('Cancel')\n", (1280, 1290), False, 'import nuke\n'), ((1346, 1374), 'nuke.Tab_Knob', 'nuke.Tab_Knob', (['"""Project Map"""'], {}), "('Project Map')\n", (1359, 1374), False, 'import nuke\n'), ((1407, 1455), 'nuke.File_Knob', 'nuke.File_Knob', (['"""projectDir"""', '"""Project Location"""'], {}), "('projectDir', 'Project Location')\n", (1421, 1455), False, 'import nuke\n'), ((1504, 1607), 'nuke.String_Knob', 'nuke.String_Knob', (['"""projectDir"""', '"""<b><font size="3" color="red">Remote Project Directory</font></b>"""'], {}), '(\'projectDir\',\n \'<b><font size="3" color="red">Remote Project Directory</font></b>\')\n', (1520, 1607), False, 'import nuke\n'), ((1687, 1734), 'nuke.String_Knob', 'nuke.String_Knob', (['"""projectName"""', '"""Project Name"""'], {}), "('projectName', 'Project Name')\n", (1703, 1734), False, 'import nuke\n'), ((1765, 1792), 'nuke.Int_Knob', 'nuke.Int_Knob', (['"""projectNum"""'], {}), "('projectNum')\n", (1778, 1792), False, 'import nuke\n'), ((1882, 1929), 'nuke.String_Knob', 
'nuke.String_Knob', (['"""projectCode"""', '"""Project Code"""'], {}), "('projectCode', 'Project Code')\n", (1898, 1929), False, 'import nuke\n'), ((3629, 3656), 'nukescripts.panels.PythonPanel.showModal', 'PythonPanel.showModal', (['self'], {}), '(self)\n', (3650, 3656), False, 'from nukescripts.panels import PythonPanel\n'), ((2689, 2742), 'os.path.join', 'os.path.join', (["self.scriptDir['c1_tools']", '"""login.txt"""'], {}), "(self.scriptDir['c1_tools'], 'login.txt')\n", (2701, 2742), False, 'import os\n'), ((874, 900), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (890, 900), False, 'import os\n'), ((2769, 2822), 'os.path.join', 'os.path.join', (["self.scriptDir['c1_tools']", '"""login.txt"""'], {}), "(self.scriptDir['c1_tools'], 'login.txt')\n", (2781, 2822), False, 'import os\n'), ((3304, 3357), 'os.path.join', 'os.path.join', (["self.scriptDir['c1_tools']", '"""login.txt"""'], {}), "(self.scriptDir['c1_tools'], 'login.txt')\n", (3316, 3357), False, 'import os\n'), ((785, 811), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (801, 811), False, 'import os\n'), ((4063, 4121), 'nuke.getFilename', 'nuke.getFilename', (['"""Navigate to Local Working Directory..."""'], {}), "('Navigate to Local Working Directory...')\n", (4079, 4121), False, 'import nuke\n'), ((4251, 4317), 'nuke.getFilename', 'nuke.getFilename', (['"""Navigate to Remote \'Root\' Project Directory..."""'], {}), '("Navigate to Remote \'Root\' Project Directory...")\n', (4267, 4317), False, 'import nuke\n')]
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services.""" import grpc from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 import runtime_pb2 as runtime__pb2 class RuntimeStub(object): """Missing associated documentation comment in .proto file.""" def __init__(self, channel): """Constructor. Args: channel: A grpc.Channel. """ self.SayHello = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/SayHello', request_serializer=runtime__pb2.SayHelloRequest.SerializeToString, response_deserializer=runtime__pb2.SayHelloResponse.FromString, ) self.InvokeService = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/InvokeService', request_serializer=runtime__pb2.InvokeServiceRequest.SerializeToString, response_deserializer=runtime__pb2.InvokeResponse.FromString, ) self.GetConfiguration = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/GetConfiguration', request_serializer=runtime__pb2.GetConfigurationRequest.SerializeToString, response_deserializer=runtime__pb2.GetConfigurationResponse.FromString, ) self.SaveConfiguration = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/SaveConfiguration', request_serializer=runtime__pb2.SaveConfigurationRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.DeleteConfiguration = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/DeleteConfiguration', request_serializer=runtime__pb2.DeleteConfigurationRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.SubscribeConfiguration = channel.stream_stream( '/spec.proto.runtime.v1.Runtime/SubscribeConfiguration', request_serializer=runtime__pb2.SubscribeConfigurationRequest.SerializeToString, response_deserializer=runtime__pb2.SubscribeConfigurationResponse.FromString, ) self.TryLock = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/TryLock', 
request_serializer=runtime__pb2.TryLockRequest.SerializeToString, response_deserializer=runtime__pb2.TryLockResponse.FromString, ) self.Unlock = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/Unlock', request_serializer=runtime__pb2.UnlockRequest.SerializeToString, response_deserializer=runtime__pb2.UnlockResponse.FromString, ) self.GetNextId = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/GetNextId', request_serializer=runtime__pb2.GetNextIdRequest.SerializeToString, response_deserializer=runtime__pb2.GetNextIdResponse.FromString, ) self.GetState = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/GetState', request_serializer=runtime__pb2.GetStateRequest.SerializeToString, response_deserializer=runtime__pb2.GetStateResponse.FromString, ) self.GetBulkState = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/GetBulkState', request_serializer=runtime__pb2.GetBulkStateRequest.SerializeToString, response_deserializer=runtime__pb2.GetBulkStateResponse.FromString, ) self.SaveState = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/SaveState', request_serializer=runtime__pb2.SaveStateRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.DeleteState = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/DeleteState', request_serializer=runtime__pb2.DeleteStateRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.DeleteBulkState = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/DeleteBulkState', request_serializer=runtime__pb2.DeleteBulkStateRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.ExecuteStateTransaction = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/ExecuteStateTransaction', request_serializer=runtime__pb2.ExecuteStateTransactionRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.PublishEvent = 
channel.unary_unary( '/spec.proto.runtime.v1.Runtime/PublishEvent', request_serializer=runtime__pb2.PublishEventRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.GetFile = channel.unary_stream( '/spec.proto.runtime.v1.Runtime/GetFile', request_serializer=runtime__pb2.GetFileRequest.SerializeToString, response_deserializer=runtime__pb2.GetFileResponse.FromString, ) self.PutFile = channel.stream_unary( '/spec.proto.runtime.v1.Runtime/PutFile', request_serializer=runtime__pb2.PutFileRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.ListFile = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/ListFile', request_serializer=runtime__pb2.ListFileRequest.SerializeToString, response_deserializer=runtime__pb2.ListFileResp.FromString, ) self.DelFile = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/DelFile', request_serializer=runtime__pb2.DelFileRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.GetFileMeta = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/GetFileMeta', request_serializer=runtime__pb2.GetFileMetaRequest.SerializeToString, response_deserializer=runtime__pb2.GetFileMetaResponse.FromString, ) self.InvokeBinding = channel.unary_unary( '/spec.proto.runtime.v1.Runtime/InvokeBinding', request_serializer=runtime__pb2.InvokeBindingRequest.SerializeToString, response_deserializer=runtime__pb2.InvokeBindingResponse.FromString, ) class RuntimeServicer(object): """Missing associated documentation comment in .proto file.""" def SayHello(self, request, context): """SayHello used for test """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def InvokeService(self, request, context): """InvokeService do rpc calls """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method 
not implemented!') raise NotImplementedError('Method not implemented!') def GetConfiguration(self, request, context): """GetConfiguration gets configuration from configuration store. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SaveConfiguration(self, request, context): """SaveConfiguration saves configuration into configuration store. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DeleteConfiguration(self, request, context): """DeleteConfiguration deletes configuration from configuration store. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeConfiguration(self, request_iterator, context): """SubscribeConfiguration gets configuration from configuration store and subscribe the updates. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def TryLock(self, request, context): """Distributed Lock API A non-blocking method trying to get a lock with ttl. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Unlock(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetNextId(self, request, context): """Sequencer API Get next unique id with some auto-increment guarantee """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetState(self, request, context): """Below are the APIs compatible with Dapr. We try to keep them same as Dapr's because we want to work with Dapr to build an API spec for cloud native runtime ,like CloudEvent for event data. Gets the state for a specific key. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetBulkState(self, request, context): """Gets a bulk of state items for a list of keys """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SaveState(self, request, context): """Saves an array of state objects """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DeleteState(self, request, context): """Deletes the state for a specific key. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DeleteBulkState(self, request, context): """Deletes a bulk of state items for a list of keys """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ExecuteStateTransaction(self, request, context): """Executes transactions for a specified store """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def PublishEvent(self, request, context): """Publishes events to the specific topic """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetFile(self, request, context): """Get file with stream """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def PutFile(self, request_iterator, context): """Put file with stream """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListFile(self, request, context): """List all files """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DelFile(self, request, context): """Delete specific file """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetFileMeta(self, request, context): """Get file meta data, if file not exist,return code.NotFound error """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not 
implemented!') def InvokeBinding(self, request, context): """Invokes binding data to specific output bindings """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_RuntimeServicer_to_server(servicer, server): rpc_method_handlers = { 'SayHello': grpc.unary_unary_rpc_method_handler( servicer.SayHello, request_deserializer=runtime__pb2.SayHelloRequest.FromString, response_serializer=runtime__pb2.SayHelloResponse.SerializeToString, ), 'InvokeService': grpc.unary_unary_rpc_method_handler( servicer.InvokeService, request_deserializer=runtime__pb2.InvokeServiceRequest.FromString, response_serializer=runtime__pb2.InvokeResponse.SerializeToString, ), 'GetConfiguration': grpc.unary_unary_rpc_method_handler( servicer.GetConfiguration, request_deserializer=runtime__pb2.GetConfigurationRequest.FromString, response_serializer=runtime__pb2.GetConfigurationResponse.SerializeToString, ), 'SaveConfiguration': grpc.unary_unary_rpc_method_handler( servicer.SaveConfiguration, request_deserializer=runtime__pb2.SaveConfigurationRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'DeleteConfiguration': grpc.unary_unary_rpc_method_handler( servicer.DeleteConfiguration, request_deserializer=runtime__pb2.DeleteConfigurationRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'SubscribeConfiguration': grpc.stream_stream_rpc_method_handler( servicer.SubscribeConfiguration, request_deserializer=runtime__pb2.SubscribeConfigurationRequest.FromString, response_serializer=runtime__pb2.SubscribeConfigurationResponse.SerializeToString, ), 'TryLock': grpc.unary_unary_rpc_method_handler( servicer.TryLock, request_deserializer=runtime__pb2.TryLockRequest.FromString, response_serializer=runtime__pb2.TryLockResponse.SerializeToString, ), 'Unlock': grpc.unary_unary_rpc_method_handler( servicer.Unlock, 
request_deserializer=runtime__pb2.UnlockRequest.FromString, response_serializer=runtime__pb2.UnlockResponse.SerializeToString, ), 'GetNextId': grpc.unary_unary_rpc_method_handler( servicer.GetNextId, request_deserializer=runtime__pb2.GetNextIdRequest.FromString, response_serializer=runtime__pb2.GetNextIdResponse.SerializeToString, ), 'GetState': grpc.unary_unary_rpc_method_handler( servicer.GetState, request_deserializer=runtime__pb2.GetStateRequest.FromString, response_serializer=runtime__pb2.GetStateResponse.SerializeToString, ), 'GetBulkState': grpc.unary_unary_rpc_method_handler( servicer.GetBulkState, request_deserializer=runtime__pb2.GetBulkStateRequest.FromString, response_serializer=runtime__pb2.GetBulkStateResponse.SerializeToString, ), 'SaveState': grpc.unary_unary_rpc_method_handler( servicer.SaveState, request_deserializer=runtime__pb2.SaveStateRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'DeleteState': grpc.unary_unary_rpc_method_handler( servicer.DeleteState, request_deserializer=runtime__pb2.DeleteStateRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'DeleteBulkState': grpc.unary_unary_rpc_method_handler( servicer.DeleteBulkState, request_deserializer=runtime__pb2.DeleteBulkStateRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'ExecuteStateTransaction': grpc.unary_unary_rpc_method_handler( servicer.ExecuteStateTransaction, request_deserializer=runtime__pb2.ExecuteStateTransactionRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'PublishEvent': grpc.unary_unary_rpc_method_handler( servicer.PublishEvent, request_deserializer=runtime__pb2.PublishEventRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'GetFile': grpc.unary_stream_rpc_method_handler( servicer.GetFile, 
request_deserializer=runtime__pb2.GetFileRequest.FromString, response_serializer=runtime__pb2.GetFileResponse.SerializeToString, ), 'PutFile': grpc.stream_unary_rpc_method_handler( servicer.PutFile, request_deserializer=runtime__pb2.PutFileRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'ListFile': grpc.unary_unary_rpc_method_handler( servicer.ListFile, request_deserializer=runtime__pb2.ListFileRequest.FromString, response_serializer=runtime__pb2.ListFileResp.SerializeToString, ), 'DelFile': grpc.unary_unary_rpc_method_handler( servicer.DelFile, request_deserializer=runtime__pb2.DelFileRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'GetFileMeta': grpc.unary_unary_rpc_method_handler( servicer.GetFileMeta, request_deserializer=runtime__pb2.GetFileMetaRequest.FromString, response_serializer=runtime__pb2.GetFileMetaResponse.SerializeToString, ), 'InvokeBinding': grpc.unary_unary_rpc_method_handler( servicer.InvokeBinding, request_deserializer=runtime__pb2.InvokeBindingRequest.FromString, response_serializer=runtime__pb2.InvokeBindingResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'spec.proto.runtime.v1.Runtime', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) # This class is part of an EXPERIMENTAL API. 
# NOTE(review): machine-generated gRPC client stubs (protoc gRPC plugin output,
# part of grpc's EXPERIMENTAL single-call API).  Do not hand-edit the call
# logic; regenerate from the .proto definition instead.
class Runtime(object):
    """Generated single-call client stubs for spec.proto.runtime.v1.Runtime.

    Each static method issues one RPC through ``grpc.experimental.*`` without a
    pre-built channel: the caller supplies the request message (or a request
    iterator for client-streaming RPCs) and a ``target`` address; the remaining
    keyword arguments are forwarded unchanged to the underlying grpc call.
    """

    @staticmethod
    def SayHello(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: SayHelloRequest -> SayHelloResponse."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/SayHello',
            runtime__pb2.SayHelloRequest.SerializeToString,
            runtime__pb2.SayHelloResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def InvokeService(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: InvokeServiceRequest -> InvokeResponse."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/InvokeService',
            runtime__pb2.InvokeServiceRequest.SerializeToString,
            runtime__pb2.InvokeResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetConfiguration(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: GetConfigurationRequest -> GetConfigurationResponse."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/GetConfiguration',
            runtime__pb2.GetConfigurationRequest.SerializeToString,
            runtime__pb2.GetConfigurationResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def SaveConfiguration(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: SaveConfigurationRequest -> google.protobuf.Empty."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/SaveConfiguration',
            runtime__pb2.SaveConfigurationRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteConfiguration(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: DeleteConfigurationRequest -> google.protobuf.Empty."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/DeleteConfiguration',
            runtime__pb2.DeleteConfigurationRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def SubscribeConfiguration(request_iterator,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Bidirectional-streaming RPC: SubscribeConfigurationRequest stream -> SubscribeConfigurationResponse stream."""
        return grpc.experimental.stream_stream(request_iterator, target, '/spec.proto.runtime.v1.Runtime/SubscribeConfiguration',
            runtime__pb2.SubscribeConfigurationRequest.SerializeToString,
            runtime__pb2.SubscribeConfigurationResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def TryLock(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: TryLockRequest -> TryLockResponse."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/TryLock',
            runtime__pb2.TryLockRequest.SerializeToString,
            runtime__pb2.TryLockResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Unlock(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: UnlockRequest -> UnlockResponse."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/Unlock',
            runtime__pb2.UnlockRequest.SerializeToString,
            runtime__pb2.UnlockResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetNextId(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: GetNextIdRequest -> GetNextIdResponse."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/GetNextId',
            runtime__pb2.GetNextIdRequest.SerializeToString,
            runtime__pb2.GetNextIdResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetState(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: GetStateRequest -> GetStateResponse."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/GetState',
            runtime__pb2.GetStateRequest.SerializeToString,
            runtime__pb2.GetStateResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetBulkState(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: GetBulkStateRequest -> GetBulkStateResponse."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/GetBulkState',
            runtime__pb2.GetBulkStateRequest.SerializeToString,
            runtime__pb2.GetBulkStateResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def SaveState(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: SaveStateRequest -> google.protobuf.Empty."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/SaveState',
            runtime__pb2.SaveStateRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteState(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: DeleteStateRequest -> google.protobuf.Empty."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/DeleteState',
            runtime__pb2.DeleteStateRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteBulkState(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: DeleteBulkStateRequest -> google.protobuf.Empty."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/DeleteBulkState',
            runtime__pb2.DeleteBulkStateRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ExecuteStateTransaction(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: ExecuteStateTransactionRequest -> google.protobuf.Empty."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/ExecuteStateTransaction',
            runtime__pb2.ExecuteStateTransactionRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def PublishEvent(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: PublishEventRequest -> google.protobuf.Empty."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/PublishEvent',
            runtime__pb2.PublishEventRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetFile(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Server-streaming RPC: GetFileRequest -> GetFileResponse stream."""
        return grpc.experimental.unary_stream(request, target, '/spec.proto.runtime.v1.Runtime/GetFile',
            runtime__pb2.GetFileRequest.SerializeToString,
            runtime__pb2.GetFileResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def PutFile(request_iterator,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Client-streaming RPC: PutFileRequest stream -> google.protobuf.Empty."""
        return grpc.experimental.stream_unary(request_iterator, target, '/spec.proto.runtime.v1.Runtime/PutFile',
            runtime__pb2.PutFileRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListFile(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: ListFileRequest -> ListFileResp."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/ListFile',
            runtime__pb2.ListFileRequest.SerializeToString,
            runtime__pb2.ListFileResp.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DelFile(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: DelFileRequest -> google.protobuf.Empty."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/DelFile',
            runtime__pb2.DelFileRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetFileMeta(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: GetFileMetaRequest -> GetFileMetaResponse."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/GetFileMeta',
            runtime__pb2.GetFileMetaRequest.SerializeToString,
            runtime__pb2.GetFileMetaResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def InvokeBinding(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Unary-unary RPC: InvokeBindingRequest -> InvokeBindingResponse."""
        return grpc.experimental.unary_unary(request, target, '/spec.proto.runtime.v1.Runtime/InvokeBinding',
            runtime__pb2.InvokeBindingRequest.SerializeToString,
            runtime__pb2.InvokeBindingResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
[ "grpc.unary_stream_rpc_method_handler", "grpc.experimental.stream_unary", "grpc.method_handlers_generic_handler", "grpc.stream_stream_rpc_method_handler", "grpc.unary_unary_rpc_method_handler", "grpc.experimental.stream_stream", "grpc.experimental.unary_stream", "grpc.stream_unary_rpc_method_handler",...
[((20592, 20686), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""spec.proto.runtime.v1.Runtime"""', 'rpc_method_handlers'], {}), "('spec.proto.runtime.v1.Runtime',\n rpc_method_handlers)\n", (20628, 20686), False, 'import grpc\n'), ((13916, 14111), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SayHello'], {'request_deserializer': 'runtime__pb2.SayHelloRequest.FromString', 'response_serializer': 'runtime__pb2.SayHelloResponse.SerializeToString'}), '(servicer.SayHello, request_deserializer\n =runtime__pb2.SayHelloRequest.FromString, response_serializer=\n runtime__pb2.SayHelloResponse.SerializeToString)\n', (13951, 14111), False, 'import grpc\n'), ((14207, 14408), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.InvokeService'], {'request_deserializer': 'runtime__pb2.InvokeServiceRequest.FromString', 'response_serializer': 'runtime__pb2.InvokeResponse.SerializeToString'}), '(servicer.InvokeService,\n request_deserializer=runtime__pb2.InvokeServiceRequest.FromString,\n response_serializer=runtime__pb2.InvokeResponse.SerializeToString)\n', (14242, 14408), False, 'import grpc\n'), ((14509, 14731), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetConfiguration'], {'request_deserializer': 'runtime__pb2.GetConfigurationRequest.FromString', 'response_serializer': 'runtime__pb2.GetConfigurationResponse.SerializeToString'}), '(servicer.GetConfiguration,\n request_deserializer=runtime__pb2.GetConfigurationRequest.FromString,\n response_serializer=runtime__pb2.GetConfigurationResponse.SerializeToString\n )\n', (14544, 14731), False, 'import grpc\n'), ((14828, 15055), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SaveConfiguration'], {'request_deserializer': 'runtime__pb2.SaveConfigurationRequest.FromString', 'response_serializer': 
'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.SaveConfiguration,\n request_deserializer=runtime__pb2.SaveConfigurationRequest.FromString,\n response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.\n SerializeToString)\n', (14863, 15055), False, 'import grpc\n'), ((15154, 15385), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DeleteConfiguration'], {'request_deserializer': 'runtime__pb2.DeleteConfigurationRequest.FromString', 'response_serializer': 'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.DeleteConfiguration,\n request_deserializer=runtime__pb2.DeleteConfigurationRequest.FromString,\n response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.\n SerializeToString)\n', (15189, 15385), False, 'import grpc\n'), ((15487, 15730), 'grpc.stream_stream_rpc_method_handler', 'grpc.stream_stream_rpc_method_handler', (['servicer.SubscribeConfiguration'], {'request_deserializer': 'runtime__pb2.SubscribeConfigurationRequest.FromString', 'response_serializer': 'runtime__pb2.SubscribeConfigurationResponse.SerializeToString'}), '(servicer.SubscribeConfiguration,\n request_deserializer=runtime__pb2.SubscribeConfigurationRequest.\n FromString, response_serializer=runtime__pb2.\n SubscribeConfigurationResponse.SerializeToString)\n', (15524, 15730), False, 'import grpc\n'), ((15816, 16008), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.TryLock'], {'request_deserializer': 'runtime__pb2.TryLockRequest.FromString', 'response_serializer': 'runtime__pb2.TryLockResponse.SerializeToString'}), '(servicer.TryLock, request_deserializer=\n runtime__pb2.TryLockRequest.FromString, response_serializer=\n runtime__pb2.TryLockResponse.SerializeToString)\n', (15851, 16008), False, 'import grpc\n'), ((16097, 16286), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.Unlock'], {'request_deserializer': 
'runtime__pb2.UnlockRequest.FromString', 'response_serializer': 'runtime__pb2.UnlockResponse.SerializeToString'}), '(servicer.Unlock, request_deserializer=\n runtime__pb2.UnlockRequest.FromString, response_serializer=runtime__pb2\n .UnlockResponse.SerializeToString)\n', (16132, 16286), False, 'import grpc\n'), ((16378, 16574), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetNextId'], {'request_deserializer': 'runtime__pb2.GetNextIdRequest.FromString', 'response_serializer': 'runtime__pb2.GetNextIdResponse.SerializeToString'}), '(servicer.GetNextId,\n request_deserializer=runtime__pb2.GetNextIdRequest.FromString,\n response_serializer=runtime__pb2.GetNextIdResponse.SerializeToString)\n', (16413, 16574), False, 'import grpc\n'), ((16667, 16862), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetState'], {'request_deserializer': 'runtime__pb2.GetStateRequest.FromString', 'response_serializer': 'runtime__pb2.GetStateResponse.SerializeToString'}), '(servicer.GetState, request_deserializer\n =runtime__pb2.GetStateRequest.FromString, response_serializer=\n runtime__pb2.GetStateResponse.SerializeToString)\n', (16702, 16862), False, 'import grpc\n'), ((16957, 17162), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetBulkState'], {'request_deserializer': 'runtime__pb2.GetBulkStateRequest.FromString', 'response_serializer': 'runtime__pb2.GetBulkStateResponse.SerializeToString'}), '(servicer.GetBulkState,\n request_deserializer=runtime__pb2.GetBulkStateRequest.FromString,\n response_serializer=runtime__pb2.GetBulkStateResponse.SerializeToString)\n', (16992, 17162), False, 'import grpc\n'), ((17256, 17467), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SaveState'], {'request_deserializer': 'runtime__pb2.SaveStateRequest.FromString', 'response_serializer': 
'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.SaveState,\n request_deserializer=runtime__pb2.SaveStateRequest.FromString,\n response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.\n SerializeToString)\n', (17291, 17467), False, 'import grpc\n'), ((17558, 17773), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DeleteState'], {'request_deserializer': 'runtime__pb2.DeleteStateRequest.FromString', 'response_serializer': 'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.DeleteState,\n request_deserializer=runtime__pb2.DeleteStateRequest.FromString,\n response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.\n SerializeToString)\n', (17593, 17773), False, 'import grpc\n'), ((17868, 18091), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DeleteBulkState'], {'request_deserializer': 'runtime__pb2.DeleteBulkStateRequest.FromString', 'response_serializer': 'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.DeleteBulkState,\n request_deserializer=runtime__pb2.DeleteBulkStateRequest.FromString,\n response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.\n SerializeToString)\n', (17903, 18091), False, 'import grpc\n'), ((18194, 18434), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.ExecuteStateTransaction'], {'request_deserializer': 'runtime__pb2.ExecuteStateTransactionRequest.FromString', 'response_serializer': 'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.ExecuteStateTransaction,\n request_deserializer=runtime__pb2.ExecuteStateTransactionRequest.\n FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.\n Empty.SerializeToString)\n', (18229, 18434), False, 'import grpc\n'), ((18525, 18742), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.PublishEvent'], {'request_deserializer': 
'runtime__pb2.PublishEventRequest.FromString', 'response_serializer': 'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.PublishEvent,\n request_deserializer=runtime__pb2.PublishEventRequest.FromString,\n response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.\n SerializeToString)\n', (18560, 18742), False, 'import grpc\n'), ((18829, 19022), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.GetFile'], {'request_deserializer': 'runtime__pb2.GetFileRequest.FromString', 'response_serializer': 'runtime__pb2.GetFileResponse.SerializeToString'}), '(servicer.GetFile, request_deserializer\n =runtime__pb2.GetFileRequest.FromString, response_serializer=\n runtime__pb2.GetFileResponse.SerializeToString)\n', (18865, 19022), False, 'import grpc\n'), ((19112, 19317), 'grpc.stream_unary_rpc_method_handler', 'grpc.stream_unary_rpc_method_handler', (['servicer.PutFile'], {'request_deserializer': 'runtime__pb2.PutFileRequest.FromString', 'response_serializer': 'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.PutFile, request_deserializer\n =runtime__pb2.PutFileRequest.FromString, response_serializer=\n google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString)\n', (19148, 19317), False, 'import grpc\n'), ((19408, 19599), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.ListFile'], {'request_deserializer': 'runtime__pb2.ListFileRequest.FromString', 'response_serializer': 'runtime__pb2.ListFileResp.SerializeToString'}), '(servicer.ListFile, request_deserializer\n =runtime__pb2.ListFileRequest.FromString, response_serializer=\n runtime__pb2.ListFileResp.SerializeToString)\n', (19443, 19599), False, 'import grpc\n'), ((19689, 19893), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DelFile'], {'request_deserializer': 'runtime__pb2.DelFileRequest.FromString', 'response_serializer': 
'google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString'}), '(servicer.DelFile, request_deserializer=\n runtime__pb2.DelFileRequest.FromString, response_serializer=\n google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString)\n', (19724, 19893), False, 'import grpc\n'), ((19987, 20189), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetFileMeta'], {'request_deserializer': 'runtime__pb2.GetFileMetaRequest.FromString', 'response_serializer': 'runtime__pb2.GetFileMetaResponse.SerializeToString'}), '(servicer.GetFileMeta,\n request_deserializer=runtime__pb2.GetFileMetaRequest.FromString,\n response_serializer=runtime__pb2.GetFileMetaResponse.SerializeToString)\n', (20022, 20189), False, 'import grpc\n'), ((20287, 20495), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.InvokeBinding'], {'request_deserializer': 'runtime__pb2.InvokeBindingRequest.FromString', 'response_serializer': 'runtime__pb2.InvokeBindingResponse.SerializeToString'}), '(servicer.InvokeBinding,\n request_deserializer=runtime__pb2.InvokeBindingRequest.FromString,\n response_serializer=runtime__pb2.InvokeBindingResponse.SerializeToString)\n', (20322, 20495), False, 'import grpc\n'), ((21212, 21514), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/SayHello"""', 'runtime__pb2.SayHelloRequest.SerializeToString', 'runtime__pb2.SayHelloResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/SayHello', runtime__pb2.SayHelloRequest\n .SerializeToString, runtime__pb2.SayHelloResponse.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (21241, 21514), False, 'import grpc\n'), ((21873, 22184), 'grpc.experimental.unary_unary', 
'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/InvokeService"""', 'runtime__pb2.InvokeServiceRequest.SerializeToString', 'runtime__pb2.InvokeResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/InvokeService', runtime__pb2.\n InvokeServiceRequest.SerializeToString, runtime__pb2.InvokeResponse.\n FromString, options, channel_credentials, insecure, call_credentials,\n compression, wait_for_ready, timeout, metadata)\n", (21902, 22184), False, 'import grpc\n'), ((22545, 22872), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/GetConfiguration"""', 'runtime__pb2.GetConfigurationRequest.SerializeToString', 'runtime__pb2.GetConfigurationResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/GetConfiguration', runtime__pb2.\n GetConfigurationRequest.SerializeToString, runtime__pb2.\n GetConfigurationResponse.FromString, options, channel_credentials,\n insecure, call_credentials, compression, wait_for_ready, timeout, metadata)\n", (22574, 22872), False, 'import grpc\n'), ((23234, 23569), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/SaveConfiguration"""', 'runtime__pb2.SaveConfigurationRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/SaveConfiguration', runtime__pb2.\n SaveConfigurationRequest.SerializeToString,\n google_dot_protobuf_dot_empty__pb2.Empty.FromString, options,\n 
channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (23263, 23569), False, 'import grpc\n'), ((23930, 24269), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/DeleteConfiguration"""', 'runtime__pb2.DeleteConfigurationRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/DeleteConfiguration', runtime__pb2.\n DeleteConfigurationRequest.SerializeToString,\n google_dot_protobuf_dot_empty__pb2.Empty.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (23959, 24269), False, 'import grpc\n'), ((24642, 24998), 'grpc.experimental.stream_stream', 'grpc.experimental.stream_stream', (['request_iterator', 'target', '"""/spec.proto.runtime.v1.Runtime/SubscribeConfiguration"""', 'runtime__pb2.SubscribeConfigurationRequest.SerializeToString', 'runtime__pb2.SubscribeConfigurationResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request_iterator, target,\n '/spec.proto.runtime.v1.Runtime/SubscribeConfiguration', runtime__pb2.\n SubscribeConfigurationRequest.SerializeToString, runtime__pb2.\n SubscribeConfigurationResponse.FromString, options, channel_credentials,\n insecure, call_credentials, compression, wait_for_ready, timeout, metadata)\n", (24673, 24998), False, 'import grpc\n'), ((25350, 25649), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/TryLock"""', 'runtime__pb2.TryLockRequest.SerializeToString', 'runtime__pb2.TryLockResponse.FromString', 'options', 'channel_credentials', 'insecure', 
'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/TryLock', runtime__pb2.TryLockRequest.\n SerializeToString, runtime__pb2.TryLockResponse.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (25379, 25649), False, 'import grpc\n'), ((26001, 26297), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/Unlock"""', 'runtime__pb2.UnlockRequest.SerializeToString', 'runtime__pb2.UnlockResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/Unlock', runtime__pb2.UnlockRequest.\n SerializeToString, runtime__pb2.UnlockResponse.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (26030, 26297), False, 'import grpc\n'), ((26652, 26958), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/GetNextId"""', 'runtime__pb2.GetNextIdRequest.SerializeToString', 'runtime__pb2.GetNextIdResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/GetNextId', runtime__pb2.\n GetNextIdRequest.SerializeToString, runtime__pb2.GetNextIdResponse.\n FromString, options, channel_credentials, insecure, call_credentials,\n compression, wait_for_ready, timeout, metadata)\n", (26681, 26958), False, 'import grpc\n'), ((27311, 27613), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/GetState"""', 'runtime__pb2.GetStateRequest.SerializeToString', 
'runtime__pb2.GetStateResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/GetState', runtime__pb2.GetStateRequest\n .SerializeToString, runtime__pb2.GetStateResponse.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (27340, 27613), False, 'import grpc\n'), ((27971, 28286), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/GetBulkState"""', 'runtime__pb2.GetBulkStateRequest.SerializeToString', 'runtime__pb2.GetBulkStateResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/GetBulkState', runtime__pb2.\n GetBulkStateRequest.SerializeToString, runtime__pb2.\n GetBulkStateResponse.FromString, options, channel_credentials, insecure,\n call_credentials, compression, wait_for_ready, timeout, metadata)\n", (28000, 28286), False, 'import grpc\n'), ((28640, 28956), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/SaveState"""', 'runtime__pb2.SaveStateRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/SaveState', runtime__pb2.\n SaveStateRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.\n Empty.FromString, options, channel_credentials, insecure,\n call_credentials, compression, wait_for_ready, timeout, metadata)\n", (28669, 28956), False, 'import grpc\n'), ((29312, 29635), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', 
(['request', 'target', '"""/spec.proto.runtime.v1.Runtime/DeleteState"""', 'runtime__pb2.DeleteStateRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/DeleteState', runtime__pb2.\n DeleteStateRequest.SerializeToString,\n google_dot_protobuf_dot_empty__pb2.Empty.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (29341, 29635), False, 'import grpc\n'), ((29992, 30323), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/DeleteBulkState"""', 'runtime__pb2.DeleteBulkStateRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/DeleteBulkState', runtime__pb2.\n DeleteBulkStateRequest.SerializeToString,\n google_dot_protobuf_dot_empty__pb2.Empty.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (30021, 30323), False, 'import grpc\n'), ((30688, 31035), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/ExecuteStateTransaction"""', 'runtime__pb2.ExecuteStateTransactionRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/ExecuteStateTransaction', runtime__pb2.\n ExecuteStateTransactionRequest.SerializeToString,\n 
google_dot_protobuf_dot_empty__pb2.Empty.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (30717, 31035), False, 'import grpc\n'), ((31389, 31714), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/PublishEvent"""', 'runtime__pb2.PublishEventRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/PublishEvent', runtime__pb2.\n PublishEventRequest.SerializeToString,\n google_dot_protobuf_dot_empty__pb2.Empty.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (31418, 31714), False, 'import grpc\n'), ((32063, 32363), 'grpc.experimental.unary_stream', 'grpc.experimental.unary_stream', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/GetFile"""', 'runtime__pb2.GetFileRequest.SerializeToString', 'runtime__pb2.GetFileResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/GetFile', runtime__pb2.GetFileRequest.\n SerializeToString, runtime__pb2.GetFileResponse.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (32093, 32363), False, 'import grpc\n'), ((32725, 33046), 'grpc.experimental.stream_unary', 'grpc.experimental.stream_unary', (['request_iterator', 'target', '"""/spec.proto.runtime.v1.Runtime/PutFile"""', 'runtime__pb2.PutFileRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 
'timeout', 'metadata'], {}), "(request_iterator, target,\n '/spec.proto.runtime.v1.Runtime/PutFile', runtime__pb2.PutFileRequest.\n SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString,\n options, channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (32755, 33046), False, 'import grpc\n'), ((33400, 33698), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/ListFile"""', 'runtime__pb2.ListFileRequest.SerializeToString', 'runtime__pb2.ListFileResp.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/ListFile', runtime__pb2.ListFileRequest\n .SerializeToString, runtime__pb2.ListFileResp.FromString, options,\n channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (33429, 33698), False, 'import grpc\n'), ((34051, 34362), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/DelFile"""', 'runtime__pb2.DelFileRequest.SerializeToString', 'google_dot_protobuf_dot_empty__pb2.Empty.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/DelFile', runtime__pb2.DelFileRequest.\n SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString,\n options, channel_credentials, insecure, call_credentials, compression,\n wait_for_ready, timeout, metadata)\n", (34080, 34362), False, 'import grpc\n'), ((34719, 35031), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/GetFileMeta"""', 'runtime__pb2.GetFileMetaRequest.SerializeToString', 
'runtime__pb2.GetFileMetaResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/GetFileMeta', runtime__pb2.\n GetFileMetaRequest.SerializeToString, runtime__pb2.GetFileMetaResponse.\n FromString, options, channel_credentials, insecure, call_credentials,\n compression, wait_for_ready, timeout, metadata)\n", (34748, 35031), False, 'import grpc\n'), ((35389, 35707), 'grpc.experimental.unary_unary', 'grpc.experimental.unary_unary', (['request', 'target', '"""/spec.proto.runtime.v1.Runtime/InvokeBinding"""', 'runtime__pb2.InvokeBindingRequest.SerializeToString', 'runtime__pb2.InvokeBindingResponse.FromString', 'options', 'channel_credentials', 'insecure', 'call_credentials', 'compression', 'wait_for_ready', 'timeout', 'metadata'], {}), "(request, target,\n '/spec.proto.runtime.v1.Runtime/InvokeBinding', runtime__pb2.\n InvokeBindingRequest.SerializeToString, runtime__pb2.\n InvokeBindingResponse.FromString, options, channel_credentials,\n insecure, call_credentials, compression, wait_for_ready, timeout, metadata)\n", (35418, 35707), False, 'import grpc\n')]
import heterocl as hcl hcl.init() target = hcl.Platform.xilinx_zc706 initiation_interval = 4 a = hcl.placeholder((10, 20), name="a") b = hcl.placeholder((10, 20), name="b") c = hcl.placeholder((10, 20), name="c") d = hcl.placeholder((10, 20), name="d") e = hcl.placeholder((10, 20), name="e") def add_mul(a, b, c, d, e): @hcl.def_([a.shape, b.shape, c.shape]) def ret_add(a, b, c): with hcl.for_(0, a.shape[0]) as i: with hcl.for_(0, a.shape[1]) as j: c[i, j] = a[i, j] + b[i, j] @hcl.def_([c.shape, d.shape, e.shape]) def ret_mul(c, d, e): # hcl.update(c, lambda x, y: a[x, y] * b[x, y], 'c_mul') with hcl.for_(0, c.shape[0]) as i: with hcl.for_(0, c.shape[1]) as j: e[i, j] = c[i, j] * d[i, j] ret_add(a, b, c) ret_mul(c, d, e) # compute customization s = hcl.create_schedule([a, b, c, d, e], add_mul) # op1 = add_mul.ret_add.c # op2 = add_mul.ret_mul.c # s[op1].pipeline(op1.axis[0], initiation_interval) # stream into modules / device a0, b0 = s.to([a, b], target.xcel) d0 = s.to(d, target.xcel) #s.partition(b0, dim=2, factor=2) s.to([a0, b0], s[add_mul.ret_add]) s.to(d0, s[add_mul.ret_mul]) # within device move producer to consumer s.to(c, s[add_mul.ret_mul], s[add_mul.ret_add], depth=10) # return tensor for inter-device move # e0 = s.stream_to(e, hcl.CPU('riscv')) # print(add_mul.ret_mul._buf, c._buf) print(hcl.lower(s)) code = hcl.build(s, target) print(code) # # with open("example.cl", "w") as f: # f.write(code) # f.close()
[ "heterocl.for_", "heterocl.placeholder", "heterocl.def_", "heterocl.create_schedule", "heterocl.build", "heterocl.init", "heterocl.lower" ]
[((24, 34), 'heterocl.init', 'hcl.init', ([], {}), '()\n', (32, 34), True, 'import heterocl as hcl\n'), ((99, 134), 'heterocl.placeholder', 'hcl.placeholder', (['(10, 20)'], {'name': '"""a"""'}), "((10, 20), name='a')\n", (114, 134), True, 'import heterocl as hcl\n'), ((139, 174), 'heterocl.placeholder', 'hcl.placeholder', (['(10, 20)'], {'name': '"""b"""'}), "((10, 20), name='b')\n", (154, 174), True, 'import heterocl as hcl\n'), ((179, 214), 'heterocl.placeholder', 'hcl.placeholder', (['(10, 20)'], {'name': '"""c"""'}), "((10, 20), name='c')\n", (194, 214), True, 'import heterocl as hcl\n'), ((220, 255), 'heterocl.placeholder', 'hcl.placeholder', (['(10, 20)'], {'name': '"""d"""'}), "((10, 20), name='d')\n", (235, 255), True, 'import heterocl as hcl\n'), ((260, 295), 'heterocl.placeholder', 'hcl.placeholder', (['(10, 20)'], {'name': '"""e"""'}), "((10, 20), name='e')\n", (275, 295), True, 'import heterocl as hcl\n'), ((869, 914), 'heterocl.create_schedule', 'hcl.create_schedule', (['[a, b, c, d, e]', 'add_mul'], {}), '([a, b, c, d, e], add_mul)\n', (888, 914), True, 'import heterocl as hcl\n'), ((1464, 1484), 'heterocl.build', 'hcl.build', (['s', 'target'], {}), '(s, target)\n', (1473, 1484), True, 'import heterocl as hcl\n'), ((330, 367), 'heterocl.def_', 'hcl.def_', (['[a.shape, b.shape, c.shape]'], {}), '([a.shape, b.shape, c.shape])\n', (338, 367), True, 'import heterocl as hcl\n'), ((534, 571), 'heterocl.def_', 'hcl.def_', (['[c.shape, d.shape, e.shape]'], {}), '([c.shape, d.shape, e.shape])\n', (542, 571), True, 'import heterocl as hcl\n'), ((1443, 1455), 'heterocl.lower', 'hcl.lower', (['s'], {}), '(s)\n', (1452, 1455), True, 'import heterocl as hcl\n'), ((407, 430), 'heterocl.for_', 'hcl.for_', (['(0)', 'a.shape[0]'], {}), '(0, a.shape[0])\n', (415, 430), True, 'import heterocl as hcl\n'), ((676, 699), 'heterocl.for_', 'hcl.for_', (['(0)', 'c.shape[0]'], {}), '(0, c.shape[0])\n', (684, 699), True, 'import heterocl as hcl\n'), ((454, 477), 'heterocl.for_', 
'hcl.for_', (['(0)', 'a.shape[1]'], {}), '(0, a.shape[1])\n', (462, 477), True, 'import heterocl as hcl\n'), ((723, 746), 'heterocl.for_', 'hcl.for_', (['(0)', 'c.shape[1]'], {}), '(0, c.shape[1])\n', (731, 746), True, 'import heterocl as hcl\n')]
import os from setuptools import setup here = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(here, 'README.md')) as readme_file: README = readme_file.read() setup( name='k8s-workload-scaler', version='0.0.2', packages=['k8s_workload_scaler'], url='github.com/eminaktas/k8s-workload-scaler', license='MIT', author='emin.aktas', author_email='<EMAIL>', description='Kubernetes workload scaler', long_description=README, install_requires=[ 'setuptools~=54.2.0', 'kubernetes~=12.0.1', 'requests~=2.25.1', 'prometheus-api-client~=0.4.2', ] )
[ "os.path.join", "os.path.dirname", "setuptools.setup" ]
[((183, 579), 'setuptools.setup', 'setup', ([], {'name': '"""k8s-workload-scaler"""', 'version': '"""0.0.2"""', 'packages': "['k8s_workload_scaler']", 'url': '"""github.com/eminaktas/k8s-workload-scaler"""', 'license': '"""MIT"""', 'author': '"""emin.aktas"""', 'author_email': '"""<EMAIL>"""', 'description': '"""Kubernetes workload scaler"""', 'long_description': 'README', 'install_requires': "['setuptools~=54.2.0', 'kubernetes~=12.0.1', 'requests~=2.25.1',\n 'prometheus-api-client~=0.4.2']"}), "(name='k8s-workload-scaler', version='0.0.2', packages=[\n 'k8s_workload_scaler'], url='github.com/eminaktas/k8s-workload-scaler',\n license='MIT', author='emin.aktas', author_email='<EMAIL>', description\n ='Kubernetes workload scaler', long_description=README,\n install_requires=['setuptools~=54.2.0', 'kubernetes~=12.0.1',\n 'requests~=2.25.1', 'prometheus-api-client~=0.4.2'])\n", (188, 579), False, 'from setuptools import setup\n'), ((63, 88), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (78, 88), False, 'import os\n'), ((101, 132), 'os.path.join', 'os.path.join', (['here', '"""README.md"""'], {}), "(here, 'README.md')\n", (113, 132), False, 'import os\n')]