Column        Type           Min   Max
code          stringlengths  22    1.05M
apis          listlengths    1     3.31k
extract_api   stringlengths  75    3.25M
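Each example pairs a raw source file (code) with the list of fully-qualified APIs it calls (apis) and a serialized list of per-call extraction records (extract_api); the rows below follow in that order. A minimal sketch for iterating the data, assuming a Hugging Face-style dataset — the dump above does not name its storage format or identifier, so "code-apis" is a hypothetical placeholder:

import ast

from datasets import load_dataset  # pip install datasets

# Hypothetical dataset name; substitute the real identifier.
ds = load_dataset("code-apis", split="train")
for row in ds.select(range(3)):
    print(len(row["code"]), "chars,", len(row["apis"]), "distinct APIs")
    # extract_api is a string column holding a Python literal; parse it
    # back into a list of tuples.
    calls = ast.literal_eval(row["extract_api"])
    print(calls[0])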
#!/usr/bin/env python
# Copyright (C) 2016 Hewlett Packard Enterprise Development LP
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from opsvalidator.base import BaseValidator
from opsvalidator import error
from opsvalidator.error import ValidationError
from opsrest.utils.utils import get_column_data_from_row
import os
from copy import copy

# Lazily populated mapping of timezone name -> zoneinfo file path.
list_of_timezones = None


def build_timezone_db():
    global list_of_timezones
    path = "/usr/share/zoneinfo/posix/"
    for root, directories, filenames in os.walk(path):
        for filename in filenames:
            full_path = os.path.join(root, filename)
            timezone = copy(full_path)
            timezone = timezone.replace(path, "")
            list_of_timezones[timezone] = full_path


def check_valid_timezone(timezone_user_input):
    global list_of_timezones
    if list_of_timezones is None:
        list_of_timezones = {}
        build_timezone_db()
    return timezone_user_input in list_of_timezones


class SystemValidator(BaseValidator):
    resource = "system"

    def validate_modification(self, validation_args):
        system_row = validation_args.resource_row
        if hasattr(system_row, "timezone"):
            timezone = get_column_data_from_row(system_row, "timezone")[0]
            if not check_valid_timezone(timezone):
                details = "Invalid timezone %s." % timezone
                raise ValidationError(error.VERIFICATION_FAILED, details)
[ "opsvalidator.error.ValidationError", "os.walk", "opsrest.utils.utils.get_column_data_from_row", "copy.copy", "os.path.join" ]
[((1049, 1062), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (1056, 1062), False, 'import os\n'), ((1123, 1151), 'os.path.join', 'os.path.join', (['root', 'filename'], {}), '(root, filename)\n', (1135, 1151), False, 'import os\n'), ((1175, 1190), 'copy.copy', 'copy', (['full_path'], {}), '(full_path)\n', (1179, 1190), False, 'from copy import copy\n'), ((1807, 1855), 'opsrest.utils.utils.get_column_data_from_row', 'get_column_data_from_row', (['system_row', '"""timezone"""'], {}), "(system_row, 'timezone')\n", (1831, 1855), False, 'from opsrest.utils.utils import get_column_data_from_row\n'), ((2001, 2052), 'opsvalidator.error.ValidationError', 'ValidationError', (['error.VERIFICATION_FAILED', 'details'], {}), '(error.VERIFICATION_FAILED, details)\n', (2016, 2052), False, 'from opsvalidator.error import ValidationError\n')]
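Each extract_api entry appears to be an 8-field tuple: the character span of the whole call, the fully-qualified API name, the callee as written, an (args, kwargs) pair of argument reprs, a normalized argument string, the span of the callee name, a flag that seems to be True when the module was imported under an alias (compare the 'import Generator as gnr' rows further down), and the originating import statement. This layout is inferred from the rows themselves, not from a documented schema; a hedged sketch that unpacks one record under that assumption:

from typing import NamedTuple

class ApiCall(NamedTuple):
    call_span: tuple      # (start, end) offsets of the full call in `code`
    qualified_name: str   # e.g. 'os.path.join'
    call_expr: str        # the callee as written in source
    args_kwargs: tuple    # (positional-arg reprs, keyword-arg reprs)
    arg_string: str       # normalized '(root, filename)\n'-style rendering
    name_span: tuple      # offsets of the callee name itself
    aliased: bool         # assumed: True when bound via 'import X as Y'
    import_stmt: str      # the import line that binds the callee

record = ApiCall(*((1123, 1151), 'os.path.join', 'os.path.join',
                   (['root', 'filename'], {}), '(root, filename)\n',
                   (1135, 1151), False, 'import os\n'))
print(record.qualified_name, record.call_span)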
from vmad.lib import linalg, mpi
from vmad.testing import BaseScalarTest

from mpi4py import MPI
import numpy
from pprint import pprint


class Test_allreduce(BaseScalarTest):
    to_scalar = staticmethod(linalg.to_scalar)

    comm = MPI.COMM_WORLD
    x = comm.rank + 1.0
    y = comm.allreduce(x) ** 2
    x_ = numpy.eye(1)

    # self.x is distributed, thus allreduce along the rank axis.
    def inner(self, a, b):
        return self.comm.allreduce(numpy.sum(a * b))

    def model(self, x):
        return mpi.allreduce(x, self.comm)


class Test_allbcast(BaseScalarTest):
    to_scalar = staticmethod(lambda x: x)

    comm = MPI.COMM_WORLD
    x = 2.0
    y = comm.allreduce(x * (comm.rank + 1))
    x_ = numpy.eye(1)

    # self.x is universal, thus no special allreduce here.
    def inner(self, a, b):
        return numpy.sum(a * b)

    def model(self, x):
        x = mpi.allbcast(x, self.comm)
        x = x * (self.comm.rank + 1)
        return mpi.allreduce(x, comm=self.comm)
[ "vmad.lib.mpi.allreduce", "numpy.eye", "vmad.lib.mpi.allbcast", "numpy.sum" ]
[((315, 327), 'numpy.eye', 'numpy.eye', (['(1)'], {}), '(1)\n', (324, 327), False, 'import numpy\n'), ((715, 727), 'numpy.eye', 'numpy.eye', (['(1)'], {}), '(1)\n', (724, 727), False, 'import numpy\n'), ((514, 541), 'vmad.lib.mpi.allreduce', 'mpi.allreduce', (['x', 'self.comm'], {}), '(x, self.comm)\n', (527, 541), False, 'from vmad.lib import linalg, mpi\n'), ((830, 846), 'numpy.sum', 'numpy.sum', (['(a * b)'], {}), '(a * b)\n', (839, 846), False, 'import numpy\n'), ((882, 908), 'vmad.lib.mpi.allbcast', 'mpi.allbcast', (['x', 'self.comm'], {}), '(x, self.comm)\n', (894, 908), False, 'from vmad.lib import linalg, mpi\n'), ((961, 993), 'vmad.lib.mpi.allreduce', 'mpi.allreduce', (['x'], {'comm': 'self.comm'}), '(x, comm=self.comm)\n', (974, 993), False, 'from vmad.lib import linalg, mpi\n'), ((456, 472), 'numpy.sum', 'numpy.sum', (['(a * b)'], {}), '(a * b)\n', (465, 472), False, 'import numpy\n')]
"""Strict shared-folder permission checks""" import stat def file_groupreadable(path): """Check whether a given path has bad permissons.""" if not bool(stat.S_IRGRP & path.stat().mode): return 'PROB_FILE_NOT_GRPRD' def file_group_executable(path): """Check if a file should be group executable""" mode = path.stat().mode if stat.S_ISDIR(mode): return if bool(stat.S_IXUSR & mode) and not bool(stat.S_IXGRP & mode): return 'PROB_FILE_NOT_GRPEXEC' def dir_group_readable(path): """Check if a directory is accessible and readable""" mode = path.stat().mode if not stat.S_ISDIR(mode): return else: if not bool(stat.S_IXGRP & mode): return 'PROB_DIR_NOT_ACCESSIBLE' elif not bool(stat.S_IWGRP & mode): return 'PROB_DIR_NOT_WRITABLE' ALLSCHECKS = (file_groupreadable, file_group_executable, dir_group_readable)
[ "stat.S_ISDIR" ]
[((355, 373), 'stat.S_ISDIR', 'stat.S_ISDIR', (['mode'], {}), '(mode)\n', (367, 373), False, 'import stat\n'), ((625, 643), 'stat.S_ISDIR', 'stat.S_ISDIR', (['mode'], {}), '(mode)\n', (637, 643), False, 'import stat\n')]
#!/usr/bin/env python3
from sys import stderr
import os
import json

import requests
from urllib.parse import quote as urlquote

from google.oauth2.service_account import IDTokenCredentials
from google.oauth2 import id_token
from google.auth.transport.requests import Request

from twisted.internet import reactor, ssl
from twisted.web import proxy, server
from twisted.protocols.tls import TLSMemoryBIOFactory
from twisted.logger import globalLogBeginner, textFileLogObserver

globalLogBeginner.beginLoggingTo([textFileLogObserver(stderr)])


def get_oidc_token(request, client_id, service_account):
    sa_info = json.loads(service_account)
    credentials = IDTokenCredentials.from_service_account_info(
        sa_info, target_audience=client_id
    )
    credentials.refresh(request)
    return credentials.token


def exchange_google_id_token_for_gcip_id_token(api_key, google_open_id_connect_token):
    SIGN_IN_WITH_IDP_API = 'https://identitytoolkit.googleapis.com/v1/accounts:signInWithIdp'
    url = SIGN_IN_WITH_IDP_API + '?key=' + api_key
    data = {'requestUri': 'http://localhost',
            'returnSecureToken': True,
            'postBody': 'id_token=' + google_open_id_connect_token + '&providerId=google.com'}
    resp = requests.post(url, data)
    res = resp.json()
    return res['idToken']


class IAPReverseProxyResource(proxy.ReverseProxyResource):
    def proxyClientFactoryClass(self, *args, **kwargs):
        return TLSMemoryBIOFactory(
            ssl.optionsForClientTLS(self.host),
            True,
            super().proxyClientFactoryClass(*args, **kwargs),
        )

    def __init__(self, id_token, custom_auth_header, target_uri, target_port, path=b""):
        super().__init__(target_uri, target_port, path)
        self.id_token = id_token
        self.custom_auth_header = custom_auth_header

    def render(self, request):
        # Preserve any caller-supplied Authorization header under a custom
        # name before overwriting it with the IAP bearer token.
        if self.custom_auth_header and request.requestHeaders.hasHeader(b"authorization"):
            request.requestHeaders.setRawHeaders(
                self.custom_auth_header,
                request.requestHeaders.getRawHeaders(b"authorization", []),
            )
        request.requestHeaders.setRawHeaders(b"authorization", ['Bearer {}'.format(self.id_token)])
        return super().render(request)

    def getChild(self, path, request):
        return IAPReverseProxyResource(
            self.id_token,
            self.custom_auth_header,
            self.host,
            self.port,
            self.path + b"/" + urlquote(path, safe=b"").encode("utf-8"),
        )


custom_auth_header = os.environ.get("IAP_CUSTOM_AUTH_HEADER")
target_host = os.environ["IAP_TARGET_HOST"]
target_port = (
    int(os.environ.get("IAP_TARGET_PORT"))
    if os.environ.get("IAP_TARGET_PORT")
    else 443
)
client_id = os.environ["IAP_CLIENT_ID"]
sa_data = os.environ["IAP_SA"]
api_key = os.environ["API_KEY"]

open_id_connect_token = get_oidc_token(Request(), client_id, sa_data)
id_token = exchange_google_id_token_for_gcip_id_token(api_key, open_id_connect_token)
site = server.Site(
    IAPReverseProxyResource(id_token, custom_auth_header, target_host, target_port)
)
reactor.listenTCP(9000, site, interface="127.0.0.1")
reactor.run()
[ "twisted.logger.textFileLogObserver", "google.auth.transport.requests.Request", "json.loads", "twisted.internet.reactor.listenTCP", "os.environ.get", "google.oauth2.service_account.IDTokenCredentials.from_service_account_info", "twisted.internet.ssl.optionsForClientTLS", "urllib.parse.quote", "twisted.internet.reactor.run", "requests.post" ]
[((2559, 2599), 'os.environ.get', 'os.environ.get', (['"""IAP_CUSTOM_AUTH_HEADER"""'], {}), "('IAP_CUSTOM_AUTH_HEADER')\n", (2573, 2599), False, 'import os\n'), ((3115, 3167), 'twisted.internet.reactor.listenTCP', 'reactor.listenTCP', (['(9000)', 'site'], {'interface': '"""127.0.0.1"""'}), "(9000, site, interface='127.0.0.1')\n", (3132, 3167), False, 'from twisted.internet import reactor, ssl\n'), ((3168, 3181), 'twisted.internet.reactor.run', 'reactor.run', ([], {}), '()\n', (3179, 3181), False, 'from twisted.internet import reactor, ssl\n'), ((615, 642), 'json.loads', 'json.loads', (['service_account'], {}), '(service_account)\n', (625, 642), False, 'import json\n'), ((661, 746), 'google.oauth2.service_account.IDTokenCredentials.from_service_account_info', 'IDTokenCredentials.from_service_account_info', (['sa_info'], {'target_audience': 'client_id'}), '(sa_info, target_audience=client_id\n )\n', (705, 746), False, 'from google.oauth2.service_account import IDTokenCredentials\n'), ((1227, 1251), 'requests.post', 'requests.post', (['url', 'data'], {}), '(url, data)\n', (1240, 1251), False, 'import requests\n'), ((2706, 2735), 'os.environ.get', 'os.environ.get', (['"""TARGET_PORT"""'], {}), "('TARGET_PORT')\n", (2720, 2735), False, 'import os\n'), ((2890, 2899), 'google.auth.transport.requests.Request', 'Request', ([], {}), '()\n', (2897, 2899), False, 'from google.auth.transport.requests import Request\n'), ((513, 540), 'twisted.logger.textFileLogObserver', 'textFileLogObserver', (['stderr'], {}), '(stderr)\n', (532, 540), False, 'from twisted.logger import globalLogBeginner, textFileLogObserver\n'), ((2668, 2701), 'os.environ.get', 'os.environ.get', (['"""IAP_TARGET_PORT"""'], {}), "('IAP_TARGET_PORT')\n", (2682, 2701), False, 'import os\n'), ((1461, 1495), 'twisted.internet.ssl.optionsForClientTLS', 'ssl.optionsForClientTLS', (['self.host'], {}), '(self.host)\n', (1484, 1495), False, 'from twisted.internet import reactor, ssl\n'), ((2485, 2509), 'urllib.parse.quote', 'urlquote', (['path'], {'safe': "b''"}), "(path, safe=b'')\n", (2493, 2509), True, 'from urllib.parse import quote as urlquote\n')]
# =========================================================================================
# Copyright 2016 Community Information Online Consortium (CIOC) and KCL Software Solutions Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================================
from __future__ import absolute_import

import os

from pyramid.config import Configurator
from sqlalchemy import create_engine
from sqlalchemy.exc import OperationalError
from pyramid_beaker import session_factory_from_settings
from pyramid.authentication import SessionAuthenticationPolicy
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.security import NO_PERMISSION_REQUIRED, Authenticated, Deny, Allow, Everyone
from apscheduler.schedulers.background import BackgroundScheduler

from offlinetools.models import initialize_sql, get_config
from offlinetools.request import passvars_pregen
from offlinetools.scheduler import scheduled_pull, key_to_schedule
from offlinetools.logtools import _get_app_data_dir

import logging
log = logging.getLogger('offlinetools')


def groupfinder(userid, request):
    user = request.user
    if user is not None:
        log.debug('user: %s, %d', user.UserName, user.ViewType)
        return ['group:' + str(user.ViewType)]
    return None


class RootFactory(object):
    __acl__ = [(Allow, Authenticated, 'view'), (Deny, Everyone, 'view')]

    def __init__(self, request):
        try:
            if not request.config.machine_name:
                self.__acl__ = [(Allow, Everyone, 'view')]
        except OperationalError:
            log.critical('request.url: %s', request.path_qs)


def found_view(request):
    return request.context


sched = None


def main(global_config, **settings):
    """This function returns a Pyramid WSGI application."""
    global sched

    app_data_dir = _get_app_data_dir()
    engine = create_engine('sqlite:///%s\\OfflineTools.db' % app_data_dir,
                           isolation_level='READ UNCOMMITTED')
    initialize_sql(engine)

    cfg = get_config()

    sched = BackgroundScheduler()
    sched.start()
    sched.add_job(scheduled_pull, 'cron', **key_to_schedule(cfg.public_key))

    session_lock_dir = os.path.join(app_data_dir, 'session')
    try:
        os.makedirs(session_lock_dir)
    except os.error:
        pass

    settings['beaker.session.lock_dir'] = session_lock_dir
    session_factory = session_factory_from_settings(settings)

    authn_policy = SessionAuthenticationPolicy(callback=groupfinder, debug=True)
    authz_policy = ACLAuthorizationPolicy()

    config = Configurator(settings=settings, session_factory=session_factory,
                          root_factory=RootFactory,
                          request_factory='offlinetools.request.OfflineToolsRequest',
                          authentication_policy=authn_policy,
                          authorization_policy=authz_policy)
    config.include('pyramid_mako')

    config.add_translation_dirs('offlinetools:locale')

    config.add_static_view('static', 'offlinetools:static', cache_max_age=3600,
                           permission=NO_PERMISSION_REQUIRED)

    config.add_route('search', '/', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.search.Search', route_name='search', attr='search',
                    permission='view', renderer='search.mak')

    config.add_route('results', '/results', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.search.Search', route_name='results', attr='results',
                    permission='view', renderer='results.mak')

    config.add_route('record', '/record/{num}',
                     factory='offlinetools.views.record.RecordRootFactory',
                     pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.record.Record', route_name='record',
                    permission='view', renderer='record.mak')

    config.add_route('comgen', '/comgen', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.comgen.ComGen', renderer='json',
                    route_name='comgen', permission='view')

    config.add_route('keywordgen', '/keywordgen', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.comgen.KeywordGen', renderer='json',
                    route_name='keywordgen')

    config.add_route('login', '/login', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.login.Login', renderer='login.mak', route_name='login',
                    request_method='POST', attr='post', permission=NO_PERMISSION_REQUIRED)
    config.add_view('offlinetools.views.login.Login', renderer='login.mak', route_name='login',
                    attr='get', permission=NO_PERMISSION_REQUIRED)
    config.add_view('offlinetools.views.login.Login', renderer='login.mak',
                    context='pyramid.httpexceptions.HTTPForbidden', attr='get',
                    permission=NO_PERMISSION_REQUIRED)

    config.add_route('logout', '/logout', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.login.logout', route_name='logout',
                    permission=NO_PERMISSION_REQUIRED)

    config.add_route('register', '/register', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.register.Register', route_name='register',
                    request_method='POST', attr='post', renderer='register.mak',
                    permission=NO_PERMISSION_REQUIRED)
    config.add_view('offlinetools.views.register.Register', route_name='register', attr='get',
                    renderer='register.mak', permission=NO_PERMISSION_REQUIRED)

    config.add_route('updateconfig', '/config', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.register.UpdateUrl', route_name='updateconfig',
                    request_method='POST', attr='post', renderer='updateurl.mak',
                    permission=NO_PERMISSION_REQUIRED)
    config.add_view('offlinetools.views.register.UpdateUrl', route_name='updateconfig',
                    attr='get', renderer='updateurl.mak', permission=NO_PERMISSION_REQUIRED)

    config.add_route('pull', '/pull', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.pull.Pull', route_name='pull', renderer='pull.mak')

    config.add_route('pull_status', '/pullstatus', pregenerator=passvars_pregen,
                     factory='pyramid.traversal.DefaultRootFactory')
    config.add_view('offlinetools.views.pull.PullStatus', route_name='pull_status',
                    renderer='json', permission=NO_PERMISSION_REQUIRED)

    config.add_route('status', '/status', factory='offlinetools.views.status.StatusRootFactory',
                     pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.status.Status', route_name='status',
                    renderer='status.mak', permission='view')

    config.add_subscriber('offlinetools.subscribers.add_renderer_globals',
                          'pyramid.events.BeforeRender')

    config.scan()
    return config.make_wsgi_app()
[ "apscheduler.schedulers.background.BackgroundScheduler", "os.makedirs", "offlinetools.scheduler.key_to_schedule", "offlinetools.models.initialize_sql", "offlinetools.logtools._get_app_data_dir", "pyramid_beaker.session_factory_from_settings", "pyramid.authorization.ACLAuthorizationPolicy", "sqlalchemy.create_engine", "pyramid.config.Configurator", "pyramid.authentication.SessionAuthenticationPolicy", "os.path.join", "logging.getLogger", "offlinetools.models.get_config" ]
[((1594, 1627), 'logging.getLogger', 'logging.getLogger', (['"""offlinetools"""'], {}), "('offlinetools')\n", (1611, 1627), False, 'import logging\n'), ((2420, 2439), 'offlinetools.logtools._get_app_data_dir', '_get_app_data_dir', ([], {}), '()\n', (2437, 2439), False, 'from offlinetools.logtools import _get_app_data_dir\n'), ((2454, 2555), 'sqlalchemy.create_engine', 'create_engine', (["('sqlite:///%s\\\\OfflineTools.db' % app_data_dir)"], {'isolation_level': '"""READ UNCOMMITTED"""'}), "('sqlite:///%s\\\\OfflineTools.db' % app_data_dir,\n isolation_level='READ UNCOMMITTED')\n", (2467, 2555), False, 'from sqlalchemy import create_engine\n'), ((2556, 2578), 'offlinetools.models.initialize_sql', 'initialize_sql', (['engine'], {}), '(engine)\n', (2570, 2578), False, 'from offlinetools.models import initialize_sql, get_config\n'), ((2590, 2602), 'offlinetools.models.get_config', 'get_config', ([], {}), '()\n', (2600, 2602), False, 'from offlinetools.models import initialize_sql, get_config\n'), ((2616, 2637), 'apscheduler.schedulers.background.BackgroundScheduler', 'BackgroundScheduler', ([], {}), '()\n', (2635, 2637), False, 'from apscheduler.schedulers.background import BackgroundScheduler\n'), ((2757, 2794), 'os.path.join', 'os.path.join', (['app_data_dir', '"""session"""'], {}), "(app_data_dir, 'session')\n", (2769, 2794), False, 'import os\n'), ((2958, 2997), 'pyramid_beaker.session_factory_from_settings', 'session_factory_from_settings', (['settings'], {}), '(settings)\n', (2987, 2997), False, 'from pyramid_beaker import session_factory_from_settings\n'), ((3018, 3079), 'pyramid.authentication.SessionAuthenticationPolicy', 'SessionAuthenticationPolicy', ([], {'callback': 'groupfinder', 'debug': '(True)'}), '(callback=groupfinder, debug=True)\n', (3045, 3079), False, 'from pyramid.authentication import SessionAuthenticationPolicy\n'), ((3099, 3123), 'pyramid.authorization.ACLAuthorizationPolicy', 'ACLAuthorizationPolicy', ([], {}), '()\n', (3121, 3123), False, 'from pyramid.authorization import ACLAuthorizationPolicy\n'), ((3137, 3372), 'pyramid.config.Configurator', 'Configurator', ([], {'settings': 'settings', 'session_factory': 'session_factory', 'root_factory': 'RootFactory', 'request_factory': '"""offlinetools.request.OfflineToolsRequest"""', 'authentication_policy': 'authn_policy', 'authorization_policy': 'authz_policy'}), "(settings=settings, session_factory=session_factory,\n root_factory=RootFactory, request_factory=\n 'offlinetools.request.OfflineToolsRequest', authentication_policy=\n authn_policy, authorization_policy=authz_policy)\n", (3149, 3372), False, 'from pyramid.config import Configurator\n'), ((2812, 2841), 'os.makedirs', 'os.makedirs', (['session_lock_dir'], {}), '(session_lock_dir)\n', (2823, 2841), False, 'import os\n'), ((2700, 2731), 'offlinetools.scheduler.key_to_schedule', 'key_to_schedule', (['cfg.public_key'], {}), '(cfg.public_key)\n', (2715, 2731), False, 'from offlinetools.scheduler import scheduled_pull, key_to_schedule\n')]
import numpy as np
from tensorflow.keras.models import load_model
import tensorflow as tf
import time

import gen_data
import util
import parametric_si


def run():
    d = 8
    IMG_WIDTH = d
    IMG_HEIGHT = d
    IMG_CHANNELS = 1

    mu_1 = 0
    mu_2 = 2

    threshold = 20

    # np.random.seed(1)

    X_test, Y_test = gen_data.generate(1, IMG_WIDTH, mu_1, mu_2)

    model = load_model('./model/test_' + str(d) + '.h5')
    output = model.predict(X_test, verbose=1)
    output = output.flatten()

    # Threshold the network output into a binary segmentation vector.
    binary_vec = []
    for each_e in output:
        if each_e <= 0.5:
            binary_vec.append(0)
        else:
            binary_vec.append(1)

    # print("Observe", binary_vec)

    X_vec = (X_test.flatten()).reshape((d * d, 1))
    x_obs = X_vec

    eta, etaTx = util.construct_test_statistic(x_obs, binary_vec, d * d)
    u, v = util.compute_u_v(x_obs, eta, d * d)

    list_zk, list_results = parametric_si.run_parametric_si(u, v, model, d, IMG_CHANNELS, threshold)
    z_interval = util.construct_z(binary_vec, list_zk, list_results)

    length = 0
    for interval in z_interval:
        length = length + (interval[1] - interval[0])

    # print(length)
    return length


from mpi4py import MPI

COMM = MPI.COMM_WORLD

start_time = None

if COMM.rank == 0:
    start_time = time.time()
    max_iteration = 120
    no_thread = COMM.size
    iter_each_thread = int(max_iteration / no_thread)
else:
    iter_each_thread = None

iter_each_thread = COMM.bcast(iter_each_thread, root=0)

local_list_length = []

for i in range(iter_each_thread):
    length = run()
    if length is not None:
        local_list_length.append(length)

total_list_length = COMM.gather(local_list_length, root=0)

if COMM.rank == 0:
    total_list_length = [_i for temp in total_list_length for _i in temp]
    print(total_list_length)
    print("--- %s seconds ---" % (time.time() - start_time))
[ "gen_data.generate", "time.time", "parametric_si.run_parametric_si", "util.compute_u_v", "util.construct_z", "util.construct_test_statistic" ]
[((346, 389), 'gen_data.generate', 'gen_data.generate', (['(1)', 'IMG_WIDTH', 'mu_1', 'mu_2'], {}), '(1, IMG_WIDTH, mu_1, mu_2)\n', (363, 389), False, 'import gen_data\n'), ((824, 879), 'util.construct_test_statistic', 'util.construct_test_statistic', (['x_obs', 'binary_vec', '(d * d)'], {}), '(x_obs, binary_vec, d * d)\n', (853, 879), False, 'import util\n'), ((892, 927), 'util.compute_u_v', 'util.compute_u_v', (['x_obs', 'eta', '(d * d)'], {}), '(x_obs, eta, d * d)\n', (908, 927), False, 'import util\n'), ((959, 1031), 'parametric_si.run_parametric_si', 'parametric_si.run_parametric_si', (['u', 'v', 'model', 'd', 'IMG_CHANNELS', 'threshold'], {}), '(u, v, model, d, IMG_CHANNELS, threshold)\n', (990, 1031), False, 'import parametric_si\n'), ((1052, 1103), 'util.construct_z', 'util.construct_z', (['binary_vec', 'list_zk', 'list_results'], {}), '(binary_vec, list_zk, list_results)\n', (1068, 1103), False, 'import util\n'), ((1366, 1377), 'time.time', 'time.time', ([], {}), '()\n', (1375, 1377), False, 'import time\n'), ((1973, 1984), 'time.time', 'time.time', ([], {}), '()\n', (1982, 1984), False, 'import time\n')]
import pytest

from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString
from psh.glob import STAR
from psh.local import make_env

w = lambda w: Word([Id(w)])
a = Word([VarRef(Id("a"))])
echo = lambda out: CommandSequence([Command([Word([Id("echo")]), Word([ConstantString(out)])])])
x = w("x")
cmd = lambda *cs: CommandSequence([Command([*cs])])
star = Word([STAR])


@pytest.mark.parametrize(("cmd", "variable", "expected"), (
    (CommandSequence([Case(a)]), "", ""),
    (CommandSequence([Case(a).with_case(x, echo("foo"))]), "", ""),
    (CommandSequence([Case(a).with_case(x, echo("foo"))]), "y", ""),
    (CommandSequence([Case(a).with_case(x, echo("foo"))]), "x", "foo"),
    (CommandSequence([Case(a).with_case(x, echo("foo")).with_case(star, echo("bar"))]), "", "bar"),
    (CommandSequence([Case(a).with_case(x, echo("foo")).with_case(star, echo("bar"))]), "y", "bar"),
    (CommandSequence([Case(a).with_case(x, echo("foo")).with_case(star, echo("bar"))]), "x", "foo"),
), ids=lambda x: x.replace(" ", "_") if isinstance(x, str) else x)
def test_basic(cmd, variable, expected):
    env = make_env()
    env["a"] = variable
    assert cmd.evaluate(env) == expected
[ "psh.model.ConstantString", "psh.model.Case", "psh.model.Word", "psh.model.Id", "psh.local.make_env", "psh.model.Command" ]
[((384, 396), 'psh.model.Word', 'Word', (['[STAR]'], {}), '([STAR])\n', (388, 396), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n'), ((1130, 1140), 'psh.local.make_env', 'make_env', ([], {}), '()\n', (1138, 1140), False, 'from psh.local import make_env\n'), ((181, 186), 'psh.model.Id', 'Id', (['w'], {}), '(w)\n', (183, 186), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n'), ((206, 213), 'psh.model.Id', 'Id', (['"""a"""'], {}), "('a')\n", (208, 213), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n'), ((360, 374), 'psh.model.Command', 'Command', (['[*cs]'], {}), '([*cs])\n', (367, 374), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n'), ((481, 488), 'psh.model.Case', 'Case', (['a'], {}), '(a)\n', (485, 488), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n'), ((268, 278), 'psh.model.Id', 'Id', (['"""echo"""'], {}), "('echo')\n", (270, 278), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n'), ((288, 307), 'psh.model.ConstantString', 'ConstantString', (['out'], {}), '(out)\n', (302, 307), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n'), ((523, 530), 'psh.model.Case', 'Case', (['a'], {}), '(a)\n', (527, 530), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n'), ((591, 598), 'psh.model.Case', 'Case', (['a'], {}), '(a)\n', (595, 598), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n'), ((660, 667), 'psh.model.Case', 'Case', (['a'], {}), '(a)\n', (664, 667), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n'), ((732, 739), 'psh.model.Case', 'Case', (['a'], {}), '(a)\n', (736, 739), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n'), ((832, 839), 'psh.model.Case', 'Case', (['a'], {}), '(a)\n', (836, 839), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n'), ((933, 940), 'psh.model.Case', 'Case', (['a'], {}), '(a)\n', (937, 940), False, 'from psh.model import Word, Id, CommandSequence, Command, Case, VarRef, ConstantString\n')]
import psutil

from service_stats.stats.globals import Global


class Disk():

    @staticmethod
    def serve_data():
        """Serve disk info"""
        # Title
        disk_title = '=' * 15 + ' Disk Information ' + '=' * 15
        partition_title = 'Partitions and Usage:'

        # Disk Information
        partitions = psutil.disk_partitions()
        disk = ''
        for partition in partitions:
            device = f'=== Device: {partition.device} ==='
            mountpoint = f'  Mountpoint: {partition.mountpoint}'
            filesystem_type = f'  File system type: {partition.fstype}'
            try:
                partition_usage = psutil.disk_usage(partition.mountpoint)
            except PermissionError:
                # Catch errors when a disk isn't ready
                continue
            total_size = f'  Total Size: {Global.get_size(partition_usage.total)}'  # noqa
            used = f'  Used: {Global.get_size(partition_usage.used)}'
            free = f'  Free: {Global.get_size(partition_usage.free)}'
            percentage = f'  Percentage: {partition_usage.percent}%'

            # Combine each disk into a variable
            disk += device + '\n' + mountpoint + '\n' + filesystem_type + \
                '\n' + total_size + '\n' + used + '\n' + free + '\n' + \
                percentage + '\n'

        # Get IO stats since boot
        disk_io = psutil.disk_io_counters()
        total_read = f'Total read (since boot): {Global.get_size(disk_io.read_bytes)}'  # noqa
        total_write = f'Total write (since boot): {Global.get_size(disk_io.write_bytes)}'  # noqa

        final_message = '\n' + disk_title + '\n' + partition_title + \
            '\n' + disk + '\n' + total_read + '\n' + total_write
        return final_message
[ "service_stats.stats.globals.Global.get_size", "psutil.disk_partitions", "psutil.disk_io_counters", "psutil.disk_usage" ]
[((331, 355), 'psutil.disk_partitions', 'psutil.disk_partitions', ([], {}), '()\n', (353, 355), False, 'import psutil\n'), ((1398, 1423), 'psutil.disk_io_counters', 'psutil.disk_io_counters', ([], {}), '()\n', (1421, 1423), False, 'import psutil\n'), ((658, 697), 'psutil.disk_usage', 'psutil.disk_usage', (['partition.mountpoint'], {}), '(partition.mountpoint)\n', (675, 697), False, 'import psutil\n'), ((1473, 1508), 'service_stats.stats.globals.Global.get_size', 'Global.get_size', (['disk_io.read_bytes'], {}), '(disk_io.read_bytes)\n', (1488, 1508), False, 'from service_stats.stats.globals import Global\n'), ((1570, 1606), 'service_stats.stats.globals.Global.get_size', 'Global.get_size', (['disk_io.write_bytes'], {}), '(disk_io.write_bytes)\n', (1585, 1606), False, 'from service_stats.stats.globals import Global\n'), ((856, 894), 'service_stats.stats.globals.Global.get_size', 'Global.get_size', (['partition_usage.total'], {}), '(partition_usage.total)\n', (871, 894), False, 'from service_stats.stats.globals import Global\n'), ((935, 972), 'service_stats.stats.globals.Global.get_size', 'Global.get_size', (['partition_usage.used'], {}), '(partition_usage.used)\n', (950, 972), False, 'from service_stats.stats.globals import Global\n'), ((1005, 1042), 'service_stats.stats.globals.Global.get_size', 'Global.get_size', (['partition_usage.free'], {}), '(partition_usage.free)\n', (1020, 1042), False, 'from service_stats.stats.globals import Global\n')]
from helpers import inputs
from submarine.submarine import Submarine


def solution(day):
    report = inputs.read_to_list(f"inputs/{day}.txt")
    s = Submarine()
    power_consumption = s.diagnostics("power_consumption", report)
    return power_consumption
[ "helpers.inputs.read_to_list", "submarine.submarine.Submarine" ]
[((103, 143), 'helpers.inputs.read_to_list', 'inputs.read_to_list', (['f"""inputs/{day}.txt"""'], {}), "(f'inputs/{day}.txt')\n", (122, 143), False, 'from helpers import inputs\n'), ((152, 163), 'submarine.submarine.Submarine', 'Submarine', ([], {}), '()\n', (161, 163), False, 'from submarine.submarine import Submarine\n')]
from django.contrib import admin

from .models import Proposal, ProposalStatus, Feedback

# Register your models here.
admin.site.register(Proposal)
admin.site.register(ProposalStatus)
admin.site.register(Feedback)
[ "django.contrib.admin.site.register" ]
[((118, 147), 'django.contrib.admin.site.register', 'admin.site.register', (['Proposal'], {}), '(Proposal)\n', (137, 147), False, 'from django.contrib import admin\n'), ((148, 183), 'django.contrib.admin.site.register', 'admin.site.register', (['ProposalStatus'], {}), '(ProposalStatus)\n', (167, 183), False, 'from django.contrib import admin\n'), ((184, 213), 'django.contrib.admin.site.register', 'admin.site.register', (['Feedback'], {}), '(Feedback)\n', (203, 213), False, 'from django.contrib import admin\n')]
import errno
import glob
from os.path import join
from typing import List

from jpype import JClass, getDefaultJVMPath, shutdownJVM, startJVM, java

import Generator as gnr
from Word import Word

path = '1150haber/*.txt'
files = glob.glob(path)

if __name__ == '__main__':
    ZEMBEREK_PATH: str = join("bin/zemberek-full.jar")

    startJVM(
        getDefaultJVMPath(),
        '-ea',
        f'-Djava.class.path={ZEMBEREK_PATH}',
        convertStrings=False
    )

    TurkishSentenceExtractor: JClass = JClass(
        'zemberek.tokenization.TurkishSentenceExtractor'
    )
    extractor: TurkishSentenceExtractor = TurkishSentenceExtractor.DEFAULT

    TurkishMorphology: JClass = JClass('zemberek.morphology.TurkishMorphology')
    morphology: TurkishMorphology = TurkishMorphology.createWithDefaults()

    Nouns = []
    Adjectives = []
    Verbs = []
    Conjunctions = []
    PostPositives = []
    all_words = []

    # Read every file and distribute its words into five lists
    # according to their part-of-speech tags.
    for name in files:
        try:
            with open(name) as f:
                sentences = extractor.fromParagraph(f.read())
                for i, word in enumerate(sentences):
                    x = f'{word}'
                    sentence: str = x
                    analysis: java.util.ArrayList = (
                        morphology.analyzeAndDisambiguate(sentence).bestAnalysis()
                    )
                    pos: List[str] = []
                    for i, analysis in enumerate(analysis, start=1):
                        if f'{analysis.getPos()}' != "Punctuation":
                            x = f'{analysis}'
                            p = x.find(':')  # cleaning data
                            x = x[1:p]       # cleaning data
                            all_words.append(Word(x, gnr.get_weight(x), f'{analysis.getPos()}'))
                            if f'{analysis.getPos()}' == 'Noun':
                                Nouns.append(Word(x, gnr.get_weight(x), f'{analysis.getPos()}'))
                            if f'{analysis.getPos()}' == 'Verb':
                                Verbs.append(Word(x, gnr.get_weight(x), f'{analysis.getPos()}'))
                            if f'{analysis.getPos()}' == 'Conjunction':
                                Conjunctions.append(Word(x, gnr.get_weight(x), f'{analysis.getPos()}'))
                            if f'{analysis.getPos()}' == 'PostPositive':
                                PostPositives.append(Word(x, gnr.get_weight(x), f'{analysis.getPos()}'))
                            if f'{analysis.getPos()}' == 'Adjective':
                                Adjectives.append(Word(x, gnr.get_weight(x), f'{analysis.getPos()}'))
                        else:
                            continue
        except IOError as exc:
            if exc.errno != errno.EISDIR:
                raise

    w_sentences = gnr.generate_sentences(300, 1000, Nouns, Verbs, Adjectives,
                                          Conjunctions, PostPositives)
    for i in w_sentences:
        print('weight -> ', i.lentgth_of_sentence(), ' ', end='')
        w = i.words
        for t in w:
            print(t.name, ' ', end='')
        print('')

    w_words = gnr.generate_random_weighted_words(all_words, 25, 100)
    for i in w_words:
        print(i.name, ' ', i.weight)

    shutdownJVM()
[ "jpype.shutdownJVM", "jpype.JClass", "Generator.generate_random_weighted_words", "Generator.get_weight", "jpype.getDefaultJVMPath", "Generator.generate_sentences", "glob.glob", "os.path.join" ]
[((227, 242), 'glob.glob', 'glob.glob', (['path'], {}), '(path)\n', (236, 242), False, 'import glob\n'), ((2847, 2939), 'Generator.generate_sentences', 'gnr.generate_sentences', (['(300)', '(1000)', 'Nouns', 'Verbs', 'Adjectives', 'Conjunctions', 'PostPositives'], {}), '(300, 1000, Nouns, Verbs, Adjectives, Conjunctions,\n PostPositives)\n', (2869, 2939), True, 'import Generator as gnr\n'), ((3112, 3166), 'Generator.generate_random_weighted_words', 'gnr.generate_random_weighted_words', (['all_words', '(25)', '(100)'], {}), '(all_words, 25, 100)\n', (3146, 3166), True, 'import Generator as gnr\n'), ((3219, 3232), 'jpype.shutdownJVM', 'shutdownJVM', ([], {}), '()\n', (3230, 3232), False, 'from jpype import JClass, getDefaultJVMPath, shutdownJVM, startJVM, java\n'), ((297, 326), 'os.path.join', 'join', (['"""bin/zemberek-full.jar"""'], {}), "('bin/zemberek-full.jar')\n", (301, 326), False, 'from os.path import join\n'), ((507, 563), 'jpype.JClass', 'JClass', (['"""zemberek.tokenization.TurkishSentenceExtractor"""'], {}), "('zemberek.tokenization.TurkishSentenceExtractor')\n", (513, 563), False, 'from jpype import JClass, getDefaultJVMPath, shutdownJVM, startJVM, java\n'), ((687, 734), 'jpype.JClass', 'JClass', (['"""zemberek.morphology.TurkishMorphology"""'], {}), "('zemberek.morphology.TurkishMorphology')\n", (693, 734), False, 'from jpype import JClass, getDefaultJVMPath, shutdownJVM, startJVM, java\n'), ((350, 369), 'jpype.getDefaultJVMPath', 'getDefaultJVMPath', ([], {}), '()\n', (367, 369), False, 'from jpype import JClass, getDefaultJVMPath, shutdownJVM, startJVM, java\n'), ((1750, 1767), 'Generator.get_weight', 'gnr.get_weight', (['x'], {}), '(x)\n', (1764, 1767), True, 'import Generator as gnr\n'), ((1917, 1934), 'Generator.get_weight', 'gnr.get_weight', (['x'], {}), '(x)\n', (1931, 1934), True, 'import Generator as gnr\n'), ((2080, 2097), 'Generator.get_weight', 'gnr.get_weight', (['x'], {}), '(x)\n', (2094, 2097), True, 'import Generator as gnr\n'), ((2257, 2274), 'Generator.get_weight', 'gnr.get_weight', (['x'], {}), '(x)\n', (2271, 2274), True, 'import Generator as gnr\n'), ((2443, 2460), 'Generator.get_weight', 'gnr.get_weight', (['x'], {}), '(x)\n', (2457, 2460), True, 'import Generator as gnr\n'), ((2624, 2641), 'Generator.get_weight', 'gnr.get_weight', (['x'], {}), '(x)\n', (2638, 2641), True, 'import Generator as gnr\n')]
# Generated by Django 3.2.4 on 2021-07-01 07:15

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('crud', '0007_auto_20210701_0713'),
    ]

    operations = [
        migrations.AlterField(
            model_name='cat',
            name='color',
            field=models.CharField(blank=True, choices=[('WHITE', '하얀색'), ('GRAY', '회색'), ('YELLOW', '노란색'), ('BLACK', '검은색')], max_length=20, null=True),
        ),
        migrations.AlterField(
            model_name='cat',
            name='gender',
            field=models.CharField(blank=True, choices=[('DONT_KNOW', '모름'), ('FEMALE', '암컷'), ('MALE', '수컷')], max_length=20, null=True),
        ),
        migrations.AlterField(
            model_name='cat',
            name='neutering',
            field=models.CharField(blank=True, choices=[('DONT_KNOW', '모름'), ('O', 'O'), ('X', 'X')], max_length=10, null=True),
        ),
        migrations.AlterField(
            model_name='cat',
            name='upload_user',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='upload', to=settings.AUTH_USER_MODEL),
        ),
    ]
[ "django.db.models.CharField", "django.db.migrations.swappable_dependency", "django.db.models.ForeignKey" ]
[((227, 284), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (258, 284), False, 'from django.db import migrations, models\n'), ((462, 602), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('WHITE', '하얀색'), ('GRAY', '회색'), ('YELLOW', '노란색'), ('BLACK', '검은색')]", 'max_length': '(20)', 'null': '(True)'}), "(blank=True, choices=[('WHITE', '하얀색'), ('GRAY', '회색'), (\n 'YELLOW', '노란색'), ('BLACK', '검은색')], max_length=20, null=True)\n", (478, 602), False, 'from django.db import migrations, models\n'), ((716, 839), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('DONT_KNOW', '모름'), ('FEMALE', '암컷'), ('MALE', '수컷')]", 'max_length': '(20)', 'null': '(True)'}), "(blank=True, choices=[('DONT_KNOW', '모름'), ('FEMALE', '암컷'),\n ('MALE', '수컷')], max_length=20, null=True)\n", (732, 839), False, 'from django.db import migrations, models\n'), ((957, 1070), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('DONT_KNOW', '모름'), ('O', 'O'), ('X', 'X')]", 'max_length': '(10)', 'null': '(True)'}), "(blank=True, choices=[('DONT_KNOW', '모름'), ('O', 'O'), ('X',\n 'X')], max_length=10, null=True)\n", (973, 1070), False, 'from django.db import migrations, models\n'), ((1190, 1333), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""upload"""', 'to': 'settings.AUTH_USER_MODEL'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.SET_NULL, related_name='upload', to=settings.AUTH_USER_MODEL)\n", (1207, 1333), False, 'from django.db import migrations, models\n')]
import torch
import torch.nn as nn
import numpy as np


class DQN(nn.Module):
    '''
    pytorch CNN model for Atari games
    '''

    def __init__(self, img_shape, num_actions):
        super(DQN, self).__init__()
        self._conv = nn.Sequential(
            nn.Conv2d(4, 16, kernel_size=5, stride=2),
            nn.BatchNorm2d(16),
            nn.Conv2d(16, 32, kernel_size=5, stride=2),
            nn.BatchNorm2d(32),
            nn.Conv2d(32, 64, kernel_size=5, stride=2),
            nn.BatchNorm2d(64)
        )
        convw = img_shape[0]
        convh = img_shape[1]
        # Track the spatial size through the three conv layers.
        for i in range(3):
            convw = self._getConvSize(convw, 5, 2)
            convh = self._getConvSize(convh, 5, 2)
        linear_input_size = convh * convw * 64
        self._linear = nn.Sequential(
            nn.Linear(linear_input_size, 512),
            nn.ReLU(),
            nn.Linear(512, num_actions)
        )
        self.num_actions = num_actions

    def _getConvSize(self, size, size_kernal, stride):
        '''
        get the tensor size after a Conv operation
        :param size:
        :param size_kernal:
        :param stride:
        :return:
        '''
        return (size - (size_kernal - 1) - 1) // stride + 1

    def forward(self, img_in):
        '''
        :param img_in: input image: N*C*W*H
        :return: Q-values of actions N*num_actions
        '''
        x = self._conv(img_in)
        x = x.view(x.size(0), -1)
        return self._linear(x)

    def _selectAction(self, img_in, eps_threshold):
        '''
        select action according to Q values (epsilon-greedy)
        :param img_in: input images
        :return: action selected
        '''
        sample = np.random.random()
        if sample > eps_threshold:
            with torch.no_grad():
                q_value = self.forward(img_in)
                return q_value.max(1)[1].item()
        else:
            return np.random.randint(0, self.num_actions)


def main():
    '''
    unit test
    '''
    dqn = DQN((100, 100, 3), 4)
    dqn.eval()
    img = torch.Tensor(np.zeros((1, 4, 100, 100)))
    q = dqn.forward(img)
    print(q)
    print(q.max(1))
    print(dqn._selectAction(img, 0.01))
    print("finish test")


if __name__ == "__main__":
    main()
[ "torch.nn.ReLU", "torch.nn.Conv2d", "numpy.zeros", "torch.nn.BatchNorm2d", "numpy.random.random", "numpy.random.randint", "torch.nn.Linear", "torch.no_grad" ]
[((1618, 1636), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (1634, 1636), True, 'import numpy as np\n'), ((2027, 2053), 'numpy.zeros', 'np.zeros', (['(1, 4, 100, 100)'], {}), '((1, 4, 100, 100))\n', (2035, 2053), True, 'import numpy as np\n'), ((257, 298), 'torch.nn.Conv2d', 'nn.Conv2d', (['(4)', '(16)'], {'kernel_size': '(5)', 'stride': '(2)'}), '(4, 16, kernel_size=5, stride=2)\n', (266, 298), True, 'import torch.nn as nn\n'), ((309, 327), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(16)'], {}), '(16)\n', (323, 327), True, 'import torch.nn as nn\n'), ((341, 383), 'torch.nn.Conv2d', 'nn.Conv2d', (['(16)', '(32)'], {'kernel_size': '(5)', 'stride': '(2)'}), '(16, 32, kernel_size=5, stride=2)\n', (350, 383), True, 'import torch.nn as nn\n'), ((394, 412), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (408, 412), True, 'import torch.nn as nn\n'), ((426, 468), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(64)'], {'kernel_size': '(5)', 'stride': '(2)'}), '(32, 64, kernel_size=5, stride=2)\n', (435, 468), True, 'import torch.nn as nn\n'), ((479, 497), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (493, 497), True, 'import torch.nn as nn\n'), ((772, 805), 'torch.nn.Linear', 'nn.Linear', (['linear_input_size', '(512)'], {}), '(linear_input_size, 512)\n', (781, 805), True, 'import torch.nn as nn\n'), ((818, 827), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (825, 827), True, 'import torch.nn as nn\n'), ((841, 868), 'torch.nn.Linear', 'nn.Linear', (['(512)', 'num_actions'], {}), '(512, num_actions)\n', (850, 868), True, 'import torch.nn as nn\n'), ((1830, 1868), 'numpy.random.randint', 'np.random.randint', (['(0)', 'self.num_actions'], {}), '(0, self.num_actions)\n', (1847, 1868), True, 'import numpy as np\n'), ((1689, 1704), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1702, 1704), False, 'import torch\n')]
from curris.test.base import compare_json


def test_script():
    compare_json('curris/test/resource/script.md', 'curris/test/resource/script.json')
[ "curris.test.base.compare_json" ]
[((66, 152), 'curris.test.base.compare_json', 'compare_json', (['"""curris/test/resource/script.md"""', '"""curris/test/resource/script.json"""'], {}), "('curris/test/resource/script.md',\n 'curris/test/resource/script.json')\n", (78, 152), False, 'from curris.test.base import compare_json\n')]
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the file-like object implementation using pysmraw."""

import os
import unittest

from dfvfs.path import raw_path_spec
from dfvfs.path import os_path_spec

from tests.file_io import test_lib


class RawFileTest(test_lib.ImageFileTestCase):
  """The unit test for the RAW storage media image file-like object."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    super(RawFileTest, self).setUp()
    test_file = os.path.join(u'test_data', u'ímynd.dd')
    path_spec = os_path_spec.OSPathSpec(location=test_file)
    self._raw_path_spec = raw_path_spec.RawPathSpec(parent=path_spec)

  def testOpenCloseInode(self):
    """Test the open and close functionality using an inode."""
    self._TestOpenCloseInode(self._raw_path_spec)

  def testOpenCloseLocation(self):
    """Test the open and close functionality using a location."""
    self._TestOpenCloseLocation(self._raw_path_spec)

  def testSeek(self):
    """Test the seek functionality."""
    self._TestSeek(self._raw_path_spec)

  def testRead(self):
    """Test the read functionality."""
    self._TestRead(self._raw_path_spec)


class SplitRawFileTest(test_lib.ImageFileTestCase):
  """The unit test for the split storage media image file-like object."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    super(SplitRawFileTest, self).setUp()
    test_file = os.path.join(u'test_data', u'image.raw.000')
    path_spec = os_path_spec.OSPathSpec(location=test_file)
    self._raw_path_spec = raw_path_spec.RawPathSpec(parent=path_spec)

  def testOpenCloseInode(self):
    """Test the open and close functionality using an inode."""
    self._TestOpenCloseInode(self._raw_path_spec)

  def testOpenCloseLocation(self):
    """Test the open and close functionality using a location."""
    self._TestOpenCloseLocation(self._raw_path_spec)

  def testSeek(self):
    """Test the seek functionality."""
    self._TestSeek(self._raw_path_spec)

  def testRead(self):
    """Test the read functionality."""
    self._TestRead(self._raw_path_spec)


if __name__ == '__main__':
  unittest.main()
[ "unittest.main", "dfvfs.path.raw_path_spec.RawPathSpec", "os.path.join", "dfvfs.path.os_path_spec.OSPathSpec" ]
[((2160, 2175), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2173, 2175), False, 'import unittest\n'), ((502, 541), 'os.path.join', 'os.path.join', (['u"""test_data"""', 'u"""ímynd.dd"""'], {}), "(u'test_data', u'ímynd.dd')\n", (514, 541), False, 'import os\n'), ((558, 601), 'dfvfs.path.os_path_spec.OSPathSpec', 'os_path_spec.OSPathSpec', ([], {'location': 'test_file'}), '(location=test_file)\n', (581, 601), False, 'from dfvfs.path import os_path_spec\n'), ((628, 671), 'dfvfs.path.raw_path_spec.RawPathSpec', 'raw_path_spec.RawPathSpec', ([], {'parent': 'path_spec'}), '(parent=path_spec)\n', (653, 671), False, 'from dfvfs.path import raw_path_spec\n'), ((1448, 1492), 'os.path.join', 'os.path.join', (['u"""test_data"""', 'u"""image.raw.000"""'], {}), "(u'test_data', u'image.raw.000')\n", (1460, 1492), False, 'import os\n'), ((1509, 1552), 'dfvfs.path.os_path_spec.OSPathSpec', 'os_path_spec.OSPathSpec', ([], {'location': 'test_file'}), '(location=test_file)\n', (1532, 1552), False, 'from dfvfs.path import os_path_spec\n'), ((1579, 1622), 'dfvfs.path.raw_path_spec.RawPathSpec', 'raw_path_spec.RawPathSpec', ([], {'parent': 'path_spec'}), '(parent=path_spec)\n', (1604, 1622), False, 'from dfvfs.path import raw_path_spec\n')]
# Third exercise using PDF libraries in Python

# Importing libraries and frameworks
import PyPDF2

# Defining Global Variables
pdf1File = open('meetingminutes.pdf', 'rb')
pdf2File = open('meetingminutes2.pdf', 'rb')
pdf1Reader = PyPDF2.PdfFileReader(pdf1File)
pdf2Reader = PyPDF2.PdfFileReader(pdf2File)

# Program logic: copy every page of both inputs into one output PDF
pdfWriter = PyPDF2.PdfFileWriter()

for pageNum in range(pdf1Reader.numPages):
    pageObject = pdf1Reader.getPage(pageNum)
    pdfWriter.addPage(pageObject)

for pageNum in range(pdf2Reader.numPages):
    pageObject = pdf2Reader.getPage(pageNum)
    pdfWriter.addPage(pageObject)

pdfOutputFile = open('combinedminutes.pdf', 'wb')
pdfWriter.write(pdfOutputFile)

pdfOutputFile.close()
pdf1File.close()
pdf2File.close()
[ "PyPDF2.PdfFileReader", "PyPDF2.PdfFileWriter" ]
[((242, 272), 'PyPDF2.PdfFileReader', 'PyPDF2.PdfFileReader', (['pdf1File'], {}), '(pdf1File)\n', (262, 272), False, 'import PyPDF2\n'), ((287, 317), 'PyPDF2.PdfFileReader', 'PyPDF2.PdfFileReader', (['pdf2File'], {}), '(pdf2File)\n', (307, 317), False, 'import PyPDF2\n'), ((352, 374), 'PyPDF2.PdfFileWriter', 'PyPDF2.PdfFileWriter', ([], {}), '()\n', (372, 374), False, 'import PyPDF2\n')]
from setuptools import setup

package_name = 'awsiotcore_to_navigation2'

setup(
    name=package_name,
    version='0.1.0',
    packages=[package_name],
    data_files=[
        ('share/ament_index/resource_index/packages',
         ['resource/' + package_name]),
        ('share/' + package_name, ['package.xml']),
    ],
    install_requires=['setuptools'],
    zip_safe=True,
    maintainer='<NAME>',
    maintainer_email='<EMAIL>',
    description='Receive positional information '
                'from AWS IoT Core and send it to Navigation2',
    license='MIT',
    tests_require=['pytest'],
    entry_points={
        'console_scripts': [
            'iotcore_to_nav2 = '
            'awsiotcore_to_navigation2.awsiotcore_to_nav2_node:main'
        ],
    },
)
[ "setuptools.setup" ]
[((74, 656), 'setuptools.setup', 'setup', ([], {'name': 'package_name', 'version': '"""0.1.0"""', 'packages': '[package_name]', 'data_files': "[('share/ament_index/resource_index/packages', ['resource/' + package_name]\n ), ('share/' + package_name, ['package.xml'])]", 'install_requires': "['setuptools']", 'zip_safe': '(True)', 'maintainer': '"""<NAME>"""', 'maintainer_email': '"""<EMAIL>"""', 'description': '"""Receive positional information from AWS IoT Core and send it to Navigation2"""', 'license': '"""MIT"""', 'tests_require': "['pytest']", 'entry_points': "{'console_scripts': [\n 'iotcore_to_nav2 = awsiotcore_to_navigation2.awsiotcore_to_nav2_node:main']\n }"}), "(name=package_name, version='0.1.0', packages=[package_name],\n data_files=[('share/ament_index/resource_index/packages', ['resource/' +\n package_name]), ('share/' + package_name, ['package.xml'])],\n install_requires=['setuptools'], zip_safe=True, maintainer='<NAME>',\n maintainer_email='<EMAIL>', description=\n 'Receive positional information from AWS IoT Core and send it to Navigation2'\n , license='MIT', tests_require=['pytest'], entry_points={\n 'console_scripts': [\n 'iotcore_to_nav2 = awsiotcore_to_navigation2.awsiotcore_to_nav2_node:main'\n ]})\n", (79, 656), False, 'from setuptools import setup\n')]
'''
This is an example of a dendrogram plot showing the hierarchical structure
of clustering. Inspired by the "Unsupervised Learning" course on Datacamp.com
Author: <NAME>
'''

# Import the plotting and hierarchical-clustering helpers
import matplotlib.pyplot as plt
from scipy.cluster.hierarchy import linkage, dendrogram

# Import normalize
from sklearn.preprocessing import normalize

# `movements` (price movements) and `companies` (labels) come from the
# course dataset and are assumed to be defined before this point.

# Normalize the movements: normalized_movements
normalized_movements = normalize(movements)

# Calculate the linkage: mergings
mergings = linkage(normalized_movements, 'complete')

# Plot the dendrogram
dendrogram(mergings, labels=companies, leaf_rotation=90, leaf_font_size=6)
plt.show()
[ "sklearn.preprocessing.normalize" ]
[((315, 335), 'sklearn.preprocessing.normalize', 'normalize', (['movements'], {}), '(movements)\n', (324, 335), False, 'from sklearn.preprocessing import normalize\n')]
from PyQt5 import QtQml

from .cvcapture import CVCapture, CVAbstractFilter
from .cvitem import CVItem


def registerTypes(uri="PyCVQML"):
    QtQml.qmlRegisterType(CVCapture, uri, 1, 0, "CVCapture")
    QtQml.qmlRegisterType(CVItem, uri, 1, 0, "CVItem")


def stopCamera():
    CVCapture.stopCamera()
[ "PyQt5.QtQml.qmlRegisterType" ]
[((145, 201), 'PyQt5.QtQml.qmlRegisterType', 'QtQml.qmlRegisterType', (['CVCapture', 'uri', '(1)', '(0)', '"""CVCapture"""'], {}), "(CVCapture, uri, 1, 0, 'CVCapture')\n", (166, 201), False, 'from PyQt5 import QtQml\n'), ((206, 256), 'PyQt5.QtQml.qmlRegisterType', 'QtQml.qmlRegisterType', (['CVItem', 'uri', '(1)', '(0)', '"""CVItem"""'], {}), "(CVItem, uri, 1, 0, 'CVItem')\n", (227, 256), False, 'from PyQt5 import QtQml\n')]
from pyecharts import options as opts
from pyecharts.charts import Bar
from pyecharts.commons.utils import JsCode
from pyecharts.faker import Faker

c = (
    Bar()
    .add_xaxis(Faker.choose())
    .add_yaxis("商家A", Faker.values(), category_gap="60%")
    .set_series_opts(
        itemstyle_opts={
            "normal": {
                "color": JsCode(
                    """new echarts.graphic.LinearGradient(0, 0, 0, 1, [{
                        offset: 0,
                        color: 'rgba(0, 244, 255, 1)'
                    }, {
                        offset: 1,
                        color: 'rgba(0, 77, 167, 1)'
                    }], false)"""
                ),
                "barBorderRadius": [30, 30, 30, 30],
                "shadowColor": "rgb(0, 160, 221)",
            }
        }
    )
    .set_global_opts(title_opts=opts.TitleOpts(title="Bar-渐变圆柱"))
    .render("bar_border_radius.html")
)
[ "pyecharts.charts.Bar", "pyecharts.options.TitleOpts", "pyecharts.faker.Faker.choose", "pyecharts.faker.Faker.values", "pyecharts.commons.utils.JsCode" ]
[((804, 836), 'pyecharts.options.TitleOpts', 'opts.TitleOpts', ([], {'title': '"""Bar-渐变圆柱"""'}), "(title='Bar-渐变圆柱')\n", (818, 836), True, 'from pyecharts import options as opts\n'), ((222, 236), 'pyecharts.faker.Faker.values', 'Faker.values', ([], {}), '()\n', (234, 236), False, 'from pyecharts.faker import Faker\n'), ((350, 608), 'pyecharts.commons.utils.JsCode', 'JsCode', (['"""new echarts.graphic.LinearGradient(0, 0, 0, 1, [{\n offset: 0,\n color: \'rgba(0, 244, 255, 1)\'\n }, {\n offset: 1,\n color: \'rgba(0, 77, 167, 1)\'\n }], false)"""'], {}), '(\n """new echarts.graphic.LinearGradient(0, 0, 0, 1, [{\n offset: 0,\n color: \'rgba(0, 244, 255, 1)\'\n }, {\n offset: 1,\n color: \'rgba(0, 77, 167, 1)\'\n }], false)"""\n )\n', (356, 608), False, 'from pyecharts.commons.utils import JsCode\n'), ((180, 194), 'pyecharts.faker.Faker.choose', 'Faker.choose', ([], {}), '()\n', (192, 194), False, 'from pyecharts.faker import Faker\n'), ((159, 164), 'pyecharts.charts.Bar', 'Bar', ([], {}), '()\n', (162, 164), False, 'from pyecharts.charts import Bar\n')]
#!/usr/bin/env python3
import argparse
import binascii
import struct


def get_state_height(file_handler):
    # Height is stored as a little-endian unsigned 32-bit integer.
    res = file_handler.read(4)
    res = struct.unpack('I', res)[0]
    return res


def get_block_hash(file_handler):
    # The 32-byte hash is stored reversed; flip it before hex-encoding.
    res = file_handler.read(32)[::-1]
    res = binascii.hexlify(res).decode('utf-8')
    return res


def get_num_chunks(file_handler):
    res = file_handler.read(4)
    res = struct.unpack('I', res)[0]
    return res


if __name__ == '__main__':
    argparser = argparse.ArgumentParser()
    argparser.add_argument('filename', type=str, help='Name of the state file to load')
    args = argparser.parse_args()

    with open(args.filename, 'rb') as f:
        state_height = get_state_height(f)
        state_latest_block_hash = get_block_hash(f)
        state_num_chunks = get_num_chunks(f)

    print('State file name: {}'.format(args.filename))
    print('')
    print('State block height: {}'.format(state_height))
    print('Latest block hash: {}'.format(state_latest_block_hash))
    print('Number chunks: {}'.format(state_num_chunks))
[ "binascii.hexlify", "struct.unpack", "argparse.ArgumentParser" ]
[((489, 514), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (512, 514), False, 'import argparse\n'), ((148, 171), 'struct.unpack', 'struct.unpack', (['"""I"""', 'res'], {}), "('I', res)\n", (161, 171), False, 'import struct\n'), ((402, 425), 'struct.unpack', 'struct.unpack', (['"""I"""', 'res'], {}), "('I', res)\n", (415, 425), False, 'import struct\n'), ((273, 294), 'binascii.hexlify', 'binascii.hexlify', (['res'], {}), '(res)\n', (289, 294), False, 'import binascii\n')]
# TimeExample1.py
import time

# Print the number of ticks (seconds) elapsed since 12 AM, 1st January 1970
print("No. of total ticks since 1970:", time.time())
[ "time.time" ]
[((137, 148), 'time.time', 'time.time', ([], {}), '()\n', (146, 148), False, 'import time\n')]
import forum
from forum.settings import MAINTAINANCE_MODE, APP_LOGO, APP_TITLE
from forum.http_responses import HttpResponseServiceUnavailable


class RequestUtils(object):

    def process_request(self, request):
        # In maintenance mode, only whitelisted IPs may reach the site.
        if MAINTAINANCE_MODE.value is not None and isinstance(MAINTAINANCE_MODE.value.get('allow_ips', None), list):
            ip = request.META['REMOTE_ADDR']

            if ip not in MAINTAINANCE_MODE.value['allow_ips']:
                return HttpResponseServiceUnavailable(MAINTAINANCE_MODE.value.get('message', ''))

        # Replay POST data saved before a redirect.
        if request.session.get('redirect_POST_data', None):
            request.POST = request.session.pop('redirect_POST_data')
            request.META['REQUEST_METHOD'] = "POST"

        self.request = request
        forum.REQUEST_HOLDER.request = request
        return None

    def process_response(self, request, response):
        forum.REQUEST_HOLDER.request = None
        return response
[ "forum.settings.MAINTAINANCE_MODE.value.get" ]
[((278, 324), 'forum.settings.MAINTAINANCE_MODE.value.get', 'MAINTAINANCE_MODE.value.get', (['"""allow_ips"""', 'None'], {}), "('allow_ips', None)\n", (305, 324), False, 'from forum.settings import MAINTAINANCE_MODE, APP_LOGO, APP_TITLE\n'), ((496, 538), 'forum.settings.MAINTAINANCE_MODE.value.get', 'MAINTAINANCE_MODE.value.get', (['"""message"""', '""""""'], {}), "('message', '')\n", (523, 538), False, 'from forum.settings import MAINTAINANCE_MODE, APP_LOGO, APP_TITLE\n')]
# coding:utf-8 from load_data import load_data, timer from sklearn.linear_model import LogisticRegression from sklearn.naive_bayes import GaussianNB from sklearn.svm import SVC from sklearn.model_selection import StratifiedShuffleSplit from sklearn.model_selection import GridSearchCV import numpy as np import pandas as pd @timer def use_logistic_regression(X_train, y_train, X_test, y_test): model = LogisticRegression() print("Start to train a logistic regression model.") model.fit(X_train, y_train) score = model.score(X_test, y_test) print("Score of logistic regression:", score) @timer def use_naive_bayes(X_train, y_train, X_test, y_test): model = GaussianNB() print("Start to train a naive bayes model.") model.fit(X_train, y_train) score = model.score(X_test, y_test) print("Score of naive bayes:", score) @timer def use_SVM(X_train, y_train, X_test, y_test, kernel="linear"): try: model = SVC(kernel=kernel, C=10.0, gamma=0.001) print("Start to train a SVM model(kernel: {0}).".format(kernel)) model.fit(X_train, y_train) score = model.score(X_test, y_test) print("Score of SVM(kernel: {0}):".format(kernel), score) except: print("Error!") def optimize_SVM(X_train, y_train, X_test, y_test): C_range = np.logspace(-4, 3, 8) gamma_range = np.logspace(-4, 3, 8) kernel_range = ["linear", "rbf"] param_grid = dict(gamma=gamma_range, C=C_range, kernel=kernel_range) grid = GridSearchCV(SVC(), param_grid=param_grid, n_jobs=-1,) grid.fit(X_train[:100], y_train[:100]) print("The best parameters are %s with a score of %0.2f" % (grid.best_params_, grid.best_score_)) if __name__ == '__main__': X_train, y_train, X_test, y_test = load_data() #use_logistic_regression(X_train, y_train, X_test, y_test) #use_naive_bayes(X_train, y_train, X_test, y_test) SVM_kernels = ["linear", "rbf", "sigmoid"] for kernel in SVM_kernels: use_SVM(X_train, y_train, X_test, y_test, kernel) #optimize_SVM(X_train, y_train, X_test, y_test) '''Sample Output: Start to load training data from file. Runtime:54.356s Start to load testing data from file. Runtime:13.156s Start to load training data from feature file. Runtime:0.276s Start to load testinging data from feature file. Runtime:0.068s Start to train a logistic regression model. Score of logistic regression: 0.75 Runtime:0.026s Start to train a naive bayes model. Score of naive bayes: 0.720543806647 Runtime:0.016s Start to train a SVM model(kernel: linear). Score of SVM(kernel: linear): 0.730362537764 Runtime:6.807s Start to train a SVM model(kernel: rbf). Score of SVM(kernel: rbf): 0.690332326284 Runtime:2.324s Start to train a SVM model(kernel: sigmoid). Score of SVM(kernel: sigmoid): 0.615558912387 Runtime:1.207s ''' # The best parameters are {'C': 1, 'gamma': 0.125, 'kernel': 'linear'} with a score of 0.78
[ "sklearn.naive_bayes.GaussianNB", "numpy.logspace", "load_data.load_data", "sklearn.linear_model.LogisticRegression", "sklearn.svm.SVC" ]
[((409, 429), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (427, 429), False, 'from sklearn.linear_model import LogisticRegression\n'), ((685, 697), 'sklearn.naive_bayes.GaussianNB', 'GaussianNB', ([], {}), '()\n', (695, 697), False, 'from sklearn.naive_bayes import GaussianNB\n'), ((1322, 1343), 'numpy.logspace', 'np.logspace', (['(-4)', '(3)', '(8)'], {}), '(-4, 3, 8)\n', (1333, 1343), True, 'import numpy as np\n'), ((1362, 1383), 'numpy.logspace', 'np.logspace', (['(-4)', '(3)', '(8)'], {}), '(-4, 3, 8)\n', (1373, 1383), True, 'import numpy as np\n'), ((1807, 1818), 'load_data.load_data', 'load_data', ([], {}), '()\n', (1816, 1818), False, 'from load_data import load_data, timer\n'), ((959, 998), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': 'kernel', 'C': '(10.0)', 'gamma': '(0.001)'}), '(kernel=kernel, C=10.0, gamma=0.001)\n', (962, 998), False, 'from sklearn.svm import SVC\n'), ((1518, 1523), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (1521, 1523), False, 'from sklearn.svm import SVC\n')]
import pymongo client = pymongo.MongoClient(host='localhost', port=27017) db = client.test collection = db.students result = collection.remove({'name': 'Kevin'}) print(result)
[ "pymongo.MongoClient" ]
[((25, 74), 'pymongo.MongoClient', 'pymongo.MongoClient', ([], {'host': '"""localhost"""', 'port': '(27017)'}), "(host='localhost', port=27017)\n", (44, 74), False, 'import pymongo\n')]
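An aside on the API this row captures: Collection.remove() has been deprecated since PyMongo 3.0. On a current PyMongo install, and assuming a local MongoDB server is running (assumptions about the environment, not something the row above states), the equivalent is delete_one/delete_many, which return a DeleteResult:

import pymongo

client = pymongo.MongoClient(host='localhost', port=27017)
collection = client.test.students

# delete_many removes all matching documents; deleted_count reports how many.
result = collection.delete_many({'name': 'Kevin'})
print(result.deleted_count)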
#! /bin/env python3 # Copyright DEWETRON GmbH 2019 import sys import time sys.path.append('../../../trion_api/python') # Import the core and GUI elements of Qt from PySide2.QtCore import Qt, QObject, QPointF, QTimer, Slot, Signal, QThread from PySide2 import QtGui from PySide2.QtWidgets import * from PySide2.QtCharts import * from dewepxi_load import * from dewepxi_apicore import * from xml.etree import ElementTree as et class MainDialog(QWidget): """ Sample main window """ def __init__(self, parent=None): super(MainDialog, self).__init__(parent) self.chart = QtCharts.QChart() self.chart.setAnimationOptions(QtCharts.QChart.NoAnimation) self.worker = TrionMeasurementWorker(self) self.worker.signal_show_message.connect(self.showStatus, Qt.QueuedConnection) self.worker.add_channel_data.connect(self.addChannelData, Qt.QueuedConnection) self.chart_series = dict() self.setupGUI() self.redrawChart() def setupGUI(self): self.setWindowTitle("TRION Measure qt") self.groupbox_api_selection = QGroupBox("&Select API", self) self.api_trion_api = QRadioButton("&TRION", self) self.api_trionet_api = QRadioButton("&TRIONet", self) layout = QHBoxLayout() layout.addWidget(self.api_trion_api) layout.addWidget(self.api_trionet_api) self.groupbox_api_selection.setLayout(layout) self.groupbox_board_selection = QGroupBox("&Select Board", self) self.cb_trion_board = QComboBox() layout = QVBoxLayout() layout.addWidget(self.cb_trion_board) self.groupbox_board_selection.setLayout(layout) self.groupbox_channel_selection = QGroupBox("&Select Channel", self) self.cb_channel = QComboBox() layout = QVBoxLayout() layout.addWidget(self.cb_channel) self.groupbox_channel_selection.setLayout(layout) self.groupbox_channel_config = QGroupBox("&Channel Config", self) self.cb_range = QComboBox() self.cb_sample_rate = QComboBox() layout = QHBoxLayout() layout.addWidget(self.cb_range) layout.addWidget(self.cb_sample_rate) self.groupbox_channel_config.setLayout(layout) self.statusbar = QStatusBar(self) self.statuslabel = QLabel("Status", self) self.statuslabel.setFrameStyle(QFrame.Panel | QFrame.Sunken) self.statusbar.addPermanentWidget(self.statuslabel, 1) groupbox_chart = QGroupBox("Channel Data", self) self.chart_view = QtCharts.QChartView(self.chart) self.chart_view.setRenderHint(QtGui.QPainter.Antialiasing) self.chart_view.setMinimumSize(400, 200) layout = QVBoxLayout() layout.addWidget(self.chart_view) groupbox_chart.setLayout(layout) def onApiChanged(): if self.api_trion_api.isChecked(): self.worker.selectAPI("TRION") elif self.api_trionet_api.isChecked(): self.worker.selectAPI("TRIONET") self.api_trion_api.toggled.connect(onApiChanged) self.api_trion_api.setChecked(True) main_layout = QVBoxLayout() main_layout.addWidget(self.groupbox_api_selection) main_layout.addWidget(self.groupbox_board_selection) main_layout.addWidget(self.groupbox_channel_selection) main_layout.addWidget(self.groupbox_channel_config) main_layout.addWidget(groupbox_chart) main_layout.addWidget(self.statusbar) self.setLayout(main_layout) @Slot(str, str) def showStatus(self, text, style = "color:black"): """ show text in status bar """ self.statuslabel.setText(text) self.statuslabel.setStyleSheet(style) def initChart(self): self.chart.removeAllSeries() for axis in self.chart.axes(Qt.Horizontal): self.chart.removeAxis(axis) for axis in self.chart.axes(Qt.Vertical): self.chart.removeAxis(axis) def redrawChart(self): self.initChart() def addChannelData(self, channel_data_list): """ Add new sample block """ self.chart.removeAllSeries() series = QtCharts.QLineSeries() series.append(channel_data_list) self.chart.addSeries(series) class TrionMeasurementWorker(QThread): """ Measurement worker thread """ signal_show_message = Signal(str, str) add_channel_data = Signal(list) def __init__(self, parent=None): """ constructor """ QThread.__init__(self, parent) self.gui = parent self.exiting = False self.is_api_loaded = False self.board_id = 0 def run(self): """ ACQ loop """ self.configureChannel() self.configureAcquisition() nReadPos = 0 nAvailSamples = 0 nRawData = 0 sample_index = 0 # Get detailed information about the ring buffer # to be able to handle the wrap around [nErrorCode, nBufEndPos] = DeWeGetParam_i64( self.board_id, CMD_BUFFER_END_POINTER) [nErrorCode, nBufSize] = DeWeGetParam_i32( self.board_id, CMD_BUFFER_TOTAL_MEM_SIZE) nErrorCode = DeWeSetParam_i32( self.board_id, CMD_START_ACQUISITION, 0) while self.exiting==False: # Get the number of samples already stored in the ring buffer [nErrorCode, nAvailSamples] = DeWeGetParam_i32( self.board_id, CMD_BUFFER_AVAIL_NO_SAMPLE) if nAvailSamples > 0: # Get the current read pointer [nErrorCode, nReadPos] = DeWeGetParam_i64( self.board_id, CMD_BUFFER_ACT_SAMPLE_POS) channel_data = [] # Read the current samples from the ring buffer for i in range(0, nAvailSamples): # Get the sample value at the read pointer of the ring buffer nRawData = DeWeGetSampleData(nReadPos) # Print the sample value # print(nRawData) # sys.stdout.flush() channel_data.append(QPointF(sample_index, nRawData)) sample_index += 1 # Increment the read pointer nReadPos = nReadPos + 4 # Handle the ring buffer wrap around if nReadPos > nBufEndPos: nReadPos -= nBufSize # Free the ring buffer after read of all values nErrorCode = DeWeSetParam_i32( self.board_id, CMD_BUFFER_FREE_NO_SAMPLE, nAvailSamples) self.addChannelData(channel_data) # wait for 100ms time.sleep(0.1) nErrorCode = DeWeSetParam_i32( self.board_id, CMD_STOP_ACQUISITION, 0) def startWorker(self): """ Start worker thread """ if not self.isRunning(): self.start() def stopWorker(self): """ Stop worker thread """ if self.isRunning(): self.exiting = True self.terminate() def selectAPI(self, api_name): """ Select and load TRION or TRIONET api.
""" self.stopWorker() if self.is_api_loaded: DeWeSetParam_i32(0, CMD_CLOSE_BOARD_ALL, 0) DeWeDriverDeInit() DeWePxiUnload() if not DeWePxiLoad(api_name): if api_name == "TRION": self.showStatus("dwpxi_api.dll could not be found.") if api_name == "TRIONET": self.showStatus("dwpxi_netapi.dll could not be found.") return self.is_api_loaded = True self.api_backend_name = api_name self.initTrion() self.startWorker() def initTrion(self): """ Initialize TRION (or TRIONET) """ if self.isRunning(): self.showStatus("initTrion not possible with active worker thread") return [nErrorCode, nNoOfBoards] = DeWeDriverInit() if abs(nNoOfBoards) == 0: self.showStatus("No Trion cards found") elif nNoOfBoards < 0: self.showStatus("%d Trion cards found (Simulation)" % abs(nNoOfBoards)) else: self.showStatus("%d Trion cards found" % nNoOfBoards) self.gui.cb_trion_board.clear() self.gui.cb_channel.clear() num_boards = abs(nNoOfBoards) if num_boards > 0: nErrorCode = DeWeSetParam_i32(0, CMD_OPEN_BOARD_ALL, 0) nErrorCode = DeWeSetParam_i32(0, CMD_RESET_BOARD_ALL, 0) for i in range(num_boards): [nErrorCode, board_name] = DeWeGetParamStruct_str("BoardID%d" % i, "BoardName") if len(board_name) == 0: board_name = "Unknown board" self.gui.cb_trion_board.addItem("%d: %s " % ( i, board_name)) [nErrorCode, board_prop_xml] = DeWeGetParamStruct_str("BoardID%d" % i, "BoardProperties") prop_doc = et.fromstring(board_prop_xml) elem_list = prop_doc.findall("ChannelProperties/*") for elem in elem_list: if elem.tag != "XMLVersion": # add channel names self.gui.cb_channel.addItem(elem.tag) def configureAcquisition(self): """ configure Acquisition setup """ # Set configuration to use one board in standalone operation target = "BoardID%d/AcqProp" % self.board_id nErrorCode = DeWeSetParamStruct_str( target, "OperationMode", "Slave") nErrorCode = DeWeSetParamStruct_str( target, "ExtTrigger", "False") nErrorCode = DeWeSetParamStruct_str( target, "ExtClk", "False") nErrorCode = DeWeSetParam_i32(self.board_id, CMD_BUFFER_BLOCK_SIZE, 200) nErrorCode = DeWeSetParam_i32(self.board_id, CMD_BUFFER_BLOCK_COUNT, 50) nErrorCode = DeWeSetParam_i32(self.board_id, CMD_UPDATE_PARAM_ALL, 0) def configureChannel(self): """ configureChannel (has to be called before configureAcquisition) """ nErrorCode = DeWeSetParamStruct_str( "BoardID0/AIAll", "Used", "False") nErrorCode = DeWeSetParamStruct_str( "BoardID0/AI0", "Used", "True") def showStatus(self, text, style = "color:black"): """ show text in status bar """ self.signal_show_message.emit(text, style) def addChannelData(self, channel_data): """ add samples to graph """ self.add_channel_data.emit(channel_data) if __name__ == "__main__": app = QApplication(sys.argv) widget = MainDialog() widget.show() ret = app.exec_() widget.worker.stopWorker() sys.exit(ret)
[ "sys.path.append", "PySide2.QtCore.Slot", "PySide2.QtCore.QPointF", "xml.etree.ElementTree.fromstring", "time.sleep", "PySide2.QtCore.QThread.__init__", "PySide2.QtCore.Signal", "sys.exit" ]
[((75, 119), 'sys.path.append', 'sys.path.append', (['"""../../../trion_api/python"""'], {}), "('../../../trion_api/python')\n", (90, 119), False, 'import sys\n'), ((3586, 3600), 'PySide2.QtCore.Slot', 'Slot', (['str', 'str'], {}), '(str, str)\n', (3590, 3600), False, 'from PySide2.QtCore import Qt, QObject, QPointF, QTimer, Slot, Signal, QThread\n'), ((4445, 4461), 'PySide2.QtCore.Signal', 'Signal', (['str', 'str'], {}), '(str, str)\n', (4451, 4461), False, 'from PySide2.QtCore import Qt, QObject, QPointF, QTimer, Slot, Signal, QThread\n'), ((4485, 4497), 'PySide2.QtCore.Signal', 'Signal', (['list'], {}), '(list)\n', (4491, 4497), False, 'from PySide2.QtCore import Qt, QObject, QPointF, QTimer, Slot, Signal, QThread\n'), ((10898, 10911), 'sys.exit', 'sys.exit', (['ret'], {}), '(ret)\n', (10906, 10911), False, 'import sys\n'), ((4588, 4618), 'PySide2.QtCore.QThread.__init__', 'QThread.__init__', (['self', 'parent'], {}), '(self, parent)\n', (4604, 4618), False, 'from PySide2.QtCore import Qt, QObject, QPointF, QTimer, Slot, Signal, QThread\n'), ((6782, 6797), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (6792, 6797), False, 'import time\n'), ((9149, 9178), 'xml.etree.ElementTree.fromstring', 'et.fromstring', (['board_prop_xml'], {}), '(board_prop_xml)\n', (9162, 9178), True, 'from xml.etree import ElementTree as et\n'), ((6195, 6226), 'PySide2.QtCore.QPointF', 'QPointF', (['sample_index', 'nRawData'], {}), '(sample_index, nRawData)\n', (6202, 6226), False, 'from PySide2.QtCore import Qt, QObject, QPointF, QTimer, Slot, Signal, QThread\n')]
import os import re import subprocess import pythoncom from win32com.client import Dispatch, gencache from tkinter import Tk # from tkinter.filedialog import askopenfilenames from tkinter import filedialog # Connect to the API7 of the KOMPAS 3D application def get_kompas_api7(): module = gencache.EnsureModule("{69AC2981-37C0-4379-84FD-5DD2F3C0A520}", 0, 1, 0) api = module.IKompasAPIObject( Dispatch("Kompas.Application.7")._oleobj_.QueryInterface(module.IKompasAPIObject.CLSID, pythoncom.IID_IDispatch)) const = gencache.EnsureModule("{75C9F5D0-B5B8-4526-8681-9903C567D2ED}", 0, 1, 0).constants return module, api, const # Check whether the KOMPAS 3D application is running def is_running(): proc_list = \ subprocess.Popen('tasklist /NH /FI "IMAGENAME eq KOMPAS*"', shell=False, stdout=subprocess.PIPE).communicate()[0] return True if proc_list else False # Count the number of sheets of each format def amount_sheet(doc7): sheets = {"A0": 0, "A1": 0, "A2": 0, "A3": 0, "A4": 0, "A5": 0} for sheet in range(doc7.LayoutSheets.Count): format = doc7.LayoutSheets.Item(sheet).Format # sheet is the sheet number, counting starts at 0 sheets["A" + str(format.Format)] += 1 * format.FormatMultiplicity return sheets # Read the drawing's title block def stamp(doc7): for sheet in range(doc7.LayoutSheets.Count): style_filename = os.path.basename(doc7.LayoutSheets.Item(sheet).LayoutLibraryFileName) style_number = int(doc7.LayoutSheets.Item(sheet).LayoutStyleNumber) if style_filename.lower() == 'graphic.lyt' and style_number in [1, 3]: stamp = doc7.LayoutSheets.Item(sheet).Stamp return {"Scale": re.findall(r"\d+:\d+", stamp.Text(6).Str)[0], "FirstUsage": stamp.Text(25).Str, # First application "Checked": stamp.Text(111).Str, "TChecked": stamp.Text(112).Str, "NChecked": stamp.Text(114).Str, "Approved": stamp.Text(115).Str, # Approved by "Number": stamp.Text(2).Str, # Document number "Material": stamp.Text(3).Str, # Material "Designer": stamp.Text(110).Str} # Title block template for element lists elif style_filename.lower() == 'eskw_gr.lyt' and style_number == 60: stamp = doc7.LayoutSheets.Item(sheet).Stamp return {"Scale": re.findall(r"\d+:\d+", stamp.Text(6).Str)[0], "FirstUsage": stamp.Text(25).Str, # First application "Checked": stamp.Text(111).Str, "TChecked": stamp.Text(112).Str, "NChecked": stamp.Text(114).Str, "Approved": stamp.Text(115).Str, # Approved by "Number": stamp.Text(2).Str, # Document number "Material": stamp.Text(3).Str, # Material "Designer": stamp.Text(110).Str} elif style_filename.lower() == 'graphic.lyt' and style_number in [17, 51]: stamp = doc7.LayoutSheets.Item(sheet).Stamp # handling of specifications and group specifications return { "FirstUsage": stamp.Text(25).Str, # First application "Checked": stamp.Text(111).Str, "TChecked": stamp.Text(112).Str, "NChecked": stamp.Text(114).Str, "Approved": stamp.Text(115).Str, # Approved by "Number": stamp.Text(2).Str, # Document number # "Material": stamp.Text(3).Str, # Material "Designer": stamp.Text(110).Str} return {} def specWork(doc7): IDrawingDocument = doc7._oleobj_.QueryInterface(module7.NamesToIIDMap['IDrawingDocument'], pythoncom.IID_IDispatch) def parse_design_documents(paths): is_run = is_running() # True if KOMPAS is already running module7, api7, const7 = get_kompas_api7() # Connect to the application app7 = api7.Application # Get the application's main interface app7.Visible = True # Show the window to the user (if hidden) app7.HideMessage = const7.ksHideMessageNo # Answer NO to any prompts from the application table = [] # Create the parameter table
for path in paths: print("Reading file: " + path + "\n") doc7 = app7.Documents.Open(PathName=path, Visible=False, ReadOnly=True) # Open the file hidden, without the right to change it row = amount_sheet(doc7) # Count the sheets of each format row.update(stamp(doc7)) # Read the title block row.update({ "Filename": doc7.Name, # File name }) table.append(row) # Append the parameter row to the table doc7.Close(const7.kdDoNotSaveChanges) # Close the file without saving changes if not is_run: app7.Quit() # Quit the application if needed return table def getKeyFromDict(myDict, myKey): return myDict[myKey] if myKey in myDict else "" def print_to_excel(result): excel = Dispatch("Excel.Application") # Connect to Excel excel.Visible = True # Make the window visible wb = excel.Workbooks.Add() # Add a new workbook sheet = wb.ActiveSheet # Get a reference to the active sheet # Build the table header sheet.Range("A1:Q1").value = ["File name", "Designer", "Checked", "T.Control", "N.Control", "Approved", "First usage", "Decimal number", "Material", "Dimension count", "TR item count", "A0", "A1", "A2", "A3", "A4", "Scale"] # Fill the table for i, row in enumerate(result): sheet.Cells(i + 2, 1).value = row['Filename'] sheet.Cells(i + 2, 2).value = getKeyFromDict(row, 'Designer') sheet.Cells(i + 2, 3).value = getKeyFromDict(row, 'Checked') sheet.Cells(i + 2, 4).value = getKeyFromDict(row, 'TChecked') sheet.Cells(i + 2, 5).value = getKeyFromDict(row, 'NChecked') sheet.Cells(i + 2, 6).value = getKeyFromDict(row, 'Approved') sheet.Cells(i + 2, 7).value = getKeyFromDict(row, 'FirstUsage') sheet.Cells(i + 2, 8).value = getKeyFromDict(row, 'Number') sheet.Cells(i + 2, 9).value = getKeyFromDict(row, 'Material') sheet.Cells(i + 2, 10).value = getKeyFromDict(row, 'CountDim') sheet.Cells(i + 2, 11).value = getKeyFromDict(row, 'CountTD') sheet.Cells(i + 2, 12).value = getKeyFromDict(row, 'A0') sheet.Cells(i + 2, 13).value = getKeyFromDict(row, 'A1') sheet.Cells(i + 2, 14).value = getKeyFromDict(row, 'A2') sheet.Cells(i + 2, 15).value = getKeyFromDict(row, 'A3') sheet.Cells(i + 2, 16).value = getKeyFromDict(row, 'A4') sheet.Cells(i + 2, 17).value = "".join(('="', row['Scale'], '"')) if 'Scale' in row else "" def getFilesFromDir(dirName, listNames): names = os.listdir(dirName) for name in names: fullname = os.path.join(dirName, name).replace("\\", "/") # get the full name ext = os.path.splitext(fullname)[1][1:] if os.path.isfile(fullname) and ext == "cdw" : listNames.append(fullname) elif os.path.isdir(fullname): listNames = getFilesFromDir(fullname, listNames) return listNames if __name__ == "__main__": root = Tk() root.withdraw() # Hide the main window right before the folder chooser appears dirName = filedialog.askdirectory() print("File search directory " + dirName + "\n") listNames = [] filenames = getFilesFromDir(dirName, listNames) # Exclude files in 'old' directories filenames = [filename for filename in filenames if filename.find('/old/') == -1] table = [] if len(filenames) != 0: table += (parse_design_documents(filenames)) else: print("No drawing files found") # Output the report print_to_excel(table) root.destroy() # Destroy the main window root.mainloop()
[ "os.listdir", "subprocess.Popen", "os.path.isdir", "tkinter.filedialog.askdirectory", "os.path.isfile", "os.path.splitext", "win32com.client.Dispatch", "win32com.client.gencache.EnsureModule", "os.path.join", "tkinter.Tk" ]
[((285, 357), 'win32com.client.gencache.EnsureModule', 'gencache.EnsureModule', (['"""{69AC2981-37C0-4379-84FD-5DD2F3C0A520}"""', '(0)', '(1)', '(0)'], {}), "('{69AC2981-37C0-4379-84FD-5DD2F3C0A520}', 0, 1, 0)\n", (306, 357), False, 'from win32com.client import Dispatch, gencache\n'), ((5257, 5286), 'win32com.client.Dispatch', 'Dispatch', (['"""Excel.Application"""'], {}), "('Excel.Application')\n", (5265, 5286), False, 'from win32com.client import Dispatch, gencache\n'), ((7193, 7212), 'os.listdir', 'os.listdir', (['dirName'], {}), '(dirName)\n', (7203, 7212), False, 'import os\n'), ((7627, 7631), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (7629, 7631), False, 'from tkinter import Tk\n'), ((7720, 7745), 'tkinter.filedialog.askdirectory', 'filedialog.askdirectory', ([], {}), '()\n', (7743, 7745), False, 'from tkinter import filedialog\n'), ((592, 664), 'win32com.client.gencache.EnsureModule', 'gencache.EnsureModule', (['"""{75C9F5D0-B5B8-4526-8681-9903C567D2ED}"""', '(0)', '(1)', '(0)'], {}), "('{75C9F5D0-B5B8-4526-8681-9903C567D2ED}', 0, 1, 0)\n", (613, 664), False, 'from win32com.client import Dispatch, gencache\n'), ((7383, 7407), 'os.path.isfile', 'os.path.isfile', (['fullname'], {}), '(fullname)\n', (7397, 7407), False, 'import os\n'), ((7479, 7502), 'os.path.isdir', 'os.path.isdir', (['fullname'], {}), '(fullname)\n', (7492, 7502), False, 'import os\n'), ((799, 899), 'subprocess.Popen', 'subprocess.Popen', (['"""tasklist /NH /FI "IMAGENAME eq KOMPAS*\\""""'], {'shell': '(False)', 'stdout': 'subprocess.PIPE'}), '(\'tasklist /NH /FI "IMAGENAME eq KOMPAS*"\', shell=False,\n stdout=subprocess.PIPE)\n', (815, 899), False, 'import subprocess\n'), ((7255, 7282), 'os.path.join', 'os.path.join', (['dirName', 'name'], {}), '(dirName, name)\n', (7267, 7282), False, 'import os\n'), ((7338, 7364), 'os.path.splitext', 'os.path.splitext', (['fullname'], {}), '(fullname)\n', (7354, 7364), False, 'import os\n'), ((401, 433), 'win32com.client.Dispatch', 'Dispatch', (['"""Kompas.Application.7"""'], {}), "('Kompas.Application.7')\n", (409, 433), False, 'from win32com.client import Dispatch, gencache\n')]
# -*- coding: utf-8 -*- # # Copyright 2015 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """app logs read command.""" from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals from googlecloudsdk.api_lib.app import logs_util from googlecloudsdk.api_lib.logging import common from googlecloudsdk.calliope import base from googlecloudsdk.command_lib.app import flags from googlecloudsdk.core import log from googlecloudsdk.core import properties class Read(base.Command): """Reads log entries for the current App Engine app.""" @staticmethod def Args(parser): """Register flags for this command.""" flags.SERVICE.AddToParser(parser) flags.VERSION.AddToParser(parser) flags.LEVEL.AddToParser(parser) flags.LOGS.AddToParser(parser) parser.add_argument('--limit', required=False, type=int, default=200, help='Number of log entries to show.') def Run(self, args): """This is what gets called when the user runs this command. Args: args: an argparse namespace. All the arguments that were provided to this command invocation. Returns: The list of log entries. """ printer = logs_util.LogPrinter() printer.RegisterFormatter(logs_util.FormatRequestLogEntry) printer.RegisterFormatter(logs_util.FormatNginxLogEntry) printer.RegisterFormatter(logs_util.FormatAppEntry) project = properties.VALUES.core.project.Get(required=True) filters = logs_util.GetFilters(project, args.logs, args.service, args.version, args.level) lines = [] # pylint: disable=g-builtin-op, For the .keys() method for entry in common.FetchLogs(log_filter=' AND '.join(filters), order_by='DESC', limit=args.limit): lines.append(printer.Format(entry)) for line in reversed(lines): log.out.Print(line) Read.detailed_help = { 'DESCRIPTION': """\ Display the latest log entries from stdout, stderr and crash log for the current Google App Engine app in a human readable format. This command requires that the caller have the logging.logEntries.list permission. """, 'EXAMPLES': """\ To display the latest entries for the current app, run: $ {command} To show only the entries with severity at `warning` or higher, run: $ {command} --level=warning To show only the entries with a specific version, run: $ {command} --version=v1 To show only the 10 latest log entries for the default service, run: $ {command} --limit=10 --service=default To show only the logs from the request log for standard apps, run: $ {command} --logs=request_log To show only the logs from the request log for Flex apps, run: $ {command} --logs=nginx.request """, }
[ "googlecloudsdk.core.properties.VALUES.core.project.Get", "googlecloudsdk.api_lib.app.logs_util.GetFilters", "googlecloudsdk.command_lib.app.flags.LEVEL.AddToParser", "googlecloudsdk.command_lib.app.flags.SERVICE.AddToParser", "googlecloudsdk.api_lib.app.logs_util.LogPrinter", "googlecloudsdk.core.log.out.Print", "googlecloudsdk.command_lib.app.flags.LOGS.AddToParser", "googlecloudsdk.command_lib.app.flags.VERSION.AddToParser" ]
[((1201, 1234), 'googlecloudsdk.command_lib.app.flags.SERVICE.AddToParser', 'flags.SERVICE.AddToParser', (['parser'], {}), '(parser)\n', (1226, 1234), False, 'from googlecloudsdk.command_lib.app import flags\n'), ((1239, 1272), 'googlecloudsdk.command_lib.app.flags.VERSION.AddToParser', 'flags.VERSION.AddToParser', (['parser'], {}), '(parser)\n', (1264, 1272), False, 'from googlecloudsdk.command_lib.app import flags\n'), ((1277, 1308), 'googlecloudsdk.command_lib.app.flags.LEVEL.AddToParser', 'flags.LEVEL.AddToParser', (['parser'], {}), '(parser)\n', (1300, 1308), False, 'from googlecloudsdk.command_lib.app import flags\n'), ((1313, 1343), 'googlecloudsdk.command_lib.app.flags.LOGS.AddToParser', 'flags.LOGS.AddToParser', (['parser'], {}), '(parser)\n', (1335, 1343), False, 'from googlecloudsdk.command_lib.app import flags\n'), ((1756, 1778), 'googlecloudsdk.api_lib.app.logs_util.LogPrinter', 'logs_util.LogPrinter', ([], {}), '()\n', (1776, 1778), False, 'from googlecloudsdk.api_lib.app import logs_util\n'), ((1973, 2022), 'googlecloudsdk.core.properties.VALUES.core.project.Get', 'properties.VALUES.core.project.Get', ([], {'required': '(True)'}), '(required=True)\n', (2007, 2022), False, 'from googlecloudsdk.core import properties\n'), ((2037, 2122), 'googlecloudsdk.api_lib.app.logs_util.GetFilters', 'logs_util.GetFilters', (['project', 'args.logs', 'args.service', 'args.version', 'args.level'], {}), '(project, args.logs, args.service, args.version, args.level\n )\n', (2057, 2122), False, 'from googlecloudsdk.api_lib.app import logs_util\n'), ((2481, 2500), 'googlecloudsdk.core.log.out.Print', 'log.out.Print', (['line'], {}), '(line)\n', (2494, 2500), False, 'from googlecloudsdk.core import log\n')]
""" Runs one instance of the Atari environment and optimizes using DQN algorithm. Can use a GPU for the agent (applies to both sample and train). No parallelism employed, so everything happens in one python process; can be easier to debug. The kwarg snapshot_mode="last" to logger context will save the latest model at every log point (see inside the logger for other options). In viskit, whatever (nested) key-value pairs appear in config will become plottable keys for showing several experiments. If you need to add more after an experiment, use rlpyt.utils.logging.context.add_exp_param(). """ from rlpyt.runners.async_rl import AsyncRlEval from rlpyt.samplers.serial.sampler import SerialSampler from rlpyt.envs.atari.atari_env import AtariEnv, AtariTrajInfo from rlpyt.algos.dqn.dqn import DQN from rlpyt.agents.dqn.atari.atari_dqn_agent import AtariDqnAgent from rlpyt.runners.minibatch_rl import MinibatchRlEval from rlpyt.utils.logging.context import logger_context # R2D1 from rlpyt.samplers.parallel.gpu.sampler import GpuSampler from rlpyt.samplers.parallel.gpu.collectors import GpuWaitResetCollector from rlpyt.samplers.async_.gpu_sampler import AsyncGpuSampler from rlpyt.samplers.async_.collectors import DbGpuResetCollector from examples.voxel_r2d1 import configs from rlpyt.algos.dqn.r2d1 import R2D1 from rlpyt.runners.minibatch_rl import MinibatchRl from rlpyt.agents.dqn.atari.atari_r2d1_agent import AtariR2d1Agent from rlpyt.utils.launching.affinity import affinity_from_code, encode_affinity, quick_affinity_code # Voxel from rlpyt.envs.gym import voxel_make def build_and_train(game="TowerBuilding", run_ID=0, cuda_idx=None): # Either manually set the resources for the experiment: affinity_code = encode_affinity( n_cpu_core=2, n_gpu=1, # hyperthread_offset=8, # if auto-detect doesn't work, number of CPU cores # n_socket=1, # if auto-detect doesn't work, can force (or force to 1) run_slot=0, cpu_per_run=1, set_affinity=True, # it can help to restrict workers to individual CPUs ) affinity = affinity_from_code(affinity_code) config = configs["r2d1"] config["env"]["game"] = game config["eval_env"]["game"] = config["env"]["game"] sampler = AsyncGpuSampler( EnvCls=voxel_make, env_kwargs=config["env"], CollectorCls=DbGpuResetCollector, TrajInfoCls=AtariTrajInfo, eval_env_kwargs=config["eval_env"], **config["sampler"] ) algo = R2D1(optim_kwargs=config["optim"], **config["algo"]) agent = AtariR2d1Agent(model_kwargs=config["model"], **config["agent"]) runner = AsyncRlEval( algo=algo, agent=agent, sampler=sampler, affinity=affinity, **config["runner"] ) config = dict(game=game) name = "r2d1_" + game log_dir = "tower_building" with logger_context(log_dir, run_ID, name, config, snapshot_mode="last"): runner.train() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('--game', help='Voxel game', default='TowerBuilding') parser.add_argument('--run_ID', help='run identifier (logging)', type=int, default=0) parser.add_argument('--cuda_idx', help='gpu to use ', type=int, default=None) args = parser.parse_args() build_and_train( game=args.game, run_ID=args.run_ID, cuda_idx=args.cuda_idx, )
[ "rlpyt.samplers.async_.gpu_sampler.AsyncGpuSampler", "rlpyt.utils.launching.affinity.affinity_from_code", "argparse.ArgumentParser", "rlpyt.utils.logging.context.logger_context", "rlpyt.algos.dqn.r2d1.R2D1", "rlpyt.runners.async_rl.AsyncRlEval", "rlpyt.utils.launching.affinity.encode_affinity", "rlpyt.agents.dqn.atari.atari_r2d1_agent.AtariR2d1Agent" ]
[((1739, 1827), 'rlpyt.utils.launching.affinity.encode_affinity', 'encode_affinity', ([], {'n_cpu_core': '(2)', 'n_gpu': '(1)', 'run_slot': '(0)', 'cpu_per_run': '(1)', 'set_affinity': '(True)'}), '(n_cpu_core=2, n_gpu=1, run_slot=0, cpu_per_run=1,\n set_affinity=True)\n', (1754, 1827), False, 'from rlpyt.utils.launching.affinity import affinity_from_code, encode_affinity, quick_affinity_code\n'), ((2104, 2137), 'rlpyt.utils.launching.affinity.affinity_from_code', 'affinity_from_code', (['affinity_code'], {}), '(affinity_code)\n', (2122, 2137), False, 'from rlpyt.utils.launching.affinity import affinity_from_code, encode_affinity, quick_affinity_code\n'), ((2270, 2458), 'rlpyt.samplers.async_.gpu_sampler.AsyncGpuSampler', 'AsyncGpuSampler', ([], {'EnvCls': 'voxel_make', 'env_kwargs': "config['env']", 'CollectorCls': 'DbGpuResetCollector', 'TrajInfoCls': 'AtariTrajInfo', 'eval_env_kwargs': "config['eval_env']"}), "(EnvCls=voxel_make, env_kwargs=config['env'], CollectorCls=\n DbGpuResetCollector, TrajInfoCls=AtariTrajInfo, eval_env_kwargs=config[\n 'eval_env'], **config['sampler'])\n", (2285, 2458), False, 'from rlpyt.samplers.async_.gpu_sampler import AsyncGpuSampler\n'), ((2514, 2566), 'rlpyt.algos.dqn.r2d1.R2D1', 'R2D1', ([], {'optim_kwargs': "config['optim']"}), "(optim_kwargs=config['optim'], **config['algo'])\n", (2518, 2566), False, 'from rlpyt.algos.dqn.r2d1 import R2D1\n'), ((2579, 2642), 'rlpyt.agents.dqn.atari.atari_r2d1_agent.AtariR2d1Agent', 'AtariR2d1Agent', ([], {'model_kwargs': "config['model']"}), "(model_kwargs=config['model'], **config['agent'])\n", (2593, 2642), False, 'from rlpyt.agents.dqn.atari.atari_r2d1_agent import AtariR2d1Agent\n'), ((2656, 2752), 'rlpyt.runners.async_rl.AsyncRlEval', 'AsyncRlEval', ([], {'algo': 'algo', 'agent': 'agent', 'sampler': 'sampler', 'affinity': 'affinity'}), "(algo=algo, agent=agent, sampler=sampler, affinity=affinity, **\n config['runner'])\n", (2667, 2752), False, 'from rlpyt.runners.async_rl import AsyncRlEval\n'), ((3043, 3122), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), '(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n', (3066, 3122), False, 'import argparse\n'), ((2889, 2956), 'rlpyt.utils.logging.context.logger_context', 'logger_context', (['log_dir', 'run_ID', 'name', 'config'], {'snapshot_mode': '"""last"""'}), "(log_dir, run_ID, name, config, snapshot_mode='last')\n", (2903, 2956), False, 'from rlpyt.utils.logging.context import logger_context\n')]
# Copyright 2018 The TensorFlow Authors All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Sequence tagging module.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf from research.cvt_text.corpus_processing import minibatching from research.cvt_text.model import model_helpers from research.cvt_text.model import task_module class TaggingModule(task_module.SemiSupervisedModule): def __init__(self, config, task_name, n_classes, inputs, encoder): super(TaggingModule, self).__init__() self.task_name = task_name self.n_classes = n_classes self.labels = labels = tf.placeholder(tf.float32, [None, None, None], name=task_name + '_labels') class PredictionModule(object): def __init__(self, name, input_reprs, roll_direction=0, activate=True): self.name = name with tf.variable_scope(name + '/predictions'): projected = model_helpers.project(input_reprs, config.projection_size) if activate: projected = tf.nn.relu(projected) self.logits = tf.layers.dense(projected, n_classes, name='predict') targets = labels targets *= (1 - inputs.label_smoothing) targets += inputs.label_smoothing / n_classes self.loss = model_helpers.masked_ce_loss( self.logits, targets, inputs.mask, roll_direction=roll_direction) primary = PredictionModule('primary', ([encoder.uni_reprs, encoder.bi_reprs])) ps = [ PredictionModule('full', ([encoder.uni_reprs, encoder.bi_reprs]), activate=False), PredictionModule('forwards', [encoder.uni_fw]), PredictionModule('backwards', [encoder.uni_bw]), PredictionModule('future', [encoder.uni_fw], roll_direction=1), PredictionModule('past', [encoder.uni_bw], roll_direction=-1), ] self.unsupervised_loss = sum(p.loss for p in ps) self.supervised_loss = primary.loss self.probs = tf.nn.softmax(primary.logits) self.preds = tf.argmax(primary.logits, axis=-1) def update_feed_dict(self, feed, mb): if self.task_name in mb.teacher_predictions: feed[self.labels] = mb.teacher_predictions[self.task_name] elif mb.task_name != 'unlabeled': labels = minibatching.build_array( [[0] + e.labels + [0] for e in mb.examples]) feed[self.labels] = np.eye(self.n_classes)[labels]
[ "tensorflow.nn.softmax", "tensorflow.nn.relu", "research.cvt_text.corpus_processing.minibatching.build_array", "tensorflow.argmax", "tensorflow.layers.dense", "tensorflow.variable_scope", "tensorflow.placeholder", "research.cvt_text.model.model_helpers.project", "research.cvt_text.model.model_helpers.masked_ce_loss", "numpy.eye" ]
[((1326, 1400), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, None, None]'], {'name': "(task_name + '_labels')"}), "(tf.float32, [None, None, None], name=task_name + '_labels')\n", (1340, 1400), True, 'import tensorflow as tf\n'), ((2901, 2930), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['primary.logits'], {}), '(primary.logits)\n', (2914, 2930), True, 'import tensorflow as tf\n'), ((2952, 2986), 'tensorflow.argmax', 'tf.argmax', (['primary.logits'], {'axis': '(-1)'}), '(primary.logits, axis=-1)\n', (2961, 2986), True, 'import tensorflow as tf\n'), ((2118, 2216), 'research.cvt_text.model.model_helpers.masked_ce_loss', 'model_helpers.masked_ce_loss', (['self.logits', 'targets', 'inputs.mask'], {'roll_direction': 'roll_direction'}), '(self.logits, targets, inputs.mask,\n roll_direction=roll_direction)\n', (2146, 2216), False, 'from research.cvt_text.model import model_helpers\n'), ((3217, 3288), 'research.cvt_text.corpus_processing.minibatching.build_array', 'minibatching.build_array', (['[([0] + e.labels + [0]) for e in mb.examples]'], {}), '([([0] + e.labels + [0]) for e in mb.examples])\n', (3241, 3288), False, 'from research.cvt_text.corpus_processing import minibatching\n'), ((1626, 1666), 'tensorflow.variable_scope', 'tf.variable_scope', (["(name + '/predictions')"], {}), "(name + '/predictions')\n", (1643, 1666), True, 'import tensorflow as tf\n'), ((1700, 1758), 'research.cvt_text.model.model_helpers.project', 'model_helpers.project', (['input_reprs', 'config.projection_size'], {}), '(input_reprs, config.projection_size)\n', (1721, 1758), False, 'from research.cvt_text.model import model_helpers\n'), ((1884, 1937), 'tensorflow.layers.dense', 'tf.layers.dense', (['projected', 'n_classes'], {'name': '"""predict"""'}), "(projected, n_classes, name='predict')\n", (1899, 1937), True, 'import tensorflow as tf\n'), ((3336, 3358), 'numpy.eye', 'np.eye', (['self.n_classes'], {}), '(self.n_classes)\n', (3342, 3358), True, 'import numpy as np\n'), ((1828, 1849), 'tensorflow.nn.relu', 'tf.nn.relu', (['projected'], {}), '(projected)\n', (1838, 1849), True, 'import tensorflow as tf\n')]
# A simple MDP where agent has to traverse a specific path # in gridworld - wrong action will throw player back to start or do nothing. # Player is rewarded for reaching new maximum length in the episode. # # State is represented by a positive ndim vector that tells # where the player is. This is designed to mimic coordinate-systems # and also deliberately confuse networks (e.g. might think higher value # on axis 0 means we should take one specific action always) # import random import numpy as np import gym # Fix for older gym versions import gym.spaces def generate_path(game_length: int, ndim: int, num_mines: int, seed: int = 42) -> np.ndarray: """Generate the path player has to follow. Args: game_length: Length of the path to generate ndim: Number of dimensions in the environment num_mines: Number of mines per step seed: Seed used to generate path Returns: path: List of ints, representing actions player should take in each state. mines: List of List of ints, representing which actions are mines in each state. """ path = [] mines = [] gen = np.random.default_rng(seed) for i in range(game_length): action_ordering = gen.permutation(ndim) # First item goes to path, next num_mines go to mines path.append(action_ordering[0].item()) mines.append(action_ordering[1:1 + num_mines].tolist()) return path, mines class DangerousPathEnv(gym.Env): """ A N-dimensional environment where player has to choose the exact correct action at any given location (follow a very specific path). Otherwise game terminates or player stays still, depending on if they hit a mine or not. If `discrete_obs` is True, observation space tells location of player in path. If False, uses continuous observations that tell coordinate-like information of location of the player. `mine_ratio` specifies the amount of mines (terminal states) versus no-move moves per state. 
""" def __init__( self, game_length=100, ndim=2, seed=42, discrete_obs=False, random_action_p=0.0, mine_ratio=1.0 ): super().__init__() self.game_length = game_length self.ndim = ndim self.mine_ratio = mine_ratio self.num_mines_per_step = np.floor(ndim * mine_ratio) self.path, self.mines = generate_path(game_length, ndim, seed) # Emperically found to be a necessary adjustment self.step_size = 1.0 self.discrete_obs = discrete_obs self.random_action_p = random_action_p if discrete_obs: self.observation_space = gym.spaces.Discrete(n=self.game_length) else: self.observation_space = gym.spaces.Box(0, 1, shape=(self.ndim,)) self.action_space = gym.spaces.Discrete(n=self.ndim) self.path_location = 0 self.max_path_location = 0 self.num_steps = 0 self.player_location = np.zeros((self.ndim,)) def step(self, action): if self.random_action_p > 0.0 and random.random() < self.random_action_p: action = self.action_space.sample() done = False reward = 0 action = int(action) if action == self.path[self.path_location]: # You chose wisely self.path_location += 1 # Only reward progressing once if self.path_location > self.max_path_location: reward = 1 self.max_path_location += 1 # Small step sizes self.player_location[action] += self.step_size if self.path_location == (self.game_length - 1): done = True else: # You chose poorly reward = 0 if action in self.mines[self.path_location]: # You chose very poorly, back to start self.path_location = 0 self.player_location = np.zeros((self.ndim,)) self.num_steps += 1 if self.num_steps >= self.game_length: done = True return self.path_location if self.discrete_obs else self.player_location, reward, done, {} def reset(self): self.path_location = 0 self.max_path_location = 0 self.num_steps = 0 self.player_location = np.zeros((self.ndim,)) return self.path_location if self.discrete_obs else self.player_location def seed(self, seed): self.path, self.mines = generate_path(self.game_length, self.ndim, seed)
[ "numpy.floor", "gym.spaces.Discrete", "numpy.zeros", "numpy.random.default_rng", "random.random", "gym.spaces.Box" ]
[((1139, 1166), 'numpy.random.default_rng', 'np.random.default_rng', (['seed'], {}), '(seed)\n', (1160, 1166), True, 'import numpy as np\n'), ((2372, 2399), 'numpy.floor', 'np.floor', (['(ndim * mine_ratio)'], {}), '(ndim * mine_ratio)\n', (2380, 2399), True, 'import numpy as np\n'), ((2869, 2901), 'gym.spaces.Discrete', 'gym.spaces.Discrete', ([], {'n': 'self.ndim'}), '(n=self.ndim)\n', (2888, 2901), False, 'import gym\n'), ((3027, 3049), 'numpy.zeros', 'np.zeros', (['(self.ndim,)'], {}), '((self.ndim,))\n', (3035, 3049), True, 'import numpy as np\n'), ((4374, 4396), 'numpy.zeros', 'np.zeros', (['(self.ndim,)'], {}), '((self.ndim,))\n', (4382, 4396), True, 'import numpy as np\n'), ((2709, 2748), 'gym.spaces.Discrete', 'gym.spaces.Discrete', ([], {'n': 'self.game_length'}), '(n=self.game_length)\n', (2728, 2748), False, 'import gym\n'), ((2800, 2840), 'gym.spaces.Box', 'gym.spaces.Box', (['(0)', '(1)'], {'shape': '(self.ndim,)'}), '(0, 1, shape=(self.ndim,))\n', (2814, 2840), False, 'import gym\n'), ((3121, 3136), 'random.random', 'random.random', ([], {}), '()\n', (3134, 3136), False, 'import random\n'), ((4005, 4027), 'numpy.zeros', 'np.zeros', (['(self.ndim,)'], {}), '((self.ndim,))\n', (4013, 4027), True, 'import numpy as np\n')]
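Since the record above now routes the computed mine count into generate_path (see the fix in the constructor), here is a quick smoke test of the environment — a sketch only: the module name dangerous_path is a hypothetical stand-in for wherever DangerousPathEnv is saved:

from dangerous_path import DangerousPathEnv  # hypothetical module name

env = DangerousPathEnv(game_length=20, ndim=4, seed=0, mine_ratio=0.5)
obs = env.reset()
done, total_reward = False, 0
while not done:
    # a random policy; hitting a mine sends the agent back to the start
    obs, reward, done, info = env.step(env.action_space.sample())
    total_reward += reward
print('episode return:', total_reward)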
# coding: utf-8 from __future__ import division __author__ = "<NAME>" __copyright__ = "Copyright 2017, The Materials Project" __version__ = "1.0" __maintainer__ = "<NAME>" __email__ = "<EMAIL>" __status__ = "Development" __date__ = "July 19, 2017" import os from pymatgen.core import Element from pymatgen.core.structure import PeriodicSite, Structure, Lattice # from pymatgen.entries.computed_entries import ComputedStructureEntry from pymatgen.util.testing import PymatgenTest from pymatgen.analysis.defects.core import Vacancy, DefectEntry from pymatgen.analysis.defects.thermodynamics import DefectPhaseDiagram from pycdt.utils.plotter import DefectPlotter # from pycdt.core.defects_analyzer import ComputedDefect, DefectsAnalyzer class DefectPlotterTest(PymatgenTest): def setUp(self): l = Lattice([[3.52,0.0,2.033], [1.174,3.32,2.033], \ [0.0,0.0,4.066]]) s_bulk = Structure(l, ['Ga', 'As'], \ [[0.0000, 0.0000, 0.0000], \ [0.2500, 0.2500, 0.2500]]) defect_site = PeriodicSite( 'As', [0.25, 0.25, 0.25], l) defect = Vacancy( s_bulk, defect_site, charge = 1.) defect_entry = DefectEntry( defect, 0.) entries = [defect_entry] vbm = 0.2 band_gap = 1. dpd = DefectPhaseDiagram( entries, vbm, band_gap) self.dp = DefectPlotter(dpd) def test_get_plot_form_energy(self): mu_elts = {Element('As'): 0, Element('Ga'): 0} self.dp.get_plot_form_energy(mu_elts).savefig('test.pdf') self.assertTrue(os.path.exists('test.pdf')) os.system('rm test.pdf') # def test_plot_conc_temp(self): # self.dp.plot_conc_temp().savefig('test.pdf') # self.assertTrue(os.path.exists('test.pdf')) # os.system('rm test.pdf') # # def test_plot_carriers_ef(self): # self.dp.plot_carriers_ef().savefig('test.pdf') # self.assertTrue(os.path.exists('test.pdf')) # os.system('rm test.pdf') # def tearDown(self): # self.da import unittest if __name__ == '__main__': unittest.main()
[ "unittest.main", "pymatgen.analysis.defects.core.Vacancy", "pymatgen.core.structure.Lattice", "pymatgen.core.Element", "pycdt.utils.plotter.DefectPlotter", "os.path.exists", "os.system", "pymatgen.core.structure.Structure", "pymatgen.analysis.defects.core.DefectEntry", "pymatgen.analysis.defects.thermodynamics.DefectPhaseDiagram", "pymatgen.core.structure.PeriodicSite" ]
[((2088, 2103), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2101, 2103), False, 'import unittest\n'), ((814, 884), 'pymatgen.core.structure.Lattice', 'Lattice', (['[[3.52, 0.0, 2.033], [1.174, 3.32, 2.033], [0.0, 0.0, 4.066]]'], {}), '([[3.52, 0.0, 2.033], [1.174, 3.32, 2.033], [0.0, 0.0, 4.066]])\n', (821, 884), False, 'from pymatgen.core.structure import PeriodicSite, Structure, Lattice\n'), ((914, 979), 'pymatgen.core.structure.Structure', 'Structure', (['l', "['Ga', 'As']", '[[0.0, 0.0, 0.0], [0.25, 0.25, 0.25]]'], {}), "(l, ['Ga', 'As'], [[0.0, 0.0, 0.0], [0.25, 0.25, 0.25]])\n", (923, 979), False, 'from pymatgen.core.structure import PeriodicSite, Structure, Lattice\n'), ((1053, 1094), 'pymatgen.core.structure.PeriodicSite', 'PeriodicSite', (['"""As"""', '[0.25, 0.25, 0.25]', 'l'], {}), "('As', [0.25, 0.25, 0.25], l)\n", (1065, 1094), False, 'from pymatgen.core.structure import PeriodicSite, Structure, Lattice\n'), ((1113, 1153), 'pymatgen.analysis.defects.core.Vacancy', 'Vacancy', (['s_bulk', 'defect_site'], {'charge': '(1.0)'}), '(s_bulk, defect_site, charge=1.0)\n', (1120, 1153), False, 'from pymatgen.analysis.defects.core import Vacancy, DefectEntry\n'), ((1179, 1203), 'pymatgen.analysis.defects.core.DefectEntry', 'DefectEntry', (['defect', '(0.0)'], {}), '(defect, 0.0)\n', (1190, 1203), False, 'from pymatgen.analysis.defects.core import Vacancy, DefectEntry\n'), ((1292, 1334), 'pymatgen.analysis.defects.thermodynamics.DefectPhaseDiagram', 'DefectPhaseDiagram', (['entries', 'vbm', 'band_gap'], {}), '(entries, vbm, band_gap)\n', (1310, 1334), False, 'from pymatgen.analysis.defects.thermodynamics import DefectPhaseDiagram\n'), ((1354, 1372), 'pycdt.utils.plotter.DefectPlotter', 'DefectPlotter', (['dpd'], {}), '(dpd)\n', (1367, 1372), False, 'from pycdt.utils.plotter import DefectPlotter\n'), ((1596, 1620), 'os.system', 'os.system', (['"""rm test.pdf"""'], {}), "('rm test.pdf')\n", (1605, 1620), False, 'import os\n'), ((1434, 1447), 'pymatgen.core.Element', 'Element', (['"""As"""'], {}), "('As')\n", (1441, 1447), False, 'from pymatgen.core import Element\n'), ((1452, 1465), 'pymatgen.core.Element', 'Element', (['"""Ga"""'], {}), "('Ga')\n", (1459, 1465), False, 'from pymatgen.core import Element\n'), ((1560, 1586), 'os.path.exists', 'os.path.exists', (['"""test.pdf"""'], {}), "('test.pdf')\n", (1574, 1586), False, 'import os\n')]
import globals from discussion_question_manager import DiscussionQuestionManager import pickle import discord from discord.channel import TextChannel from discord.message import Message from discord.ext import tasks from discord import RawReactionActionEvent as RawReaction import logging import os import asyncio logger: logging.Logger class Client(discord.Client): # TODO # have people DM the bot, then the bot DMs me, and I react to the question in the DM with the bot # which i guess I should have a way to toggle off "who asked the question" if they want to be anonymous manager: DiscussionQuestionManager = DiscussionQuestionManager() async def on_ready(self): print("READY!!") await self.start_message_loop() # people react to message to add/remove themselves from the list of people to be notified # if they are already added, they are removed. else they are added. async def change_notifiee(self, reaction: RawReaction): # if we're in test mode if globals.TEST_MODE: # check if the msg is the same as the designated test one if reaction.message_id != globals.TEST_REACTION_MSG_ID: return # otherwise else: # check if it's the same as the designated real one if reaction.message_id != globals.ACTUAL_REACTION_MSG_ID: return if reaction.user_id != globals.KINJO_ID: # don't care if kinjo's reacting, he will always be notified self.manager.change_notifiee(reaction.user_id) async def add_question(self, reaction: RawReaction): # if we're testing, allow me and kinjo if globals.TEST_MODE: if reaction.member.id not in globals.ALLOWED_IDS: return # if we're live, only allow kinjo else: if reaction.member.id != globals.KINJO_ID: return channel: TextChannel = await self.fetch_channel(reaction.channel_id) msg: Message = await channel.fetch_message(reaction.message_id) # add new question to manager self.manager.add_question_from_msg(msg) async def on_raw_reaction_add(self, reaction: RawReaction): if reaction.event_type != "REACTION_ADD": return if reaction.emoji.name == "kaneko_ok": await self.change_notifiee(reaction) if reaction.emoji.name == "✅": await self.add_question(reaction) # save updated manager to file with open("manager.txt", "wb") as f: pickle.dump(self.manager, f) async def on_raw_reaction_remove(self, reaction: RawReaction): if reaction.event_type != "REACTION_REMOVE": return if reaction.emoji.name == "kaneko_ok": await self.change_notifiee(reaction) # save updated manager to file with open("manager.txt", "wb") as f: pickle.dump(self.manager, f) async def start_message_loop(self): while True: if globals.TEST_MODE: await asyncio.sleep(globals.TEST_MESSAGE_DELAY_S) else: await asyncio.sleep(globals.ACTUAL_MESSAGE_DELAY_S) msg = str(self.manager) if globals.TEST_MODE: channel = await self.fetch_channel(globals.TEST_CHANNEL) else: channel = await self.fetch_channel(globals.ACTUAL_CHANNEL) await channel.send(msg) with open("manager.txt", "wb") as f: pickle.dump(self.manager, f) def main(): # set up logging logger = logging.getLogger('discord') logger.setLevel(logging.DEBUG) handler = logging.FileHandler(filename='discord.log', encoding='utf-8', mode='w') handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s')) logger.addHandler(handler) # set up environment tokens try: token = os.environ["GOLDBAR_BOT_TOKEN"] except KeyError as e: print(f"Token {e} not found. Please set your environment variable properly. See README. Exiting.") exit() intents = discord.Intents.default() # choose intents client = Client(intents=intents) # make bot object with open("manager.txt", "rb") as f: # load existing question manager, if any try: client.manager = pickle.load(f) except EOFError: pass # start bot client.run(token) if __name__ == "__main__": main()
[ "pickle.dump", "logging.FileHandler", "asyncio.sleep", "logging.Formatter", "pickle.load", "discussion_question_manager.DiscussionQuestionManager", "discord.Intents.default", "logging.getLogger" ]
[((634, 661), 'discussion_question_manager.DiscussionQuestionManager', 'DiscussionQuestionManager', ([], {}), '()\n', (659, 661), False, 'from discussion_question_manager import DiscussionQuestionManager\n'), ((3705, 3733), 'logging.getLogger', 'logging.getLogger', (['"""discord"""'], {}), "('discord')\n", (3722, 3733), False, 'import logging\n'), ((3783, 3854), 'logging.FileHandler', 'logging.FileHandler', ([], {'filename': '"""discord.log"""', 'encoding': '"""utf-8"""', 'mode': '"""w"""'}), "(filename='discord.log', encoding='utf-8', mode='w')\n", (3802, 3854), False, 'import logging\n'), ((4238, 4263), 'discord.Intents.default', 'discord.Intents.default', ([], {}), '()\n', (4261, 4263), False, 'import discord\n'), ((3880, 3948), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s:%(levelname)s:%(name)s: %(message)s"""'], {}), "('%(asctime)s:%(levelname)s:%(name)s: %(message)s')\n", (3897, 3948), False, 'import logging\n'), ((2637, 2665), 'pickle.dump', 'pickle.dump', (['self.manager', 'f'], {}), '(self.manager, f)\n', (2648, 2665), False, 'import pickle\n'), ((3009, 3037), 'pickle.dump', 'pickle.dump', (['self.manager', 'f'], {}), '(self.manager, f)\n', (3020, 3037), False, 'import pickle\n'), ((4461, 4475), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4472, 4475), False, 'import pickle\n'), ((3625, 3653), 'pickle.dump', 'pickle.dump', (['self.manager', 'f'], {}), '(self.manager, f)\n', (3636, 3653), False, 'import pickle\n'), ((3156, 3199), 'asyncio.sleep', 'asyncio.sleep', (['globals.TEST_MESSAGE_DELAY_S'], {}), '(globals.TEST_MESSAGE_DELAY_S)\n', (3169, 3199), False, 'import asyncio\n'), ((3240, 3285), 'asyncio.sleep', 'asyncio.sleep', (['globals.ACTUAL_MESSAGE_DELAY_S'], {}), '(globals.ACTUAL_MESSAGE_DELAY_S)\n', (3253, 3285), False, 'import asyncio\n')]
# coding: utf-8 import numpy as np import matplotlib.pyplot as plt class Naca_4_digit(object): def __init__(self, int_4, attack_angle_deg, resolution, quasi_equidistant=True, length_adjust=False, from5digit=False): if from5digit == False: self.m = float(int_4[0]) / 100 # maximum camber self.p = float(int_4[1]) / 10 # position of the maximum camber self.t = float(int_4[2:4]) / 100 # maximum thickness self.load_setting(attack_angle_deg, resolution, quasi_equidistant, length_adjust) self.__y_c() self.__dyc_dx() self.__y_t() self.theta = np.arctan(self.dyc_dx) self.get_surface() if quasi_equidistant == True: self.get_quasi_equidistant_line() def load_setting(self, attack_angle_deg, resolution, quasi_equidistant=True, length_adjust=False): self.use_quasi_equidistant = quasi_equidistant self.reshape = length_adjust if quasi_equidistant == True: self.resolution = 100 * resolution else: self.resolution = resolution self.new_resolution = resolution self.attack_angle = attack_angle_deg self.x = np.linspace(start = 0, stop = 1, num = self.resolution) def __y_c(self): x_lt_p = lambda m, p, x: m / (p ** 2) * (2.0 * p * x - x ** 2) x_ge_p = lambda m, p, x: m / ((1 - p) ** 2) * ((1.0 - 2.0 * p) + 2.0 * p * x - x ** 2) m = self.m p = self.p x = self.x if ((p != 0) and (p != 1)): self.y_c = np.where(x < p, x_lt_p(m, p, x), x_ge_p(m, p, x)) elif (p == 0): self.y_c = m * (1.0 - x**2) elif (p == 1): self.y_c = m * (2.0 * x - x ** 2) def __y_t(self): t = self.t x = self.x self.y_t = t / 0.2 * (0.2969 * np.sqrt(x) - 0.1260 * x - 0.3516 * x**2 + 0.2843 * x**3 - 0.1015 * x**4) def __dyc_dx(self): x_lt_p = lambda m, p, x: 2.0 * m / (p ** 2) * (p - x) x_ge_p = lambda m, p, x: 2.0 * m / ((1.0 - p) ** 2) * (p - x) m = self.m p = self.p x = self.x if ((p != 0) and (p != 1)): self.dyc_dx = np.where(x < p, x_lt_p(m, p, x), x_ge_p(m, p, x)) elif (p == 0): self.dyc_dx = - 2.0 * m * x elif (p == 1): self.dyc_dx = 2.0 * m * (1.0 - x) def get_surface(self): # original NACA-4digit wings # upper vec_l = np.full((3, self.resolution), 1.0) vec_u = np.full((3, self.resolution), 1.0) vec_u[0] = self.x - self.y_t * np.sin(self.theta) - 0.5 vec_u[1] = self.y_c + self.y_t * np.cos(self.theta) # lower vec_l[0] = self.x + self.y_t * np.sin(self.theta) - 0.5 vec_l[1] = self.y_c - self.y_t * np.cos(self.theta) attack_angle = self.attack_angle / 180 * (np.pi) rotMat = np.array([[np.cos(attack_angle), np.sin(attack_angle), 0], [- np.sin(attack_angle), np.cos(attack_angle), 0], [0, 0, 1]]) rot_l = rotMat.dot(vec_l) rot_u = rotMat.dot(vec_u) if self.reshape == True: x_min = min(np.min(rot_l[0]), np.min(rot_u[0])) x_max = max(np.max(rot_l[0]), np.max(rot_u[0])) rate = 1.0 / (x_max - x_min) if rate != 1.0: expMat = np.array([[rate, 0, 0], [0, rate, 0], [0, 0, 1]]) rot_l = expMat.dot(rot_l) rot_u = expMat.dot(rot_u) self.x_l = rot_l[0] + 0.5 self.y_l = rot_l[1] + 0.5 self.x_u = rot_u[0] + 0.5 self.y_u = rot_u[1] + 0.5 def plot(self): plt.xlim([0, 1]) plt.ylim([0, 1]) plt.plot(self.x_u, self.y_u) plt.plot(self.x_l, self.y_l) plt.show() def get_quasi_equidistant_line(self): new_resolution = self.new_resolution x_min = min(np.min(self.x_u), np.min(self.x_l)) x_max = max(np.max(self.x_u), np.max(self.x_l)) if self.reshape == False: self.equidistant_x = np.linspace(start = 0, stop = 1, num = new_resolution) else: self.equidistant_x = np.linspace(start=x_min, stop=x_max, num=new_resolution) self.equidistant_y_l = np.zeros(new_resolution) self.equidistant_y_u = np.zeros(new_resolution) for index in range(new_resolution): if ((x_min <= 
self.equidistant_x[index]) and (x_max >= self.equidistant_x[index])): self.equidistant_y_l[index] = self.y_l[np.argmin(np.abs(self.x_l - self.equidistant_x[index]))] self.equidistant_y_u[index] = self.y_u[np.argmin(np.abs(self.x_u - self.equidistant_x[index]))] else: self.equidistant_y_l[index] = -1.0 # outlier self.equidistant_y_u[index] = -1.0 def plot_quasi_equidistant_shape(self): plt.xlim([0, 1]) plt.ylim([0, 1]) plt.plot(self.equidistant_x, self.equidistant_y_u, "o") plt.plot(self.equidistant_x, self.equidistant_y_l, "o") plt.show() def transform2complex(self): z_u_reverse = (self.x_u + 1j * self.y_u)[::-1] z_l = self.x_l + 1j * self.y_l if self.use_quasi_equidistant == True: return np.concatenate([z_u_reverse[::100], z_l[::100], z_u_reverse[0].reshape(-1)]) else: if z_u_reverse[self.resolution - 1] == z_l[0]: return np.concatenate([z_u_reverse, z_l[1:], z_u_reverse[0].reshape(-1)]) else: return np.concatenate([z_u_reverse, z_l, z_u_reverse[0].reshape(-1)]) class Naca_5_digit(Naca_4_digit): def __init__(self, int_5, attack_angle_deg, resolution, quasi_equidistant = True, length_adjust = False, from5digit = True): self.cl = float(int_5[0])*(3.0/2.0) / 10 # designed lift_coefficient self.p = float(int_5[1]) / 2.0 / 100 # position of the maximum camber self.ref = int_5[2] # enable / disable reflect self.t = float(int_5[3:5]) / 100.0 # maximum thickness self.camberline_plofile = int(int_5[0:3]) self.camberline_plofile_table() self.load_setting(attack_angle_deg, resolution, quasi_equidistant, length_adjust) self.__y_c() self.__dyc_dx() super(Naca_5_digit, self).__init__(int_5, attack_angle_deg, resolution, quasi_equidistant = quasi_equidistant, length_adjust = length_adjust, from5digit = True) def __y_c(self): x_lt_m_nr = lambda m, k1, x: k1 / 6.0 * (x ** 3 - 3.0 * m * x ** 2 + m ** 2 * (3.0 - m) * x) x_gt_m_nr = lambda m, k1, x: k1 / 6.0 * m ** 3 * (1.0 - x) x_lt_m_rf = lambda m, k1, k2_k1, x: k1 / 6.0 * ((x - m)**3 - k2_k1 * (1.0-m)**3 * x - m**3 * x + m**3) x_gt_m_rf = lambda m, k1, k2_k1, x: k1 / 6.0 * (k2_k1 * (x - m)**3 - k2_k1 * (1.0 - m)**3 * x - m**3 * x + m**3) m = self.m k1 = self.k1 x = self.x if int(self.ref) == 0: # not reflected self.y_c = np.where(x < m, x_lt_m_nr(m, k1, x), x_gt_m_nr(m, k1, x)) else: k2_k1 = self.k2byk1 self.y_c = np.where(x < m, x_lt_m_rf(m, k1, k2_k1, x), x_gt_m_rf(m, k1, k2_k1, x)) def __dyc_dx(self): x_lt_m_nr = lambda m, k1, x: k1 / 6.0 * (3.0 * x ** 2 - 6.0 * m * x + m ** 2 * (3.0 - m)) x_gt_m_nr = lambda m, k1, x: - k1 / 6.0 * m ** 3 x_lt_m_rf = lambda m, k1, k2_k1, x: k1 / 6.0 * (3.0 * (x - m) ** 2 - k2_k1 * (1.0 - m) ** 3 - m ** 3) x_gt_m_rf = lambda m, k1, k2_k1, x: k1 / 6.0 * (3.0 * k2_k1 * (x - m) ** 2 - k2_k1 * (1.0 - m) ** 3 - m ** 3) m = self.m k1 = self.k1 x = self.x if int(self.ref) == 0: # not reflected self.dyc_dx = np.where(x < m, x_lt_m_nr(m, k1, x), x_gt_m_nr(m, k1, x)) else: k2_k1 = self.k2byk1 self.dyc_dx = np.where(x < m, x_lt_m_rf(m, k1, k2_k1, x), x_gt_m_rf(m, k1, k2_k1, x)) def camberline_plofile_table(self): if self.camberline_plofile == 210: self.m = 0.058 self.k1 = 361.4 elif self.camberline_plofile == 220: self.m = 0.126 self.k1 = 51.64 elif self.camberline_plofile == 230: self.m = 0.2025 self.k1 = 15.957 elif self.camberline_plofile == 240: self.m = 0.29 self.k1 = 6.643 elif self.camberline_plofile == 250: self.m = 0.391 self.k1 = 3.230 elif self.camberline_plofile == 221: self.m = 0.130 self.k1 = 51.990 self.k2byk1 = 0.000764 elif self.camberline_plofile == 231: self.m = 0.217 self.k1 = 15.793 self.k2byk1 = 0.00677 elif 
self.camberline_plofile == 241: self.m = 0.318 self.k1 = 6.520 self.k2byk1 = 0.0303 elif self.camberline_plofile == 251: self.m = 0.441 self.k1 = 3.191 self.k2byk1 = 0.1355 else: print("this wing type is not defined") exit() def main(): deg = 0.0 naca = Naca_4_digit(int_4="0012", attack_angle_deg=deg, resolution=100, quasi_equidistant=True, length_adjust=True) naca.plot() naca.plot_quasi_equidistant_shape() naca = Naca_5_digit(int_5="23012", attack_angle_deg=deg, resolution=100, quasi_equidistant=True, length_adjust=True) naca.plot() naca.plot_quasi_equidistant_shape() if __name__ == '__main__': main()
[ "numpy.full", "matplotlib.pyplot.xlim", "matplotlib.pyplot.show", "numpy.abs", "matplotlib.pyplot.plot", "matplotlib.pyplot.ylim", "numpy.zeros", "numpy.min", "numpy.max", "numpy.array", "numpy.sin", "numpy.linspace", "numpy.cos", "numpy.arctan", "numpy.sqrt" ]
[((646, 668), 'numpy.arctan', 'np.arctan', (['self.dyc_dx'], {}), '(self.dyc_dx)\n', (655, 668), True, 'import numpy as np\n'), ((1227, 1276), 'numpy.linspace', 'np.linspace', ([], {'start': '(0)', 'stop': '(1)', 'num': 'self.resolution'}), '(start=0, stop=1, num=self.resolution)\n', (1238, 1276), True, 'import numpy as np\n'), ((2548, 2582), 'numpy.full', 'np.full', (['(3, self.resolution)', '(1.0)'], {}), '((3, self.resolution), 1.0)\n', (2555, 2582), True, 'import numpy as np\n'), ((2599, 2633), 'numpy.full', 'np.full', (['(3, self.resolution)', '(1.0)'], {}), '((3, self.resolution), 1.0)\n', (2606, 2633), True, 'import numpy as np\n'), ((3735, 3751), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0, 1]'], {}), '([0, 1])\n', (3743, 3751), True, 'import matplotlib.pyplot as plt\n'), ((3760, 3776), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0, 1]'], {}), '([0, 1])\n', (3768, 3776), True, 'import matplotlib.pyplot as plt\n'), ((3785, 3813), 'matplotlib.pyplot.plot', 'plt.plot', (['self.x_u', 'self.y_u'], {}), '(self.x_u, self.y_u)\n', (3793, 3813), True, 'import matplotlib.pyplot as plt\n'), ((3822, 3850), 'matplotlib.pyplot.plot', 'plt.plot', (['self.x_l', 'self.y_l'], {}), '(self.x_l, self.y_l)\n', (3830, 3850), True, 'import matplotlib.pyplot as plt\n'), ((3859, 3869), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3867, 3869), True, 'import matplotlib.pyplot as plt\n'), ((4336, 4360), 'numpy.zeros', 'np.zeros', (['new_resolution'], {}), '(new_resolution)\n', (4344, 4360), True, 'import numpy as np\n'), ((4392, 4416), 'numpy.zeros', 'np.zeros', (['new_resolution'], {}), '(new_resolution)\n', (4400, 4416), True, 'import numpy as np\n'), ((4962, 4978), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0, 1]'], {}), '([0, 1])\n', (4970, 4978), True, 'import matplotlib.pyplot as plt\n'), ((4987, 5003), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0, 1]'], {}), '([0, 1])\n', (4995, 5003), True, 'import matplotlib.pyplot as plt\n'), ((5012, 5067), 'matplotlib.pyplot.plot', 'plt.plot', (['self.equidistant_x', 'self.equidistant_y_u', '"""o"""'], {}), "(self.equidistant_x, self.equidistant_y_u, 'o')\n", (5020, 5067), True, 'import matplotlib.pyplot as plt\n'), ((5076, 5131), 'matplotlib.pyplot.plot', 'plt.plot', (['self.equidistant_x', 'self.equidistant_y_l', '"""o"""'], {}), "(self.equidistant_x, self.equidistant_y_l, 'o')\n", (5084, 5131), True, 'import matplotlib.pyplot as plt\n'), ((5140, 5150), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5148, 5150), True, 'import matplotlib.pyplot as plt\n'), ((3986, 4002), 'numpy.min', 'np.min', (['self.x_u'], {}), '(self.x_u)\n', (3992, 4002), True, 'import numpy as np\n'), ((4004, 4020), 'numpy.min', 'np.min', (['self.x_l'], {}), '(self.x_l)\n', (4010, 4020), True, 'import numpy as np\n'), ((4042, 4058), 'numpy.max', 'np.max', (['self.x_u'], {}), '(self.x_u)\n', (4048, 4058), True, 'import numpy as np\n'), ((4060, 4076), 'numpy.max', 'np.max', (['self.x_l'], {}), '(self.x_l)\n', (4066, 4076), True, 'import numpy as np\n'), ((4146, 4194), 'numpy.linspace', 'np.linspace', ([], {'start': '(0)', 'stop': '(1)', 'num': 'new_resolution'}), '(start=0, stop=1, num=new_resolution)\n', (4157, 4194), True, 'import numpy as np\n'), ((4248, 4304), 'numpy.linspace', 'np.linspace', ([], {'start': 'x_min', 'stop': 'x_max', 'num': 'new_resolution'}), '(start=x_min, stop=x_max, num=new_resolution)\n', (4259, 4304), True, 'import numpy as np\n'), ((2748, 2766), 'numpy.cos', 'np.cos', (['self.theta'], {}), '(self.theta)\n', (2754, 2766), True, 'import numpy as np\n'), 
((2888, 2906), 'numpy.cos', 'np.cos', (['self.theta'], {}), '(self.theta)\n', (2894, 2906), True, 'import numpy as np\n'), ((3239, 3255), 'numpy.min', 'np.min', (['rot_l[0]'], {}), '(rot_l[0])\n', (3245, 3255), True, 'import numpy as np\n'), ((3257, 3273), 'numpy.min', 'np.min', (['rot_u[0]'], {}), '(rot_u[0])\n', (3263, 3273), True, 'import numpy as np\n'), ((3299, 3315), 'numpy.max', 'np.max', (['rot_l[0]'], {}), '(rot_l[0])\n', (3305, 3315), True, 'import numpy as np\n'), ((3317, 3333), 'numpy.max', 'np.max', (['rot_u[0]'], {}), '(rot_u[0])\n', (3323, 3333), True, 'import numpy as np\n'), ((3430, 3479), 'numpy.array', 'np.array', (['[[rate, 0, 0], [0, rate, 0], [0, 0, 1]]'], {}), '([[rate, 0, 0], [0, rate, 0], [0, 0, 1]])\n', (3438, 3479), True, 'import numpy as np\n'), ((2682, 2700), 'numpy.sin', 'np.sin', (['self.theta'], {}), '(self.theta)\n', (2688, 2700), True, 'import numpy as np\n'), ((2822, 2840), 'numpy.sin', 'np.sin', (['self.theta'], {}), '(self.theta)\n', (2828, 2840), True, 'import numpy as np\n'), ((2993, 3013), 'numpy.cos', 'np.cos', (['attack_angle'], {}), '(attack_angle)\n', (2999, 3013), True, 'import numpy as np\n'), ((3015, 3035), 'numpy.sin', 'np.sin', (['attack_angle'], {}), '(attack_angle)\n', (3021, 3035), True, 'import numpy as np\n'), ((3066, 3086), 'numpy.cos', 'np.cos', (['attack_angle'], {}), '(attack_angle)\n', (3072, 3086), True, 'import numpy as np\n'), ((3044, 3064), 'numpy.sin', 'np.sin', (['attack_angle'], {}), '(attack_angle)\n', (3050, 3064), True, 'import numpy as np\n'), ((4622, 4666), 'numpy.abs', 'np.abs', (['(self.x_l - self.equidistant_x[index])'], {}), '(self.x_l - self.equidistant_x[index])\n', (4628, 4666), True, 'import numpy as np\n'), ((4734, 4778), 'numpy.abs', 'np.abs', (['(self.x_u - self.equidistant_x[index])'], {}), '(self.x_u - self.equidistant_x[index])\n', (4740, 4778), True, 'import numpy as np\n'), ((1898, 1908), 'numpy.sqrt', 'np.sqrt', (['x'], {}), '(x)\n', (1905, 1908), True, 'import numpy as np\n')]
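# A minimal usage sketch of the classes above (illustrative only; it mirrors
# what main() already does, but also shows how the closed complex contour can
# be pulled out for downstream use such as panel or conformal-mapping methods).
naca = Naca_4_digit(int_4="2412", attack_angle_deg=5.0, resolution=100,
                    quasi_equidistant=True, length_adjust=True)
z = naca.transform2complex()
# The contour runs over the reversed upper surface, then the lower surface,
# and repeats the first point so that it closes on itself.
print(z.shape, z[0] == z[-1])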
import webapp2 import os import jinja2 from utility import check_secure_val, filterKey, showCount from models import User template_dir = os.path.join(os.path.dirname(__file__), '../views') jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir), autoescape=True) jinja_env.filters['filterKey'] = filterKey jinja_env.filters['showCount'] = showCount def render_str(template, **params): t = jinja_env.get_template(template) return t.render(params) class BlogHandler(webapp2.RequestHandler): def write(self, *a, **kw): self.response.out.write(*a, **kw) def render_str(self, template, **params): return render_str(template, **params) def render(self, template, **kw): self.write(self.render_str(template, **kw)) def get_user_from_cookie(self): random = self.check_for_valid_cookie() if random: return User.get_by_id(int(random)) else: return None def check_for_valid_cookie(self): random = self.request.cookies.get('random') if random: is_valid_cookie = check_secure_val(random) if is_valid_cookie: return self.request.cookies.get('random').split("|")[0] return None
[ "jinja2.FileSystemLoader", "os.path.dirname", "utility.check_secure_val" ]
[((151, 176), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (166, 176), False, 'import os\n'), ((228, 265), 'jinja2.FileSystemLoader', 'jinja2.FileSystemLoader', (['template_dir'], {}), '(template_dir)\n', (251, 265), False, 'import jinja2\n'), ((1135, 1159), 'utility.check_secure_val', 'check_secure_val', (['random'], {}), '(random)\n', (1151, 1159), False, 'from utility import check_secure_val, filterKey, showCount\n')]
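# utility.check_secure_val is imported above but not shown. A common
# HMAC-based implementation matching the "value|signature" cookie layout the
# handler splits on might look roughly like the sketch below; this is an
# assumption (the real module may differ), and SECRET is a placeholder, not
# the project's actual key.
import hashlib
import hmac

SECRET = b'replace-with-a-real-secret'

def make_secure_val(val):
    # Sign the value so a tampered cookie fails verification.
    sig = hmac.new(SECRET, val.encode('utf-8'), hashlib.sha256).hexdigest()
    return '%s|%s' % (val, sig)

def check_secure_val(secure_val):
    # Re-sign the value part and compare against the full cookie string.
    val = secure_val.split('|')[0]
    if secure_val == make_secure_val(val):
        return val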
"""Settings shared by functions for indexing Landsat 8 data""" from rf.utils.io import s3 organization = 'dfac6307-b5ef-43f7-beda-b9f208bb7726' # Band 8 is panchromatic and at 15m resolution. All other bands # are at the 30m resolution. Bands are: # 1: Coastal aerosol # 2: Blue # 3: Green # 4: Red # 5: Near infrared (NIR) # 6: SWIR 1 # 7: SWIR 2 # 8: Panchromatic # 9: Cirrus # 10: Themral infrared (TIRS 1) (resampled to 30m from 100m in product) # 11: Themral infrared (TIRS 2) (resampled to 30m from 100m in product) # # Source: http://landsat.usgs.gov/band_designations_landsat_satellites.php band_lookup = { '15m': [{ 'name': 'panchromatic - 8', 'number': 0, 'wavelength': [500, 680] }], '30m': [{ 'name': 'coastal aerosol - 1', 'number': 0, 'wavelength': [430, 450] }, { 'name': 'blue - 2', 'number': 0, 'wavelength': [450, 510] }, { 'name': 'green - 3', 'number': 0, 'wavelength': [530, 590] }, { 'name': 'red - 4', 'number': 0, 'wavelength': [640, 670] }, { 'name': 'near infrared - 5', 'number': 0, 'wavelength': [850, 880] }, { 'name': 'swir - 6', 'number': 0, 'wavelength': [1570, 1650] }, { 'name': 'swir - 7', 'number': 0, 'wavelength': [2110, 2290] }, { 'name': 'cirrus - 9', 'number': 0, 'wavelength': [1360, 1380] }, { 'name': 'thermal infrared - 10', 'number': 0, 'wavelength': [10600, 11190] }, { 'name': 'thermal infrared - 11', 'number': 0, 'wavelength': [11500, 12510] }] } datasource_id = '697a0b91-b7a8-446e-842c-97cda155554d' usgs_landsat_url = ( 'https://landsat.usgs.gov/landsat/metadata_service/bulk_metadata_files/LANDSAT_8.csv' ) aws_landsat_base = 'http://landsat-pds.s3.amazonaws.com/' bucket_name = 'landsat-pds' bucket = s3.Bucket(bucket_name)
[ "rf.utils.io.s3.Bucket" ]
[((1992, 2014), 'rf.utils.io.s3.Bucket', 's3.Bucket', (['bucket_name'], {}), '(bucket_name)\n', (2001, 2014), False, 'from rf.utils.io import s3\n')]
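# A small illustration of traversing band_lookup as defined above; the helper
# name is illustrative, and the band names and wavelengths come straight from
# the dict.
def find_band(name_fragment):
    """Return (resolution, band dict) for the first band whose name matches."""
    for resolution, bands in band_lookup.items():
        for band in bands:
            if name_fragment in band['name']:
                return resolution, band
    return None

# find_band('near infrared') -> ('30m', {'name': 'near infrared - 5', ...})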
# Generated by Django 2.2.24 on 2021-12-13 14:03 from django.conf import settings from django.db import migrations, models import django.db.models.deletion import main.validators import proposals.utils.proposal_utils import proposals.validators class Migration(migrations.Migration): dependencies = [ ('proposals', '0033_auto_20210521_1154'), ] operations = [ migrations.AlterField( model_name='proposal', name='avg_understood', field=models.BooleanField(default=False, validators=[proposals.validators.AVGUnderstoodValidator], verbose_name='Ik heb kennis genomen van het bovenstaande en begrijp mijn verantwoordelijkheden ten opzichte van de AVG.'), ), migrations.AlterField( model_name='proposal', name='date_start', field=models.DateField(blank=True, null=True, verbose_name='Wat is de beoogde startdatum van het onderzoek waarvoor deze aanvraag wordt ingediend?'), ), migrations.AlterField( model_name='proposal', name='dmp_file', field=models.FileField(blank=True, storage=proposals.utils.proposal_utils.OverwriteStorage(), upload_to=proposals.utils.proposal_utils.FilenameFactory('DMP'), validators=[main.validators.validate_pdf_or_doc], verbose_name='Als je een Data Management Plan hebt voor deze aanvraag, kan je kiezen om deze hier bij te voegen. Het aanleveren van een DMP vergemakkelijkt het toetsingsproces aanzienlijk.'), ), migrations.AlterField( model_name='proposal', name='funding_name', field=models.CharField(blank=True, help_text='De titel die je hier opgeeft zal in de formele toestemmingsbrief gebruikt worden.', max_length=200, verbose_name='Wat is de naam van het gefinancierde project?'), ), migrations.AlterField( model_name='proposal', name='has_minor_revision', field=models.BooleanField(default=False, verbose_name='Is er een revisie geweest na het indienen van deze aanvraag?'), ), migrations.AlterField( model_name='proposal', name='inform_local_staff', field=models.BooleanField(blank=True, default=None, null=True, verbose_name='<p>Je hebt aangegeven dat je gebruik wilt gaan maken van één van de faciliteiten van het UiL OTS, namelijk de database, Zep software en/of het UiL OTS lab. Het lab supportteam van het UiL OTS zou graag op de hoogte willen worden gesteld van aankomende onderzoeken. 
Daarom vragen wij hier jouw toestemming om delen van deze aanvraag door te sturen naar het lab supportteam.</p> <p>Vind je het goed dat de volgende delen uit de aanvraag worden doorgestuurd:</p> - Jouw naam en de namen van de andere betrokkenen <br/> - De eindverantwoordelijke van het onderzoek <br/> - De titel van het onderzoek <br/> - De beoogde startdatum <br/> - Van welke faciliteiten je gebruik wil maken (database, lab, Zep software)'), ), migrations.AlterField( model_name='proposal', name='institution', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='proposals.Institution', verbose_name='Aan welk onderzoeksinstituut ben je verbonden?'), ), migrations.AlterField( model_name='proposal', name='is_pre_approved', field=models.BooleanField(blank=True, default=None, null=True, verbose_name='Heb je formele toestemming van een ethische toetsingcommissie, uitgezonderd deze FETC-GW commissie?'), ), migrations.AlterField( model_name='proposal', name='is_revision', field=models.BooleanField(default=False, verbose_name='Is deze aanvraag een revisie van of amendement op een ingediende aanvraag?'), ), migrations.AlterField( model_name='proposal', name='other_applicants', field=models.BooleanField(default=False, verbose_name='Zijn er nog andere onderzoekers bij deze aanvraag betrokken die geaffilieerd zijn aan één van de onderzoeksinstituten ICON, OFR, OGK of UiL OTS?'), ), migrations.AlterField( model_name='proposal', name='other_stakeholders', field=models.BooleanField(default=False, verbose_name='Zijn er nog andere onderzoekers bij deze aanvraag betrokken die <strong>niet</strong> geaffilieerd zijn aan een van de onderzoeksinstituten van de Faculteit Geestwetenschappen van de UU? '), ), migrations.AlterField( model_name='proposal', name='parent', field=models.ForeignKey(help_text='Dit veld toont enkel aanvragen waar je zelf een medeuitvoerende bent.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='proposals.Proposal', verbose_name='Te kopiëren aanvraag'), ), migrations.AlterField( model_name='proposal', name='pre_approval_institute', field=models.CharField(blank=True, max_length=200, null=True, verbose_name='Welk instituut heeft de aanvraag goedgekeurd?'), ), migrations.AlterField( model_name='proposal', name='pre_approval_pdf', field=models.FileField(blank=True, upload_to=proposals.utils.proposal_utils.FilenameFactory('Pre_Approval'), validators=[main.validators.validate_pdf_or_doc], verbose_name='Upload hier je formele toestemmingsbrief van dit instituut (in .pdf of .doc(x)-formaat)'), ), migrations.AlterField( model_name='proposal', name='pre_assessment_pdf', field=models.FileField(blank=True, upload_to=proposals.utils.proposal_utils.FilenameFactory('Preassessment'), validators=[main.validators.validate_pdf_or_doc], verbose_name='Upload hier je aanvraag (in .pdf of .doc(x)-formaat)'), ), migrations.AlterField( model_name='proposal', name='relation', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='proposals.Relation', verbose_name='In welke hoedanigheid ben je betrokken bij dit onderzoek?'), ), migrations.AlterField( model_name='proposal', name='reviewing_committee', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='auth.Group', verbose_name='Door welke comissie dient deze aanvraag te worden beoordeeld?'), ), migrations.AlterField( model_name='proposal', name='status', field=models.PositiveIntegerField(choices=[(1, 'Concept'), (40, 'Opgestuurd ter beoordeling door 
eindverantwoordelijke'), (50, 'Opgestuurd ter beoordeling door FETC-GW'), (55, 'Aanvraag is beoordeeld door FETC-GW'), (60, 'Aanvraag is beoordeeld door FETC-GW')], default=1), ), migrations.AlterField( model_name='proposal', name='supervisor', field=models.ForeignKey(blank=True, help_text='Aan het einde van de procedure kan je deze aanvraag ter\n verificatie naar je eindverantwoordelijke sturen. De\n eindverantwoordelijke zal de aanvraag vervolgens kunnen aanpassen en\n indienen bij de FETC-GW. <br><br><strong>Tip</strong>: Type een\n aantal letters van de voornaam, achternaam, of Solis ID van het\n persoon die je toe wilt voegen in de zoekbalk hiernaast.\n Merk op dat het laden even kan duren.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Eindverantwoordelijke onderzoeker'), ), migrations.AlterField( model_name='proposal', name='title', field=models.CharField(help_text='De titel die je hier opgeeft is zichtbaar voor de FETC-GW-leden en, wanneer de aanvraag is goedgekeurd, ook voor alle medewerkers die in het archief van deze portal kijken. De titel mag niet identiek zijn aan een vorige titel van een aanvraag die je hebt ingediend.', max_length=200, verbose_name='Wat is de titel van je aanvraag? Deze titel zal worden gebruikt in alle formele correspondentie.'), ), migrations.AlterField( model_name='wmo', name='metc_application', field=models.BooleanField(default=False, verbose_name='Je onderzoek moet beoordeeld worden door een METC, maar dient nog wel bij de FETC-GW te worden geregistreerd. Is dit onderzoek al aangemeld bij een METC?'), ), ]
[ "django.db.models.CharField", "django.db.models.ForeignKey", "django.db.models.PositiveIntegerField", "django.db.models.BooleanField", "django.db.models.DateField" ]
[((504, 733), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'validators': '[proposals.validators.AVGUnderstoodValidator]', 'verbose_name': '"""Ik heb kennis genomen van het bovenstaande en begrijp mijn verantwoordelijkheden ten opzichte van de AVG."""'}), "(default=False, validators=[proposals.validators.\n AVGUnderstoodValidator], verbose_name=\n 'Ik heb kennis genomen van het bovenstaande en begrijp mijn verantwoordelijkheden ten opzichte van de AVG.'\n )\n", (523, 733), False, 'from django.db import migrations, models\n'), ((846, 998), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Wat is de beoogde startdatum van het onderzoek waarvoor deze aanvraag wordt ingediend?"""'}), "(blank=True, null=True, verbose_name=\n 'Wat is de beoogde startdatum van het onderzoek waarvoor deze aanvraag wordt ingediend?'\n )\n", (862, 998), False, 'from django.db import migrations, models\n'), ((1637, 1853), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'help_text': '"""De titel die je hier opgeeft zal in de formele toestemmingsbrief gebruikt worden."""', 'max_length': '(200)', 'verbose_name': '"""Wat is de naam van het gefinancierde project?"""'}), "(blank=True, help_text=\n 'De titel die je hier opgeeft zal in de formele toestemmingsbrief gebruikt worden.'\n , max_length=200, verbose_name=\n 'Wat is de naam van het gefinancierde project?')\n", (1653, 1853), False, 'from django.db import migrations, models\n'), ((1974, 2090), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Is er een revisie geweest na het indienen van deze aanvraag?"""'}), "(default=False, verbose_name=\n 'Is er een revisie geweest na het indienen van deze aanvraag?')\n", (1993, 2090), False, 'from django.db import migrations, models\n'), ((2221, 3011), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'blank': '(True)', 'default': 'None', 'null': '(True)', 'verbose_name': '"""<p>Je hebt aangegeven dat je gebruik wilt gaan maken van één van de faciliteiten van het UiL OTS, namelijk de database, Zep software en/of het UiL OTS lab. Het lab supportteam van het UiL OTS zou graag op de hoogte willen worden gesteld van aankomende onderzoeken. Daarom vragen wij hier jouw toestemming om delen van deze aanvraag door te sturen naar het lab supportteam.</p> <p>Vind je het goed dat de volgende delen uit de aanvraag worden doorgestuurd:</p> - Jouw naam en de namen van de andere betrokkenen <br/> - De eindverantwoordelijke van het onderzoek <br/> - De titel van het onderzoek <br/> - De beoogde startdatum <br/> - Van welke faciliteiten je gebruik wil maken (database, lab, Zep software)"""'}), "(blank=True, default=None, null=True, verbose_name=\n '<p>Je hebt aangegeven dat je gebruik wilt gaan maken van één van de faciliteiten van het UiL OTS, namelijk de database, Zep software en/of het UiL OTS lab. Het lab supportteam van het UiL OTS zou graag op de hoogte willen worden gesteld van aankomende onderzoeken. 
Daarom vragen wij hier jouw toestemming om delen van deze aanvraag door te sturen naar het lab supportteam.</p> <p>Vind je het goed dat de volgende delen uit de aanvraag worden doorgestuurd:</p> - Jouw naam en de namen van de andere betrokkenen <br/> - De eindverantwoordelijke van het onderzoek <br/> - De titel van het onderzoek <br/> - De beoogde startdatum <br/> - Van welke faciliteiten je gebruik wil maken (database, lab, Zep software)'\n )\n", (2240, 3011), False, 'from django.db import migrations, models\n'), ((3130, 3293), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.PROTECT', 'to': '"""proposals.Institution"""', 'verbose_name': '"""Aan welk onderzoeksinstituut ben je verbonden?"""'}), "(on_delete=django.db.models.deletion.PROTECT, to=\n 'proposals.Institution', verbose_name=\n 'Aan welk onderzoeksinstituut ben je verbonden?')\n", (3147, 3293), False, 'from django.db import migrations, models\n'), ((3416, 3598), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'blank': '(True)', 'default': 'None', 'null': '(True)', 'verbose_name': '"""Heb je formele toestemming van een ethische toetsingcommissie, uitgezonderd deze FETC-GW commissie?"""'}), "(blank=True, default=None, null=True, verbose_name=\n 'Heb je formele toestemming van een ethische toetsingcommissie, uitgezonderd deze FETC-GW commissie?'\n )\n", (3435, 3598), False, 'from django.db import migrations, models\n'), ((3717, 3852), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Is deze aanvraag een revisie van of amendement op een ingediende aanvraag?"""'}), "(default=False, verbose_name=\n 'Is deze aanvraag een revisie van of amendement op een ingediende aanvraag?'\n )\n", (3736, 3852), False, 'from django.db import migrations, models\n'), ((3976, 4181), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Zijn er nog andere onderzoekers bij deze aanvraag betrokken die geaffilieerd zijn aan één van de onderzoeksinstituten ICON, OFR, OGK of UiL OTS?"""'}), "(default=False, verbose_name=\n 'Zijn er nog andere onderzoekers bij deze aanvraag betrokken die geaffilieerd zijn aan één van de onderzoeksinstituten ICON, OFR, OGK of UiL OTS?'\n )\n", (3995, 4181), False, 'from django.db import migrations, models\n'), ((4307, 4555), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Zijn er nog andere onderzoekers bij deze aanvraag betrokken die <strong>niet</strong> geaffilieerd zijn aan een van de onderzoeksinstituten van de Faculteit Geestwetenschappen van de UU? """'}), "(default=False, verbose_name=\n 'Zijn er nog andere onderzoekers bij deze aanvraag betrokken die <strong>niet</strong> geaffilieerd zijn aan een van de onderzoeksinstituten van de Faculteit Geestwetenschappen van de UU? 
'\n )\n", (4326, 4555), False, 'from django.db import migrations, models\n'), ((4669, 4926), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'help_text': '"""Dit veld toont enkel aanvragen waar je zelf een medeuitvoerende bent."""', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""children"""', 'to': '"""proposals.Proposal"""', 'verbose_name': '"""Te kopiëren aanvraag"""'}), "(help_text=\n 'Dit veld toont enkel aanvragen waar je zelf een medeuitvoerende bent.',\n null=True, on_delete=django.db.models.deletion.CASCADE, related_name=\n 'children', to='proposals.Proposal', verbose_name='Te kopiëren aanvraag')\n", (4686, 4926), False, 'from django.db import migrations, models\n'), ((5052, 5174), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(200)', 'null': '(True)', 'verbose_name': '"""Welk instituut heeft de aanvraag goedgekeurd?"""'}), "(blank=True, max_length=200, null=True, verbose_name=\n 'Welk instituut heeft de aanvraag goedgekeurd?')\n", (5068, 5174), False, 'from django.db import migrations, models\n'), ((6043, 6237), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""proposals.Relation"""', 'verbose_name': '"""In welke hoedanigheid ben je betrokken bij dit onderzoek?"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='proposals.Relation', verbose_name=\n 'In welke hoedanigheid ben je betrokken bij dit onderzoek?')\n", (6060, 6237), False, 'from django.db import migrations, models\n'), ((6364, 6531), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.PROTECT', 'to': '"""auth.Group"""', 'verbose_name': '"""Door welke comissie dient deze aanvraag te worden beoordeeld?"""'}), "(on_delete=django.db.models.deletion.PROTECT, to=\n 'auth.Group', verbose_name=\n 'Door welke comissie dient deze aanvraag te worden beoordeeld?')\n", (6381, 6531), False, 'from django.db import migrations, models\n'), ((6645, 6927), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'choices': "[(1, 'Concept'), (40,\n 'Opgestuurd ter beoordeling door eindverantwoordelijke'), (50,\n 'Opgestuurd ter beoordeling door FETC-GW'), (55,\n 'Aanvraag is beoordeeld door FETC-GW'), (60,\n 'Aanvraag is beoordeeld door FETC-GW')]", 'default': '(1)'}), "(choices=[(1, 'Concept'), (40,\n 'Opgestuurd ter beoordeling door eindverantwoordelijke'), (50,\n 'Opgestuurd ter beoordeling door FETC-GW'), (55,\n 'Aanvraag is beoordeeld door FETC-GW'), (60,\n 'Aanvraag is beoordeeld door FETC-GW')], default=1)\n", (6672, 6927), False, 'from django.db import migrations, models\n'), ((7039, 7684), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'help_text': '"""Aan het einde van de procedure kan je deze aanvraag ter\n verificatie naar je eindverantwoordelijke sturen. De\n eindverantwoordelijke zal de aanvraag vervolgens kunnen aanpassen en\n indienen bij de FETC-GW. 
<br><br><strong>Tip</strong>: Type een\n aantal letters van de voornaam, achternaam, of Solis ID van het\n persoon die je toe wilt voegen in de zoekbalk hiernaast.\n Merk op dat het laden even kan duren."""', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Eindverantwoordelijke onderzoeker"""'}), '(blank=True, help_text=\n """Aan het einde van de procedure kan je deze aanvraag ter\n verificatie naar je eindverantwoordelijke sturen. De\n eindverantwoordelijke zal de aanvraag vervolgens kunnen aanpassen en\n indienen bij de FETC-GW. <br><br><strong>Tip</strong>: Type een\n aantal letters van de voornaam, achternaam, of Solis ID van het\n persoon die je toe wilt voegen in de zoekbalk hiernaast.\n Merk op dat het laden even kan duren."""\n , null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL, verbose_name=\'Eindverantwoordelijke onderzoeker\')\n', (7056, 7684), False, 'from django.db import migrations, models\n'), ((7794, 8238), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""De titel die je hier opgeeft is zichtbaar voor de FETC-GW-leden en, wanneer de aanvraag is goedgekeurd, ook voor alle medewerkers die in het archief van deze portal kijken. De titel mag niet identiek zijn aan een vorige titel van een aanvraag die je hebt ingediend."""', 'max_length': '(200)', 'verbose_name': '"""Wat is de titel van je aanvraag? Deze titel zal worden gebruikt in alle formele correspondentie."""'}), "(help_text=\n 'De titel die je hier opgeeft is zichtbaar voor de FETC-GW-leden en, wanneer de aanvraag is goedgekeurd, ook voor alle medewerkers die in het archief van deze portal kijken. De titel mag niet identiek zijn aan een vorige titel van een aanvraag die je hebt ingediend.'\n , max_length=200, verbose_name=\n 'Wat is de titel van je aanvraag? Deze titel zal worden gebruikt in alle formele correspondentie.'\n )\n", (7810, 8238), False, 'from django.db import migrations, models\n'), ((8347, 8561), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Je onderzoek moet beoordeeld worden door een METC, maar dient nog wel bij de FETC-GW te worden geregistreerd. Is dit onderzoek al aangemeld bij een METC?"""'}), "(default=False, verbose_name=\n 'Je onderzoek moet beoordeeld worden door een METC, maar dient nog wel bij de FETC-GW te worden geregistreerd. Is dit onderzoek al aangemeld bij een METC?'\n )\n", (8366, 8561), False, 'from django.db import migrations, models\n')]
from django.contrib import admin # Register your models here. from .models import Tasks, Audit admin.site.register(Tasks) admin.site.register(Audit)
[ "django.contrib.admin.site.register" ]
[((97, 123), 'django.contrib.admin.site.register', 'admin.site.register', (['Tasks'], {}), '(Tasks)\n', (116, 123), False, 'from django.contrib import admin\n'), ((124, 150), 'django.contrib.admin.site.register', 'admin.site.register', (['Audit'], {}), '(Audit)\n', (143, 150), False, 'from django.contrib import admin\n')]
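# admin.site.register also accepts a ModelAdmin subclass for customisation.
# A hedged sketch, kept commented out because the models are already
# registered above and the Tasks/Audit fields are not shown in this snippet,
# so only the always-present primary key is referenced:
# class TasksAdmin(admin.ModelAdmin):
#     list_display = ('id',)
# admin.site.register(Tasks, TasksAdmin)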
# coding=utf-8 """ Common functions for click-based cli scripts. """ from __future__ import absolute_import import functools import logging import os import re import copy import click from datacube import config, __version__ from datacube.executor import get_executor from datacube.index import index_connect from pathlib import Path from sqlalchemy.exc import OperationalError, ProgrammingError _LOG_FORMAT_STRING = '%(asctime)s %(levelname)s %(message)s' CLICK_SETTINGS = dict(help_option_names=['-h', '--help']) def _print_version(ctx, param, value): if not value or ctx.resilient_parsing: return click.echo( '{prog}, version {version}'.format( prog='Data Cube', version=__version__ ) ) ctx.exit() def compose(*functions): """ >>> compose( ... lambda x: x+1, ... lambda y: y+2 ... )(1) 4 """ def compose2(f, g): return lambda x: f(g(x)) return functools.reduce(compose2, functions, lambda x: x) class ColorFormatter(logging.Formatter): colors = { 'info': dict(fg='white'), 'error': dict(fg='red'), 'exception': dict(fg='red'), 'critical': dict(fg='red'), 'debug': dict(fg='blue'), 'warning': dict(fg='yellow') } def format(self, record): if not record.exc_info: record = copy.copy(record) record.levelname = click.style(record.levelname, **self.colors.get(record.levelname.lower(), {})) return logging.Formatter.format(self, record) class ClickHandler(logging.Handler): def emit(self, record): try: msg = self.format(record) click.echo(msg, err=True) except (KeyboardInterrupt, SystemExit): raise except: # pylint: disable=bare-except self.handleError(record) def _init_logging(ctx, param, value): handler = ClickHandler() handler.formatter = ColorFormatter(_LOG_FORMAT_STRING) logging.root.addHandler(handler) logging_level = logging.WARN - 10 * value logging.root.setLevel(logging_level) logging.getLogger('datacube').setLevel(logging_level) if not ctx.obj: ctx.obj = {} ctx.obj['verbosity'] = value def _add_logfile(ctx, param, value): formatter = logging.Formatter(_LOG_FORMAT_STRING) for logfile in value: handler = logging.FileHandler(logfile) handler.formatter = formatter logging.root.addHandler(handler) def _log_queries(ctx, param, value): if value: logging.getLogger('sqlalchemy.engine').setLevel('INFO') def _set_config(ctx, param, value): if value: if not any(os.path.exists(p) for p in value): raise ValueError('No specified config paths exist: {}' % value) paths = value else: paths = config.DEFAULT_CONF_PATHS parsed_config = config.LocalConfig.find(paths=paths) if not ctx.obj: ctx.obj = {} ctx.obj['config_file'] = parsed_config #: pylint: disable=invalid-name version_option = click.option('--version', is_flag=True, callback=_print_version, expose_value=False, is_eager=True) #: pylint: disable=invalid-name verbose_option = click.option('--verbose', '-v', count=True, callback=_init_logging, is_eager=True, expose_value=False, help="Use multiple times for more verbosity") #: pylint: disable=invalid-name logfile_option = click.option('--log-file', multiple=True, callback=_add_logfile, is_eager=True, expose_value=False, help="Specify log file") #: pylint: disable=invalid-name config_option = click.option('--config_file', '-C', multiple=True, default='', callback=_set_config, expose_value=False) #: pylint: disable=invalid-name log_queries_option = click.option('--log-queries', is_flag=True, callback=_log_queries, expose_value=False, help="Print database queries.") # This is a function, so it's valid to be lowercase. 
#: pylint: disable=invalid-name global_cli_options = compose( version_option, verbose_option, logfile_option, config_option, log_queries_option ) @click.group(help="Data Cube command-line interface", context_settings=CLICK_SETTINGS) @global_cli_options def cli(): pass def pass_config(f): """Get a datacube config as the first argument. """ def new_func(*args, **kwargs): config_ = click.get_current_context().obj['config_file'] return f(config_, *args, **kwargs) return functools.update_wrapper(new_func, f) def pass_index(app_name=None, expect_initialised=True): """Get a connection to the index as the first argument. A short name of the application can be specified for logging purposes. """ def decorate(f): def with_index(*args, **kwargs): ctx = click.get_current_context() try: index = index_connect(ctx.obj['config_file'], application_name=app_name or ctx.command_path, validate_connection=expect_initialised) return f(index, *args, **kwargs) except (OperationalError, ProgrammingError) as e: handle_exception('Error connecting to database: %s', e) return functools.update_wrapper(with_index, f) return decorate def parse_endpoint(value): ip, port = tuple(value.split(':')) return ip, int(port) EXECUTOR_TYPES = { 'serial': lambda _: get_executor(None, None), 'multiproc': lambda workers: get_executor(None, int(workers)), 'distributed': lambda addr: get_executor(parse_endpoint(addr), True) } def _setup_executor(ctx, param, value): try: return EXECUTOR_TYPES[value[0]](value[1]) except ValueError: ctx.fail("Failed to create '%s' executor with '%s'" % value) executor_cli_options = click.option('--executor', type=(click.Choice(EXECUTOR_TYPES.keys()), str), default=('serial', None), help="Run parallelized, either locally or distributed. e.g.:\n" "--executor multiproc 4 (OR)\n" "--executor distributed 10.0.0.8:8888", callback=_setup_executor) def handle_exception(msg, e): """ Exit following an exception in a CLI app If verbosity (-v flag) specified, dump out a stack trace. Otherwise, simply print the given error message. Include a '%s' in the message to print the single line message from the exception. :param e: caught Exception :param msg: Message to User with optional %s """ ctx = click.get_current_context() if ctx.obj['verbosity'] >= 1: raise e else: if '%s' in msg: click.echo(msg % e) else: click.echo(msg) ctx.exit(1) def to_pathlib(ctx, param, value): if value: return Path(value) else: return None
[ "datacube.config.LocalConfig.find", "logging.Formatter.format", "logging.FileHandler", "click.get_current_context", "logging.root.addHandler", "click.option", "copy.copy", "click.echo", "datacube.executor.get_executor", "logging.Formatter", "functools.update_wrapper", "pathlib.Path", "logging.getLogger", "datacube.index.index_connect", "os.path.exists", "functools.reduce", "click.group", "logging.root.setLevel" ]
[((3074, 3177), 'click.option', 'click.option', (['"""--version"""'], {'is_flag': '(True)', 'callback': '_print_version', 'expose_value': '(False)', 'is_eager': '(True)'}), "('--version', is_flag=True, callback=_print_version,\n expose_value=False, is_eager=True)\n", (3086, 3177), False, 'import click\n'), ((3253, 3410), 'click.option', 'click.option', (['"""--verbose"""', '"""-v"""'], {'count': '(True)', 'callback': '_init_logging', 'is_eager': '(True)', 'expose_value': '(False)', 'help': '"""Use multiple times for more verbosity"""'}), "('--verbose', '-v', count=True, callback=_init_logging,\n is_eager=True, expose_value=False, help=\n 'Use multiple times for more verbosity')\n", (3265, 3410), False, 'import click\n'), ((3481, 3610), 'click.option', 'click.option', (['"""--log-file"""'], {'multiple': '(True)', 'callback': '_add_logfile', 'is_eager': '(True)', 'expose_value': '(False)', 'help': '"""Specify log file"""'}), "('--log-file', multiple=True, callback=_add_logfile, is_eager=\n True, expose_value=False, help='Specify log file')\n", (3493, 3610), False, 'import click\n'), ((3684, 3793), 'click.option', 'click.option', (['"""--config_file"""', '"""-C"""'], {'multiple': '(True)', 'default': '""""""', 'callback': '_set_config', 'expose_value': '(False)'}), "('--config_file', '-C', multiple=True, default='', callback=\n _set_config, expose_value=False)\n", (3696, 3793), False, 'import click\n'), ((3871, 3993), 'click.option', 'click.option', (['"""--log-queries"""'], {'is_flag': '(True)', 'callback': '_log_queries', 'expose_value': '(False)', 'help': '"""Print database queries."""'}), "('--log-queries', is_flag=True, callback=_log_queries,\n expose_value=False, help='Print database queries.')\n", (3883, 3993), False, 'import click\n'), ((4247, 4337), 'click.group', 'click.group', ([], {'help': '"""Data Cube command-line interface"""', 'context_settings': 'CLICK_SETTINGS'}), "(help='Data Cube command-line interface', context_settings=\n CLICK_SETTINGS)\n", (4258, 4337), False, 'import click\n'), ((975, 1025), 'functools.reduce', 'functools.reduce', (['compose2', 'functions', '(lambda x: x)'], {}), '(compose2, functions, lambda x: x)\n', (991, 1025), False, 'import functools\n'), ((2005, 2037), 'logging.root.addHandler', 'logging.root.addHandler', (['handler'], {}), '(handler)\n', (2028, 2037), False, 'import logging\n'), ((2089, 2125), 'logging.root.setLevel', 'logging.root.setLevel', (['logging_level'], {}), '(logging_level)\n', (2110, 2125), False, 'import logging\n'), ((2315, 2352), 'logging.Formatter', 'logging.Formatter', (['_LOG_FORMAT_STRING'], {}), '(_LOG_FORMAT_STRING)\n', (2332, 2352), False, 'import logging\n'), ((2900, 2936), 'datacube.config.LocalConfig.find', 'config.LocalConfig.find', ([], {'paths': 'paths'}), '(paths=paths)\n', (2923, 2936), False, 'from datacube import config, __version__\n'), ((4607, 4644), 'functools.update_wrapper', 'functools.update_wrapper', (['new_func', 'f'], {}), '(new_func, f)\n', (4631, 4644), False, 'import functools\n'), ((6866, 6893), 'click.get_current_context', 'click.get_current_context', ([], {}), '()\n', (6891, 6893), False, 'import click\n'), ((1528, 1566), 'logging.Formatter.format', 'logging.Formatter.format', (['self', 'record'], {}), '(self, record)\n', (1552, 1566), False, 'import logging\n'), ((2397, 2425), 'logging.FileHandler', 'logging.FileHandler', (['logfile'], {}), '(logfile)\n', (2416, 2425), False, 'import logging\n'), ((2472, 2504), 'logging.root.addHandler', 'logging.root.addHandler', (['handler'], {}), '(handler)\n', 
(2495, 2504), False, 'import logging\n'), ((5402, 5441), 'functools.update_wrapper', 'functools.update_wrapper', (['with_index', 'f'], {}), '(with_index, f)\n', (5426, 5441), False, 'import functools\n'), ((5601, 5625), 'datacube.executor.get_executor', 'get_executor', (['None', 'None'], {}), '(None, None)\n', (5613, 5625), False, 'from datacube.executor import get_executor\n'), ((7138, 7149), 'pathlib.Path', 'Path', (['value'], {}), '(value)\n', (7142, 7149), False, 'from pathlib import Path\n'), ((1385, 1402), 'copy.copy', 'copy.copy', (['record'], {}), '(record)\n', (1394, 1402), False, 'import copy\n'), ((1697, 1722), 'click.echo', 'click.echo', (['msg'], {'err': '(True)'}), '(msg, err=True)\n', (1707, 1722), False, 'import click\n'), ((2130, 2159), 'logging.getLogger', 'logging.getLogger', (['"""datacube"""'], {}), "('datacube')\n", (2147, 2159), False, 'import logging\n'), ((4933, 4960), 'click.get_current_context', 'click.get_current_context', ([], {}), '()\n', (4958, 4960), False, 'import click\n'), ((6990, 7009), 'click.echo', 'click.echo', (['(msg % e)'], {}), '(msg % e)\n', (7000, 7009), False, 'import click\n'), ((7036, 7051), 'click.echo', 'click.echo', (['msg'], {}), '(msg)\n', (7046, 7051), False, 'import click\n'), ((2566, 2604), 'logging.getLogger', 'logging.getLogger', (['"""sqlalchemy.engine"""'], {}), "('sqlalchemy.engine')\n", (2583, 2604), False, 'import logging\n'), ((4505, 4532), 'click.get_current_context', 'click.get_current_context', ([], {}), '()\n', (4530, 4532), False, 'import click\n'), ((5002, 5131), 'datacube.index.index_connect', 'index_connect', (["ctx.obj['config_file']"], {'application_name': '(app_name or ctx.command_path)', 'validate_connection': 'expect_initialised'}), "(ctx.obj['config_file'], application_name=app_name or ctx.\n command_path, validate_connection=expect_initialised)\n", (5015, 5131), False, 'from datacube.index import index_connect\n'), ((2693, 2710), 'os.path.exists', 'os.path.exists', (['p'], {}), '(p)\n', (2707, 2710), False, 'import os\n')]
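# A hypothetical subcommand showing how the helpers above compose; the
# 'check' command and its body are illustrative, not part of the real
# datacube CLI. pass_index builds the index connection from the config that
# the group-level options stored on the click context object.
@cli.command('check')
@pass_index(app_name='datacube-check')
def check_connection(index):
    """Verify that the configured index is reachable."""
    click.echo('Connected: %s' % index)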
# Generated by Django 3.1.2 on 2020-10-20 19:06 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Ticket', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('protcol', models.CharField(max_length=10)), ('status', models.BooleanField(default=False)), ('short_description', models.CharField(max_length=50)), ('description', models.CharField(max_length=200)), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='Action', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('short_description', models.CharField(max_length=50)), ('description', models.CharField(max_length=200)), ('ticket', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tickets.ticket')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), ]
[ "django.db.migrations.swappable_dependency", "django.db.models.CharField", "django.db.models.ForeignKey", "django.db.models.BooleanField", "django.db.models.AutoField" ]
[((247, 304), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (278, 304), False, 'from django.db import migrations, models\n'), ((435, 528), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (451, 528), False, 'from django.db import migrations, models\n'), ((555, 586), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)'}), '(max_length=10)\n', (571, 586), False, 'from django.db import migrations, models\n'), ((616, 650), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (635, 650), False, 'from django.db import migrations, models\n'), ((691, 722), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (707, 722), False, 'from django.db import migrations, models\n'), ((757, 789), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (773, 789), False, 'from django.db import migrations, models\n'), ((817, 913), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (834, 913), False, 'from django.db import migrations, models\n'), ((1040, 1133), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1056, 1133), False, 'from django.db import migrations, models\n'), ((1170, 1201), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (1186, 1201), False, 'from django.db import migrations, models\n'), ((1236, 1268), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1252, 1268), False, 'from django.db import migrations, models\n'), ((1298, 1386), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""tickets.ticket"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'tickets.ticket')\n", (1315, 1386), False, 'from django.db import migrations, models\n'), ((1409, 1505), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (1426, 1505), False, 'from django.db import migrations, models\n')]
""" Copyright (c) Facebook, Inc. and its affiliates. This source code is licensed under the MIT license found in the LICENSE file in the root directory of this source tree. """ from argparse import ArgumentParser from collections import defaultdict from typing import Optional import fastmri import numpy as np import torch import torch.nn as nn from fastmri import evaluate from fastmri.data import transforms from fastmri.data.transforms import VarNetSample from fastmri.models.adaptive_varnet import AdaptiveSensitivityModel, AdaptiveVarNetBlock from fastmri.models.varnet import NormUnet from fastmri.pl_modules.mri_module import MriModule from .metrics import DistributedMetricSum, DistributedArraySum class VarNet(nn.Module): """ A full variational network model. This model applies a combination of soft data consistency with a U-Net regularizer. To use non-U-Net regularizers, use VarNetBlock. """ def __init__( self, num_cascades: int = 12, sens_chans: int = 8, sens_pools: int = 4, chans: int = 18, pools: int = 4, num_sense_lines: Optional[int] = None, hard_dc: bool = False, dc_mode: str = "simul", sparse_dc_gradients: bool = True, ): """ Args: num_cascades: Number of cascades (i.e., layers) for variational network. sens_chans: Number of channels for sensitivity map U-Net. sens_pools Number of downsampling and upsampling layers for sensitivity map U-Net. chans: Number of channels for cascade U-Net. pools: Number of downsampling and upsampling layers for cascade U-Net. num_sense_lines: Number of low-frequency lines to use for sensitivity map computation, must be even or `None`. Default `None` will automatically compute the number from masks. Default behaviour may cause some slices to use more low-frequency lines than others, when used in conjunction with e.g. the EquispacedMaskFunc defaults. hard_dc: Whether to do hard DC layers instead of soft (learned). dc_mode: str, whether to do DC before ('first'), after ('last') or simultaneously ('simul') with Refinement step. Default 'simul'. sparse_dc_gradients: Whether to sparsify the gradients in DC by using torch.where() with the mask: this essentially removes gradients for the policy on unsampled rows. This should change nothing for the non-active VarNet. 
""" super().__init__() self.num_sense_lines = num_sense_lines self.hard_dc = hard_dc self.dc_mode = dc_mode self.sparse_dc_gradients = sparse_dc_gradients self.sens_net = AdaptiveSensitivityModel( sens_chans, sens_pools, num_sense_lines=num_sense_lines ) self.cascades = nn.ModuleList( [ AdaptiveVarNetBlock( NormUnet(chans, pools), hard_dc=hard_dc, dc_mode=dc_mode, sparse_dc_gradients=sparse_dc_gradients, ) for _ in range(num_cascades) ] ) def forward( self, kspace: torch.Tensor, masked_kspace: torch.Tensor, mask: torch.Tensor ): extra_outputs = defaultdict(list) sens_maps = self.sens_net(masked_kspace, mask) extra_outputs["sense"].append(sens_maps.detach().cpu()) kspace_pred = masked_kspace.clone() extra_outputs["masks"].append(mask.detach().cpu()) # Store current reconstruction current_recon = fastmri.complex_abs( self.sens_reduce(masked_kspace, sens_maps) ).squeeze(1) extra_outputs["recons"].append(current_recon.detach().cpu()) for cascade in self.cascades: kspace_pred = cascade( kspace_pred, masked_kspace, mask, sens_maps, kspace=kspace ) # Store current reconstruction current_recon = fastmri.complex_abs( self.sens_reduce(masked_kspace, sens_maps) ).squeeze(1) extra_outputs["recons"].append(current_recon.detach().cpu()) # Could presumably do complex_abs(complex_rss()) instead and get same result? output = fastmri.rss(fastmri.complex_abs(fastmri.ifft2c(kspace_pred)), dim=1) return output, extra_outputs def sens_reduce(self, x: torch.Tensor, sens_maps: torch.Tensor) -> torch.Tensor: x = fastmri.ifft2c(x) return fastmri.complex_mul(x, fastmri.complex_conj(sens_maps)).sum( dim=1, keepdim=True ) class VarNetModule(MriModule): """ VarNet training module. This can be used to train variational networks from the paper: <NAME> al. End-to-end variational networks for accelerated MRI reconstruction. In International Conference on Medical Image Computing and Computer-Assisted Intervention, 2020. which was inspired by the earlier paper: <NAME> et al. Learning a variational network for reconstruction of accelerated MRI data. Magnetic Resonance inMedicine, 79(6):3055–3071, 2018. """ def __init__( self, num_cascades: int = 12, pools: int = 4, chans: int = 18, sens_pools: int = 4, sens_chans: int = 8, lr: float = 0.0003, lr_step_size: int = 40, lr_gamma: float = 0.1, weight_decay: float = 0.0, num_sense_lines: int = None, hard_dc: bool = False, dc_mode: str = "simul", sparse_dc_gradients: bool = True, **kwargs, ): """ Args: num_cascades: Number of cascades (i.e., layers) for variational network. pools: Number of downsampling and upsampling layers for cascade U-Net. chans: Number of channels for cascade U-Net. sens_pools: Number of downsampling and upsampling layers for sensitivity map U-Net. sens_chans: Number of channels for sensitivity map U-Net. lr: Learning rate. lr_step_size: Learning rate step size. lr_gamma: Learning rate gamma decay. weight_decay: Parameter for penalizing weights norm. num_sense_lines: Number of low-frequency lines to use for sensitivity map computation, must be even or `None`. Default `None` will automatically compute the number from masks. Default behaviour may cause some slices to use more low-frequency lines than others, when used in conjunction with e.g. the EquispacedMaskFunc defaults. hard_dc: Whether to do hard DC layers instead of soft (learned). dc_mode: str, whether to do DC before ('first'), after ('last') or simultaneously ('simul') with Refinement step. Default 'simul'. 
sparse_dc_gradients: Whether to sparsify the gradients in DC by using torch.where() with the mask: this essentially removes gradients for the policy on unsampled rows. This should change nothing for the non-active VarNet. """ super().__init__() self.save_hyperparameters() self.num_cascades = num_cascades self.pools = pools self.chans = chans self.sens_pools = sens_pools self.sens_chans = sens_chans self.lr = lr self.lr_step_size = lr_step_size self.lr_gamma = lr_gamma self.weight_decay = weight_decay self.num_sense_lines = num_sense_lines self.hard_dc = hard_dc self.dc_mode = dc_mode self.sparse_dc_gradients = sparse_dc_gradients # logging functions self.NMSE = DistributedMetricSum() self.SSIM = DistributedMetricSum() self.PSNR = DistributedMetricSum() self.ValLoss = DistributedMetricSum() self.TotExamples = DistributedMetricSum() self.TotSliceExamples = DistributedMetricSum() self.ValMargDist = DistributedArraySum() self.ValCondEnt = DistributedMetricSum() self.TrainNMSE = DistributedMetricSum() self.TrainSSIM = DistributedMetricSum() self.TrainPSNR = DistributedMetricSum() self.TrainLoss = DistributedMetricSum() self.TrainTotExamples = DistributedMetricSum() self.TrainTotSliceExamples = DistributedMetricSum() self.TrainMargDist = DistributedArraySum() self.TrainCondEnt = DistributedMetricSum() self.varnet = VarNet( num_cascades=self.num_cascades, sens_chans=self.sens_chans, sens_pools=self.sens_pools, chans=self.chans, pools=self.pools, num_sense_lines=self.num_sense_lines, hard_dc=self.hard_dc, dc_mode=self.dc_mode, sparse_dc_gradients=self.sparse_dc_gradients, ) self.loss = fastmri.SSIMLoss() def forward(self, kspace, masked_kspace, mask): return self.varnet(kspace, masked_kspace, mask) def training_step(self, batch, batch_idx): output, extra_outputs = self(batch.kspace, batch.masked_kspace, batch.mask) target, output = transforms.center_crop_to_smallest(batch.target, output) # NOTE: Using max value here... loss = self.loss( output.unsqueeze(1), target.unsqueeze(1), data_range=batch.max_value ) self.log("train_loss", loss) # Return same stuff as on validation step here return { "batch_idx": batch_idx, "fname": batch.fname, "slice_num": batch.slice_num, "max_value": batch.max_value, "output": output, "target": target, "loss": loss, "extra_outputs": extra_outputs, } def training_step_end(self, train_logs): # check inputs for k in ( "batch_idx", "fname", "slice_num", "max_value", "output", "target", "loss", "extra_outputs", ): if k not in train_logs.keys(): raise RuntimeError( f"Expected key {k} in dict returned by training_step." 
) if train_logs["output"].ndim == 2: train_logs["output"] = train_logs["output"].unsqueeze(0) elif train_logs["output"].ndim != 3: raise RuntimeError("Unexpected output size from training_step.") if train_logs["target"].ndim == 2: train_logs["target"] = train_logs["target"].unsqueeze(0) elif train_logs["target"].ndim != 3: raise RuntimeError("Unexpected output size from training_step.") # compute evaluation metrics mse_vals = defaultdict(dict) target_norms = defaultdict(dict) ssim_vals = defaultdict(dict) max_vals = dict() for i, fname in enumerate(train_logs["fname"]): slice_num = int(train_logs["slice_num"][i].cpu()) maxval = train_logs["max_value"][i].cpu().numpy() output = train_logs["output"][i].detach().cpu().numpy() target = train_logs["target"][i].cpu().numpy() mse_vals[fname][slice_num] = torch.tensor( evaluate.mse(target, output) ).view(1) target_norms[fname][slice_num] = torch.tensor( evaluate.mse(target, np.zeros_like(target)) ).view(1) ssim_vals[fname][slice_num] = torch.tensor( evaluate.ssim(target[None, ...], output[None, ...], maxval=maxval) ).view(1) max_vals[fname] = maxval return { "loss": train_logs["loss"], "mse_vals": mse_vals, "target_norms": target_norms, "ssim_vals": ssim_vals, "max_vals": max_vals, } def validation_step(self, batch, batch_idx): batch: VarNetSample output, extra_outputs = self.forward( batch.kspace, batch.masked_kspace, batch.mask ) target, output = transforms.center_crop_to_smallest(batch.target, output) return { "batch_idx": batch_idx, "fname": batch.fname, "slice_num": batch.slice_num, "max_value": batch.max_value, "output": output, "target": target, "val_loss": self.loss( output.unsqueeze(1), target.unsqueeze(1), data_range=batch.max_value ), "extra_outputs": extra_outputs, } def validation_step_end(self, val_logs): # check inputs for k in ( "batch_idx", "fname", "slice_num", "max_value", "output", "target", "val_loss", ): if k not in val_logs.keys(): raise RuntimeError( f"Expected key {k} in dict returned by validation_step." 
) if val_logs["output"].ndim == 2: val_logs["output"] = val_logs["output"].unsqueeze(0) elif val_logs["output"].ndim != 3: raise RuntimeError("Unexpected output size from validation_step.") if val_logs["target"].ndim == 2: val_logs["target"] = val_logs["target"].unsqueeze(0) elif val_logs["target"].ndim != 3: raise RuntimeError("Unexpected output size from validation_step.") # pick a set of images to log if we don't have one already if self.val_log_indices is None: self.val_log_indices = list( np.random.permutation(len(self.trainer.val_dataloaders[0]))[ : self.num_log_images ] ) # log images to tensorboard if isinstance(val_logs["batch_idx"], int): batch_indices = [val_logs["batch_idx"]] else: batch_indices = val_logs["batch_idx"] for i, batch_idx in enumerate(batch_indices): if batch_idx in self.val_log_indices: key = f"val_images_idx_{batch_idx}" target = val_logs["target"][i].unsqueeze(0) output = val_logs["output"][i].unsqueeze(0) error = torch.abs(target - output) output = output / output.max() target = target / target.max() error = error / error.max() self.log_image(f"{key}/target", target) self.log_image(f"{key}/reconstruction", output) self.log_image(f"{key}/error", error) # compute evaluation metrics mse_vals = defaultdict(dict) target_norms = defaultdict(dict) ssim_vals = defaultdict(dict) max_vals = dict() for i, fname in enumerate(val_logs["fname"]): slice_num = int(val_logs["slice_num"][i].cpu()) maxval = val_logs["max_value"][i].cpu().numpy() output = val_logs["output"][i].cpu().numpy() target = val_logs["target"][i].cpu().numpy() mse_vals[fname][slice_num] = torch.tensor( evaluate.mse(target, output) ).view(1) target_norms[fname][slice_num] = torch.tensor( evaluate.mse(target, np.zeros_like(target)) ).view(1) ssim_vals[fname][slice_num] = torch.tensor( evaluate.ssim(target[None, ...], output[None, ...], maxval=maxval) ).view(1) max_vals[fname] = maxval return { "val_loss": val_logs["val_loss"], "mse_vals": mse_vals, "target_norms": target_norms, "ssim_vals": ssim_vals, "max_vals": max_vals, } def training_epoch_end(self, train_logs): losses = [] mse_vals = defaultdict(dict) target_norms = defaultdict(dict) ssim_vals = defaultdict(dict) max_vals = dict() # use dict updates to handle duplicate slices for train_log in train_logs: losses.append(train_log["loss"].data.view(-1)) for k in train_log["mse_vals"].keys(): mse_vals[k].update(train_log["mse_vals"][k]) for k in train_log["target_norms"].keys(): target_norms[k].update(train_log["target_norms"][k]) for k in train_log["ssim_vals"].keys(): ssim_vals[k].update(train_log["ssim_vals"][k]) for k in train_log["max_vals"]: max_vals[k] = train_log["max_vals"][k] # check to make sure we have all files in all metrics assert ( mse_vals.keys() == target_norms.keys() == ssim_vals.keys() == max_vals.keys() ) # apply means across image volumes metrics = {"nmse": 0, "ssim": 0, "psnr": 0} local_examples = 0 for fname in mse_vals.keys(): local_examples = local_examples + 1 mse_val = torch.mean( torch.cat([v.view(-1) for _, v in mse_vals[fname].items()]) ) target_norm = torch.mean( torch.cat([v.view(-1) for _, v in target_norms[fname].items()]) ) metrics["nmse"] = metrics["nmse"] + mse_val / target_norm metrics["psnr"] = ( metrics["psnr"] + 20 * torch.log10( torch.tensor( max_vals[fname], dtype=mse_val.dtype, device=mse_val.device ) ) - 10 * torch.log10(mse_val) ) metrics["ssim"] = metrics["ssim"] + torch.mean( torch.cat([v.view(-1) for _, v in ssim_vals[fname].items()]) ) # reduce across ddp via sum metrics["nmse"] = 
self.TrainNMSE(metrics["nmse"]) metrics["ssim"] = self.TrainSSIM(metrics["ssim"]) metrics["psnr"] = self.TrainPSNR(metrics["psnr"]) tot_examples = self.TrainTotExamples(torch.tensor(local_examples)) train_loss = self.TrainLoss(torch.sum(torch.cat(losses))) tot_slice_examples = self.TrainTotSliceExamples( torch.tensor(len(losses), dtype=torch.float) ) self.log("training_loss", train_loss / tot_slice_examples, prog_bar=True) for metric, value in metrics.items(): self.log(f"train_metrics/{metric}", value / tot_examples) def validation_epoch_end(self, val_logs): # aggregate losses losses = [] mse_vals = defaultdict(dict) target_norms = defaultdict(dict) ssim_vals = defaultdict(dict) max_vals = dict() # use dict updates to handle duplicate slices for val_log in val_logs: losses.append(val_log["val_loss"].view(-1)) for k in val_log["mse_vals"].keys(): mse_vals[k].update(val_log["mse_vals"][k]) for k in val_log["target_norms"].keys(): target_norms[k].update(val_log["target_norms"][k]) for k in val_log["ssim_vals"].keys(): ssim_vals[k].update(val_log["ssim_vals"][k]) for k in val_log["max_vals"]: max_vals[k] = val_log["max_vals"][k] # check to make sure we have all files in all metrics assert ( mse_vals.keys() == target_norms.keys() == ssim_vals.keys() == max_vals.keys() ) # apply means across image volumes metrics = {"nmse": 0, "ssim": 0, "psnr": 0} local_examples = 0 for fname in mse_vals.keys(): local_examples = local_examples + 1 mse_val = torch.mean( torch.cat([v.view(-1) for _, v in mse_vals[fname].items()]) ) target_norm = torch.mean( torch.cat([v.view(-1) for _, v in target_norms[fname].items()]) ) metrics["nmse"] = metrics["nmse"] + mse_val / target_norm metrics["psnr"] = ( metrics["psnr"] + 20 * torch.log10( torch.tensor( max_vals[fname], dtype=mse_val.dtype, device=mse_val.device ) ) - 10 * torch.log10(mse_val) ) metrics["ssim"] = metrics["ssim"] + torch.mean( torch.cat([v.view(-1) for _, v in ssim_vals[fname].items()]) ) # reduce across ddp via sum metrics["nmse"] = self.NMSE(metrics["nmse"]) metrics["ssim"] = self.SSIM(metrics["ssim"]) metrics["psnr"] = self.PSNR(metrics["psnr"]) tot_examples = self.TotExamples(torch.tensor(local_examples)) val_loss = self.ValLoss(torch.sum(torch.cat(losses))) tot_slice_examples = self.TotSliceExamples( torch.tensor(len(losses), dtype=torch.float) ) self.log("validation_loss", val_loss / tot_slice_examples, prog_bar=True) for metric, value in metrics.items(): self.log(f"val_metrics/{metric}", value / tot_examples) def test_step(self, batch, batch_idx): kspace, masked_kspace, mask, _, fname, slice_num, _, crop_size = batch crop_size = crop_size[0] # always have a batch size of 1 for varnet output, extra_outputs = self(kspace, masked_kspace, mask) # check for FLAIR 203 if output.shape[-1] < crop_size[1]: crop_size = (output.shape[-1], output.shape[-1]) output = transforms.center_crop(output, crop_size) return { "fname": fname, "slice": slice_num, "output": output.cpu().numpy(), } def configure_optimizers(self): # This needs to be a class attribute for storing of gradients workaround self.optim = torch.optim.Adam( self.parameters(), lr=self.lr, weight_decay=self.weight_decay ) scheduler = torch.optim.lr_scheduler.StepLR( self.optim, self.lr_step_size, self.lr_gamma ) return [self.optim], [scheduler] @staticmethod def add_model_specific_args(parent_parser): # pragma: no-cover """ Define parameters that only apply to this model """ parser = ArgumentParser(parents=[parent_parser], add_help=False) parser = MriModule.add_model_specific_args(parser) # param 
overwrites

        # network params
        parser.add_argument(
            "--num_cascades",
            default=12,
            type=int,
            help="Number of VarNet cascades",
        )
        parser.add_argument(
            "--pools",
            default=4,
            type=int,
            help="Number of U-Net pooling layers in VarNet blocks",
        )
        parser.add_argument(
            "--chans",
            default=18,
            type=int,
            help="Number of channels for U-Net in VarNet blocks",
        )
        parser.add_argument(
            "--sens_pools",
            default=4,
            type=int,
            help="Number of pooling layers for sense map estimation U-Net in VarNet",
        )
        parser.add_argument(
            "--sens_chans",
            default=8,
            type=int,
            help="Number of channels for sense map estimation U-Net in VarNet",
        )

        # training params (opt)
        parser.add_argument(
            "--lr", default=0.0003, type=float, help="Adam learning rate"
        )
        parser.add_argument(
            "--lr_step_size",
            default=40,
            type=int,
            help="Epoch at which to decrease step size",
        )
        parser.add_argument(
            "--lr_gamma",
            default=0.1,
            type=float,
            help="Extent to which step size should be decreased",
        )
        parser.add_argument(
            "--weight_decay",
            default=0.0,
            type=float,
            help="Strength of weight decay regularization",
        )

        return parser
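
A minimal sketch of wiring the CLI above to the module, assuming the parent MriModule flags also all carry defaults; the empty argv and the resulting model are illustrative only, not part of this record:

from argparse import ArgumentParser

parent = ArgumentParser(add_help=False)
parser = VarNetModule.add_model_specific_args(parent)
args = parser.parse_args([])  # fall back to the defaults defined above

model = VarNetModule(
    num_cascades=args.num_cascades,
    pools=args.pools,
    chans=args.chans,
    sens_pools=args.sens_pools,
    sens_chans=args.sens_chans,
    lr=args.lr,
    lr_step_size=args.lr_step_size,
    lr_gamma=args.lr_gamma,
    weight_decay=args.weight_decay,
)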
[ "fastmri.data.transforms.center_crop_to_smallest", "numpy.zeros_like", "torch.optim.lr_scheduler.StepLR", "argparse.ArgumentParser", "fastmri.ifft2c", "torch.cat", "collections.defaultdict", "fastmri.data.transforms.center_crop", "fastmri.pl_modules.mri_module.MriModule.add_model_specific_args", "fastmri.models.varnet.NormUnet", "torch.log10", "fastmri.complex_conj", "fastmri.SSIMLoss", "fastmri.evaluate.ssim", "fastmri.models.adaptive_varnet.AdaptiveSensitivityModel", "torch.abs", "fastmri.evaluate.mse", "torch.tensor" ]
[((2896, 2982), 'fastmri.models.adaptive_varnet.AdaptiveSensitivityModel', 'AdaptiveSensitivityModel', (['sens_chans', 'sens_pools'], {'num_sense_lines': 'num_sense_lines'}), '(sens_chans, sens_pools, num_sense_lines=\n num_sense_lines)\n', (2920, 2982), False, 'from fastmri.models.adaptive_varnet import AdaptiveSensitivityModel, AdaptiveVarNetBlock\n'), ((3490, 3507), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (3501, 3507), False, 'from collections import defaultdict\n'), ((4684, 4701), 'fastmri.ifft2c', 'fastmri.ifft2c', (['x'], {}), '(x)\n', (4698, 4701), False, 'import fastmri\n'), ((9169, 9187), 'fastmri.SSIMLoss', 'fastmri.SSIMLoss', ([], {}), '()\n', (9185, 9187), False, 'import fastmri\n'), ((9455, 9511), 'fastmri.data.transforms.center_crop_to_smallest', 'transforms.center_crop_to_smallest', (['batch.target', 'output'], {}), '(batch.target, output)\n', (9489, 9511), False, 'from fastmri.data import transforms\n'), ((11059, 11076), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (11070, 11076), False, 'from collections import defaultdict\n'), ((11100, 11117), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (11111, 11117), False, 'from collections import defaultdict\n'), ((11138, 11155), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (11149, 11155), False, 'from collections import defaultdict\n'), ((12382, 12438), 'fastmri.data.transforms.center_crop_to_smallest', 'transforms.center_crop_to_smallest', (['batch.target', 'output'], {}), '(batch.target, output)\n', (12416, 12438), False, 'from fastmri.data import transforms\n'), ((14952, 14969), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (14963, 14969), False, 'from collections import defaultdict\n'), ((14993, 15010), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (15004, 15010), False, 'from collections import defaultdict\n'), ((15031, 15048), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (15042, 15048), False, 'from collections import defaultdict\n'), ((16131, 16148), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (16142, 16148), False, 'from collections import defaultdict\n'), ((16172, 16189), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (16183, 16189), False, 'from collections import defaultdict\n'), ((16210, 16227), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (16221, 16227), False, 'from collections import defaultdict\n'), ((18878, 18895), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (18889, 18895), False, 'from collections import defaultdict\n'), ((18919, 18936), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (18930, 18936), False, 'from collections import defaultdict\n'), ((18957, 18974), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (18968, 18974), False, 'from collections import defaultdict\n'), ((21879, 21920), 'fastmri.data.transforms.center_crop', 'transforms.center_crop', (['output', 'crop_size'], {}), '(output, crop_size)\n', (21901, 21920), False, 'from fastmri.data import transforms\n'), ((22314, 22391), 'torch.optim.lr_scheduler.StepLR', 'torch.optim.lr_scheduler.StepLR', (['self.optim', 'self.lr_step_size', 'self.lr_gamma'], {}), '(self.optim, self.lr_step_size, self.lr_gamma)\n', (22345, 22391), False, 'import torch\n'), ((22640, 22695), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'parents': 
'[parent_parser]', 'add_help': '(False)'}), '(parents=[parent_parser], add_help=False)\n', (22654, 22695), False, 'from argparse import ArgumentParser\n'), ((22713, 22754), 'fastmri.pl_modules.mri_module.MriModule.add_model_specific_args', 'MriModule.add_model_specific_args', (['parser'], {}), '(parser)\n', (22746, 22754), False, 'from fastmri.pl_modules.mri_module import MriModule\n'), ((18346, 18374), 'torch.tensor', 'torch.tensor', (['local_examples'], {}), '(local_examples)\n', (18358, 18374), False, 'import torch\n'), ((21050, 21078), 'torch.tensor', 'torch.tensor', (['local_examples'], {}), '(local_examples)\n', (21062, 21078), False, 'import torch\n'), ((4512, 4539), 'fastmri.ifft2c', 'fastmri.ifft2c', (['kspace_pred'], {}), '(kspace_pred)\n', (4526, 4539), False, 'import fastmri\n'), ((14556, 14582), 'torch.abs', 'torch.abs', (['(target - output)'], {}), '(target - output)\n', (14565, 14582), False, 'import torch\n'), ((18422, 18439), 'torch.cat', 'torch.cat', (['losses'], {}), '(losses)\n', (18431, 18439), False, 'import torch\n'), ((21122, 21139), 'torch.cat', 'torch.cat', (['losses'], {}), '(losses)\n', (21131, 21139), False, 'import torch\n'), ((3110, 3132), 'fastmri.models.varnet.NormUnet', 'NormUnet', (['chans', 'pools'], {}), '(chans, pools)\n', (3118, 3132), False, 'from fastmri.models.varnet import NormUnet\n'), ((4740, 4771), 'fastmri.complex_conj', 'fastmri.complex_conj', (['sens_maps'], {}), '(sens_maps)\n', (4760, 4771), False, 'import fastmri\n'), ((17904, 17924), 'torch.log10', 'torch.log10', (['mse_val'], {}), '(mse_val)\n', (17915, 17924), False, 'import torch\n'), ((20628, 20648), 'torch.log10', 'torch.log10', (['mse_val'], {}), '(mse_val)\n', (20639, 20648), False, 'import torch\n'), ((11561, 11589), 'fastmri.evaluate.mse', 'evaluate.mse', (['target', 'output'], {}), '(target, output)\n', (11573, 11589), False, 'from fastmri import evaluate\n'), ((11825, 11891), 'fastmri.evaluate.ssim', 'evaluate.ssim', (['target[None, ...]', 'output[None, ...]'], {'maxval': 'maxval'}), '(target[None, ...], output[None, ...], maxval=maxval)\n', (11838, 11891), False, 'from fastmri import evaluate\n'), ((15435, 15463), 'fastmri.evaluate.mse', 'evaluate.mse', (['target', 'output'], {}), '(target, output)\n', (15447, 15463), False, 'from fastmri import evaluate\n'), ((15699, 15765), 'fastmri.evaluate.ssim', 'evaluate.ssim', (['target[None, ...]', 'output[None, ...]'], {'maxval': 'maxval'}), '(target[None, ...], output[None, ...], maxval=maxval)\n', (15712, 15765), False, 'from fastmri import evaluate\n'), ((11708, 11729), 'numpy.zeros_like', 'np.zeros_like', (['target'], {}), '(target)\n', (11721, 11729), True, 'import numpy as np\n'), ((15582, 15603), 'numpy.zeros_like', 'np.zeros_like', (['target'], {}), '(target)\n', (15595, 15603), True, 'import numpy as np\n'), ((17743, 17816), 'torch.tensor', 'torch.tensor', (['max_vals[fname]'], {'dtype': 'mse_val.dtype', 'device': 'mse_val.device'}), '(max_vals[fname], dtype=mse_val.dtype, device=mse_val.device)\n', (17755, 17816), False, 'import torch\n'), ((20467, 20540), 'torch.tensor', 'torch.tensor', (['max_vals[fname]'], {'dtype': 'mse_val.dtype', 'device': 'mse_val.device'}), '(max_vals[fname], dtype=mse_val.dtype, device=mse_val.device)\n', (20479, 20540), False, 'import torch\n')]
from jslinker.js_file_stitcher import JsFileStitcher
import sys
import os


class JsStitcherCUI:

    @staticmethod
    def showHelp():
        print("""
Run: jsstitcher inputfile

Works like a simplified version of the C++ preprocessor.

- require ("<file path>");

    Tells jsstitcher to include the file content of <file path> in the output.

    E.g. fileA:
        require ("fileB.js");

    IMPORTANT: Use exactly the pattern described above:
        - There must be exactly one space before the bracket
        - Use double quotes ( " ) only - no single quotes
        - There must be a semicolon at the end

Constraints:
- Does not detect dependency loops and would run forever on one.
  (E.g. A -> B -> C -> A)
        """)

    def _run(self, infile):
        stitcher = JsFileStitcher([infile])
        result = stitcher.run()
        if result is True:
            print(stitcher.getStitchedContent())
        else:
            print(result)

    def run(self, args):
        if len(args) < 1:
            self.__class__.showHelp()
            return
        infile = args[0]
        # normalise to an absolute path before stitching
        infile = os.path.abspath(infile)
        self._run(infile)
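
A plausible command-line entry point for the class above; this block is an assumption (the original module defines no __main__ guard):

if __name__ == "__main__":
    # forward everything after the script name, as showHelp() documents
    JsStitcherCUI().run(sys.argv[1:])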
[ "os.path.abspath", "jslinker.js_file_stitcher.JsFileStitcher" ]
[((772, 796), 'jslinker.js_file_stitcher.JsFileStitcher', 'JsFileStitcher', (['[infile]'], {}), '([infile])\n', (786, 796), False, 'from jslinker.js_file_stitcher import JsFileStitcher\n'), ((1045, 1068), 'os.path.abspath', 'os.path.abspath', (['infile'], {}), '(infile)\n', (1060, 1068), False, 'import os\n')]
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
loading_dataset.py

Created on Thu May 3 12:47:36 2018

@author: sungkyun
"""
import torch
from torch.utils.data.dataset import Dataset
#from torch import from_numpy
import numpy as np
import pandas as pd
#from sklearn import preprocessing
#from sklearn.preprocessing import StandardScaler
#from sklearn.externals import joblib
import glob
from nnmnkwii import minmax_scale, scale

DIM_INDEX = dict()
DIM_INDEX['linguistic'] = np.arange(0,420)  # source: /linguistic
DIM_INDEX['f0'] = [0]                    # source: /pyworld
DIM_INDEX['log-f0'] = [1]                # source: /pyworld
DIM_INDEX['vuv'] = [2]                   # source: /pyworld
DIM_INDEX['bap'] = [3]                   # source: /pyworld
DIM_INDEX['melcep'] = np.arange(4,64)    # source: /pyworld
DIM_INDEX['pyspec'] = np.arange(64,577)  # source: /pyworld
DIM_INDEX['melspec'] = np.arange(0, 128) # source: /melmfcc
DIM_INDEX['mfcc'] = np.arange(128,153)  # source: /melmfcc


class CmuArcticDataset(Dataset):
    def __init__(self, data_root_dir=None, random_zpad=bool, cond_feature_select=None, transform=None):
        #data_root_dir = 'data/processed_slt_arctic/TRAIN/'
        #data_root_dir = 'data/processed_slt_arctic/TEST/'
        self.mulaw_filepaths = sorted(glob.glob(data_root_dir + 'mulaw/*.npy'))
        self.linguistic_filepaths = sorted(glob.glob(data_root_dir + 'linguistic/*.npy'))
        self.melmfcc_filepaths = sorted(glob.glob(data_root_dir + 'melmfcc/*.npy'))
        self.pyworld_filepaths = sorted(glob.glob(data_root_dir + 'pyworld/*.npy'))
        self.file_ids = [path.split('/')[-1][:-4] for path in self.mulaw_filepaths]
        self.random_zpad = random_zpad
        self.cond_feature_select = cond_feature_select  # ['linguistic', 'f0', 'log-f0', 'vuv', 'bap', 'melcep', 'pyspec', 'melspec', 'mfcc']
        self.transform = transform
        self.scale_factor = np.load(data_root_dir + '../scale_factors.npy')

        # Construct conditional feature selection info
        global DIM_INDEX
        self.cond_info = dict()
        self.cond_dim = 0  # total dimension of condition features
        for sel in self.cond_feature_select:
            self.cond_info[sel] = np.arange(self.cond_dim, self.cond_dim + len(DIM_INDEX[sel]))
            self.cond_dim += len(DIM_INDEX[sel])

    def __getitem__(self, index):
        # Get 3 items: (file_id, mulaw, cond)
        file_id = self.file_ids[index]
        x = np.load(self.mulaw_filepaths[index])       # size(x) = (T,)
        cond = np.empty((len(x),0), np.float16)        # size(cond) = (T,d)
        cond_linguistic, cond_pyworld, cond_melmfcc = [], [], []

        if any(sel in self.cond_feature_select for sel in ['linguistic']):
            cond_linguistic = np.load(self.linguistic_filepaths[index])
        if any(sel in self.cond_feature_select for sel in ['f0', 'log-f0', 'vuv', 'bap', 'melcep', 'pyspec']):
            cond_pyworld = np.load(self.pyworld_filepaths[index])
        if any(sel in self.cond_feature_select for sel in ['melspec', 'mfcc']):
            cond_melmfcc = np.load(self.melmfcc_filepaths[index])

        global DIM_INDEX
        for sel in self.cond_feature_select:
            if sel == 'linguistic':
                cond = np.hstack((cond, cond_linguistic))
            elif sel in ['f0', 'log-f0', 'vuv', 'bap', 'melcep', 'pyspec']:
                cond = np.hstack((cond, cond_pyworld[:,DIM_INDEX[sel]]))
            elif sel in ['melspec', 'mfcc']:
                cond = np.hstack((cond, cond_melmfcc[:,DIM_INDEX[sel]]))
        assert cond.shape[1] == self.cond_dim  # check that the stacked cond feature size matches

        # Feature-scaling
        cond = self.featScaler(cond)
        # Transpose
        cond = np.transpose(cond)  # size(cond) = (T,d) --> (d,T): required for pytorch dataloading

        # Random zeropadding 20~50%
        if self.random_zpad is True:
            zpad_sz = int(len(x) * np.random.uniform(0.2,0.5))
            x[0:zpad_sz] = 128  # fill first <zpad_sz> samples with zeros (in mulaw-enc, 128)
cond[:,0:zpad_sz] = 0.

        return file_id, torch.LongTensor(x), cond

    def featScaler(self, feat):
        for sel in self.cond_feature_select:
            if sel == 'linguistic':
                feat[:,self.cond_info[sel]] = minmax_scale(feat[:,self.cond_info[sel]],
                                                           self.scale_factor['linguistic_min'],
                                                           self.scale_factor['linguistic_max'],
                                                           feature_range=(0.01, 0.99))
        return feat

    def __len__(self):
        return len(self.file_ids)  # return the number of examples that we have


class YesNoDataset(Dataset):
    def __init__(self, csv_path=None, zpad_target_len=int, transform=None):
        # Internal variables
        #csv_path = 'data/processed_yesno/test.csv'
        #csv_path = 'data/processed_yesno/train.csv'
        self.zpad_target_len = zpad_target_len
        self.transform = transform
        self.file_ids = None
        self.mulaw_filepaths = None
        self.mfcc_filepaths = None

        # Reading .csv file
        df = pd.read_csv(csv_path, index_col=0)  # ['file_id', 'mulaw_filepath', 'mfcc_filepath']
        self.file_ids = df.iloc[:,0]
        self.mulaw_filepaths = df.iloc[:,1]
        self.mfcc_filepaths = df.iloc[:,2]

    def __getitem__(self, index):
        # Get 3 items: (file_id, x = mulaw, cond = mfcc)
        file_id = self.file_ids[index]
        x = np.load(self.mulaw_filepaths[index])     # size = (T,)
        cond = np.load(self.mfcc_filepaths[index])   # size = (25,T)

        if self.zpad_target_len:
            x_length = x.shape[0]
            if x_length > self.zpad_target_len:
                x = x[0:self.zpad_target_len]
            elif x_length < self.zpad_target_len:
                zpad_sz = self.zpad_target_len - x_length
                x = np.pad(x, (zpad_sz,0), mode='constant', constant_values=128)  # left-pad to the target length with the mu-law zero code (128)

            cond_length = cond.shape[1]
            if cond_length > self.zpad_target_len:
                cond = cond[:, 0:self.zpad_target_len]
            elif cond_length < self.zpad_target_len:
                zpad_sz = self.zpad_target_len - cond_length
                cond = np.pad(cond, ((0,0),(zpad_sz, 0)), mode='constant')

        return file_id, torch.LongTensor(x), cond

    def __len__(self):
        return len(self.file_ids)  # return the number of examples that we have
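
A minimal way to exercise CmuArcticDataset; the directory layout follows the paths in the class's own comments, the feature names come from DIM_INDEX, and the sketch assumes the preprocessed slt_arctic features and scale_factors.npy exist as produced by the accompanying preprocessing:

from torch.utils.data import DataLoader

train_set = CmuArcticDataset(
    data_root_dir='data/processed_slt_arctic/TRAIN/',
    random_zpad=True,
    cond_feature_select=['linguistic', 'log-f0', 'vuv'])
loader = DataLoader(train_set, batch_size=1, shuffle=True)
file_id, x, cond = next(iter(loader))  # x: (1, T) mu-law codes, cond: (1, 422, T)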
[ "numpy.pad", "numpy.random.uniform", "numpy.load", "nnmnkwii.minmax_scale", "torch.LongTensor", "pandas.read_csv", "numpy.transpose", "numpy.hstack", "numpy.arange", "glob.glob" ]
[((482, 499), 'numpy.arange', 'np.arange', (['(0)', '(420)'], {}), '(0, 420)\n', (491, 499), True, 'import numpy as np\n'), ((799, 815), 'numpy.arange', 'np.arange', (['(4)', '(64)'], {}), '(4, 64)\n', (808, 815), True, 'import numpy as np\n'), ((861, 879), 'numpy.arange', 'np.arange', (['(64)', '(577)'], {}), '(64, 577)\n', (870, 879), True, 'import numpy as np\n'), ((924, 941), 'numpy.arange', 'np.arange', (['(0)', '(128)'], {}), '(0, 128)\n', (933, 941), True, 'import numpy as np\n'), ((983, 1002), 'numpy.arange', 'np.arange', (['(128)', '(153)'], {}), '(128, 153)\n', (992, 1002), True, 'import numpy as np\n'), ((1964, 2011), 'numpy.load', 'np.load', (["(data_root_dir + '../scale_factors.npy')"], {}), "(data_root_dir + '../scale_factors.npy')\n", (1971, 2011), True, 'import numpy as np\n'), ((2538, 2574), 'numpy.load', 'np.load', (['self.mulaw_filepaths[index]'], {}), '(self.mulaw_filepaths[index])\n', (2545, 2574), True, 'import numpy as np\n'), ((3917, 3935), 'numpy.transpose', 'np.transpose', (['cond'], {}), '(cond)\n', (3929, 3935), True, 'import numpy as np\n'), ((5300, 5334), 'pandas.read_csv', 'pd.read_csv', (['csv_path'], {'index_col': '(0)'}), '(csv_path, index_col=0)\n', (5311, 5334), True, 'import pandas as pd\n'), ((5669, 5705), 'numpy.load', 'np.load', (['self.mulaw_filepaths[index]'], {}), '(self.mulaw_filepaths[index])\n', (5676, 5705), True, 'import numpy as np\n'), ((5735, 5770), 'numpy.load', 'np.load', (['self.mfcc_filepaths[index]'], {}), '(self.mfcc_filepaths[index])\n', (5742, 5770), True, 'import numpy as np\n'), ((1329, 1369), 'glob.glob', 'glob.glob', (["(data_root_dir + 'mulaw/*.npy')"], {}), "(data_root_dir + 'mulaw/*.npy')\n", (1338, 1369), False, 'import glob\n'), ((1414, 1459), 'glob.glob', 'glob.glob', (["(data_root_dir + 'linguistic/*.npy')"], {}), "(data_root_dir + 'linguistic/*.npy')\n", (1423, 1459), False, 'import glob\n'), ((1501, 1543), 'glob.glob', 'glob.glob', (["(data_root_dir + 'melmfcc/*.npy')"], {}), "(data_root_dir + 'melmfcc/*.npy')\n", (1510, 1543), False, 'import glob\n'), ((1585, 1627), 'glob.glob', 'glob.glob', (["(data_root_dir + 'pyworld/*.npy')"], {}), "(data_root_dir + 'pyworld/*.npy')\n", (1594, 1627), False, 'import glob\n'), ((2881, 2922), 'numpy.load', 'np.load', (['self.linguistic_filepaths[index]'], {}), '(self.linguistic_filepaths[index])\n', (2888, 2922), True, 'import numpy as np\n'), ((3061, 3099), 'numpy.load', 'np.load', (['self.pyworld_filepaths[index]'], {}), '(self.pyworld_filepaths[index])\n', (3068, 3099), True, 'import numpy as np\n'), ((3211, 3249), 'numpy.load', 'np.load', (['self.melmfcc_filepaths[index]'], {}), '(self.melmfcc_filepaths[index])\n', (3218, 3249), True, 'import numpy as np\n'), ((4308, 4327), 'torch.LongTensor', 'torch.LongTensor', (['x'], {}), '(x)\n', (4324, 4327), False, 'import torch\n'), ((6562, 6581), 'torch.LongTensor', 'torch.LongTensor', (['x'], {}), '(x)\n', (6578, 6581), False, 'import torch\n'), ((3388, 3422), 'numpy.hstack', 'np.hstack', (['(cond, cond_linguistic)'], {}), '((cond, cond_linguistic))\n', (3397, 3422), True, 'import numpy as np\n'), ((4517, 4671), 'nnmnkwii.minmax_scale', 'minmax_scale', (['feat[:, self.cond_info[sel]]', "self.scale_factor['linguistic_min']", "self.scale_factor['linguistic_max']"], {'feature_range': '(0.01, 0.99)'}), "(feat[:, self.cond_info[sel]], self.scale_factor[\n 'linguistic_min'], self.scale_factor['linguistic_max'], feature_range=(\n 0.01, 0.99))\n", (4529, 4671), False, 'from nnmnkwii import minmax_scale, scale\n'), ((3522, 3572), 'numpy.hstack', 
'np.hstack', (['(cond, cond_pyworld[:, DIM_INDEX[sel]])'], {}), '((cond, cond_pyworld[:, DIM_INDEX[sel]]))\n', (3531, 3572), True, 'import numpy as np\n'), ((4126, 4153), 'numpy.random.uniform', 'np.random.uniform', (['(0.2)', '(0.5)'], {}), '(0.2, 0.5)\n', (4143, 4153), True, 'import numpy as np\n'), ((6085, 6146), 'numpy.pad', 'np.pad', (['x', '(zpad_sz, 0)'], {'mode': '"""constant"""', 'constant_values': '(128)'}), "(x, (zpad_sz, 0), mode='constant', constant_values=128)\n", (6091, 6146), True, 'import numpy as np\n'), ((6486, 6539), 'numpy.pad', 'np.pad', (['cond', '((0, 0), (zpad_sz, 0))'], {'mode': '"""constant"""'}), "(cond, ((0, 0), (zpad_sz, 0)), mode='constant')\n", (6492, 6539), True, 'import numpy as np\n'), ((3640, 3690), 'numpy.hstack', 'np.hstack', (['(cond, cond_melmfcc[:, DIM_INDEX[sel]])'], {}), '((cond, cond_melmfcc[:, DIM_INDEX[sel]]))\n', (3649, 3690), True, 'import numpy as np\n')]
import os from glob import glob import argparse import cherrypy from jinja2 import Environment, FileSystemLoader env = Environment(loader=FileSystemLoader('templates')) DATA_FOLDER = 'data' class Server(object): @cherrypy.expose def index(self, data=None): data = data if data is not None else DATA_FOLDER csv_file = '{0}.csv'.format(data) images_folder = data if not os.path.exists(os.path.join('public', csv_file)): return "Error: csv file does not exist in public folder: {0}".format(csv_file) if not os.path.exists(os.path.join('public', images_folder)): return "Error: data folder does not exist in public folder: {0}".format(images_folder) if len(glob(os.path.join('public', images_folder)+'/*')) <= 1: return "Error: data folder does not seem to contain any images" tmpl = env.get_template('index.html') return tmpl.render(csv_file=csv_file, images_folder=images_folder) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Visualize 2D data. Useful with t-sne data.') parser.add_argument('--host', type=str, default='0.0.0.0', help='socket host') parser.add_argument('--port', type=int, default=8080, help='socket port') parser.add_argument('--data', type=str, default='data', help='data path') args = parser.parse_args() DATA_FOLDER = args.data conf = { 'global' : { 'server.socket_host' : args.host, 'server.socket_port' : args.port }, '/': { 'tools.sessions.on': True, 'tools.staticdir.root': os.path.abspath(os.getcwd()) }, '/static': { 'tools.staticdir.on': True, 'tools.staticdir.dir': './public' } } cherrypy.quickstart(Server(), '/', conf)
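
The index() handler resolves ?data=<name> against the public/ directory, so the on-disk contract can be summarized as follows ('tsne' is an invented dataset name; the template's exact use of the csv is an assumption):

# Hypothetical invocation:
#
#   python server.py --host 127.0.0.1 --port 8080 --data tsne
#
# For a request to http://127.0.0.1:8080/?data=tsne the handler expects
#
#   public/tsne.csv   the 2D point data handed to templates/index.html
#   public/tsne/      the image files referenced from the csv
#
# and returns an error string if either is missing or the folder holds
# no images.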
[ "os.getcwd", "jinja2.FileSystemLoader", "os.path.join", "argparse.ArgumentParser" ]
[((1034, 1120), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Visualize 2D data. Useful with t-sne data."""'}), "(description=\n 'Visualize 2D data. Useful with t-sne data.')\n", (1057, 1120), False, 'import argparse\n'), ((139, 168), 'jinja2.FileSystemLoader', 'FileSystemLoader', (['"""templates"""'], {}), "('templates')\n", (155, 168), False, 'from jinja2 import Environment, FileSystemLoader\n'), ((427, 459), 'os.path.join', 'os.path.join', (['"""public"""', 'csv_file'], {}), "('public', csv_file)\n", (439, 459), False, 'import os\n'), ((584, 621), 'os.path.join', 'os.path.join', (['"""public"""', 'images_folder'], {}), "('public', images_folder)\n", (596, 621), False, 'import os\n'), ((1661, 1672), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1670, 1672), False, 'import os\n'), ((744, 781), 'os.path.join', 'os.path.join', (['"""public"""', 'images_folder'], {}), "('public', images_folder)\n", (756, 781), False, 'import os\n')]
# API FORMS
from django import forms


class FormularioContacto(forms.Form):
    asunto = forms.CharField()
    email = forms.EmailField()
    mensaje = forms.CharField()
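
Standard Django validation flow for the form above; the payload is illustrative (in a view it would normally be request.POST):

form = FormularioContacto(data={
    "asunto": "Consulta",
    "email": "user@example.com",
    "mensaje": "Hola",
})
if form.is_valid():
    asunto = form.cleaned_data["asunto"]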
[ "django.forms.CharField", "django.forms.EmailField" ]
[((89, 106), 'django.forms.CharField', 'forms.CharField', ([], {}), '()\n', (104, 106), False, 'from django import forms\n'), ((117, 135), 'django.forms.EmailField', 'forms.EmailField', ([], {}), '()\n', (133, 135), False, 'from django import forms\n'), ((148, 165), 'django.forms.CharField', 'forms.CharField', ([], {}), '()\n', (163, 165), False, 'from django import forms\n')]
from base64 import b64encode, b64decode
from random import shuffle, sample, randint
from fractions import Fraction

# letter <-> code tables; codes are delimited by appended zeros during encoding
a = {"a":1,"b":2,"c":3,"d":4,"e":5,"f":6,"g":7,"h":8,"i":9,"j":10,"k":11,"l":12,
     "m":13,"n":14,"o":15,"p":16,"q":17,"r":18,"s":19,"t":20,"u":21,"v":22,"w":23,
     "x":24,"y":25,"z":26,"?" : 0}
b = {0:"?", 1: 'a', 2: 'b', 3: 'c', 4: 'd', 5: 'e', 6: 'f', 7: 'g', 8: 'h', 9: 'i',
     10: 'j', 11: 'k', 12: 'l', 13: 'm', 14: 'n', 15: 'o', 16: 'p', 17: 'q', 18: 'r',
     19: 's', 20: 't', 21: 'u', 22: 'v', 23: 'w', 24: 'x', 25: 'y', 26: 'z'}
special = list("!@#$%^&*()~`")


def shikharEncode(data):
    specials = list(special)  # work on a copy so the module-level list is not shuffled
    shuffle(specials)
    # map each letter to its numeric code, delimited by appended zeros
    qw = ""
    for l in data:
        qw += str(a[l]) + str(0)
    qw = int(qw)
    # blind the code string by multiplying with a random key
    random_num = randint(9999999, 99999999)
    multiply = qw * random_num
    # re-letter the digits of the product ('0' stays '0' as a delimiter)
    new_str = ""
    for i, num in enumerate(str(multiply)):
        if num == "0":
            new_str += num
        else:
            new_str += b[int(num)]
    # sprinkle ten special characters at random positions as decoys
    len_new_str = len(new_str)
    samples = sample(range(1, len_new_str), 10)
    new_str_lst = list(new_str)
    for i, key in enumerate(samples):
        new_str_lst.insert(key, specials[i])
    # append the lettered key so the receiver can undo the multiplication
    encoded_key = "".join([b[int(x)] for x in str(random_num)])
    join = "".join(new_str_lst) + "|~|" + encoded_key
    return b64encode(bytes(join, "utf-8"))


def shikharDecode(data):
    res = b64decode(data).decode().split("|~|")
    random_num = int("".join([str(a[x]) for x in list(res[1])]))
    # drop the decoy special characters, then recover the digit string
    lst = [x for x in list(res[0]) if x not in special]
    number_str = ""
    for char in lst:
        if char == "0":
            number_str += "0"
        else:
            number_str += str(a[char])
    number_str = Fraction(int(number_str))
    random_num = Fraction(random_num)
    real_num = number_str / random_num  # exact division undoes the key
    get = str(real_num).split("0")
    return "".join([b[int(x)] for x in get if x != ""])
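
A round-trip check of the pair above. Note a limitation of the scheme: letter codes are delimited by appended zeros, so letters whose own code contains a zero digit ('j' = 10, 't' = 20) do not survive decoding; the example sticks to safe letters.

# The encoded token differs between runs (random key and random decoy
# positions), but decoding always recovers a zero-digit-free input.
token = shikharEncode("hello")
assert shikharDecode(token) == "hello"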
[ "random.shuffle", "fractions.Fraction", "random.randint", "base64.b64decode" ]
[((644, 661), 'random.shuffle', 'shuffle', (['specials'], {}), '(specials)\n', (651, 661), False, 'from random import shuffle, sample, randint\n'), ((758, 784), 'random.randint', 'randint', (['(9999999)', '(99999999)'], {}), '(9999999, 99999999)\n', (765, 784), False, 'from random import shuffle, sample, randint\n'), ((1709, 1729), 'fractions.Fraction', 'Fraction', (['random_num'], {}), '(random_num)\n', (1717, 1729), False, 'from fractions import Fraction\n'), ((1342, 1357), 'base64.b64decode', 'b64decode', (['data'], {}), '(data)\n', (1351, 1357), False, 'from base64 import b64encode, b64decode\n')]
# -*- coding: UTF-8 -*- # Copyright 2013-2016 <NAME> # # License: BSD (see file COPYING for details) """ .. autosummary:: :toctree: models """ from lino.ad import Plugin from django.utils.translation import ugettext_lazy as _ class Plugin(Plugin): verbose_name = _("Blog") needs_plugins = ['lino_xl.lib.topics'] def setup_main_menu(self, site, user_type, m): # mg = self.get_menu_group() mg = site.plugins.office m = m.add_menu(mg.app_label, mg.verbose_name) m.add_action('blogs.MyEntries') def setup_config_menu(self, site, user_type, m): mg = self.get_menu_group() m = m.add_menu(mg.app_label, mg.verbose_name) m.add_action('blogs.EntryTypes') def setup_explorer_menu(self, site, user_type, m): mg = self.get_menu_group() m = m.add_menu(mg.app_label, mg.verbose_name) m.add_action('blogs.AllEntries') # m.add_action('blogs.AllTaggings') def get_dashboard_items(self, user): from lino.core.dashboard import ActorItem yield ActorItem( self.site.models.blogs.LatestEntries, header_level=None) # yield CustomItem( # 'blogs.Entry.latest_entries', # self.models.blogs.Entry.latest_entries, max_num=10)
[ "django.utils.translation.ugettext_lazy", "lino.core.dashboard.ActorItem" ]
[((281, 290), 'django.utils.translation.ugettext_lazy', '_', (['"""Blog"""'], {}), "('Blog')\n", (282, 290), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1084, 1150), 'lino.core.dashboard.ActorItem', 'ActorItem', (['self.site.models.blogs.LatestEntries'], {'header_level': 'None'}), '(self.site.models.blogs.LatestEntries, header_level=None)\n', (1093, 1150), False, 'from lino.core.dashboard import ActorItem\n')]
import pandas as pd
import numpy as np


def combineData(df_list):
    # stack all frames at once, forward-fill gaps, and drop exact duplicates
    combined = pd.concat(df_list)
    combined = combined.fillna(method='ffill').drop_duplicates()
    return combined


def modifyWeapons(df):
    # keep the weapon name and collapse the perk columns into one string
    modified = pd.DataFrame()
    modified["Name"] = df["Name"]
    modified['perksList'] = df["Perks 0"].str.cat(
        df[["Perks 2", "Perks 3", "Perks 4", "Perks 5", "Perks 6"]], sep=', ')
    return modified
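
A toy frame exercising the two helpers; the column names come from modifyWeapons itself, and the perk values are made up:

weapons = pd.DataFrame({
    "Name": ["Bow"],
    "Perks 0": ["p0"], "Perks 2": ["p2"], "Perks 3": ["p3"],
    "Perks 4": ["p4"], "Perks 5": ["p5"], "Perks 6": ["p6"],
})
combined = combineData([weapons, weapons])      # duplicate rows collapse to one
print(modifyWeapons(combined)["perksList"][0])  # "p0, p2, p3, p4, p5, p6"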
[ "pandas.DataFrame" ]
[((87, 101), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (99, 101), True, 'import pandas as pd\n'), ((293, 307), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (305, 307), True, 'import pandas as pd\n')]
# -*- coding: utf-8 -*- # Generated by Django 1.11.18 on 2019-02-11 14:53 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Student', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('nom', models.CharField(default=b'', max_length=100, verbose_name=b'Nom')), ('cognom', models.CharField(default=b'', max_length=100, verbose_name=b'Cognom')), ('dni', models.CharField(default=b'', max_length=100, unique=True, verbose_name=b'DNI')), ('adreca', models.CharField(default=b'', max_length=100, verbose_name=b'Adre\xc3\xa7a')), ('poblacio', models.CharField(default=b'', max_length=100, verbose_name=b'Poblaci\xc3\xb3')), ('codi_postal', models.CharField(default=b'', max_length=100, verbose_name=b'Codi Postal')), ('telefon', models.CharField(default=b'', max_length=100, verbose_name=b'Tel\xc3\xa8fon')), ('correu_electronic', models.CharField(default=b'', max_length=100, verbose_name=b'Correu electronic')), ('edat', models.CharField(default=b'', max_length=100, verbose_name=b'Edat')), ('estudis', models.CharField(default=b'', max_length=1000, verbose_name=b'Estudis')), ('experiencia', models.CharField(default=b'', max_length=1000, verbose_name=b'Experiencia')), ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name=b'Nom del usuari')), ], ), ]
[ "django.db.models.CharField", "django.db.models.OneToOneField", "django.db.migrations.swappable_dependency", "django.db.models.AutoField" ]
[((313, 370), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (344, 370), False, 'from django.db import migrations, models\n'), ((502, 595), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (518, 595), False, 'from django.db import migrations, models\n'), ((618, 684), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)', 'verbose_name': "b'Nom'"}), "(default=b'', max_length=100, verbose_name=b'Nom')\n", (634, 684), False, 'from django.db import migrations, models\n'), ((714, 783), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)', 'verbose_name': "b'Cognom'"}), "(default=b'', max_length=100, verbose_name=b'Cognom')\n", (730, 783), False, 'from django.db import migrations, models\n'), ((810, 889), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)', 'unique': '(True)', 'verbose_name': "b'DNI'"}), "(default=b'', max_length=100, unique=True, verbose_name=b'DNI')\n", (826, 889), False, 'from django.db import migrations, models\n'), ((919, 995), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)', 'verbose_name': "b'Adre\\xc3\\xa7a'"}), "(default=b'', max_length=100, verbose_name=b'Adre\\xc3\\xa7a')\n", (935, 995), False, 'from django.db import migrations, models\n'), ((1027, 1105), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)', 'verbose_name': "b'Poblaci\\xc3\\xb3'"}), "(default=b'', max_length=100, verbose_name=b'Poblaci\\xc3\\xb3')\n", (1043, 1105), False, 'from django.db import migrations, models\n'), ((1140, 1214), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)', 'verbose_name': "b'Codi Postal'"}), "(default=b'', max_length=100, verbose_name=b'Codi Postal')\n", (1156, 1214), False, 'from django.db import migrations, models\n'), ((1245, 1322), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)', 'verbose_name': "b'Tel\\xc3\\xa8fon'"}), "(default=b'', max_length=100, verbose_name=b'Tel\\xc3\\xa8fon')\n", (1261, 1322), False, 'from django.db import migrations, models\n'), ((1363, 1448), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)', 'verbose_name': "b'Correu electronic'"}), "(default=b'', max_length=100, verbose_name=b'Correu electronic'\n )\n", (1379, 1448), False, 'from django.db import migrations, models\n'), ((1471, 1538), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(100)', 'verbose_name': "b'Edat'"}), "(default=b'', max_length=100, verbose_name=b'Edat')\n", (1487, 1538), False, 'from django.db import migrations, models\n'), ((1569, 1640), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(1000)', 'verbose_name': "b'Estudis'"}), "(default=b'', max_length=1000, verbose_name=b'Estudis')\n", (1585, 1640), False, 'from django.db import migrations, models\n'), ((1675, 1750), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b''", 'max_length': '(1000)', 'verbose_name': "b'Experiencia'"}), "(default=b'', max_length=1000, 
verbose_name=b'Experiencia')\n", (1691, 1750), False, 'from django.db import migrations, models\n'), ((1778, 1909), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': "b'Nom del usuari'"}), "(on_delete=django.db.models.deletion.CASCADE, to=\n settings.AUTH_USER_MODEL, verbose_name=b'Nom del usuari')\n", (1798, 1909), False, 'from django.db import migrations, models\n')]
from __future__ import division, print_function, unicode_literals, absolute_import import unittest import sys sys.path.append("../ScopeReaders/") class TestImport(unittest.TestCase): def test_basic(self): import ScopeReaders as sr print(sr.__version__) self.assertTrue(True)
[ "sys.path.append" ]
[((110, 145), 'sys.path.append', 'sys.path.append', (['"""../ScopeReaders/"""'], {}), "('../ScopeReaders/')\n", (125, 145), False, 'import sys\n')]
from typing import Union, Optional from mason.clients.response import Response from mason.configurations.config import Config from mason.engines.execution.models.jobs import ExecutedJob, InvalidJob from mason.engines.execution.models.jobs.summary_job import SummaryJob from mason.engines.metastore.models.credentials import MetastoreCredentials, InvalidCredentials from mason.engines.metastore.models.table.table import Table from mason.engines.storage.models.path import Path from mason.operators.operator_definition import OperatorDefinition from mason.operators.operator_response import OperatorResponse, DelayedOperatorResponse from mason.parameters.validated_parameters import ValidatedParameters from mason.util.environment import MasonEnvironment class TableSummarize(OperatorDefinition): def run(self, env: MasonEnvironment, config: Config, parameters: ValidatedParameters, response: Response) -> OperatorResponse: database_name: str = parameters.get_required("database_name") table_name: str = parameters.get_required("table_name") read_headers: bool = isinstance(parameters.get_optional("read_headers"), str) options = {"read_headers": read_headers} table, response = config.metastore().get_table(database_name, table_name, options, response) if isinstance(table, Table): summary, response = config.metastore().summarize_table(table, options, response) else: summary = table return OperatorResponse(response, summary) def run_async(self, env: MasonEnvironment, config: Config, parameters: ValidatedParameters, response: Response) -> DelayedOperatorResponse: database_name: str = parameters.get_required("database_name") table_name: str = parameters.get_required("table_name") read_headers: bool = isinstance(parameters.get_optional("read_headers"), str) out_path: Optional[str] = parameters.get_optional("output_path") input_path: Path = config.storage().table_path(database_name, table_name) if out_path: output_path: Path = config.storage().path(out_path) credentials: Union[MetastoreCredentials, InvalidCredentials] = config.metastore().credentials() if isinstance(credentials, MetastoreCredentials): job = SummaryJob(input_path, output_path, credentials, read_headers) run, response = config.execution().run_job(job) else: run = InvalidJob("Invalid Metastore Credentials") else: run = InvalidJob("Must specify output_path for asynchronous execution client") return DelayedOperatorResponse(run, response)
[ "mason.operators.operator_response.DelayedOperatorResponse", "mason.engines.execution.models.jobs.summary_job.SummaryJob", "mason.operators.operator_response.OperatorResponse", "mason.engines.execution.models.jobs.InvalidJob" ]
[((1508, 1543), 'mason.operators.operator_response.OperatorResponse', 'OperatorResponse', (['response', 'summary'], {}), '(response, summary)\n', (1524, 1543), False, 'from mason.operators.operator_response import OperatorResponse, DelayedOperatorResponse\n'), ((2699, 2737), 'mason.operators.operator_response.DelayedOperatorResponse', 'DelayedOperatorResponse', (['run', 'response'], {}), '(run, response)\n', (2722, 2737), False, 'from mason.operators.operator_response import OperatorResponse, DelayedOperatorResponse\n'), ((2602, 2674), 'mason.engines.execution.models.jobs.InvalidJob', 'InvalidJob', (['"""Must specify output_path for asynchronous execution client"""'], {}), "('Must specify output_path for asynchronous execution client')\n", (2612, 2674), False, 'from mason.engines.execution.models.jobs import ExecutedJob, InvalidJob\n'), ((2359, 2421), 'mason.engines.execution.models.jobs.summary_job.SummaryJob', 'SummaryJob', (['input_path', 'output_path', 'credentials', 'read_headers'], {}), '(input_path, output_path, credentials, read_headers)\n', (2369, 2421), False, 'from mason.engines.execution.models.jobs.summary_job import SummaryJob\n'), ((2526, 2569), 'mason.engines.execution.models.jobs.InvalidJob', 'InvalidJob', (['"""Invalid Metastore Credentials"""'], {}), "('Invalid Metastore Credentials')\n", (2536, 2569), False, 'from mason.engines.execution.models.jobs import ExecutedJob, InvalidJob\n')]
import numpy as np

import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable

from rlkit.torch.core import PyTorchModule
from rlkit.torch.networks import Mlp
from rlkit.torch import pytorch_util as ptu
from rlkit.torch.torch_meta_irl_algorithm import np_to_pytorch_batch
from rlkit.torch.irl.encoders.aggregators import sum_aggregator_unmasked, tanh_sum_aggregator_unmasked
from rlkit.torch.irl.encoders.aggregators import sum_aggregator, tanh_sum_aggregator
from rlkit.torch.distributions import ReparamMultivariateNormalDiag


class TrivialR2ZMap(PyTorchModule):
    def __init__(
        self,
        r_dim,
        z_dim,
        hid_dim,
        # subtracting a constant keeps the posterior close to deterministic,
        # which makes training easier before the KL regularization is turned on
        LOG_STD_SUBTRACT_VALUE=2.0
    ):
        self.save_init_params(locals())
        super().__init__()

        self.trunk = nn.Sequential(
            nn.Linear(r_dim, hid_dim),
            nn.BatchNorm1d(hid_dim),
            nn.ReLU(),
            nn.Linear(hid_dim, hid_dim),
            nn.BatchNorm1d(hid_dim),
            nn.ReLU()
        )
        self.mean_fc = nn.Linear(hid_dim, z_dim)
        self.log_sig_fc = nn.Linear(hid_dim, z_dim)

        self.LOG_STD_SUBTRACT_VALUE = LOG_STD_SUBTRACT_VALUE
        print('LOG STD SUBTRACT VALUE FOR APPROX POSTERIOR IS %f' % LOG_STD_SUBTRACT_VALUE)

    def forward(self, r):
        trunk_output = self.trunk(r)
        mean = self.mean_fc(trunk_output)
        log_sig = self.log_sig_fc(trunk_output) - self.LOG_STD_SUBTRACT_VALUE
        return mean, log_sig


class TimestepBasedEncoder(PyTorchModule):
    def __init__(
        self,
        input_dim,  # (s,a,s') or (s,s') depending on state-only
        r_dim,
        z_dim,
        enc_hid_dim,
        r2z_hid_dim,
        num_enc_layer_blocks,
        hid_act='relu',
        use_bn=True,
        within_traj_agg='sum',  # 'sum' or 'mean'
        state_only=False  # if state-only, we only condition on the states and not actions
    ):
        self.save_init_params(locals())
        super().__init__()

        if hid_act == 'relu':
            hid_act_class = nn.ReLU
        elif hid_act == 'tanh':
            hid_act_class = nn.Tanh
        else:
            raise NotImplementedError()

        self.r_dim, self.z_dim = r_dim, z_dim

        # build the timestep encoder
        mod_list = nn.ModuleList([nn.Linear(input_dim, enc_hid_dim)])
        if use_bn:
            mod_list.append(nn.BatchNorm1d(enc_hid_dim))
        mod_list.append(hid_act_class())

        for i in range(num_enc_layer_blocks - 1):
            mod_list.append(nn.Linear(enc_hid_dim, enc_hid_dim))
            if use_bn:
                mod_list.append(nn.BatchNorm1d(enc_hid_dim))
            mod_list.append(hid_act_class())

        mod_list.append(nn.Linear(enc_hid_dim, r_dim))
        self.timestep_encoder = nn.Sequential(*mod_list)

        assert within_traj_agg in ['sum', 'mean']
        self.use_sum_for_traj_agg = within_traj_agg == 'sum'
        print('\nWITHIN TRAJ AGG IS SUM: {}'.format(self.use_sum_for_traj_agg))

        # aggregator
        self.agg = sum_aggregator_unmasked
        self.agg_masked = sum_aggregator

        # build the r to z map
        self.r2z_map = TrivialR2ZMap(r_dim, z_dim, r2z_hid_dim)

        self.state_only = state_only
        print('STATE-ONLY ENCODER: {}'.format(self.state_only))

    def forward(self, context=None, mask=None, r=None):
        if r is None:
            obs = np.array([[d['observations'] for d in task_trajs] for task_trajs in context])
            next_obs = np.array([[d['next_observations'] for d in task_trajs] for task_trajs in context])
            if not self.state_only:
                acts = np.array([[d['actions'] for d in task_trajs] for task_trajs in context])
                all_timesteps = np.concatenate([obs, acts, next_obs], axis=-1)
            else:
                all_timesteps = np.concatenate([obs, next_obs], axis=-1)

            # FOR DEBUGGING THE ENCODER
            # all_timesteps = all_timesteps[:,:,-1:,:]

            all_timesteps = Variable(ptu.from_numpy(all_timesteps), requires_grad=False)

            # N_tasks x N_trajs x Len x Dim
            N_tasks, N_trajs, Len, Dim = all_timesteps.size(0), all_timesteps.size(1),
all_timesteps.size(2), all_timesteps.size(3) all_timesteps = all_timesteps.view(-1, Dim) embeddings = self.timestep_encoder(all_timesteps) embeddings = embeddings.view(N_tasks, N_trajs, Len, self.r_dim) if self.use_sum_for_traj_agg: traj_embeddings = torch.sum(embeddings, dim=2) else: traj_embeddings = torch.mean(embeddings, dim=2) # get r if mask is None: r = self.agg(traj_embeddings) else: r = self.agg_masked(traj_embeddings, mask) post_mean, post_log_sig_diag = self.r2z_map(r) return ReparamMultivariateNormalDiag(post_mean, post_log_sig_diag)
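
A shape-only sketch of driving the encoder above; all dimensions are invented, and it assumes the rlkit fork's ptu device defaults, the aggregator signatures used in forward(), and a sample() method on ReparamMultivariateNormalDiag. eval() is needed because TrivialR2ZMap batch-norms a batch of size N_tasks:

obs_dim, act_dim, traj_len = 4, 2, 5
enc = TimestepBasedEncoder(
    input_dim=2 * obs_dim + act_dim,  # (s, a, s')
    r_dim=16, z_dim=8, enc_hid_dim=32, r2z_hid_dim=32,
    num_enc_layer_blocks=2)
enc.eval()  # BatchNorm1d cannot run in train mode on a single-task batch

traj = {'observations': np.zeros((traj_len, obs_dim)),
        'actions': np.zeros((traj_len, act_dim)),
        'next_observations': np.zeros((traj_len, obs_dim))}
posterior = enc(context=[[traj]])  # 1 task with 1 trajectory
z = posterior.sample()             # assumed distribution API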
[ "torch.mean", "torch.nn.ReLU", "torch.nn.Sequential", "torch.nn.BatchNorm1d", "rlkit.torch.pytorch_util.from_numpy", "rlkit.torch.distributions.ReparamMultivariateNormalDiag", "numpy.array", "torch.nn.Linear", "torch.sum", "numpy.concatenate" ]
[((1195, 1220), 'torch.nn.Linear', 'nn.Linear', (['hid_dim', 'z_dim'], {}), '(hid_dim, z_dim)\n', (1204, 1220), True, 'import torch.nn as nn\n'), ((1247, 1272), 'torch.nn.Linear', 'nn.Linear', (['hid_dim', 'z_dim'], {}), '(hid_dim, z_dim)\n', (1256, 1272), True, 'import torch.nn as nn\n'), ((2918, 2942), 'torch.nn.Sequential', 'nn.Sequential', (['*mod_list'], {}), '(*mod_list)\n', (2931, 2942), True, 'import torch.nn as nn\n'), ((5023, 5082), 'rlkit.torch.distributions.ReparamMultivariateNormalDiag', 'ReparamMultivariateNormalDiag', (['post_mean', 'post_log_sig_diag'], {}), '(post_mean, post_log_sig_diag)\n', (5052, 5082), False, 'from rlkit.torch.distributions import ReparamMultivariateNormalDiag\n'), ((975, 1000), 'torch.nn.Linear', 'nn.Linear', (['r_dim', 'hid_dim'], {}), '(r_dim, hid_dim)\n', (984, 1000), True, 'import torch.nn as nn\n'), ((1014, 1037), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['hid_dim'], {}), '(hid_dim)\n', (1028, 1037), True, 'import torch.nn as nn\n'), ((1051, 1060), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1058, 1060), True, 'import torch.nn as nn\n'), ((1074, 1101), 'torch.nn.Linear', 'nn.Linear', (['hid_dim', 'hid_dim'], {}), '(hid_dim, hid_dim)\n', (1083, 1101), True, 'import torch.nn as nn\n'), ((1115, 1138), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['hid_dim'], {}), '(hid_dim)\n', (1129, 1138), True, 'import torch.nn as nn\n'), ((1152, 1161), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1159, 1161), True, 'import torch.nn as nn\n'), ((2855, 2884), 'torch.nn.Linear', 'nn.Linear', (['enc_hid_dim', 'r_dim'], {}), '(enc_hid_dim, r_dim)\n', (2864, 2884), True, 'import torch.nn as nn\n'), ((3537, 3614), 'numpy.array', 'np.array', (["[[d['observations'] for d in task_trajs] for task_trajs in context]"], {}), "([[d['observations'] for d in task_trajs] for task_trajs in context])\n", (3545, 3614), True, 'import numpy as np\n'), ((3638, 3724), 'numpy.array', 'np.array', (["[[d['next_observations'] for d in task_trajs] for task_trajs in context]"], {}), "([[d['next_observations'] for d in task_trajs] for task_trajs in\n context])\n", (3646, 3724), True, 'import numpy as np\n'), ((2452, 2485), 'torch.nn.Linear', 'nn.Linear', (['input_dim', 'enc_hid_dim'], {}), '(input_dim, enc_hid_dim)\n', (2461, 2485), True, 'import torch.nn as nn\n'), ((2523, 2550), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['enc_hid_dim'], {}), '(enc_hid_dim)\n', (2537, 2550), True, 'import torch.nn as nn\n'), ((2672, 2707), 'torch.nn.Linear', 'nn.Linear', (['enc_hid_dim', 'enc_hid_dim'], {}), '(enc_hid_dim, enc_hid_dim)\n', (2681, 2707), True, 'import torch.nn as nn\n'), ((3780, 3852), 'numpy.array', 'np.array', (["[[d['actions'] for d in task_trajs] for task_trajs in context]"], {}), "([[d['actions'] for d in task_trajs] for task_trajs in context])\n", (3788, 3852), True, 'import numpy as np\n'), ((3885, 3931), 'numpy.concatenate', 'np.concatenate', (['[obs, acts, next_obs]'], {'axis': '(-1)'}), '([obs, acts, next_obs], axis=-1)\n', (3899, 3931), True, 'import numpy as np\n'), ((3982, 4022), 'numpy.concatenate', 'np.concatenate', (['[obs, next_obs]'], {'axis': '(-1)'}), '([obs, next_obs], axis=-1)\n', (3996, 4022), True, 'import numpy as np\n'), ((4169, 4198), 'rlkit.torch.pytorch_util.from_numpy', 'ptu.from_numpy', (['all_timesteps'], {}), '(all_timesteps)\n', (4183, 4198), True, 'from rlkit.torch import pytorch_util as ptu\n'), ((4669, 4697), 'torch.sum', 'torch.sum', (['embeddings'], {'dim': '(2)'}), '(embeddings, dim=2)\n', (4678, 4697), False, 'import torch\n'), ((4750, 4779), 
'torch.mean', 'torch.mean', (['embeddings'], {'dim': '(2)'}), '(embeddings, dim=2)\n', (4760, 4779), False, 'import torch\n'), ((2748, 2775), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['enc_hid_dim'], {}), '(enc_hid_dim)\n', (2762, 2775), True, 'import torch.nn as nn\n')]
import wpath from torm import Model from torm import f class Group(Model): __config__ = "mongo" group = f.EmailList() group_hash = f.Str() display_name = f.Str() update_at = f.Timestamp() create_at = f.Timestamp() user_email = '<EMAIL>' groups = Group.FindMany({"group": user_email}) print(groups)
[ "torm.f.EmailList", "torm.f.Timestamp", "torm.f.Str" ]
[((115, 128), 'torm.f.EmailList', 'f.EmailList', ([], {}), '()\n', (126, 128), False, 'from torm import f\n'), ((146, 153), 'torm.f.Str', 'f.Str', ([], {}), '()\n', (151, 153), False, 'from torm import f\n'), ((173, 180), 'torm.f.Str', 'f.Str', ([], {}), '()\n', (178, 180), False, 'from torm import f\n'), ((198, 211), 'torm.f.Timestamp', 'f.Timestamp', ([], {}), '()\n', (209, 211), False, 'from torm import f\n'), ((228, 241), 'torm.f.Timestamp', 'f.Timestamp', ([], {}), '()\n', (239, 241), False, 'from torm import f\n')]
""" Module for Magellan/FIRE specific methods. Important Notes: - If you are reducing old FIRE data (before the broken happened in 2016), please change the ord_spat_pos array (see lines from ~220 to ~230) .. include:: ../include/links.rst """ from pkg_resources import resource_filename import numpy as np from pypeit import msgs from pypeit import telescopes from pypeit.core import framematch from pypeit.spectrographs import spectrograph from pypeit.images import detector_container class MagellanFIRESpectrograph(spectrograph.Spectrograph): """ Child to handle Magellan/FIRE specific code .. note:: For FIRE Echelle, we usually use high gain and SUTR read mode. The exposure time is usually around 900s. The detector parameters below are based on such mode. Standard star and calibrations are usually use Fowler 1 read mode in which case the read noise is ~20 electron. """ ndet = 1 telescope = telescopes.MagellanTelescopePar() def init_meta(self): """ Define how metadata are derived from the spectrograph files. That is, this associates the ``PypeIt``-specific metadata keywords with the instrument-specific header cards using :attr:`meta`. """ self.meta = {} # Required (core) self.meta['ra'] = dict(ext=0, card='RA') self.meta['dec'] = dict(ext=0, card='DEC') self.meta['target'] = dict(ext=0, card='OBJECT') self.meta['decker'] = dict(ext=0, card=None, default='default') self.meta['dichroic'] = dict(ext=0, card=None, default='default') self.meta['binning'] = dict(ext=0, card=None, default='1,1') self.meta['mjd'] = dict(ext=0, card='ACQTIME') self.meta['exptime'] = dict(ext=0, card='EXPTIME') self.meta['airmass'] = dict(ext=0, card='AIRMASS') # Extras for config and frametyping self.meta['dispname'] = dict(ext=0, card='GRISM') self.meta['idname'] = dict(ext=0, card='OBSTYPE') class MagellanFIREEchelleSpectrograph(MagellanFIRESpectrograph): """ Child to handle Magellan/FIRE Echelle data .. note:: For FIRE Echelle, we usually use high gain and SUTR read mode. The exposure time is usually around 900s. The detector parameters below are based on such mode. Standard star and calibrations are usually use Fowler 1 read mode in which case the read noise is ~20 electron. """ name = 'magellan_fire' camera = 'FIRE' pypeline = 'Echelle' supported = True comment = 'Magellan/FIRE in echelle mode' def get_detector_par(self, hdu, det): """ Return metadata for the selected detector. Args: hdu (`astropy.io.fits.HDUList`_): The open fits file with the raw image of interest. det (:obj:`int`): 1-indexed detector number. Returns: :class:`~pypeit.images.detector_container.DetectorContainer`: Object with the detector metadata. """ # Detector 1 detector_dict = dict( binning = '1,1', det = 1, dataext = 0, specaxis = 1, specflip = True, spatflip = False, platescale = 0.18, darkcurr = 0.01, #saturation = 20000., # high gain is 20000 ADU, low gain is 32000 ADU saturation = 100000., # This is an arbitrary value. nonlinear = 1.0, # high gain mode, low gain is 0.875 mincounts = -1e10, numamplifiers = 1, gain = np.atleast_1d(1.2), # high gain mode, low gain is 3.8 e-/DN ronoise = np.atleast_1d(5.0), # for high gain mode and SUTR read modes with exptime ~ 900s datasec = np.atleast_1d('[5:2044,5:2044]'), oscansec = np.atleast_1d('[5:2044,:5]') ) return detector_container.DetectorContainer(**detector_dict) @classmethod def default_pypeit_par(cls): """ Return the default parameters to use for this instrument. Returns: :class:`~pypeit.par.pypeitpar.PypeItPar`: Parameters required by all of ``PypeIt`` methods. 
""" par = super().default_pypeit_par() # Wavelengths # 1D wavelength solution with OH lines par['calibrations']['wavelengths']['rms_threshold'] = 1.0 par['calibrations']['wavelengths']['sigdetect']=[5,10,10,10,10,20,30,30,30,30,30,10,30,30,60,30,30,10,20,30,10] par['calibrations']['wavelengths']['n_first']=2 par['calibrations']['wavelengths']['n_final']=[3,3,3,2,4,4,4,3,4,4,4,3,4,4,4,4,4,4,6,6,4] par['calibrations']['wavelengths']['lamps'] = ['OH_FIRE_Echelle'] #par['calibrations']['wavelengths']['nonlinear_counts'] = self.detector[0]['nonlinear'] * self.detector[0]['saturation'] par['calibrations']['wavelengths']['method'] = 'reidentify' par['calibrations']['wavelengths']['cc_thresh'] = 0.35 par['calibrations']['wavelengths']['reid_arxiv'] = 'magellan_fire_echelle.fits' par['calibrations']['wavelengths']['match_toler']=30.0 # Echelle parameters par['calibrations']['wavelengths']['echelle'] = True par['calibrations']['wavelengths']['ech_fix_format'] = True par['calibrations']['wavelengths']['ech_nspec_coeff'] = 4 par['calibrations']['wavelengths']['ech_norder_coeff'] = 6 par['calibrations']['wavelengths']['ech_sigrej'] = 3.0 # Always correct for flexure, starting with default parameters par['scienceframe']['process']['sigclip'] = 20.0 par['scienceframe']['process']['satpix'] ='nothing' # Set slits and tilts parameters par['calibrations']['tilts']['tracethresh'] = 5 par['calibrations']['slitedges']['edge_thresh'] = 10. par['calibrations']['slitedges']['trace_thresh'] = 10. par['calibrations']['slitedges']['fit_order'] = 5 par['calibrations']['slitedges']['max_shift_adj'] = 0.5 par['calibrations']['slitedges']['fit_min_spec_length'] = 0.5 par['calibrations']['slitedges']['left_right_pca'] = True par['calibrations']['slitedges']['pca_order'] = 3 # Model entire slit par['reduce']['extraction']['model_full_slit'] = True # local sky subtraction operates on entire slit # Processing steps turn_off = dict(use_illumflat=False, use_biasimage=False, use_overscan=False, use_darkimage=False) par.reset_all_processimages_par(**turn_off) # Do not correct for flexure par['flexure']['spec_method'] = 'skip' # Set the default exposure time ranges for the frame typing par['calibrations']['standardframe']['exprng'] = [None, 60] par['calibrations']['arcframe']['exprng'] = [20, None] par['calibrations']['darkframe']['exprng'] = [20, None] par['scienceframe']['exprng'] = [20, None] # Sensitivity function parameters # Sensitivity function parameters par['sensfunc']['algorithm'] = 'IR' par['sensfunc']['polyorder'] = 8 # place holder for telgrid file par['sensfunc']['IR']['telgridfile'] \ = resource_filename('pypeit', '/data/telluric/TelFit_LasCampanas_3100_26100_R20000.fits') return par def check_frame_type(self, ftype, fitstbl, exprng=None): """ Check for frames of the provided type. Args: ftype (:obj:`str`): Type of frame to check. Must be a valid frame type; see frame-type :ref:`frame_type_defs`. fitstbl (`astropy.table.Table`_): The table with the metadata for one or more frames to check. exprng (:obj:`list`, optional): Range in the allowed exposure time for a frame of type ``ftype``. See :func:`pypeit.core.framematch.check_frame_exptime`. Returns: `numpy.ndarray`_: Boolean array with the flags selecting the exposures in ``fitstbl`` that are ``ftype`` type frames. 
""" good_exp = framematch.check_frame_exptime(fitstbl['exptime'], exprng) if ftype in ['pinhole', 'bias']: # No pinhole or bias frames return np.zeros(len(fitstbl), dtype=bool) if ftype in ['pixelflat', 'trace']: return good_exp & (fitstbl['idname'] == 'PixFlat') if ftype == 'standard': return good_exp & (fitstbl['idname'] == 'Telluric') if ftype == 'science': return good_exp & (fitstbl['idname'] == 'Science') if ftype in ['arc', 'tilt']: return good_exp & (fitstbl['idname'] == 'Science') msgs.warn('Cannot determine if frames are of type {0}.'.format(ftype)) return np.zeros(len(fitstbl), dtype=bool) @property def norders(self): """ Number of orders for this spectograph. Should only defined for echelle spectrographs, and it is undefined for the base class. """ return 21 @property def order_spat_pos(self): """ Return the expected spatial position of each echelle order. """ # ToDo: We somehow need to automate this. ## For OLD data, i.e. before 2017 #ord_spat_pos = np.array([0.06054688, 0.14160156, 0.17089844, 0.22753906, 0.27539062, # 0.32128906, 0.36474609, 0.40673828, 0.45019531, 0.48974609, # 0.52978516, 0.56054688, 0.59814453, 0.63378906, 0.66503906, # 0.70019531, 0.7421875 , 0.77978516, 0.82763672, 0.87109375, # 0.9296875]) ## For NEW data ord_spat_pos = np.array([0.078125, 0.13769531, 0.19189453, 0.24414062, 0.29296875, 0.34179688, 0.38330078, 0.42724609, 0.46582031, 0.50439453, 0.54199219, 0.57763672, 0.61279297, 0.6484375 , 0.68457031, 0.71875 , 0.75439453, 0.79443359, 0.83789062, 0.88671875, 0.94091797]) return ord_spat_pos @property def orders(self): """ Return the order number for each echelle order. """ return np.arange(31, 10, -1, dtype=int) @property def spec_min_max(self): """ Return the minimum and maximum spectral pixel expected for the spectral range of each order. """ spec_max = np.asarray([2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,2048, 2048,2048,2048,2048,2048]) spec_min = np.asarray([ 500, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) return np.vstack((spec_min, spec_max)) def order_platescale(self, order_vec, binning=None): """ Return the platescale for each echelle order. Note that FIRE has no binning. Args: order_vec (`numpy.ndarray`_): The vector providing the order numbers. binning (:obj:`str`, optional): The string defining the spectral and spatial binning. **This is always ignored.** Returns: `numpy.ndarray`_: An array with the platescale for each order provided by ``order``. """ return np.full(order_vec.size, 0.15) @property def dloglam(self): """ Return the logarithmic step in wavelength for output spectra. """ # This number was determined using the resolution and sampling quoted on the FIRE website R = 6000.0 * 2.7 dloglam = 1.0 / R / np.log(10.0) return dloglam @property def loglam_minmax(self): """ Return the base-10 logarithm of the first and last wavelength for ouput spectra. """ return np.log10(8000.0), np.log10(25700) class MagellanFIRELONGSpectrograph(MagellanFIRESpectrograph): """ Child to handle Magellan/FIRE high-throughput data .. note:: For FIRE longslit, science data are usually taken with SUTR readout mode with ~600s exposure (at least for quasar hunting people) and the readout noise is ~6 e- """ name = 'magellan_fire_long' camera = 'FIRE' supported = True comment = 'Magellan/FIRE in long-slit/high-throughput mode' def get_detector_par(self, hdu, det): """ Return metadata for the selected detector. Args: hdu (`astropy.io.fits.HDUList`_): The open fits file with the raw image of interest. 
det (:obj:`int`): 1-indexed detector number. Returns: :class:`~pypeit.images.detector_container.DetectorContainer`: Object with the detector metadata. """ # Detector 1 detector_dict = dict( binning = '1,1', det = 1, dataext = 0, specaxis = 0, specflip = False, spatflip = False, platescale = 0.15, darkcurr = 0.01, saturation = 320000., #32000 for low gain, I set to a higher value to keep data in K-band nonlinear = 0.875, mincounts = -1e10, numamplifiers = 1, gain = np.atleast_1d(3.8), ronoise = np.atleast_1d(6.0), # SUTR readout mode with exposure~600s datasec = np.atleast_1d('[5:2044, 900:1250]'), oscansec = np.atleast_1d('[:5, 900:1250]') ) return detector_container.DetectorContainer(**detector_dict) @classmethod def default_pypeit_par(cls): """ Return the default parameters to use for this instrument. Returns: :class:`~pypeit.par.pypeitpar.PypeItPar`: Parameters required by all of ``PypeIt`` methods. """ par = super().default_pypeit_par() # Wavelengths # 1D wavelength solution with arc lines par['calibrations']['wavelengths']['rms_threshold'] = 1.0 par['calibrations']['wavelengths']['sigdetect']=3 par['calibrations']['wavelengths']['fwhm'] = 20 par['calibrations']['wavelengths']['n_first']=2 par['calibrations']['wavelengths']['n_final']=4 par['calibrations']['wavelengths']['lamps'] = ['ArI', 'ArII', 'ThAr', 'NeI'] #par['calibrations']['wavelengths']['nonlinear_counts'] = self.detector[0]['nonlinear'] * self.detector[0]['saturation'] par['calibrations']['wavelengths']['method'] = 'full_template' par['calibrations']['wavelengths']['reid_arxiv'] = 'magellan_fire_long.fits' par['calibrations']['wavelengths']['match_toler']=5.0 # Set slits and tilts parameters par['calibrations']['tilts']['tracethresh'] = 5 par['calibrations']['slitedges']['trace_thresh'] = 10. par['calibrations']['slitedges']['sync_predict'] = 'nearest' # Processing steps turn_off = dict(use_illumflat=False, use_biasimage=False, use_overscan=False, use_darkimage=False) par.reset_all_processimages_par(**turn_off) # Scienceimage parameters par['reduce']['findobj']['sig_thresh'] = 5 #par['reduce']['maxnumber'] = 2 par['reduce']['findobj']['find_trim_edge'] = [50,50] par['flexure']['spec_method'] = 'skip' par['sensfunc']['IR']['telgridfile'] \ = resource_filename('pypeit', '/data/telluric/TelFit_LasCampanas_3100_26100_R20000.fits') # Set the default exposure time ranges for the frame typing par['calibrations']['standardframe']['exprng'] = [None, 60] par['calibrations']['arcframe']['exprng'] = [1, 50] par['calibrations']['darkframe']['exprng'] = [20, None] par['scienceframe']['exprng'] = [20, None] return par def check_frame_type(self, ftype, fitstbl, exprng=None): """ Check for frames of the provided type. Args: ftype (:obj:`str`): Type of frame to check. Must be a valid frame type; see frame-type :ref:`frame_type_defs`. fitstbl (`astropy.table.Table`_): The table with the metadata for one or more frames to check. exprng (:obj:`list`, optional): Range in the allowed exposure time for a frame of type ``ftype``. See :func:`pypeit.core.framematch.check_frame_exptime`. Returns: `numpy.ndarray`_: Boolean array with the flags selecting the exposures in ``fitstbl`` that are ``ftype`` type frames. 
""" good_exp = framematch.check_frame_exptime(fitstbl['exptime'], exprng) if ftype in ['pinhole', 'bias']: # No pinhole or bias frames return np.zeros(len(fitstbl), dtype=bool) if ftype in ['pixelflat', 'trace']: return good_exp & (fitstbl['idname'] == 'PixFlat') if ftype == 'standard': return good_exp & (fitstbl['idname'] == 'Telluric') if ftype == 'science': return good_exp & (fitstbl['idname'] == 'Science') if ftype in ['arc', 'tilt']: return good_exp & (fitstbl['idname'] == 'Arc') msgs.warn('Cannot determine if frames are of type {0}.'.format(ftype)) return np.zeros(len(fitstbl), dtype=bool)
[ "numpy.full", "numpy.atleast_1d", "numpy.log", "pypeit.telescopes.MagellanTelescopePar", "numpy.asarray", "pypeit.core.framematch.check_frame_exptime", "pkg_resources.resource_filename", "numpy.array", "numpy.arange", "numpy.log10", "pypeit.images.detector_container.DetectorContainer", "numpy.vstack" ]
[((988, 1021), 'pypeit.telescopes.MagellanTelescopePar', 'telescopes.MagellanTelescopePar', ([], {}), '()\n', (1019, 1021), False, 'from pypeit import telescopes\n'), ((4078, 4131), 'pypeit.images.detector_container.DetectorContainer', 'detector_container.DetectorContainer', ([], {}), '(**detector_dict)\n', (4114, 4131), False, 'from pypeit.images import detector_container\n'), ((7436, 7527), 'pkg_resources.resource_filename', 'resource_filename', (['"""pypeit"""', '"""/data/telluric/TelFit_LasCampanas_3100_26100_R20000.fits"""'], {}), "('pypeit',\n '/data/telluric/TelFit_LasCampanas_3100_26100_R20000.fits')\n", (7453, 7527), False, 'from pkg_resources import resource_filename\n'), ((8399, 8457), 'pypeit.core.framematch.check_frame_exptime', 'framematch.check_frame_exptime', (["fitstbl['exptime']", 'exprng'], {}), "(fitstbl['exptime'], exprng)\n", (8429, 8457), False, 'from pypeit.core import framematch\n'), ((10039, 10309), 'numpy.array', 'np.array', (['[0.078125, 0.13769531, 0.19189453, 0.24414062, 0.29296875, 0.34179688, \n 0.38330078, 0.42724609, 0.46582031, 0.50439453, 0.54199219, 0.57763672,\n 0.61279297, 0.6484375, 0.68457031, 0.71875, 0.75439453, 0.79443359, \n 0.83789062, 0.88671875, 0.94091797]'], {}), '([0.078125, 0.13769531, 0.19189453, 0.24414062, 0.29296875, \n 0.34179688, 0.38330078, 0.42724609, 0.46582031, 0.50439453, 0.54199219,\n 0.57763672, 0.61279297, 0.6484375, 0.68457031, 0.71875, 0.75439453, \n 0.79443359, 0.83789062, 0.88671875, 0.94091797])\n', (10047, 10309), True, 'import numpy as np\n'), ((10592, 10624), 'numpy.arange', 'np.arange', (['(31)', '(10)', '(-1)'], {'dtype': 'int'}), '(31, 10, -1, dtype=int)\n', (10601, 10624), True, 'import numpy as np\n'), ((10820, 10963), 'numpy.asarray', 'np.asarray', (['[2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048, \n 2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048]'], {}), '([2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048, \n 2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048, 2048])\n', (10830, 10963), True, 'import numpy as np\n'), ((10990, 11067), 'numpy.asarray', 'np.asarray', (['[500, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]'], {}), '([500, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])\n', (11000, 11067), True, 'import numpy as np\n'), ((11156, 11187), 'numpy.vstack', 'np.vstack', (['(spec_min, spec_max)'], {}), '((spec_min, spec_max))\n', (11165, 11187), True, 'import numpy as np\n'), ((11777, 11806), 'numpy.full', 'np.full', (['order_vec.size', '(0.15)'], {}), '(order_vec.size, 0.15)\n', (11784, 11806), True, 'import numpy as np\n'), ((14111, 14164), 'pypeit.images.detector_container.DetectorContainer', 'detector_container.DetectorContainer', ([], {}), '(**detector_dict)\n', (14147, 14164), False, 'from pypeit.images import detector_container\n'), ((16027, 16118), 'pkg_resources.resource_filename', 'resource_filename', (['"""pypeit"""', '"""/data/telluric/TelFit_LasCampanas_3100_26100_R20000.fits"""'], {}), "('pypeit',\n '/data/telluric/TelFit_LasCampanas_3100_26100_R20000.fits')\n", (16044, 16118), False, 'from pkg_resources import resource_filename\n'), ((17301, 17359), 'pypeit.core.framematch.check_frame_exptime', 'framematch.check_frame_exptime', (["fitstbl['exptime']", 'exprng'], {}), "(fitstbl['exptime'], exprng)\n", (17331, 17359), False, 'from pypeit.core import framematch\n'), ((12090, 12102), 'numpy.log', 'np.log', (['(10.0)'], {}), '(10.0)\n', (12096, 12102), True, 'import numpy as np\n'), ((12306, 12322), 'numpy.log10', 
'np.log10', (['(8000.0)'], {}), '(8000.0)\n', (12314, 12322), True, 'import numpy as np\n'), ((12324, 12339), 'numpy.log10', 'np.log10', (['(25700)'], {}), '(25700)\n', (12332, 12339), True, 'import numpy as np\n'), ((3755, 3773), 'numpy.atleast_1d', 'np.atleast_1d', (['(1.2)'], {}), '(1.2)\n', (3768, 3773), True, 'import numpy as np\n'), ((3845, 3863), 'numpy.atleast_1d', 'np.atleast_1d', (['(5.0)'], {}), '(5.0)\n', (3858, 3863), True, 'import numpy as np\n'), ((3956, 3988), 'numpy.atleast_1d', 'np.atleast_1d', (['"""[5:2044,5:2044]"""'], {}), "('[5:2044,5:2044]')\n", (3969, 3988), True, 'import numpy as np\n'), ((4020, 4048), 'numpy.atleast_1d', 'np.atleast_1d', (['"""[5:2044,:5]"""'], {}), "('[5:2044,:5]')\n", (4033, 4048), True, 'import numpy as np\n'), ((13848, 13866), 'numpy.atleast_1d', 'np.atleast_1d', (['(3.8)'], {}), '(3.8)\n', (13861, 13866), True, 'import numpy as np\n'), ((13898, 13916), 'numpy.atleast_1d', 'np.atleast_1d', (['(6.0)'], {}), '(6.0)\n', (13911, 13916), True, 'import numpy as np\n'), ((13987, 14022), 'numpy.atleast_1d', 'np.atleast_1d', (['"""[5:2044, 900:1250]"""'], {}), "('[5:2044, 900:1250]')\n", (14000, 14022), True, 'import numpy as np\n'), ((14054, 14085), 'numpy.atleast_1d', 'np.atleast_1d', (['"""[:5, 900:1250]"""'], {}), "('[:5, 900:1250]')\n", (14067, 14085), True, 'import numpy as np\n')]
#!/usr/bin/env python
#
# Copyright (c) 2015 Mirantis, Inc.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import ast
import os
import pprint
import signal
import sys
import time

import netaddr
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_utils import timeutils
import six

opts = [
    cfg.IntOpt(
        "consume_interval",
        default=5,
        deprecated_name="sleep_between_consume_attempts",
        help=("Time that script will sleep between requests for consuming "
              "Zaqar messages in seconds."),
    ),
    cfg.StrOpt(
        "mount_dir",
        default="/tmp",
        help="Directory that will contain all mounted shares."
    ),
    cfg.ListOpt(
        "expected_ip_addresses",
        default=[],
        help=("List of IP addresses that are expected to be found in access "
              "rules to trigger [un]mount operation for a share.")
    ),
]

CONF = cfg.CONF


def print_with_time(data):
    time = six.text_type(timeutils.utcnow())
    print(time + " " + six.text_type(data))


def print_pretty_dict(d):
    pprint.pprint(d)


def pop_zaqar_messages(client, queues_names):
    if not isinstance(queues_names, (list, set, tuple)):
        queues_names = (queues_names, )
    try:
        user = client.conf['auth_opts']['options']['os_username']
        project = client.conf['auth_opts']['options']['os_project_name']
        messages = []
        for queue_name in queues_names:
            queue = client.queue(queue_name)
            messages.extend([six.text_type(m.body) for m in queue.pop()])
            print_with_time(
                "Received %(len)s message[s] from '%(q)s' "
                "queue using '%(u)s' user and '%(p)s' project." % {
                    'len': len(messages),
                    'q': queue_name,
                    'u': user,
                    'p': project,
                }
            )
        return messages
    except Exception as e:
        print_with_time("Caught exception - %s" % e)
        return []


def signal_handler(signal, frame):
    print("")
    print_with_time("Ctrl+C was pressed. Shutting down consumer.")
    sys.exit(0)


def parse_str_to_dict(string):
    if not isinstance(string, six.string_types):
        return string
    # Message bodies are plain Python literals, so parse them with the safe
    # literal evaluator instead of calling eval() on untrusted queue content.
    result = ast.literal_eval(string)
    return result


def handle_message(data):
    """Handles consumed message.
    The expected structure of a message is the following:
        {'data': {
            'access_rules': [
                {
                    'access_id': u'b28268b9-36c6-40d3-a485-22534077328f',
                    'access_instance_id':
                        u'd137b2cb-f549-4141-9dd7-36b2789fb973',
                    'access_level': u'rw',
                    'access_state': u'active',
                    'access_to': u'7.7.7.7',
                    'access_type': u'ip',
                }
            ],
            'availability_zone': u'nova',
            'export_locations': [u'127.0.0.1:/path/to/nfs/share'],
            'is_allow_operation': True,
            'share_id': u'053eae9a-726f-4f7e-8502-49d7b1adf290',
            'share_instance_id': u'dc33e554-e0b9-40f5-9046-c198716d73a0',
            'share_proto': u'NFS'
        }}
    """
    if 'data' in data.keys():
        data = data['data']

    valid_access = (
        'access_rules' in data and
        len(data['access_rules']) == 1 and
        data['access_rules'][0].get('access_type', '?').lower() == 'ip' and
        data.get('share_proto', '?').lower() == 'nfs'
    )

    if valid_access:
        is_allow_operation = data['is_allow_operation']
        export_location = data['export_locations'][0]
        if is_allow_operation:
            mount_share(export_location, data['access_to'])
        else:
            unmount_share(export_location, data['access_to'])
    else:
        print_with_time('Do nothing with above message.')


def execute(cmd):
    try:
        print_with_time('Executing following command: \n%s' % cmd)
        cmd = cmd.split()
        stdout, stderr = processutils.execute(*cmd)
        if stderr:
            print_with_time('Got error: %s' % stderr)
        return stdout, stderr
    except Exception as e:
        print_with_time('Got following error: %s' % e)
        return False, True


def is_share_mounted(mount_point):
    mounts, stderr = execute('mount')
    return mount_point in mounts


def rule_affects_me(ip_or_cidr):
    if '/' in ip_or_cidr:
        net = netaddr.IPNetwork(ip_or_cidr)
        for my_ip in CONF.zaqar.expected_ip_addresses:
            if netaddr.IPAddress(my_ip) in net:
                return True
    else:
        for my_ip in CONF.zaqar.expected_ip_addresses:
            if my_ip == ip_or_cidr:
                return True
    return False


def mount_share(export_location, access_to):
    data = {
        'mount_point': os.path.join(CONF.zaqar.mount_dir,
                                     export_location.split('/')[-1]),
        'export_location': export_location,
    }
    if (rule_affects_me(access_to) and
            not is_share_mounted(data['mount_point'])):
        # interpolate the template so the log shows the actual paths
        print_with_time(
            "Mounting '%(export_location)s' share to %(mount_point)s." % data)
        execute('sudo mkdir -p %(mount_point)s' % data)
        stdout, stderr = execute(
            'sudo mount.nfs %(export_location)s %(mount_point)s' % data)
        if stderr:
            print_with_time("Mount operation failed.")
        else:
            print_with_time("Mount operation went OK.")


def unmount_share(export_location, access_to):
    if rule_affects_me(access_to) and is_share_mounted(export_location):
        print_with_time("Unmounting '%s' share." % export_location)
        stdout, stderr = execute('sudo umount %s' % export_location)
        if stderr:
            print_with_time("Unmount operation failed.")
        else:
            print_with_time("Unmount operation went OK.")


def main():
    # Register other local modules
    cur = os.path.dirname(__file__)
    pathtest = os.path.join(cur)
    sys.path.append(pathtest)

    # Init configuration
    CONF(sys.argv[1:], project="manila_notifier", version=1.0)
    CONF.register_opts(opts, group="zaqar")

    # Import common config and Zaqar client
    import zaqarclientwrapper

    # Handle SIGINT
    signal.signal(signal.SIGINT, signal_handler)

    # Run consumer
    print_with_time("Consumer was successfully run.")
    while True:
        messages = pop_zaqar_messages(
            zaqarclientwrapper.ZAQARCLIENT, CONF.zaqar.zaqar_queues)
        if not messages:
            message = ("No new messages in '%s' queue[s] "
                       "found."
% ','.join(CONF.zaqar.zaqar_queues)) else: message = "Got following messages:" print_with_time(message) for message in messages: message = parse_str_to_dict(message) print_pretty_dict(message) handle_message(message) time.sleep(CONF.zaqar.consume_interval) if __name__ == '__main__': main()
[ "sys.path.append", "oslo_config.cfg.StrOpt", "oslo_config.cfg.IntOpt", "os.path.dirname", "oslo_utils.timeutils.utcnow", "netaddr.IPAddress", "six.text_type", "time.sleep", "oslo_concurrency.processutils.execute", "pprint.pprint", "netaddr.IPNetwork", "signal.signal", "os.path.join", "sys.exit", "oslo_config.cfg.ListOpt" ]
[((863, 1063), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (['"""consume_interval"""'], {'default': '(5)', 'deprecated_name': '"""sleep_between_consume_attempts"""', 'help': '"""Time that script will sleep between requests for consuming Zaqar messages in seconds."""'}), "('consume_interval', default=5, deprecated_name=\n 'sleep_between_consume_attempts', help=\n 'Time that script will sleep between requests for consuming Zaqar messages in seconds.'\n )\n", (873, 1063), False, 'from oslo_config import cfg\n'), ((1112, 1212), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (['"""mount_dir"""'], {'default': '"""/tmp"""', 'help': '"""Directory that will contain all mounted shares."""'}), "('mount_dir', default='/tmp', help=\n 'Directory that will contain all mounted shares.')\n", (1122, 1212), False, 'from oslo_config import cfg\n'), ((1243, 1420), 'oslo_config.cfg.ListOpt', 'cfg.ListOpt', (['"""expected_ip_addresses"""'], {'default': '[]', 'help': '"""List of IP addresses that are expected to be found in access rules to trigger [un]mount operation for a share."""'}), "('expected_ip_addresses', default=[], help=\n 'List of IP addresses that are expected to be found in access rules to trigger [un]mount operation for a share.'\n )\n", (1254, 1420), False, 'from oslo_config import cfg\n'), ((1630, 1646), 'pprint.pprint', 'pprint.pprint', (['d'], {}), '(d)\n', (1643, 1646), False, 'import pprint\n'), ((2698, 2709), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2706, 2709), False, 'import sys\n'), ((6508, 6533), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (6523, 6533), False, 'import os\n'), ((6549, 6566), 'os.path.join', 'os.path.join', (['cur'], {}), '(cur)\n', (6561, 6566), False, 'import os\n'), ((6571, 6596), 'sys.path.append', 'sys.path.append', (['pathtest'], {}), '(pathtest)\n', (6586, 6596), False, 'import sys\n'), ((6830, 6874), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal_handler'], {}), '(signal.SIGINT, signal_handler)\n', (6843, 6874), False, 'import signal\n'), ((1534, 1552), 'oslo_utils.timeutils.utcnow', 'timeutils.utcnow', ([], {}), '()\n', (1550, 1552), False, 'from oslo_utils import timeutils\n'), ((4578, 4604), 'oslo_concurrency.processutils.execute', 'processutils.execute', (['*cmd'], {}), '(*cmd)\n', (4598, 4604), False, 'from oslo_concurrency import processutils\n'), ((5000, 5029), 'netaddr.IPNetwork', 'netaddr.IPNetwork', (['ip_or_cidr'], {}), '(ip_or_cidr)\n', (5017, 5029), False, 'import netaddr\n'), ((7487, 7526), 'time.sleep', 'time.sleep', (['CONF.zaqar.consume_interval'], {}), '(CONF.zaqar.consume_interval)\n', (7497, 7526), False, 'import time\n'), ((1577, 1596), 'six.text_type', 'six.text_type', (['data'], {}), '(data)\n', (1590, 1596), False, 'import six\n'), ((5100, 5124), 'netaddr.IPAddress', 'netaddr.IPAddress', (['my_ip'], {}), '(my_ip)\n', (5117, 5124), False, 'import netaddr\n'), ((2076, 2097), 'six.text_type', 'six.text_type', (['m.body'], {}), '(m.body)\n', (2089, 2097), False, 'import six\n')]
""" ******************************** * Created by mohammed-alaa * ******************************** Evaluate motion and spatial streams """ import frame_dataloader from evaluation import legacy_load_model, get_batch_size from evaluation.evaluation import * from utils.drive_manager import DriveManager """ Evaluate spatial stream """ # download drive_manager = DriveManager("spa-xception-adam-5e-06-imnet") drive_manager.download_file('1djGzpxAYFvNX-UaQ7ONqDHGgnzc8clBK', "spatial.zip") # load into ram print("Spatial stream") spatial_model_restored = legacy_load_model(filepath="spatial.h5", custom_objects={'sparse_categorical_cross_entropy_loss': sparse_categorical_cross_entropy_loss, "acc_top_1": acc_top_1, "acc_top_5": acc_top_5}) spatial_model_restored.summary() # evaluate _, spatial_test_loader, test_video_level_label = frame_dataloader.SpatialDataLoader( num_workers=workers, width=int(spatial_model_restored.inputs[0].shape[1]), height=int(spatial_model_restored.inputs[0].shape[2]) , use_multiprocessing=False, batch_size=get_batch_size(spatial_model_restored, spatial=True), testing_samples_per_video=19 ).run() video_level_loss, video_level_accuracy_1, video_level_accuracy_5, test_video_level_preds = eval_model(spatial_model_restored, spatial_test_loader, test_video_level_label, 19) print("Spatial Model validation", "prec@1", video_level_accuracy_1, "prec@5", video_level_accuracy_5, "loss", video_level_loss) """ Evaluate motion stream """ # download drive_manager = DriveManager("heavy-mot-xception-adam-1e-05-imnet") drive_manager.download_file('1kvslNL8zmZYaHRmhgAM6-l_pNDDA0EKZ', "motion.zip") # the id of the zip file contains my network # load into ram print("Motion stream") motion_model_restored = legacy_load_model(filepath="motion.h5", custom_objects={'sparse_categorical_cross_entropy_loss': sparse_categorical_cross_entropy_loss, "acc_top_1": acc_top_1, "acc_top_5": acc_top_5}) motion_model_restored.summary() # evaluate _, motion_test_loader, test_video_level_label = frame_dataloader.MotionDataLoader( num_workers=workers, width=int(motion_model_restored.inputs[0].shape[1]), height=int(motion_model_restored.inputs[0].shape[2]) , use_multiprocessing=False, batch_size=get_batch_size(motion_model_restored, spatial=True) , testing_samples_per_video=19).run() video_level_loss, video_level_accuracy_1, video_level_accuracy_5, test_video_level_preds = eval_model(motion_model_restored, motion_test_loader, test_video_level_label, 19) print("Motion Model validation", "prec@1", video_level_accuracy_1, "prec@5", video_level_accuracy_5, "loss", video_level_loss)
[ "utils.drive_manager.DriveManager", "evaluation.get_batch_size", "evaluation.legacy_load_model" ]
[((365, 410), 'utils.drive_manager.DriveManager', 'DriveManager', (['"""spa-xception-adam-5e-06-imnet"""'], {}), "('spa-xception-adam-5e-06-imnet')\n", (377, 410), False, 'from utils.drive_manager import DriveManager\n'), ((557, 755), 'evaluation.legacy_load_model', 'legacy_load_model', ([], {'filepath': '"""spatial.h5"""', 'custom_objects': "{'sparse_categorical_cross_entropy_loss':\n sparse_categorical_cross_entropy_loss, 'acc_top_1': acc_top_1,\n 'acc_top_5': acc_top_5}"}), "(filepath='spatial.h5', custom_objects={\n 'sparse_categorical_cross_entropy_loss':\n sparse_categorical_cross_entropy_loss, 'acc_top_1': acc_top_1,\n 'acc_top_5': acc_top_5})\n", (574, 755), False, 'from evaluation import legacy_load_model, get_batch_size\n'), ((1508, 1559), 'utils.drive_manager.DriveManager', 'DriveManager', (['"""heavy-mot-xception-adam-1e-05-imnet"""'], {}), "('heavy-mot-xception-adam-1e-05-imnet')\n", (1520, 1559), False, 'from utils.drive_manager import DriveManager\n'), ((1749, 1946), 'evaluation.legacy_load_model', 'legacy_load_model', ([], {'filepath': '"""motion.h5"""', 'custom_objects': "{'sparse_categorical_cross_entropy_loss':\n sparse_categorical_cross_entropy_loss, 'acc_top_1': acc_top_1,\n 'acc_top_5': acc_top_5}"}), "(filepath='motion.h5', custom_objects={\n 'sparse_categorical_cross_entropy_loss':\n sparse_categorical_cross_entropy_loss, 'acc_top_1': acc_top_1,\n 'acc_top_5': acc_top_5})\n", (1766, 1946), False, 'from evaluation import legacy_load_model, get_batch_size\n'), ((1054, 1106), 'evaluation.get_batch_size', 'get_batch_size', (['spatial_model_restored'], {'spatial': '(True)'}), '(spatial_model_restored, spatial=True)\n', (1068, 1106), False, 'from evaluation import legacy_load_model, get_batch_size\n'), ((2244, 2295), 'evaluation.get_batch_size', 'get_batch_size', (['motion_model_restored'], {'spatial': '(True)'}), '(motion_model_restored, spatial=True)\n', (2258, 2295), False, 'from evaluation import legacy_load_model, get_batch_size\n')]
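The bare name lists such as the one above make corpus-level statistics cheap to compute without touching the extraction tuples. A minimal sketch, assuming each row is exposed as a dict whose 'apis' key holds that list:

from collections import Counter

def api_frequencies(records):
    """Count fully-qualified API names across an iterable of dataset records."""
    counts = Counter()
    for record in records:
        counts.update(record["apis"])  # 'apis' is the assumed field name
    return counts

# e.g. the evaluation record above would contribute
# 'utils.drive_manager.DriveManager', 'evaluation.get_batch_size', ...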
# -*- coding: utf-8 -*-
import json
import logging

from django.core.management import call_command
from django.test.client import Client

from networkapi.test.test_case import NetworkApiTestCase

log = logging.getLogger(__name__)


def setup():
    call_command(
        'loaddata',
        'networkapi/system/fixtures/initial_variables.json',
        'networkapi/api_pools/fixtures/initial_optionspool.json',
        'networkapi/requisicaovips/fixtures/initial_optionsvip.json',
        'networkapi/healthcheckexpect/fixtures/initial_healthcheck.json',
        'networkapi/usuario/fixtures/initial_usuario.json',
        'networkapi/grupo/fixtures/initial_ugrupo.json',
        'networkapi/usuario/fixtures/initial_usuariogrupo.json',
        'networkapi/api_ogp/fixtures/initial_objecttype.json',
        'networkapi/api_ogp/fixtures/initial_objectgrouppermissiongeneral.json',
        'networkapi/grupo/fixtures/initial_permissions.json',
        'networkapi/grupo/fixtures/initial_permissoes_administrativas.json',
        'networkapi/api_pools/fixtures/initial_base.json',
        'networkapi/api_pools/fixtures/initial_pools_1.json',
        verbosity=0
    )


class PoolPutSpecTestCase(NetworkApiTestCase):

    maxDiff = None

    def setUp(self):
        self.client = Client()

    def tearDown(self):
        pass

    def execute_some_put_verify_error(self, name_file):
        response = self.client.put(
            '/api/v3/pool/1/',
            data=json.dumps(self.load_json_file(name_file)),
            content_type='application/json',
            HTTP_AUTHORIZATION=self.get_http_authorization('test'))

        self.compare_status(400, response.status_code)

    def execute_some_put_verify_success(self, name_file):
        response = self.client.put(
            '/api/v3/pool/1/',
            data=json.dumps(self.load_json_file(name_file)),
            content_type='application/json',
            HTTP_AUTHORIZATION=self.get_http_authorization('test'))

        self.compare_status(200, response.status_code)

        # fetch the updated data
        response = self.client.get(
            '/api/v3/pool/1/',
            content_type='application/json',
            HTTP_AUTHORIZATION=self.get_http_authorization('test'))

        self.compare_status(200, response.status_code)
        self.compare_json(name_file, response.data)

    def test_put_valid_file(self):
        """ test_put_valid_file"""
        self.execute_some_put_verify_success(
            'api_pools/tests/sanity/json/put/test_pool_put_valid_file.json')

    def test_put_out_of_range_port(self):
        """ test_put_out_of_range_port"""
        self.execute_some_put_verify_error(
            'api_pools/tests/sanity/json/put/test_pool_put_out_of_range_port.json')

    def test_put_negative_port(self):
        """ test_put_negative_port"""
        self.execute_some_put_verify_error(
            'api_pools/tests/sanity/json/put/test_pool_put_negative_port.json')

    def test_put_float_port(self):
        """ test_put_float_port"""
        self.execute_some_put_verify_error(
            'api_pools/tests/sanity/json/put/test_pool_put_float_port.json')

    def test_put_zero_port(self):
        """ test_put_zero_port"""
        self.execute_some_put_verify_error(
            'api_pools/tests/sanity/json/put/test_pool_put_zero_port.json')

    def test_put_string_port(self):
        """ test_put_string_port"""
        self.execute_some_put_verify_error(
            'api_pools/tests/sanity/json/put/test_pool_put_string_port.json')

    def test_put_float_environment(self):
        """ test_put_float_environment"""
        self.execute_some_put_verify_error(
            'api_pools/tests/sanity/json/put/test_pool_put_float_environment.json')

    def test_put_string_environment(self):
        """ test_put_string_environment"""
        self.execute_some_put_verify_error(
            'api_pools/tests/sanity/json/put/test_pool_put_string_environment.json')

    def test_put_zero_environment(self):
        """ test_put_zero_environment"""
        self.execute_some_put_verify_error(
            'api_pools/tests/sanity/json/put/test_pool_put_zero_environment.json')

    def test_put_negative_environment(self):
        """
test_put_negative_environment""" self.execute_some_put_verify_error( 'api_pools/tests/sanity/json/put/test_pool_put_negative_environment.json') def test_put_integer_name_servicedownaction(self): """ test_put_integer_name_servicedownaction""" self.execute_some_put_verify_error( 'api_pools/tests/sanity/json/put/test_pool_put_integer_name_servicedownaction.json') def test_put_invalid_healthcheck_type(self): """ test_put_invalid_healthcheck_type""" self.execute_some_put_verify_error( 'api_pools/tests/sanity/json/put/test_pool_put_invalid_healthcheck_type.json') def test_put_invalid_destination(self): """ test_put_invalid_destination""" self.execute_some_put_verify_error( 'api_pools/tests/sanity/json/put/test_pool_put_invalid_destination.json') def test_put_negative_default_limit(self): """ test_put_negative_default_limit""" self.execute_some_put_verify_error( 'api_pools/tests/sanity/json/put/test_pool_put_negative_default_limit.json') def test_put_integer_lb_method(self): """ test_put_integer_lb_method""" self.execute_some_put_verify_error( 'api_pools/tests/sanity/json/put/test_pool_put_integer_lb_method.json') def test_put_string_id_servicedownaction(self): """ test_put_string_id_servicedownaction""" self.execute_some_put_verify_error( 'api_pools/tests/sanity/json/put/test_pool_put_string_id_servicedownaction.json') def test_put_zero_id_servicedownaction(self): """ test_put_zero_id_servicedownaction""" self.execute_some_put_verify_error( 'api_pools/tests/sanity/json/put/test_pool_put_zero_id_servicedownaction.json') def test_put_negative_id_servicedownaction(self): """ test_put_negative_id_servicedownaction""" self.execute_some_put_verify_error( 'api_pools/tests/sanity/json/put/test_pool_put_negative_id_servicedownaction.json') def test_valid_post_after_equals_valid_put(self): """ test_valid_post_after_equals_valid_put""" # try to get datas response = self.client.get( '/api/v3/pool/1/', content_type='application/json', HTTP_AUTHORIZATION=self.get_http_authorization('test')) # test if data were not inserted self.assertEqual(200, response.status_code, 'Status code should be 200 and was %s' % response.status_code) response = self.client.put( '/api/v3/pool/1/', data=json.dumps(self.load_json_file( 'api_pools/tests/sanity/json/test_pool_put_and_post.json')), content_type='application/json', HTTP_AUTHORIZATION=self.get_http_authorization('test')) self.assertEqual(200, response.status_code, 'Status code should be 200 and was %s' % response.status_code) response = self.client.post( '/api/v3/pool/', data=json.dumps(self.load_json_file( 'api_pools/tests/sanity/json/test_pool_put_and_post.json')), content_type='application/json', HTTP_AUTHORIZATION=self.get_http_authorization('test')) self.assertEqual(400, response.status_code, 'Status code should be 500 and was %s' % response.status_code)
[ "django.test.client.Client", "django.core.management.call_command", "logging.getLogger" ]
[((203, 230), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (220, 230), False, 'import logging\n'), ((250, 1092), 'django.core.management.call_command', 'call_command', (['"""loaddata"""', '"""networkapi/system/fixtures/initial_variables.json"""', '"""networkapi/api_pools/fixtures/initial_optionspool.json"""', '"""networkapi/requisicaovips/fixtures/initial_optionsvip.json"""', '"""networkapi/healthcheckexpect/fixtures/initial_healthcheck.json"""', '"""networkapi/usuario/fixtures/initial_usuario.json"""', '"""networkapi/grupo/fixtures/initial_ugrupo.json"""', '"""networkapi/usuario/fixtures/initial_usuariogrupo.json"""', '"""networkapi/api_ogp/fixtures/initial_objecttype.json"""', '"""networkapi/api_ogp/fixtures/initial_objectgrouppermissiongeneral.json"""', '"""networkapi/grupo/fixtures/initial_permissions.json"""', '"""networkapi/grupo/fixtures/initial_permissoes_administrativas.json"""', '"""networkapi/api_pools/fixtures/initial_base.json"""', '"""networkapi/api_pools/fixtures/initial_pools_1.json"""'], {'verbosity': '(0)'}), "('loaddata',\n 'networkapi/system/fixtures/initial_variables.json',\n 'networkapi/api_pools/fixtures/initial_optionspool.json',\n 'networkapi/requisicaovips/fixtures/initial_optionsvip.json',\n 'networkapi/healthcheckexpect/fixtures/initial_healthcheck.json',\n 'networkapi/usuario/fixtures/initial_usuario.json',\n 'networkapi/grupo/fixtures/initial_ugrupo.json',\n 'networkapi/usuario/fixtures/initial_usuariogrupo.json',\n 'networkapi/api_ogp/fixtures/initial_objecttype.json',\n 'networkapi/api_ogp/fixtures/initial_objectgrouppermissiongeneral.json',\n 'networkapi/grupo/fixtures/initial_permissions.json',\n 'networkapi/grupo/fixtures/initial_permissoes_administrativas.json',\n 'networkapi/api_pools/fixtures/initial_base.json',\n 'networkapi/api_pools/fixtures/initial_pools_1.json', verbosity=0)\n", (262, 1092), False, 'from django.core.management import call_command\n'), ((1280, 1288), 'django.test.client.Client', 'Client', ([], {}), '()\n', (1286, 1288), False, 'from django.test.client import Client\n')]
from threading import RLock

from pylgbst.peripherals import EncodedMotor
from IotLib.pyUtils import startThread
from IotLib.log import Log
from IotLib.iotMotor import IotMotor
from IotLib.iotEncodedMotor import IotEncodedMotor
from .legoNode import SendCommand

# todo: LegoMotor inherits both IotMotor and IotSteering.
class LegoMotor(IotEncodedMotor):
    """ the class encapsulates a lego encoded motor based on pylgbst.Motor """
    # subscribe data
    NoData = 0      # do not subscribe data
    SpeedData = 1   # subscribe speed data
    AngleData = 2   # subscribe angle data

    def __init__(self, name, parent, motor, data=0, minMovingSpeed=5, maxPower=1.0):
        """ construct a LegoMotor
        name: the name of the node
        parent: parent IotNode object. None for root node.
        motor: an instance of pylgbst.Motor
        data: which data to subscribe (NoData, SpeedData, or AngleData)
        minMovingSpeed: the minimum valid moving absolute speed
        maxPower: max power allowed for the motor
        """
        super(LegoMotor, self).__init__(name, parent, minMovingSpeed=minMovingSpeed)
        self.motor = motor
        self.data = data
        self.maxPower = maxPower
        self._motorControlLock = RLock() # lock is required in case of emergencyStop() being called in a separate thread

    def stop(self):
        """ stop the motor """
        self._stop()
        return self.speed

    def run(self, speed, speed2=None):
        """ run the motor with specified speed
        speed: the speed for the motor, speed2: the speed for the secondary motor
        speed > 0 run forward max 100
        speed < 0 run reverse max -100
        speed = 0 stop
        return the running speed
        """
        self._requestedSpeed = speed
        self._requestedSpeed2 = speed2
        Log.info('Request %s to run at speed %i, %s' %(self.name, speed, str(speed2)))
        self._run(speed, speed2)
        return self.speed

    def runAngle(self, angle, speed, speed2 = None):
        """ move the motor by specified angle for encoded single or dual encoded motor
        angle is in degree (360 is one rotation)
        speed controls the direction ranges from -100 to 100
        """
        outspd = float(IotMotor._clampSpeed(speed)) / 100.0
        outspd2 = speed2
        if speed2 is not None:
            outspd2 = float(IotMotor._clampSpeed(speed2)) / 100.0
        Log.info('MoveAngle %s by %i degrees at speed %f, %s' %(self.name, angle, outspd, str(outspd2)))
        # acquire()/release() must actually be called; referencing the bound methods alone is a no-op
        self._motorControlLock.acquire()
        SendCommand(self.motor, self.motor.angled, degrees=angle, speed_primary=outspd, speed_secondary=outspd2, max_power=self.maxPower)
        #self.motor.angled(angle, outspd, outspd2, max_power=self.maxPower)
        self._motorControlLock.release()

    def runAngleAsync(self, angle, speed, speed2 = None):
        """ launch a thread to move the motor by specified angle for encoded single or dual motor
        angle is in degree (360 is one rotation)
        speed controls the direction ranges from -100 to 100
        """
        startThread('%s.moveAngle' %self.name, target=self.runAngle, front=True, args=(angle, speed, speed2))

    def goToPosition(self, position, position2 = None, speed = 100):
        """ run the motor to specified positions for encoded single or dual motor
        positions are in degrees range from int.min to int.max
        speed controls the direction ranges from -100 to 100
        """
        outspd = float(IotMotor._clampSpeed(speed)) / 100.0
        Log.info('GoToPosition %s to (%i, %s) at speed %f' %(self.name, position, str(position2), outspd))
        self._motorControlLock.acquire()
        SendCommand(self.motor, self.motor.goto_position, degrees_primary=position, degrees_secondary=position2, speed=outspd, max_power=self.maxPower)
        #self.motor.goto_position(position, position2, outspd, max_power=self.maxPower)
        self._motorControlLock.release()

    def goToPositionAsync(self, position, position2 = None, speed = 100):
        """ launch a thread to run
        the motor to specified positions for encoded single or dual motor
        positions are in degrees range from int.min to int.max
        speed controls the direction ranges from -100 to 100
        """
        startThread('%s.goToPosition' %self.name, target=self.goToPosition, front=True, args=(position, position2, speed))

    def extraSpeed(self, deltaSpeed):
        """ request extra speed in addition to the run speed by run(speed) """
        self._extraSpeed = deltaSpeed
        if self.speed2 is not None and self.speed2 != self.speed:
            return
        if self._requestedSpeed == 0 or self.speed == 0:
            return
        absRequestedSpeed = abs(self._requestedSpeed)
        extraSpeed = self._extraSpeed + self._extraSteeringSpeed
        absRunSpeed = absRequestedSpeed + extraSpeed
        if absRunSpeed != abs(self.speed):
            if self._requestedSpeed > 0:
                self._run(absRunSpeed)
            else:
                self._run(-absRunSpeed)

    def _stop(self):
        """ internal method to stop the motor """
        self._requestedSpeed = 0
        self._requestedSpeed2 = 0
        Log.info('Stop %s' %self.name)
        self._motorControlLock.acquire()
        SendCommand(self.motor, self.motor.start_power, power_primary=0, power_secondary=0)
        #self.motor.start_power(0)
        self._motorControlLock.release()
        self.speed = 0
        self.speed2 = 0
        return self.speed

    def _run(self, speed, speed2=None):
        """ internal method to run the motor with specified speed
        speed > 0 run forward max 100
        speed < 0 run reverse max -100
        speed = 0 stop
        return the running speed
        """
        if abs(speed) < self._minMovingSpeed:
            # stop
            self._stop()
            outspd = 0
        else:
            outspd = float(IotMotor._clampSpeed(speed)) / 100.0
        outspd2 = speed2
        if speed2 is not None:
            outspd2 = float(IotMotor._clampSpeed(speed2)) / 100.0
        Log.info('Run %s at speed %f, %s' %(self.name, outspd, str(outspd2)))
        self._motorControlLock.acquire()
        SendCommand(self.motor, self.motor.start_speed, speed_primary=outspd, speed_secondary=outspd2, max_power=self.maxPower)
        #self.motor.start_speed(outspd, outspd2, max_power=self.maxPower)
        self._motorControlLock.release()
        if self.data == LegoMotor.NoData:
            self.speed = outspd
            self.speed2 = outspd2

    def _callbackSpeed(self, param1):
        Log.debug("Motor %s speed %s" %(self.name, str(param1)))
        self.speed = param1

    def _callbackAngle(self, param1):
        Log.debug("Motor %s angle %s" %(self.name, str(param1)))
        self.angle = param1

    def startUp(self):
        """ override to subscribe the data from lego sensor """
        if self.data == LegoMotor.SpeedData:
            self.motor.subscribe(self._callbackSpeed, mode=EncodedMotor.SENSOR_SPEED, granularity=1)
        elif self.data == LegoMotor.AngleData:
            self.motor.subscribe(self._callbackAngle, mode=EncodedMotor.SENSOR_ANGLE, granularity=1)

    def shutDown(self):
        """ override to unsubscribe the data """
        if self.data == LegoMotor.SpeedData:
            self.motor.unsubscribe(self._callbackSpeed)
        elif self.data == LegoMotor.AngleData:
            self.motor.unsubscribe(self._callbackAngle)
[ "threading.RLock", "IotLib.iotMotor.IotMotor._clampSpeed", "IotLib.log.Log.info", "IotLib.pyUtils.startThread" ]
[((1260, 1267), 'threading.RLock', 'RLock', ([], {}), '()\n', (1265, 1267), False, 'from threading import RLock\n'), ((3112, 3218), 'IotLib.pyUtils.startThread', 'startThread', (["('%s.moveAngle' % self.name)"], {'target': 'self.runAngle', 'front': '(True)', 'args': '(angle, speed, speed2)'}), "('%s.moveAngle' % self.name, target=self.runAngle, front=True,\n args=(angle, speed, speed2))\n", (3123, 3218), False, 'from IotLib.pyUtils import startThread\n'), ((4307, 4427), 'IotLib.pyUtils.startThread', 'startThread', (["('%s.goToPosition' % self.name)"], {'target': 'self.goToPosition', 'front': '(True)', 'args': '(position, position2, speed)'}), "('%s.goToPosition' % self.name, target=self.goToPosition, front=\n True, args=(position, position2, speed))\n", (4318, 4427), False, 'from IotLib.pyUtils import startThread\n'), ((5247, 5278), 'IotLib.log.Log.info', 'Log.info', (["('Stop %s' % self.name)"], {}), "('Stop %s' % self.name)\n", (5255, 5278), False, 'from IotLib.log import Log\n'), ((2269, 2296), 'IotLib.iotMotor.IotMotor._clampSpeed', 'IotMotor._clampSpeed', (['speed'], {}), '(speed)\n', (2289, 2296), False, 'from IotLib.iotMotor import IotMotor\n'), ((3525, 3552), 'IotLib.iotMotor.IotMotor._clampSpeed', 'IotMotor._clampSpeed', (['speed'], {}), '(speed)\n', (3545, 3552), False, 'from IotLib.iotMotor import IotMotor\n'), ((2390, 2418), 'IotLib.iotMotor.IotMotor._clampSpeed', 'IotMotor._clampSpeed', (['speed2'], {}), '(speed2)\n', (2410, 2418), False, 'from IotLib.iotMotor import IotMotor\n'), ((5974, 6001), 'IotLib.iotMotor.IotMotor._clampSpeed', 'IotMotor._clampSpeed', (['speed'], {}), '(speed)\n', (5994, 6001), False, 'from IotLib.iotMotor import IotMotor\n'), ((6107, 6135), 'IotLib.iotMotor.IotMotor._clampSpeed', 'IotMotor._clampSpeed', (['speed2'], {}), '(speed2)\n', (6127, 6135), False, 'from IotLib.iotMotor import IotMotor\n')]
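Each extraction tuple also ends with the import statement that makes its call resolvable (e.g. 'from threading import RLock\n' in the record above), so the imports needed to replay any single call can be recovered without re-parsing the snippet. A sketch under the same inferred-layout assumption:

import ast

def imports_for(record):
    """Collect the distinct import statements recorded alongside each extracted call."""
    seen = []
    for entry in ast.literal_eval(record["extract_api"]):  # assumed field name
        stmt = entry[-1]  # in the rows above, the last tuple element holds the import statement
        if isinstance(stmt, str) and stmt.startswith(("import ", "from ")) and stmt not in seen:
            seen.append(stmt)
    return "".join(seen)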
# # This is a setup file for Cython # # # # # # from setuptools import setup from Cython.Build import cythonize setup( name = "Climate", ext_modules = cythonize(["*.py"], build_dir="output"), ) #python setup.py build_ext --inplace
[ "Cython.Build.cythonize" ]
[((161, 200), 'Cython.Build.cythonize', 'cythonize', (["['*.py']"], {'build_dir': '"""output"""'}), "(['*.py'], build_dir='output')\n", (170, 200), False, 'from Cython.Build import cythonize\n')]
# 2019-11-19 19:43:48(JST) import sys # import collections # import math # from string import ascii_lowercase, ascii_uppercase, digits # from bisect import bisect_left as bi_l, bisect_right as bi_r # import itertools # from functools import reduce # import operator as op # import re # import heapq # import array # from scipy.misc import comb # (default: exact=False) # import numpy as np def main(): a, b = [int(x) for x in sys.stdin.readline().split()] print(-1 if a >= 10 or b >= 10 else a * b) if __name__ == "__main__": main()
[ "sys.stdin.readline" ]
[((451, 471), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (469, 471), False, 'import sys\n')]
import justpy as jp from spreadsheet.googlesheet import GoogleSheet from lodstorage.lod import LOD from lodstorage.sparql import SPARQL from markupsafe import Markup import copy import datetime import re import os import pprint import sys import traceback from jpwidgets.widgets import LodGrid,MenuButton, MenuLink, QAlert,QPasswordDialog from spreadsheet.version import Version from spreadsheet.wikidata import Wikidata from spreadsheet.wbquery import WikibaseQuery DEBUG = 0 from argparse import ArgumentParser from argparse import RawDescriptionHelpFormatter class WikidataGrid(): ''' the tabular data to work with ''' def __init__(self,wbQueries): ''' constructor wbQueries(dict): the WikibaseQueries ''' self.wbQueries=wbQueries def setLodFromDataFrame(self,df): ''' set my List of Dicts from the given data frame Args: df(Dataframe): the dataframe to set my list of dicts from ''' lod=df.to_dict('records') self.setLod(lod) def setLod(self,lod:list): ''' set my list of dicts Args: lod(list): a list of dicts to work with ''' self.lod=lod if len(lod)<1: raise Exception("Empty List of dicts is not valid") self.columns=self.lod[0].keys() for index,row in enumerate(self.lod): row["lodRowIndex"]=index self.viewLod=copy.deepcopy(self.lod) def getColumnTypeAndVarname(self,entityName,propName): ''' slightly modified getter to account for "item" special case ''' wbQuery=self.wbQueries[entityName] if propName=="item": column="item" propType="" varName="item" else: column,propType,varName=wbQuery.getColumnTypeAndVarname(propName) return wbQuery,column,propType,varName def getHtmlColums(self,entityName): ''' get the columns that have html content(links) for the given entityName entityName(str): the name of the entity ''' htmlColumns=[0] # loop over columns of dataframe wbQuery=self.wbQueries[entityName] for columnIndex,column in enumerate(self.columns): # check whether there is metadata for the column if column in wbQuery.propertiesByColumn: propRow=wbQuery.propertiesByColumn[column] propType=propRow["Type"] if not propType or propType=="extid" or propType=="url": htmlColumns.append(columnIndex) return htmlColumns def createLink(self,url,text): ''' create a link from the given url and text Args: url(str): the url to create a link for text(str): the text to add for the link ''' link=f"<a href='{url}' style='color:blue'>{text}</a>" return link def linkWikidataItems(self,viewLod,itemColumn:str="item"): ''' link the wikidata entries in the given item column if containing Q values Args: viewLod(list): the list of dicts for the view itemColumn(str): the name of the column to handle ''' for row in viewLod: if itemColumn in row: item=row[itemColumn] if re.match(r"Q[0-9]+",item): itemLink=self.createLink(f"https://www.wikidata.org/wiki/{item}", item) row[itemColumn]=itemLink class GridSync(): ''' allow syncing the grid with data from wikibase ''' def __init__(self,wdgrid,sheetName,pk,debug:bool=False): self.wdgrid=wdgrid self.sheetName=sheetName self.pk=pk self.debug=debug self.itemRows=wdgrid.lod self.wbQuery,self.pkColumn,self.pkType,self.pkProp=wdgrid.getColumnTypeAndVarname(sheetName,pk) self.itemsByPk,_dup=LOD.getLookup(self.itemRows,self.pkColumn) if self.debug: print(self.itemsByPk.keys()) def query(self,sparql): ''' query the wikibase instance based on the list of dict ''' lang="en" if self.pkType =="text" else None valuesClause=self.wbQuery.getValuesClause(self.itemsByPk.keys(),self.pkProp,propType=self.pkType,lang=lang) self.sparqlQuery=self.wbQuery.asSparql(filterClause=valuesClause,orderClause=f"ORDER BY 
?{self.pkProp}",pk=self.pk) if self.debug: print(self.sparqlQuery) self.wbRows=sparql.queryAsListOfDicts(self.sparqlQuery) if self.debug: pprint.pprint(self.wbRows) def checkCell(self,viewLodRow,column,value,propVarname,propType,propLabel,propUrl:str=None): ''' update the cell value for the given Args: viewLodRow(dict): the row to modify value(object): the value to set for the cell propVarName(str): the name of the property Variable set in the SPARQL statement propType(str): the abbreviation for the property Type propLabel(str): the propertyLabel (if any) propUrl(str): the propertyUrl (if any) ''' cellValue=viewLodRow[column] valueType=type(value) print(f"{column}({propVarname})={value}({propLabel}:{propUrl}:{valueType})⮂{cellValue}") # overwrite empty cells overwrite=not cellValue if cellValue: # overwrite values with links if propUrl and cellValue==value: overwrite=True if overwrite and value: doadd=True # create links for item properties if not propType: value=self.wdgrid.createLink(value, propLabel) elif propType=="extid": value=self.wdgrid.createLink(propUrl,value) if valueType==str: pass elif valueType==datetime.datetime: value=value.strftime('%Y-%m-%d') else: doadd=False print(f"{valueType} not added") if doadd: viewLodRow[column]=value def markViewLod(self,viewLod): ''' viewLod(list): a list of dict for the mark result ''' # now check the rows for wbRow in self.wbRows: # get the primary key value pkValue=wbRow[self.pkProp] pkValue=re.sub(r"http://www.wikidata.org/entity/(Q[0-9]+)", r"\1",pkValue) # if we have the primary key then we mark the whole row if pkValue in self.itemsByPk: if self.debug: print(pkValue) # https://stackoverflow.com/questions/14538885/how-to-get-the-index-with-the-key-in-a-dictionary lodRow=self.itemsByPk[pkValue] rowIndex=lodRow["lodRowIndex"] viewLodRow=viewLod[rowIndex] itemLink=self.wdgrid.createLink(wbRow["item"],wbRow["itemLabel"]) viewLodRow["item"]=itemLink itemDescription=wbRow.get("itemDescription","") self.checkCell(viewLodRow,"description",itemDescription,propVarname="itemDescription",propType="string",propLabel="") # loop over the result items for propVarname,value in wbRow.items(): # remap the property variable name to the original property description if propVarname in self.wbQuery.propertiesByVarname: propRow=self.wbQuery.propertiesByVarname[propVarname] column=propRow["Column"] propType=propRow["Type"] if not propType: propLabel=wbRow[f"{propVarname}Label"] else: propLabel="" if propType=="extid": propUrl=wbRow[f"{propVarname}Url"] else: propUrl="" # Linked Or if type(value)==str and value.startswith("http://www.wikidata.org/entity/") and f"{propVarname}Label" in wbRow: propUrl=value propLabel=wbRow[f"{propVarname}Label"] value=propLabel if column in lodRow: self.checkCell(viewLodRow,column,value,propVarname,propType,propLabel,propUrl) class GoogleSheetWikidataImport(): ''' reactive google sheet display to be used for wikidata import of the content ''' def __init__(self,url,sheetNames:list,pk:str,endpoint:str,lang:str="en",debug:bool=False): ''' constructor Args: url(str): the url of the google spreadsheet sheetNames(list): the name of the sheets to import data from pk(str): the primary key property to use for wikidata queries endpoint(str): the url of the endpoint to use lang(str): the languate to use for labels debug(bool): if True show debug information ''' self.debug=debug self.url=url self.sheetNames=sheetNames self.sheetName=sheetNames[0] self.pk=pk self.endpoint=endpoint self.sparql=SPARQL(self.endpoint) self.lang=lang # @TODO make 
        self.metaDataSheetName="WikidataMetadata"
        self.wd=Wikidata("https://www.wikidata.org",debug=True)
        self.agGrid=None
        self.wdgrid=None
        self.dryRun=True

    def clearErrors(self):
        '''
        clear the error display
        '''
        self.errors.inner_html=""

    def handleException(self,ex):
        '''
        handle the given exception

        Args:
            ex(Exception): the exception to handle
        '''
        errorMsg=str(ex)
        trace=""
        if self.debug:
            trace=traceback.format_exc()
        errorMsgHtml=f"{errorMsg}<pre>{trace}</pre>"
        self.errors.inner_html=errorMsgHtml
        print(errorMsg)
        if self.debug:
            print(trace)

    def load(self,url:str,sheetName:str,metaDataSheetName="WikidataMetadata"):
        '''
        load my googlesheet, wikibaseQueries and dataframe

        Args:
            url(str): the url to load the spreadsheet from
            sheetName(str): the sheetName of the sheet/tab to load
        '''
        wbQueries=WikibaseQuery.ofGoogleSheet(url, metaDataSheetName, debug=self.debug)
        self.wdgrid=WikidataGrid(wbQueries)
        self.gs=GoogleSheet(url)
        self.gs.open([sheetName])
        self.wdgrid.setLod(self.gs.asListOfDicts(sheetName))

    def onCheckWikidata(self,msg=None):
        '''
        check clicked - check the wikidata content

        Args:
            msg(dict): the justpy message
        '''
        if self.debug:
            print(msg)
        try:
            self.clearErrors()
            # prepare syncing the table results with the wikibase query result
            gridSync=GridSync(self.wdgrid,self.sheetName,self.pk,debug=self.debug)
            # query based on table content
            gridSync.query(self.sparql)
            # get the view copy to insert result as html statements
            viewLod=self.wdgrid.viewLod
            gridSync.markViewLod(viewLod)
            # reload the AG Grid with the html enriched content
            self.reloadAgGrid(viewLod)
        except Exception as ex:
            self.handleException(ex)

    def reloadAgGrid(self,viewLod:list,showLimit=10):
        '''
        reload the agGrid with the given list of dicts

        Args:
            viewLod(list): the list of dicts for the current view
        '''
        self.agGrid.load_lod(viewLod)
        if self.debug:
            pprint.pprint(viewLod[:showLimit])
        self.refreshGridSettings()

    def refreshGridSettings(self):
        '''
        refresh the ag grid settings, e.g. enable the row selection event handler
        '''
        self.agGrid.on('rowSelected', self.onRowSelected)
        self.agGrid.options.columnDefs[0].checkboxSelection = True
        # set html columns according to types that have links
        self.agGrid.html_columns = self.wdgrid.getHtmlColums(self.sheetName)

    def reload(self,_msg=None,clearErrors=True):
        '''
        reload the table content from my url and sheet name
        '''
        if clearErrors:
            self.clearErrors()
        self.load(self.url,self.sheetName,self.metaDataSheetName)
        # is there already an agGrid?
if self.agGrid is None: self.agGrid = LodGrid(a=self.container) viewLod=self.wdgrid.viewLod self.wdgrid.linkWikidataItems(viewLod) self.reloadAgGrid(viewLod) # set up the primary key selector self.pkSelect.delete_components() self.pkSelect.add(jp.Option(value="item",text="item")) wbQuery=self.wdgrid.wbQueries[self.sheetName] for propertyName,row in wbQuery.propertiesByName.items(): columnName=row["Column"] if columnName: self.pkSelect.add(jp.Option(value=propertyName,text=columnName)) def onChangeSheet(self, msg:dict): ''' handle selection of a different sheet Args: msg(dict): the justpy message ''' if self.debug: print(msg) self.sheetName=msg.value try: self.reload() except Exception as ex: self.handleException(ex) def onChangePk(self, msg:dict): ''' handle selection of a different primary key Args: msg(dict): the justpy message ''' if self.debug: print(msg) self.pk=msg.value try: self.reload() except Exception as ex: self.handleException(ex) def onChangeUrl(self,msg:dict): ''' handle selection of a different url Args: msg(dict): the justpy message ''' if self.debug: print(msg) self.url=msg.value self.gsheetUrl.href=self.url self.gsheetUrl.text=self.url try: self.reload() except Exception as ex: self.handleException(ex) def onChangeDryRun(self,msg:dict): ''' handle change of DryRun setting Args: msg(dict): the justpy message ''' self.dryRun=msg.value def loginUser(self,user): self.loginButton.text=f"logout {user}" self.loginButton.icon="chevron_left" self.dryRunButton.disable=False def onloginViaDialog(self,_msg): ''' handle login via dialog ''' user=self.passwordDialog.userInput.value password=self.passwordDialog.passwordInput.value self.wd.loginWithCredentials(user, password) if self.wd.user is not None: self.loginUser(self.wd.user) def onLogin(self,msg:dict): ''' handle Login Args: msg(dict): the justpy message ''' if self.debug: print(msg) try: self.clearErrors() if self.wd.user is None: self.wd.loginWithCredentials() if self.wd.user is None: self.passwordDialog.loginButton.on("click",self.onloginViaDialog) self.passwordDialog.value=True else: self.loginUser(self.wd.user) else: self.wd.logout() self.dryRunButton.value=True self.dryRunButton.disable=True self.loginButton.text="login" self.loginButton.icon="chevron_right" except Exception as ex: self.handleException(ex) def onRowSelected(self, msg): ''' row selection event handler Args: msg(dict): row selection information ''' if self.debug: print(msg) self.clearErrors() if msg.selected: self.rowSelected = msg.rowIndex write=not self.dryRun label=msg.data["label"] try: mapDict=self.wdgrid.wbQueries[self.sheetName].propertiesById qid,errors=self.wd.addDict(msg.data, mapDict,write=write) if qid is not None: # set item link link=self.wdgrid.createLink(f"https://www.wikidata.org/wiki/{qid}", f"{label}") self.wdgrid.viewLod[msg.rowIndex]["item"]=link self.agGrid.load_lod(self.wdgrid.viewLod) self.refreshGridSettings() if len(errors)>0: self.errors.text=errors print(errors) if self.dryRun: prettyData=pprint.pformat(msg.data) html=Markup(f"<pre>{prettyData}</pre>") self.alertDialog.alertContent.inner_html=html self.alertDialog.alertTitle.text=f"Dry Run for {label}" self.alertDialog.value=True except Exception as ex: self.handleException(ex) def gridForDataFrame(self): ''' show aggrid for the given data frame ''' self.wp = jp.QuasarPage() self.container=jp.Div(a=self.wp) self.header=jp.Div(a=self.container) self.toolbar=jp.QToolbar(a=self.header) # for icons see https://quasar.dev/vue-components/icon # see 
justpy/templates/local/materialdesignicons/iconfont/codepoints for available icons self.reloadButton=MenuButton(a=self.toolbar,text='reload',icon="refresh",click=self.reload) self.checkButton=MenuButton(a=self.toolbar,text='check',icon='check_box',click=self.onCheckWikidata) MenuLink(a=self.toolbar,text="docs",icon="description",href='https://wiki.bitplan.com/index.php/PyOnlineSpreadSheetEditing') MenuLink(a=self.toolbar,text='github',icon='forum', href="https://github.com/WolfgangFahl/pyOnlineSpreadSheetEditing") self.loginButton=MenuButton(a=self.toolbar,icon='chevron_right',text="login",click=self.onLogin) self.passwordDialog=QPasswordDialog(a=self.wp) self.alertDialog=QAlert(a=self.wp) #jp.Br(a=self.header) # url urlLabelText="Google Spreadsheet Url" self.gsheetUrl=jp.A(a=self.header,href=self.url,target="_blank",title=urlLabelText) self.linkIcon=jp.QIcon(a=self.gsheetUrl,name="link",size="md") self.urlInput=jp.Input(a=self.header,placeholder=urlLabelText,size=80,value=self.url,change=self.onChangeUrl) self.dryRunButton=jp.QToggle(a=self.header,text="dry run",value=True,disable=True) self.dryRunButton.on("input",self.onChangeDryRun) jp.Br(a=self.header) # link to the wikidata item currently imported selectorClasses='w-32 m-4 p-2 bg-white' # select for sheets self.sheetSelect = jp.Select(classes=selectorClasses, a=self.header, value=self.sheetName, change=self.onChangeSheet) for sheetName in self.sheetNames: self.sheetSelect.add(jp.Option(value=sheetName, text=sheetName)) # selector for column/property self.pkSelect=jp.Select(classes=selectorClasses,a=self.header,value=self.pk, change=self.onChangePk) jp.Br(a=self.header) self.errors=jp.Span(a=self.container,style='color:red') try: self.reload() except Exception as ex: self.handleException(ex) return self.wp def start(self): ''' start the reactive justpy webserver ''' jp.justpy(self.gridForDataFrame) def main(argv=None): # IGNORE:C0111 '''main program.''' if argv is None: argv=sys.argv[1:] program_name = os.path.basename(__file__) program_version = "v%s" % Version.version program_build_date = str(Version.updated) program_version_message = '%%(prog)s %s (%s)' % (program_version, program_build_date) program_shortdesc = "Wikidata Import from google spreadsheet" user_name="<NAME>" program_license = '''%s Created by %s on %s. Copyright 2022 contributors. All rights reserved. Licensed under the Apache License 2.0 http://www.apache.org/licenses/LICENSE-2.0 Distributed on an "AS IS" basis without warranties or conditions of any kind, either express or implied. 
USAGE ''' % (program_shortdesc,user_name, str(Version.date)) try: # Setup argument parser parser = ArgumentParser(description=program_license, formatter_class=RawDescriptionHelpFormatter) parser.add_argument("-d", "--debug", dest="debug", action="store_true", help="set debug [default: %(default)s]") parser.add_argument('-V', '--version', action='version', version=program_version_message) parser.add_argument('--endpoint',help="the endpoint to use [default: %(default)s]",default="https://query.wikidata.org/sparql") #parser.add_argument('--dryrun', action="store_true", dest='dryrun', help="dry run only") parser.add_argument('--url') parser.add_argument('--sheets',nargs="+",required=True) parser.add_argument('--pk') args = parser.parse_args(argv) gswdi=GoogleSheetWikidataImport(args.url,args.sheets,pk=args.pk,endpoint=args.endpoint,debug=args.debug) gswdi.start() except KeyboardInterrupt: ### handle keyboard interrupt ### return 1 except Exception as e: if DEBUG: raise(e) indent = len(program_name) * " " sys.stderr.write(program_name + ": " + repr(e) + "\n") sys.stderr.write(indent + " for help use --help") print(traceback.format_exc()) return 2 if __name__ == "__main__": if DEBUG: sys.argv.append("-d") sys.exit(main())
[ "justpy.QToggle", "pprint.pformat", "jpwidgets.widgets.QPasswordDialog", "argparse.ArgumentParser", "jpwidgets.widgets.LodGrid", "justpy.QuasarPage", "jpwidgets.widgets.MenuLink", "justpy.A", "pprint.pprint", "justpy.Br", "jpwidgets.widgets.MenuButton", "lodstorage.sparql.SPARQL", "justpy.QToolbar", "justpy.justpy", "lodstorage.lod.LOD.getLookup", "traceback.format_exc", "justpy.Input", "spreadsheet.wbquery.WikibaseQuery.ofGoogleSheet", "re.sub", "copy.deepcopy", "spreadsheet.wikidata.Wikidata", "spreadsheet.googlesheet.GoogleSheet", "os.path.basename", "justpy.Span", "jpwidgets.widgets.QAlert", "re.match", "sys.stderr.write", "justpy.Option", "justpy.QIcon", "sys.argv.append", "justpy.Select", "markupsafe.Markup", "justpy.Div" ]
[((20573, 20599), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (20589, 20599), False, 'import os\n'), ((1509, 1532), 'copy.deepcopy', 'copy.deepcopy', (['self.lod'], {}), '(self.lod)\n', (1522, 1532), False, 'import copy\n'), ((4076, 4119), 'lodstorage.lod.LOD.getLookup', 'LOD.getLookup', (['self.itemRows', 'self.pkColumn'], {}), '(self.itemRows, self.pkColumn)\n', (4089, 4119), False, 'from lodstorage.lod import LOD\n'), ((9608, 9629), 'lodstorage.sparql.SPARQL', 'SPARQL', (['self.endpoint'], {}), '(self.endpoint)\n', (9614, 9629), False, 'from lodstorage.sparql import SPARQL\n'), ((9753, 9801), 'spreadsheet.wikidata.Wikidata', 'Wikidata', (['"""https://www.wikidata.org"""'], {'debug': '(True)'}), "('https://www.wikidata.org', debug=True)\n", (9761, 9801), False, 'from spreadsheet.wikidata import Wikidata\n'), ((10795, 10864), 'spreadsheet.wbquery.WikibaseQuery.ofGoogleSheet', 'WikibaseQuery.ofGoogleSheet', (['url', 'metaDataSheetName'], {'debug': 'self.debug'}), '(url, metaDataSheetName, debug=self.debug)\n', (10822, 10864), False, 'from spreadsheet.wbquery import WikibaseQuery\n'), ((10925, 10941), 'spreadsheet.googlesheet.GoogleSheet', 'GoogleSheet', (['url'], {}), '(url)\n', (10936, 10941), False, 'from spreadsheet.googlesheet import GoogleSheet\n'), ((17993, 18008), 'justpy.QuasarPage', 'jp.QuasarPage', ([], {}), '()\n', (18006, 18008), True, 'import justpy as jp\n'), ((18032, 18049), 'justpy.Div', 'jp.Div', ([], {'a': 'self.wp'}), '(a=self.wp)\n', (18038, 18049), True, 'import justpy as jp\n'), ((18070, 18094), 'justpy.Div', 'jp.Div', ([], {'a': 'self.container'}), '(a=self.container)\n', (18076, 18094), True, 'import justpy as jp\n'), ((18116, 18142), 'justpy.QToolbar', 'jp.QToolbar', ([], {'a': 'self.header'}), '(a=self.header)\n', (18127, 18142), True, 'import justpy as jp\n'), ((18334, 18410), 'jpwidgets.widgets.MenuButton', 'MenuButton', ([], {'a': 'self.toolbar', 'text': '"""reload"""', 'icon': '"""refresh"""', 'click': 'self.reload'}), "(a=self.toolbar, text='reload', icon='refresh', click=self.reload)\n", (18344, 18410), False, 'from jpwidgets.widgets import LodGrid, MenuButton, MenuLink, QAlert, QPasswordDialog\n'), ((18433, 18524), 'jpwidgets.widgets.MenuButton', 'MenuButton', ([], {'a': 'self.toolbar', 'text': '"""check"""', 'icon': '"""check_box"""', 'click': 'self.onCheckWikidata'}), "(a=self.toolbar, text='check', icon='check_box', click=self.\n onCheckWikidata)\n", (18443, 18524), False, 'from jpwidgets.widgets import LodGrid, MenuButton, MenuLink, QAlert, QPasswordDialog\n'), ((18525, 18657), 'jpwidgets.widgets.MenuLink', 'MenuLink', ([], {'a': 'self.toolbar', 'text': '"""docs"""', 'icon': '"""description"""', 'href': '"""https://wiki.bitplan.com/index.php/PyOnlineSpreadSheetEditing"""'}), "(a=self.toolbar, text='docs', icon='description', href=\n 'https://wiki.bitplan.com/index.php/PyOnlineSpreadSheetEditing')\n", (18533, 18657), False, 'from jpwidgets.widgets import LodGrid, MenuButton, MenuLink, QAlert, QPasswordDialog\n'), ((18658, 18783), 'jpwidgets.widgets.MenuLink', 'MenuLink', ([], {'a': 'self.toolbar', 'text': '"""github"""', 'icon': '"""forum"""', 'href': '"""https://github.com/WolfgangFahl/pyOnlineSpreadSheetEditing"""'}), "(a=self.toolbar, text='github', icon='forum', href=\n 'https://github.com/WolfgangFahl/pyOnlineSpreadSheetEditing')\n", (18666, 18783), False, 'from jpwidgets.widgets import LodGrid, MenuButton, MenuLink, QAlert, QPasswordDialog\n'), ((18802, 18889), 'jpwidgets.widgets.MenuButton', 'MenuButton', ([], {'a': 
'self.toolbar', 'icon': '"""chevron_right"""', 'text': '"""login"""', 'click': 'self.onLogin'}), "(a=self.toolbar, icon='chevron_right', text='login', click=self.\n onLogin)\n", (18812, 18889), False, 'from jpwidgets.widgets import LodGrid, MenuButton, MenuLink, QAlert, QPasswordDialog\n'), ((18910, 18936), 'jpwidgets.widgets.QPasswordDialog', 'QPasswordDialog', ([], {'a': 'self.wp'}), '(a=self.wp)\n', (18925, 18936), False, 'from jpwidgets.widgets import LodGrid, MenuButton, MenuLink, QAlert, QPasswordDialog\n'), ((18962, 18979), 'jpwidgets.widgets.QAlert', 'QAlert', ([], {'a': 'self.wp'}), '(a=self.wp)\n', (18968, 18979), False, 'from jpwidgets.widgets import LodGrid, MenuButton, MenuLink, QAlert, QPasswordDialog\n'), ((19093, 19164), 'justpy.A', 'jp.A', ([], {'a': 'self.header', 'href': 'self.url', 'target': '"""_blank"""', 'title': 'urlLabelText'}), "(a=self.header, href=self.url, target='_blank', title=urlLabelText)\n", (19097, 19164), True, 'import justpy as jp\n'), ((19184, 19234), 'justpy.QIcon', 'jp.QIcon', ([], {'a': 'self.gsheetUrl', 'name': '"""link"""', 'size': '"""md"""'}), "(a=self.gsheetUrl, name='link', size='md')\n", (19192, 19234), True, 'import justpy as jp\n'), ((19255, 19358), 'justpy.Input', 'jp.Input', ([], {'a': 'self.header', 'placeholder': 'urlLabelText', 'size': '(80)', 'value': 'self.url', 'change': 'self.onChangeUrl'}), '(a=self.header, placeholder=urlLabelText, size=80, value=self.url,\n change=self.onChangeUrl)\n', (19263, 19358), True, 'import justpy as jp\n'), ((19377, 19444), 'justpy.QToggle', 'jp.QToggle', ([], {'a': 'self.header', 'text': '"""dry run"""', 'value': '(True)', 'disable': '(True)'}), "(a=self.header, text='dry run', value=True, disable=True)\n", (19387, 19444), True, 'import justpy as jp\n'), ((19508, 19528), 'justpy.Br', 'jp.Br', ([], {'a': 'self.header'}), '(a=self.header)\n', (19513, 19528), True, 'import justpy as jp\n'), ((19687, 19789), 'justpy.Select', 'jp.Select', ([], {'classes': 'selectorClasses', 'a': 'self.header', 'value': 'self.sheetName', 'change': 'self.onChangeSheet'}), '(classes=selectorClasses, a=self.header, value=self.sheetName,\n change=self.onChangeSheet)\n', (19696, 19789), True, 'import justpy as jp\n'), ((19978, 20071), 'justpy.Select', 'jp.Select', ([], {'classes': 'selectorClasses', 'a': 'self.header', 'value': 'self.pk', 'change': 'self.onChangePk'}), '(classes=selectorClasses, a=self.header, value=self.pk, change=\n self.onChangePk)\n', (19987, 20071), True, 'import justpy as jp\n'), ((20085, 20105), 'justpy.Br', 'jp.Br', ([], {'a': 'self.header'}), '(a=self.header)\n', (20090, 20105), True, 'import justpy as jp\n'), ((20126, 20170), 'justpy.Span', 'jp.Span', ([], {'a': 'self.container', 'style': '"""color:red"""'}), "(a=self.container, style='color:red')\n", (20133, 20170), True, 'import justpy as jp\n'), ((20403, 20435), 'justpy.justpy', 'jp.justpy', (['self.gridForDataFrame'], {}), '(self.gridForDataFrame)\n', (20412, 20435), True, 'import justpy as jp\n'), ((21292, 21385), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': 'program_license', 'formatter_class': 'RawDescriptionHelpFormatter'}), '(description=program_license, formatter_class=\n RawDescriptionHelpFormatter)\n', (21306, 21385), False, 'from argparse import ArgumentParser\n'), ((22581, 22602), 'sys.argv.append', 'sys.argv.append', (['"""-d"""'], {}), "('-d')\n", (22596, 22602), False, 'import sys\n'), ((4760, 4786), 'pprint.pprint', 'pprint.pprint', (['self.wbRows'], {}), '(self.wbRows)\n', (4773, 4786), False, 'import pprint\n'), 
((6657, 6723), 're.sub', 're.sub', (['"""http://www.wikidata.org/entity/(Q[0-9]+)"""', '"""\\\\1"""', 'pkValue'], {}), "('http://www.wikidata.org/entity/(Q[0-9]+)', '\\\\1', pkValue)\n", (6663, 6723), False, 'import re\n'), ((10261, 10283), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (10281, 10283), False, 'import traceback\n'), ((12200, 12234), 'pprint.pprint', 'pprint.pprint', (['viewLod[:showLimit]'], {}), '(viewLod[:showLimit])\n', (12213, 12234), False, 'import pprint\n'), ((13083, 13108), 'jpwidgets.widgets.LodGrid', 'LodGrid', ([], {'a': 'self.container'}), '(a=self.container)\n', (13090, 13108), False, 'from jpwidgets.widgets import LodGrid, MenuButton, MenuLink, QAlert, QPasswordDialog\n'), ((13337, 13373), 'justpy.Option', 'jp.Option', ([], {'value': '"""item"""', 'text': '"""item"""'}), "(value='item', text='item')\n", (13346, 13373), True, 'import justpy as jp\n'), ((22420, 22470), 'sys.stderr.write', 'sys.stderr.write', (["(indent + ' for help use --help')"], {}), "(indent + ' for help use --help')\n", (22436, 22470), False, 'import sys\n'), ((3492, 3517), 're.match', 're.match', (['"""Q[0-9]+"""', 'item'], {}), "('Q[0-9]+', item)\n", (3500, 3517), False, 'import re\n'), ((19873, 19915), 'justpy.Option', 'jp.Option', ([], {'value': 'sheetName', 'text': 'sheetName'}), '(value=sheetName, text=sheetName)\n', (19882, 19915), True, 'import justpy as jp\n'), ((22485, 22507), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (22505, 22507), False, 'import traceback\n'), ((13592, 13638), 'justpy.Option', 'jp.Option', ([], {'value': 'propertyName', 'text': 'columnName'}), '(value=propertyName, text=columnName)\n', (13601, 13638), True, 'import justpy as jp\n'), ((17500, 17524), 'pprint.pformat', 'pprint.pformat', (['msg.data'], {}), '(msg.data)\n', (17514, 17524), False, 'import pprint\n'), ((17550, 17584), 'markupsafe.Markup', 'Markup', (['f"""<pre>{prettyData}</pre>"""'], {}), "(f'<pre>{prettyData}</pre>')\n", (17556, 17584), False, 'from markupsafe import Markup\n')]
""" Copyright (c) 2017, <NAME>. Distributed under the terms of the MIT License. The full license is in the file LICENSE, distributed with this software. Created on Oct 4, 2017 @author: jrm """ import sh import sys def main(): # Make sure instance is cleared from enaml.application import Application Application._instance = None from enamlnative.android.app import AndroidApplication app = AndroidApplication( debug=True, dev='remote', # "10.0.2.2" # or 'server' load_view=load_view ) app.timed_call(5000, run_gestures, app) app.start() def run_gestures(app): for i in range(30): #: Swipe to next page t = i*2000 app.timed_call(t, sh.adb, *'shell input swipe 250 300 -800 300'.split(), _bg=True) #: Tap a few places for j in range(4): app.timed_call(t+i*200, sh.adb, *'shell input tap 500 150'.split(), _bg=True) app.timed_call(120000, app.stop) def load_view(app): import enaml #: For debug purposes only! app.widget.resetBridgeStats() app.widget.resetBridgeCache() with enaml.imports(): import view if app.view: reload(view) app.view = view.ContentView() #: Time how long it takes app.show_view() def test_remote_debug(): #sh.pip('install tornado --user'.split()) enaml_native = sh.Command('enaml-native') enaml_native('start', '--remote-debugging', _bg=True) #: Add sys.path.append('src/apps/') sys.path.append('src/') #: Init remote nativehooks implementation from enamlnative.core import remotehooks remotehooks.init() main()
[ "sys.path.append", "view.ContentView", "enamlnative.android.app.AndroidApplication", "sh.Command", "enaml.imports", "enamlnative.core.remotehooks.init" ]
[((417, 482), 'enamlnative.android.app.AndroidApplication', 'AndroidApplication', ([], {'debug': '(True)', 'dev': '"""remote"""', 'load_view': 'load_view'}), "(debug=True, dev='remote', load_view=load_view)\n", (435, 482), False, 'from enamlnative.android.app import AndroidApplication\n'), ((1414, 1440), 'sh.Command', 'sh.Command', (['"""enaml-native"""'], {}), "('enaml-native')\n", (1424, 1440), False, 'import sh\n'), ((1515, 1543), 'sys.path.append', 'sys.path.append', (['"""src/apps/"""'], {}), "('src/apps/')\n", (1530, 1543), False, 'import sys\n'), ((1548, 1571), 'sys.path.append', 'sys.path.append', (['"""src/"""'], {}), "('src/')\n", (1563, 1571), False, 'import sys\n'), ((1668, 1686), 'enamlnative.core.remotehooks.init', 'remotehooks.init', ([], {}), '()\n', (1684, 1686), False, 'from enamlnative.core import remotehooks\n'), ((1151, 1166), 'enaml.imports', 'enaml.imports', ([], {}), '()\n', (1164, 1166), False, 'import enaml\n'), ((1253, 1271), 'view.ContentView', 'view.ContentView', ([], {}), '()\n', (1269, 1271), False, 'import view\n')]
# Django imports
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.shortcuts import render

# Partners app imports
from partners.models.partner_models import Partner


def partner_list(request):
    """
    Display a list of partners and their details
    """
    partners_list = Partner.objects.all()
    page = request.GET.get('page', 1)
    paginator = Paginator(partners_list, 10)

    try:
        partners = paginator.page(page)
    except PageNotAnInteger:
        partners = paginator.page(1)
    except EmptyPage:
        partners = paginator.page(paginator.num_pages)

    template_name = "partners/partners_list.html"
    context = {"partners": partners}
    return render(request, template_name, context)
[ "django.shortcuts.render", "django.core.paginator.Paginator", "partners.models.partner_models.Partner.objects.all" ]
[((315, 336), 'partners.models.partner_models.Partner.objects.all', 'Partner.objects.all', ([], {}), '()\n', (334, 336), False, 'from partners.models.partner_models import Partner\n'), ((393, 421), 'django.core.paginator.Paginator', 'Paginator', (['partners_list', '(10)'], {}), '(partners_list, 10)\n', (402, 421), False, 'from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n'), ((714, 753), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (720, 753), False, 'from django.shortcuts import render\n')]
# Copyright 2022, <NAME>, mailto:<EMAIL>
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Operations on the tree.

This is mostly for the different kinds of visits that the node tree can have.
You can visit a scope, a tree (module), or every scope of a tree (module).

"""

from nuitka.containers.oset import OrderedSet
from nuitka.Tracing import general


def visitTree(tree, visitor):
    visitor.onEnterNode(tree)

    for visitable in tree.getVisitableNodes():
        if visitable is None:
            raise AssertionError("'None' child encountered", tree, tree.source_ref)

        visitTree(visitable, visitor)

    visitor.onLeaveNode(tree)


def visitFunction(function, visitor):
    visitor.onEnterNode(function)
    visitor.onLeaveNode(function)


def visitModule(module, visitor):
    visitor.onEnterNode(module)
    visitor.onLeaveNode(module)


class VisitorNoopMixin(object):
    def onEnterNode(self, node):
        """Overloaded for operation before the node children were done."""

    def onLeaveNode(self, node):
        """Overloaded for operation after the node children were done."""


class DetectUsedModules(VisitorNoopMixin):
    def __init__(self):
        self.used_modules = OrderedSet()

    def onEnterNode(self, node):
        try:
            self._onEnterNode(node)
        except Exception:
            general.my_print(
                "Problem with %r at %s" % (node, node.getSourceReference().getAsString())
            )
            raise

    def _onEnterNode(self, node):
        if node.isExpressionBuiltinImport():
            for (
                used_module_name,
                used_module_filename,
                finding,
                level,
            ) in node.getUsedModules():
                self.used_modules.add(
                    (
                        used_module_name,
                        used_module_filename,
                        finding,
                        level,
                        node.source_ref,
                    )
                )
        elif (
            node.isExpressionImportModuleHard()
            or node.isExpressionImportModuleNameHard()
            or node.isExpressionImportModuleFixed()
        ):
            used_module_name, used_module_filename, finding = node.getUsedModule()

            self.used_modules.add(
                (used_module_name, used_module_filename, finding, 0, node.source_ref)
            )

    def getUsedModules(self):
        return self.used_modules
[ "nuitka.containers.oset.OrderedSet" ]
[((1883, 1895), 'nuitka.containers.oset.OrderedSet', 'OrderedSet', ([], {}), '()\n', (1893, 1895), False, 'from nuitka.containers.oset import OrderedSet\n')]
from __future__ import unicode_literals

from django import VERSION as DJANGO_VERSION
from django.apps import AppConfig


class CoreConfig(AppConfig):

    name = 'mezzanine.core'

    def ready(self):
        from . import checks  # noqa

        if DJANGO_VERSION < (1, 9):
            # add_to_builtins was removed in 1.9 and replaced with a
            # documented public API configured by the TEMPLATES setting.
            from django.template.base import add_to_builtins
            add_to_builtins("mezzanine.template.loader_tags")
[ "django.template.base.add_to_builtins" ]
[((491, 540), 'django.template.base.add_to_builtins', 'add_to_builtins', (['"""mezzanine.template.loader_tags"""'], {}), "('mezzanine.template.loader_tags')\n", (506, 540), False, 'from django.template.base import add_to_builtins\n')]
try:
    import cloudfiles
except ImportError:
    pass


class RackspaceBackend(object):
    def __init__(self, username, api_key, container_name):
        self.username = username
        self.api_key = api_key
        self.conn = cloudfiles.Connection(username=username, api_key=api_key)
        self.container = cloudfiles.Container(self.conn, name=container_name)

    @property
    def name(self):
        return "cloudfiles"

    def key(self, hash):
        return cloudfiles.Object(container=self.container, name="{}/{}".format(hash[:2], hash[2:]))

    def push(self, file, hash, cb=None):
        self.key(hash).load_from_filename(file.name, callback=cb)

    def pull(self, file, hash, cb=None):
        self.key(hash).save_to_filename(file.name, callback=cb)

    def exists(self, hash):
        return self.key(hash).etag is not None
[ "cloudfiles.Connection", "cloudfiles.Container" ]
[((232, 289), 'cloudfiles.Connection', 'cloudfiles.Connection', ([], {'username': 'username', 'api_key': 'api_key'}), '(username=username, api_key=api_key)\n', (253, 289), False, 'import cloudfiles\n'), ((315, 367), 'cloudfiles.Container', 'cloudfiles.Container', (['self.conn'], {'name': 'container_name'}), '(self.conn, name=container_name)\n', (335, 367), False, 'import cloudfiles\n')]
# Copyright (c) OpenMMLab. All rights reserved.
import argparse
import glob
import json
import os.path as osp

import mmcv

try:
    import xlrd
except ImportError:
    xlrd = None
try:
    import xlutils
    from xlutils.copy import copy
except ImportError:
    xlutils = None


def parse_args():
    parser = argparse.ArgumentParser(
        description='Gather benchmarked models metric')
    parser.add_argument(
        'root', type=str, help='root path of benchmarked models to be gathered')
    parser.add_argument(
        'txt_path', type=str, help='txt path output by benchmark_filter')
    parser.add_argument(
        '--excel', type=str, help='input path of excel to be recorded')
    parser.add_argument(
        '--ncol', type=int, help='Number of column to be modified or appended')
    args = parser.parse_args()
    return args


if __name__ == '__main__':
    args = parse_args()

    if args.excel:
        assert args.ncol, 'Please specify "--excel" and "--ncol" ' 'at the same time'
        if xlrd is None:
            raise RuntimeError(
                'xlrd is not installed,'
                'Please use “pip install xlrd==1.2.0” to install')
        if xlutils is None:
            raise RuntimeError(
                'xlutils is not installed,'
                'Please use “pip install xlutils==2.0.0” to install')
        readbook = xlrd.open_workbook(args.excel)

    root_path = args.root
    all_results_dict = {}
    with open(args.txt_path, 'r') as f:
        model_cfgs = f.readlines()
        model_cfgs = [_ for _ in model_cfgs if 'configs' in _]

    for i, config in enumerate(model_cfgs):
        config = config.strip()
        if len(config) == 0:
            continue

        config_name = osp.split(config)[-1]
        config_name = osp.splitext(config_name)[0]
        result_path = osp.join(root_path, config_name)
        if osp.exists(result_path):
            # 1 read config and excel
            cfg = mmcv.Config.fromfile(config)
            total_epochs = cfg.total_epochs

            # the first metric will be used to find the best ckpt
            has_final_ckpt = True
            if 'vid' in config:
                eval_metrics = ['bbox_mAP_50']
            elif 'mot' in config:
                eval_metrics = ['MOTA', 'IDF1']
                # tracktor and deepsort don't have ckpt.
                has_final_ckpt = False
            elif 'sot' in config:
                eval_metrics = ['success', 'norm_precision', 'precision']
            else:
                raise NotImplementedError(
                    f'Not supported config: {config}')

            if args.excel:
                xlrw = copy(readbook)
                if 'vid' in config:
                    sheet = readbook.sheet_by_name('vid')
                    table = xlrw.get_sheet('vid')
                elif 'mot' in config:
                    sheet = readbook.sheet_by_name('mot')
                    table = xlrw.get_sheet('mot')
                elif 'sot' in config:
                    sheet = readbook.sheet_by_name('sot')
                    table = xlrw.get_sheet('sot')
                sheet_info = {}
                for i in range(6, sheet.nrows):
                    sheet_info[sheet.row_values(i)[0]] = i

            # 2 determine whether total_epochs ckpt exists
            ckpt_path = f'epoch_{total_epochs}.pth'
            if osp.exists(osp.join(result_path, ckpt_path)) or \
                    not has_final_ckpt:
                log_json_path = list(
                    sorted(glob.glob(osp.join(result_path, '*.log.json'))))[-1]

                # 3 read metric
                result_dict = dict()
                with open(log_json_path, 'r') as f:
                    for line in f.readlines():
                        log_line = json.loads(line)
                        if 'mode' not in log_line.keys():
                            continue

                        if log_line['mode'] == 'val' or \
                                log_line['mode'] == 'test':
                            result_dict[f"epoch_{log_line['epoch']}"] = {
                                key: log_line[key]
                                for key in eval_metrics if key in log_line
                            }

                # 4 find the best ckpt
                best_epoch_results = dict()
                for epoch in result_dict:
                    if len(best_epoch_results) == 0:
                        best_epoch_results = result_dict[epoch]
                    else:
                        if best_epoch_results[eval_metrics[0]] < result_dict[epoch][eval_metrics[0]]:
                            best_epoch_results = result_dict[epoch]

                for metric in best_epoch_results:
                    if 'success' in best_epoch_results:
                        performance = round(best_epoch_results[metric], 1)
                    else:
                        performance = round(
                            best_epoch_results[metric] * 100, 1)
                    best_epoch_results[metric] = performance
                all_results_dict[config] = best_epoch_results

                # update and append excel content
                if args.excel:
                    performance = ''
                    for metric in best_epoch_results:
                        performance += f'{best_epoch_results[metric]}/'
                    row_num = sheet_info.get(config, None)
                    if row_num:
                        table.write(row_num, args.ncol, performance)
                    else:
                        table.write(sheet.nrows, 0, config)
                        table.write(sheet.nrows, args.ncol, performance)
                    filename, sufflx = osp.splitext(args.excel)
                    xlrw.save(f'{filename}_o{sufflx}')
                    readbook = xlrd.open_workbook(f'{filename}_o{sufflx}')
            else:
                print(f'{config} not exist: {ckpt_path}')
        else:
            print(f'not exist: {config}')

    # 4 save or print results
    print('===================================')
    for config_name, metrics in all_results_dict.items():
        print(config_name, metrics)
    print('===================================')
    if args.excel:
        print(f'>>> Output {filename}_o{sufflx}')
[ "xlutils.copy.copy", "argparse.ArgumentParser", "json.loads", "xlrd.open_workbook", "os.path.exists", "mmcv.Config.fromfile", "os.path.splitext", "os.path.split", "os.path.join" ]
[((311, 382), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Gather benchmarked models metric"""'}), "(description='Gather benchmarked models metric')\n", (334, 382), False, 'import argparse\n'), ((1408, 1438), 'xlrd.open_workbook', 'xlrd.open_workbook', (['args.excel'], {}), '(args.excel)\n', (1426, 1438), False, 'import xlrd\n'), ((1902, 1934), 'os.path.join', 'osp.join', (['root_path', 'config_name'], {}), '(root_path, config_name)\n', (1910, 1934), True, 'import os.path as osp\n'), ((1950, 1973), 'os.path.exists', 'osp.exists', (['result_path'], {}), '(result_path)\n', (1960, 1973), True, 'import os.path as osp\n'), ((1799, 1816), 'os.path.split', 'osp.split', (['config'], {}), '(config)\n', (1808, 1816), True, 'import os.path as osp\n'), ((1847, 1872), 'os.path.splitext', 'osp.splitext', (['config_name'], {}), '(config_name)\n', (1859, 1872), True, 'import os.path as osp\n'), ((2039, 2067), 'mmcv.Config.fromfile', 'mmcv.Config.fromfile', (['config'], {}), '(config)\n', (2059, 2067), False, 'import mmcv\n'), ((2809, 2823), 'xlutils.copy.copy', 'copy', (['readbook'], {}), '(readbook)\n', (2813, 2823), False, 'from xlutils.copy import copy\n'), ((3597, 3629), 'os.path.join', 'osp.join', (['result_path', 'ckpt_path'], {}), '(result_path, ckpt_path)\n', (3605, 3629), True, 'import os.path as osp\n'), ((6285, 6309), 'os.path.splitext', 'osp.splitext', (['args.excel'], {}), '(args.excel)\n', (6297, 6309), True, 'import os.path as osp\n'), ((6404, 6447), 'xlrd.open_workbook', 'xlrd.open_workbook', (['f"""{filename}_o{sufflx}"""'], {}), "(f'{filename}_o{sufflx}')\n", (6422, 6447), False, 'import xlrd\n'), ((4080, 4096), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (4090, 4096), False, 'import json\n'), ((3763, 3798), 'os.path.join', 'osp.join', (['result_path', '"""*.log.json"""'], {}), "(result_path, '*.log.json')\n", (3771, 3798), True, 'import os.path as osp\n')]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='NadzornaEnota',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('oznaka', models.CharField(max_length=50)),
                ('naziv', models.CharField(max_length=255)),
                ('ip_naslov', models.CharField(max_length=255)),
                ('opis', models.TextField()),
            ],
            options={
                'verbose_name_plural': 'nadzorne enote',
                'verbose_name': 'nadzorna enota',
                'ordering': ['oznaka'],
            },
        ),
        migrations.CreateModel(
            name='NadzorniSistem',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('oznaka', models.CharField(max_length=50)),
                ('naziv', models.CharField(max_length=255)),
            ],
            options={
                'verbose_name_plural': 'nadzorni sistemi',
                'verbose_name': 'nadzorni sistem',
            },
        ),
        migrations.AddField(
            model_name='nadzornaenota',
            name='nadzorni_sistem',
            field=models.ForeignKey(to='nadzornaplosca.NadzorniSistem'),
        ),
    ]
[ "django.db.models.ForeignKey", "django.db.models.TextField", "django.db.models.CharField", "django.db.models.AutoField" ]
[((1461, 1514), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""nadzornaplosca.NadzorniSistem"""'}), "(to='nadzornaplosca.NadzorniSistem')\n", (1478, 1514), False, 'from django.db import migrations, models\n'), ((305, 398), 'django.db.models.AutoField', 'models.AutoField', ([], {'serialize': '(False)', 'primary_key': '(True)', 'auto_created': '(True)', 'verbose_name': '"""ID"""'}), "(serialize=False, primary_key=True, auto_created=True,\n verbose_name='ID')\n", (321, 398), False, 'from django.db import migrations, models\n'), ((424, 455), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (440, 455), False, 'from django.db import migrations, models\n'), ((484, 516), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (500, 516), False, 'from django.db import migrations, models\n'), ((549, 581), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (565, 581), False, 'from django.db import migrations, models\n'), ((609, 627), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (625, 627), False, 'from django.db import migrations, models\n'), ((951, 1044), 'django.db.models.AutoField', 'models.AutoField', ([], {'serialize': '(False)', 'primary_key': '(True)', 'auto_created': '(True)', 'verbose_name': '"""ID"""'}), "(serialize=False, primary_key=True, auto_created=True,\n verbose_name='ID')\n", (967, 1044), False, 'from django.db import migrations, models\n'), ((1070, 1101), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (1086, 1101), False, 'from django.db import migrations, models\n'), ((1130, 1162), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (1146, 1162), False, 'from django.db import migrations, models\n')]
import asyncio

import discord
from discord.ext import commands, menus


class AsyncEmbedCodeBlockTablePageSource(menus.AsyncIteratorPageSource):
    def __init__(
        self,
        data,
        title=None,
        count=None,
        show_index=False,
        format_embed=lambda x: None,
        format_item=str,
    ):
        super().__init__(data, per_page=20)
        self.title = title
        self.show_index = show_index
        self.format_embed = format_embed
        self.format_item = format_item
        self.count = count

    def justify(self, s, width):
        if s.isdigit():
            return s.rjust(width)
        else:
            return s.ljust(width)

    async def format_page(self, menu, entries):
        start = menu.current_page * self.per_page
        table = [
            (f"{i+1}.", *self.format_item(x)) if self.show_index else self.format_item(x)
            for i, x in enumerate(entries, start=menu.current_page * self.per_page)
        ]
        col_lens = [max(len(x) for x in col) for col in zip(*table)]
        lines = [
            " ".join(self.justify(x, col_lens[i]) for i, x in enumerate(line)).rstrip()
            for line in table
        ]

        embed = discord.Embed(
            title=self.title,
            color=discord.Color.blurple(),
            description="```" + f"\n".join(lines) + "```",
        )
        self.format_embed(embed)

        footer = f"Showing entries {start + 1}–{start + len(lines)}"
        if self.count is not None:
            footer += f" out of {self.count}"
        embed.set_footer(text=footer)

        return embed


class EmbedListPageSource(menus.ListPageSource):
    def __init__(self, data, title=None, show_index=False, format_item=str):
        super().__init__(data, per_page=20)
        self.title = title
        self.show_index = show_index
        self.format_item = format_item

    async def format_page(self, menu, entries):
        lines = (
            f"{i+1}. {self.format_item(x)}" if self.show_index else self.format_item(x)
            for i, x in enumerate(entries, start=menu.current_page * self.per_page)
        )
        return discord.Embed(
            title=self.title,
            color=discord.Color.blurple(),
            description=f"\n".join(lines),
        )


class AsyncEmbedListPageSource(menus.AsyncIteratorPageSource):
    def __init__(self, data, title=None, count=None, show_index=False, format_item=str):
        super().__init__(data, per_page=20)
        self.title = title or discord.Embed.Empty
        self.show_index = show_index
        self.format_item = format_item
        self.count = count

    async def format_page(self, menu, entries):
        start = menu.current_page * self.per_page
        lines = [
            f"{i+1}. {self.format_item(x)}" if self.show_index else self.format_item(x)
            for i, x in enumerate(entries, start=start)
        ]
        embed = discord.Embed(
            title=self.title,
            color=discord.Color.blurple(),
            description=f"\n".join(lines),
        )

        footer = f"Showing entries {start + 1}–{start + len(lines) + 1}"
        if self.count is not None:
            footer += f" out of {self.count}"
        embed.set_footer(text=footer)

        return embed


class AsyncEmbedFieldsPageSource(menus.AsyncIteratorPageSource):
    def __init__(self, data, title=None, count=None, format_item=lambda i, x: (i, x)):
        super().__init__(data, per_page=5)
        self.title = title
        self.format_item = format_item
        self.count = count

    async def format_page(self, menu, entries):
        embed = discord.Embed(
            title=self.title,
            color=discord.Color.blurple(),
        )
        start = menu.current_page * self.per_page
        for i, x in enumerate(entries, start=start):
            embed.add_field(**self.format_item(i, x))

        footer = f"Showing entries {start+1}–{i+1}"
        if self.count is not None:
            footer += f" out of {self.count}"
        embed.set_footer(text=footer)

        return embed


class Paginator:
    def __init__(self, get_page, num_pages):
        self.num_pages = num_pages
        self.get_page = get_page

    async def send(self, ctx: commands.Context, pidx: int = 0):
        embed = await self.get_page(pidx)
        message = await ctx.send(embed=embed)

        if self.num_pages <= 1:
            return

        await message.add_reaction("⏮️")
        await message.add_reaction("◀")
        await message.add_reaction("▶")
        await message.add_reaction("⏭️")
        await message.add_reaction("🔢")
        await message.add_reaction("⏹")

        try:
            while True:
                reaction, user = await ctx.bot.wait_for(
                    "reaction_add",
                    check=lambda r, u: r.message.id == message.id and u.id == ctx.author.id,
                    timeout=120,
                )
                try:
                    await reaction.remove(user)
                except:
                    pass

                if reaction.emoji == "⏹":
                    await message.delete()
                    return
                elif reaction.emoji == "🔢":
                    ask_message = await ctx.send("What page would you like to go to?")
                    message = await ctx.bot.wait_for(
                        "message",
                        check=lambda m: m.author == ctx.author and m.channel == ctx.channel,
                        timeout=30,
                    )
                    try:
                        pidx = (int(message.content) - 1) % self.num_pages
                    except ValueError:
                        await ctx.send("That's not a valid page number!")
                        continue
                    ctx.bot.loop.create_task(ask_message.delete())
                    ctx.bot.loop.create_task(message.delete())
                else:
                    pidx = {
                        "⏮️": 0,
                        "◀": pidx - 1,
                        "▶": pidx + 1,
                        "⏭️": self.num_pages - 1,
                    }[reaction.emoji] % self.num_pages

                embed = await self.get_page(pidx)
                await message.edit(embed=embed)
        except asyncio.TimeoutError:
            await message.add_reaction("❌")
[ "discord.Color.blurple" ]
[((1278, 1301), 'discord.Color.blurple', 'discord.Color.blurple', ([], {}), '()\n', (1299, 1301), False, 'import discord\n'), ((2216, 2239), 'discord.Color.blurple', 'discord.Color.blurple', ([], {}), '()\n', (2237, 2239), False, 'import discord\n'), ((2995, 3018), 'discord.Color.blurple', 'discord.Color.blurple', ([], {}), '()\n', (3016, 3018), False, 'import discord\n'), ((3704, 3727), 'discord.Color.blurple', 'discord.Color.blurple', ([], {}), '()\n', (3725, 3727), False, 'import discord\n')]
#%%
import numpy as np
import scipy.signal as signal
import matplotlib.pyplot as plt

#%%
N = 1000
n = np.arange (N)
f = 100
fs = 44100
x = (1.58 * 0.3125) * np.sin (2 * np.pi * n * f / fs)

#%%
e_s_plus = 72
e_s_minus = -72
V_cm = (e_s_plus + e_s_minus) / 2
V_dm = (e_s_plus - e_s_minus) / 2

R_p = 0 # 50000
G = (R_p + 150 + 20000) / (R_p + 150)

print (V_cm)
print (V_dm)

#%%
y = np.zeros (N)
y_1 = 0
for i in range (N):
    inner = (G*x[i] - V_cm) / V_dm
    sat = inner
    if sat < -1:
        sat = -1
    elif sat > 1:
        sat = 1
    y[i] = V_cm + V_dm * sat

#%%
plt.figure()
plt.plot (n, x)
plt.plot (n, y)
plt.axhline (66)

#%%
[ "matplotlib.pyplot.axhline", "matplotlib.pyplot.plot", "numpy.zeros", "matplotlib.pyplot.figure", "numpy.sin", "numpy.arange" ]
[((103, 115), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (112, 115), True, 'import numpy as np\n'), ((384, 395), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (392, 395), True, 'import numpy as np\n'), ((578, 590), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (588, 590), True, 'import matplotlib.pyplot as plt\n'), ((591, 605), 'matplotlib.pyplot.plot', 'plt.plot', (['n', 'x'], {}), '(n, x)\n', (599, 605), True, 'import matplotlib.pyplot as plt\n'), ((607, 621), 'matplotlib.pyplot.plot', 'plt.plot', (['n', 'y'], {}), '(n, y)\n', (615, 621), True, 'import matplotlib.pyplot as plt\n'), ((623, 638), 'matplotlib.pyplot.axhline', 'plt.axhline', (['(66)'], {}), '(66)\n', (634, 638), True, 'import matplotlib.pyplot as plt\n'), ((158, 188), 'numpy.sin', 'np.sin', (['(2 * np.pi * n * f / fs)'], {}), '(2 * np.pi * n * f / fs)\n', (164, 188), True, 'import numpy as np\n')]
#!/usr/bin/env python

from collections import OrderedDict
from decimal import Decimal
from glob import glob
import json
import os

import six

from agate.table import Table


@classmethod
def from_json(cls, path, column_names=None, column_types=None, keys=None, **kwargs):
    """
    Create a new :class:`TableSet` from a directory of JSON files or a
    single JSON object with key value (Table key and list of row objects)
    pairs for each :class:`Table`.

    See :meth:`.Table.from_json` for additional details.

    :param path:
        Path to a directory containing JSON files or filepath/file-like
        object of nested JSON file.
    :param keys:
        A list of keys of the top-level dictionaries for each file. If
        specified, length must be equal to number of JSON files in path.
    :param column_types:
        See :meth:`Table.__init__`.
    """
    from agate.tableset import TableSet

    if isinstance(path, six.string_types) and not os.path.isdir(path) and not os.path.isfile(path):
        raise IOError('Specified path doesn\'t exist.')

    tables = OrderedDict()

    if isinstance(path, six.string_types) and os.path.isdir(path):
        filepaths = glob(os.path.join(path, '*.json'))

        if keys is not None and len(keys) != len(filepaths):
            raise ValueError('If specified, keys must have length equal to number of JSON files')

        for i, filepath in enumerate(filepaths):
            name = os.path.split(filepath)[1].strip('.json')

            if keys is not None:
                tables[name] = Table.from_json(filepath, keys[i], column_types=column_types, **kwargs)
            else:
                tables[name] = Table.from_json(filepath, column_types=column_types, **kwargs)
    else:
        if hasattr(path, 'read'):
            js = json.load(path, object_pairs_hook=OrderedDict, parse_float=Decimal, **kwargs)
        else:
            with open(path, 'r') as f:
                js = json.load(f, object_pairs_hook=OrderedDict, parse_float=Decimal, **kwargs)

        for key, value in js.items():
            tables[key] = Table.from_object(value, column_types=column_types, **kwargs)

    return TableSet(tables.values(), tables.keys())
[ "json.load", "os.path.isdir", "os.path.isfile", "agate.table.Table.from_object", "collections.OrderedDict", "agate.table.Table.from_json", "os.path.split", "os.path.join" ]
[((1087, 1100), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1098, 1100), False, 'from collections import OrderedDict\n'), ((1148, 1167), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (1161, 1167), False, 'import os\n'), ((967, 986), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (980, 986), False, 'import os\n'), ((995, 1015), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (1009, 1015), False, 'import os\n'), ((1194, 1222), 'os.path.join', 'os.path.join', (['path', '"""*.json"""'], {}), "(path, '*.json')\n", (1206, 1222), False, 'import os\n'), ((1806, 1883), 'json.load', 'json.load', (['path'], {'object_pairs_hook': 'OrderedDict', 'parse_float': 'Decimal'}), '(path, object_pairs_hook=OrderedDict, parse_float=Decimal, **kwargs)\n', (1815, 1883), False, 'import json\n'), ((2098, 2159), 'agate.table.Table.from_object', 'Table.from_object', (['value'], {'column_types': 'column_types'}), '(value, column_types=column_types, **kwargs)\n', (2115, 2159), False, 'from agate.table import Table\n'), ((1560, 1631), 'agate.table.Table.from_json', 'Table.from_json', (['filepath', 'keys[i]'], {'column_types': 'column_types'}), '(filepath, keys[i], column_types=column_types, **kwargs)\n', (1575, 1631), False, 'from agate.table import Table\n'), ((1681, 1743), 'agate.table.Table.from_json', 'Table.from_json', (['filepath'], {'column_types': 'column_types'}), '(filepath, column_types=column_types, **kwargs)\n', (1696, 1743), False, 'from agate.table import Table\n'), ((1958, 2032), 'json.load', 'json.load', (['f'], {'object_pairs_hook': 'OrderedDict', 'parse_float': 'Decimal'}), '(f, object_pairs_hook=OrderedDict, parse_float=Decimal, **kwargs)\n', (1967, 2032), False, 'import json\n'), ((1453, 1476), 'os.path.split', 'os.path.split', (['filepath'], {}), '(filepath)\n', (1466, 1476), False, 'import os\n')]
import argparse


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-dp', '--fn_depth_report',
        help = 'input read-depth calling report'
    )
    args = parser.parse_args()
    return args


def find_thresh(list_depth):
    list_d = [pair_info[1] for pair_info in list_depth if 'TRAV8-5*01' not in pair_info[0]] # rebuild a list without TRAV8-5*01 (outlier) stuff
    list_d = [list_d[0]]*3 + list_d + [list_d[-1]]*2 # padding max value and zeros
    #print(list_d)
    list_value = [] # the division value between two windows
    for idx in range(3,len(list_d)-2):
        window = (list_d[idx] + list_d[idx+1]*0.25 + list_d[idx+2]*0.1)
        p_window = (list_d[idx-3]*0.1 + list_d[idx-2]*0.25 + list_d[idx-1])
        list_value.append((((window+2)/(p_window+2))*((window+2)/(window+0.5)),idx))
        #print(idx-3, list_d[idx], format(((window+2)/(p_window+2))*((window+2)/(window+0.5)), '.3f'))
    #print(sorted(list_value))
    sorted_value = sorted(list_value)
    thresh_id = -1
    if sorted_value[0][0]*2 < sorted_value[1][0]: # absolute winner
        thresh_id = sorted_value[0][1]
    else: # if there are similar candidate, use the old method
        for idx in range(3,len(list_d)-2):
            if list_d[idx] / list_d[idx-1] < 0.73:
                thresh_id = idx
                break
    thresh = list_d[thresh_id] + 1
    return thresh


def thresh_divide(list_depth, thresh):
    total_num = 0
    novel_num = 0
    flag = True
    p_depth = 1
    for allele_name, depth in list_depth:
        if depth < thresh:
            if flag:
                flag = False
                print("------------- thresh: " + str(thresh) + " ----------------")
            pass
        else:
            total_num += 1
            if 'novel' in allele_name:
                novel_num += 1
        print(allele_name, depth)#, depth/p_depth, sep='\t\t')
        p_depth = depth
        if depth == 0:
            return total_num, novel_num
    return total_num, novel_num


if __name__ == '__main__':
    args = parse_args()
    fn_depth_report = args.fn_depth_report

    f_n = open(fn_depth_report, 'r')
    list_depth = [] # list_depth = [(name1,depth1), (name2,depth2), ... ]
    for line in f_n:
        fields = line.split()
        allele_name = fields[0]
        depth = float(fields[1])
        list_depth.append((allele_name, depth))
        if depth == 0:
            break
    f_n.close()

    thresh = find_thresh(list_depth)
    total_num, novel_num = thresh_divide(list_depth, thresh)
    print("\n========= Summary ===========")
    print("Total AIRRCall alleles:", total_num)
    print("Novel AIRRCall alleles:", novel_num)
[ "argparse.ArgumentParser" ]
[((48, 73), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (71, 73), False, 'import argparse\n')]
# Copyright (C) 2018-2019 <NAME>
# SPDX-License-Identifier: Apache-2.0

import numpy


def read_surfaces(res):
    inp = res.input
    res.surfaces = {}

    if 'probes' not in inp:
        return

    for probe in inp['probes']:
        if not (probe.get('enabled', True) and probe.get('type', '') == 'IsoSurface'):
            continue

        name = probe['name']
        field_name = probe['field']
        value = probe['value']
        file_name_postfix = probe['file_name']
        file_name = res.get_file_path(file_name_postfix)
        isosurf = IsoSurfaces(name, field_name, value, file_name)
        res.surfaces[name] = isosurf


class IsoSurfaces(object):
    def __init__(self, name, field_name, value, file_name):
        self.name = name
        self.field_name = field_name
        self.value = value
        self.file_name = file_name
        self._cache = None

    def reload(self):
        self._cache = None

    def get_surfaces(self, cache=True):
        if cache and self._cache is not None:
            return self._cache

        timesteps = []
        data = []
        with open(self.file_name, 'rt') as f:
            description = f.readline()[1:].strip()
            value = float(f.readline().split()[-1])
            dim = int(f.readline().split()[-1])

            line = f.readline()
            while line:
                wds = line.split()
                try:
                    time = float(wds[1])
                    nsurf = int(wds[3])
                except Exception:
                    break

                if nsurf == 0:
                    timesteps.append(time)
                    data.append([])
                    line = f.readline()
                    continue

                datalines = [f.readline() for _ in range(nsurf * 3)]
                if not datalines[-1]:
                    break

                timesteps.append(time)
                data.append([])
                for i in range(nsurf):
                    xvals = [float(v) for v in datalines[i * 3 + 0].split()]
                    yvals = [float(v) for v in datalines[i * 3 + 1].split()]
                    zvals = [float(v) for v in datalines[i * 3 + 2].split()]
                    data[-1].append((xvals, yvals, zvals))

                line = f.readline()

        res = (description, value, dim, numpy.array(timesteps), data)
        if cache:
            self._cache = res
        return res
[ "numpy.array" ]
[((2334, 2356), 'numpy.array', 'numpy.array', (['timesteps'], {}), '(timesteps)\n', (2345, 2356), False, 'import numpy\n')]
from django.db import models


class DepartmentsF(models.Model):
    department_id = models.AutoField(primary_key=True)
    department = models.CharField(max_length=100)

    class Meta:
        managed = False
        db_table = 'departments_f'


class OrdersF(models.Model):
    order_id = models.AutoField(primary_key=True)
    order_hour_of_day = models.PositiveIntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'orders_f'


class ProductsF(models.Model):
    product_id = models.AutoField(primary_key=True)
    product_name = models.CharField(max_length=200)
    department = models.ForeignKey(DepartmentsF, models.DO_NOTHING)
    price = models.FloatField()
    margin = models.FloatField()

    class Meta:
        managed = False
        db_table = 'products_f'


class OrderProductsF(models.Model):
    order = models.ForeignKey('OrdersF', models.DO_NOTHING, blank=True, null=False, primary_key=True)
    product = models.ForeignKey('ProductsF', models.DO_NOTHING, blank=True, null=False)
    quantity = models.PositiveIntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'order_products_f'
[ "django.db.models.CharField", "django.db.models.ForeignKey", "django.db.models.PositiveIntegerField", "django.db.models.FloatField", "django.db.models.AutoField" ]
[((84, 118), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (100, 118), False, 'from django.db import models\n'), ((136, 168), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (152, 168), False, 'from django.db import models\n'), ((291, 325), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (307, 325), False, 'from django.db import models\n'), ((350, 400), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (377, 400), False, 'from django.db import models\n'), ((522, 556), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (538, 556), False, 'from django.db import models\n'), ((576, 608), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (592, 608), False, 'from django.db import models\n'), ((626, 676), 'django.db.models.ForeignKey', 'models.ForeignKey', (['DepartmentsF', 'models.DO_NOTHING'], {}), '(DepartmentsF, models.DO_NOTHING)\n', (643, 676), False, 'from django.db import models\n'), ((689, 708), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (706, 708), False, 'from django.db import models\n'), ((722, 741), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (739, 741), False, 'from django.db import models\n'), ((865, 958), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""OrdersF"""', 'models.DO_NOTHING'], {'blank': '(True)', 'null': '(False)', 'primary_key': '(True)'}), "('OrdersF', models.DO_NOTHING, blank=True, null=False,\n primary_key=True)\n", (882, 958), False, 'from django.db import models\n'), ((969, 1042), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""ProductsF"""', 'models.DO_NOTHING'], {'blank': '(True)', 'null': '(False)'}), "('ProductsF', models.DO_NOTHING, blank=True, null=False)\n", (986, 1042), False, 'from django.db import models\n'), ((1058, 1108), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1085, 1108), False, 'from django.db import models\n')]
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from urllib import request
import os
import subprocess
import sys
import json
import yaml
import codecs
import requests
from collections import OrderedDict
from pyquery import PyQuery as pq

# the trending url
url_str = 'http://trending.codehub-app.com/v2/trending/'
langs_str = 'https://raw.githubusercontent.com/github/linguist/master/lib/linguist/languages.yml'

foldername = "json"
filename = "trending.json"
# folder_path = "./" + foldername + "/"
file_path = "./" + filename


def git_pull():
    print("prepare to do 'git pull'")
    cmd = ['git', 'pull']
    p = subprocess.Popen(cmd, cwd="./")
    p.wait()


def git_add():
    print("prepare to do 'git add'")
    cmd = ['git', 'add', '.']
    p = subprocess.Popen(cmd, cwd="./")
    p.wait()


def git_commit():
    print("prepare to do 'git commit'")
    centext = "'refresh git trending'"
    cmd = ['git', 'commit', '-m', centext]
    p = subprocess.Popen(cmd, cwd="./")
    p.wait()


def git_push():
    print("prepare to do 'git push'")
    cmd = ['git', 'push', '-u', 'origin', 'master']
    p = subprocess.Popen(cmd, cwd="./")
    p.wait()


def file_handle():
    git_pull()
    git_add()
    git_commit()
    git_push()


def url_open(url):
    if not ('http' in url):
        url = 'http://' + url
    print('url is :' + url)
    req = request.Request(url)
    req.add_header('User-Agent', 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:36.0) Gecko/20100101 Firefox/36.0')
    response = request.urlopen(req)
    return response.read()


def scrape(language, file_path):
    HEADERS = {
        'User-Agent' : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:11.0) Gecko/20100101 Firefox/11.0',
        'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding' : 'gzip,deflate,sdch',
        'Accept-Language' : 'zh-CN,zh;q=0.8'
    }
    print("begin request")
    url = 'https://github.com/trending/{language}'.format(language=language)
    r = requests.get(url, headers=HEADERS)
    assert r.status_code == 200
    # print(r.encoding)
    d = pq(r.content)
    items = d('ol.repo-list li')
    # codecs to solve the problem utf-8 codec like chinese
    with codecs.open(file_path, "w", encoding='utf-8') as f:
        arr = []
        for item in items:
            i = pq(item)
            title = i("h3 a").text()
            language = i("div.f6 span[itemprop='programmingLanguage']").text()
            star = i("div.f6 svg.octicon-star").closest("a").text()
            fork = i("div.f6 svg.octicon-repo-forked").closest("a").text()
            description = i("p.col-9").text()
            hrefurl = i("h3 a").attr("href")
            urllist = hrefurl.split('/')
            login = urllist[1]
            name = urllist[2]
            full_name = login + '/' + name
            url = "https://github.com" + hrefurl
            # ownerImg = i("p.repo-list-meta a img").attr("src")
            # print(ownerImg)
            data = {}
            data["name"] = name
            data["owner"] = {
                "login":login
            }
            data["full_name"] = full_name
            data["forks_count"] = fork.replace(',', '')
            data["stargazers_count"] = star.replace(',', '')
            if (len(language) > 0):
                data["language"] = language
            data["repositoryDescription"] = description
            arr.append(data)
        print("get json data, ready write to file 'trending.json'")
        f.write(json.dumps(arr, indent=4, ensure_ascii=False))


def save_file():
    scrape("", file_path)
    getColors()
    file_handle()


def trending():
    if not os.path.exists(foldername):
        os.mkdir(foldername)
        print('create folder success', foldername)
    os.chdir(foldername)
    folder_top = os.getcwd()
    print(folder_top)
    save_file()


# ----------------------------------------------
def ordered_load(stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict):
    """
    Parse the first YAML document in a stream and produce the corresponding
    Python Ordered Dictionary.
    """
    class OrderedLoader(Loader):
        pass
    OrderedLoader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
        lambda loader, node: object_pairs_hook(loader.construct_pairs(node)))
    return yaml.load(stream, OrderedLoader)


def order_by_keys(dict):
    """
    Sort a dictionary by keys, case insensitive ie [ Ada, eC, Fortran ]
    Default ordering, or using json.dump with sort_keys=True, produces
    [ Ada, Fortran, eC ]
    """
    from collections import OrderedDict
    return OrderedDict(sorted(dict.items(), key=lambda s: s[0].lower()))


def getFile(url):
    """
    Return the URL body, or False if page not found

    Keyword arguments:
    url -- url to parse
    """
    try:
        r = request.urlopen(url)
    except:
        sys.exit("Request fatal error : %s" % sys.exc_info()[1])
    if r.status != 200:
        return False
    return r.read()


def write_json(text, filename='colors.json'):
    """
    Write a JSON file from a dictionary
    """
    with open(filename, 'w') as f:
        f.write(json.dumps(text, indent=4) + '\n')


def getColors():
    print("getting list of language")
    yml = getFile(langs_str)
    langs_yml = ordered_load(yml)
    langs_yml = order_by_keys(langs_yml)

    # List construction done, count keys
    lang_count = len(langs_yml)
    print("Found %d languages" % lang_count)

    # Construct the wanted list
    langs = OrderedDict()
    for lang in langs_yml.keys():
        if ("type" not in langs_yml[lang] or "color" in langs_yml[lang] or langs_yml[lang]["type"] == "programming"):
            print(" Parsing the color for '%s' ..." % (lang))
            langs[lang] = OrderedDict()
            langs[lang]["color"] = langs_yml[lang]["color"] if "color" in langs_yml[lang] else None
            langs[lang]["url"] = "https://github.com/trending?l=" + (
                langs_yml[lang]["search_term"] if "search_term" in langs_yml[lang] else lang)
            langs[lang]["url"] = langs[lang]["url"].replace(' ', '-').replace('#', 'sharp')

    print("Writing a new JSON file ...")
    write_json(langs)
    print("All done!")


if __name__ == '__main__':
    trending()
[ "os.mkdir", "subprocess.Popen", "pyquery.PyQuery", "urllib.request.Request", "yaml.load", "codecs.open", "os.getcwd", "os.path.exists", "urllib.request.urlopen", "json.dumps", "requests.get", "sys.exc_info", "collections.OrderedDict", "os.chdir" ]
[((614, 645), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'cwd': '"""./"""'}), "(cmd, cwd='./')\n", (630, 645), False, 'import subprocess\n'), ((750, 781), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'cwd': '"""./"""'}), "(cmd, cwd='./')\n", (766, 781), False, 'import subprocess\n'), ((945, 976), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'cwd': '"""./"""'}), "(cmd, cwd='./')\n", (961, 976), False, 'import subprocess\n'), ((1106, 1137), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'cwd': '"""./"""'}), "(cmd, cwd='./')\n", (1122, 1137), False, 'import subprocess\n'), ((1351, 1371), 'urllib.request.Request', 'request.Request', (['url'], {}), '(url)\n', (1366, 1371), False, 'from urllib import request\n'), ((1500, 1520), 'urllib.request.urlopen', 'request.urlopen', (['req'], {}), '(req)\n', (1515, 1520), False, 'from urllib import request\n'), ((2019, 2053), 'requests.get', 'requests.get', (['url'], {'headers': 'HEADERS'}), '(url, headers=HEADERS)\n', (2031, 2053), False, 'import requests\n'), ((2120, 2133), 'pyquery.PyQuery', 'pq', (['r.content'], {}), '(r.content)\n', (2122, 2133), True, 'from pyquery import PyQuery as pq\n'), ((3824, 3844), 'os.chdir', 'os.chdir', (['foldername'], {}), '(foldername)\n', (3832, 3844), False, 'import os\n'), ((3862, 3873), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3871, 3873), False, 'import os\n'), ((4392, 4424), 'yaml.load', 'yaml.load', (['stream', 'OrderedLoader'], {}), '(stream, OrderedLoader)\n', (4401, 4424), False, 'import yaml\n'), ((5601, 5614), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (5612, 5614), False, 'from collections import OrderedDict\n'), ((2236, 2281), 'codecs.open', 'codecs.open', (['file_path', '"""w"""'], {'encoding': '"""utf-8"""'}), "(file_path, 'w', encoding='utf-8')\n", (2247, 2281), False, 'import codecs\n'), ((3709, 3735), 'os.path.exists', 'os.path.exists', (['foldername'], {}), '(foldername)\n', (3723, 3735), False, 'import os\n'), ((3745, 3765), 'os.mkdir', 'os.mkdir', (['foldername'], {}), '(foldername)\n', (3753, 3765), False, 'import os\n'), ((4918, 4938), 'urllib.request.urlopen', 'request.urlopen', (['url'], {}), '(url)\n', (4933, 4938), False, 'from urllib import request\n'), ((2349, 2357), 'pyquery.PyQuery', 'pq', (['item'], {}), '(item)\n', (2351, 2357), True, 'from pyquery import PyQuery as pq\n'), ((3552, 3597), 'json.dumps', 'json.dumps', (['arr'], {'indent': '(4)', 'ensure_ascii': '(False)'}), '(arr, indent=4, ensure_ascii=False)\n', (3562, 3597), False, 'import json\n'), ((5897, 5910), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (5908, 5910), False, 'from collections import OrderedDict\n'), ((5243, 5269), 'json.dumps', 'json.dumps', (['text'], {'indent': '(4)'}), '(text, indent=4)\n', (5253, 5269), False, 'import json\n'), ((4998, 5012), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (5010, 5012), False, 'import sys\n')]
# Generated by Django 2.0.8 on 2018-09-13 13:38 from django.db import migrations class Migration(migrations.Migration): dependencies = [("menu", "0008_menu_json_content_new")] operations = [migrations.RemoveField(model_name="menu", name="json_content")]
[ "django.db.migrations.RemoveField" ]
[((203, 265), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""menu"""', 'name': '"""json_content"""'}), "(model_name='menu', name='json_content')\n", (225, 265), False, 'from django.db import migrations\n')]
# Copyright 2016 TensorLab. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except # in compliance with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express # or implied. See the License for the specific language governing permissions and limitations under # the License. # _predict.py # Implements PredictCommand. import json import os import sys import tensorflow as tf import tensorfx as tfx class PredictCommand(object): """Implements the tfx predict command to use a model to produce predictions. """ name = 'predict' help = 'Produces predictions using a model.' extra = False @staticmethod def build_parser(parser): parser.add_argument('--model', metavar='path', type=str, required=True, help='The path to a previously trained model.') parser.add_argument('--input', metavar='path', type=str, help='The path to a file with input instances. Uses stdin by default.') parser.add_argument('--output', metavar='path', type=str, help='The path to a file to write outputs to. Uses stdout by default.') parser.add_argument('--batch-size', metavar='instances', type=int, default=10, help='The number of instances to predict per batch.') @staticmethod def run(args): # TODO: Figure out where to do JSON and TF initialization in more common way. json.encoder.FLOAT_REPR = lambda f: ('%.5f' % f) tf.logging.set_verbosity(tf.logging.ERROR) os.environ['TF_CPP_MIN_LOG_LEVEL'] = str(tf.logging.ERROR) model = tfx.prediction.Model.load(args.model) with TextSource(args.input, args.batch_size) as source, TextSink(args.output) as sink: for instances in source: predictions = model.predict(instances) lines = map(lambda p: json.dumps(p, sort_keys=True), predictions) sink.write(lines) class TextSource(object): def __init__(self, file=None, batch_size=1): self._file = file self._batch_size = batch_size def __enter__(self): self._stream = open(self._file, 'r') if self._file else sys.stdin return self def __exit__(self, type, value, traceback): if self._stream and self._file: self._stream.close() def __iter__(self): instances = [] while True: instance = self._stream.readline().strip() if not instance: # EOF break instances.append(instance) if len(instances) == self._batch_size: # A desired batch of instances is available yield instances instances = [] if instances: yield instances class TextSink(object): def __init__(self, file=None): self._file = file def __enter__(self): self._stream = open(self._file, 'w') if self._file else sys.stdout return self def __exit__(self, type, value, traceback): if self._stream and self._file: self._stream.close() def write(self, lines): for l in lines: self._stream.write(l + '\n')
[ "tensorfx.prediction.Model.load", "tensorflow.logging.set_verbosity", "json.dumps" ]
[((1754, 1796), 'tensorflow.logging.set_verbosity', 'tf.logging.set_verbosity', (['tf.logging.ERROR'], {}), '(tf.logging.ERROR)\n', (1778, 1796), True, 'import tensorflow as tf\n'), ((1873, 1910), 'tensorfx.prediction.Model.load', 'tfx.prediction.Model.load', (['args.model'], {}), '(args.model)\n', (1898, 1910), True, 'import tensorfx as tfx\n'), ((2111, 2140), 'json.dumps', 'json.dumps', (['p'], {'sort_keys': '(True)'}), '(p, sort_keys=True)\n', (2121, 2140), False, 'import json\n')]
import os, torch, argparse from torch import optim from torch.utils.data import DataLoader from pytorch_pretrained_bert import BertTokenizer from _preprocessing import Vocabulary, DRCDDataset from _preprocessing import build_emb, create_bert_batch, create_jieba_batch from _model import EncoderRNN, BertEncoder from _model import DecoderRNN, LuongAttnDecoderRNN from _train import trainEpochs from _evaluation import train_evaluation, test_evaluation USE_CUDA = torch.cuda.is_available() device = torch.device("cuda" if USE_CUDA else "cpu") def main(args): print("Data preprocessing ...") if args.data_mode == "jieba": vocab = Vocabulary(args.data_set, args.vec_min) tokenizer = None data_transformer = DRCDDataset(args.data_set, args.data_sel, args.data_mode, args.with_ans, vocab, tokenizer) data_loader = DataLoader(data_transformer, batch_size=args.batch_size, shuffle=True, collate_fn=create_jieba_batch) embedding = build_emb(args.save_dir, args.vec_path, vocab, args.emb_size, args.loadEmbedding) embedding = embedding.to(device) elif args.data_mode == "bert": vocab = None tokenizer = BertTokenizer.from_pretrained("bert-base-chinese") data_transformer = DRCDDataset(args.data_set, args.data_sel, args.data_mode, args.with_ans, vocab, tokenizer) data_loader = DataLoader(data_transformer, batch_size=args.batch_size, shuffle=True, collate_fn=create_bert_batch) embedding = None print('Building encoder and decoder ...') if args.data_mode == "jieba": encoder = EncoderRNN(embedding, args.hidden_size, args.transfer_layer, args.encoder_n_layers, args.dropout) vocab_size = vocab.num_words elif args.data_mode == "bert": encoder = BertEncoder(args.transfer_layer) embedding = encoder.embedding vocab_size = encoder.vocab_size if args.attn_model == 'none': decoder = DecoderRNN(embedding, args.hidden_size, vocab_size, args.decoder_n_layers, args.dropout) else: decoder = LuongAttnDecoderRNN(args.attn_model, embedding, args.hidden_size, vocab_size, args.decoder_n_layers, args.dropout) # Load model if a loadFilename is provided if args.loadEncoder: print("Loading pretrained Encoder ...") checkpoint = torch.load(args.loadEncoder) prencoder_sd = checkpoint['en'] encoder_sd = encoder.state_dict() prencoder_sd = {k: v for k, v in encoder_sd.items() if k in prencoder_sd} encoder_sd.update(prencoder_sd) encoder.load_state_dict(encoder_sd) if args.fixed_enc: for param in encoder.parameters(): param.requires_grad = False encoder.out.weight.requires_grad = True encoder.out.bias.requires_grad = True if args.loadDecoder: print("Loading pretrained Decoder ...") checkpoint = torch.load(args.loadDecoder) decoder_sd = checkpoint['de'] decoder.load_state_dict(decoder_sd) if args.loadFilename: print("Loading pretrained Model ...") checkpoint = torch.load(args.loadFilename) encoder_sd = checkpoint['en'] encoder.load_state_dict(encoder_sd) decoder_sd = checkpoint['de'] decoder.load_state_dict(decoder_sd) # Use appropriate device encoder = encoder.to(device) decoder = decoder.to(device) # Ensure dropout layers are in train mode encoder.train() decoder.train() if args.training_flag: print('Building optimizers ...') if args.fixed_enc: encoder_optimizer = optim.Adam(filter(lambda p: p.requires_grad, encoder.parameters()), lr=args.encoder_op_lr) else: encoder_optimizer = optim.Adam(encoder.parameters(), lr=args.encoder_op_lr) decoder_optimizer = optim.Adam(decoder.parameters(), lr=args.decoder_op_lr) if args.loadEncoder: checkpoint = torch.load(args.loadEncoder) prencoder_optimizer_sd = checkpoint['en_opt'] encoder_optimizer_sd = 
encoder_optimizer.state_dict() prencoder_optimizer_sd = {k: v for k, v in encoder_optimizer_sd.items() if k in prencoder_optimizer_sd} encoder_optimizer_sd.update(prencoder_optimizer_sd) encoder_optimizer.load_state_dict(encoder_optimizer_sd) if args.loadDecoder: checkpoint = torch.load(args.loadDecoder) decoder_optimizer_sd = checkpoint['de_opt'] decoder_optimizer.load_state_dict(decoder_optimizer_sd) if args.loadFilename: checkpoint = torch.load(args.loadFilename) prencoder_optimizer_sd = checkpoint['en_opt'] encoder_optimizer_sd = encoder_optimizer.state_dict() prencoder_optimizer_sd = {k: v for k, v in encoder_optimizer_sd.items() if k in prencoder_optimizer_sd} encoder_optimizer_sd.update(prencoder_optimizer_sd) encoder_optimizer.load_state_dict(encoder_optimizer_sd) decoder_optimizer_sd = checkpoint['de_opt'] decoder_optimizer.load_state_dict(decoder_optimizer_sd) # If you have cuda, configure cuda to call for state in encoder_optimizer.state.values(): for k, v in state.items(): if isinstance(v, torch.Tensor): state[k] = v.cuda() for state in decoder_optimizer.state.values(): for k, v in state.items(): if isinstance(v, torch.Tensor): state[k] = v.cuda() print("Starting training!") trainEpochs(args.save_dir, args.data_mode, data_loader, vocab, tokenizer, args.attn_model, encoder, decoder, encoder_optimizer, decoder_optimizer, args.it_percent, args.checkpoint_epoch, args.num_epochs, args.teacher_forcing_ratio, args.data_sel) # Set dropout layers to eval mode encoder.eval() decoder.eval() if args.dev_flag: dev_transformer = DRCDDataset(args.dev_set, args.data_sel, args.data_mode, args.with_ans, vocab, tokenizer) if args.data_mode == "jieba": dev_loader = DataLoader(dev_transformer, batch_size=args.batch_size, shuffle=True, collate_fn=create_jieba_batch) elif args.data_mode == "bert": dev_loader = DataLoader(dev_transformer, batch_size=args.batch_size, shuffle=True , collate_fn=create_bert_batch) print("Starting evaluation!") test_evaluation(args.eval_flag, args.data_mode, args.data_sel, dev_loader, args.attn_model, vocab, tokenizer, encoder, decoder, args.max_length, args.save_dir, args.checkpoint_epoch) if args.eval_flag: eval_transformer = DRCDDataset(args.eval_set, args.data_sel, args.data_mode, args.with_ans, vocab, tokenizer) if args.data_mode == "jieba": eval_loader = DataLoader(eval_transformer, batch_size=args.batch_size, shuffle=False, collate_fn=create_jieba_batch) elif args.data_mode == "bert": eval_loader = DataLoader(eval_transformer, batch_size=args.batch_size, shuffle=False, collate_fn=create_bert_batch) print("Starting evaluation!") test_evaluation(args.eval_flag, args.data_mode, args.data_sel, eval_loader, args.attn_model, vocab, tokenizer, encoder, decoder, args.max_length, args.save_dir, args.checkpoint_epoch) if __name__ == "__main__": parser = argparse.ArgumentParser() # for data parser.add_argument('--vec_min', type=int, default=5) parser.add_argument('--vec_path', type=str, default='cc.zh.300.vec') #cc.zh.300.vec parser.add_argument('--emb_size', type=int, default=300) parser.add_argument('--batch_size', type=int, default=32) parser.add_argument('--data_set', type=str, default="sS200_train") parser.add_argument('--dev_set', type=str, default="sS200_dev") parser.add_argument('--data_sel', type=str, default="sq") parser.add_argument('--data_mode', type=str, default="bert") # for model parser.add_argument('--model_name', type=str, default='BERT_model') # ignore parser.add_argument('--attn_model', type=str, default='general') #dot #general #concat 
    # NOTE: argparse's type=bool converts any non-empty string to True, so these
    # flags are only reliable because parse_args(args=[]) below always uses the defaults.
    parser.add_argument('--with_ans', type=bool, default=False)
    parser.add_argument('--fixed_enc', type=bool, default=False)
    parser.add_argument('--transfer_layer', type=bool, default=False)
    parser.add_argument('--hidden_size', type=int, default=300)
    parser.add_argument('--encoder_n_layers', type=int, default=1)
    parser.add_argument('--decoder_n_layers', type=int, default=1)
    parser.add_argument('--dropout', type=float, default=0.1)

    # for training
    parser.add_argument('--training_flag', type=bool, default=True)
    parser.add_argument('--it_percent', type=float, default=0.5)  # a fraction of the iterations, so float rather than int
    parser.add_argument('--teacher_forcing_ratio', type=float, default=0.5)
    parser.add_argument('--encoder_op_lr', type=float, default=1e-4)
    parser.add_argument('--decoder_op_lr', type=float, default=1e-4)
    parser.add_argument('--num_epochs', type=int, default=300)

    # for loading
    save_dir = 'BERT_SQ'
    check_epoch = 0
    encoder_epoch = 0
    decoder_epoch = 0
    loadFilename = os.path.join(save_dir, 'E{}_checkpoint.tar'.format(check_epoch))
    loadEncodername = os.path.join(save_dir, 'SS_E{}_checkpoint.tar'.format(encoder_epoch))
    loadDecodername = os.path.join(save_dir, 'QQ_E{}_checkpoint.tar'.format(decoder_epoch))
    loadEmbedding = os.path.join(save_dir, 'emb_matrix.tar')
    if not os.path.exists(loadFilename):
        loadFilename = None
    if not os.path.exists(loadEncodername):
        loadEncodername = None
    if not os.path.exists(loadDecodername):
        loadDecodername = None
    if not os.path.exists(loadEmbedding):
        loadEmbedding = None
    parser.add_argument('--loadFilename', default=loadFilename)
    parser.add_argument('--loadEncoder', default=loadEncodername)
    parser.add_argument('--loadDecoder', default=loadDecodername)
    parser.add_argument('--loadEmbedding', default=loadEmbedding)
    parser.add_argument('--save_dir', type=str, default=save_dir)
    parser.add_argument('--checkpoint_epoch', type=int, default=check_epoch)

    # for evaluation
    parser.add_argument('--dev_flag', type=bool, default=False)
    parser.add_argument('--eval_flag', type=bool, default=False)
    parser.add_argument('--eval_set', type=str, default="sQ30_test")
    parser.add_argument('--max_length', type=int, default=50)

    args = parser.parse_args(args=[])  # args=[] deliberately ignores the real command line and keeps the defaults above
    print("[[" + args.model_name + "]]")
    main(args)
[ "argparse.ArgumentParser", "_preprocessing.Vocabulary", "torch.utils.data.DataLoader", "_model.LuongAttnDecoderRNN", "pytorch_pretrained_bert.BertTokenizer.from_pretrained", "torch.load", "_model.BertEncoder", "os.path.exists", "_model.DecoderRNN", "torch.cuda.is_available", "_train.trainEpochs", "torch.device", "_evaluation.test_evaluation", "_preprocessing.build_emb", "os.path.join", "_model.EncoderRNN", "_preprocessing.DRCDDataset" ]
[((476, 501), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (499, 501), False, 'import os, torch, argparse\n'), ((512, 555), 'torch.device', 'torch.device', (["('cuda' if USE_CUDA else 'cpu')"], {}), "('cuda' if USE_CUDA else 'cpu')\n", (524, 555), False, 'import os, torch, argparse\n'), ((6833, 6858), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6856, 6858), False, 'import os, torch, argparse\n'), ((8822, 8862), 'os.path.join', 'os.path.join', (['save_dir', '"""emb_matrix.tar"""'], {}), "(save_dir, 'emb_matrix.tar')\n", (8834, 8862), False, 'import os, torch, argparse\n'), ((656, 695), '_preprocessing.Vocabulary', 'Vocabulary', (['args.data_set', 'args.vec_min'], {}), '(args.data_set, args.vec_min)\n', (666, 695), False, 'from _preprocessing import Vocabulary, DRCDDataset\n'), ((738, 832), '_preprocessing.DRCDDataset', 'DRCDDataset', (['args.data_set', 'args.data_sel', 'args.data_mode', 'args.with_ans', 'vocab', 'tokenizer'], {}), '(args.data_set, args.data_sel, args.data_mode, args.with_ans,\n vocab, tokenizer)\n', (749, 832), False, 'from _preprocessing import Vocabulary, DRCDDataset\n'), ((846, 951), 'torch.utils.data.DataLoader', 'DataLoader', (['data_transformer'], {'batch_size': 'args.batch_size', 'shuffle': '(True)', 'collate_fn': 'create_jieba_batch'}), '(data_transformer, batch_size=args.batch_size, shuffle=True,\n collate_fn=create_jieba_batch)\n', (856, 951), False, 'from torch.utils.data import DataLoader\n'), ((963, 1049), '_preprocessing.build_emb', 'build_emb', (['args.save_dir', 'args.vec_path', 'vocab', 'args.emb_size', 'args.loadEmbedding'], {}), '(args.save_dir, args.vec_path, vocab, args.emb_size, args.\n loadEmbedding)\n', (972, 1049), False, 'from _preprocessing import build_emb, create_bert_batch, create_jieba_batch\n'), ((1538, 1640), '_model.EncoderRNN', 'EncoderRNN', (['embedding', 'args.hidden_size', 'args.transfer_layer', 'args.encoder_n_layers', 'args.dropout'], {}), '(embedding, args.hidden_size, args.transfer_layer, args.\n encoder_n_layers, args.dropout)\n', (1548, 1640), False, 'from _model import EncoderRNN, BertEncoder\n'), ((1862, 1954), '_model.DecoderRNN', 'DecoderRNN', (['embedding', 'args.hidden_size', 'vocab_size', 'args.decoder_n_layers', 'args.dropout'], {}), '(embedding, args.hidden_size, vocab_size, args.decoder_n_layers,\n args.dropout)\n', (1872, 1954), False, 'from _model import DecoderRNN, LuongAttnDecoderRNN\n'), ((1972, 2090), '_model.LuongAttnDecoderRNN', 'LuongAttnDecoderRNN', (['args.attn_model', 'embedding', 'args.hidden_size', 'vocab_size', 'args.decoder_n_layers', 'args.dropout'], {}), '(args.attn_model, embedding, args.hidden_size,\n vocab_size, args.decoder_n_layers, args.dropout)\n', (1991, 2090), False, 'from _model import DecoderRNN, LuongAttnDecoderRNN\n'), ((2218, 2246), 'torch.load', 'torch.load', (['args.loadEncoder'], {}), '(args.loadEncoder)\n', (2228, 2246), False, 'import os, torch, argparse\n'), ((2739, 2767), 'torch.load', 'torch.load', (['args.loadDecoder'], {}), '(args.loadDecoder)\n', (2749, 2767), False, 'import os, torch, argparse\n'), ((2924, 2953), 'torch.load', 'torch.load', (['args.loadFilename'], {}), '(args.loadFilename)\n', (2934, 2953), False, 'import os, torch, argparse\n'), ((5118, 5377), '_train.trainEpochs', 'trainEpochs', (['args.save_dir', 'args.data_mode', 'data_loader', 'vocab', 'tokenizer', 'args.attn_model', 'encoder', 'decoder', 'encoder_optimizer', 'decoder_optimizer', 'args.it_percent', 'args.checkpoint_epoch', 'args.num_epochs', 
'args.teacher_forcing_ratio', 'args.data_sel'], {}), '(args.save_dir, args.data_mode, data_loader, vocab, tokenizer,\n args.attn_model, encoder, decoder, encoder_optimizer, decoder_optimizer,\n args.it_percent, args.checkpoint_epoch, args.num_epochs, args.\n teacher_forcing_ratio, args.data_sel)\n', (5129, 5377), False, 'from _train import trainEpochs\n'), ((5491, 5584), '_preprocessing.DRCDDataset', 'DRCDDataset', (['args.dev_set', 'args.data_sel', 'args.data_mode', 'args.with_ans', 'vocab', 'tokenizer'], {}), '(args.dev_set, args.data_sel, args.data_mode, args.with_ans,\n vocab, tokenizer)\n', (5502, 5584), False, 'from _preprocessing import Vocabulary, DRCDDataset\n'), ((5922, 6112), '_evaluation.test_evaluation', 'test_evaluation', (['args.eval_flag', 'args.data_mode', 'args.data_sel', 'dev_loader', 'args.attn_model', 'vocab', 'tokenizer', 'encoder', 'decoder', 'args.max_length', 'args.save_dir', 'args.checkpoint_epoch'], {}), '(args.eval_flag, args.data_mode, args.data_sel, dev_loader,\n args.attn_model, vocab, tokenizer, encoder, decoder, args.max_length,\n args.save_dir, args.checkpoint_epoch)\n', (5937, 6112), False, 'from _evaluation import train_evaluation, test_evaluation\n'), ((6159, 6253), '_preprocessing.DRCDDataset', 'DRCDDataset', (['args.eval_set', 'args.data_sel', 'args.data_mode', 'args.with_ans', 'vocab', 'tokenizer'], {}), '(args.eval_set, args.data_sel, args.data_mode, args.with_ans,\n vocab, tokenizer)\n', (6170, 6253), False, 'from _preprocessing import Vocabulary, DRCDDataset\n'), ((6596, 6787), '_evaluation.test_evaluation', 'test_evaluation', (['args.eval_flag', 'args.data_mode', 'args.data_sel', 'eval_loader', 'args.attn_model', 'vocab', 'tokenizer', 'encoder', 'decoder', 'args.max_length', 'args.save_dir', 'args.checkpoint_epoch'], {}), '(args.eval_flag, args.data_mode, args.data_sel, eval_loader,\n args.attn_model, vocab, tokenizer, encoder, decoder, args.max_length,\n args.save_dir, args.checkpoint_epoch)\n', (6611, 6787), False, 'from _evaluation import train_evaluation, test_evaluation\n'), ((8872, 8900), 'os.path.exists', 'os.path.exists', (['loadFilename'], {}), '(loadFilename)\n', (8886, 8900), False, 'import os, torch, argparse\n'), ((8934, 8965), 'os.path.exists', 'os.path.exists', (['loadEncodername'], {}), '(loadEncodername)\n', (8948, 8965), False, 'import os, torch, argparse\n'), ((9002, 9033), 'os.path.exists', 'os.path.exists', (['loadDecodername'], {}), '(loadDecodername)\n', (9016, 9033), False, 'import os, torch, argparse\n'), ((9070, 9099), 'os.path.exists', 'os.path.exists', (['loadEmbedding'], {}), '(loadEmbedding)\n', (9084, 9099), False, 'import os, torch, argparse\n'), ((1145, 1195), 'pytorch_pretrained_bert.BertTokenizer.from_pretrained', 'BertTokenizer.from_pretrained', (['"""bert-base-chinese"""'], {}), "('bert-base-chinese')\n", (1174, 1195), False, 'from pytorch_pretrained_bert import BertTokenizer\n'), ((1218, 1312), '_preprocessing.DRCDDataset', 'DRCDDataset', (['args.data_set', 'args.data_sel', 'args.data_mode', 'args.with_ans', 'vocab', 'tokenizer'], {}), '(args.data_set, args.data_sel, args.data_mode, args.with_ans,\n vocab, tokenizer)\n', (1229, 1312), False, 'from _preprocessing import Vocabulary, DRCDDataset\n'), ((1326, 1430), 'torch.utils.data.DataLoader', 'DataLoader', (['data_transformer'], {'batch_size': 'args.batch_size', 'shuffle': '(True)', 'collate_fn': 'create_bert_batch'}), '(data_transformer, batch_size=args.batch_size, shuffle=True,\n collate_fn=create_bert_batch)\n', (1336, 1430), False, 'from torch.utils.data 
import DataLoader\n'), ((1714, 1746), '_model.BertEncoder', 'BertEncoder', (['args.transfer_layer'], {}), '(args.transfer_layer)\n', (1725, 1746), False, 'from _model import EncoderRNN, BertEncoder\n'), ((3683, 3711), 'torch.load', 'torch.load', (['args.loadEncoder'], {}), '(args.loadEncoder)\n', (3693, 3711), False, 'import os, torch, argparse\n'), ((4090, 4118), 'torch.load', 'torch.load', (['args.loadDecoder'], {}), '(args.loadDecoder)\n', (4100, 4118), False, 'import os, torch, argparse\n'), ((4271, 4300), 'torch.load', 'torch.load', (['args.loadFilename'], {}), '(args.loadFilename)\n', (4281, 4300), False, 'import os, torch, argparse\n'), ((5631, 5735), 'torch.utils.data.DataLoader', 'DataLoader', (['dev_transformer'], {'batch_size': 'args.batch_size', 'shuffle': '(True)', 'collate_fn': 'create_jieba_batch'}), '(dev_transformer, batch_size=args.batch_size, shuffle=True,\n collate_fn=create_jieba_batch)\n', (5641, 5735), False, 'from torch.utils.data import DataLoader\n'), ((6301, 6407), 'torch.utils.data.DataLoader', 'DataLoader', (['eval_transformer'], {'batch_size': 'args.batch_size', 'shuffle': '(False)', 'collate_fn': 'create_jieba_batch'}), '(eval_transformer, batch_size=args.batch_size, shuffle=False,\n collate_fn=create_jieba_batch)\n', (6311, 6407), False, 'from torch.utils.data import DataLoader\n'), ((5783, 5886), 'torch.utils.data.DataLoader', 'DataLoader', (['dev_transformer'], {'batch_size': 'args.batch_size', 'shuffle': '(True)', 'collate_fn': 'create_bert_batch'}), '(dev_transformer, batch_size=args.batch_size, shuffle=True,\n collate_fn=create_bert_batch)\n', (5793, 5886), False, 'from torch.utils.data import DataLoader\n'), ((6456, 6561), 'torch.utils.data.DataLoader', 'DataLoader', (['eval_transformer'], {'batch_size': 'args.batch_size', 'shuffle': '(False)', 'collate_fn': 'create_bert_batch'}), '(eval_transformer, batch_size=args.batch_size, shuffle=False,\n collate_fn=create_bert_batch)\n', (6466, 6561), False, 'from torch.utils.data import DataLoader\n')]
from PyQt5 import QtCore, QtGui, QtWidgets version = '1.0.0' class AboutDialog(object): def setupUi(self, Dialog): Dialog.setObjectName("Dialog") Dialog.resize(390, 110) Dialog.setFixedSize(390, 110) self.appNameLabel = QtWidgets.QLabel(Dialog) self.appNameLabel.setGeometry(QtCore.QRect(10, 10, 381, 20)) font = QtGui.QFont() font.setPointSize(12) self.appNameLabel.setFont(font) self.appNameLabel.setAlignment(QtCore.Qt.AlignCenter) self.appNameLabel.setObjectName("appNameLabel") self.appInfoLabel = QtWidgets.QLabel(Dialog) self.appInfoLabel.setGeometry(QtCore.QRect(0, 40, 391, 20)) font = QtGui.QFont() font.setPointSize(10) self.appInfoLabel.setFont(font) self.appInfoLabel.setAlignment(QtCore.Qt.AlignCenter) self.appInfoLabel.setObjectName("appInfoLabel") self.repoButton = QtWidgets.QPushButton(Dialog) self.repoButton.setGeometry(QtCore.QRect(150, 70, 101, 23)) self.repoButton.setObjectName("repoButton") self.retranslateUi(Dialog) self.bind_signals() QtCore.QMetaObject.connectSlotsByName(Dialog) def retranslateUi(self, Dialog): _translate = QtCore.QCoreApplication.translate Dialog.setWindowTitle(_translate("Dialog", "About Discord RPC")) self.appNameLabel.setText(_translate("Dialog", "Discord Rich Presence Customizer")) self.appInfoLabel.setText(_translate("Dialog", "Made by Geekid812 - Version " + version)) self.repoButton.setText(_translate("Dialog", "GitHub Repository")) def bind_signals(self): self.repoButton.clicked.connect(lambda: QtGui.QDesktopServices.openUrl(QtCore.QUrl("https://github.com/geekid812/discord-rpc-customizer")))
[ "PyQt5.QtWidgets.QLabel", "PyQt5.QtCore.QRect", "PyQt5.QtCore.QUrl", "PyQt5.QtWidgets.QPushButton", "PyQt5.QtGui.QFont", "PyQt5.QtCore.QMetaObject.connectSlotsByName" ]
[((259, 283), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['Dialog'], {}), '(Dialog)\n', (275, 283), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((368, 381), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (379, 381), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((599, 623), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['Dialog'], {}), '(Dialog)\n', (615, 623), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((707, 720), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (718, 720), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((936, 965), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['Dialog'], {}), '(Dialog)\n', (957, 965), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1158, 1203), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['Dialog'], {}), '(Dialog)\n', (1195, 1203), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((322, 351), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(10)', '(381)', '(20)'], {}), '(10, 10, 381, 20)\n', (334, 351), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((662, 690), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(40)', '(391)', '(20)'], {}), '(0, 40, 391, 20)\n', (674, 690), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1002, 1032), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(150)', '(70)', '(101)', '(23)'], {}), '(150, 70, 101, 23)\n', (1014, 1032), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1743, 1809), 'PyQt5.QtCore.QUrl', 'QtCore.QUrl', (['"""https://github.com/geekid812/discord-rpc-customizer"""'], {}), "('https://github.com/geekid812/discord-rpc-customizer')\n", (1754, 1809), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')]
import os
import sys
import socket
import logging
from uuid import uuid4
from datetime import datetime
from .paths import Path

from PySide2.QtCore import QStandardPaths

PORT = 8977
LOG_TO_FILE = False
RUNNING_BUNDLE = getattr(sys, 'frozen', False)

# noinspection SpellCheckingInspection
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s %(name)-8s %(levelname)-8s %(message)s',
    datefmt='%m-%d %H:%M',
    filename=os.path.join(QStandardPaths.writableLocation(QStandardPaths.TempLocation), 'courier.log'),
    filemode="w")


def is_valid_ip(ip: str) -> bool:
    try:
        socket.inet_aton(ip)
        return True
    except socket.error:
        return False


# noinspection PyPep8Naming
class logger:
    @staticmethod
    def _log(*args, mode=logging.info):
        MODE = "INFO"
        if mode == logging.debug:
            MODE = "DEBUG"
        elif mode == logging.error:
            MODE = "ERROR"
        elif mode == logging.warning:  # compare against logging.warning, which is what warn() passes in
            MODE = "WARN"
        if not RUNNING_BUNDLE:
            print(f"{MODE}: ", *args)
        if LOG_TO_FILE:
            mode(" ".join([str(i) for i in args]))

    @staticmethod
    def log(*args):
        logger._log(*args)

    @staticmethod
    def debug(*args):
        logger._log(*args, mode=logging.debug)

    @staticmethod
    def error(*args):
        logger._log(*args, mode=logging.error)

    @staticmethod
    def warn(*args):
        logger._log(*args, mode=logging.warning)


def getUniqueId() -> str:
    """
    Creates a unique id for this device. The id is used for unique
    identification in chats. If there is no unique id yet, a new one is created.
    """
    path = Path()
    filedir = path.UUID_FILE
    if os.path.exists(filedir):
        # just read the file and return its contents
        with open(filedir) as file:
            uid = file.read()
        return uid

    # create a new id and persist it
    uid = f"{uuid4()}-{datetime.now()}"
    with open(filedir, "w") as file:
        file.write(uid)
    return uid


def username(name: str = None) -> str:
    """
    Returns the hostname if the client has not set a username;
    if the client has a username, just return it.
    If the name argument is passed, set it as the new username.
    """
    path = Path()
    filedir = path.USERNAME_FILE
    if not name:
        if os.path.exists(filedir):
            # just read the file and return its contents
            with open(filedir) as file:
                username = file.read()
            return username.splitlines(keepends=False)[0]

    # create a new username and persist it
    username = name or socket.gethostname()
    with open(filedir, "w") as file:
        file.write(username)
    return username
[ "uuid.uuid4", "os.path.exists", "socket.inet_aton", "socket.gethostname", "PySide2.QtCore.QStandardPaths.writableLocation", "datetime.datetime.now" ]
[((1534, 1557), 'os.path.exists', 'os.path.exists', (['filedir'], {}), '(filedir)\n', (1548, 1557), False, 'import os\n'), ((580, 600), 'socket.inet_aton', 'socket.inet_aton', (['ip'], {}), '(ip)\n', (596, 600), False, 'import socket\n'), ((2093, 2116), 'os.path.exists', 'os.path.exists', (['filedir'], {}), '(filedir)\n', (2107, 2116), False, 'import os\n'), ((2317, 2337), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (2335, 2337), False, 'import socket\n'), ((443, 503), 'PySide2.QtCore.QStandardPaths.writableLocation', 'QStandardPaths.writableLocation', (['QStandardPaths.TempLocation'], {}), '(QStandardPaths.TempLocation)\n', (474, 503), False, 'from PySide2.QtCore import QStandardPaths\n'), ((1723, 1737), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1735, 1737), False, 'from datetime import datetime\n'), ((1701, 1708), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (1706, 1708), False, 'from uuid import uuid4\n')]
import urllib.request as request
import json

wFactor = [11, 10, 9, 8, 7, 6, 5, 4, 3]  ## weighting factors used by wfCalc


def identDig(inpt):  ## Task 1: check whether all digits are identical
    return all(i == inpt[0] for i in inpt)


def wfCalc(inpt):  ## Task 2: verify the check digit using the weighted factors
    wSum = []
    for i in range(len(inpt) - 1):
        wSum.append(int(inpt[i]) * wFactor[i])
    remainder = 12 - (sum(wSum) % 12)  ## complement of the weighted sum modulo 12
    ## remainders of 10 or 12 can never equal a single check digit, so no special case is needed
    if remainder == int(inpt[9]) or (remainder == 11 and int(inpt[9]) == 0):
        return True
    else:
        return False


def importJson():  ## Task 3: import the JSON file from the URL
    with request.urlopen('https://s3.amazonaws.com/cognisant-interview-resources/identifiers.json') as r:
        s = r.read()
    array = json.loads(s)
    return array
[ "urllib.request.urlopen", "json.loads" ]
[((954, 1049), 'urllib.request.urlopen', 'request.urlopen', (['"""https://s3.amazonaws.com/cognisant-interview-resources/identifiers.json"""'], {}), "(\n 'https://s3.amazonaws.com/cognisant-interview-resources/identifiers.json')\n", (969, 1049), True, 'import urllib.request as request\n'), ((1090, 1103), 'json.loads', 'json.loads', (['s'], {}), '(s)\n', (1100, 1103), False, 'import json\n')]
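A quick, illustrative exercise of the check-digit helpers above. The ten-digit identifiers below are fabricated for demonstration (real data would come from importJson()); identDig screens out all-identical ids, and wfCalc applies the weighted modulo-12 test:

# Hypothetical sample ids, used only to show the control flow.
for sample in ["1111111111", "0123456789"]:
    if identDig(sample):
        print(sample, "-> rejected: all digits identical")
    elif wfCalc(sample):
        print(sample, "-> check digit valid")
    else:
        print(sample, "-> check digit invalid")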
from django.db import models
from django.core.validators import MinValueValidator


class promoCode(models.Model):
    title = models.CharField(max_length=40, unique=True, blank=False, null=False)
    code = models.CharField(max_length=40, blank=False, null=False)
    promo = models.FloatField(default=0, validators=[MinValueValidator(0)])

    def __str__(self):
        return self.title
[ "django.db.models.CharField", "django.core.validators.MinValueValidator" ]
[((145, 214), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'unique': '(True)', 'blank': '(False)', 'null': '(False)'}), '(max_length=40, unique=True, blank=False, null=False)\n', (161, 214), False, 'from django.db import models\n'), ((226, 282), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'blank': '(False)', 'null': '(False)'}), '(max_length=40, blank=False, null=False)\n', (242, 282), False, 'from django.db import models\n'), ((336, 356), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0)'], {}), '(0)\n', (353, 356), False, 'from django.core.validators import MaxValueValidator, MinValueValidator\n')]
""" Copyright 2020 ICES, University of Manchester, Evenset Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ #Code by <NAME> import nltk nltk.download('punkt') from nltk.tokenize.util import align_tokens from nltk.tokenize.treebank import TreebankWordTokenizer import re import tensorflow_hub as hub #from bert.tokenization import FullTokenizer import tensorflow as tf sess = tf.compat.v1.Session() _treebank_word_tokenizer = TreebankWordTokenizer() def tokenize_to_seq(documents): # in the end you will get a big list of concatenated documents, no division between individual documents anymore. sequences = [] sequence = [] for doc in documents: if len(sequence)>0: sequences.append(sequence) sequence = [] text = doc["text"] file = doc["id"] text = text.replace("\"", "'") text = text.replace("`", "'") text = text.replace("``", "") text = text.replace("''", "") tokens = custom_span_tokenize(text) for token in tokens: token_txt = text[token[0]:token[1]] found = False for tag in doc["tags"]: if int(tag["start"])<=token[0] and int(tag["end"])>=token[1]: token_tag = tag["tag"] #token_tag_type = tag["type"] found = True if found==False: token_tag = "O" #token_tag_type = "O" sequence.append((token_txt,token_tag)) if token_txt == "." or token_txt == "?" or token_txt == "!": sequences.append(sequence) sequence = [] sequences.append(sequence) return sequences def tokenize_fa(documents): """ Tokenization function. Returns list of sequences :param documents: list of texts :type language: list """ sequences = [] sequence = [] for doc in documents: if len(sequence) > 0: sequences.append(sequence) sequence = [] text = doc text = text.replace("\"", "'") text = text.replace("`", "'") text = text.replace("``", "") text = text.replace("''", "") tokens = custom_span_tokenize(text) for token in tokens: token_txt = text[token[0]:token[1]] found = False if found == False: token_tag = "O" # token_tag_type = "O" sequence.append((token_txt, token_tag)) if token_txt == "." or token_txt == "?" or token_txt == "!": sequences.append(sequence) sequence = [] sequences.append(sequence) return sequences def custom_span_tokenize(text, language='english', preserve_line=True): """ Returns a spans of tokens in text. :param text: text to split into words :param language: the model name in the Punkt corpus :type language: str :param preserve_line: An option to keep the preserve the sentence and not sentence tokenize it. :type preserver_line: bool """ tokens = custom_word_tokenize(text) tokens = ['"' if tok in ['``', "''"] else tok for tok in tokens] return align_tokens(tokens, text) def custom_word_tokenize(text, language='english', preserve_line=False): """ Return a tokenized copy of *text*, using NLTK's recommended word tokenizer (currently an improved :class:`.TreebankWordTokenizer` along with :class:`.PunktSentenceTokenizer` for the specified language). 
:param text: text to split into words :param text: str :param language: the model name in the Punkt corpus :type language: str :param preserve_line: An option to keep the preserve the sentence and not sentence tokenize it. :type preserver_line: bool """ tokens = [] sentences = [text] if preserve_line else nltk.sent_tokenize(text, language) # splits the text into list of sentences. for sent in sentences: for token in _treebank_word_tokenizer.tokenize(sent): # TreeBankWordTokezier returns their tokenized version of those words in sentences in a list. So output is list of words/tokens. if "-" in token: m = re.compile("(\d+)(-)([a-zA-z-]+)") g = m.match(token) if g: for group in g.groups(): tokens.append(group) else: tokens.append(token) else: tokens.append(token) return tokens def shape(self,word): shape = "" for letter in word: if letter.isdigit(): shape = shape + "d" elif letter.isalpha(): if letter.isupper(): shape = shape + "W" else: shape = shape + "w" else: shape = shape + letter return shape
[ "nltk.sent_tokenize", "nltk.tokenize.util.align_tokens", "tensorflow.compat.v1.Session", "nltk.download", "nltk.tokenize.treebank.TreebankWordTokenizer", "re.compile" ]
[((622, 644), 'nltk.download', 'nltk.download', (['"""punkt"""'], {}), "('punkt')\n", (635, 644), False, 'import nltk\n'), ((861, 883), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (881, 883), True, 'import tensorflow as tf\n'), ((911, 934), 'nltk.tokenize.treebank.TreebankWordTokenizer', 'TreebankWordTokenizer', ([], {}), '()\n', (932, 934), False, 'from nltk.tokenize.treebank import TreebankWordTokenizer\n'), ((3769, 3795), 'nltk.tokenize.util.align_tokens', 'align_tokens', (['tokens', 'text'], {}), '(tokens, text)\n', (3781, 3795), False, 'from nltk.tokenize.util import align_tokens\n'), ((4445, 4479), 'nltk.sent_tokenize', 'nltk.sent_tokenize', (['text', 'language'], {}), '(text, language)\n', (4463, 4479), False, 'import nltk\n'), ((4789, 4824), 're.compile', 're.compile', (['"""(\\\\d+)(-)([a-zA-z-]+)"""'], {}), "('(\\\\d+)(-)([a-zA-z-]+)')\n", (4799, 4824), False, 'import re\n')]
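For context, a small sketch of what custom_span_tokenize above produces (assuming the punkt data downloaded at import time is available, and the module is importable): each element is a (start, end) character offset into the original text, and the hyphen branch splits a leading number off tokens such as '3-year-old':

text = "A 3-year-old child spoke."
for start, end in custom_span_tokenize(text):
    print((start, end), repr(text[start:end]))
# the token '3-year-old' comes back as the pieces '3', '-', 'year-old',
# each with its own span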
import os
import pickle

import numpy as np


def deviation_from_actual_value(array):
    """
    Calculates the standard deviation of each parameter.

    :param array: either (num_iters, num_points_in_sim, [n] params)
                  or (num_iters, num_points_in_sim, [n*m] params)
    :return: array of per-point, per-parameter standard deviations
    """
    if array.ndim == 3:
        deviations = np.zeros((array.shape[1], array.shape[2]))
        for pt in range(array.shape[1]):
            for param in range(array.shape[2]):
                dev = np.std(array[:, pt, param])
                deviations[pt, param] = dev
        return deviations
    elif array.ndim == 4:
        deviations = np.zeros((array.shape[1], array.shape[2], array.shape[3]))
        for pt in range(array.shape[1]):
            for param_ind1 in range(array.shape[2]):
                for param_ind2 in range(array.shape[3]):
                    dev = np.std(array[:, pt, param_ind1, param_ind2])
                    deviations[pt, param_ind1, param_ind2] = dev
        return deviations
    else:
        raise ValueError("Wrong num of dimensions")


def main():
    # retrieving pickle data calculated from parameter_deviation_calculator.py
    directory_path = os.path.dirname(os.getcwd())  # parent directory of the current working directory
    pickle_dir = directory_path + '/Bound_Estimation/Parameter_Deviation/'
    with open(pickle_dir + 'theta.pkl', 'rb') as f:
        theta_l_r = pickle.load(f)
    with open(pickle_dir + 'rtof_dist.pkl', 'rb') as f:
        rtof_dist = pickle.load(f)
    with open(pickle_dir + 'tdoa_dist.pkl', 'rb') as f:
        tdoa_dist = pickle.load(f)

    # calculating deviations for theta, rtof_dist, tdoa_dist
    deviation_theta = deviation_from_actual_value(theta_l_r)
    deviation_rtof_dist = deviation_from_actual_value(rtof_dist)
    deviation_tdoa_dist = deviation_from_actual_value(tdoa_dist)

    # saving the calculated deviation parameters
    with open(pickle_dir + 'deviation_theta.pkl', 'wb') as f:
        pickle.dump(deviation_theta, f)
    with open(pickle_dir + 'deviation_rtof_dist.pkl', 'wb') as f:
        pickle.dump(deviation_rtof_dist, f)
    with open(pickle_dir + 'deviation_tdoa_dist.pkl', 'wb') as f:
        pickle.dump(deviation_tdoa_dist, f)


if __name__ == '__main__':
    main()
[ "pickle.dump", "numpy.std", "os.getcwd", "numpy.zeros", "pickle.load" ]
[((328, 370), 'numpy.zeros', 'np.zeros', (['(array.shape[1], array.shape[2])'], {}), '((array.shape[1], array.shape[2]))\n', (336, 370), True, 'import numpy as np\n'), ((1451, 1465), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1462, 1465), False, 'import pickle\n'), ((1543, 1557), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1554, 1557), False, 'import pickle\n'), ((1635, 1649), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1646, 1649), False, 'import pickle\n'), ((2021, 2052), 'pickle.dump', 'pickle.dump', (['deviation_theta', 'f'], {}), '(deviation_theta, f)\n', (2032, 2052), False, 'import pickle\n'), ((2127, 2162), 'pickle.dump', 'pickle.dump', (['deviation_rtof_dist', 'f'], {}), '(deviation_rtof_dist, f)\n', (2138, 2162), False, 'import pickle\n'), ((2237, 2272), 'pickle.dump', 'pickle.dump', (['deviation_tdoa_dist', 'f'], {}), '(deviation_tdoa_dist, f)\n', (2248, 2272), False, 'import pickle\n'), ((624, 682), 'numpy.zeros', 'np.zeros', (['(array.shape[1], array.shape[2], array.shape[3])'], {}), '((array.shape[1], array.shape[2], array.shape[3]))\n', (632, 682), True, 'import numpy as np\n'), ((481, 508), 'numpy.std', 'np.std', (['array[:, pt, param]'], {}), '(array[:, pt, param])\n', (487, 508), True, 'import numpy as np\n'), ((1225, 1236), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1234, 1236), False, 'import os\n'), ((860, 904), 'numpy.std', 'np.std', (['array[:, pt, param_ind1, param_ind2]'], {}), '(array[:, pt, param_ind1, param_ind2])\n', (866, 904), True, 'import numpy as np\n'), ((1249, 1260), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1258, 1260), False, 'import os\n')]
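An illustrative call for deviation_from_actual_value above, substituting random draws for the pickled simulation results (the shapes follow the docstring convention; this assumes the module above is on the path):

import numpy as np
rng = np.random.default_rng(0)
fake_runs = rng.normal(size=(50, 8, 3))   # (num_iters, num_points_in_sim, params)
devs = deviation_from_actual_value(fake_runs)
print(devs.shape)                         # (8, 3): one standard deviation per point and parameter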
from django.contrib import admin, messages from django_summernote.admin import SummernoteModelAdmin from posts.models import Post, Tag @admin.register(Post) class PostAdmin(SummernoteModelAdmin): summernote_fields = ('body',) list_display = ('title', 'id', 'is_public', 'slug', 'author', 'edited_at', 'created_at') list_filter = ('is_public', 'created_at', 'edited_at',) search_fields = ['title', 'slug', 'author'] prepopulated_fields = {'slug': ('title',)} actions = ['make_public', 'make_unpublic'] def make_public(modeladmin, request, queryset): queryset.update(is_public=True) messages.success( request, 'Selected Post(s) are now public !') def make_unpublic(modeladmin, request, queryset): queryset.update(is_public=False) messages.success( request, 'Selected Post(s) are no longer public!') @admin.register(Tag) class TagAdmin(admin.ModelAdmin): list_display = ('name', 'id') list_filter = ('name',) search_fields = ('name',)
[ "django.contrib.admin.register", "django.contrib.messages.success" ]
[((139, 159), 'django.contrib.admin.register', 'admin.register', (['Post'], {}), '(Post)\n', (153, 159), False, 'from django.contrib import admin, messages\n'), ((913, 932), 'django.contrib.admin.register', 'admin.register', (['Tag'], {}), '(Tag)\n', (927, 932), False, 'from django.contrib import admin, messages\n'), ((649, 711), 'django.contrib.messages.success', 'messages.success', (['request', '"""Selected Post(s) are now public !"""'], {}), "(request, 'Selected Post(s) are now public !')\n", (665, 711), False, 'from django.contrib import admin, messages\n'), ((829, 896), 'django.contrib.messages.success', 'messages.success', (['request', '"""Selected Post(s) are no longer public!"""'], {}), "(request, 'Selected Post(s) are no longer public!')\n", (845, 896), False, 'from django.contrib import admin, messages\n')]
import seaborn as sns
import matplotlib.pyplot as plt
import numpy as np


def plot(gather_count, filename):
    gather_count = np.log(gather_count + 1)  # log-scale the counts so the dominant cell does not wash out the rest
    sns.color_palette("light:b", as_cmap=True)
    ax = sns.heatmap(gather_count, vmax=8, vmin=0, cmap="Purples", xticklabels=False, yticklabels=False,
                     cbar=False, square=True)
    ax.spines['top'].set_visible(True)
    ax.spines['right'].set_visible(True)
    ax.spines['bottom'].set_visible(True)
    ax.spines['left'].set_visible(True)
    [i.set_linewidth(2) for i in ax.spines.values()]

    plt.tight_layout()
    # plt.show()
    plt.savefig(filename)


def main():
    gather_count = [[0, 0, 0, 0, 0],
                    [0, 0, 1, 0, 1],
                    [0, 1, 2, 0, 4],
                    [0, 0, 0, 7, 24],
                    [0, 0, 5, 18, 4549]]
    gather_count = np.array(gather_count)
    # plot() already applies the log transform, so do not apply it here as well,
    # and pass the output path it expects as its second argument
    plot(gather_count, "gather_count.png")


if __name__ == "__main__":
    main()
[ "seaborn.heatmap", "numpy.log", "numpy.array", "seaborn.color_palette", "matplotlib.pyplot.tight_layout", "matplotlib.pyplot.savefig" ]
[((128, 152), 'numpy.log', 'np.log', (['(gather_count + 1)'], {}), '(gather_count + 1)\n', (134, 152), True, 'import numpy as np\n'), ((157, 199), 'seaborn.color_palette', 'sns.color_palette', (['"""light:b"""'], {'as_cmap': '(True)'}), "('light:b', as_cmap=True)\n", (174, 199), True, 'import seaborn as sns\n'), ((209, 333), 'seaborn.heatmap', 'sns.heatmap', (['gather_count'], {'vmax': '(8)', 'vmin': '(0)', 'cmap': '"""Purples"""', 'xticklabels': '(False)', 'yticklabels': '(False)', 'cbar': '(False)', 'square': '(True)'}), "(gather_count, vmax=8, vmin=0, cmap='Purples', xticklabels=False,\n yticklabels=False, cbar=False, square=True)\n", (220, 333), True, 'import seaborn as sns\n'), ((591, 609), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (607, 609), True, 'import matplotlib.pyplot as plt\n'), ((631, 652), 'matplotlib.pyplot.savefig', 'plt.savefig', (['filename'], {}), '(filename)\n', (642, 652), True, 'import matplotlib.pyplot as plt\n'), ((796, 818), 'numpy.array', 'np.array', (['gather_count'], {}), '(gather_count)\n', (804, 818), True, 'import numpy as np\n'), ((838, 862), 'numpy.log', 'np.log', (['(gather_count + 1)'], {}), '(gather_count + 1)\n', (844, 862), True, 'import numpy as np\n')]
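A short check of the log scaling used by the heatmap script above: np.log(x + 1) keeps empty cells at exactly 0 while compressing the dominant 4549 count into the 0-8 colour range fixed by vmin/vmax:

import numpy as np
print(np.log(np.array([0, 1, 24, 4549]) + 1))
# -> [0.     0.693  3.219  8.423]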
'''Manual data ingestion DAG.'''
from util import create_bq_ingest_operator

# Ignore type-checking for the Airflow modules; they are installed in both our dev and prod environments.
from airflow.models import Variable  # type: ignore
from airflow import DAG  # type: ignore
from airflow.utils.dates import days_ago  # type: ignore

default_args = {
    'start_date': days_ago(0),
}

manual_ingestion_dag = DAG(
    'manual_ingestion_dag',
    default_args=default_args,
    schedule_interval=None,
    description='Triggering for manual uploads.')

# Manual uploads
manual_uploads_payload = {'message': {'is_airflow_run': True,
                                      'gcs_bucket': Variable.get('GCS_MANUAL_UPLOADS_BUCKET'),
                                      'id': 'MANUAL_UPLOADS'}}
manual_uploads_bq_operator = create_bq_ingest_operator(
    'manual_uploads_task',
    manual_uploads_payload,
    manual_ingestion_dag)
[ "airflow.models.Variable.get", "util.create_bq_ingest_operator", "airflow.utils.dates.days_ago", "airflow.DAG" ]
[((383, 512), 'airflow.DAG', 'DAG', (['"""manual_ingestion_dag"""'], {'default_args': 'default_args', 'schedule_interval': 'None', 'description': '"""Triggering for manual uploads."""'}), "('manual_ingestion_dag', default_args=default_args, schedule_interval=\n None, description='Triggering for manual uploads.')\n", (386, 512), False, 'from airflow import DAG\n'), ((792, 890), 'util.create_bq_ingest_operator', 'create_bq_ingest_operator', (['"""manual_uploads_task"""', 'manual_uploads_payload', 'manual_ingestion_dag'], {}), "('manual_uploads_task', manual_uploads_payload,\n manual_ingestion_dag)\n", (817, 890), False, 'from util import create_bq_ingest_operator\n'), ((344, 355), 'airflow.utils.dates.days_ago', 'days_ago', (['(0)'], {}), '(0)\n', (352, 355), False, 'from airflow.utils.dates import days_ago\n'), ((657, 698), 'airflow.models.Variable.get', 'Variable.get', (['"""GCS_MANUAL_UPLOADS_BUCKET"""'], {}), "('GCS_MANUAL_UPLOADS_BUCKET')\n", (669, 698), False, 'from airflow.models import Variable\n')]
# Date: Nrupatunga: Tuesday 04 July 2017
# Email: <EMAIL>
# Name: Nrupatunga
# Description: loading Imagenet dataset
from __future__ import print_function
import sys
import os
import cv2
import glob
from annotation import annotation
import xml.etree.ElementTree as ET
from ..logger.logger import setup_logger
from ..helper import config

kMaxRatio = 0.66


class loader_imagenet:

    """Loader for ImageNet DET images and their bounding-box annotations."""

    def __init__(self, imagenet_folder, annotations_folder, logger):
        """Stores the image/annotation folders and validates the image folder."""
        self.logger = logger
        self.imagenet_folder = imagenet_folder
        self.annotations_folder = annotations_folder

        if not os.path.isdir(imagenet_folder):
            logger.error('{} is not a valid directory'.format(imagenet_folder))

    def loaderImageNetDet(self):
        """Loads all annotation files.

        :returns: list of per-image annotation lists
        """
        logger = self.logger
        imagenet_folder = self.imagenet_folder
        imagenet_subdirs = sorted(self.find_subfolders(self.annotations_folder))
        num_annotations = 0
        list_of_annotations_out = []

        for i, imgnet_sub_folder in enumerate(imagenet_subdirs):
            annotations_files = sorted(glob.glob(os.path.join(self.annotations_folder, imgnet_sub_folder, '*.xml')))
            logger.info('Loading {}/{} - annotation file from folder = {}'.format(i + 1, len(imagenet_subdirs), imgnet_sub_folder))
            for ann in annotations_files:
                list_of_annotations, num_ann_curr = self.load_annotation_file(ann)
                num_annotations = num_annotations + num_ann_curr
                if len(list_of_annotations) == 0:
                    continue
                list_of_annotations_out.append(list_of_annotations)

        logger.info('Found {} annotations from {} images'.format(num_annotations, len(list_of_annotations_out)))

        # save it for future use
        self.list_of_annotations_out = list_of_annotations_out
        self.num_annotations = num_annotations

        return list_of_annotations_out

    def find_subfolders(self, imagenet_folder):
        """Lists the sub-directories of the given folder.

        :imagenet_folder: directory containing the annotation sub-folders
        :returns: list of sub-directory names
        """
        return [dir_name for dir_name in os.listdir(imagenet_folder) if os.path.isdir(os.path.join(imagenet_folder, dir_name))]

    def load_annotation_file(self, annotation_file):
        """Parses one XML annotation file.

        :returns: (list of annotations, number of annotations kept)
        """
        list_of_annotations = []
        num_annotations = 0
        root = ET.parse(annotation_file).getroot()
        folder = root.find('folder').text
        filename = root.find('filename').text
        size = root.find('size')
        disp_width = int(size.find('width').text)
        disp_height = int(size.find('height').text)

        for obj in root.findall('object'):
            bbox = obj.find('bndbox')
            xmin = int(bbox.find('xmin').text)
            xmax = int(bbox.find('xmax').text)
            ymin = int(bbox.find('ymin').text)
            ymax = int(bbox.find('ymax').text)

            width = xmax - xmin
            height = ymax - ymin
            if width > (kMaxRatio * disp_width) or height > (kMaxRatio * disp_height):
                continue

            if ((xmin < 0) or (ymin < 0) or (xmax <= xmin) or (ymax <= ymin)):
                continue

            objAnnotation = annotation()
            objAnnotation.setbbox(xmin, xmax, ymin, ymax)
            objAnnotation.setWidthHeight(disp_width, disp_height)
            objAnnotation.setImagePath(os.path.join(folder, filename))
            list_of_annotations.append(objAnnotation)
            num_annotations = num_annotations + 1

        return list_of_annotations, num_annotations

    def load_annotation(self, image_num, annotation_num):
        """Loads one image and its bounding box, rescaled to the actual image size.

        :returns: (image, bbox)
        """
        logger = self.logger
        images = self.list_of_annotations_out
        list_annotations = images[image_num]
        random_ann = list_annotations[annotation_num]

        img_path = os.path.join(self.imagenet_folder, random_ann.image_path + '.JPEG')
        if config.DEBUG:
            img_path = "/media/nrupatunga/Data-Backup/DL/goturn/ILSVRC2014/ILSVRC2014_DET_train/ILSVRC2014_train_0005/ILSVRC2014_train_00059375.JPEG"
            random_ann.bbox.x1 = 243
            random_ann.bbox.y1 = 157
            random_ann.bbox.x2 = 278
            random_ann.bbox.y2 = 176
            random_ann.disp_height = 375
            random_ann.disp_width = 500

        image = cv2.imread(img_path)
        img_height = image.shape[0]
        img_width = image.shape[1]

        sc_factor_1 = 1.0
        sc_factor_2 = 1.0
        if img_height != random_ann.disp_height or img_width != random_ann.disp_width:
            logger.info('Image Number = {}, Annotation Number = {}, Image file = {}'.format(image_num, annotation_num, img_path))
            logger.info('Image Size = {} x {}'.format(img_width, img_height))
            logger.info('Display Size = {} x {}'.format(random_ann.disp_width, random_ann.disp_height))

            sc_factor_1 = (img_height * 1.) / random_ann.disp_height
            sc_factor_2 = (img_width * 1.) / random_ann.disp_width

            logger.info('Factor: {} {}'.format(sc_factor_1, sc_factor_2))

        bbox = random_ann.bbox
        # scale x coordinates by the width factor and y coordinates by the height factor
        bbox.x1 = bbox.x1 * sc_factor_2
        bbox.x2 = bbox.x2 * sc_factor_2
        bbox.y1 = bbox.y1 * sc_factor_1
        bbox.y2 = bbox.y2 * sc_factor_1

        return image, bbox


if '__main__' == __name__:
    logger = setup_logger(logfile=None)
    objLoaderImgNet = loader_imagenet('/media/nrupatunga/data/datasets/ILSVRC2014/ILSVRC2014_DET_train/', '/media/nrupatunga/data/datasets/ILSVRC2014/ILSVRC2014_DET_bbox_train/', logger)
    dict_list_of_annotations = objLoaderImgNet.loaderImageNetDet()
[ "xml.etree.ElementTree.parse", "os.path.isdir", "cv2.imread", "annotation.annotation", "os.path.join", "os.listdir" ]
[((4181, 4248), 'os.path.join', 'os.path.join', (['self.imagenet_folder', "(random_ann.image_path + '.JPEG')"], {}), "(self.imagenet_folder, random_ann.image_path + '.JPEG')\n", (4193, 4248), False, 'import os\n'), ((4672, 4692), 'cv2.imread', 'cv2.imread', (['img_path'], {}), '(img_path)\n', (4682, 4692), False, 'import cv2\n'), ((679, 709), 'os.path.isdir', 'os.path.isdir', (['imagenet_folder'], {}), '(imagenet_folder)\n', (692, 709), False, 'import os\n'), ((3477, 3489), 'annotation.annotation', 'annotation', ([], {}), '()\n', (3487, 3489), False, 'from annotation import annotation\n'), ((2329, 2356), 'os.listdir', 'os.listdir', (['imagenet_folder'], {}), '(imagenet_folder)\n', (2339, 2356), False, 'import os\n'), ((2635, 2660), 'xml.etree.ElementTree.parse', 'ET.parse', (['annotation_file'], {}), '(annotation_file)\n', (2643, 2660), True, 'import xml.etree.ElementTree as ET\n'), ((3653, 3683), 'os.path.join', 'os.path.join', (['folder', 'filename'], {}), '(folder, filename)\n', (3665, 3683), False, 'import os\n'), ((1241, 1306), 'os.path.join', 'os.path.join', (['self.annotations_folder', 'imgnet_sub_folder', '"""*.xml"""'], {}), "(self.annotations_folder, imgnet_sub_folder, '*.xml')\n", (1253, 1306), False, 'import os\n'), ((2374, 2413), 'os.path.join', 'os.path.join', (['imagenet_folder', 'dir_name'], {}), '(imagenet_folder, dir_name)\n', (2386, 2413), False, 'import os\n')]
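Each record in this dump pairs a `code` field with `extract_api` tuples of the form `((start, end), 'resolved.api.name', 'call text', (args, kwargs), ..., import line)`, where `(start, end)` is a character span in the original source of the call expression. The sketch below is not the dataset's actual extraction tooling; it is a minimal illustration, using only the standard `ast` module, of how such call spans and dotted call targets could be recovered. It assumes ASCII source (ast column offsets are UTF-8 byte based) and omits the alias resolution through imports (e.g. `ET.parse` to `xml.etree.ElementTree.parse`) that the dataset's annotations also perform.

import ast

def call_spans(code):
    """Yield ((start, end), dotted_name) for every call on a dotted name.

    Spans are absolute character offsets into `code`, mirroring the
    ((start, end), ...) convention of the extract_api rows above.
    Requires Python 3.8+ for end_lineno/end_col_offset.
    """
    # Precompute the absolute offset at which each line starts.
    line_starts = [0]
    for line in code.splitlines(keepends=True):
        line_starts.append(line_starts[-1] + len(line))

    def absolute(lineno, col):  # lineno is 1-based in the ast module
        return line_starts[lineno - 1] + col

    for node in ast.walk(ast.parse(code)):
        if not isinstance(node, ast.Call):
            continue
        # Unwind attribute chains, e.g. ET.parse -> ['parse', 'ET'].
        func, parts = node.func, []
        while isinstance(func, ast.Attribute):
            parts.append(func.attr)
            func = func.value
        if isinstance(func, ast.Name):
            parts.append(func.id)
            start = absolute(node.lineno, node.col_offset)
            end = absolute(node.end_lineno, node.end_col_offset)
            yield (start, end), '.'.join(reversed(parts))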
import win32serviceutil
import win32service
import win32event
import servicemanager
import socket
import logging
from logging import handlers
import threading
from xmediusmailrelayserver.server import start_server
from os.path import dirname, join
from os import mkdir, stat
import yaml
import io


def handle_command_line(argv):
    return win32serviceutil.HandleCommandLine(XMRSServiceRunner, None, argv)


class XMRSServiceRunner(win32serviceutil.ServiceFramework):
    _svc_name_ = "xmediusmailrelayserver"
    _svc_display_name_ = "XMedius Mail Relay Server"
    _svc_description_ = "Relays emails to chosen server according to recipient patterns"
    IsStopping = False

    def __init__(self, args):
        win32serviceutil.ServiceFramework.__init__(self, args)
        self._WaitStop = threading.Event()
        socket.setdefaulttimeout(60)

    def SvcStop(self):
        self.IsStopping = True
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        logger = logging.getLogger("XMediusMailRelayServer")
        logger.info('Service stopped.')
        # Wake up SvcDoRun so the service can exit.
        self._WaitStop.set()

    def SvcDoRun(self):
        self.ReportServiceStatus(win32service.SERVICE_START_PENDING)
        servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE,
                              servicemanager.PYS_SERVICE_STARTED,
                              (self._svc_name_, ''))
        self.main()
        self.ReportServiceStatus(win32service.SERVICE_RUNNING)
        # Block until SvcStop signals the stop event.
        self._WaitStop.wait()

    def main(self):
        logger = logging.getLogger('XMediusMailRelayServer')

        # Log to a rotating file next to the package, creating the
        # trace directory on first run.
        localpath = dirname(__file__)
        logfile = join(localpath, 'trace', 'server.log')
        logpath = dirname(logfile)
        try:
            stat(logpath)
        except OSError:
            mkdir(logpath)
        file_hdlr = logging.handlers.RotatingFileHandler(logfile, maxBytes=100*1024*1024, backupCount=10)
        formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
        file_hdlr.setFormatter(formatter)
        logger.addHandler(file_hdlr)

        config = yaml.safe_load(io.open(join(localpath, 'config.yml')))

        if int(config['Debug']) == 1:
            logging.getLogger('').setLevel(logging.DEBUG)
            logging.getLogger('mail.log').addHandler(file_hdlr)
        else:
            logging.getLogger('').setLevel(logging.INFO)

        logger.info('Running in service mode')
        start_server()
[ "win32serviceutil.HandleCommandLine", "os.mkdir", "os.stat", "os.path.dirname", "logging.Formatter", "socket.setdefaulttimeout", "threading.Event", "servicemanager.LogMsg", "win32serviceutil.ServiceFramework.__init__", "xmediusmailrelayserver.server.start_server", "os.path.join", "logging.handlers.RotatingFileHandler", "logging.getLogger" ]
[((382, 447), 'win32serviceutil.HandleCommandLine', 'win32serviceutil.HandleCommandLine', (['XMRSServiceRunner', 'None', 'argv'], {}), '(XMRSServiceRunner, None, argv)\n', (416, 447), False, 'import win32serviceutil\n'), ((755, 809), 'win32serviceutil.ServiceFramework.__init__', 'win32serviceutil.ServiceFramework.__init__', (['self', 'args'], {}), '(self, args)\n', (797, 809), False, 'import win32serviceutil\n'), ((834, 851), 'threading.Event', 'threading.Event', ([], {}), '()\n', (849, 851), False, 'import threading\n'), ((860, 888), 'socket.setdefaulttimeout', 'socket.setdefaulttimeout', (['(60)'], {}), '(60)\n', (884, 888), False, 'import socket\n'), ((1029, 1072), 'logging.getLogger', 'logging.getLogger', (['"""XMediusMailRelayServer"""'], {}), "('XMediusMailRelayServer')\n", (1046, 1072), False, 'import logging\n'), ((1245, 1371), 'servicemanager.LogMsg', 'servicemanager.LogMsg', (['servicemanager.EVENTLOG_INFORMATION_TYPE', 'servicemanager.PYS_SERVICE_STARTED', "(self._svc_name_, '')"], {}), "(servicemanager.EVENTLOG_INFORMATION_TYPE,\n servicemanager.PYS_SERVICE_STARTED, (self._svc_name_, ''))\n", (1266, 1371), False, 'import servicemanager\n'), ((1579, 1622), 'logging.getLogger', 'logging.getLogger', (['"""XMediusMailRelayServer"""'], {}), "('XMediusMailRelayServer')\n", (1596, 1622), False, 'import logging\n'), ((1643, 1660), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (1650, 1660), False, 'from os.path import dirname, join\n'), ((1679, 1717), 'os.path.join', 'join', (['localpath', '"""trace"""', '"""server.log"""'], {}), "(localpath, 'trace', 'server.log')\n", (1683, 1717), False, 'from os.path import dirname, join\n'), ((1737, 1753), 'os.path.dirname', 'dirname', (['logfile'], {}), '(logfile)\n', (1744, 1753), False, 'from os.path import dirname, join\n'), ((1857, 1950), 'logging.handlers.RotatingFileHandler', 'logging.handlers.RotatingFileHandler', (['logfile'], {'maxBytes': '(100 * 1024 * 1024)', 'backupCount': '(10)'}), '(logfile, maxBytes=100 * 1024 * 1024,\n backupCount=10)\n', (1893, 1950), False, 'import logging\n'), ((1964, 2026), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(levelname)s - %(message)s')\n", (1981, 2026), False, 'import logging\n'), ((2467, 2481), 'xmediusmailrelayserver.server.start_server', 'start_server', ([], {}), '()\n', (2479, 2481), False, 'from xmediusmailrelayserver.server import start_server\n'), ((1779, 1792), 'os.stat', 'stat', (['logpath'], {}), '(logpath)\n', (1783, 1792), False, 'from os import mkdir, stat\n'), ((1821, 1835), 'os.mkdir', 'mkdir', (['logpath'], {}), '(logpath)\n', (1826, 1835), False, 'from os import mkdir, stat\n'), ((2147, 2176), 'os.path.join', 'join', (['localpath', '"""config.yml"""'], {}), "(localpath, 'config.yml')\n", (2151, 2176), False, 'from os.path import dirname, join\n'), ((2230, 2251), 'logging.getLogger', 'logging.getLogger', (['""""""'], {}), "('')\n", (2247, 2251), False, 'import logging\n'), ((2288, 2317), 'logging.getLogger', 'logging.getLogger', (['"""mail.log"""'], {}), "('mail.log')\n", (2305, 2317), False, 'import logging\n'), ((2366, 2387), 'logging.getLogger', 'logging.getLogger', (['""""""'], {}), "('')\n", (2383, 2387), False, 'import logging\n')]
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import sys
import os
from collections import defaultdict

labelsize = 16
legendsize = 14
mpl.rcParams['xtick.labelsize'] = labelsize
mpl.rcParams['ytick.labelsize'] = labelsize
mpl.rcParams['axes.labelsize'] = labelsize
mpl.rcParams['axes.titlesize'] = labelsize
mpl.rcParams['font.size'] = labelsize
plt.style.use('seaborn-deep')
# plt.rcParams.update({
#     "text.usetex": True,
#     "font.family": "sans-serif",
#     "font.sans-serif": ["Helvetica"]})
plt.rcParams['pdf.fonttype'] = 42
plt.rcParams['text.usetex'] = True
colormap = plt.cm.gist_ncar


def plot_ax(ax, params, ys, legends, ylabel, full, title=None, add_legend=True):
    labelsize = 20
    legendsize = 20
    mpl.rcParams['xtick.labelsize'] = labelsize
    mpl.rcParams['ytick.labelsize'] = labelsize
    mpl.rcParams['axes.labelsize'] = labelsize
    mpl.rcParams['axes.titlesize'] = labelsize
    mpl.rcParams['font.size'] = labelsize
    color_base = ["blue", "red", "green", "tab:orange", "purple", "tab:cyan"]
    markers = ["o", "v", "s", "*", "8"]
    sorted_xs = list(set([x for xs in params for x in xs]))
    sorted_xs = sorted(sorted_xs)
    xticks = [format(xx) for xx in sorted_xs]
    for ii, (x, y) in enumerate(zip(params[::-1], ys[::-1])):
        ax.plot(x, y, c=color_base[ii], marker=markers[ii], ms=10, linewidth=3)

    ax.set_xlim(ax.get_xlim()[0], 15)
    p1 = ax.get_xlim()
    p1 = [p1[0]-0.1, p1[1]+1.0]
    p2 = [full, full]
    # horizontal dashed line marking full fine-tuning performance
    ax.plot(p1, p2, "--", ms=6, c="black", linewidth=2)

    # ax.set_xscale('log', basex=10)
    legends = legends[::-1] + ["Full Fine-tuning", "Ours"]
    if add_legend:
        ax.legend(legends, loc="best", fontsize=legendsize)
    # ax.set_xticks(sorted_xs, xticks)
    if title is not None:
        ax.set(title=title, xlabel=r"Fine-tuned Parameters (\%)", ylabel=ylabel)
    else:
        ax.set(xlabel=r"Fine-tuned Parameters (\%)", ylabel=ylabel)
    ax.grid()
    ax.set_facecolor("white")


def plot_intro():
    # palette and markers for the five methods
    color_base = ["gray", "dodgerblue", "olivedrab", "hotpink", "crimson", "tab:cyan"]
    markers = ["o", "o", "o", "o", "D"]
    fig, ax = plt.subplots(1, 1)

    full = 21.94
    legends = ["Full Fine-tuning", "BitFit", "PrefixTuning", "Adapter", "LoRA", "Ours"]
    params = [0.08, 3.6, 12.3, 14.4, 6.7]
    xsum = [17.32, 20.46, 20.98, 20.5, 21.9]
    for ii, (param, r2) in enumerate(zip(params, xsum)):
        ax.scatter(param, r2, c=color_base[ii], marker=markers[ii], edgecolor='black', linewidth=1, s=300)

    ax.set_xlim(ax.get_xlim()[0], 15)
    p1 = ax.get_xlim()
    p1 = [p1[0]-0.1, p1[1]+1.0]
    p2 = [full, full]
    ax.plot(p1, p2, "--", ms=6, c="black", linewidth=2)

    # ax.legend(legends, loc='best', fontsize=12)
    ax.grid()
    ax.set_facecolor("white")
    ax.set(xlabel=r"Fine-tuned Parameters (\%)", ylabel="ROUGE-2")
    fig.set_size_inches(5, 5)
    fig.savefig("intro.pdf", bbox_inches='tight')


def compute_params(r):
    # anchor: a prefix length of 200 corresponds to 3.6% of parameters;
    # other configurations are scaled linearly against that anchor
    base = 200 * 2 * 3 * 1024 * 12
    base_params = 3.6
    print(r * 1.0 / base * base_params)
    return r * 1.0 / base * base_params


def format(n):
    return r"{:.1f}%".format(n)


def plot_overview():
    d, L = 1024, 12
    # fig, axes = plt.subplots(2, 1)
    # percentage of parameters
    params_bitfit = [0.08]
    # params_prompt = [compute_params(d * 1), compute_params(d * 30), compute_params(d * 200), compute_params(d * 300)]
    params_prompt = [compute_params(d * 300)]
    params_pt = 
[compute_params(1 * 2 * 3 * d * L), compute_params(30 * 2 * 3 * d * L), compute_params(200 * 2 * 3 * d * L), compute_params(512 * 2 * 3 * d * L)] params_hously_adapter_ffn_ho = [compute_params(30 * 2 * 2 * d * L), compute_params(200 * 2 * 2 * d * L), compute_params(512 * 2 * 2 * d * L), compute_params(1024 * 2 * 2 * d * L)] params_lora_attn = [compute_params(1*4*3*d*L), compute_params(30*4*3*d*L), compute_params(200*4*3*d*L), compute_params(400*4*3*d*L)] params_lora_ffn = [compute_params(1*10*2*d*L), compute_params(102*10*2*d*L), compute_params(120*10*2*d*L)] params_hously_adapter_attn_ho = [compute_params(1 * 2 * 3 * d * L), compute_params(30 * 2 * 3 * d * L), compute_params(200 * 2 * 3 * d * L), compute_params(512 * 2 * 3 * d * L), compute_params(1024 * 2 * 3 * d * L)] # print("prompt: 300") # print(params_prompt) # print("pt: 1, 30, 200, 512") # print(params_pt) # print("ho/hi ffn: 1, 30, 200, 512, 1024") # print(params_hously_adapter_ffn_ho) # print("ho/hi attn: 1, 30, 200, 512, 1024") # print(params_hously_adapter_attn_ho) # print("lora attn: 1, 30, 200, 400") # print(params_lora_attn) # print("lora ffn: 1, 102, 120") # print(params_lora_ffn) # xsum xsum_bitfit = [17.32] # xsum_prompt = [5.33, 14, 15.49, 15.98] # 1, 30?, 200, 300 # xsum_prompt = [15.98] # 300 xsum_pt = [18.14, 20.01, 20.46, 20.40] # 1, 30, 200, 512 xsum_hously_adapter_ffn_ho = [17, 18.81, 20.4, 20.58, 20.98] # 1, 30, 200?, 512?, 1024? xsum_hously_adapter_ffn_ho = [18.81, 20.4, 20.58, 20.98] # 1, 30, 200?, 512?, 1024? xsum_lora_attn = [17.4, 19.59, 20.29, 20.5] # 1, 30, 200, 400 # mt mt_bitfit = [26.4] # mt_prompt = [6.0, 16.7, 21] # 1, 30, 200 # mt_prompt = [21] # 200 mt_pt = [30.2, 35.2, 35.6, 35.1] # 1, 30, 200, 512 mt_hously_adapter_ffn_ho = [24.3, 33.0, 35.6, 36.3, 36.7] # 1, 30, 200, 512, 1024 mt_hously_adapter_ffn_ho = [33.0, 35.6, 36.3, 36.7] # 1, 30, 200, 512, 1024 mt_lora_attn = [25.5, 34.2, 36.2, 36.6] # 1, 30, 200, 400 # legends = ["BitFit (bias)", "PromptTuning (input)", "PrefixTuning (attn)", "Adapter (ffn)", "LoRA (attn)"] # plot_ax(axes[0], [params_bitfit, params_prompt, params_pt, params_hously_adapter_ffn_ho, params_lora_attn], # [xsum_bitfit, xsum_prompt, xsum_pt, xsum_hously_adapter_ffn_ho, xsum_lora_attn], legends, "ROUGE-2", full=21.94, ours=21.90, # title="(a) abstractive text summarization", add_legend=False) # plot_ax(axes[1], [params_bitfit, params_prompt, params_pt, params_hously_adapter_ffn_ho, params_lora_attn], # [mt_bitfit, mt_prompt, mt_pt, mt_hously_adapter_ffn_ho, mt_lora_attn], legends, "BLEU", full=37.3, ours=37.5, # title="(b) machine translation") fig, ax = plt.subplots(1, 1) legends = ["BitFit", "PrefixTuning", "Adapter", "LoRA"] plot_ax(ax, [params_bitfit, params_pt, params_hously_adapter_ffn_ho, params_lora_attn], [xsum_bitfit, xsum_pt, xsum_hously_adapter_ffn_ho, xsum_lora_attn], legends, "XSum ROUGE-2", full=21.94, title=None, add_legend=False) fig.set_size_inches(5, 5) fig.savefig("xsum_overview.pdf", bbox_inches='tight') fig, ax = plt.subplots(1, 1) plot_ax(ax, [params_bitfit, params_pt, params_hously_adapter_ffn_ho, params_lora_attn], [mt_bitfit, mt_pt, mt_hously_adapter_ffn_ho, mt_lora_attn], legends, "MT BLEU", full=37.3, title=None) fig.set_size_inches(5,5) fig.savefig("mt_overview.pdf", bbox_inches='tight') def plot_table4(): color_base = ["blue", "red", "green", "tab:orange", "tab:cyan", "purple", ] markers = ["o", "v", "s", "*", "D"] fig, ax = plt.subplots(1, 1) ylabel = "XSum ROUGE-2" params_pt = [3.6, 9.2] params_lora = [7.2] params_adapter = [3.6, 9.2] r2_pt = 
[20.46, 20.40] r2_lora = [20.29] r2_adapter = [20.31, 20.83] ffn_params_lora = [6.1] ffn_r2_lora = [21.31] ffn_params_adapter = [2.4, 6.1, 12.3] ffn_r2_adapter = [20.66, 20.98, 21.24] ax.plot(params_pt, r2_pt, c=color_base[0], marker=markers[0], ms=10, linewidth=2) ax.plot(params_adapter, r2_adapter, c=color_base[0], marker=markers[1], ms=10, linewidth=2) ax.plot(params_lora, r2_lora, c=color_base[0], marker=markers[2], ms=10, linewidth=2) ax.plot(ffn_params_adapter, ffn_r2_adapter, "--", c=color_base[1], marker=markers[1], ms=10, linewidth=2) ax.plot(ffn_params_lora, ffn_r2_lora, "--", c=color_base[1], marker=markers[2], ms=10, linewidth=2) # legends = ["attn-PT", "attn-PA", "attn-LoRA", "ffn-PA", # "ffn-LoRA"] # ax.legend(legends, loc="lower right", fontsize=12) ax.set(xlabel=r"Fine-tuned Parameters (\%)", ylabel=ylabel) ax.grid() ax.set_facecolor("white") fig.set_size_inches(5, 3) fig.savefig("xsum_modification_position.pdf", bbox_inches='tight') fig, ax = plt.subplots(1, 1) ylabel = "MT BLEU" params_pt = [3.6, 9.2] params_lora = [7.2] params_adapter = [3.6, 9.2] bleu_pt = [35.6, 35.1] bleu_lora = [36.2] bleu_adapter = [35.6, 36.2] ffn_params_lora = [6.1] ffn_params_adapter = [2.4, 6.1, 12.3] ffn_bleu_lora = [36.5] ffn_bleu_adapter = [36.4, 37.1, 37.3] ax.plot(params_pt, bleu_pt, c=color_base[0], marker=markers[0], ms=10, linewidth=2) ax.plot(params_adapter, bleu_adapter, c=color_base[0], marker=markers[1], ms=10, linewidth=2) ax.plot(params_lora, bleu_lora, c=color_base[0], marker=markers[2], ms=10, linewidth=2) ax.plot(ffn_params_adapter, ffn_bleu_adapter, "--", c=color_base[1], marker=markers[1], ms=10, linewidth=2) ax.plot(ffn_params_lora, ffn_bleu_lora, "--", c=color_base[1], marker=markers[2], ms=10, linewidth=2) # legends = ["attn-Prefix Tuning", "attn-Parallel Adapter", "attn-LoRA", "ffn-Parallel Adaptaer", "ffn-LoRA"] # ax.legend(legends, loc="lower right", fontsize=12, bbox_to_anchor=(1.27, 0.005)) legends = ["Prefix (attn)", "PA (attn)", "LoRA (attn)", "PA (ffn)", "LoRA (ffn)"] ax.legend(legends, loc="lower right", fontsize=12, bbox_to_anchor=(1.11, 0.00)) ax.set(xlabel=r"Fine-tuned Parameters (\%)", ylabel=ylabel) ax.grid() ax.set_facecolor("white") fig.set_size_inches(5, 3) fig.savefig("mt_modification_position.pdf", bbox_inches='tight') # plot_overview() plot_intro() # plot_table4()
[ "matplotlib.pyplot.style.use", "matplotlib.pyplot.subplots" ]
[((377, 406), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""seaborn-deep"""'], {}), "('seaborn-deep')\n", (390, 406), True, 'import matplotlib.pyplot as plt\n'), ((2544, 2562), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (2556, 2562), True, 'import matplotlib.pyplot as plt\n'), ((6864, 6882), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (6876, 6882), True, 'import matplotlib.pyplot as plt\n'), ((7298, 7316), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (7310, 7316), True, 'import matplotlib.pyplot as plt\n'), ((7776, 7794), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (7788, 7794), True, 'import matplotlib.pyplot as plt\n'), ((8988, 9006), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (9000, 9006), True, 'import matplotlib.pyplot as plt\n')]
from __future__ import print_function from apiclient import discovery, errors from httplib2 import Http from oauth2client import file, client, tools import json # Set up auth for the API SCOPES = 'https://www.googleapis.com/auth/drive.readonly.metadata' # SCOPES = 'https://www.googleapis.com/auth/drive' store = file.Storage('storage.json') # This stores the OAuth tokens. creds = store.get() if not creds or creds.invalid: flow = client.flow_from_clientsecrets('client_id.json', SCOPES) creds = tools.run_flow(flow, store) DRIVE = discovery.build('drive', 'v3', http=creds.authorize(Http())) # Query! (While chasing the next page tokens.) has_next = True files_by_hash = {} files_with_md5 = 0 # How many files with valid MD5's did we count? total_files_seen = 0 # How many files were seen, MD5 or not? request = DRIVE.files().list(fields='*') response = request.execute() while has_next: files = response.get('files', []) for f in files: if 'md5Checksum' in f: if f['md5Checksum'] in files_by_hash: files_by_hash[f['md5Checksum']].append(f) print("%s conflicts with %s, both with a MD5 hash of %s." % (f['id'], files_by_hash[f['md5Checksum']][0]['id'], f['md5Checksum'])) else: files_by_hash[f['md5Checksum']] = [f] files_with_md5+=1 total_files_seen += len(files) print("%d files analyzed." % total_files_seen) if response.get('nextPageToken'): request = DRIVE.files().list_next(previous_request=request, previous_response=response) response = request.execute() else: has_next = False print("%d total files checked for MD5." % files_with_md5) with open('md5dedup.json', 'w') as outfile: json.dump(files_by_hash, outfile)
[ "oauth2client.file.Storage", "json.dump", "httplib2.Http", "oauth2client.client.flow_from_clientsecrets", "oauth2client.tools.run_flow" ]
[((315, 343), 'oauth2client.file.Storage', 'file.Storage', (['"""storage.json"""'], {}), "('storage.json')\n", (327, 343), False, 'from oauth2client import file, client, tools\n'), ((438, 494), 'oauth2client.client.flow_from_clientsecrets', 'client.flow_from_clientsecrets', (['"""client_id.json"""', 'SCOPES'], {}), "('client_id.json', SCOPES)\n", (468, 494), False, 'from oauth2client import file, client, tools\n'), ((507, 534), 'oauth2client.tools.run_flow', 'tools.run_flow', (['flow', 'store'], {}), '(flow, store)\n', (521, 534), False, 'from oauth2client import file, client, tools\n'), ((1747, 1780), 'json.dump', 'json.dump', (['files_by_hash', 'outfile'], {}), '(files_by_hash, outfile)\n', (1756, 1780), False, 'import json\n'), ((595, 601), 'httplib2.Http', 'Http', ([], {}), '()\n', (599, 601), False, 'from httplib2 import Http\n')]
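As a quick consistency illustration of the span convention (an assumption read off the rows themselves, not documented anywhere in this dump): in the first tuple above, `((315, 343), 'oauth2client.file.Storage', ...)`, the span length matches the full call expression it annotates.

# Hypothetical check against the row above; `span` and `call_text`
# are copied from its extract_api entry.
span = (315, 343)
call_text = "file.Storage('storage.json')"
assert span[1] - span[0] == len(call_text)  # 28 characters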
import random import math import numpy from planner.state_space import test_goal, test_parent_operator, StateSpace, Solution, Plan from misc.numerical import INF from misc.functions import randomize def random_policy(current_vertex): edges = current_vertex.get_successors() if not edges: return None # current_vertex return random.choice(edges) def greedy_policy(current_vertex, weight=1, shuffle=True): # TODO: function that returns the policy # TODO: use evaluators edges = current_vertex.get_successors() if not edges: return None if shuffle: edges = randomize(edges) return min(edges, key=lambda e: e.cost + weight*e.sink.get_h_cost()) ################################################## def random_walk(start, goal, generator, _=None, policy=random_policy, max_steps=INF, debug=None, **kwargs): space = StateSpace(generator, start, max_extensions=INF, **kwargs) current_vertex = space.root edge_path = [] while space.is_active() and len(edge_path) < max_steps: #current_vertex.generate_all() space.new_iteration(current_vertex) if debug is not None: debug(current_vertex) if test_goal(current_vertex, goal): operator_path = [edge.operator for edge in edge_path] plan = Plan(start, operator_path) return Solution(plan, space) #return space.solution(current_vertex) edge = policy(current_vertex) if edge is None: break edge_path.append(edge) current_vertex = edge.sink return space.failure() ################################################## MAX_ROLLOUT = 100 # 100 | INF class TreeNode(object): def __init__(self, vertex, parent_edge=None, parent_node=None): self.vertex = vertex self.parent_edge = parent_edge self.parent_node = parent_node self.rollouts = [] # TODO: rename to estimates? self.children = [] # TODO: map from edges to nodes if self.parent_node is not None: self.parent_node.children.append(self) def is_leaf(self): return not bool(self.children) # def is_explored(self): # return set(self.vertex.get_successors()) == {child.vertex for child in self.children} def num_rollouts(self): return len(self.rollouts) def get_estimate(self): if not self.rollouts: return INF return numpy.average(self.rollouts) def get_uct(self, c=math.sqrt(2)): # https://en.wikipedia.org/wiki/Monte_Carlo_tree_search estimate = -self.get_estimate() if (self.parent_node is None) or (c == 0): return estimate diverse = math.sqrt(math.log(self.parent_node.num_rollouts()) / self.num_rollouts()) if c == INF: return diverse return estimate + c*diverse def ancestors(self): if self.parent_node is None: return [] return self.parent_node.ancestors() + [self.parent_node] def descendants(self): nodes = [self] for child in self.children: nodes.extend(child.descendants()) return nodes def random_leaf(self): if self.is_leaf(): # is_leaf | is_explored return self child = random.choice(self.children) return child.random_leaf() def uniform_leaf(self): leaves = list(filter(TreeNode.is_leaf, self.descendants())) return random.choice(leaves) def uct_leaf(self, **kwargs): if self.is_leaf(): # is_leaf | is_explored return self best_child = max(self.children, key=lambda n: n.get_uct(**kwargs)) return best_child.uct_leaf() def __repr__(self): return '{}({})'.format(self.__class__.__name__, self.vertex) ################################################## def goal_rollout(vertex, goal): if test_goal(vertex, goal): return 0 return 1 # TODO: min action cost def deadend_rollout(vertex, goal): if test_goal(vertex, goal): return 0 if not vertex.get_successors(): return MAX_ROLLOUT return 1 def heuristic_rollout(vertex, goal): return vertex.get_h_cost() def 
simulation(start_vertex, goal, policy=random_policy, max_steps=5): current_vertex = start_vertex path = [] while len(path) < max_steps: if test_goal(current_vertex, goal): # TODO: greedy version break edge = policy(current_vertex) if edge is None: break path.append(edge) current_vertex = edge.sink return path def simulated_rollout(vertex, goal, evaluator=deadend_rollout, **kwargs): path = simulation(vertex, goal, **kwargs) cost = 0 estimates = [cost + evaluator(vertex, goal)] for edge in path: cost += edge.cost estimates.append(cost + evaluator(vertex, goal)) return estimates[-1] #return numpy.average(estimates) def simulated_rollouts(vertex, goal, num=1, **kwargs): assert num >= 1 return numpy.average([simulated_rollout(vertex, goal, **kwargs) for _ in range(num)]) ################################################## def mcts(start, goal, generator, _=None, debug=None, **kwargs): # TODO: dynamic programming instead of independent tree # https://gist.github.com/qpwo/c538c6f73727e254fdc7fab81024f6e1 # https://github.com/pbsinclair42/MCTS/blob/master/mcts.py # https://github.com/int8/monte-carlo-tree-search/blob/master/mctspy/tree/search.py space = StateSpace(generator, start, max_extensions=INF, **kwargs) root = TreeNode(space.root) while space.is_active(): #leaf = root.uniform_leaf() #leaf = root.random_leaf() leaf = root.uct_leaf() vertex = leaf.vertex space.new_iteration(vertex) if debug is not None: debug(vertex) if test_goal(vertex, goal): return space.solution(vertex) for edge in vertex.get_successors(): # TODO: sample a subset new_vertex = edge.sink if test_goal(new_vertex, goal): return space.solution(new_vertex) node = TreeNode(new_vertex, parent_edge=edge, parent_node=leaf) #rollout = goal_rollout(new_vertex, goal) #rollout = deadend_rollout(new_vertex, goal) #rollout = heuristic_rollout(new_vertex, goal) #rollout = simulated_rollout(new_vertex, goal) rollout = simulated_rollouts(new_vertex, goal, num=3) for ancestor in reversed(node.ancestors() + [node]): ancestor.rollouts.append(rollout) if ancestor.parent_edge is not None: rollout += ancestor.parent_edge.cost return space.failure()
[ "planner.state_space.Plan", "numpy.average", "math.sqrt", "planner.state_space.StateSpace", "random.choice", "planner.state_space.test_goal", "planner.state_space.Solution", "misc.functions.randomize" ]
[((346, 366), 'random.choice', 'random.choice', (['edges'], {}), '(edges)\n', (359, 366), False, 'import random\n'), ((876, 934), 'planner.state_space.StateSpace', 'StateSpace', (['generator', 'start'], {'max_extensions': 'INF'}), '(generator, start, max_extensions=INF, **kwargs)\n', (886, 934), False, 'from planner.state_space import test_goal, test_parent_operator, StateSpace, Solution, Plan\n'), ((3901, 3924), 'planner.state_space.test_goal', 'test_goal', (['vertex', 'goal'], {}), '(vertex, goal)\n', (3910, 3924), False, 'from planner.state_space import test_goal, test_parent_operator, StateSpace, Solution, Plan\n'), ((4023, 4046), 'planner.state_space.test_goal', 'test_goal', (['vertex', 'goal'], {}), '(vertex, goal)\n', (4032, 4046), False, 'from planner.state_space import test_goal, test_parent_operator, StateSpace, Solution, Plan\n'), ((5542, 5600), 'planner.state_space.StateSpace', 'StateSpace', (['generator', 'start'], {'max_extensions': 'INF'}), '(generator, start, max_extensions=INF, **kwargs)\n', (5552, 5600), False, 'from planner.state_space import test_goal, test_parent_operator, StateSpace, Solution, Plan\n'), ((613, 629), 'misc.functions.randomize', 'randomize', (['edges'], {}), '(edges)\n', (622, 629), False, 'from misc.functions import randomize\n'), ((1204, 1235), 'planner.state_space.test_goal', 'test_goal', (['current_vertex', 'goal'], {}), '(current_vertex, goal)\n', (1213, 1235), False, 'from planner.state_space import test_goal, test_parent_operator, StateSpace, Solution, Plan\n'), ((2450, 2478), 'numpy.average', 'numpy.average', (['self.rollouts'], {}), '(self.rollouts)\n', (2463, 2478), False, 'import numpy\n'), ((2503, 2515), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (2512, 2515), False, 'import math\n'), ((3298, 3326), 'random.choice', 'random.choice', (['self.children'], {}), '(self.children)\n', (3311, 3326), False, 'import random\n'), ((3473, 3494), 'random.choice', 'random.choice', (['leaves'], {}), '(leaves)\n', (3486, 3494), False, 'import random\n'), ((4374, 4405), 'planner.state_space.test_goal', 'test_goal', (['current_vertex', 'goal'], {}), '(current_vertex, goal)\n', (4383, 4405), False, 'from planner.state_space import test_goal, test_parent_operator, StateSpace, Solution, Plan\n'), ((5896, 5919), 'planner.state_space.test_goal', 'test_goal', (['vertex', 'goal'], {}), '(vertex, goal)\n', (5905, 5919), False, 'from planner.state_space import test_goal, test_parent_operator, StateSpace, Solution, Plan\n'), ((1322, 1348), 'planner.state_space.Plan', 'Plan', (['start', 'operator_path'], {}), '(start, operator_path)\n', (1326, 1348), False, 'from planner.state_space import test_goal, test_parent_operator, StateSpace, Solution, Plan\n'), ((1368, 1389), 'planner.state_space.Solution', 'Solution', (['plan', 'space'], {}), '(plan, space)\n', (1376, 1389), False, 'from planner.state_space import test_goal, test_parent_operator, StateSpace, Solution, Plan\n'), ((6082, 6109), 'planner.state_space.test_goal', 'test_goal', (['new_vertex', 'goal'], {}), '(new_vertex, goal)\n', (6091, 6109), False, 'from planner.state_space import test_goal, test_parent_operator, StateSpace, Solution, Plan\n')]
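For readability, the selection rule implemented by `TreeNode.get_uct` in the record above can be restated on its own: the score is the negated mean rollout cost plus an exploration bonus. A standalone sketch (the function name is mine, not the record's); note the record additionally special-cases c == 0 (pure exploitation) and c == INF (pure exploration).

import math

def uct_score(mean_cost, child_rollouts, parent_rollouts, c=math.sqrt(2)):
    """UCT: exploit low estimated cost, explore rarely-visited children."""
    exploit = -mean_cost
    explore = math.sqrt(math.log(parent_rollouts) / child_rollouts)
    return exploit + c * explore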
from datetime import datetime, timedelta from string import ascii_uppercase from dateutil.parser import parse from flask import abort, flash, redirect, render_template, request, url_for from flask_login import current_user, login_required from markupsafe import Markup from notifications_python_client.errors import HTTPError from notifications_utils.formatters import nl2br from notifications_utils.recipients import first_column_headings from app import current_service, service_api_client, template_statistics_client from app.main import main from app.main.forms import ( ChooseTemplateType, EmailTemplateForm, LetterTemplateForm, SearchTemplatesForm, SetTemplateSenderForm, SMSTemplateForm, ) from app.main.views.send import get_example_csv_rows, get_sender_details from app.template_previews import TemplatePreview, get_page_count_for_letter from app.utils import ( email_or_sms_not_enabled, get_template, user_has_permissions, ) form_objects = { 'email': EmailTemplateForm, 'sms': SMSTemplateForm, 'letter': LetterTemplateForm } page_headings = { 'email': 'email', 'sms': 'text message' } @main.route("/services/<service_id>/templates/<uuid:template_id>") @login_required @user_has_permissions('view_activity', 'send_messages') def view_template(service_id, template_id): if not current_user.has_permissions('view_activity'): return redirect(url_for( '.send_one_off', service_id=service_id, template_id=template_id )) template = service_api_client.get_service_template(service_id, str(template_id))['data'] if template["template_type"] == "letter": letter_contact_details = service_api_client.get_letter_contacts(service_id) default_letter_contact_block_id = next( (x['id'] for x in letter_contact_details if x['is_default']), None ) else: default_letter_contact_block_id = None return render_template( 'views/templates/template.html', template=get_template( template, current_service, expand_emails=True, letter_preview_url=url_for( '.view_letter_template_preview', service_id=service_id, template_id=template_id, filetype='png', ), show_recipient=True, page_count=get_page_count_for_letter(template), ), default_letter_contact_block_id=default_letter_contact_block_id, ) @main.route("/services/<service_id>/start-tour/<uuid:template_id>") @login_required @user_has_permissions('view_activity') def start_tour(service_id, template_id): template = service_api_client.get_service_template(service_id, str(template_id))['data'] if template['template_type'] != 'sms': abort(404) return render_template( 'views/templates/start-tour.html', template=get_template( template, current_service, show_recipient=True, ), help='1', ) @main.route("/services/<service_id>/templates") @main.route("/services/<service_id>/templates/<template_type>") @login_required @user_has_permissions('view_activity', 'send_messages') def choose_template(service_id, template_type='all'): templates = service_api_client.get_service_templates(service_id)['data'] letters_available = ( 'letter' in current_service['permissions'] and current_user.has_permissions('view_activity') ) available_template_types = list(filter(None, ( 'email', 'sms', 'letter' if letters_available else None, ))) templates = [ template for template in templates if template['template_type'] in available_template_types ] has_multiple_template_types = len({ template['template_type'] for template in templates }) > 1 template_nav_items = [ (label, key, url_for('.choose_template', service_id=current_service['id'], template_type=key), '') for label, key in filter(None, [ ('All', 'all'), ('Text message', 
'sms'), ('Email', 'email'), ('Letter', 'letter') if letters_available else None, ]) ] templates_on_page = [ template for template in templates if ( template_type in ['all', template['template_type']] and template['template_type'] in available_template_types ) ] if current_user.has_permissions('view_activity'): page_title = 'Templates' else: page_title = 'Choose a template' return render_template( 'views/templates/choose.html', page_title=page_title, templates=templates_on_page, show_search_box=(len(templates_on_page) > 7), show_template_nav=has_multiple_template_types and (len(templates) > 2), template_nav_items=template_nav_items, template_type=template_type, search_form=SearchTemplatesForm(), ) @main.route("/services/<service_id>/templates/<template_id>.<filetype>") @login_required @user_has_permissions('view_activity', 'send_messages') def view_letter_template_preview(service_id, template_id, filetype): if filetype not in ('pdf', 'png'): abort(404) db_template = service_api_client.get_service_template(service_id, template_id)['data'] return TemplatePreview.from_database_object(db_template, filetype, page=request.args.get('page')) def _view_template_version(service_id, template_id, version, letters_as_pdf=False): return dict(template=get_template( service_api_client.get_service_template(service_id, template_id, version=version)['data'], current_service, expand_emails=True, letter_preview_url=url_for( '.view_template_version_preview', service_id=service_id, template_id=template_id, version=version, filetype='png', ) if not letters_as_pdf else None )) @main.route("/services/<service_id>/templates/<template_id>/version/<int:version>") @login_required @user_has_permissions('view_activity') def view_template_version(service_id, template_id, version): return render_template( 'views/templates/template_history.html', **_view_template_version(service_id=service_id, template_id=template_id, version=version) ) @main.route("/services/<service_id>/templates/<template_id>/version/<int:version>.<filetype>") @login_required @user_has_permissions('view_activity') def view_template_version_preview(service_id, template_id, version, filetype): db_template = service_api_client.get_service_template(service_id, template_id, version=version)['data'] return TemplatePreview.from_database_object(db_template, filetype) @main.route("/services/<service_id>/templates/add", methods=['GET', 'POST']) @login_required @user_has_permissions('manage_templates') def add_template_by_type(service_id): form = ChooseTemplateType( include_letters='letter' in current_service['permissions'] ) if form.validate_on_submit(): if form.template_type.data == 'letter': blank_letter = service_api_client.create_service_template( 'Untitled', 'letter', 'Body', service_id, 'Main heading', 'normal', ) return redirect(url_for( '.view_template', service_id=service_id, template_id=blank_letter['data']['id'], )) if email_or_sms_not_enabled(form.template_type.data, current_service['permissions']): return redirect(url_for( '.action_blocked', service_id=service_id, notification_type=form.template_type.data, return_to='add_new_template', template_id='0' )) else: return redirect(url_for( '.add_service_template', service_id=service_id, template_type=form.template_type.data, )) return render_template('views/templates/add.html', form=form) @main.route("/services/<service_id>/templates/action-blocked/<notification_type>/<return_to>/<template_id>") @login_required @user_has_permissions('manage_templates') def action_blocked(service_id, notification_type, 
return_to, template_id): if notification_type == 'sms': notification_type = 'text messages' elif notification_type == 'email': notification_type = 'emails' return render_template( 'views/templates/action_blocked.html', service_id=service_id, notification_type=notification_type, return_to=return_to, template_id=template_id ) @main.route("/services/<service_id>/templates/add-<template_type>", methods=['GET', 'POST']) @login_required @user_has_permissions('manage_templates') def add_service_template(service_id, template_type): if template_type not in ['sms', 'email', 'letter']: abort(404) if 'letter' not in current_service['permissions'] and template_type == 'letter': abort(403) form = form_objects[template_type]() if form.validate_on_submit(): if form.process_type.data == 'priority': abort_403_if_not_admin_user() try: new_template = service_api_client.create_service_template( form.name.data, template_type, form.template_content.data, service_id, form.subject.data if hasattr(form, 'subject') else None, form.process_type.data ) except HTTPError as e: if ( e.status_code == 400 and 'content' in e.message and any(['character count greater than' in x for x in e.message['content']]) ): form.template_content.errors.extend(e.message['content']) else: raise e else: return redirect( url_for('.view_template', service_id=service_id, template_id=new_template['data']['id']) ) if email_or_sms_not_enabled(template_type, current_service['permissions']): return redirect(url_for( '.action_blocked', service_id=service_id, notification_type=template_type, return_to='templates', template_id='0' )) else: return render_template( 'views/edit-{}-template.html'.format(template_type), form=form, template_type=template_type, heading_action='Add', ) def abort_403_if_not_admin_user(): if not current_user.platform_admin: abort(403) @main.route("/services/<service_id>/templates/<template_id>/edit", methods=['GET', 'POST']) @login_required @user_has_permissions('manage_templates') def edit_service_template(service_id, template_id): template = service_api_client.get_service_template(service_id, template_id)['data'] template['template_content'] = template['content'] form = form_objects[template['template_type']](**template) if form.validate_on_submit(): if form.process_type.data != template['process_type']: abort_403_if_not_admin_user() subject = form.subject.data if hasattr(form, 'subject') else None new_template = get_template({ 'name': form.name.data, 'content': form.template_content.data, 'subject': subject, 'template_type': template['template_type'], 'id': template['id'], 'process_type': form.process_type.data, 'reply_to_text': template['reply_to_text'] }, current_service) template_change = get_template(template, current_service).compare_to(new_template) if template_change.placeholders_added and not request.form.get('confirm'): example_column_headings = ( first_column_headings[new_template.template_type] + list(new_template.placeholders) ) return render_template( 'views/templates/breaking-change.html', template_change=template_change, new_template=new_template, column_headings=list(ascii_uppercase[:len(example_column_headings)]), example_rows=[ example_column_headings, get_example_csv_rows(new_template), get_example_csv_rows(new_template) ], form=form ) try: service_api_client.update_service_template( template_id, form.name.data, template['template_type'], form.template_content.data, service_id, subject, form.process_type.data ) except HTTPError as e: if e.status_code == 400: if 'content' in e.message and any(['character 
count greater than' in x for x in e.message['content']]): form.template_content.errors.extend(e.message['content']) else: raise e else: raise e else: return redirect(url_for( '.view_template', service_id=service_id, template_id=template_id )) db_template = service_api_client.get_service_template(service_id, template_id)['data'] if email_or_sms_not_enabled(db_template['template_type'], current_service['permissions']): return redirect(url_for( '.action_blocked', service_id=service_id, notification_type=db_template['template_type'], return_to='view_template', template_id=template_id )) else: return render_template( 'views/edit-{}-template.html'.format(template['template_type']), form=form, template_id=template_id, template_type=template['template_type'], heading_action='Edit' ) @main.route("/services/<service_id>/templates/<template_id>/delete", methods=['GET', 'POST']) @login_required @user_has_permissions('manage_templates') def delete_service_template(service_id, template_id): template = service_api_client.get_service_template(service_id, template_id)['data'] if request.method == 'POST': service_api_client.delete_service_template(service_id, template_id) return redirect(url_for( '.choose_template', service_id=service_id, )) try: last_used_notification = template_statistics_client.get_template_statistics_for_template( service_id, template['id'] ) message = 'It was last used {} ago'.format( 'more than seven days' if not last_used_notification else get_human_readable_delta( parse(last_used_notification['created_at']).replace(tzinfo=None), datetime.utcnow() ) ) except HTTPError as e: if e.status_code == 404: message = None else: raise e return render_template( 'views/templates/template.html', template_delete_confirmation_message=( 'Are you sure you want to delete {}?'.format(template['name']), message, ), template=get_template( template, current_service, expand_emails=True, letter_preview_url=url_for( '.view_letter_template_preview', service_id=service_id, template_id=template['id'], filetype='png', ), show_recipient=True, ), ) @main.route("/services/<service_id>/templates/<template_id>/redact", methods=['GET']) @login_required @user_has_permissions('manage_templates') def confirm_redact_template(service_id, template_id): template = service_api_client.get_service_template(service_id, template_id)['data'] return render_template( 'views/templates/template.html', template=get_template( template, current_service, expand_emails=True, letter_preview_url=url_for( '.view_letter_template_preview', service_id=service_id, template_id=template_id, filetype='png', ), show_recipient=True, ), show_redaction_message=True, ) @main.route("/services/<service_id>/templates/<template_id>/redact", methods=['POST']) @login_required @user_has_permissions('manage_templates') def redact_template(service_id, template_id): service_api_client.redact_service_template(service_id, template_id) flash( 'Personalised content will be hidden for messages sent with this template', 'default_with_tick' ) return redirect(url_for( '.view_template', service_id=service_id, template_id=template_id, )) @main.route('/services/<service_id>/templates/<template_id>/versions') @login_required @user_has_permissions('view_activity') def view_template_versions(service_id, template_id): return render_template( 'views/templates/choose_history.html', versions=[ get_template( template, current_service, expand_emails=True, letter_preview_url=url_for( '.view_template_version_preview', service_id=service_id, template_id=template_id, 
version=template['version'], filetype='png', ) ) for template in service_api_client.get_service_template_versions(service_id, template_id)['data'] ] ) @main.route('/services/<service_id>/templates/<template_id>/set-template-sender', methods=['GET', 'POST']) @login_required @user_has_permissions('manage_templates') def set_template_sender(service_id, template_id): template = service_api_client.get_service_template(service_id, template_id)['data'] sender_details = get_template_sender_form_dict(service_id, template) no_senders = sender_details.get('no_senders', False) form = SetTemplateSenderForm( sender=sender_details['current_choice'], sender_choices=sender_details['value_and_label'], ) option_hints = {sender_details['default_sender']: '(Default)'} if form.validate_on_submit(): service_api_client.update_service_template_sender( service_id, template_id, form.sender.data if form.sender.data else None, ) return redirect(url_for('.view_template', service_id=service_id, template_id=template_id)) return render_template( 'views/templates/set-template-sender.html', form=form, template_id=template_id, no_senders=no_senders, option_hints=option_hints ) def get_template_sender_form_dict(service_id, template): context = { 'email': { 'field_name': 'email_address' }, 'letter': { 'field_name': 'contact_block' }, 'sms': { 'field_name': 'sms_sender' } }[template['template_type']] sender_format = context['field_name'] service_senders = get_sender_details(service_id, template['template_type']) context['default_sender'] = next( (x['id'] for x in service_senders if x['is_default']), "Not set" ) if not service_senders: context['no_senders'] = True context['value_and_label'] = [(sender['id'], Markup(nl2br(sender[sender_format]))) for sender in service_senders] context['value_and_label'].insert(0, ('', 'Blank')) # Add blank option to start of list context['current_choice'] = template['service_letter_contact'] if template['service_letter_contact'] else '' return context def get_last_use_message(template_name, template_statistics): try: most_recent_use = max( parse(template_stats['updated_at']).replace(tzinfo=None) for template_stats in template_statistics ) except ValueError: return '{} has never been used'.format(template_name) return '{} was last used {} ago'.format( template_name, get_human_readable_delta(most_recent_use, datetime.utcnow()) ) def get_human_readable_delta(from_time, until_time): delta = until_time - from_time if delta < timedelta(seconds=60): return 'under a minute' elif delta < timedelta(hours=1): minutes = int(delta.seconds / 60) return '{} minute{}'.format(minutes, '' if minutes == 1 else 's') elif delta < timedelta(days=1): hours = int(delta.seconds / 3600) return '{} hour{}'.format(hours, '' if hours == 1 else 's') else: days = delta.days return '{} day{}'.format(days, '' if days == 1 else 's') def should_show_template(template_type): return ( template_type != 'letter' or 'letter' in current_service['permissions'] )
[ "flask.flash", "flask.request.form.get", "datetime.datetime.utcnow", "flask.url_for", "app.main.forms.SetTemplateSenderForm", "app.template_statistics_client.get_template_statistics_for_template", "app.service_api_client.create_service_template", "app.utils.user_has_permissions", "app.main.forms.ChooseTemplateType", "app.service_api_client.get_service_template", "flask.request.args.get", "app.main.views.send.get_sender_details", "flask.abort", "app.main.views.send.get_example_csv_rows", "app.service_api_client.get_service_template_versions", "app.main.main.route", "datetime.timedelta", "flask.render_template", "notifications_utils.formatters.nl2br", "app.service_api_client.get_letter_contacts", "app.service_api_client.update_service_template", "dateutil.parser.parse", "app.utils.email_or_sms_not_enabled", "app.service_api_client.delete_service_template", "app.template_previews.get_page_count_for_letter", "app.service_api_client.update_service_template_sender", "app.service_api_client.get_service_templates", "app.template_previews.TemplatePreview.from_database_object", "app.main.forms.SearchTemplatesForm", "app.utils.get_template", "flask_login.current_user.has_permissions", "app.service_api_client.redact_service_template" ]
[((1158, 1223), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/<uuid:template_id>"""'], {}), "('/services/<service_id>/templates/<uuid:template_id>')\n", (1168, 1223), False, 'from app.main import main\n'), ((1241, 1295), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""view_activity"""', '"""send_messages"""'], {}), "('view_activity', 'send_messages')\n", (1261, 1295), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((2520, 2586), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/start-tour/<uuid:template_id>"""'], {}), "('/services/<service_id>/start-tour/<uuid:template_id>')\n", (2530, 2586), False, 'from app.main import main\n'), ((2604, 2641), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""view_activity"""'], {}), "('view_activity')\n", (2624, 2641), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((3065, 3111), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates"""'], {}), "('/services/<service_id>/templates')\n", (3075, 3111), False, 'from app.main import main\n'), ((3113, 3175), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/<template_type>"""'], {}), "('/services/<service_id>/templates/<template_type>')\n", (3123, 3175), False, 'from app.main import main\n'), ((3193, 3247), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""view_activity"""', '"""send_messages"""'], {}), "('view_activity', 'send_messages')\n", (3213, 3247), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((5040, 5111), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/<template_id>.<filetype>"""'], {}), "('/services/<service_id>/templates/<template_id>.<filetype>')\n", (5050, 5111), False, 'from app.main import main\n'), ((5129, 5183), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""view_activity"""', '"""send_messages"""'], {}), "('view_activity', 'send_messages')\n", (5149, 5183), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((6046, 6133), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/<template_id>/version/<int:version>"""'], {}), "(\n '/services/<service_id>/templates/<template_id>/version/<int:version>')\n", (6056, 6133), False, 'from app.main import main\n'), ((6146, 6183), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""view_activity"""'], {}), "('view_activity')\n", (6166, 6183), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((6429, 6532), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/<template_id>/version/<int:version>.<filetype>"""'], {}), "(\n '/services/<service_id>/templates/<template_id>/version/<int:version>.<filetype>'\n )\n", (6439, 6532), False, 'from app.main import main\n'), ((6540, 6577), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""view_activity"""'], {}), "('view_activity')\n", (6560, 6577), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((6839, 6914), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/add"""'], {'methods': "['GET', 'POST']"}), "('/services/<service_id>/templates/add', methods=['GET', 'POST'])\n", (6849, 6914), False, 'from app.main import main\n'), ((6932, 6972), 
'app.utils.user_has_permissions', 'user_has_permissions', (['"""manage_templates"""'], {}), "('manage_templates')\n", (6952, 6972), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((8259, 8376), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/action-blocked/<notification_type>/<return_to>/<template_id>"""'], {}), "(\n '/services/<service_id>/templates/action-blocked/<notification_type>/<return_to>/<template_id>'\n )\n", (8269, 8376), False, 'from app.main import main\n'), ((8384, 8424), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""manage_templates"""'], {}), "('manage_templates')\n", (8404, 8424), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((8877, 8973), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/add-<template_type>"""'], {'methods': "['GET', 'POST']"}), "('/services/<service_id>/templates/add-<template_type>', methods=\n ['GET', 'POST'])\n", (8887, 8973), False, 'from app.main import main\n'), ((8986, 9026), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""manage_templates"""'], {}), "('manage_templates')\n", (9006, 9026), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((10899, 10994), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/<template_id>/edit"""'], {'methods': "['GET', 'POST']"}), "('/services/<service_id>/templates/<template_id>/edit', methods=[\n 'GET', 'POST'])\n", (10909, 10994), False, 'from app.main import main\n'), ((11007, 11047), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""manage_templates"""'], {}), "('manage_templates')\n", (11027, 11047), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((14335, 14432), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/<template_id>/delete"""'], {'methods': "['GET', 'POST']"}), "('/services/<service_id>/templates/<template_id>/delete', methods\n =['GET', 'POST'])\n", (14345, 14432), False, 'from app.main import main\n'), ((14445, 14485), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""manage_templates"""'], {}), "('manage_templates')\n", (14465, 14485), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((16027, 16116), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/<template_id>/redact"""'], {'methods': "['GET']"}), "('/services/<service_id>/templates/<template_id>/redact', methods\n =['GET'])\n", (16037, 16116), False, 'from app.main import main\n'), ((16129, 16169), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""manage_templates"""'], {}), "('manage_templates')\n", (16149, 16169), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((16802, 16892), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/<template_id>/redact"""'], {'methods': "['POST']"}), "('/services/<service_id>/templates/<template_id>/redact', methods\n =['POST'])\n", (16812, 16892), False, 'from app.main import main\n'), ((16905, 16945), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""manage_templates"""'], {}), "('manage_templates')\n", (16925, 16945), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((17325, 17394), 'app.main.main.route', 'main.route', 
(['"""/services/<service_id>/templates/<template_id>/versions"""'], {}), "('/services/<service_id>/templates/<template_id>/versions')\n", (17335, 17394), False, 'from app.main import main\n'), ((17412, 17449), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""view_activity"""'], {}), "('view_activity')\n", (17432, 17449), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((18150, 18260), 'app.main.main.route', 'main.route', (['"""/services/<service_id>/templates/<template_id>/set-template-sender"""'], {'methods': "['GET', 'POST']"}), "('/services/<service_id>/templates/<template_id>/set-template-sender'\n , methods=['GET', 'POST'])\n", (18160, 18260), False, 'from app.main import main\n'), ((18273, 18313), 'app.utils.user_has_permissions', 'user_has_permissions', (['"""manage_templates"""'], {}), "('manage_templates')\n", (18293, 18313), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((4503, 4548), 'flask_login.current_user.has_permissions', 'current_user.has_permissions', (['"""view_activity"""'], {}), "('view_activity')\n", (4531, 4548), False, 'from flask_login import current_user, login_required\n'), ((6776, 6835), 'app.template_previews.TemplatePreview.from_database_object', 'TemplatePreview.from_database_object', (['db_template', 'filetype'], {}), '(db_template, filetype)\n', (6812, 6835), False, 'from app.template_previews import TemplatePreview, get_page_count_for_letter\n'), ((7023, 7101), 'app.main.forms.ChooseTemplateType', 'ChooseTemplateType', ([], {'include_letters': "('letter' in current_service['permissions'])"}), "(include_letters='letter' in current_service['permissions'])\n", (7041, 7101), False, 'from app.main.forms import ChooseTemplateType, EmailTemplateForm, LetterTemplateForm, SearchTemplatesForm, SetTemplateSenderForm, SMSTemplateForm\n'), ((8201, 8255), 'flask.render_template', 'render_template', (['"""views/templates/add.html"""'], {'form': 'form'}), "('views/templates/add.html', form=form)\n", (8216, 8255), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((8667, 8836), 'flask.render_template', 'render_template', (['"""views/templates/action_blocked.html"""'], {'service_id': 'service_id', 'notification_type': 'notification_type', 'return_to': 'return_to', 'template_id': 'template_id'}), "('views/templates/action_blocked.html', service_id=\n service_id, notification_type=notification_type, return_to=return_to,\n template_id=template_id)\n", (8682, 8836), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((10294, 10365), 'app.utils.email_or_sms_not_enabled', 'email_or_sms_not_enabled', (['template_type', "current_service['permissions']"], {}), "(template_type, current_service['permissions'])\n", (10318, 10365), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((13723, 13814), 'app.utils.email_or_sms_not_enabled', 'email_or_sms_not_enabled', (["db_template['template_type']", "current_service['permissions']"], {}), "(db_template['template_type'], current_service[\n 'permissions'])\n", (13747, 13814), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((16997, 17064), 'app.service_api_client.redact_service_template', 'service_api_client.redact_service_template', (['service_id', 'template_id'], {}), '(service_id, template_id)\n', (17039, 17064), False, 'from app import current_service, 
service_api_client, template_statistics_client\n'), ((17070, 17181), 'flask.flash', 'flash', (['"""Personalised content will be hidden for messages sent with this template"""', '"""default_with_tick"""'], {}), "(\n 'Personalised content will be hidden for messages sent with this template',\n 'default_with_tick')\n", (17075, 17181), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((18594, 18710), 'app.main.forms.SetTemplateSenderForm', 'SetTemplateSenderForm', ([], {'sender': "sender_details['current_choice']", 'sender_choices': "sender_details['value_and_label']"}), "(sender=sender_details['current_choice'],\n sender_choices=sender_details['value_and_label'])\n", (18615, 18710), False, 'from app.main.forms import ChooseTemplateType, EmailTemplateForm, LetterTemplateForm, SearchTemplatesForm, SetTemplateSenderForm, SMSTemplateForm\n'), ((19121, 19270), 'flask.render_template', 'render_template', (['"""views/templates/set-template-sender.html"""'], {'form': 'form', 'template_id': 'template_id', 'no_senders': 'no_senders', 'option_hints': 'option_hints'}), "('views/templates/set-template-sender.html', form=form,\n template_id=template_id, no_senders=no_senders, option_hints=option_hints)\n", (19136, 19270), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((19697, 19754), 'app.main.views.send.get_sender_details', 'get_sender_details', (['service_id', "template['template_type']"], {}), "(service_id, template['template_type'])\n", (19715, 19754), False, 'from app.main.views.send import get_example_csv_rows, get_sender_details\n'), ((1351, 1396), 'flask_login.current_user.has_permissions', 'current_user.has_permissions', (['"""view_activity"""'], {}), "('view_activity')\n", (1379, 1396), False, 'from flask_login import current_user, login_required\n'), ((1690, 1740), 'app.service_api_client.get_letter_contacts', 'service_api_client.get_letter_contacts', (['service_id'], {}), '(service_id)\n', (1728, 1740), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((2829, 2839), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (2834, 2839), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((3318, 3370), 'app.service_api_client.get_service_templates', 'service_api_client.get_service_templates', (['service_id'], {}), '(service_id)\n', (3358, 3370), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((3469, 3514), 'flask_login.current_user.has_permissions', 'current_user.has_permissions', (['"""view_activity"""'], {}), "('view_activity')\n", (3497, 3514), False, 'from flask_login import current_user, login_required\n'), ((5300, 5310), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (5305, 5310), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((5330, 5394), 'app.service_api_client.get_service_template', 'service_api_client.get_service_template', (['service_id', 'template_id'], {}), '(service_id, template_id)\n', (5369, 5394), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((6675, 6761), 'app.service_api_client.get_service_template', 'service_api_client.get_service_template', (['service_id', 'template_id'], {'version': 'version'}), '(service_id, template_id, version=\n version)\n', (6714, 6761), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((7642, 7728), 
'app.utils.email_or_sms_not_enabled', 'email_or_sms_not_enabled', (['form.template_type.data', "current_service['permissions']"], {}), "(form.template_type.data, current_service[\n 'permissions'])\n", (7666, 7728), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((9145, 9155), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (9150, 9155), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((9249, 9259), 'flask.abort', 'abort', (['(403)'], {}), '(403)\n', (9254, 9259), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((10885, 10895), 'flask.abort', 'abort', (['(403)'], {}), '(403)\n', (10890, 10895), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((11115, 11179), 'app.service_api_client.get_service_template', 'service_api_client.get_service_template', (['service_id', 'template_id'], {}), '(service_id, template_id)\n', (11154, 11179), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((11544, 11820), 'app.utils.get_template', 'get_template', (["{'name': form.name.data, 'content': form.template_content.data, 'subject':\n subject, 'template_type': template['template_type'], 'id': template[\n 'id'], 'process_type': form.process_type.data, 'reply_to_text':\n template['reply_to_text']}", 'current_service'], {}), "({'name': form.name.data, 'content': form.template_content.data,\n 'subject': subject, 'template_type': template['template_type'], 'id':\n template['id'], 'process_type': form.process_type.data, 'reply_to_text':\n template['reply_to_text']}, current_service)\n", (11556, 11820), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((13642, 13706), 'app.service_api_client.get_service_template', 'service_api_client.get_service_template', (['service_id', 'template_id'], {}), '(service_id, template_id)\n', (13681, 13706), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((14555, 14619), 'app.service_api_client.get_service_template', 'service_api_client.get_service_template', (['service_id', 'template_id'], {}), '(service_id, template_id)\n', (14594, 14619), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((14670, 14737), 'app.service_api_client.delete_service_template', 'service_api_client.delete_service_template', (['service_id', 'template_id'], {}), '(service_id, template_id)\n', (14712, 14737), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((14892, 14987), 'app.template_statistics_client.get_template_statistics_for_template', 'template_statistics_client.get_template_statistics_for_template', (['service_id', "template['id']"], {}), "(service_id,\n template['id'])\n", (14955, 14987), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((16239, 16303), 'app.service_api_client.get_service_template', 'service_api_client.get_service_template', (['service_id', 'template_id'], {}), '(service_id, template_id)\n', (16278, 16303), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((17216, 17289), 'flask.url_for', 'url_for', (['""".view_template"""'], {'service_id': 'service_id', 'template_id': 'template_id'}), "('.view_template', service_id=service_id, template_id=template_id)\n", (17223, 17289), False, 'from flask import abort, 
flash, redirect, render_template, request, url_for\n'), ((18379, 18443), 'app.service_api_client.get_service_template', 'service_api_client.get_service_template', (['service_id', 'template_id'], {}), '(service_id, template_id)\n', (18418, 18443), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((18840, 18967), 'app.service_api_client.update_service_template_sender', 'service_api_client.update_service_template_sender', (['service_id', 'template_id', '(form.sender.data if form.sender.data else None)'], {}), '(service_id, template_id, \n form.sender.data if form.sender.data else None)\n', (18889, 18967), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((20853, 20874), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(60)'}), '(seconds=60)\n', (20862, 20874), False, 'from datetime import datetime, timedelta\n'), ((1422, 1494), 'flask.url_for', 'url_for', (['""".send_one_off"""'], {'service_id': 'service_id', 'template_id': 'template_id'}), "('.send_one_off', service_id=service_id, template_id=template_id)\n", (1429, 1494), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((2929, 2989), 'app.utils.get_template', 'get_template', (['template', 'current_service'], {'show_recipient': '(True)'}), '(template, current_service, show_recipient=True)\n', (2941, 2989), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((3956, 4041), 'flask.url_for', 'url_for', (['""".choose_template"""'], {'service_id': "current_service['id']", 'template_type': 'key'}), "('.choose_template', service_id=current_service['id'], template_type=key\n )\n", (3963, 4041), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((5008, 5029), 'app.main.forms.SearchTemplatesForm', 'SearchTemplatesForm', ([], {}), '()\n', (5027, 5029), False, 'from app.main.forms import ChooseTemplateType, EmailTemplateForm, LetterTemplateForm, SearchTemplatesForm, SetTemplateSenderForm, SMSTemplateForm\n'), ((5480, 5504), 'flask.request.args.get', 'request.args.get', (['"""page"""'], {}), "('page')\n", (5496, 5504), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((7227, 7341), 'app.service_api_client.create_service_template', 'service_api_client.create_service_template', (['"""Untitled"""', '"""letter"""', '"""Body"""', 'service_id', '"""Main heading"""', '"""normal"""'], {}), "('Untitled', 'letter', 'Body',\n service_id, 'Main heading', 'normal')\n", (7269, 7341), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((10391, 10518), 'flask.url_for', 'url_for', (['""".action_blocked"""'], {'service_id': 'service_id', 'notification_type': 'template_type', 'return_to': '"""templates"""', 'template_id': '"""0"""'}), "('.action_blocked', service_id=service_id, notification_type=\n template_type, return_to='templates', template_id='0')\n", (10398, 10518), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((12788, 12967), 'app.service_api_client.update_service_template', 'service_api_client.update_service_template', (['template_id', 'form.name.data', "template['template_type']", 'form.template_content.data', 'service_id', 'subject', 'form.process_type.data'], {}), "(template_id, form.name.data,\n template['template_type'], form.template_content.data, service_id,\n subject, form.process_type.data)\n", (12830, 12967), False, 
'from app import current_service, service_api_client, template_statistics_client\n'), ((13835, 13994), 'flask.url_for', 'url_for', (['""".action_blocked"""'], {'service_id': 'service_id', 'notification_type': "db_template['template_type']", 'return_to': '"""view_template"""', 'template_id': 'template_id'}), "('.action_blocked', service_id=service_id, notification_type=\n db_template['template_type'], return_to='view_template', template_id=\n template_id)\n", (13842, 13994), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((14762, 14812), 'flask.url_for', 'url_for', (['""".choose_template"""'], {'service_id': 'service_id'}), "('.choose_template', service_id=service_id)\n", (14769, 14812), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((19034, 19107), 'flask.url_for', 'url_for', (['""".view_template"""'], {'service_id': 'service_id', 'template_id': 'template_id'}), "('.view_template', service_id=service_id, template_id=template_id)\n", (19041, 19107), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((20723, 20740), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (20738, 20740), False, 'from datetime import datetime, timedelta\n'), ((20925, 20943), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (20934, 20943), False, 'from datetime import datetime, timedelta\n'), ((7477, 7570), 'flask.url_for', 'url_for', (['""".view_template"""'], {'service_id': 'service_id', 'template_id': "blank_letter['data']['id']"}), "('.view_template', service_id=service_id, template_id=blank_letter[\n 'data']['id'])\n", (7484, 7570), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((7753, 7897), 'flask.url_for', 'url_for', (['""".action_blocked"""'], {'service_id': 'service_id', 'notification_type': 'form.template_type.data', 'return_to': '"""add_new_template"""', 'template_id': '"""0"""'}), "('.action_blocked', service_id=service_id, notification_type=form.\n template_type.data, return_to='add_new_template', template_id='0')\n", (7760, 7897), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((8030, 8129), 'flask.url_for', 'url_for', (['""".add_service_template"""'], {'service_id': 'service_id', 'template_type': 'form.template_type.data'}), "('.add_service_template', service_id=service_id, template_type=form.\n template_type.data)\n", (8037, 8129), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((10183, 10276), 'flask.url_for', 'url_for', (['""".view_template"""'], {'service_id': 'service_id', 'template_id': "new_template['data']['id']"}), "('.view_template', service_id=service_id, template_id=new_template[\n 'data']['id'])\n", (10190, 10276), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((11929, 11968), 'app.utils.get_template', 'get_template', (['template', 'current_service'], {}), '(template, current_service)\n', (11941, 11968), False, 'from app.utils import email_or_sms_not_enabled, get_template, user_has_permissions\n'), ((12048, 12075), 'flask.request.form.get', 'request.form.get', (['"""confirm"""'], {}), "('confirm')\n", (12064, 12075), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((13486, 13559), 'flask.url_for', 'url_for', (['""".view_template"""'], {'service_id': 'service_id', 'template_id': 'template_id'}), "('.view_template', 
service_id=service_id, template_id=template_id)\n", (13493, 13559), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((19994, 20022), 'notifications_utils.formatters.nl2br', 'nl2br', (['sender[sender_format]'], {}), '(sender[sender_format])\n', (19999, 20022), False, 'from notifications_utils.formatters import nl2br\n'), ((21078, 21095), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (21087, 21095), False, 'from datetime import datetime, timedelta\n'), ((2149, 2258), 'flask.url_for', 'url_for', (['""".view_letter_template_preview"""'], {'service_id': 'service_id', 'template_id': 'template_id', 'filetype': '"""png"""'}), "('.view_letter_template_preview', service_id=service_id, template_id\n =template_id, filetype='png')\n", (2156, 2258), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((2390, 2425), 'app.template_previews.get_page_count_for_letter', 'get_page_count_for_letter', (['template'], {}), '(template)\n', (2415, 2425), False, 'from app.template_previews import TemplatePreview, get_page_count_for_letter\n'), ((5639, 5725), 'app.service_api_client.get_service_template', 'service_api_client.get_service_template', (['service_id', 'template_id'], {'version': 'version'}), '(service_id, template_id, version=\n version)\n', (5678, 5725), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((15252, 15269), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (15267, 15269), False, 'from datetime import datetime, timedelta\n'), ((15786, 15898), 'flask.url_for', 'url_for', (['""".view_letter_template_preview"""'], {'service_id': 'service_id', 'template_id': "template['id']", 'filetype': '"""png"""'}), "('.view_letter_template_preview', service_id=service_id, template_id\n =template['id'], filetype='png')\n", (15793, 15898), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((16527, 16636), 'flask.url_for', 'url_for', (['""".view_letter_template_preview"""'], {'service_id': 'service_id', 'template_id': 'template_id', 'filetype': '"""png"""'}), "('.view_letter_template_preview', service_id=service_id, template_id\n =template_id, filetype='png')\n", (16534, 16636), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((5810, 5936), 'flask.url_for', 'url_for', (['""".view_template_version_preview"""'], {'service_id': 'service_id', 'template_id': 'template_id', 'version': 'version', 'filetype': '"""png"""'}), "('.view_template_version_preview', service_id=service_id,\n template_id=template_id, version=version, filetype='png')\n", (5817, 5936), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((12613, 12647), 'app.main.views.send.get_example_csv_rows', 'get_example_csv_rows', (['new_template'], {}), '(new_template)\n', (12633, 12647), False, 'from app.main.views.send import get_example_csv_rows, get_sender_details\n'), ((12669, 12703), 'app.main.views.send.get_example_csv_rows', 'get_example_csv_rows', (['new_template'], {}), '(new_template)\n', (12689, 12703), False, 'from app.main.views.send import get_example_csv_rows, get_sender_details\n'), ((17753, 17891), 'flask.url_for', 'url_for', (['""".view_template_version_preview"""'], {'service_id': 'service_id', 'template_id': 'template_id', 'version': "template['version']", 'filetype': '"""png"""'}), "('.view_template_version_preview', service_id=service_id,\n 
template_id=template_id, version=template['version'], filetype='png')\n", (17760, 17891), False, 'from flask import abort, flash, redirect, render_template, request, url_for\n'), ((18049, 18122), 'app.service_api_client.get_service_template_versions', 'service_api_client.get_service_template_versions', (['service_id', 'template_id'], {}), '(service_id, template_id)\n', (18097, 18122), False, 'from app import current_service, service_api_client, template_statistics_client\n'), ((20398, 20433), 'dateutil.parser.parse', 'parse', (["template_stats['updated_at']"], {}), "(template_stats['updated_at'])\n", (20403, 20433), False, 'from dateutil.parser import parse\n'), ((15170, 15213), 'dateutil.parser.parse', 'parse', (["last_used_notification['created_at']"], {}), "(last_used_notification['created_at'])\n", (15175, 15213), False, 'from dateutil.parser import parse\n')]
"""Test layered configuration pipeline.""" from pathlib import Path from unittest.mock import patch import yaml from nitinat.pipeline4 import SYSTEM_CONFIG, read_layered_config GET_HOME_DIR = "nitinat.pipeline4._get_home_dir" def make_file(fs, path, contents): fs.create_file(path, contents=yaml.dump(contents)) def test_layered_config_read_system(fs): fs.cwd = "/home/person/project/analysis" expected = {"alpha": 1} make_file(fs, SYSTEM_CONFIG, expected) with patch(GET_HOME_DIR, return_value=Path("/home/person")): actual = read_layered_config("test.yml") assert actual == expected def test_layered_config_read_personal(fs): fs.cwd = "/home/person/project/analysis" expected = {"beta": 2} make_file(fs, "/home/person/.nitinat.yml", expected) with patch(GET_HOME_DIR, return_value=Path("/home/person")): actual = read_layered_config("test.yml") assert actual == expected def test_layered_config_read_project_from_project_root(fs): fs.cwd = "/home/person/project/analysis" expected = {"gamma": 3} make_file(fs, "/home/person/project/.nitinat.yml", expected) with patch(GET_HOME_DIR, return_value=Path("/home/person")): actual = read_layered_config("test.yml") assert actual == expected def test_layered_config_read_project_from_project_subdir(fs): fs.cwd = "/home/person/project/analysis" expected = {"gamma": 3} make_file(fs, "/home/person/project/.nitinat.yml", expected) with patch(GET_HOME_DIR, return_value=Path("/home/person")): actual = read_layered_config("temp/test.yml") assert actual == expected def test_layered_config_combine_files(fs): fs.cwd = "/home/person/project/analysis" make_file(fs, SYSTEM_CONFIG, {"alpha": 1}) make_file(fs, "/home/person/.nitinat.yml", {"beta": 2}) make_file(fs, "/home/person/project/.nitinat.yml", {"gamma": 3}) with patch(GET_HOME_DIR, return_value=Path("/home/person")): actual = read_layered_config("temp/test.yml") assert actual == {"alpha": 1, "beta": 2, "gamma": 3}
[ "nitinat.pipeline4.read_layered_config", "pathlib.Path", "yaml.dump" ]
[((563, 594), 'nitinat.pipeline4.read_layered_config', 'read_layered_config', (['"""test.yml"""'], {}), "('test.yml')\n", (582, 594), False, 'from nitinat.pipeline4 import SYSTEM_CONFIG, read_layered_config\n'), ((881, 912), 'nitinat.pipeline4.read_layered_config', 'read_layered_config', (['"""test.yml"""'], {}), "('test.yml')\n", (900, 912), False, 'from nitinat.pipeline4 import SYSTEM_CONFIG, read_layered_config\n'), ((1225, 1256), 'nitinat.pipeline4.read_layered_config', 'read_layered_config', (['"""test.yml"""'], {}), "('test.yml')\n", (1244, 1256), False, 'from nitinat.pipeline4 import SYSTEM_CONFIG, read_layered_config\n'), ((1571, 1607), 'nitinat.pipeline4.read_layered_config', 'read_layered_config', (['"""temp/test.yml"""'], {}), "('temp/test.yml')\n", (1590, 1607), False, 'from nitinat.pipeline4 import SYSTEM_CONFIG, read_layered_config\n'), ((1986, 2022), 'nitinat.pipeline4.read_layered_config', 'read_layered_config', (['"""temp/test.yml"""'], {}), "('temp/test.yml')\n", (2005, 2022), False, 'from nitinat.pipeline4 import SYSTEM_CONFIG, read_layered_config\n'), ((301, 320), 'yaml.dump', 'yaml.dump', (['contents'], {}), '(contents)\n', (310, 320), False, 'import yaml\n'), ((523, 543), 'pathlib.Path', 'Path', (['"""/home/person"""'], {}), "('/home/person')\n", (527, 543), False, 'from pathlib import Path\n'), ((841, 861), 'pathlib.Path', 'Path', (['"""/home/person"""'], {}), "('/home/person')\n", (845, 861), False, 'from pathlib import Path\n'), ((1185, 1205), 'pathlib.Path', 'Path', (['"""/home/person"""'], {}), "('/home/person')\n", (1189, 1205), False, 'from pathlib import Path\n'), ((1531, 1551), 'pathlib.Path', 'Path', (['"""/home/person"""'], {}), "('/home/person')\n", (1535, 1551), False, 'from pathlib import Path\n'), ((1946, 1966), 'pathlib.Path', 'Path', (['"""/home/person"""'], {}), "('/home/person')\n", (1950, 1966), False, 'from pathlib import Path\n')]
import pytest

from elasticmagic.search import SearchQuery

from .conftest import Car


@pytest.mark.asyncio
async def test_get(es_index, cars):
    doc = await es_index.get(1, doc_cls=Car)
    assert doc.name == '<NAME>'
    assert doc._id == '1'
    assert doc._index == es_index.get_name()
    assert doc._score is None

    doc = await es_index.get(2, doc_cls=Car)
    assert doc.name == '<NAME>'
    assert doc._id == '2'
    assert doc._index == es_index.get_name()
    assert doc._score is None


@pytest.mark.asyncio
async def test_multi_get_by_ids(es_index, cars):
    docs = await es_index.multi_get([1, 2, 3], doc_cls=Car)
    assert len(docs) == 3

    doc = docs[0]
    assert doc.name == '<NAME>'
    assert doc._id == '1'
    assert doc._index == es_index.get_name()
    assert doc._score is None

    doc = docs[1]
    assert doc.name == '<NAME>'
    assert doc._id == '2'
    assert doc._index == es_index.get_name()
    assert doc._score is None

    doc = docs[2]
    assert doc is None


@pytest.mark.asyncio
async def test_multi_get_by_ids_with_doc_cls_as_list(es_index, cars):
    docs = await es_index.multi_get([1, 2], doc_cls=[Car])

    doc = docs[0]
    assert doc.name == '<NAME>'
    assert doc._id == '1'
    assert doc._index == es_index.get_name()
    assert doc._score is None

    doc = docs[1]
    assert doc.name == '<NAME>'
    assert doc._id == '2'
    assert doc._index == es_index.get_name()
    assert doc._score is None


@pytest.mark.asyncio
async def test_multi_get_by_docs(es_index, cars):
    docs = await es_index.multi_get([Car(_id=1), Car(_id=2)])

    doc = docs[0]
    assert doc.name == '<NAME>'
    assert doc._id == '1'
    assert doc._index == es_index.get_name()
    assert doc._score is None

    doc = docs[1]
    assert doc.name == '<NAME>'
    assert doc._id == '2'
    assert doc._index == es_index.get_name()
    assert doc._score is None


@pytest.mark.asyncio
async def test_multi_get_by_dicts(es_index, cars):
    docs = await es_index.multi_get([
        {'_id': 1, '_type': 'car'},
        {'_id': 2, 'doc_cls': Car},
    ])

    doc = docs[0]
    assert doc.name == '<NAME>'
    assert doc._id == '1'
    assert doc._index == es_index.get_name()
    assert doc._score is None

    doc = docs[1]
    assert doc.name == '<NAME>'
    assert doc._id == '2'
    assert doc._index == es_index.get_name()
    assert doc._score is None


@pytest.mark.asyncio
async def test_search(es_index, cars):
    res = await es_index.search(
        SearchQuery(Car.name.match("Lightning"))
    )
    assert res.total == 1
    assert len(res.hits) == 1
    doc = res.hits[0]
    assert doc.name == '<NAME>'
    assert doc._id == '1'
    assert doc._index == es_index.get_name()
    assert doc._score > 0
    assert doc._score == res.max_score


@pytest.mark.asyncio
async def test_count(es_index, cars):
    res = await es_index.count(
        SearchQuery(Car.name.match("Lightning"))
    )
    assert res.count == 1


@pytest.mark.asyncio
async def test_scroll(es_index, cars):
    with pytest.warns(UserWarning, match='Cannot determine document class'):
        search_res = await es_index.search(
            SearchQuery(),
            scroll='1m',
        )
    assert search_res.total == 2
    assert len(search_res.hits) == 2
    assert search_res.scroll_id is not None

    scroll_res = await es_index.scroll(search_res.scroll_id, scroll='1m')
    assert scroll_res.total == 2
    assert len(scroll_res.hits) == 0

    clear_scroll_res = await es_index.clear_scroll(scroll_res.scroll_id)
    assert clear_scroll_res.succeeded is True


@pytest.mark.asyncio
async def test_multi_search(es_index, cars):
    results = await es_index.multi_search([
        SearchQuery(Car.name.match("Lightning")),
        SearchQuery(Car.name.match("Sally")),
    ])
    assert len(results) == 2

    res = results[0]
    assert res.total == 1
    assert len(res.hits) == 1
    doc = res.hits[0]
    assert doc.name == '<NAME>'
    assert doc._id == '1'
    assert doc._index == es_index.get_name()
    assert doc._score > 0
    assert doc._score == res.max_score

    res = results[1]
    assert res.total == 1
    assert len(res.hits) == 1
    doc = res.hits[0]
    assert doc.name == '<NAME>'
    assert doc._id == '2'
    assert doc._index == es_index.get_name()
    assert doc._score > 0
    assert doc._score == res.max_score


@pytest.mark.asyncio
async def test_delete(es_index, cars):
    res = await es_index.delete(1, doc_type='car')
    es_version = await es_index.get_cluster().get_es_version()
    if es_version.major >= 5:
        assert res.result == 'deleted'
    if es_version.major <= 5:
        assert res.found is True


@pytest.mark.asyncio
async def test_delete_by_query(es_index, cars):
    res = await es_index.delete_by_query(
        SearchQuery(Car.name.match("Lightning")),
        refresh=True,
    )
    assert res.deleted == 1
    assert (await es_index.count()).count == 1


@pytest.mark.asyncio
async def test_flush(es_index, cars):
    await es_index.add([Car(name='Mater')])
    res = await es_index.flush()
    assert res
[ "pytest.warns", "elasticmagic.search.SearchQuery" ]
[((3040, 3106), 'pytest.warns', 'pytest.warns', (['UserWarning'], {'match': '"""Cannot determine document class"""'}), "(UserWarning, match='Cannot determine document class')\n", (3052, 3106), False, 'import pytest\n'), ((3164, 3177), 'elasticmagic.search.SearchQuery', 'SearchQuery', ([], {}), '()\n', (3175, 3177), False, 'from elasticmagic.search import SearchQuery\n')]
from os import path, system
from platform import system as osInfo
from time import sleep


def checkFile():
    print("\n")
    checkFileData()
    sleep(0.1)
    print("\n")
    checkFileEssential()
    sleep(0.1)
    print("\n")


def checkFileData():
    try:
        if osInfo() == "Windows":
            if path.isdir("data"):
                if path.exists("data/appData.json"):
                    print("appData.json [✅]")
                else:
                    print("appData.json [❌]")
                    x = input("Start repair your data/appData: (y/N) ")
                    if x == "y":
                        system("repairfiles.exe")
                    elif x == "N":
                        exit()
                    else:
                        exit()
                if path.exists("data/uuidData.json"):
                    print("uuidData.json [✅]")
                else:
                    print("uuidData.json [❌]")
                    x = input("Start repair your data/uuidData: (y/N) ")
                    if x == "y":
                        system("repairfiles.exe")
                    elif x == "N":
                        exit()
                    else:
                        exit()
            else:
                print("'data/' path [❌]")
                x = input("Start repair your 'data/': (y/N) ")
                if x == "y":
                    system("repairfiles.exe")
                elif x == "N":
                    exit()
                else:
                    exit()
        elif osInfo() == "Darwin":
            print("Not supported on this platform for now.")
        elif osInfo() == "Linux":
            print("Not supported on this platform for now.")
        else:
            print("We cannot find your operating system")
    except Exception as error:
        print(f"Error {error}")


def checkFileEssential():
    try:
        if osInfo() == "Windows":
            if path.exists("main.exe"):
                print("main.exe[✅]")
                if path.exists("launcher.exe"):
                    print("launcher.exe [✅]")
                else:
                    print("launcher.exe [❌]")
                    x = input("Start repair your launcher.exe file: (y/N) ")
                    if x == "y":
                        system("repairfiles.exe")
                    elif x == "N":
                        exit()
                    else:
                        exit()
                if path.exists("uuid_gen.exe"):
                    print("uuid_gen.exe [✅]")
                else:
                    print("uuid_gen.exe [❌]")
                    x = input("Start repair your uuid_gen.exe file: (y/N) ")
                    if x == "y":
                        system("repairfiles.exe")
                    elif x == "N":
                        exit()
                    else:
                        exit()
            else:
                print("main.exe [❌]")
                x = input("Start repair your main.exe file: (y/N) ")
                if x == "y":
                    system("repairfiles.exe")
                elif x == "N":
                    exit()
                else:
                    exit()
        elif osInfo() == "Darwin":
            print("Not supported on this platform for now.")
        elif osInfo() == "Linux":
            print("Not supported on this platform for now.")
        else:
            print("We cannot find your operating system")
    except Exception as error:
        print(f"Error {error}")


if __name__ == '__main__':
    checkFile()
    sleep(6)
[ "os.path.isdir", "os.path.exists", "os.system", "time.sleep", "platform.system" ]
[((3828, 3836), 'time.sleep', 'sleep', (['(6)'], {}), '(6)\n', (3833, 3836), False, 'from time import sleep\n'), ((154, 164), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (159, 164), False, 'from time import sleep\n'), ((213, 223), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (218, 223), False, 'from time import sleep\n'), ((289, 297), 'platform.system', 'osInfo', ([], {}), '()\n', (295, 297), True, 'from platform import system as osInfo\n'), ((328, 346), 'os.path.isdir', 'path.isdir', (['"""data"""'], {}), "('data')\n", (338, 346), False, 'from os import path, system\n'), ((2044, 2052), 'platform.system', 'osInfo', ([], {}), '()\n', (2050, 2052), True, 'from platform import system as osInfo\n'), ((2083, 2106), 'os.path.exists', 'path.exists', (['"""main.exe"""'], {}), "('main.exe')\n", (2094, 2106), False, 'from os import path, system\n'), ((368, 400), 'os.path.exists', 'path.exists', (['"""data/appData.json"""'], {}), "('data/appData.json')\n", (379, 400), False, 'from os import path, system\n'), ((848, 881), 'os.path.exists', 'path.exists', (['"""data/uuidData.json"""'], {}), "('data/uuidData.json')\n", (859, 881), False, 'from os import path, system\n'), ((1660, 1668), 'platform.system', 'osInfo', ([], {}), '()\n', (1666, 1668), True, 'from platform import system as osInfo\n'), ((2166, 2193), 'os.path.exists', 'path.exists', (['"""launcher.exe"""'], {}), "('launcher.exe')\n", (2177, 2193), False, 'from os import path, system\n'), ((2644, 2671), 'os.path.exists', 'path.exists', (['"""uuid_gen.exe"""'], {}), "('uuid_gen.exe')\n", (2655, 2671), False, 'from os import path, system\n'), ((3454, 3462), 'platform.system', 'osInfo', ([], {}), '()\n', (3460, 3462), True, 'from platform import system as osInfo\n'), ((1507, 1532), 'os.system', 'system', (['"""repairfiles.exe"""'], {}), "('repairfiles.exe')\n", (1513, 1532), False, 'from os import path, system\n'), ((1760, 1768), 'platform.system', 'osInfo', ([], {}), '()\n', (1766, 1768), True, 'from platform import system as osInfo\n'), ((3301, 3326), 'os.system', 'system', (['"""repairfiles.exe"""'], {}), "('repairfiles.exe')\n", (3307, 3326), False, 'from os import path, system\n'), ((3554, 3562), 'platform.system', 'osInfo', ([], {}), '()\n', (3560, 3562), True, 'from platform import system as osInfo\n'), ((673, 698), 'os.system', 'system', (['"""repairfiles.exe"""'], {}), "('repairfiles.exe')\n", (679, 698), False, 'from os import path, system\n'), ((1157, 1182), 'os.system', 'system', (['"""repairfiles.exe"""'], {}), "('repairfiles.exe')\n", (1163, 1182), False, 'from os import path, system\n'), ((2471, 2496), 'os.system', 'system', (['"""repairfiles.exe"""'], {}), "('repairfiles.exe')\n", (2477, 2496), False, 'from os import path, system\n'), ((2949, 2974), 'os.system', 'system', (['"""repairfiles.exe"""'], {}), "('repairfiles.exe')\n", (2955, 2974), False, 'from os import path, system\n')]
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import time


class EnclosureMouth:
    """
    Listens to enclosure commands for Mycroft's Mouth.

    Performs the associated command on Arduino by writing on the Serial port.
    """

    def __init__(self, ws, writer):
        self.ws = ws
        self.writer = writer
        self.is_timer_on = False
        self.__init_events()

    def __init_events(self):
        self.ws.on('enclosure.mouth.reset', self.reset)
        self.ws.on('enclosure.mouth.talk', self.talk)
        self.ws.on('enclosure.mouth.think', self.think)
        self.ws.on('enclosure.mouth.listen', self.listen)
        self.ws.on('enclosure.mouth.smile', self.smile)
        self.ws.on('enclosure.mouth.viseme', self.viseme)
        self.ws.on('enclosure.mouth.text', self.text)
        self.ws.on('enclosure.mouth.display', self.display)

    def reset(self, event=None):
        self.writer.write("mouth.reset")

    def talk(self, event=None):
        self.writer.write("mouth.talk")

    def think(self, event=None):
        self.writer.write("mouth.think")

    def listen(self, event=None):
        self.writer.write("mouth.listen")

    def smile(self, event=None):
        self.writer.write("mouth.smile")

    def viseme(self, event=None):
        if event and event.data:
            code = event.data.get("code")
            time_until = event.data.get("until")
            # Skip the viseme if the time has expired.  This helps when a
            # system glitch overloads the bus and throws off the timing
            # of the animation.
            if code and (not time_until or time.time() < time_until):
                self.writer.write("mouth.viseme=" + code)

    def text(self, event=None):
        text = ""
        if event and event.data:
            text = event.data.get("text", text)
        self.writer.write("mouth.text=" + text)

    def display(self, event=None):
        code = ""
        xOffset = ""
        yOffset = ""
        clearPrevious = ""
        if event and event.data:
            code = event.data.get("img_code", code)
            xOffset = event.data.get("xOffset", xOffset)
            yOffset = event.data.get("yOffset", yOffset)
            clearPrevious = event.data.get("clearPrev", clearPrevious)

        clearPrevious = int(str(clearPrevious) == "True")
        clearPrevious = "cP=" + str(clearPrevious) + ","
        x_offset = "x=" + str(xOffset) + ","
        y_offset = "y=" + str(yOffset) + ","
        message = "mouth.icon=" + x_offset + y_offset + clearPrevious + code
        # Check if message exceeds Arduino's serial buffer input limit 64 bytes
        if len(message) > 60:
            message1 = message[:31]
            message2 = message[31:]
            message1 += "$"
            message2 += "$"
            message2 = "mouth.icon=" + message2
            self.writer.write(message1)
            time.sleep(0.25)  # writer bugs out if sending messages too rapidly
            self.writer.write(message2)
        else:
            time.sleep(0.1)
            self.writer.write(message)
[ "time.time", "time.sleep" ]
[((3439, 3455), 'time.sleep', 'time.sleep', (['(0.25)'], {}), '(0.25)\n', (3449, 3455), False, 'import time\n'), ((3573, 3588), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (3583, 3588), False, 'import time\n'), ((2159, 2170), 'time.time', 'time.time', ([], {}), '()\n', (2168, 2170), False, 'import time\n')]
import oci
import os
import io
import time
import sys
from pathlib import Path
from oci.config import validate_config
from oci.streaming import StreamClient
from oci.streaming.models import CreateCursorDetails
import base64

# configuration for connection to Oracle OCI
# for user, tenancy you have to specify the OCID
# the key is the key (PEM) you have uploaded to your profile
#
config = {
    "user": "ocid1.XXXXXX",
    "key_file": "/Users/lsaetta/Progetti/xxxx/oci_api_key.pem",
    "fingerprint": "<KEY>",
    "tenancy": "ocid1.ZZZZZ",
    "region": "eu-frankfurt-1"
}

SLEEP_TIME = 2  # in sec.


# check command line params
def check_params():
    N_PARAMS = 1  # expected # of params

    n_params = len(sys.argv)

    if (n_params < (N_PARAMS + 1)):
        print("Usage: stream_subscriber.py partition_id")
        print("")
        sys.exit(-1)
    else:
        print("Running with: ")
        print("partition_id {}".format(sys.argv[1]))
        print("")


def decode(str):
    return base64.b64decode(str).decode('utf-8')


#
# Main
#
print("")
check_params()

validate_config(config)
print("Validate config OK")
print("")

partition_id = sys.argv[1]

stream_id = "ocid1.stream.oc1.eu-frankfurt-1.aaaaaaaafsxpk4zdonaed3d27s5jwhazylryizrqmbd4ihnsgbbkpj3k6saa"

# check on partition_id OK, on offset OK
cursor_details = CreateCursorDetails(partition = partition_id, type = "LATEST")

# initialize consumer
client = StreamClient(config)

print("*** GET cursor ")
response = client.create_cursor(stream_id = stream_id, create_cursor_details = cursor_details)

## extract cursor from response
cursor = response.data.value

# infinite READ loop...
while True:
    # print("*** GET messages ")
    response_mess = client.get_messages(stream_id = stream_id, cursor = cursor)

    # prepare for going forward
    # you need to pass ***new*** cursor
    cursor = response_mess.headers['opc-next-cursor']

    print("*")
    if (len(response_mess.data) > 0):
        print("Messages: ")
        for mess in response_mess.data:
            print(decode(mess.value))

    # sleep before next loop
    time.sleep(SLEEP_TIME)
[ "oci.config.validate_config", "oci.streaming.StreamClient", "base64.b64decode", "time.sleep", "oci.streaming.models.CreateCursorDetails", "sys.exit" ]
[((1076, 1099), 'oci.config.validate_config', 'validate_config', (['config'], {}), '(config)\n', (1091, 1099), False, 'from oci.config import validate_config\n'), ((1334, 1392), 'oci.streaming.models.CreateCursorDetails', 'CreateCursorDetails', ([], {'partition': 'partition_id', 'type': '"""LATEST"""'}), "(partition=partition_id, type='LATEST')\n", (1353, 1392), False, 'from oci.streaming.models import CreateCursorDetails\n'), ((1429, 1449), 'oci.streaming.StreamClient', 'StreamClient', (['config'], {}), '(config)\n', (1441, 1449), False, 'from oci.streaming import StreamClient\n'), ((2106, 2128), 'time.sleep', 'time.sleep', (['SLEEP_TIME'], {}), '(SLEEP_TIME)\n', (2116, 2128), False, 'import time\n'), ((844, 856), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (852, 856), False, 'import sys\n'), ((999, 1020), 'base64.b64decode', 'base64.b64decode', (['str'], {}), '(str)\n', (1015, 1020), False, 'import base64\n')]
import numpy as np
import math
from scipy.interpolate import interp1d
import scipy.linalg as LA
import os
import numpy as np
from skimage.transform import resize
from multiprocessing import Process
import shutil
from tqdm import tqdm
from concurrent.futures import ProcessPoolExecutor, as_completed


def compute_tf_fig(org_sig):
    final_sig = create_extended_sig(org_sig)
    wave2000 = final_sig
    ps_SampleRate = 2000
    s_Len = len(final_sig)
    #exts_len = len(final_sig)
    s_HalfLen = math.floor(s_Len/2)+1
    ps_MinFreqHz = 10
    ps_MaxFreqHz = 500
    ps_FreqSeg = 512
    v_WAxis = np.linspace(0, 2*np.pi, s_Len, endpoint=False)
    v_WAxis = v_WAxis * ps_SampleRate
    v_WAxisHalf = v_WAxis[:s_HalfLen]
    v_FreqAxis = np.linspace(ps_MinFreqHz, ps_MaxFreqHz, num=ps_FreqSeg)  #ps_MinFreqHz:s_FreqStep:ps_MaxFreqHz
    v_FreqAxis = v_FreqAxis[::-1]
    v_InputSignalFFT = np.fft.fft(wave2000)
    ps_StDevCycles = 3
    m_GaborWT = np.zeros((ps_FreqSeg, s_Len), dtype=complex)
    for i, s_FreqCounter in enumerate(v_FreqAxis):
        v_WinFFT = np.zeros(s_Len)
        s_StDevSec = (1 / s_FreqCounter) * ps_StDevCycles
        v_WinFFT[:s_HalfLen] = np.exp(-0.5*np.power(v_WAxisHalf - (2*np.pi*s_FreqCounter), 2) * (s_StDevSec**2))
        v_WinFFT = v_WinFFT * np.sqrt(s_Len) / LA.norm(v_WinFFT, 2)
        m_GaborWT[i, :] = np.fft.ifft(v_InputSignalFFT * v_WinFFT) / np.sqrt(s_StDevSec)
    return s_HalfLen, v_FreqAxis, v_WAxisHalf, v_InputSignalFFT, m_GaborWT


def compute_spectrum(org_sig):
    final_sig = create_extended_sig(org_sig)
    wave2000 = final_sig
    ps_SampleRate = 2000
    s_Len = len(final_sig)
    #exts_len = len(final_sig)
    s_HalfLen = math.floor(s_Len/2)+1
    ps_MinFreqHz = 10
    ps_MaxFreqHz = 500
    ps_FreqSeg = 512
    v_WAxis = np.linspace(0, 2*np.pi, s_Len, endpoint=False)
    v_WAxis = v_WAxis * ps_SampleRate
    v_WAxisHalf = v_WAxis[:s_HalfLen]
    v_FreqAxis = np.linspace(ps_MinFreqHz, ps_MaxFreqHz, num=ps_FreqSeg)  #ps_MinFreqHz:s_FreqStep:ps_MaxFreqHz
    v_FreqAxis = v_FreqAxis[::-1]
    v_InputSignalFFT = np.fft.fft(wave2000)
    ps_StDevCycles = 3
    m_GaborWT = np.zeros((ps_FreqSeg, s_Len), dtype=complex)
    for i, s_FreqCounter in enumerate(v_FreqAxis):
        v_WinFFT = np.zeros(s_Len)
        s_StDevSec = (1 / s_FreqCounter) * ps_StDevCycles
        v_WinFFT[:s_HalfLen] = np.exp(-0.5*np.power(v_WAxisHalf - (2*np.pi*s_FreqCounter), 2) * (s_StDevSec**2))
        v_WinFFT = v_WinFFT * np.sqrt(s_Len) / LA.norm(v_WinFFT, 2)
        m_GaborWT[i, :] = np.fft.ifft(v_InputSignalFFT * v_WinFFT) / np.sqrt(s_StDevSec)
    return resize(np.abs(m_GaborWT[:, 3000:5000]), (224, 224))


def create_extended_sig(wave2000):
    #wave2000 = bb
    s_len = len(wave2000)
    s_halflen = int(np.ceil(s_len/2)) + 1
    sig = wave2000
    start_win = sig[:s_halflen] - sig[0]
    end_win = sig[s_len - s_halflen - 1:] - sig[-1]
    start_win = -start_win[::-1] + sig[0]
    end_win = -end_win[::-1] + sig[-1]
    final_sig = np.concatenate((start_win[:-1], sig, end_win[1:]))
    #print(s_halflen, start_win.shape, end_win.shape, sig.shape, final_sig.shape)
    if len(final_sig) % 2 == 0:
        final_sig = final_sig[:-1]
    return final_sig


def strip_key(key):
    key = key.strip()
    key = key.replace('EEG', '').strip()
    key = key.replace('Ref', '').strip()
    key = key.replace('-', '').strip()
    key = key.replace('_', ' ').strip()
    key = key.split(" ")
    if len(key) > 1:
        key = key[1]
    else:
        key = key[0]
    return key


def normalized(a, max_=2000-11):
    c = (max_*(a - np.min(a))/np.ptp(a)).astype(int)
    c = c + 5
    return c


def construct_features(raw_signal, length=1000):
    #HFO with spike
    canvas = np.zeros((2*length, 2*length))
    hfo_spike = normalized(raw_signal)
    index = np.arange(len(hfo_spike))
    for ii in range(3):
        canvas[index, hfo_spike-ii] = 256
        canvas[index, hfo_spike+ii] = 256
    spike_image = resize(canvas, (224, 224))
    intensity_image = np.zeros_like(canvas)
    intensity_image[index, :] = raw_signal
    hfo_image = resize(intensity_image, (224, 224))
    return spike_image, hfo_image


def clean_folder(saved_fn):
    if not os.path.exists(saved_fn):
        #os.mkdir(saved_fn)
        os.makedirs(saved_fn)
    else:
        shutil.rmtree(saved_fn)
        os.mkdir(saved_fn)


def parallel_process(array, function, n_jobs=16, use_kwargs=False, front_num=3):
    """
        A parallel version of the map function with a progress bar.

        Args:
            array (array-like): An array to iterate over.
            function (function): A python function to apply to the elements of array
            n_jobs (int, default=16): The number of cores to use
            use_kwargs (boolean, default=False): Whether to consider the elements of array as
                dictionaries of keyword arguments to function
            front_num (int, default=3): The number of iterations to run serially before kicking
                off the parallel job. Useful for catching bugs
        Returns:
            [function(array[0]), function(array[1]), ...]
    """
    # Default to an empty prefix so the final return works when front_num == 0
    front = []
    #We run the first few iterations serially to catch bugs
    if front_num > 0:
        front = [function(**a) if use_kwargs else function(a) for a in array[:front_num]]
    #If we set n_jobs to 1, just run a list comprehension. This is useful for benchmarking and debugging.
    if n_jobs == 1:
        return front + [function(**a) if use_kwargs else function(a) for a in tqdm(array[front_num:])]
    #Assemble the workers
    with ProcessPoolExecutor(max_workers=n_jobs) as pool:
        #Pass the elements of array into function
        if use_kwargs:
            futures = [pool.submit(function, **a) for a in array[front_num:]]
        else:
            futures = [pool.submit(function, a) for a in array[front_num:]]
        kwargs = {
            'total': len(futures),
            'unit': 'it',
            'unit_scale': True,
            'leave': True
        }
        #Print out the progress as tasks complete
        for f in tqdm(as_completed(futures), **kwargs):
            pass
    out = []
    #Get the results from the futures.
    for i, future in tqdm(enumerate(futures)):
        try:
            out.append(future.result())
        except Exception as e:
            out.append(e)
    return front + out
[ "os.mkdir", "numpy.abs", "concurrent.futures.ProcessPoolExecutor", "skimage.transform.resize", "shutil.rmtree", "numpy.zeros_like", "numpy.fft.fft", "numpy.power", "os.path.exists", "numpy.linspace", "numpy.fft.ifft", "tqdm.tqdm", "numpy.ceil", "numpy.min", "concurrent.futures.as_completed", "numpy.concatenate", "os.makedirs", "numpy.ptp", "numpy.zeros", "math.floor", "scipy.linalg.norm", "numpy.sqrt" ]
[((604, 652), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', 's_Len'], {'endpoint': '(False)'}), '(0, 2 * np.pi, s_Len, endpoint=False)\n', (615, 652), True, 'import numpy as np\n'), ((743, 798), 'numpy.linspace', 'np.linspace', (['ps_MinFreqHz', 'ps_MaxFreqHz'], {'num': 'ps_FreqSeg'}), '(ps_MinFreqHz, ps_MaxFreqHz, num=ps_FreqSeg)\n', (754, 798), True, 'import numpy as np\n'), ((897, 917), 'numpy.fft.fft', 'np.fft.fft', (['wave2000'], {}), '(wave2000)\n', (907, 917), True, 'import numpy as np\n'), ((957, 1001), 'numpy.zeros', 'np.zeros', (['(ps_FreqSeg, s_Len)'], {'dtype': 'complex'}), '((ps_FreqSeg, s_Len), dtype=complex)\n', (965, 1001), True, 'import numpy as np\n'), ((1811, 1859), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', 's_Len'], {'endpoint': '(False)'}), '(0, 2 * np.pi, s_Len, endpoint=False)\n', (1822, 1859), True, 'import numpy as np\n'), ((1950, 2005), 'numpy.linspace', 'np.linspace', (['ps_MinFreqHz', 'ps_MaxFreqHz'], {'num': 'ps_FreqSeg'}), '(ps_MinFreqHz, ps_MaxFreqHz, num=ps_FreqSeg)\n', (1961, 2005), True, 'import numpy as np\n'), ((2104, 2124), 'numpy.fft.fft', 'np.fft.fft', (['wave2000'], {}), '(wave2000)\n', (2114, 2124), True, 'import numpy as np\n'), ((2164, 2208), 'numpy.zeros', 'np.zeros', (['(ps_FreqSeg, s_Len)'], {'dtype': 'complex'}), '((ps_FreqSeg, s_Len), dtype=complex)\n', (2172, 2208), True, 'import numpy as np\n'), ((3026, 3076), 'numpy.concatenate', 'np.concatenate', (['(start_win[:-1], sig, end_win[1:])'], {}), '((start_win[:-1], sig, end_win[1:]))\n', (3040, 3076), True, 'import numpy as np\n'), ((3768, 3802), 'numpy.zeros', 'np.zeros', (['(2 * length, 2 * length)'], {}), '((2 * length, 2 * length))\n', (3776, 3802), True, 'import numpy as np\n'), ((4001, 4027), 'skimage.transform.resize', 'resize', (['canvas', '(224, 224)'], {}), '(canvas, (224, 224))\n', (4007, 4027), False, 'from skimage.transform import resize\n'), ((4051, 4072), 'numpy.zeros_like', 'np.zeros_like', (['canvas'], {}), '(canvas)\n', (4064, 4072), True, 'import numpy as np\n'), ((4132, 4167), 'skimage.transform.resize', 'resize', (['intensity_image', '(224, 224)'], {}), '(intensity_image, (224, 224))\n', (4138, 4167), False, 'from skimage.transform import resize\n'), ((501, 522), 'math.floor', 'math.floor', (['(s_Len / 2)'], {}), '(s_Len / 2)\n', (511, 522), False, 'import math\n'), ((1071, 1086), 'numpy.zeros', 'np.zeros', (['s_Len'], {}), '(s_Len)\n', (1079, 1086), True, 'import numpy as np\n'), ((1708, 1729), 'math.floor', 'math.floor', (['(s_Len / 2)'], {}), '(s_Len / 2)\n', (1718, 1729), False, 'import math\n'), ((2278, 2293), 'numpy.zeros', 'np.zeros', (['s_Len'], {}), '(s_Len)\n', (2286, 2293), True, 'import numpy as np\n'), ((2650, 2681), 'numpy.abs', 'np.abs', (['m_GaborWT[:, 3000:5000]'], {}), '(m_GaborWT[:, 3000:5000])\n', (2656, 2681), True, 'import numpy as np\n'), ((4243, 4267), 'os.path.exists', 'os.path.exists', (['saved_fn'], {}), '(saved_fn)\n', (4257, 4267), False, 'import os\n'), ((4305, 4326), 'os.makedirs', 'os.makedirs', (['saved_fn'], {}), '(saved_fn)\n', (4316, 4326), False, 'import os\n'), ((4345, 4368), 'shutil.rmtree', 'shutil.rmtree', (['saved_fn'], {}), '(saved_fn)\n', (4358, 4368), False, 'import shutil\n'), ((4377, 4395), 'os.mkdir', 'os.mkdir', (['saved_fn'], {}), '(saved_fn)\n', (4385, 4395), False, 'import os\n'), ((5613, 5652), 'concurrent.futures.ProcessPoolExecutor', 'ProcessPoolExecutor', ([], {'max_workers': 'n_jobs'}), '(max_workers=n_jobs)\n', (5632, 5652), False, 'from concurrent.futures import ProcessPoolExecutor, as_completed\n'), ((1318, 1338), 'scipy.linalg.norm', 'LA.norm', (['v_WinFFT', '(2)'], {}), '(v_WinFFT, 2)\n', (1325, 1338), True, 'import scipy.linalg as LA\n'), ((1365, 1405), 'numpy.fft.ifft', 'np.fft.ifft', (['(v_InputSignalFFT * v_WinFFT)'], {}), '(v_InputSignalFFT * v_WinFFT)\n', (1376, 1405), True, 'import numpy as np\n'), ((1405, 1424), 'numpy.sqrt', 'np.sqrt', (['s_StDevSec'], {}), '(s_StDevSec)\n', (1412, 1424), True, 'import numpy as np\n'), ((2525, 2545), 'scipy.linalg.norm', 'LA.norm', (['v_WinFFT', '(2)'], {}), '(v_WinFFT, 2)\n', (2532, 2545), True, 'import scipy.linalg as LA\n'), ((2572, 2612), 'numpy.fft.ifft', 'np.fft.ifft', (['(v_InputSignalFFT * v_WinFFT)'], {}), '(v_InputSignalFFT * v_WinFFT)\n', (2583, 2612), True, 'import numpy as np\n'), ((2612, 2631), 'numpy.sqrt', 'np.sqrt', (['s_StDevSec'], {}), '(s_StDevSec)\n', (2619, 2631), True, 'import numpy as np\n'), ((2795, 2813), 'numpy.ceil', 'np.ceil', (['(s_len / 2)'], {}), '(s_len / 2)\n', (2802, 2813), True, 'import numpy as np\n'), ((6123, 6144), 'concurrent.futures.as_completed', 'as_completed', (['futures'], {}), '(futures)\n', (6135, 6144), False, 'from concurrent.futures import ProcessPoolExecutor, as_completed\n'), ((1302, 1316), 'numpy.sqrt', 'np.sqrt', (['s_Len'], {}), '(s_Len)\n', (1309, 1316), True, 'import numpy as np\n'), ((2509, 2523), 'numpy.sqrt', 'np.sqrt', (['s_Len'], {}), '(s_Len)\n', (2516, 2523), True, 'import numpy as np\n'), ((3633, 3642), 'numpy.ptp', 'np.ptp', (['a'], {}), '(a)\n', (3639, 3642), True, 'import numpy as np\n'), ((1188, 1240), 'numpy.power', 'np.power', (['(v_WAxisHalf - 2 * np.pi * s_FreqCounter)', '(2)'], {}), '(v_WAxisHalf - 2 * np.pi * s_FreqCounter, 2)\n', (1196, 1240), True, 'import numpy as np\n'), ((2395, 2447), 'numpy.power', 'np.power', (['(v_WAxisHalf - 2 * np.pi * s_FreqCounter)', '(2)'], {}), '(v_WAxisHalf - 2 * np.pi * s_FreqCounter, 2)\n', (2403, 2447), True, 'import numpy as np\n'), ((5553, 5576), 'tqdm.tqdm', 'tqdm', (['array[front_num:]'], {}), '(array[front_num:])\n', (5557, 5576), False, 'from tqdm import tqdm\n'), ((3622, 3631), 'numpy.min', 'np.min', (['a'], {}), '(a)\n', (3628, 3631), True, 'import numpy as np\n')]
from django.shortcuts import render
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from .models import Country, Covid_Cases
import pymongo
from .forms import CountryForm

# Connect to MongoDb
client = pymongo.MongoClient('mongodb://localhost:27017/')
if client:
    DB = client['termproject']


# Create your views here.
def country_list(req):
    country = Country.objects.all()
    covid_cases = Covid_Cases.objects.all()

    country_list = []
    for i in country:
        for j in covid_cases:
            if (i.id == j.id):
                country_list.append({
                    'id': i.id,
                    'name': i.name,
                    'flag': i.flag,
                    'area': i.area,
                    'population': i.population,
                    'total_cases': j.total_cases,
                    'cases_milion': str(round((float(1000000) * float(str(j.total_cases).replace(',', ''))) / float(str(i.population).replace(',', '')), 2)),
                    'new_cases': j.new_cases,
                    'total_deaths': j.total_deaths,
                })

    context = {
        'country_list': country_list
    }
    return render(req, 'countries/country_list.html', context)


def country_edit(req, id):
    country = Country.objects.get(id=id)
    covid_cases = Covid_Cases.objects.get(id=id)

    selected_country = {
        'id': id,
        'name': country.name,
        'flag': country.flag,
        'area': country.area,
        'population': country.population,
        'total_cases': str(covid_cases.total_cases).replace(',', ''),
        'new_cases': str(covid_cases.new_cases).replace(',', ''),
        'total_deaths': str(covid_cases.total_deaths).replace(',', ''),
    }

    context = {
        'country': selected_country,
    }
    return render(req, 'countries/edit_country.html', context)


def country_add(req):
    return render(req, 'countries/add_country.html')


def country_insert(req):
    if req.method == 'POST':
        form = CountryForm(req.POST)
        if form.is_valid():
            id = form.cleaned_data['id']
            name = form.cleaned_data['name']
            population = form.cleaned_data['population']
            area = form.cleaned_data['area']
            flag = form.cleaned_data['flag']
            total_cases = form.cleaned_data['total_cases']
            new_cases = form.cleaned_data['new_cases']
            total_deaths = form.cleaned_data['total_deaths']

            country_document = {
                'id': name + "_" + id,
                'name': name,
                'population': population,
                'area': area,
                'flag': flag
            }
            covid_cases_document = {
                'id': name + "_" + id,
                'country_name': name,
                'total_cases': total_cases,
                'new_cases': new_cases,
                'total_deaths': total_deaths,
            }

            col = DB['countries_country']
            if col:
                document_exists = col.find_one({'id': id})
                if document_exists:
                    col.delete_one({'id': id})
                col.update_one(country_document, {
                    '$set': country_document}, upsert=True)

            col = DB['countries_covid_cases']
            if col:
                document_exists = col.find_one({'id': id})
                if document_exists:
                    col.delete_one({'id': id})
                col.update_one(covid_cases_document, {
                    '$set': covid_cases_document}, upsert=True)

            return HttpResponseRedirect('/')
    else:
        form = CountryForm()
    return render(req, 'countries/add_country.html', {'form': form})


def country_update(req):
    if req.method == 'POST':
        form = CountryForm(req.POST)
        if form.is_valid():
            id = form.cleaned_data['id']
            name = form.cleaned_data['name']
            population = form.cleaned_data['population']
            area = form.cleaned_data['area']
            flag = form.cleaned_data['flag']
            total_cases = form.cleaned_data['total_cases']
            new_cases = form.cleaned_data['new_cases']
            total_deaths = form.cleaned_data['total_deaths']

            # Update into DB
            country_document = {
                'id': id,
                'name': name,
                'population': population,
                'area': area,
                'flag': flag
            }
            covid_cases_document = {
                'id': id,
                'country_name': name,
                'total_cases': total_cases,
                'new_cases': new_cases,
                'total_deaths': total_deaths,
            }

            col = DB['countries_country']
            if col:
                document_exists = col.find_one({'id': id})
                if document_exists:
                    col.delete_one({'id': id})
                col.update_one(country_document, {
                    '$set': country_document}, upsert=True)

            col = DB['countries_covid_cases']
            if col:
                document_exists = col.find_one({'id': id})
                if document_exists:
                    col.delete_one({'id': id})
                col.update_one(covid_cases_document, {
                    '$set': covid_cases_document}, upsert=True)

            return HttpResponseRedirect('/')
    else:
        form = CountryForm()
    return render(req, 'countries/edit_country.html', {'form': form})


def country_delete(req, id):
    col = DB['countries_country']
    if col:
        document_exists = col.find_one({'id': id})
        if document_exists:
            col.delete_one({'id': id})

    col = DB['countries_covid_cases']
    if col:
        document_exists = col.find_one({'id': id})
        if document_exists:
            col.delete_one({'id': id})

    return HttpResponseRedirect('/')
[ "pymongo.MongoClient", "django.http.HttpResponseRedirect", "django.shortcuts.render" ]
[((237, 286), 'pymongo.MongoClient', 'pymongo.MongoClient', (['"""mongodb://localhost:27017/"""'], {}), "('mongodb://localhost:27017/')\n", (256, 286), False, 'import pymongo\n'), ((1189, 1240), 'django.shortcuts.render', 'render', (['req', '"""countries/country_list.html"""', 'context'], {}), "(req, 'countries/country_list.html', context)\n", (1195, 1240), False, 'from django.shortcuts import render\n'), ((1822, 1873), 'django.shortcuts.render', 'render', (['req', '"""countries/edit_country.html"""', 'context'], {}), "(req, 'countries/edit_country.html', context)\n", (1828, 1873), False, 'from django.shortcuts import render\n'), ((1909, 1950), 'django.shortcuts.render', 'render', (['req', '"""countries/add_country.html"""'], {}), "(req, 'countries/add_country.html')\n", (1915, 1950), False, 'from django.shortcuts import render\n'), ((3722, 3779), 'django.shortcuts.render', 'render', (['req', '"""countries/add_country.html"""', "{'form': form}"], {}), "(req, 'countries/add_country.html', {'form': form})\n", (3728, 3779), False, 'from django.shortcuts import render\n'), ((5554, 5612), 'django.shortcuts.render', 'render', (['req', '"""countries/edit_country.html"""', "{'form': form}"], {}), "(req, 'countries/edit_country.html', {'form': form})\n", (5560, 5612), False, 'from django.shortcuts import render\n'), ((5989, 6014), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/"""'], {}), "('/')\n", (6009, 6014), False, 'from django.http import HttpResponseRedirect\n'), ((3645, 3670), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/"""'], {}), "('/')\n", (3665, 3670), False, 'from django.http import HttpResponseRedirect\n'), ((5477, 5502), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/"""'], {}), "('/')\n", (5497, 5502), False, 'from django.http import HttpResponseRedirect\n')]